Did I find the right examples for you? yes no      Crawl my project      Python Jobs

All Samples(8)  |  Call(4)  |  Derive(0)  |  Import(4)

src/a/i/AINews-HEAD/CorpusCategories.py   AINews(Download)
 
import sys
from AINewsDB import AINewsDB
 
if __name__ == "__main__":
    # NOTE(review): this excerpt is truncated — the line below is the tail of a
    # list literal (presumably something like `categories = ["AIOverview", ...,`)
    # whose opening line is missing from this snippet; as shown it is not
    # syntactically valid Python. Confirm against the full CorpusCategories.py.
                     "Speech", "Systems","Vision"]
 
    # Handle to the project's database layer (project-local class; schema and
    # connection details not visible in this excerpt).
    db = AINewsDB()
 
    # Accumulator mapping — presumably url -> occurrence count, populated by
    # code that follows beyond this excerpt; verify against the full file.
    url_counts = {}

src/a/i/AINews-HEAD/AINewsPublisher.py   AINews(Download)
from AINewsTools import savefile
from AINewsConfig import config, paths, blacklist_urls
from AINewsDB import AINewsDB
from AINewsCorpus import AINewsCorpus
from AINewsDuplicates import AINewsDuplicates
    def __init__(self):
        """Initialize run-scoped publisher state.

        Reads the debug flag and look-back period from the shared ``config``
        mapping, anchors today's date, and opens a database handle.

        NOTE(review): this snippet is cut at L27 — the original ``__init__``
        may contain further statements not shown here; confirm against the
        full AINewsPublisher.py.
        """
        self.debug = config['ainews.debug']  # debug flag from project config
        self.today = date.today()  # anchor date for this run
        # Earliest date considered: today minus the configured 'ainews.period'
        # (stored as a string in config, hence the int() conversion).
        self.earliest_date = self.today - timedelta(days = int(config['ainews.period']))
        self.db = AINewsDB()  # project database access object

src/a/i/AINews-HEAD/AINewsCrawler.py   AINews(Download)
from AINewsConfig import config, paths, blacklist_words
from AINewsTools import trunc, convert_to_printable
from AINewsDB import AINewsDB
from AINewsSummarizer import AINewsSummarizer
 
class AINewsCrawler:
    """News crawler for the AINews project.

    NOTE(review): this excerpt shows only the start of the class; further
    methods presumably follow in the full AINewsCrawler.py source.
    """
    def __init__(self):
        """Set up run-scoped crawler state.

        Anchors today's date, derives the earliest acceptable article date
        from the 'ainews.period' config value, and opens a database handle.
        """
        self.today = date.today()  # anchor date for this crawl run
        # Earliest article date accepted: today minus the configured period
        # (config value stored as a string, hence the int() conversion).
        self.earliest_date = self.today - timedelta(days = int(config['ainews.period']))
        self.db = AINewsDB()  # project database access object

src/a/i/AINews-HEAD/AINewsCorpus.py   AINews(Download)
from itertools import izip
from AINewsConfig import config, paths
from AINewsDB import AINewsDB
from AINewsTextProcessor import AINewsTextProcessor
 
        self.wordids = {}
 
        self.db = AINewsDB()
 
        self.categories = ["AIOverview","Agents", "Applications", \