All samples (7)  |  Call (0)  |  Derive (0)  |  Import (7)

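Every sample below imports shared settings from AINewsConfig, so the page effectively documents the project's common setup pattern. A minimal sketch of that pattern, assuming only names that appear in the samples (config, paths, AINewsDB); the setup() helper and the no-argument AINewsDB constructor are assumptions for illustration:

from AINewsConfig import config, paths
from AINewsDB import AINewsDB

def setup():
    # config and paths are the project-wide settings objects imported by
    # every sample on this page; their internal structure is not shown
    # here, so this helper only illustrates the shared import pattern.
    db = AINewsDB()  # no-argument constructor is an assumption
    return config, paths, db
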
src/a/i/AINews-HEAD/AINewsTextProcessor.py   AINews
import types
import nltk
from AINewsConfig import config, stopwords, whitelist
 
class AINewsTextProcessor:

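The sample above reveals only the class name and its imports (nltk, plus stopwords and whitelist from AINewsConfig). A hedged sketch of how a processor with those imports could work; DemoTextProcessor and its tokenize method are illustrative stand-ins, not the project's actual API, and stopwords/whitelist are assumed to be collections of words:

import nltk
from AINewsConfig import stopwords, whitelist

class DemoTextProcessor:
    def tokenize(self, text):
        # Split with nltk, drop configured stopwords, always keep
        # whitelisted terms (assumed semantics of the two imports).
        words = nltk.word_tokenize(text.lower())
        return [w for w in words if w in whitelist or w not in stopwords]
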
src/a/i/AINews-HEAD/AINewsPublisher.py   AINews
from datetime import date, datetime, timedelta
from AINewsTools import savefile
from AINewsConfig import config, paths, blacklist_urls
from AINewsDB import AINewsDB
from AINewsCorpus import AINewsCorpus

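The publisher imports blacklist_urls alongside its database and corpus helpers, which suggests it screens article URLs before publishing. A hedged sketch of that screening step; the keep_publishable helper and the treatment of blacklist_urls as an iterable of URL prefixes are assumptions:

from AINewsConfig import blacklist_urls

def keep_publishable(urls):
    # Keep only URLs that do not start with a blacklisted prefix
    # (prefix matching is an assumption for illustration).
    return [u for u in urls
            if not any(u.startswith(b) for b in blacklist_urls)]
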
src/a/i/AINews-HEAD/AINewsCrawler.py   AINews
import urllib2
 
from AINewsConfig import config, paths, blacklist_words
from AINewsTools import trunc, convert_to_printable
from AINewsDB import AINewsDB

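The crawler is built on urllib2, which dates the project to Python 2. A minimal fetch in that style, purely illustrative; the fetch helper, URL handling, and error handling are not from the project:

import urllib2

def fetch(url):
    # Plain urllib2 fetch; convert_to_printable from AINewsTools would
    # presumably clean the result, but its behavior is not shown here.
    try:
        return urllib2.urlopen(url, timeout=10).read()
    except urllib2.URLError:
        return None
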
src/a/i/AINews-HEAD/AINewsWekaClassifier.py   AINews
from subprocess import *
from AINewsCorpus import AINewsCorpus
from AINewsConfig import config, paths
from AINewsTextProcessor import AINewsTextProcessor
 

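The Weka classifier pulls in subprocess, which suggests it shells out to Weka's Java tools rather than using a Python binding. A hedged sketch of that pattern; the weka.jar classpath, the NaiveBayes class, and the ARFF argument come from standard Weka command-line usage, not from this project:

from subprocess import Popen, PIPE

def run_weka(arff_path):
    # Train/evaluate a Weka classifier on an ARFF file and capture its
    # textual report (standard Weka CLI; the details are assumptions).
    cmd = ["java", "-cp", "weka.jar",
           "weka.classifiers.bayes.NaiveBayes", "-t", arff_path]
    proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    return out
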
src/a/i/AINews-HEAD/AINewsCorpus.py   AINews
import re
from itertools import izip
from AINewsConfig import config, paths
from AINewsDB import AINewsDB
from AINewsTextProcessor import AINewsTextProcessor

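The corpus module relies on itertools.izip, another Python 2 marker (plain zip replaces it in Python 3). A small illustration of the pairing idiom, not taken from the project; the urlid and word-list data are made up:

from itertools import izip

urlids = [101, 102, 103]
wordlists = [["ai"], ["robot", "learning"], ["nlp"]]
for urlid, words in izip(urlids, wordlists):
    # izip pairs the sequences lazily, without building a combined list.
    print urlid, len(words)
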
src/a/i/AINews-HEAD/AINews.py   AINews
import locale
 
from AINewsConfig import config, paths
from AINewsCrawler import AINewsCrawler
from AINewsPublisher import AINewsPublisher

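The entry script AINews.py imports both the crawler and the publisher, which suggests a crawl-then-publish pipeline. A hedged sketch of such a driver; the no-argument constructors and the crawl() and publish() method names are assumptions, since this page shows only the imports:

from AINewsCrawler import AINewsCrawler
from AINewsPublisher import AINewsPublisher

def main():
    # Hypothetical pipeline order inferred from the imports alone.
    crawler = AINewsCrawler()
    crawler.crawl()
    publisher = AINewsPublisher()
    publisher.publish()

if __name__ == "__main__":
    main()
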
src/a/i/AINews-HEAD/AINewsDuplicates.py   AINews
from datetime import date, timedelta
from AINewsCorpus import AINewsCorpus
from AINewsConfig import config
 
def add_to_duplicates(duplicates, urlid1, urlid2):
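
The last sample stops at the signature of add_to_duplicates. A hedged guess at what a function with that signature might do (record that two article url ids belong to the same duplicate group); the dict-of-sets layout is an assumption, not the project's code:

def add_to_duplicates(duplicates, urlid1, urlid2):
    # Assumed layout: duplicates maps a representative urlid to the set
    # of urlids judged to be duplicates of it.
    duplicates.setdefault(urlid1, set()).add(urlid2)
    return duplicates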