
All Samples(5)  |  Call(5)  |  Derive(0)  |  Import(0)

src/f/e/feedcache-1.4.1/feedcache/example_threads.py   (feedcache)
            break
 
        feed_data = c.fetch(next_url)
        for entry in feed_data.entries:
            output_queue.put( (feed_data.feed, entry) )
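The example_threads.py excerpt above is the body of a worker loop: URLs come in on one queue, each is fetched through the shared cache, and (feed, entry) pairs go out on another queue. A hedged sketch of that pattern follows, with illustrative names, a placeholder shelve filename and feed URL, and a None sentinel to stop the worker; it is not necessarily the exact code of example_threads.py.

import queue
import shelve
import threading

from feedcache import cache

def fetch_worker(storage, input_queue, output_queue):
    # One worker: pull URLs, fetch through the cache, emit (feed, entry) pairs.
    c = cache.Cache(storage)
    while True:
        next_url = input_queue.get()
        if next_url is None:                 # sentinel: no more work
            input_queue.task_done()
            break
        feed_data = c.fetch(next_url)
        for entry in feed_data.entries:
            output_queue.put((feed_data.feed, entry))
        input_queue.task_done()

if __name__ == '__main__':
    storage = shelve.open('feedcache.db')    # placeholder filename
    in_q, out_q = queue.Queue(), queue.Queue()
    worker = threading.Thread(target=fetch_worker, args=(storage, in_q, out_q))
    worker.start()
    in_q.put('http://feedparser.org/docs/examples/atom10.xml')   # placeholder URL
    in_q.put(None)
    worker.join()
    while not out_q.empty():
        feed, entry = out_q.get()
        print(feed.title, entry.title)
    storage.close()

With more than one worker sharing a store, access to the storage needs locking; that is what the CacheStorageLock sample at the bottom of this page deals with.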

src/f/e/feedcache-1.4.1/feedcache/example.py   (feedcache)
        fc = cache.Cache(storage)
        for url in urls:
            parsed_data = fc.fetch(url)
            print parsed_data.feed.title
            for entry in parsed_data.entries:
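That example.py excerpt is cut off mid-loop and uses a Python 2 print statement; a self-contained version of the same pattern might look like the sketch below, where the shelve filename, URL list, and entry formatting are placeholders.

import shelve

from feedcache import cache

urls = ['http://feedparser.org/docs/examples/atom10.xml']    # placeholder feed list
storage = shelve.open('feedcache.db')                        # any dict-like store works
try:
    fc = cache.Cache(storage)
    for url in urls:
        parsed_data = fc.fetch(url)          # parsed feed, served from the cache while still fresh
        print(parsed_data.feed.title)
        for entry in parsed_data.entries:
            print('   ', entry.title)
finally:
    storage.close()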

src/x/b/xbmc-newznab-HEAD/default.py   (xbmc-newznab)
def load_xml(url):
    # Cache the url calls
    return CACHE.fetch(url)
 
def search(dialog_name, index):

src/f/e/feedcache-1.4.1/feedcache/test_shovefilesystem.py   (feedcache)
        try:
            fc = Cache(storage)
            parsed_data = fc.fetch(self.TEST_URL)
            self.failUnlessEqual(parsed_data.feed.title, 'CacheTest test data')
        finally:
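test_shovefilesystem.py shows that the storage argument does not have to be a shelf; any dict-like mapping works, here a filesystem-backed store from the third-party shove package. Below is a rough sketch of that combination, assuming shove's Shove('file://...') constructor and close() method; check the shove documentation before relying on it.

from shove import Shove                      # third-party dict-like store
from feedcache.cache import Cache

storage = Shove('file:///tmp/feedcache-store')     # placeholder store URI
try:
    fc = Cache(storage)
    parsed_data = fc.fetch('http://feedparser.org/docs/examples/atom10.xml')   # placeholder URL
    print(parsed_data.feed.title)
finally:
    storage.close()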

src/f/e/feedcache-1.4.1/feedcache/test_cachestoragelock.py   (feedcache)
 
            # First fetch the data through the cache
            parsed_data = fc.fetch(self.TEST_URL)
            self.failUnlessEqual(parsed_data.feed.title, 'CacheTest test data')
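The last excerpt is from the test for CacheStorageLock, a wrapper in the feedcache package (per the cachestoragelock module name in the path above) that serializes access to a shelve-backed store so one Cache can be shared between threads. A hedged sketch of that usage, with placeholder filename and URL:

import shelve

from feedcache.cache import Cache
from feedcache.cachestoragelock import CacheStorageLock

storage = shelve.open('feedcache.db')        # placeholder filename
try:
    # The lock wrapper makes reads and writes to the shared shelf take turns.
    fc = Cache(CacheStorageLock(storage))
    parsed_data = fc.fetch('http://feedparser.org/docs/examples/atom10.xml')   # placeholder URL
    print(parsed_data.feed.title)
finally:
    storage.close()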