Did I find the right examples for you? yes no      Crawl my project      Python Jobs

All Samples(336)  |  Call(138)  |  Derive(3)  |  Import(195)
Sets up generic caching, creating an instance of both CacheInRam and
CacheOnDisk.
In the case of GAE, it will make use of gluon.contrib.gae_memcache.

- self.ram is an instance of CacheInRam
- self.disk is an instance of CacheOnDisk

src/s/n/sneakernet-HEAD/appengine/teamadmin.py   sneakernet(Download)
  def get(self):
    # NOTE(review): scraped snippet — the original method appears elided here
    # (inconsistent indentation, an `if` with no indented body, and `ctype` is
    # never defined in the visible code; presumably derived from `type` above —
    # confirm against the full source file).
    #get the whole team's caches
    from cache import CacheLocation, Cache, TYPE_LOCAL_FD, TYPE_EXTERN_FD
    from team import get_team_for_team_leader
    from user_sn import confirm
        # Authenticate the caller, then resolve the team they lead.
        u = confirm(self)
        team = get_team_for_team_leader(u)
        from cache import Cache, CacheLocation
        type = self.request.get("type")
        if type=="INTERN":
        # Look up the permanent-location entity named by the request.
        where = CacheLocation.get(self.request.get("location"))
 
        # Create and persist the new Cache entity, then acknowledge with "OK".
        c = Cache(friendlyName = self.request.get("name"),type=ctype,last_touched=u,space_left=long(self.request.get("freespace")),permanent_location=where,checked_out=True)
        c.put()
        self.response.out.write("OK")

src/s/n/sneakernet-HEAD/appengine/cachetalk.py   sneakernet(Download)
  def get(self):
    # Sync handler: look up a Cache entity by its friendly name and, if none
    # exists, create a new TYPE_COMPUTER cache attributed to the calling user.
    # NOTE(review): scraped snippet — the method is truncated right after the
    # Cache() constructor call; the rest of the sync logic is not visible here.
    from cache import can_operate_on, ContentCopy, get_cache_by_name, Cache, TYPE_COMPUTER
    from user_sn import confirm
    # Authenticate the caller; `u` is the current user entity.
    u = confirm(self)
    cname = self.request.get("cache")
    logging.info("Syncing a cache named %s" % cname)
    c = get_cache_by_name(cname)
    # NOTE(review): PEP 8 prefers `c is None`; also "No cached named" looks
    # like a typo for "No cache named" in the log message (left unchanged).
    if c==None:
        logging.info("No cached named %s, making a new one" % cname)
        c = Cache(friendlyName = cname,type=TYPE_COMPUTER,last_touched=u,space_left=-1,person_responsible=u)

src/f/e/feedcache-1.4.1/feedcache/example_threads.py   feedcache(Download)
def fetch_urls(storage, input_queue, output_queue):
    """Thread target for fetching feed data.

    NOTE(review): scraped snippet — the body is truncated after the Cache is
    constructed; presumably URLs are pulled from *input_queue* and fetched
    results pushed to *output_queue* — confirm against the full source.
    """
    # Each worker thread wraps the shared storage backend in its own Cache.
    c = cache.Cache(storage)
 

src/f/e/feedcache-1.4.1/feedcache/example.py   feedcache(Download)
def main(urls=[]):
    # NOTE(review): mutable default argument (`urls=[]`) is shared across
    # calls; `urls=None` is the safer idiom. Snippet is truncated mid-`try`,
    # so the code is left unchanged here.
    print 'Saving feed data to ./.feedcache'
    # Persist fetched feed data in a shelve file in the working directory;
    # the truncated `try` presumably pairs with a `finally: storage.close()`.
    storage = shelve.open('.feedcache')
    try:
        fc = cache.Cache(storage)

src/p/y/PyCaching-0.1.3/pycaching/geocaching.py   PyCaching(Download)
import json
from util import Util
from cache import Cache
from urlparse import urljoin
from urllib import urlencode
 
            # assemble cache object
            c = Cache(wp, name, cacheType, None, state, found, size, dif, ter, author, hidden)
            logging.debug("Parsing cache: %s", c)
            result.append(c)
 
        # assemble cache object
        c = Cache(wp, data["name"], data["type"]["text"], None, data["available"], None,
            size, data["difficulty"]["text"], data["terrain"]["text"],
            data["owner"]["text"], hidden, None)
 
        # assemble cache object
        c = Cache(wp, name, cacheType, location, state, found,
            size, dif, ter, author, hidden, attributes,
            summary, description, hint, favorites)

src/p/y/pycaching-HEAD/pycaching/geocaching.py   pycaching(Download)
import json
from util import Util
from cache import Cache
from urlparse import urljoin
from urllib import urlencode
 
            # assemble cache object
            c = Cache(wp, name, cacheType, None, state, found, size, dif, ter, author, hidden)
            logging.debug("Parsing cache: %s", c)
            result.append(c)
 
        # assemble cache object
        c = Cache(wp, data["name"], data["type"]["text"], None, data["available"], None,
            size, data["difficulty"]["text"], data["terrain"]["text"],
            data["owner"]["text"], hidden, None)
 
        # assemble cache object
        c = Cache(wp, name, cacheType, location, state, found,
            size, dif, ter, author, hidden, attributes,
            summary, description, hint, favorites)

src/t/r/trytond-3.2.0/trytond/ir/translation.py   trytond(Download)
from ..transaction import Transaction
from ..pool import Pool
from ..cache import Cache
from ..const import RECORD_CACHE_SIZE
 
            searcher='search_model')
    overriding_module = fields.Char('Overriding Module', readonly=True)
    _translation_cache = Cache('ir.translation', size_limit=10240,
        context=False)
    _get_language_cache = Cache('ir.translation')

src/w/i/wikidata-HEAD/wikivoyage/importer.py   wikidata(Download)
# Module-level setup for the wikivoyage importer (scraped snippet; the
# surrounding definitions of `lang` and `enwiki` are not visible here).
envoy = pywikibot.Site(lang, 'wikivoyage')
repo = enwiki.data_repository()
# Shared cache handle: redis backend on host 'tools-mc', with all keys
# namespaced under the 'wikivoyage-importer' prefix.
CACHE = cache.Cache(backend='redis', prefix='wikivoyage-importer', host='tools-mc')
 
# User-Agent header identifying the bot, per Wikimedia bot conventions.
headers = {'User-agent': 'https://www.wikidata.org/wiki/User:Legobot'}

src/t/r/trytond-HEAD/trytond/ir/translation.py   trytond(Download)
from ..transaction import Transaction
from ..pool import Pool
from ..cache import Cache
from ..const import RECORD_CACHE_SIZE
 
            searcher='search_model')
    overriding_module = fields.Char('Overriding Module', readonly=True)
    _translation_cache = Cache('ir.translation', size_limit=10240,
        context=False)
    _get_language_cache = Cache('ir.translation')

src/o/h/oh-mainline-HEAD/vendor/packages/gdata/samples/oauth/oauth_on_appengine/appengine_utilities/paginator.py   oh-mainline(Download)
 
from google.appengine.ext import db
from cache import Cache
 
class Paginator(object):
            cache_string = cache_string + q_filter + "_" + q_filters[q_filter] + "_"
        cache_string = cache_string + "index"
        c = Cache()
        if c.has_key(cache_string):
            return c[cache_string]

  1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9  Next