
All Samples(3)  |  Call(0)  |  Derive(0)  |  Import(3)

src/t/w/TweetHit-HEAD/tweethit/handlers/taskworker.py   (TweetHit)
import logging

from google.appengine.ext.db import Key

from PerformanceEngine import LOCAL, MEMCACHE, DATASTORE, \
    NAME_DICT, pdb, time_util
 
  def post(self):
    # Rebuild the payload objects posted by the previous task stage.
    payloads = [Payload(simple_url['url'], simple_url['user_id'])
                for simple_url in eval(self.request.get('data'))]

    # Tiered read: look the urls up in local cache and memcache only.
    cached_urls = Url.get_by_key_name([payload.url for payload in payloads],
                                      _storage=[LOCAL, MEMCACHE])  # call truncated in this excerpt
    # ... (url fetching and counter-target bookkeeping elided) ...
    logging.info('UrlFetchWorker finished, counter targets: %s' % counter_targets)
    pdb.put(urls, _storage=[LOCAL, MEMCACHE])  # Urls are stored in cache only

    if len(counter_targets):
      pass  # ... (counter updates elided in this excerpt) ...

    # Tiered read of counters keyed by name, returned as a name -> model dict.
    product_counters = ProductCounter.get_by_key_name(product_targets.keys(),
                                                      _storage=[MEMCACHE, DATASTORE],
                                                      _result_type=NAME_DICT)
    user_counters = UserCounter.get_by_key_name(user_targets.keys())  # remaining kwargs elided in this excerpt
    # ... (filtering of product_counters elided) ...
    key_names = [counter.key().name() for counter in product_counters]
    product_renderers = ProductRenderer.get_by_key_name(key_names,
                                                        _storage=[MEMCACHE, DATASTORE],
                                                        _result_type=NAME_DICT)
    for counter in product_counters:
      pass  # ... (renderer update loop elided in this excerpt) ...
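
Taken together, the taskworker excerpt shows the core PerformanceEngine pattern: a tiered read via get_by_key_name(..., _storage=[...], _result_type=NAME_DICT) followed by a tiered write via pdb.put(..., _storage=[...]). The sketch below is a minimal, hypothetical handler assembled only from those calls; the Url model shape, the payload format, and the assumption that NAME_DICT yields a name-to-model dict with None for cache misses are illustrative guesses, not TweetHit code.

import logging

from google.appengine.ext import db, webapp
from PerformanceEngine import LOCAL, MEMCACHE, NAME_DICT, pdb


class Url(pdb.Model):
  '''Assumed shape: a cached model keyed by the url string.'''
  user_id = db.StringProperty()


class UrlCacheWorker(webapp.RequestHandler):
  '''Hypothetical worker mirroring the tiered read/write pattern above.'''

  def post(self):
    # Illustrative payload format: a comma-separated list of urls.
    url_list = self.request.get('data').split(',')

    # Tiered read: local cache and memcache only, returned as name -> model.
    cached_urls = Url.get_by_key_name(url_list,
                                      _storage=[LOCAL, MEMCACHE],
                                      _result_type=NAME_DICT)

    # Build models for every url that missed both cache layers.
    new_urls = [Url(key_name=url) for url, model in cached_urls.items()
                if model is None]

    if len(new_urls):
      # Tiered write: urls are stored in cache only, as in the excerpt.
      pdb.put(new_urls, _storage=[LOCAL, MEMCACHE])
      logging.info('Cached %s new urls' % len(new_urls))

The point of the [LOCAL, MEMCACHE] storage list is that the datastore is never touched on this hot path, which matches the "Urls are stored in cache only" comment in the original; the cron handlers below are the ones that write through to DATASTORE.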

src/t/w/TweetHit-HEAD/tweethit/handlers/cron.py   (TweetHit)
import logging

from config import SPAM_COUNT_LIMIT

from PerformanceEngine import pdb, DATASTORE, MEMCACHE, time_util
from google.appengine.runtime.apiproxy_errors import CapabilityDisabledError
 
      return  # early exit; the surrounding guard is elided in this excerpt
    logging.info('Counter keys retrieved: %s' % len(counter_keys))
    # Read the counters back from memcache only.
    counters = CounterBase.get(counter_keys, _storage=MEMCACHE)
    if len(counters):
      try:  # ... (body and except clause elided in this excerpt) ...

    # Ban products: append the targets and write through memcache and the datastore.
    ban_list = Banlist.retrieve()
    ban_list.products += targets
    ban_list.put(_storage=[MEMCACHE, DATASTORE])
    pdb.put(products + renderers + product_counters, _storage=[MEMCACHE, DATASTORE])

      # Ban users: same write-through pattern for banned user keys.
      ban_list = Banlist.retrieve()
      ban_list.users += targets
      ban_list.put(_storage=[MEMCACHE, DATASTORE])
      #TwitterUser.update_banlist([user.key().name() for user in users])
      logging.info('Banning users with keys: %s' % [user.key().name() for user in users])
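
cron.py repeats the same write-through idiom for the ban list: retrieve the singleton, mutate it, then put(_storage=[MEMCACHE, DATASTORE]). The CapabilityDisabledError import suggests the datastore writes are guarded against App Engine's read-only maintenance windows. Below is a hedged sketch of that idiom; the Banlist model shape, its retrieve() implementation, and the placement of the try/except are assumptions, only the _storage arguments come from the excerpt.

import logging

from google.appengine.ext import db
from google.appengine.runtime.apiproxy_errors import CapabilityDisabledError
from PerformanceEngine import pdb, MEMCACHE, DATASTORE


class Banlist(pdb.Model):
  '''Assumed shape: a singleton model holding banned key names.'''
  users = db.StringListProperty()
  products = db.StringListProperty()

  @classmethod
  def retrieve(cls):
    # Assumption: retrieve() fetches or lazily creates the singleton row.
    return cls.get_or_insert('banlist')


def ban_users(targets):
  '''Hypothetical helper mirroring the user-ban block in cron.py.'''
  try:
    ban_list = Banlist.retrieve()
    ban_list.users += targets
    # Write through both tiers so the ban survives memcache eviction.
    ban_list.put(_storage=[MEMCACHE, DATASTORE])
    logging.info('Banning users with keys: %s' % targets)
  except CapabilityDisabledError:
    # Datastore is read-only during maintenance; retry on the next cron run.
    logging.warning('Datastore write disabled, ban list not updated')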

src/t/w/TweetHit-HEAD/tweethit/model.py   (TweetHit)
from PerformanceEngine import pdb, MEMCACHE, DATASTORE

from google.appengine.ext import db
from google.appengine.api import memcache
from tweethit.utils.parser_util import AmazonURLParser


class OperationFlags(pdb.Model):
  '''Singleton container class that holds data for the syncing operation'''
  _key_name = 'OperationFlags'
  _storage = [MEMCACHE, DATASTORE]  # default storage tiers for this model
 
  @classmethod
  def filtered_update(cls, models):
    # Counters below the threshold stay in memcache only; the rest are
    # collected for a datastore write as well.
    db_targets = [str(model.key()) for model in models
                  if model.count >= cls._MIN_COUNT_FOR_DB_WRITE]
    pdb.put(models, _storage=MEMCACHE)
    if len(db_targets):
      pass  # ... (datastore write for db_targets elided in this excerpt) ...
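
model.py shows the declaration side of the same idea: a pdb.Model subclass names its default tiers in a _storage class attribute, and individual calls can still override it, as filtered_update does by writing everything to MEMCACHE and reserving DATASTORE for counters above a threshold. The sketch below is a simplified, hypothetical counter in that style; the threshold value and the direct datastore put of db_targets (the excerpt collects key strings instead, presumably for a later batched write) are assumptions.

from google.appengine.ext import db
from PerformanceEngine import pdb, MEMCACHE, DATASTORE


class DailyCounter(pdb.Model):
  '''Hypothetical counter model in the style of the excerpt above.'''
  _storage = [MEMCACHE, DATASTORE]  # default tiers for this model
  _MIN_COUNT_FOR_DB_WRITE = 10      # threshold value is illustrative

  count = db.IntegerProperty(default=0)

  @classmethod
  def filtered_update(cls, models):
    '''Write every counter to memcache; push only "hot" counters to datastore.'''
    db_targets = [model for model in models
                  if model.count >= cls._MIN_COUNT_FOR_DB_WRITE]
    pdb.put(models, _storage=MEMCACHE)
    if len(db_targets):
      pdb.put(db_targets, _storage=DATASTORE)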