
All samples: 47  (calls: 32, derives: 0, imports: 15)

import datetime
from pytz import utc   # assumed import; the page does not show where `utc` comes from

def utcnow():
    return datetime.datetime.utcnow().replace(tzinfo=utc)
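The wrapper matters because the returned value is timezone-aware (tzinfo set to UTC), so it can be compared with and subtracted from other aware datetimes without a TypeError. A minimal usage sketch:

    from datetime import timedelta

    now = utcnow()
    yesterday = now - timedelta(days=1)   # arithmetic on aware datetimes, as in the zenodo example below
    assert now.tzinfo is not None         # a naive datetime would not compare safely against these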


src/z/e/zenodo-HEAD/zenodo/modules/github/views/github.py   (zenodo)
 
from ..tasks import handle_github_payload
from ..utils import sync, utcnow, parse_timestamp, remove_hook, create_hook
from ..helpers import get_api, get_token, get_account, check_token
 
        # Check if sync is needed - should probably not be done here
        last_sync = parse_timestamp(extra_data["last_sync"])
        now = utcnow()
        yesterday = now - timedelta(days=1)
 
        user_id=account.user_id,
        last_sync=humanize.naturaltime(
            utcnow() - parse_timestamp(account.extra_data['last_sync'])
        ),
    )
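
The excerpt stops before the actual comparison, but the evident pattern is: re-sync when the stored last_sync is older than a day. A condensed sketch, where needs_sync and the ISO format inside parse_timestamp are assumptions rather than zenodo's code, and utcnow is the helper defined at the top of this page:

    import datetime
    from datetime import timedelta
    from pytz import utc

    def parse_timestamp(value):
        # hypothetical stand-in for zenodo's parse_timestamp helper
        return datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S").replace(tzinfo=utc)

    def needs_sync(extra_data, max_age=timedelta(days=1)):
        return utcnow() - parse_timestamp(extra_data["last_sync"]) > max_age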

src/t/a/tagger-0.3/tagger/tag.py   (tagger)
from rdflib.sparql.graphPattern import GraphPattern
from rdflib.sparql.sparqlGraph import SPARQLGraph
from utils import any, all, nscollection, set_intersect, utcnow, implements
 
import dispatch
        def _annwrapper(self, item, author, taggedby, spec=None):
            now = Literal(utcnow())
            result = self._lookup_annotation(item, author, taggedby)
            if result:
                annotation = result[0]

src/r/e/RedKindle-HEAD/rq2/job.py   (RedKindle)
from .connections import resolve_connection
from .exceptions import UnpickleError, NoSuchJobError
from .utils import import_attribute, utcnow, utcformat, utcparse
from rq.compat import text_type, decode_redis_hash, as_text
 
    def __init__(self, id=None, connection=None):
        self.connection = resolve_connection(connection)
        self._id = id
        self.created_at = utcnow()
        self._data = UNEVALUATED
    def dump(self):
        """Returns a serialization of the current job instance"""
        obj = {}
        obj['created_at'] = utcformat(self.created_at or utcnow())
        obj['data'] = self.data
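
utcformat and utcparse themselves are not shown on this page; dump() uses them to turn aware datetimes into strings that can live in a Redis hash and be read back later. A plausible round-trip, assuming a fixed UTC string format (not necessarily rq's exact choice):

    import datetime
    from pytz import utc

    TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%SZ'   # assumed format

    def utcformat(dt):
        return dt.strftime(TIMESTAMP_FORMAT)

    def utcparse(string):
        return datetime.datetime.strptime(string, TIMESTAMP_FORMAT).replace(tzinfo=utc)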

src/r/e/RedKindle-HEAD/rq2/worker.py   (RedKindle)
from .connections import get_current_connection
from .job import Job, Status
from .utils import make_colorizer, utcnow, utcformat
from .logutils import setup_loghandlers
from .exceptions import NoQueueError, UnpickleError, DequeueTimeout
        with self.connection._pipeline() as p:
            p.delete(key)
            p.hset(key, 'birth', utcformat(utcnow()))
            p.hset(key, 'queues', queues)
            p.sadd(self.redis_workers_keys, key)
            # rollback the pipeline
            p.srem(self.redis_workers_keys, self.key)
            p.hset(self.key, 'death', utcformat(utcnow()))
            p.expire(self.key, 60)
            p.execute()
            job._result = rv
            job._status = Status.FINISHED
            job.ended_at = utcnow()
 
            result_ttl = job.get_ttl(self.default_result_ttl)

src/o/r/ormist-HEAD/ormist/managers.py   (ormist)
# -*- coding: utf-8 -*-
import pickle
import redis
from .utils import (timestamp_to_datetime, datetime_to_timestamp, random_string,
                    utcnow, random_true)
            expire_value = get_redis(system).get(expire_key)
            expire = timestamp_to_datetime(expire_value)
            if expire and expire < utcnow():
                return None
            attrs = pickle.loads(value)
    def expire(self, system=None):
        system = self.get_system(system)
        expire_ts = datetime_to_timestamp(utcnow())
        expire_key = self._key('__expire__')
        remove_ids = get_redis(system).zrangebyscore(expire_key, 0, expire_ts)
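
Expiry here is implemented by storing epoch-second scores in a Redis sorted set and fetching everything whose score is at or below "now". A sketch of the same idea, where datetime_to_timestamp is a hypothetical stand-in for ormist's helper, the key name is made up, and redis_client is an existing redis.StrictRedis instance:

    import calendar

    def datetime_to_timestamp(dt):
        # aware UTC datetime -> integer epoch seconds
        return calendar.timegm(dt.utctimetuple())

    expire_ts = datetime_to_timestamp(utcnow())
    expired_ids = redis_client.zrangebyscore('model:__expire__', 0, expire_ts)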

src/w/e/webalerts-0.0.3/webalerts/__init__.py   (webalerts)
from .app import App, Post, Browser, Response
from .exceptions import (WebAlertsException, ConfigurationError, SiteException,
                         LoginError, ParseError, NotificationException)
from .utils import current_time, sleep, utcnow, cached, parse_html, urlencode_utf8
from .version import __version__

src/r/q/rq-0.4.2/rq/job.py   (rq)
from .connections import resolve_connection
from .exceptions import UnpickleError, NoSuchJobError
from .utils import import_attribute, utcnow, utcformat, utcparse
from rq.compat import text_type, decode_redis_hash, as_text
 
    def __init__(self, id=None, connection=None):
        self.connection = resolve_connection(connection)
        self._id = id
        self.created_at = utcnow()
        self._data = UNEVALUATED
    def dump(self):
        """Returns a serialization of the current job instance"""
        obj = {}
        obj['created_at'] = utcformat(self.created_at or utcnow())
        obj['data'] = self.data
            self._result = self.func(*self.args, **self.kwargs)
            self.set_status(Status.FINISHED)
            self.ended_at = utcnow()
        finally:
            assert self.id == _job_stack.pop()

src/r/q/rq-HEAD/rq/job.py   (rq)
from .exceptions import NoSuchJobError, UnpickleError
from .local import LocalStack
from .utils import import_attribute, utcformat, utcnow, utcparse
 
try:
    def __init__(self, id=None, connection=None):
        self.connection = resolve_connection(connection)
        self._id = id
        self.created_at = utcnow()
        self._data = UNEVALUATED
    def dump(self):
        """Returns a serialization of the current job instance"""
        obj = {}
        obj['created_at'] = utcformat(self.created_at or utcnow())
        obj['data'] = self.data
            self._result = self.func(*self.args, **self.kwargs)
            self.set_status(Status.FINISHED)
            self.ended_at = utcnow()
        finally:
            assert self.id == _job_stack.pop()

src/r/q/rq-0.4.2/rq/worker.py   (rq)
from .connections import get_current_connection
from .job import Job, Status
from .utils import make_colorizer, utcnow, utcformat
from .logutils import setup_loghandlers
from .exceptions import NoQueueError, DequeueTimeout
        with self.connection._pipeline() as p:
            p.delete(key)
            p.hset(key, 'birth', utcformat(utcnow()))
            p.hset(key, 'queues', queues)
            p.sadd(self.redis_workers_keys, key)
            # rollback the pipeline
            p.srem(self.redis_workers_keys, self.key)
            p.hset(self.key, 'death', utcformat(utcnow()))
            p.expire(self.key, 60)
            p.execute()

src/r/q/rq-HEAD/rq/worker.py   (rq)
from .queue import get_failed_queue, Queue
from .timeouts import UnixSignalDeathPenalty
from .utils import make_colorizer, utcformat, utcnow
from .version import VERSION
 
        with self.connection._pipeline() as p:
            p.delete(key)
            p.hset(key, 'birth', utcformat(utcnow()))
            p.hset(key, 'queues', queues)
            p.sadd(self.redis_workers_keys, key)
            # rollback the pipeline
            p.srem(self.redis_workers_keys, self.key)
            p.hset(self.key, 'death', utcformat(utcnow()))
            p.expire(self.key, 60)
            p.execute()
