Did I find the right examples for you? yes no      Crawl my project      Python Jobs

All Samples (12)  |  Call (6)  |  Derive (0)  |  Import (6)

src/c/v/CVSAnalY-HEAD/pycvsanaly2/DBProxyContentHandler.py   CVSAnalY(Download)

src/m/i/MininGit-HEAD/pycvsanaly2/DBProxyContentHandler.py   MininGit(Download)
from DBContentHandler import DBContentHandler
from DBTempLog import DBTempLog
from AsyncQueue import AsyncQueue, TimeOut
from utils import printdbg
import threading
        self.db_handler.repository(self.repo_uri)
 
        queue = AsyncQueue(50)
        reader_thread = threading.Thread(target=self.__reader,
                                          args=(self.templog, queue))

src/c/v/CVSAnalY-HEAD/pycvsanaly2/DBTempLog.py   CVSAnalY(Download)

src/m/i/MininGit-HEAD/pycvsanaly2/DBTempLog.py   MininGit(Download)
                      statement, ICursor)
from Repository import Commit
from AsyncQueue import AsyncQueue
 
import threading
            self.__create_table()
 
        self.queue = AsyncQueue(50)
        self.writer_thread = threading.Thread(target=self.__writer,
                                               args=(self.queue,))

src/c/v/CVSAnalY-HEAD/pycvsanaly2/Log.py   CVSAnalY(Download)

src/m/i/MininGit-HEAD/pycvsanaly2/Log.py   MininGit(Download)
import threading
from repositoryhandler.backends.watchers import LOG
from AsyncQueue import AsyncQueue, TimeOut
from utils import printerr
 
    def _read_from_repository(self, new_line_cb, user_data):
        queue = AsyncQueue()
        logreader_thread = threading.Thread(target=self._logreader,
                                             args=(self.repo, queue))
        logreader_thread.setDaemon(True)