
All Samples(34)  |  Call(24)  |  Derive(0)  |  Import(10)

src/p/y/pyload-HEAD/module/web/pyload_app.py   pyload(Download)
from filters import relpath, unquotepath
 
from module.utils import formatSize, save_join, fs_encode, fs_decode
 
# Helper
    }
 
    items = listdir(fs_encode(root))
 
    for item in sorted([fs_decode(x) for x in items]):
    path = path.replace("..", "")
    try:
        return static_file(fs_encode(path), fs_encode(root))
 
    except Exception, e:
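
The pyload_app.py excerpt does two things with the encoding helpers: it strips ".." from a user-supplied path before serving it, and it lists the download root through fs_encode while decoding the entries back with fs_decode for display. Below is a minimal sketch that combines both steps, assuming Python 2 as in the snippets; the fs_encode/fs_decode stand-ins only approximate the real helpers in module.utils.

# -*- coding: utf-8 -*-
# Illustrative stand-ins; pyload's real helpers live in module.utils.
import sys
from os import listdir
from os.path import join

FS_ENC = sys.getfilesystemencoding() or "utf-8"

def fs_encode(value):
    # unicode -> byte string for filesystem calls (assumed behaviour)
    return value.encode(FS_ENC) if isinstance(value, unicode) else value

def fs_decode(value):
    # byte string -> unicode for templates/JSON (assumed behaviour)
    return value.decode(FS_ENC, "replace") if isinstance(value, str) else value

def list_folder(root, path):
    path = path.replace("..", "")  # crude traversal guard, as in the excerpt
    items = listdir(join(fs_encode(root), fs_encode(path)))
    return sorted(fs_decode(x) for x in items)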

src/p/y/pyload-HEAD/module/plugins/Plugin.py   pyload(Download)
from itertools import islice
 
from module.utils import save_join, save_path, fs_encode, fs_decode
 
def chunks(iterable, size):
            filename = join(location, newname)
 
        fs_filename = fs_encode(filename)
 
        if self.core.config["permission"]["change_file"]:
        :return: dictionary key of the first rule that matched
        """
        lastDownload = fs_encode(self.lastDownload)
        if not exists(lastDownload): return None
 

src/p/y/pyload-HEAD/module/plugins/hooks/Checksum.py   pyload(Download)
import re
 
from module.utils import save_join, fs_encode
from module.plugins.Hook import Hook
 
            self.checkFailed(pyfile, None, "No file downloaded")
 
        local_file = fs_encode(pyfile.plugin.lastDownload)
        #download_folder = self.config['general']['download_folder']
        #local_file = fs_encode(save_join(download_folder, pyfile.package().folder, pyfile.name))
                continue
 
            hash_file = fs_encode(save_join(download_folder, link["name"]))
            if not isfile(hash_file):
                self.logWarning("File not found: %s" % link["name"])
                self.logDebug(link["name"], data)
 
                local_file = fs_encode(save_join(download_folder, data["name"]))
                algorithm = self.methods.get(file_type, file_type)
                checksum = computeChecksum(local_file, algorithm)
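
Checksum.py pushes every path it touches through fs_encode(save_join(...)) and then hashes the local file. A hedged sketch of what a computeChecksum-style helper can look like with hashlib and zlib; this is an illustration, not pyload's actual implementation.

import hashlib
import zlib

def compute_checksum(local_file, algorithm, chunk_size=128 * 1024):
    # local_file is expected to be fs_encode()d already, as in the excerpt.
    if algorithm in getattr(hashlib, "algorithms", ("md5", "sha1", "sha256")):
        h = hashlib.new(algorithm)
        with open(local_file, "rb") as f:
            for block in iter(lambda: f.read(chunk_size), ""):
                h.update(block)
        return h.hexdigest()
    elif algorithm in ("adler32", "crc32"):
        last = 1 if algorithm == "adler32" else 0  # standard initial values
        with open(local_file, "rb") as f:
            for block in iter(lambda: f.read(chunk_size), ""):
                last = getattr(zlib, algorithm)(block, last)
        return "%08x" % (last & 0xFFFFFFFF)
    return None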

src/p/y/pyload-HEAD/module/network/HTTPDownload.py   pyload(Download)
 
from module.plugins.Plugin import Abort
from module.utils import save_join, fs_encode
 
class HTTPDownload():
    def _copyChunks(self):
        init = fs_encode(self.info.getChunkName(0)) #initial chunk name
 
        if self.info.getCount() > 1:
            fo = open(init, "rb+") #first chunkfile
            for i in range(1, self.info.getCount()):
                #input file
                fo.seek(
                    self.info.getChunkRange(i - 1)[1] + 1) #seek to beginning of chunk, to get rid of overlapping chunks
                fname = fs_encode("%s.chunk%d" % (self.filename, i))
            self.filename = save_join(dirname(self.filename), self.nameDisposition)
 
        move(init, fs_encode(self.filename))
        self.info.remove() #remove info file
 
                            self.closeChunk(chunk)
                            self.chunks.remove(chunk)
                            remove(fs_encode(self.info.getChunkName(chunk.id)))
 
                        #let first chunk load the rest and update the info file
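
_copyChunks() stitches the ".chunkN" part files back into the first chunk and then moves the result to its final name, always going through fs_encode. A simplified sketch of that merge, assuming an info object that exposes getChunkName(i), getChunkRange(i) and getCount() like the ChunkInfo used above; details such as the Content-Disposition rename are left out.

from os import remove
from shutil import move, copyfileobj

from module.utils import fs_encode  # pyload helper used above

def copy_chunks(info, filename):
    init = fs_encode(info.getChunkName(0))  # first chunk file receives the rest
    if info.getCount() > 1:
        fo = open(init, "rb+")
        for i in range(1, info.getCount()):
            fo.seek(info.getChunkRange(i - 1)[1] + 1)  # drop overlapping bytes
            fname = fs_encode("%s.chunk%d" % (filename, i))
            fi = open(fname, "rb")
            copyfileobj(fi, fo)  # append chunk i at the right offset
            fi.close()
            remove(fname)        # chunk file no longer needed
        fo.close()
    move(init, fs_encode(filename))  # give the merged file its final name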

src/p/y/pyload-HEAD/module/plugins/hooks/UnSkipOnFail.py   pyload(Download)
from os.path import basename
 
from module.utils import fs_encode
from module.plugins.Hook import Hook
from module.PyFile import PyFile
        """
        dups = []
        pyfile_name = fs_encode(basename(pyfile.name))
        # get packages (w/o files, as most file data is useless here)
        queue = self.core.api.getQueue()
        for package in queue:
            # check if package-folder equals pyfile's package folder
            if fs_encode(package.folder) == fs_encode(pyfile.package().folder):
                if pdata.links:
                    for link in pdata.links:
                        link_name = fs_encode(basename(link.name))
                        # check if link name collides with pdata's name
                        if link_name == pyfile_name:
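
UnSkipOnFail.py normalizes every name through fs_encode before comparing, so a unicode name and its byte-string twin still match. A hedged sketch of that duplicate search over queued package data; the attribute names follow the excerpt, but the surrounding API is an assumption.

from os.path import basename

from module.utils import fs_encode  # pyload helper used above

def find_duplicates(pyfile, packages):
    # packages: package data objects with .folder and .links, e.g. collected
    # from api.getQueue() plus the per-package link lists.
    dups = []
    pyfile_name = fs_encode(basename(pyfile.name))
    for pdata in packages:
        if fs_encode(pdata.folder) != fs_encode(pyfile.package().folder):
            continue  # different download folder, names cannot collide
        for link in pdata.links or []:
            if fs_encode(basename(link.name)) == pyfile_name:
                dups.append(link)  # same target file name
    return dups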

src/p/y/pyload-HEAD/module/plugins/hooks/ExtractArchive.py   pyload(Download)
    from grp import getgrnam
 
from module.utils import save_join, fs_encode
from module.plugins.Hook import Hook, threaded, Expose
from module.plugins.internal.AbstractExtractor import ArchiveError, CRCError, WrongPassword
 
                if self.getConfig("subfolder"):
                    out = join(out, fs_encode(p.folder))
 
                if not exists(out):
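
ExtractArchive.py optionally nests the package folder (fs_encoded) under the extraction target and creates the directory if it is missing. A small sketch of that step; the function and parameter names are illustrative.

from os import makedirs
from os.path import join, exists

from module.utils import fs_encode  # pyload helper used above

def extraction_target(out, package_folder, use_subfolder):
    # out: base extraction folder; package_folder may be a unicode string.
    if use_subfolder:
        out = join(out, fs_encode(package_folder))
    if not exists(out):
        makedirs(out)
    return out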

src/p/y/pyload-HEAD/module/plugins/hooks/MergeFiles.py   pyload(Download)
 
from os.path import join
from module.utils import save_join, fs_encode
from module.plugins.Hook import Hook
 
        for name, file_list in files.iteritems():
            self.logInfo("Starting merging of %s" % name)
            final_file = open(join(download_folder, fs_encode(name)), "wb")
 
            for splitted_file in file_list:

src/p/y/pyload-HEAD/module/network/HTTPChunk.py   pyload(Download)
from time import sleep
from re import search
from module.utils import fs_encode
import codecs
import pycurl
    def save(self):
        fs_name = fs_encode("%s.chunks" % self.name)
        fh = codecs.open(fs_name, "w", "utf_8")
        fh.write("name:%s\n" % self.name)
        fh.write("size:%s\n" % self.size)
    def load(name):
        fs_name = fs_encode("%s.chunks" % name)
        if not exists(fs_name):
            raise IOError()
        fh = codecs.open(fs_name, "r", "utf_8")
    def remove(self):
        fs_name = fs_encode("%s.chunks" % self.name)
        if exists(fs_name): remove(fs_name)
 
    def getCount(self):
        # request all bytes, since some servers in Russia seem to have a defective arithmetic unit
 
        fs_name = fs_encode(self.p.info.getChunkName(self.id))
        if self.resume:
            self.fp = open(fs_name, "ab")
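
HTTPChunk keeps its metadata in a "<name>.chunks" sidecar file: the file name goes through fs_encode, while the contents are written as UTF-8 text via codecs. A reduced sketch of that save/load pair, storing only the two fields shown in the excerpt.

import codecs
from os.path import exists

from module.utils import fs_encode  # pyload helper used above

def save_chunk_info(name, size):
    fs_name = fs_encode("%s.chunks" % name)  # byte path for the OS
    fh = codecs.open(fs_name, "w", "utf_8")  # unicode-safe contents
    fh.write("name:%s\n" % name)
    fh.write("size:%s\n" % size)
    fh.close()

def load_chunk_info(name):
    fs_name = fs_encode("%s.chunks" % name)
    if not exists(fs_name):
        raise IOError("chunk info not found: %s" % fs_name)
    info = {}
    fh = codecs.open(fs_name, "r", "utf_8")
    for line in fh:
        key, _, value = line.strip().partition(":")
        info[key] = value
    fh.close()
    return info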

src/p/y/pyload-HEAD/module/plugins/hoster/Premium4Me.py   pyload(Download)
 
from module.plugins.Hoster import Hoster
from module.utils import fs_encode
 
 
        if self.req.http.code == '420':
            # Custom error code sent - fail
            lastDownload = fs_encode(self.lastDownload)
 
            if exists(lastDownload):
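
Premium4Me.py reacts to HTTP status 420 by looking up the file it just stored, again through fs_encode, so the unwanted download can be inspected and discarded. A hedged sketch of the cleanup part only; what the hoster does before removing the file is not shown in the excerpt.

from os import remove
from os.path import exists

from module.utils import fs_encode  # pyload helper used above

def discard_last_download(last_download):
    # last_download may be a unicode path stored by the hoster plugin.
    path = fs_encode(last_download)
    if exists(path):
        remove(path)  # drop the unwanted download
        return True
    return False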

src/p/y/pyload-HEAD/module/plugins/container/LinkList.py   pyload(Download)
# -*- coding: utf-8 -*-
 
import codecs
from module.utils import fs_encode
from module.plugins.Container import Container
        print pyfile.url
 
        file_name = fs_encode(pyfile.url)
 
        txt = codecs.open(file_name, 'r', file_enc)
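
LinkList.py opens the container file through an fs_encoded path with codecs, decoding the contents with a configurable text encoding. A small sketch of reading such a link list; file_enc mirrors the encoding option implied by the excerpt, and skipping blank lines and comments is an assumption.

import codecs

from module.utils import fs_encode  # pyload helper used above

def read_link_list(path, file_enc="utf-8"):
    # path may be unicode; the file contents are decoded with file_enc.
    file_name = fs_encode(path)
    txt = codecs.open(file_name, "r", file_enc)
    links = []
    for line in txt:
        link = line.strip()
        if link and not link.startswith("#"):  # assumed: skip blanks/comments
            links.append(link)
    txt.close()
    return links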