src/c/r/CRABServer-HEAD/src/python/CRABInterface/HTCondorDataWorkflow.py (CRABServer)
        self.logger.debug("Retrieving %s output of jobs: %s" % (','.join(filetype), jobids))
        rows = self.api.query(None, None, self.FileMetaData.GetFromTaskAndType_sql, filetype=','.join(filetype), taskname=workflow)
        rows = filter(lambda row: row[GetFromTaskAndType.PANDAID] in jobids, rows)
        if howmany!=-1:
            rows=rows[:howmany]
                pfn = self.phedex.getPFN(row[GetFromTaskAndType.TMPLOCATION], lfn)[(row[GetFromTaskAndType.TMPLOCATION], lfn)]
            else:
                if row[GetFromTaskAndType.PANDAID] in finishedIds:
                    lfn = temp_to_lfn(row[GetFromTaskAndType.LFN], username)
                    pfn = self.phedex.getPFN(row[GetFromTaskAndType.LOCATION], lfn)[(row[GetFromTaskAndType.LOCATION], lfn)]
                elif row[GetFromTaskAndType.PANDAID] in transferingIds:
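
A note on the lookup pattern above: self.phedex.getPFN(node, lfn) is indexed with the tuple (node, lfn), so the client evidently returns a mapping keyed by (node, LFN) pairs rather than a bare string. The sketch below only illustrates that calling convention; FakePhedex, the node name, the LFN and the fabricated PFN are made-up stand-ins, not part of CRABServer or of the real PhEDEx data-service client.

# Illustrative sketch only: FakePhedex mimics the return shape used above,
# i.e. a dict keyed by (node, lfn) tuples. All values are invented.
class FakePhedex(object):
    def getPFN(self, node, lfn):
        # Fabricate a PFN just to show the shape of the returned mapping.
        return {(node, lfn): "root://%s.example.invalid/%s" % (node.lower(), lfn)}

phedex = FakePhedex()
node, lfn = "T2_XX_Example", "/store/user/someuser/output_1.root"
# Same access pattern as in the snippet above:
pfn = phedex.getPFN(node, lfn)[(node, lfn)]
print(pfn)  # root://t2_xx_example.example.invalid//store/user/someuser/output_1.root
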
        # Build a per-job run/lumi summary, keyed by the (stringified) job id.
        res['runsAndLumis'] = {}
        for row in rows:
            self.logger.debug("Got lumi info for job %d." % row[GetFromTaskAndType.PANDAID])
            if row[GetFromTaskAndType.PANDAID] in jobids:
                # PARENTS comes back as a file-like handle, hence the .read().
                res['runsAndLumis'][str(row[GetFromTaskAndType.PANDAID])] = { 'parents' : row[GetFromTaskAndType.PARENTS].read(),
                # ... (the remaining entries of this dictionary are elided in this excerpt)
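
The dictionary literal above is cut off in this excerpt and continues with further per-job fields. The sketch below only shows the grouping idea, keying a result dictionary by the stringified job id; the column indices, row tuples and values are hypothetical stand-ins, and plain strings replace the file-like handles that the real code reads with .read().

# Sketch only: tuples stand in for database rows; the column indices and the
# sample values are invented for illustration.
PANDAID, PARENTS = 0, 1
rows = [
    (1, '["parent_a.root"]'),
    (2, '["parent_b.root"]'),
]
jobids = [1, 2]

res = {'runsAndLumis': {}}
for row in rows:
    if row[PANDAID] in jobids:
        # Key by the stringified job id, as in the snippet above.
        res['runsAndLumis'][str(row[PANDAID])] = {
            'parents': row[PARENTS],  # real code: row[GetFromTaskAndType.PARENTS].read()
            # ... further fields elided, mirroring the truncated excerpt
        }
print(res['runsAndLumis']['1'])  # {'parents': '["parent_a.root"]'}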