
All Samples(28)  |  Call(28)  |  Derive(0)  |  Import(0)

src/p/i/pipeline2.0-HEAD/lib/python/CornellFTP.py   pipeline2.0
            raise get_ftp_exception(str(e))
        else:
            cout.outs("CornellFTP - Connected and logged in")
 
    def __del__(self):
 
                     + "-p 31001 arecibo.tc.cornell.edu'"
 
            cout.outs("CornellFTP - Starting Download of: %s" % \
                        os.path.split(ftp_path)[-1])
 
 
            self.sendcmd("TYPE I")
            cout.outs("CornellFTP - Starting Download of: %s" % \
                        os.path.split(ftp_path)[-1])
            self.retrbinary("RETR "+ftp_path, write)
            f.close()
 
        cout.outs("CornellFTP - Finished download of: %s" % \
    def upload(self, local_path, ftp_path):
        f = open(local_path, 'rb')  # binary mode to match the "TYPE I" transfer below
 
        self.sendcmd("TYPE I")
        cout.outs("CornellFTP - Starting upload of: %s" % \

src/p/i/pipeline2.0-HEAD/lib/python/Downloader.py   pipeline2.0
    for attempt in attempts:
        if attempt['id'] not in active_ids:
            dlm_cout.outs("Download attempt (ID: %d) is no longer running." % \
                            attempt['id'])
            queries = []
 
        # Request size is 0
        return
    dlm_cout.outs("Requesting data\nIssuing a request of size %d" % num_beams)
 
    web_service = CornellWebservice.Client()
 
                                        pw=config.download.api_password)
        if location == "done":
            dlm_cout.outs("Restore (%s) is done. Will create file entries." % \
                            request['guid'])
            create_file_entries(request)
            row = jobtracker.query(query, fetchone=True)
            if row['deltaT_hours'] > config.download.request_timeout:
                dlm_cout.outs("Restore (%s) is over %d hr old " \
                                "and still not ready. Marking " \
                                "it as failed." % \
    except CornellFTP.M2Crypto.ftpslib.error_perm:
        exctype, excvalue, exctb = sys.exc_info()
        dlm_cout.outs("FTP error getting file information.\n" \
                        "\tGUID: %s\n\tError: %s" % \
                        (request['guid'], \
                        "".join(traceback.format_exception_only(exctype, excvalue))))
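
The error handler above uses the classic sys.exc_info() idiom for reporting a caught exception. Here is a small runnable sketch of the same idiom; outs() and the GUID are stand-ins for the project's dlm_cout.outs logger and restore request, and the raised error is a placeholder:

    import sys
    import traceback

    def outs(msg):
        # Stand-in for the pipeline's dlm_cout.outs logger.
        print(msg)

    def report_ftp_error(guid):
        try:
            raise RuntimeError("550 Permission denied")   # placeholder failure
        except RuntimeError:
            # Grab the in-flight exception and format only its type and message.
            exctype, excvalue, exctb = sys.exc_info()
            errmsg = "".join(traceback.format_exception_only(exctype, excvalue))
            outs("FTP error getting file information.\n"
                 "\tGUID: %s\n\tError: %s" % (guid, errmsg.strip()))

    report_ftp_error("example-guid")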

src/p/i/pipeline2.0-HEAD/lib/python/job.py   pipeline2.0
    status_str += "Num. of jobs permanently failed: %d\n" % len(dead_jobs)
    if log:
        jobpool_cout.outs(status_str)
    else:
        print(status_str)
 
    if complete_groups:
        jobpool_cout.outs("Inserting %d new entries into jobs table" % \
                            len(complete_groups))
    for complete in complete_groups:
 
                errormsg = config.jobpooler.queue_manager.get_errors(submit['queue_id'])
 
                jobpool_cout.outs("Processing of Job #%d (Submit ID: %d; Queue ID: %s) " \
                                    "had errors." % \
                                (submit['job_id'], submit['id'], submit['queue_id']))
                             "WHERE id=%d" % \
                             (jobtracker.nowstr(), job['id']))
            jobpool_cout.outs("Job #%d will be retried." % job['id'])
        else:
            # We've run out of attempts for this job
 
                msg += "*** Job will NOT be re-submitted! ***\n"
                if config.basic.delete_rawdata:
                    jobpool_cout.outs("Job #%d will NOT be retried. " \
                                        "Data files will be deleted." % job['id'])
                    msg += "*** Raw data files will be deleted. ***\n"