Did I find the right examples for you? yes no

All Samples(11)  |  Call(11)  |  Derive(0)  |  Import(0)
@type gs_access_key_id: string
@param gs_access_key_id: Your Google Cloud Storage Access Key ID

@type gs_secret_access_key: string
@param gs_secret_access_key: Your Google Cloud Storage Secret Access Key

@rtype: L{GSConnection<boto.gs.connection.GSConnection>}
@return: A connection to Google's Storage service

        def connect_gs(gs_access_key_id=None, gs_secret_access_key=None, **kwargs):
    """
    @type gs_access_key_id: string
    @param gs_access_key_id: Your Google Cloud Storage Access Key ID

    @type gs_secret_access_key: string
    @param gs_secret_access_key: Your Google Cloud Storage Secret Access Key

    @rtype: L{GSConnection}
    @return: A connection to Google's Storage service
    """
    from boto.gs.connection import GSConnection
    return GSConnection(gs_access_key_id, gs_secret_access_key, **kwargs)
        


src/g/r/grow-0.0.27/grow/deployments/google_cloud_storage.py   grow(Download)
  def prelaunch(self):
    """Run the base prelaunch steps, then open the GCS bucket for deployment."""
    super(BaseGoogleCloudStorageDeployment, self).prelaunch()
    logging.info('Connecting to GCS...')
    # is_secure=False: plain HTTP connection — presumably intentional here;
    # NOTE(review): confirm TLS is not required for this deployment target.
    gs_conn = boto.connect_gs(self.access_key, self.secret, is_secure=False)
    self.bucket = gs_conn.get_bucket(self.bucket_name)

src/p/y/pygrow-HEAD/grow/deployments/google_cloud_storage.py   pygrow(Download)
  def prelaunch(self, dry_run=False):
    """Open a connection to GCS and bind the target bucket."""
    logging.info('Connecting to GCS...')
    # is_secure=False: plain HTTP connection — presumably intentional here;
    # NOTE(review): confirm TLS is not required for this deployment target.
    gs_conn = boto.connect_gs(self.access_key, self.secret, is_secure=False)
    self.bucket = gs_conn.get_bucket(self.bucket_name)
    logging.info('Connected!')

src/o/f/ofs-0.4.2/ofs/remote/botostore.py   ofs(Download)
    def __init__(self, gs_access_key_id=None, gs_secret_access_key=None, **kwargs):
        """Create a Google Storage OFS backend.

        Extra keyword arguments are forwarded to ``boto.connect_gs``.
        """
        super(GSOFS, self).__init__(
            boto.connect_gs(gs_access_key_id, gs_secret_access_key, **kwargs))
 
class ArchiveOrgOFS(S3OFS):

src/o/f/ofs-HEAD/ofs/remote/botostore.py   ofs(Download)
    def __init__(self, gs_access_key_id=None, gs_secret_access_key=None, **kwargs):
        """Create a Google Storage OFS backend.

        Extra keyword arguments are forwarded to ``boto.connect_gs``.
        """
        gs_connection = boto.connect_gs(
            gs_access_key_id, gs_secret_access_key, **kwargs)
        super(GSOFS, self).__init__(gs_connection)
 
class ArchiveOrgOFS(S3OFS):

src/g/r/grow-0.0.27/grow/deployments/google_cloud_storage_from_app_engine.py   grow(Download)
  def deploy(self, pod, dry_run=False):
    source_connection = boto.connect_gs(self.source_keys[0], self.source_keys[1])
    source_bucket = source_connection.get_bucket(config.BUCKET)
 
    dest_connection = boto.connect_gs(self.dest_keys[0], self.dest_keys[1])

src/p/y/pygrow-HEAD/grow/deployments/google_cloud_storage_from_app_engine.py   pygrow(Download)
  def deploy(self, pod, dry_run=False):
    source_connection = boto.connect_gs(self.source_keys[0], self.source_keys[1])
    source_bucket = source_connection.get_bucket(config.BUCKET)
 
    dest_connection = boto.connect_gs(self.dest_keys[0], self.dest_keys[1])

src/c/l/CloudFusion-HEAD/cloudfusion/store/gs/google_store.py   CloudFusion(Download)
    def reconnect(self):
        self.conn = boto.connect_gs(self.access_key_id,self.secret_access_key)
        buckets = map( lambda x: x.name, self.conn.get_all_buckets())
        if not self.bucket_name in buckets:
            try:

src/g/i/git-bigstore-0.2.6/bigstore/backends/google.py   git-bigstore(Download)
    def __init__(self, key, secret, bucket_name):
        """Remember the GCS credentials/bucket name and open a boto connection."""
        # Keep the raw credentials around; other methods presumably reuse
        # them — confirm against the rest of the class.
        self.access_key, self.secret, self.bucket = key, secret, bucket_name
        self.conn = boto.connect_gs(key, secret)

src/g/i/git-bigstore-HEAD/bigstore/backends/google.py   git-bigstore(Download)
    def __init__(self, key, secret, bucket_name):
        """Record GCS credentials plus bucket name and connect via boto."""
        self.conn = boto.connect_gs(key, secret)
        # Credentials are also stored on the instance; presumably reused by
        # other methods — confirm against the rest of the class.
        self.access_key = key
        self.secret = secret
        self.bucket = bucket_name