Instantiate a StorageUri from a URI string.


def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
                bucket_storage_uri_class=BucketStorageUri,
                suppress_consec_slashes=True):
    """
    Instantiate a StorageUri from a URI string.

    :type uri_str: string
    :param uri_str: URI naming bucket + optional object.
    :type default_scheme: string
    :param default_scheme: default scheme for scheme-less URIs.
    :type debug: int
    :param debug: debug level to pass in to boto connection (range 0..2).
    :type validate: bool
    :param validate: whether to check for bucket name validity.
    :type bucket_storage_uri_class: BucketStorageUri interface
    :param bucket_storage_uri_class: Allows mocking for unit tests.
    :type suppress_consec_slashes: bool
    :param suppress_consec_slashes: If provided, controls whether
        consecutive slashes will be suppressed in key paths.

    Validation can be disabled so that the caller can implement
    bucket-level wildcarding outside the boto library (see gsutil).

    :rtype: :class:`boto.StorageUri` subclass
    :return: StorageUri subclass for given URI.

    ``uri_str`` must be one of the following formats:

    * gs://bucket/name
    * s3://bucket/name
    * gs://bucket
    * s3://bucket
    * filename

    The last example uses the default scheme ('file', unless overridden).
    """

    # Manually parse URI components instead of using urlparse.urlparse because
    # what we're calling URIs don't really fit the standard syntax for URIs
    # (the latter includes an optional host/net location part).
    end_scheme_idx = uri_str.find('://')
    if end_scheme_idx == -1:
        # Check for common error: user specifies gs:bucket instead
        # of gs://bucket. Some URI parsers allow this, but it can cause
        # confusion for callers, so we don't.
        if uri_str.find(':') != -1:
            raise InvalidUriError('"%s" contains ":" instead of "://"' % uri_str)
        scheme = default_scheme.lower()
        path = uri_str
    else:
        scheme = uri_str[0:end_scheme_idx].lower()
        path = uri_str[end_scheme_idx + 3:]

    if scheme not in ['file', 's3', 'gs']:
        raise InvalidUriError('Unrecognized scheme "%s"' % scheme)
    if scheme == 'file':
        # For file URIs we have no bucket name, and use the complete path
        # (minus 'file://') as the object name.
        is_stream = False
        if path == '-':
            is_stream = True
        return FileStorageUri(path, debug, is_stream)
    else:
        path_parts = path.split('/', 1)
        bucket_name = path_parts[0]
        # If enabled, ensure the bucket name is valid, to avoid possibly
        # confusing other parts of the code. (For example if we didn't
        # catch bucket names containing ':', when a user tried to connect to
        # the server with that name they might get a confusing error about
        # non-integer port numbers.)
        if (validate and bucket_name and
            # Disallow buckets violating charset or not [3..255] chars total.
            (not re.match(r'^[a-z0-9][a-z0-9\._-]{1,253}[a-z0-9]$', bucket_name)
            # Disallow buckets with individual DNS labels longer than 63.
             or re.search(r'[-_a-z0-9]{64}', bucket_name))):
            raise InvalidUriError('Invalid bucket name in URI "%s"' % uri_str)
        object_name = ''
        if len(path_parts) > 1:
            object_name = path_parts[1]
        return bucket_storage_uri_class(
            scheme, bucket_name, object_name, debug,
            suppress_consec_slashes=suppress_consec_slashes)
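
Before the project samples below, here is a minimal usage sketch based only on the docstring above, assuming a boto 2.x install where boto.storage_uri and boto.exception.InvalidUriError are importable; the bucket, object, and file names are placeholders, and constructing these URI objects does not contact any service.

import boto
from boto.exception import InvalidUriError

# Scheme-qualified URIs yield a bucket-style StorageUri (bucket + optional object).
obj_uri = boto.storage_uri('gs://example-bucket/path/to/object')
bkt_uri = boto.storage_uri('s3://example-bucket')

# A scheme-less string falls back to default_scheme ('file' unless overridden),
# so a plain filename becomes a FileStorageUri; the path '-' marks a stream.
file_uri = boto.storage_uri('/tmp/local-file.txt')

# validate=False skips the bucket-name check, e.g. for names that violate the
# lower-case charset rule (the kludge the wal-e sample below relies on).
legacy_uri = boto.storage_uri('s3://UpperCaseBucket/key', validate=False)

# A missing '//' (e.g. 'gs:bucket') or an unrecognized scheme raises InvalidUriError.
try:
    boto.storage_uri('gs:example-bucket')
except InvalidUriError as e:
    print(e)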
        


src/g/s/gsutil-3.42/gslib/storage_uri_builder.py   gsutil
    """
    return boto.storage_uri(
        uri_str, 'file', debug=self.debug, validate=False,
        bucket_storage_uri_class=self.bucket_storage_uri_class,
        suppress_consec_slashes=False, is_latest=is_latest)

src/g/s/gsutil-HEAD/gslib/storage_uri_builder.py   gsutil
    """
    return boto.storage_uri(
        uri_str, 'file', debug=self.debug, validate=False,
        bucket_storage_uri_class=self.bucket_storage_uri_class,
        suppress_consec_slashes=False, is_latest=is_latest)

src/g/s/gsutil-3.42/gslib/wildcard_iterator.py   gsutil
          bucket_uris.append(
              boto.storage_uri(
                  uri_str, debug=self.debug,
                  bucket_storage_uri_class=self.bucket_storage_uri_class,
                  suppress_consec_slashes=False))
    uri = boto.storage_uri(
        uri_or_str, debug=debug, validate=False,
        bucket_storage_uri_class=bucket_storage_uri_class,
        suppress_consec_slashes=False)

src/g/s/gsutil-HEAD/gslib/wildcard_iterator.py   gsutil
          bucket_uris.append(
              boto.storage_uri(
                  uri_str, debug=self.debug,
                  bucket_storage_uri_class=self.bucket_storage_uri_class,
                  suppress_consec_slashes=False))
    uri = boto.storage_uri(
        uri_or_str, debug=debug, validate=False,
        bucket_storage_uri_class=bucket_storage_uri_class,
        suppress_consec_slashes=False)

src/h/e/heroku-wal-e-HEAD/wal_e/worker/s3_worker.py   heroku-wal-e
    # XXX: disable validation as a kludge to get around use of
    # upper-case bucket names.
    suri = boto.storage_uri(s3_uri, validate=False)
    k = suri.new_key()
 
    def download():
        with open(path, 'wb') as decomp_out:
            suri = boto.storage_uri(s3_url, validate=False)
            bucket = suri.get_bucket()
            key = bucket.get_key(suri.object_name)

src/s/y/syndicate-HEAD/RG/drivers/gstorage/driver.py   syndicate
def create_bucket(bucket_name):
 
    # instantiate a BucketStorageUri object.
    uri = boto.storage_uri(bucket_name, GOOGLE_STORAGE)
 
def list_buckets():
 
    uri = boto.storage_uri('', GOOGLE_STORAGE)
 
    for bucket in uri.get_all_buckets(headers=HEADER_VALUES):
def delete_bucket(bucket_name):
 
    uri = boto.storage_uri(bucket, GOOGLE_STORAGE)
 
 
    contents = file(file_name, 'r')
 
    dst_uri = boto.storage_uri(bucket_name + '/' + file_name, GOOGLE_STORAGE)
 
    # the key-related functions are a consequence of boto's interoperability with s3
    object_contents = StringIO.StringIO()
 
    src_uri = boto.storage_uri(bucket_name + '/' + file_name, GOOGLE_STORAGE)
 
    # get_file() doesn't return the file contents

src/g/o/google-storage-ui-HEAD/gs.py   google-storage-ui
		(error code, reason for error) if bucket creation fails"""	
	try:
		bucket_uri = boto.storage_uri(bucketName,"gs")
		bucket_uri.create_bucket()
		return (0,"Successfully created!")
	Returns: (0,"Successfully deleted") if bucket has been created. """
	try:
		bucket_uri = boto.storage_uri(bucketName,"gs")
		objs = bucket_uri.get_bucket()
		if objs:
def getBuckets():
	"""Get all the buckets in your account"""
	try:
		uri = boto.storage_uri("","gs")
		buckets = uri.get_all_buckets()
def getObjects(bucketname):
	"""Get objects in specified bucketname"""
	try:
		uri = boto.storage_uri(bucketname,"gs")
		objs = uri.get_bucket()
def downloadObject(bucketname,objname,dest_dir,cb=callBack):
	"""Download a specific object from an exising bucket into a specified directory"""
	src_uri = boto.storage_uri(bucketname+"/"+objname,"gs")
	dst_uri = boto.storage_uri(dest_dir,"file")
 

src/s/t/storyboard-HEAD/storage/views.py   storyboard
                # Google Storage
                if STORAGE_SERVICE == 'gs':
                    dst_uri = boto.storage_uri(STORAGE_BUCKET, STORAGE_SERVICE)
 
                    new_dst_uri = dst_uri.clone_replace_name(file_name)
 
    if STORAGE_SERVICE == 'gs':
        src_uri = boto.storage_uri(s.bucket + '/' + s.path, 'gs')
        src_key = src_uri.get_key()
        tmp = tempfile.TemporaryFile()
        read_path =  '/%s/%s/%s'% (s.storage, s.bucket, s.path)
        #image_data = read_gs(read_path)
        src_uri = boto.storage_uri(s.bucket + '/' + s.path, 'gs')
        src_key = src_uri.get_key()
        tmp = tempfile.TemporaryFile()

src/t/i/TileStache-HEAD/TileStache/Goodies/Caches/GoogleCloud.py   TileStache
        config.set('Credentials', 'gs_secret_access_key', secret)
 
        uri = boto.storage_uri('', GOOGLE_STORAGE)
        for b in uri.get_all_buckets():
          if b.name == bucket:

src/b/o/boto-2.27.0/tests/unit/s3/test_uri.py   boto
import urllib
from boto.exception import InvalidUriError
from boto import storage_uri
from boto.s3.keyfile import KeyFile
from tests.integration.s3.mock_storage_service import MockBucket
    def test_provider_uri(self):
        for prov in ('gs', 's3'):
            uri_str = '%s://' % prov
            uri = boto.storage_uri(uri_str, validate=False,
                suppress_consec_slashes=False)
    def test_bucket_uri_no_trailing_slash(self):
        for prov in ('gs', 's3'):
            uri_str = '%s://bucket' % prov
            uri = boto.storage_uri(uri_str, validate=False,
                suppress_consec_slashes=False)
    def test_bucket_uri_with_trailing_slash(self):
        for prov in ('gs', 's3'):
            uri_str = '%s://bucket/' % prov
            uri = boto.storage_uri(uri_str, validate=False,
                suppress_consec_slashes=False)
    def test_non_versioned_object_uri(self):
        for prov in ('gs', 's3'):
            uri_str = '%s://bucket/obj/a/b' % prov
            uri = boto.storage_uri(uri_str, validate=False,
                suppress_consec_slashes=False)
