Python boto module, s3() code examples

We have extracted the following 50 code examples from open-source Python projects to illustrate how to use boto.s3()
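
A minimal sketch of the workflow the examples below build on: open a connection, fetch a bucket, and iterate over its keys. The bucket name "my-bucket" and the use of default credentials (from ~/.boto or environment variables) are illustrative assumptions, not part of any project listed here.

# Minimal boto.s3 sketch; the bucket name and credential source are assumptions.
import boto
import boto.s3

conn = boto.connect_s3()                 # reads credentials from ~/.boto or the environment
bucket = conn.get_bucket("my-bucket")    # "my-bucket" is a placeholder bucket name
for key in bucket.list():
    print("%s %d" % (key.name, key.size))  # each key is a boto.s3.key.Key instance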

Project: mongodb_consistent_backup    Author: Percona-Lab    | project source | file source
def connect(self):
        if not self._conn:
            try:
                logging.debug("Connecting to AWS S3 with Access Key: %s" % self.access_key)
                self._conn = boto.s3.connect_to_region(
                    self.region,
                    aws_access_key_id=self.access_key,
                    aws_secret_access_key=self.secret_key,
                    is_secure=self.secure,
                    calling_format=self.calling_format
                )
                logging.debug("Successfully connected to AWS S3 with Access Key: %s" % self.access_key)
            except Exception as e:
                logging.error("Cannot connect to AWS S3 with Access Key: %s!" % self.access_key)
                raise OperationError(e)
        return self._conn
Project: drift    Author: dgnorth    | project source | file source
def init_command(args):
    # Fetch master config file from disk, url or s3 bucket.
    master_config = vars(args)["master-config"]
    json_text = fetch(master_config)
    if not json_text:
        return
    verify_master_config(json.loads(json_text))

    print "Initializing master config file {}".format(master_config)
    with open(get_config_path(TIERS_CONFIG_FILENAME), "w") as f:
        f.write(json_text)

    # Remove current tier selection
    tier_selection_file = get_config_path("TIER")
    if os.path.exists(tier_selection_file):
        os.remove(tier_selection_file)

    # Report currently selected config
    get_tiers_config()

    if args.activate:
        print "Activating tier '{}'...".format(args.activate)
        args.tier = args.activate
        args.vpn = False
        use_command(args)
Project: sci-pype    Author: jay-johnson    | project source | file source
def ml_get_training_base_dir(self, dataset_name, debug=False):

        basedir_path        =  os.getenv("ENV_SYNTHESIZE_DIR", "/opt/work/data/src")

        # Prob want to sync these with a common s3 location in the future:
        if self.running_on_aws():
            basedir_path    =  os.getenv("ENV_SYNTHESIZE_DIR", "/opt/work/data/src")

        return basedir_path
    # end of ml_get_training_base_dir


    #####################################################################################################
    #
    # Seaborn Methods
    #
    #####################################################################################################
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def get_logging_status(self, headers=None):
        """
        Get the logging status for this bucket.

        :rtype: :class:`boto.s3.bucketlogging.BucketLogging`
        :return: A BucketLogging object for this bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='logging', headers=headers)
        body = response.read()
        if response.status == 200:
            blogging = BucketLogging()
            h = handler.XmlHandler(blogging, self)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')
            xml.sax.parseString(body, h)
            return blogging
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def get_lifecycle_config(self, headers=None):
        """
        Returns the current lifecycle configuration on the bucket.

        :rtype: :class:`boto.s3.lifecycle.Lifecycle`
        :returns: A LifecycleConfig object that describes all current
            lifecycle rules in effect for the bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='lifecycle', headers=headers)
        body = response.read()
        boto.log.debug(body)
        if response.status == 200:
            lifecycle = Lifecycle()
            h = handler.XmlHandler(lifecycle, self)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')
            xml.sax.parseString(body, h)
            return lifecycle
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: DevOps    Author: YoLoveLife    | project source | file source
def download_s3file(module, s3, bucket, obj, dest, retries, version=None):
    # retries is the number of loops; range/xrange needs to be one
    # more to get that count of loops.
    bucket = s3.lookup(bucket)
    key = bucket.get_key(obj, version_id=version)
    for x in range(0, retries + 1):
        try:
            key.get_contents_to_filename(dest)
            module.exit_json(msg="GET operation complete", changed=True)
        except s3.provider.storage_copy_error as e:
            module.fail_json(msg= str(e))
        except SSLError as e:
            # actually fail on last pass through the loop.
            if x >= retries:
                module.fail_json(msg="s3 download failed; %s" % e)
            # otherwise, try again, this may be a transient timeout.
            pass
Project: datanode    Author: jay-johnson    | project source | file source
def ml_get_training_base_dir(self, dataset_name, debug=False):

        basedir_path        =  os.getenv("ENV_SYNTHESIZE_DIR", "/opt/work/data/src")

        # Prob want to sync these with a common s3 location in the future:
        if self.running_on_aws():
            basedir_path    =  os.getenv("ENV_SYNTHESIZE_DIR", "/opt/work/data/src")

        return basedir_path
    # end of ml_get_training_base_dir


    #####################################################################################################
    #
    # Seaborn Methods
    #
    #####################################################################################################
Project: learneveryword    Author: karan    | project source | file source
def get_logging_status(self, headers=None):
        """
        Get the logging status for this bucket.

        :rtype: :class:`boto.s3.bucketlogging.BucketLogging`
        :return: A BucketLogging object for this bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='logging', headers=headers)
        body = response.read()
        if response.status == 200:
            blogging = BucketLogging()
            h = handler.XmlHandler(blogging, self)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')
            xml.sax.parseString(body, h)
            return blogging
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: learneveryword    Author: karan    | project source | file source
def get_lifecycle_config(self, headers=None):
        """
        Returns the current lifecycle configuration on the bucket.

        :rtype: :class:`boto.s3.lifecycle.Lifecycle`
        :returns: A LifecycleConfig object that describes all current
            lifecycle rules in effect for the bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='lifecycle', headers=headers)
        body = response.read()
        boto.log.debug(body)
        if response.status == 200:
            lifecycle = Lifecycle()
            h = handler.XmlHandler(lifecycle, self)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')
            xml.sax.parseString(body, h)
            return lifecycle
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: Chromium_DepotTools    Author: p07r0457    | project source | file source
def get_logging_status(self, headers=None):
        """
        Get the logging status for this bucket.

        :rtype: :class:`boto.s3.bucketlogging.BucketLogging`
        :return: A BucketLogging object for this bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='logging', headers=headers)
        body = response.read()
        if response.status == 200:
            blogging = BucketLogging()
            h = handler.XmlHandler(blogging, self)
            xml.sax.parseString(body, h)
            return blogging
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: Chromium_DepotTools    Author: p07r0457    | project source | file source
def get_lifecycle_config(self, headers=None):
        """
        Returns the current lifecycle configuration on the bucket.

        :rtype: :class:`boto.s3.lifecycle.Lifecycle`
        :returns: A LifecycleConfig object that describes all current
            lifecycle rules in effect for the bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='lifecycle', headers=headers)
        body = response.read()
        boto.log.debug(body)
        if response.status == 200:
            lifecycle = Lifecycle()
            h = handler.XmlHandler(lifecycle, self)
            xml.sax.parseString(body, h)
            return lifecycle
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: node-gn    Author: Shouqun    | project source | file source
def get_logging_status(self, headers=None):
        """
        Get the logging status for this bucket.

        :rtype: :class:`boto.s3.bucketlogging.BucketLogging`
        :return: A BucketLogging object for this bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='logging', headers=headers)
        body = response.read()
        if response.status == 200:
            blogging = BucketLogging()
            h = handler.XmlHandler(blogging, self)
            xml.sax.parseString(body, h)
            return blogging
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: node-gn    Author: Shouqun    | project source | file source
def get_lifecycle_config(self, headers=None):
        """
        Returns the current lifecycle configuration on the bucket.

        :rtype: :class:`boto.s3.lifecycle.Lifecycle`
        :returns: A LifecycleConfig object that describes all current
            lifecycle rules in effect for the bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='lifecycle', headers=headers)
        body = response.read()
        boto.log.debug(body)
        if response.status == 200:
            lifecycle = Lifecycle()
            h = handler.XmlHandler(lifecycle, self)
            xml.sax.parseString(body, h)
            return lifecycle
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: alfred-ec2    Author: SoMuchToGrok    | project source | file source
def get_logging_status(self, headers=None):
        """
        Get the logging status for this bucket.

        :rtype: :class:`boto.s3.bucketlogging.BucketLogging`
        :return: A BucketLogging object for this bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='logging', headers=headers)
        body = response.read()
        if response.status == 200:
            blogging = BucketLogging()
            h = handler.XmlHandler(blogging, self)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')
            xml.sax.parseString(body, h)
            return blogging
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: alfred-ec2    Author: SoMuchToGrok    | project source | file source
def get_lifecycle_config(self, headers=None):
        """
        Returns the current lifecycle configuration on the bucket.

        :rtype: :class:`boto.s3.lifecycle.Lifecycle`
        :returns: A LifecycleConfig object that describes all current
            lifecycle rules in effect for the bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='lifecycle', headers=headers)
        body = response.read()
        boto.log.debug(body)
        if response.status == 200:
            lifecycle = Lifecycle()
            h = handler.XmlHandler(lifecycle, self)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')
            xml.sax.parseString(body, h)
            return lifecycle
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: depot_tools    Author: webrtc-uwp    | project source | file source
def get_logging_status(self, headers=None):
        """
        Get the logging status for this bucket.

        :rtype: :class:`boto.s3.bucketlogging.BucketLogging`
        :return: A BucketLogging object for this bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='logging', headers=headers)
        body = response.read()
        if response.status == 200:
            blogging = BucketLogging()
            h = handler.XmlHandler(blogging, self)
            xml.sax.parseString(body, h)
            return blogging
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: depot_tools    Author: webrtc-uwp    | project source | file source
def get_lifecycle_config(self, headers=None):
        """
        Returns the current lifecycle configuration on the bucket.

        :rtype: :class:`boto.s3.lifecycle.Lifecycle`
        :returns: A LifecycleConfig object that describes all current
            lifecycle rules in effect for the bucket.
        """
        response = self.connection.make_request('GET', self.name,
                query_args='lifecycle', headers=headers)
        body = response.read()
        boto.log.debug(body)
        if response.status == 200:
            lifecycle = Lifecycle()
            h = handler.XmlHandler(lifecycle, self)
            xml.sax.parseString(body, h)
            return lifecycle
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: z3    Author: PressLabs    | project source | file source
def main():
    cfg = get_config()
    parser = argparse.ArgumentParser(
        description='Read a key from s3 and write the content to stdout',
    )
    parser.add_argument('name', help='name of S3 key')
    args = parser.parse_args()
    bucket = boto.connect_s3(
        cfg['S3_KEY_ID'], cfg['S3_SECRET']).get_bucket(cfg['BUCKET'])
    download(bucket, args.name)
Project: sci-pype    Author: jay-johnson    | project source | file source
def s3_create_new_bucket(self, bucketname, bucket_location="sa-east-1", debug=False):

        record              = {
                            }

        results             = self.build_def_hash("Display Error", "Not Run", record)

        try:

            cur_keys        = self.aws_get_keys(debug)

            import boto 
            import boto.s3

            conn_s3         = boto.connect_s3(cur_keys["Key"], cur_keys["Secret"])
            bucket          = conn_s3.create_bucket(bucketname, location=bucket_location)

            if bucket:
                self.lg("Created Bucket(" + str(bucketname) + ")", 6)
                results     = self.build_def_hash("SUCCESS", "", {})
            else:
                results     = self.build_def_hash("Display Error", "Failed to Create Bucket(" + str(bucketname) + ")", {})

        except Exception as k:
            status          = "FAILED"
            err_msg         = "Unable to Create new S3 Bucket(" + str(bucketname) + ") with Ex(" + str(k) + ")"
            self.lg("ERROR: " + str(err_msg), 0)
            results         = self.build_def_hash("Display Error", err_msg, {})
        # end of try/ex

        return results
    # end of s3_create_new_bucket
Project: sci-pype    Author: jay-johnson    | project source | file source
def s3_calculate_bucket_size(self, bucket_name, debug=False):

        record          = {
                            "Size"     : 0,
                            "SizeMB"   : 0.0,
                            "SizeGB"   : 0.0,
                            "Files"    : 0 
                        }    

        results         = self.build_def_hash("Display Error", "Not Run", record)

        try:
            import boto, math
            import boto.s3

            cur_keys        = self.aws_get_keys(debug)
            conn_s3         = boto.connect_s3(cur_keys["Key"], cur_keys["Secret"])
            bucket          = conn_s3.get_bucket(bucket_name, validate=False)
            total_bytes     = 0
            for key in bucket:
                record["Size"]  += int(key.size)
                record["Files"] += 1
            # end for all keys

            record["SizeMB"]    = float(self.to_float_str(float(float(record["Size"]) / 1024.0 / 1024.0)))
            record["SizeGB"]    = float(self.to_float_str(float(float(record["SizeMB"]) / 1024.0)))

            results             = self.build_def_hash("SUCCESS", "", record)
        except Exception as w:
            self.lg("Failed to Process S3 Bucket(" + str(bucket_name) + ") Size Ex(" + str(w) + ")", 0)
            results         = self.build_def_hash("Display Error", "Not Run", record)

        return results
    # end of s3_calculate_bucket_size
Project: edx-video-pipeline    Author: edx    | project source | file source
def AWS_UPLOAD(self):
        """
        TODO: Let's make this workflow simpler, we can get a duration
        from the hotstore url, check and delete if needed

        For now, old style workflow with checks and deletes at end
        """
        if not self.video_query.inst_class.s3_proc:
            return False

        if self.video_proto.filesize < self.auth_dict['multi_upload_barrier']:
            """
            Upload single part
            """
            if self._BOTO_SINGLEPART() is False:
                return False

        else:
            """
            Upload multipart
            """
            if self._BOTO_MULTIPART() is False:
                return False

        self.endpoint_url = '/'.join((
            'https://s3.amazonaws.com',
            self.auth_dict['edx_s3_endpoint_bucket'],
            self.encoded_file
        ))
        return True
Project: edx-video-pipeline    Author: edx    | project source | file source
def _BOTO_SINGLEPART(self):
        """
        Upload single part (under threshold in node_config)
        node_config MULTI_UPLOAD_BARRIER
        """
        try:
            conn = boto.connect_s3()
        except S3ResponseError:
            ErrorObject.print_error(
                message='Deliverable Fail: s3 Connection Error\n \
                Check node_config DELIVERY_ENDPOINT'
            )
            return False
        delv_bucket = conn.get_bucket(
            self.auth_dict['edx_s3_endpoint_bucket']
        )
        upload_key = Key(delv_bucket)
        upload_key.key = os.path.basename(os.path.join(
            self.node_work_directory,
            self.encoded_file
        ))
        headers = {"Content-Disposition": "attachment"}
        upload_key.set_contents_from_filename(
            os.path.join(
                self.node_work_directory,
                self.encoded_file
            ),
            headers=headers,
            replace=True
        )
        upload_key.set_acl('public-read')
        return True
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def set_key_class(self, key_class):
        """
        Set the Key class associated with this bucket.  By default, this
        would be the boto.s3.key.Key class but if you want to subclass that
        for some reason this allows you to associate your new class with a
        bucket so that when you call bucket.new_key() or when you get a listing
        of keys in the bucket you will get an instances of your key class
        rather than the default.

        :type key_class: class
        :param key_class: A subclass of Key that can be more specific
        """
        self.key_class = key_class
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def lookup(self, key_name, headers=None):
        """
        Deprecated: Please use get_key method.

        :type key_name: string
        :param key_name: The name of the key to retrieve

        :rtype: :class:`boto.s3.key.Key`
        :returns: A Key object from this bucket.
        """
        return self.get_key(key_name, headers=headers)
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def new_key(self, key_name=None):
        """
        Creates a new key

        :type key_name: string
        :param key_name: The name of the key to create

        :rtype: :class:`boto.s3.key.Key` or subclass
        :returns: An instance of the newly created key object
        """
        if not key_name:
            raise ValueError('Empty key names are not allowed')
        return self.key_class(self, key_name)
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def delete_key(self, key_name, headers=None, version_id=None,
                   mfa_token=None):
        """
        Deletes a key from the bucket.  If a version_id is provided,
        only that version of the key will be deleted.

        :type key_name: string
        :param key_name: The key name to delete

        :type version_id: string
        :param version_id: The version ID (optional)

        :type mfa_token: tuple or list of strings
        :param mfa_token: A tuple or list consisting of the serial
            number from the MFA device and the current value of the
            six-digit token associated with the device.  This value is
            required anytime you are deleting versioned objects from a
            bucket that has the MFADelete option on the bucket.

        :rtype: :class:`boto.s3.key.Key` or subclass
        :returns: A key object holding information on what was
            deleted.  The Caller can see if a delete_marker was
            created or removed and what version_id the delete created
            or removed.
        """
        if not key_name:
            raise ValueError('Empty key names are not allowed')
        return self._delete_key_internal(key_name, headers=headers,
                                         version_id=version_id,
                                         mfa_token=mfa_token,
                                         query_args_l=None)
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def configure_lifecycle(self, lifecycle_config, headers=None):
        """
        Configure lifecycle for this bucket.

        :type lifecycle_config: :class:`boto.s3.lifecycle.Lifecycle`
        :param lifecycle_config: The lifecycle configuration you want
            to configure for this bucket.
        """
        xml = lifecycle_config.to_xml()
        #xml = xml.encode('utf-8')
        fp = StringIO(xml)
        md5 = boto.utils.compute_md5(fp)
        if headers is None:
            headers = {}
        headers['Content-MD5'] = md5[1]
        headers['Content-Type'] = 'text/xml'
        response = self.connection.make_request('PUT', self.name,
                                                data=fp.getvalue(),
                                                query_args='lifecycle',
                                                headers=headers)
        body = response.read()
        if response.status == 200:
            return True
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def set_website_configuration(self, config, headers=None):
        """
        :type config: boto.s3.website.WebsiteConfiguration
        :param config: Configuration data
        """
        return self.set_website_configuration_xml(config.to_xml(),
          headers=headers)
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def get_website_configuration_obj(self, headers=None):
        """Get the website configuration as a
        :class:`boto.s3.website.WebsiteConfiguration` object.
        """
        config_xml = self.get_website_configuration_xml(headers=headers)
        config = website.WebsiteConfiguration()
        h = handler.XmlHandler(config, self)
        xml.sax.parseString(config_xml, h)
        return config
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def set_cors(self, cors_config, headers=None):
        """
        Set the CORS for this bucket given a boto CORSConfiguration
        object.

        :type cors_config: :class:`boto.s3.cors.CORSConfiguration`
        :param cors_config: The CORS configuration you want
            to configure for this bucket.
        """
        return self.set_cors_xml(cors_config.to_xml())
Project: cuny-bdif    Author: aristotle-tek    | project source | file source
def get_cors(self, headers=None):
        """
        Returns the current CORS configuration on the bucket.

        :rtype: :class:`boto.s3.cors.CORSConfiguration`
        :returns: A CORSConfiguration object that describes all current
            CORS rules in effect for the bucket.
        """
        body = self.get_cors_xml(headers)
        cors = CORSConfiguration()
        h = handler.XmlHandler(cors, self)
        xml.sax.parseString(body, h)
        return cors
Project: DevOps    Author: YoLoveLife    | project source | file source
def key_check(module, s3, bucket, obj, version=None):
    try:
        bucket = s3.lookup(bucket)
        key_check = bucket.get_key(obj, version_id=version)
    except s3.provider.storage_response_error as e:
        if version is not None and e.status == 400: # If a specified version doesn't exist a 400 is returned.
            key_check = None
        else:
            module.fail_json(msg=str(e))
    if key_check:
        return True
    else:
        return False
Project: DevOps    Author: YoLoveLife    | project source | file source
def keysum(module, s3, bucket, obj, version=None):
    bucket = s3.lookup(bucket)
    key_check = bucket.get_key(obj, version_id=version)
    if not key_check:
        return None
    md5_remote = key_check.etag[1:-1]
    etag_multipart = '-' in md5_remote # Check for multipart, etag is not md5
    if etag_multipart is True:
        module.fail_json(msg="Files uploaded with multipart of s3 are not supported with checksum, unable to compute checksum.")
    return md5_remote
Project: DevOps    Author: YoLoveLife    | project source | file source
def bucket_check(module, s3, bucket):
    try:
        result = s3.lookup(bucket)
    except s3.provider.storage_response_error as e:
        module.fail_json(msg= str(e))
    if result:
        return True
    else:
        return False
Project: DevOps    Author: YoLoveLife    | project source | file source
def create_bucket(module, s3, bucket, location=None):
    if location is None:
        location = Location.DEFAULT
    try:
        bucket = s3.create_bucket(bucket, location=location)
        for acl in module.params.get('permission'):
            bucket.set_acl(acl)
    except s3.provider.storage_response_error as e:
        module.fail_json(msg= str(e))
    if bucket:
        return True
Project: DevOps    Author: YoLoveLife    | project source | file source
def get_bucket(module, s3, bucket):
    try:
        return s3.lookup(bucket)
    except s3.provider.storage_response_error as e:
        module.fail_json(msg= str(e))
Project: DevOps    Author: YoLoveLife    | project source | file source
def delete_key(module, s3, bucket, obj):
    try:
        bucket = s3.lookup(bucket)
        bucket.delete_key(obj)
        module.exit_json(msg="Object deleted from bucket %s"%bucket, changed=True)
    except s3.provider.storage_response_error as e:
        module.fail_json(msg= str(e))
Project: DevOps    Author: YoLoveLife    | project source | file source
def create_dirkey(module, s3, bucket, obj):
    try:
        bucket = s3.lookup(bucket)
        key = bucket.new_key(obj)
        key.set_contents_from_string('')
        module.exit_json(msg="Virtual directory %s created in bucket %s" % (obj, bucket.name), changed=True)
    except s3.provider.storage_response_error as e:
        module.fail_json(msg= str(e))
Project: DevOps    Author: YoLoveLife    | project source | file source
def upload_s3file(module, s3, bucket, obj, src, expiry, metadata, encrypt, headers):
    try:
        bucket = s3.lookup(bucket)
        key = bucket.new_key(obj)
        if metadata:
            for meta_key in metadata.keys():
                key.set_metadata(meta_key, metadata[meta_key])

        key.set_contents_from_filename(src, encrypt_key=encrypt, headers=headers)
        for acl in module.params.get('permission'):
            key.set_acl(acl)
        url = key.generate_url(expiry)
        module.exit_json(msg="PUT operation complete", url=url, changed=True)
    except s3.provider.storage_copy_error as e:
        module.fail_json(msg= str(e))
Project: DevOps    Author: YoLoveLife    | project source | file source
def download_s3str(module, s3, bucket, obj, version=None):
    try:
        bucket = s3.lookup(bucket)
        key = bucket.get_key(obj, version_id=version)
        contents = key.get_contents_as_string()
        module.exit_json(msg="GET operation complete", contents=contents, changed=True)
    except s3.provider.storage_copy_error as e:
        module.fail_json(msg= str(e))
Project: datanode    Author: jay-johnson    | project source | file source
def s3_create_new_bucket(self, bucketname, bucket_location="sa-east-1", debug=False):

        record              = {
                            }

        results             = self.build_def_hash("Display Error", "Not Run", record)

        try:

            cur_keys        = self.aws_get_keys(debug)

            import boto 
            import boto.s3

            conn_s3         = boto.connect_s3(cur_keys["Key"], cur_keys["Secret"])
            bucket          = conn_s3.create_bucket(bucketname, location=bucket_location)

            if bucket:
                self.lg("Created Bucket(" + str(bucketname) + ")", 6)
                results     = self.build_def_hash("SUCCESS", "", {})
            else:
                results     = self.build_def_hash("Display Error", "Failed to Create Bucket(" + str(bucketname) + ")", {})

        except Exception as k:
            status          = "FAILED"
            err_msg         = "Unable to Create new S3 Bucket(" + str(bucketname) + ") with Ex(" + str(k) + ")"
            self.lg("ERROR: " + str(err_msg), 0)
            results         = self.build_def_hash("Display Error", err_msg, {})
        # end of try/ex

        return results
    # end of s3_create_new_bucket
Project: datanode    Author: jay-johnson    | project source | file source
def s3_calculate_bucket_size(self, bucket_name, debug=False):

        record          = {
                            "Size"     : 0,
                            "SizeMB"   : 0.0,
                            "SizeGB"   : 0.0,
                            "Files"    : 0 
                        }    

        results         = self.build_def_hash("Display Error", "Not Run", record)

        try:
            import boto, math
            import boto.s3

            cur_keys        = self.aws_get_keys(debug)
            conn_s3         = boto.connect_s3(cur_keys["Key"], cur_keys["Secret"])
            bucket          = conn_s3.get_bucket(bucket_name, validate=False)
            total_bytes     = 0
            for key in bucket:
                record["Size"]  += int(key.size)
                record["Files"] += 1
            # end for all keys

            record["SizeMB"]    = float(self.to_float_str(float(float(record["Size"]) / 1024.0 / 1024.0)))
            record["SizeGB"]    = float(self.to_float_str(float(float(record["SizeMB"]) / 1024.0)))

            results             = self.build_def_hash("SUCCESS", "", record)
        except Exception as w:
            self.lg("Failed to Process S3 Bucket(" + str(bucket_name) + ") Size Ex(" + str(w) + ")", 0)
            results         = self.build_def_hash("Display Error", "Not Run", record)

        return results
    # end of s3_calculate_bucket_size
Project: learneveryword    Author: karan    | project source | file source
def set_key_class(self, key_class):
        """
        Set the Key class associated with this bucket.  By default, this
        would be the boto.s3.key.Key class but if you want to subclass that
        for some reason this allows you to associate your new class with a
        bucket so that when you call bucket.new_key() or when you get a listing
        of keys in the bucket you will get an instances of your key class
        rather than the default.

        :type key_class: class
        :param key_class: A subclass of Key that can be more specific
        """
        self.key_class = key_class
Project: learneveryword    Author: karan    | project source | file source
def lookup(self, key_name, headers=None):
        """
        Deprecated: Please use get_key method.

        :type key_name: string
        :param key_name: The name of the key to retrieve

        :rtype: :class:`boto.s3.key.Key`
        :returns: A Key object from this bucket.
        """
        return self.get_key(key_name, headers=headers)
Project: learneveryword    Author: karan    | project source | file source
def new_key(self, key_name=None):
        """
        Creates a new key

        :type key_name: string
        :param key_name: The name of the key to create

        :rtype: :class:`boto.s3.key.Key` or subclass
        :returns: An instance of the newly created key object
        """
        if not key_name:
            raise ValueError('Empty key names are not allowed')
        return self.key_class(self, key_name)
Project: learneveryword    Author: karan    | project source | file source
def delete_key(self, key_name, headers=None, version_id=None,
                   mfa_token=None):
        """
        Deletes a key from the bucket.  If a version_id is provided,
        only that version of the key will be deleted.

        :type key_name: string
        :param key_name: The key name to delete

        :type version_id: string
        :param version_id: The version ID (optional)

        :type mfa_token: tuple or list of strings
        :param mfa_token: A tuple or list consisting of the serial
            number from the MFA device and the current value of the
            six-digit token associated with the device.  This value is
            required anytime you are deleting versioned objects from a
            bucket that has the MFADelete option on the bucket.

        :rtype: :class:`boto.s3.key.Key` or subclass
        :returns: A key object holding information on what was
            deleted.  The Caller can see if a delete_marker was
            created or removed and what version_id the delete created
            or removed.
        """
        if not key_name:
            raise ValueError('Empty key names are not allowed')
        return self._delete_key_internal(key_name, headers=headers,
                                         version_id=version_id,
                                         mfa_token=mfa_token,
                                         query_args_l=None)
Project: learneveryword    Author: karan    | project source | file source
def configure_lifecycle(self, lifecycle_config, headers=None):
        """
        Configure lifecycle for this bucket.

        :type lifecycle_config: :class:`boto.s3.lifecycle.Lifecycle`
        :param lifecycle_config: The lifecycle configuration you want
            to configure for this bucket.
        """
        xml = lifecycle_config.to_xml()
        #xml = xml.encode('utf-8')
        fp = StringIO(xml)
        md5 = boto.utils.compute_md5(fp)
        if headers is None:
            headers = {}
        headers['Content-MD5'] = md5[1]
        headers['Content-Type'] = 'text/xml'
        response = self.connection.make_request('PUT', self.name,
                                                data=fp.getvalue(),
                                                query_args='lifecycle',
                                                headers=headers)
        body = response.read()
        if response.status == 200:
            return True
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Project: learneveryword    Author: karan    | project source | file source
def set_website_configuration(self, config, headers=None):
        """
        :type config: boto.s3.website.WebsiteConfiguration
        :param config: Configuration data
        """
        return self.set_website_configuration_xml(config.to_xml(),
          headers=headers)
Project: learneveryword    Author: karan    | project source | file source
def get_website_configuration_obj(self, headers=None):
        """Get the website configuration as a
        :class:`boto.s3.website.WebsiteConfiguration` object.
        """
        config_xml = self.get_website_configuration_xml(headers=headers)
        config = website.WebsiteConfiguration()
        h = handler.XmlHandler(config, self)
        xml.sax.parseString(config_xml, h)
        return config
Project: learneveryword    Author: karan    | project source | file source
def set_cors(self, cors_config, headers=None):
        """
        Set the CORS for this bucket given a boto CORSConfiguration
        object.

        :type cors_config: :class:`boto.s3.cors.CORSConfiguration`
        :param cors_config: The CORS configuration you want
            to configure for this bucket.
        """
        return self.set_cors_xml(cors_config.to_xml())