Python boto.s3.connection module: S3Connection() example source code

We have extracted the following 50 code examples from open-source Python projects to illustrate how to use boto.s3.connection.S3Connection().
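
Before the project samples below, here is a minimal sketch of the pattern most of them share: open a connection, fetch a bucket, and read or write keys. The bucket and key names are hypothetical placeholders, and credentials are assumed to come from the environment or the boto config file.

from boto.s3.connection import S3Connection
from boto.s3.key import Key

# Credentials can also be passed explicitly:
# S3Connection(aws_access_key_id, aws_secret_access_key)
conn = S3Connection()

# 'my-example-bucket' and 'hello.txt' are hypothetical placeholders
bucket = conn.get_bucket('my-example-bucket')
key = Key(bucket)
key.key = 'hello.txt'
key.set_contents_from_string('Hello, S3!')
print(key.get_contents_as_string())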

Project: django-webpacker    Author: MicroPyramid    | Project source | File source
def upload_to_s3(css_file):
    bucket_name = settings.AWS_BUCKET_NAME
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)

    folder = 'webpack_bundles/'
    bucket = conn.get_bucket(bucket_name=bucket_name)

    filename = css_file.split('/')[-1]
    with open(css_file, 'r') as file_obj:
        content = file_obj.read()

    key = folder + filename
    mime = mimetypes.guess_type(filename)[0]
    k = Key(bucket)
    k.key = key  # folder + filename
    k.set_metadata("Content-Type", mime)
    k.set_contents_from_string(content)
    k.set_acl("public-read")
Project: open-source-feeds    Author: mhfowler    | Project source | File source
def s3_list_files_in_folder(s3_path):
    conn = S3Connection(ENV_DICT['AWS_ACCESS_KEY'], ENV_DICT['AWS_SECRET_KEY'])
    bucket = conn.get_bucket(ENV_DICT['S3_BUCKET_NAME'])
    keys = bucket.list(prefix=s3_path)
    keys = [k.name for k in keys]
    return keys
Project: edx-video-pipeline    Author: edx    | Project source | File source
def setUp(self):
        video_proto = VideoProto()
        video_proto.veda_id = 'XXXXXXXX2014-V00TEST'
        self.upload_filepath = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            'test_files',
            'OVTESTFILE_01.mp4'
        )

        with patch.object(Hotstore, '_READ_AUTH', PropertyMock(return_value=lambda: CONFIG_DATA)):
            self.hotstore = Hotstore(
                video_object=video_proto,
                upload_filepath=self.upload_filepath,
                video_proto=video_proto
            )

        # do s3 mocking
        mock = mock_s3_deprecated()
        mock.start()
        conn = S3Connection()
        conn.create_bucket(CONFIG_DATA['veda_s3_hotstore_bucket'])
        self.addCleanup(mock.stop)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def test_storage_uri_regionless(self):
        # First, create a bucket in a different region.
        conn = S3Connection(
            host='s3-us-west-2.amazonaws.com'
        )
        bucket_name = 'keytest-%d' % int(time.time())
        bucket = conn.create_bucket(bucket_name, location=Location.USWest2)
        self.addCleanup(self.nuke_bucket, bucket)

        # Now use ``storage_uri`` to try to make a new key.
        # This would throw a 301 exception.
        suri = boto.storage_uri('s3://%s/test' % bucket_name)
        the_key = suri.new_key()
        the_key.key = 'Test301'
        the_key.set_contents_from_string(
            'This should store in a different region.'
        )

        # Check it a different way.
        alt_conn = boto.connect_s3(host='s3-us-west-2.amazonaws.com')
        alt_bucket = alt_conn.get_bucket(bucket_name)
        alt_key = alt_bucket.get_key('Test301')
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def _get_bucket(self):
        if isinstance(self.config.origin, S3Origin):
            if not self._bucket:
                bucket_dns_name = self.config.origin.dns_name
                bucket_name = bucket_dns_name.replace('.s3.amazonaws.com', '')
                from boto.s3.connection import S3Connection
                s3 = S3Connection(self.connection.aws_access_key_id,
                                  self.connection.aws_secret_access_key,
                                  proxy=self.connection.proxy,
                                  proxy_port=self.connection.proxy_port,
                                  proxy_user=self.connection.proxy_user,
                                  proxy_pass=self.connection.proxy_pass)
                self._bucket = s3.get_bucket(bucket_name)
                self._bucket.distribution = self
                self._bucket.set_key_class(self._object_class)
            return self._bucket
        else:
            raise NotImplementedError('Unable to get_objects on CustomOrigin')
Project: storefact    Author: blue-yonder    | Project source | File source
def _get_s3bucket(host, bucket, access_key, secret_key, force_bucket_suffix=True, create_if_missing=True):
    from boto.s3.connection import S3Connection, OrdinaryCallingFormat, S3ResponseError

    s3con = S3Connection(aws_access_key_id=access_key,
                         aws_secret_access_key=secret_key,
                         host=host, is_secure=False,
                         calling_format=OrdinaryCallingFormat())
    # append the access key to the bucket name as a suffix, unless explicitly disabled
    if force_bucket_suffix and not bucket.lower().endswith('-' + access_key.lower()):
        bucket = bucket + '-' + access_key.lower()
    try:
        return s3con.get_bucket(bucket)
    except S3ResponseError as ex:
        if ex.status == 404:
            if create_if_missing:
                return s3con.create_bucket(bucket)
            else:
                raise IOError("Bucket {} does not exist".format(bucket))
        raise
Project: learneveryword    Author: karan    | Project source | File source
def _get_bucket(self):
        if isinstance(self.config.origin, S3Origin):
            if not self._bucket:
                bucket_dns_name = self.config.origin.dns_name
                bucket_name = bucket_dns_name.replace('.s3.amazonaws.com', '')
                from boto.s3.connection import S3Connection
                s3 = S3Connection(self.connection.aws_access_key_id,
                                  self.connection.aws_secret_access_key,
                                  proxy=self.connection.proxy,
                                  proxy_port=self.connection.proxy_port,
                                  proxy_user=self.connection.proxy_user,
                                  proxy_pass=self.connection.proxy_pass)
                self._bucket = s3.get_bucket(bucket_name)
                self._bucket.distribution = self
                self._bucket.set_key_class(self._object_class)
            return self._bucket
        else:
            raise NotImplementedError('Unable to get_objects on CustomOrigin')
Project: dvc    Author: dataversioncontrol    | Project source | File source
def remove(self, data_item):
        aws_file_name = self.cache_file_key(data_item.cache.dvc)

        Logger.debug(u'[Cmd-Remove] Removing {} from the cloud.'.format(aws_file_name))

        if not self._aws_creds.access_key_id or not self._aws_creds.secret_access_key:
            Logger.debug('[Cmd-Remove] Unable to check cache file in the cloud')
            return
        conn = S3Connection(self._aws_creds.access_key_id, self._aws_creds.secret_access_key)
        bucket_name = self.storage_bucket
        bucket = conn.lookup(bucket_name)
        if bucket:
            key = bucket.get_key(aws_file_name)
            if not key:
                Logger.warn('[Cmd-Remove] S3 remove warning: '
                            'file "{}" does not exist in S3'.format(aws_file_name))
            else:
                key.delete()
                Logger.info('[Cmd-Remove] File "{}" was removed from S3'.format(aws_file_name))
Project: itaplay    Author: lhalam    | Project source | File source
def save_on_amazon_with_boto(clipfile):
    """Function that uploads clip on amazon

        Returns :
            str : url
    """
    if clipfile.size > MAX_CLIP_SIZE:
        raise ValidationError("Your file is too large. Please enter valid file")
    else:
        conn = S3Connection(local_settings.AWS_ACCESS_KEY_ID,
                            local_settings.AWS_SECRET_ACCESS_KEY)
        bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
        k = boto.s3.key.Key(bucket)
        k.key = settings.MEDIAFILES_LOCATION + clipfile.name
        # save on S3
        k.set_contents_from_file(clipfile)
        # make public
        k.set_acl('public-read')
        # generate a URL that will be saved in the database
        url = k.generate_url(expires_in=0, query_auth=False)
        return url
Project: trustcode-addons    Author: Trust-Code    | Project source | File source
def send_for_amazon_s3(self, file_to_send, name_to_store, database):
        try:
            if self.aws_access_key and self.aws_secret_key:
                access_key = self.aws_access_key
                secret_key = self.aws_secret_key

                conexao = S3Connection(access_key, secret_key)
                bucket_name = '%s_bkp_pelican' % database
                bucket = conexao.create_bucket(bucket_name)

                k = Key(bucket)
                k.key = name_to_store
                k.set_contents_from_filename(file_to_send)
                return k.key
            else:
                _logger.error(
                    u'Amazon S3 settings not configured, '
                    u'skipping backup storage')
        except Exception:
            _logger.error('Error sending data to S3', exc_info=True)
Project: alfred-ec2    Author: SoMuchToGrok    | Project source | File source
def _get_bucket(self):
        if isinstance(self.config.origin, S3Origin):
            if not self._bucket:
                bucket_dns_name = self.config.origin.dns_name
                bucket_name = bucket_dns_name.replace('.s3.amazonaws.com', '')
                from boto.s3.connection import S3Connection
                s3 = S3Connection(self.connection.aws_access_key_id,
                                  self.connection.aws_secret_access_key,
                                  proxy=self.connection.proxy,
                                  proxy_port=self.connection.proxy_port,
                                  proxy_user=self.connection.proxy_user,
                                  proxy_pass=self.connection.proxy_pass)
                self._bucket = s3.get_bucket(bucket_name)
                self._bucket.distribution = self
                self._bucket.set_key_class(self._object_class)
            return self._bucket
        else:
            raise NotImplementedError('Unable to get_objects on CustomOrigin')
Project: spark-notebook    Author: mas-dse    | Project source | File source
def open_bucket(self, bucket_name):
        """Open a S3 bucket.

            Args:
                bucket_name
            Returns:
                None
        """
        if bucket_name.startswith('s3n://') or bucket_name.startswith('s3://'):
            raise ValueError('bucket_name must NOT contain any prefix '
                             '(e.g. s3:// or s3n://)')

        while bucket_name[-1] == '/':
            bucket_name = bucket_name[:-1]
        self.bucket_name = bucket_name
        self.conn = S3Connection(host="s3.amazonaws.com")
        try:
            self.bucket = self.conn.get_bucket(self.bucket_name)
        except S3ResponseError as e:
            print('Opening S3 bucket "%s" failed.\nError code %d: %s'
                  % (bucket_name, e.status, e.reason))
Project: srepp_server    Author: SummitRoute    | Project source | File source
def copyS3FileToLocal(s3auth, s3path):
    conn = S3Connection(s3auth["access_key"], s3auth["secret_key"])

    bucket = conn.get_bucket(s3auth["bucket_name"])
    k = bucket.get_key(s3path, validate=True)

    f = tempfile.NamedTemporaryFile(prefix="fa-", dir=config.tmp_file_path, delete=False)
    filepath = os.path.abspath(f.name)
    k.get_contents_to_filename(filename=filepath)
    return filepath


Project: EnglishDiary    Author: jupiny    | Project source | File source
def delete_file_from_s3(filename):
    conn = S3Connection(
        settings.AWS_ACCESS_KEY_ID,
        settings.AWS_SECRET_ACCESS_KEY,
    )
    b = Bucket(
        conn,
        settings.AWS_STORAGE_BUCKET_NAME,
    )
    k = Key(b)
    k.key = filename
    b.delete_key(k)
Project: mist    Author: cogcmd    | Project source | File source
def connect(self):
        try:
            self.conn = S3Connection()
        except Exception as e:
            Logger.error("Failed connecting to S3: %s" % (e))
            self.resp.send_error("S3 connection failed.")
Project: edx-video-pipeline    Author: edx    | Project source | File source
def setup_s3_bucket(self):
        """
        Creates an s3 bucket. That is happening in moto's virtual environment.
        """
        connection = S3Connection()
        connection.create_bucket(CONFIG_DATA['aws_video_transcripts_bucket'])
        return connection
Project: edx-video-pipeline    Author: edx    | Project source | File source
def setUp(self):
        self.file_name = u'OVTESTFILE_01.mp4'
        self.video_file_path = os.path.join(TEST_FILES_DIR, self.file_name)

        # Create an S3 bucket -- all of this happens inside moto's virtual environment
        connection = S3Connection()
        connection.create_bucket(CONFIG_DATA['edx_s3_ingest_bucket'])
Project: edx-video-pipeline    Author: edx    | Project source | File source
def upload_video_with_metadata(self, **metadata):
        """
        Sets the metadata on an S3 video key.
        """
        # Upload the video file to ingest bucket
        connection = S3Connection()
        self.ingest_bucket = connection.get_bucket(CONFIG_DATA['edx_s3_ingest_bucket'])

        key_name = os.path.join(CONFIG_DATA['edx_s3_ingest_prefix'], self.file_name)
        self.video_key = Key(self.ingest_bucket, key_name)
        for metadata_name, value in dict(S3_METADATA, **metadata).iteritems():
            if value is not None:
                self.video_key.set_metadata(metadata_name, value)

        self.video_key.set_contents_from_filename(self.video_file_path)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def test_basic_anon(self):
        auth_con = S3Connection()
        # create a new, empty bucket
        bucket_name = 'test-%d' % int(time.time())
        auth_bucket = auth_con.create_bucket(bucket_name)

        # try to read the bucket anonymously
        anon_con = S3Connection(anon=True)
        anon_bucket = Bucket(anon_con, bucket_name)
        try:
            next(iter(anon_bucket.list()))
            self.fail("anon bucket list should fail")
        except S3ResponseError:
            pass

        # grant the bucket public-read access, then try the anonymous read again
        auth_bucket.set_acl('public-read')
        time.sleep(5)
        try:
            next(iter(anon_bucket.list()))
            self.fail("not expecting contents")
        except S3ResponseError as e:
            self.fail("We should have public-read access, but received "
                      "an error: %s" % e)
        except StopIteration:
            pass

        # cleanup
        auth_con.delete_bucket(auth_bucket)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def test_error_code_populated(self):
        c = S3Connection()
        try:
            c.create_bucket('bad$bucket$name')
        except S3ResponseError as e:
            self.assertEqual(e.error_code, 'InvalidBucketName')
        else:
            self.fail("S3ResponseError not raised.")
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def setUp(self):
        self.conn = S3Connection()
        self.bucket_name = 'multidelete-%d' % int(time.time())
        self.bucket = self.conn.create_bucket(self.bucket_name)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def setUp(self):
        self.conn = S3Connection(is_secure=False)
        self.bucket_name = 'multipart-%d' % int(time.time())
        self.bucket = self.conn.create_bucket(self.bucket_name)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def setUp(self):
        self.conn = S3Connection()
        self.bucket_name = 'keytest-%d' % int(time.time())
        self.bucket = self.conn.create_bucket(self.bucket_name)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def do_test_valid_cert(self):
        # When connecting to actual servers with bundled root certificates, no
        # cert errors should be thrown; instead we will get "invalid
        # credentials" errors since the config used does not contain any
        # credentials.
        self.assertConnectionThrows(S3Connection, exception.S3ResponseError)
        self.assertConnectionThrows(GSConnection, exception.GSResponseError)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def do_test_invalid_signature(self):
        boto.config.set('Boto', 'ca_certificates_file', DEFAULT_CA_CERTS_FILE)
        self.assertConnectionThrows(S3Connection, ssl.SSLError)
        self.assertConnectionThrows(GSConnection, ssl.SSLError)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def do_test_invalid_host(self):
        boto.config.set('Credentials', 'gs_host', INVALID_HOSTNAME_HOST)
        boto.config.set('Credentials', 's3_host', INVALID_HOSTNAME_HOST)
        self.assertConnectionThrows(S3Connection, ssl.SSLError)
        self.assertConnectionThrows(GSConnection, ssl.SSLError)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def do_test_invalid_host(self):
        boto.config.set('Credentials', 'gs_host', INVALID_HOSTNAME_HOST)
        boto.config.set('Credentials', 's3_host', INVALID_HOSTNAME_HOST)
        self.assertConnectionThrows(
                S3Connection, https_connection.InvalidCertificateException)
        self.assertConnectionThrows(
                GSConnection, https_connection.InvalidCertificateException)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def setUp(self):
        self.conn = S3Connection()
        self.bucket_name = 'version-%d' % int(time.time())
        self.bucket = self.conn.create_bucket(self.bucket_name)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def setUp(self):
        self.conn = S3Connection()
        self.bucket_name = 'bucket-%d' % int(time.time())
        self.bucket = self.conn.create_bucket(self.bucket_name)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def test_session_token(self):
        print('--- running Session Token tests ---')
        c = STSConnection()

        # Create a session token
        token = c.get_session_token()

        # Save session token to a file
        token.save('token.json')

        # Now load up a copy of that token
        token_copy = Credentials.load('token.json')
        assert token_copy.access_key == token.access_key
        assert token_copy.secret_key == token.secret_key
        assert token_copy.session_token == token.session_token
        assert token_copy.expiration == token.expiration
        assert token_copy.request_id == token.request_id

        os.unlink('token.json')

        assert not token.is_expired()

        # Try using the session token with S3
        s3 = S3Connection(aws_access_key_id=token.access_key,
                          aws_secret_access_key=token.secret_key,
                          security_token=token.session_token)
        buckets = s3.get_all_buckets()

        print('--- tests completed ---')
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def connect_s3(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Amazon's S3
    """
    from boto.s3.connection import S3Connection
    return S3Connection(aws_access_key_id, aws_secret_access_key, **kwargs)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def connect_walrus(host=None, aws_access_key_id=None,
                   aws_secret_access_key=None,
                   port=8773, path='/services/Walrus', is_secure=False,
                   **kwargs):
    """
    Connect to a Walrus service.

    :type host: string
    :param host: the host name or ip address of the Walrus server

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Walrus
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    # Check for values in boto config, if not supplied as args
    if not aws_access_key_id:
        aws_access_key_id = config.get('Credentials',
                                       'euca_access_key_id',
                                       None)
    if not aws_secret_access_key:
        aws_secret_access_key = config.get('Credentials',
                                           'euca_secret_access_key',
                                           None)
    if not host:
        host = config.get('Boto', 'walrus_host', None)

    return S3Connection(aws_access_key_id, aws_secret_access_key,
                        host=host, port=port, path=path,
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)
Project: cuny-bdif    Author: aristotle-tek    | Project source | File source
def connect_ia(ia_access_key_id=None, ia_secret_access_key=None,
               is_secure=False, **kwargs):
    """
    Connect to the Internet Archive via their S3-like API.

    :type ia_access_key_id: string
    :param ia_access_key_id: Your IA Access Key ID.  This will also look
        in your boto config file for an entry in the Credentials
        section called "ia_access_key_id"

    :type ia_secret_access_key: string
    :param ia_secret_access_key: Your IA Secret Access Key.  This will also
        look in your boto config file for an entry in the Credentials
        section called "ia_secret_access_key"

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to the Internet Archive
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    access_key = config.get('Credentials', 'ia_access_key_id',
                            ia_access_key_id)
    secret_key = config.get('Credentials', 'ia_secret_access_key',
                            ia_secret_access_key)

    return S3Connection(access_key, secret_key,
                        host='s3.us.archive.org',
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)
Project: dnn-quant    Author: euclidjda    | Project source | File source
def s3sign(bucket, path, access_key, secret_key, https, expiry):
    c = S3Connection(access_key, secret_key)
    return c.generate_url(
        expires_in=int(expiry),
        method='GET',
        bucket=bucket,
        key=path,
        query_auth=True,
        force_http=(not https)
    )
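
For illustration (not part of the original project), here is a hypothetical call to the helper above; all argument values are placeholders. generate_url signs the request locally, so no network call is made.

url = s3sign('my-bucket', 'reports/output.csv',
             access_key='PLACEHOLDER_KEY', secret_key='PLACEHOLDER_SECRET',
             https=True, expiry=3600)  # hypothetical values
print(url)  # pre-signed GET URL, valid for one hour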
Project: server-backup-s3    Author: ph1l    | Project source | File source
def __connect_to_bucket(self):

        from boto.s3.connection import S3Connection
        from boto.s3.connection import OrdinaryCallingFormat

        if self.verbose:
            print("DEBUG: Setting up S3Connection to %s:%s"
                  % (self.host, self.bucket_name))

        self.conn = S3Connection(
            host=self.host,
            calling_format=OrdinaryCallingFormat()
            )
        self.bucket = self.conn.get_bucket(self.bucket_name, validate=False)
Project: epilepsy_diary    Author: bfortuner    | Project source | File source
def get_connection():
    """
    For EC2 hosts this is managed by roles
    IAM users must add the correct AWS tokens to their .bash_profile
    """
    try:
        return connect_s3()
    except Exception as e:
        raise Exception("Unable to connect to S3:", e)
        # return S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
Project: toil-rnaseq    Author: BD2KGenomics    | Project source | File source
def _assertOutput(self, num_samples=None, bam=False):
        with closing(S3Connection()) as s3:
            bucket = Bucket(s3, self.output_dir.netloc)
            prefix = self.output_dir.path[1:]
            for i in range(1 if num_samples is None else num_samples):
                value = None if num_samples is None else i
                output_file = self._sample_name(value, bam=bam) + '.tar.gz'
                output_file = 'FAIL.' + output_file  # This flag is added by bamQC
                key = bucket.get_key(posixpath.join(prefix, output_file), validate=True)
                # FIXME: We may want to validate the output a bit more
                self.assertTrue(key.size > 0)
Project: toil-rnaseq    Author: BD2KGenomics    | Project source | File source
def tearDown(self):
        shutil.rmtree(self.workdir)
        with closing(S3Connection()) as s3:
            bucket = Bucket(s3, self.output_dir.netloc)
            prefix = self.output_dir.path[1:]
            for key in bucket.list(prefix=prefix):
                assert key.name.startswith(prefix)
                key.delete()
Project: learneveryword    Author: karan    | Project source | File source
def connect_s3(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Amazon's S3
    """
    from boto.s3.connection import S3Connection
    return S3Connection(aws_access_key_id, aws_secret_access_key, **kwargs)
Project: learneveryword    Author: karan    | Project source | File source
def connect_walrus(host=None, aws_access_key_id=None,
                   aws_secret_access_key=None,
                   port=8773, path='/services/Walrus', is_secure=False,
                   **kwargs):
    """
    Connect to a Walrus service.

    :type host: string
    :param host: the host name or ip address of the Walrus server

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Walrus
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    # Check for values in boto config, if not supplied as args
    if not aws_access_key_id:
        aws_access_key_id = config.get('Credentials',
                                       'euca_access_key_id',
                                       None)
    if not aws_secret_access_key:
        aws_secret_access_key = config.get('Credentials',
                                           'euca_secret_access_key',
                                           None)
    if not host:
        host = config.get('Boto', 'walrus_host', None)

    return S3Connection(aws_access_key_id, aws_secret_access_key,
                        host=host, port=port, path=path,
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)
Project: learneveryword    Author: karan    | Project source | File source
def connect_ia(ia_access_key_id=None, ia_secret_access_key=None,
               is_secure=False, **kwargs):
    """
    Connect to the Internet Archive via their S3-like API.

    :type ia_access_key_id: string
    :param ia_access_key_id: Your IA Access Key ID.  This will also look
        in your boto config file for an entry in the Credentials
        section called "ia_access_key_id"

    :type ia_secret_access_key: string
    :param ia_secret_access_key: Your IA Secret Access Key.  This will also
        look in your boto config file for an entry in the Credentials
        section called "ia_secret_access_key"

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to the Internet Archive
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    access_key = config.get('Credentials', 'ia_access_key_id',
                            ia_access_key_id)
    secret_key = config.get('Credentials', 'ia_secret_access_key',
                            ia_secret_access_key)

    return S3Connection(access_key, secret_key,
                        host='s3.us.archive.org',
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)
Project: mu    Author: excamera    | Project source | File source
def sign(bucket, path, access_key, secret_key, https, expiry):
    c = S3Connection(access_key, secret_key)
    return c.generate_url(
        expires_in=long(expiry),
        method='GET',
        bucket=bucket,
        key=path,
        query_auth=True,
        force_http=(not https)
    )
Project: dvc    Author: dataversioncontrol    | Project source | File source
def _get_bucket_aws(self, bucket_name):
        """ get a bucket object, aws """
        if all([self._aws_creds.access_key_id,
                self._aws_creds.secret_access_key,
                self.aws_region_host]):
            conn = S3Connection(self._aws_creds.access_key_id,
                                self._aws_creds.secret_access_key,
                                host=self.aws_region_host)
        else:
            conn = S3Connection()
        bucket = conn.lookup(bucket_name)
        if bucket is None:
            raise DataCloudError('Storage path {} is not setup correctly'.format(bucket_name))
        return bucket
Project: TensorFlowFlask    Author: PythonWorkshop    | Project source | File source
def save_to_s3(self, filename, model_name):
        try:
            AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
            AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
            c = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
            b = c.get_bucket('flasktensorflow')  # substitute your bucket name here
            k = b.new_key(model_name)
            f = open(filename, 'rb')
            k.set_contents_from_file(f, encrypt_key=True)
            print("Saving to S3")
        except Exception:
            return False
        return True
Project: TensorFlowFlask    Author: PythonWorkshop    | Project source | File source
def load_from_s3(self, filename, model_name):
        try:
            AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
            AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
            c = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
            b = c.get_bucket('flasktensorflow')  # substitute your bucket name here
            k = b.get_key(model_name)
            k.get_contents_to_filename(filename)
            print("Loading from S3")
        except Exception:
            return False
        return True
Project: itaplay    Author: lhalam    | Project source | File source
def delete_from_amazon_with_boto(url):
    """Function that delete clip from amazon

        Returns : True
    """
    conn = S3Connection(local_settings.AWS_ACCESS_KEY_ID,
                        local_settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
    k = boto.s3.key.Key(bucket)
    filename_from_url = url.split('/')[-1]
    k.key = settings.MEDIAFILES_LOCATION + filename_from_url
    bucket.delete_key(k)
    return True
Project: Chromium_DepotTools    Author: p07r0457    | Project source | File source
def connect_s3(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Amazon's S3
    """
    from boto.s3.connection import S3Connection
    return S3Connection(aws_access_key_id, aws_secret_access_key, **kwargs)
Project: Chromium_DepotTools    Author: p07r0457    | Project source | File source
def connect_walrus(host=None, aws_access_key_id=None,
                   aws_secret_access_key=None,
                   port=8773, path='/services/Walrus', is_secure=False,
                   **kwargs):
    """
    Connect to a Walrus service.

    :type host: string
    :param host: the host name or ip address of the Walrus server

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Walrus
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    # Check for values in boto config, if not supplied as args
    if not aws_access_key_id:
        aws_access_key_id = config.get('Credentials',
                                       'euca_access_key_id',
                                       None)
    if not aws_secret_access_key:
        aws_secret_access_key = config.get('Credentials',
                                           'euca_secret_access_key',
                                           None)
    if not host:
        host = config.get('Boto', 'walrus_host', None)

    return S3Connection(aws_access_key_id, aws_secret_access_key,
                        host=host, port=port, path=path,
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)
Project: Chromium_DepotTools    Author: p07r0457    | Project source | File source
def connect_ia(ia_access_key_id=None, ia_secret_access_key=None,
               is_secure=False, **kwargs):
    """
    Connect to the Internet Archive via their S3-like API.

    :type ia_access_key_id: string
    :param ia_access_key_id: Your IA Access Key ID.  This will also look
        in your boto config file for an entry in the Credentials
        section called "ia_access_key_id"

    :type ia_secret_access_key: string
    :param ia_secret_access_key: Your IA Secret Access Key.  This will also
        look in your boto config file for an entry in the Credentials
        section called "ia_secret_access_key"

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to the Internet Archive
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    access_key = config.get('Credentials', 'ia_access_key_id',
                            ia_access_key_id)
    secret_key = config.get('Credentials', 'ia_secret_access_key',
                            ia_secret_access_key)

    return S3Connection(access_key, secret_key,
                        host='s3.us.archive.org',
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)
Project: aws-lambda-fsm-workflows    Author: Workiva    | Project source | File source
def execute(self, context, obj):
        connection = S3Connection()
        bucket = Bucket(connection=connection, name=context['bucket'])
        key = Key(bucket=bucket, name=context['name'])
        if key.exists():
            return 'done'
        else:
            return 'missing'