Python requests_toolbelt module, MultipartEncoderMonitor() example source code

We extracted the following 10 code examples from open-source Python projects to illustrate how to use requests_toolbelt.MultipartEncoderMonitor().
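
Before the project excerpts, here is a minimal sketch of the pattern they all share: build a MultipartEncoder, wrap it in a MultipartEncoderMonitor with a progress callback, send the monitor as the request body, and take the Content-Type header from the monitor. The URL, field name and callback below are illustrative placeholders rather than code from any of the projects.

import requests
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor

def upload_with_progress(url, file_path):
    with open(file_path, 'rb') as f:
        # Stream the multipart body instead of building it in memory
        encoder = MultipartEncoder(fields={
            'file': (file_path, f, 'application/octet-stream'),
        })

        # The monitor calls this with itself after every chunk it reads from the body
        def callback(monitor):
            print('\rsent {0}/{1} bytes'.format(monitor.bytes_read, encoder.len), end='')

        monitor = MultipartEncoderMonitor(encoder, callback)
        # monitor.content_type mirrors the encoder's content type, including the multipart boundary
        return requests.post(url, data=monitor,
                             headers={'Content-Type': monitor.content_type})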

Project: catalearn    Author: Catalearn    | Project source | File source
def upload_data(gpu_ip, job_hash, data_path):
    url = 'http://%s:%s/runJobDecorator' % (gpu_ip, settings.GPU_PORT)
    file_size = path.getsize(data_path)
    pbar = tqdm(total=file_size, unit='B', unit_scale=True)

    def callback(monitor):
        progress = monitor.bytes_read - callback.last_bytes_read
        pbar.update(progress)
        callback.last_bytes_read = monitor.bytes_read
    callback.last_bytes_read = 0

    with open(data_path, 'rb') as f:
        data = {
            'file': ('uploads.pkl', f, 'application/octet-stream'),
            'hash': job_hash
        }
        encoder = MultipartEncoder(
            fields=data
        )
        monitor = MultipartEncoderMonitor(encoder, callback)
        r = requests.post(url, data=monitor, headers={
            'Content-Type': monitor.content_type})

    remove(data_path)
    # pbar might not close when the user interrupts, need to fix this
    pbar.close()
    status_check(r)
Project: mender-backend-cli    Author: bboozzoo    | Project source | File source
def do_artifacts_artifact_add(opts):
    logging.debug('add artifact %r', opts)
    url = artifacts_url(opts.service)
    image = {
        'name': opts.name,
        'description': opts.description,
    }
    # build contents of multipart/form-data, image meta must come first, hence
    # we use an OrderedDict to preserve the order
    files = OrderedDict()
    for k, v in image.items():
        files[k] = (None, io.StringIO(v))
    # followed by firmware data
    # but first, try to find out the size of firmware data
    files['size'] = str(os.stat(opts.infile).st_size)
    files['artifact'] = (opts.infile, open(opts.infile, 'rb'), "application/octet-stream", {})

    encoder = MultipartEncoder(files)

    if sys.stderr.isatty():
        try:
            from requests_toolbelt import MultipartEncoderMonitor
            from clint.textui.progress import Bar as ProgressBar

            pb = ProgressBar(expected_size=encoder.len, filled_char='=', every=1024*1024)
            monitor = MultipartEncoderMonitor(encoder,
                                              lambda mon: pb.show(mon.bytes_read))
            encoder = monitor
        except ImportError:
            pass

    with api_from_opts(opts) as api:
        rsp = api.post(url, data=encoder,
                       headers={'Content-Type': encoder.content_type})
        if rsp.status_code == 201:
            # created
            location = rsp.headers.get('Location', '')
            print("created with URL: {}".format(location))
            print('artifact ID: ', location.rsplit('/')[-1])
        else:
            errorprinter(rsp)
Project: tempfiles_cmdline    Author: periket2000    | Project source | File source
def upload_file(self, filepath):
        try:
            # bypass the multipart encoder / doesn't work with nginx direct upload.
            # encoder = self.create_upload(filepath)
            encoder = open(filepath, 'rb')
            try:
                encoder.len = os.path.getsize(filepath)
            except AttributeError:
                # Python 2.7 trick: wrap the file stream to add a len attribute
                class Wrapped(object):
                    def __init__(self, enc, path):
                        self._enc = enc
                        self.len = os.path.getsize(path)

                    def __getattr__(self, attr):
                        return getattr(self._enc, attr)
                encoder = Wrapped(encoder, filepath)
            callback = self.create_callback(encoder)
            monitor = MultipartEncoderMonitor(encoder, callback)
            response = requests.post(self.up_url,
                                     data=monitor,
                                     headers={
                                         # 'Content-Type': monitor.content_type,
                                         'X-NAME': os.path.basename(filepath)
                                     })
            print(ENDL)
            print(json.loads(response.text))
            return response
        except requests.exceptions.ConnectionError:
            self.configuration_service.log('CONNECTION_CLOSED')
Project: kolibri    Author: learningequality    | Project source | File source
def handle(self, *args, **options):

        self.stdout.write("Uploading database to central server...\n")

        encoder = MultipartEncoder({
            "project": options['project'],
            "file": ("db.sqlite3", open(DB_PATH, "rb"), "application/octet-stream")
        })
        monitor = MultipartEncoderMonitor(encoder, create_callback(encoder))
        r = requests.post(CENTRAL_SERVER_DB_UPLOAD_URL, data=monitor, headers={"Content-Type": monitor.content_type})
        print("\nUpload finished! (Returned status {0} {1})".format(r.status_code, r.reason))
Project: send-cli    Author: ehuggett    | Project source | File source
def api_upload(service, encData, encMeta, keys):
    '''
       Uploads data to Send.
       Caution! Data is uploaded as given, this function will not encrypt it for you
    '''
    service += 'api/upload'
    files = requests_toolbelt.MultipartEncoder(fields={'file': ('blob', encData, 'application/octet-stream') })
    pbar = progbar(files.len)
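    # MultipartEncoderMonitor passes the monitor itself to the callback; the lambda ignores
    # that argument and advances the bar by the bytes read since the last update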
    monitor = requests_toolbelt.MultipartEncoderMonitor(files, lambda files: pbar.update(monitor.bytes_read - pbar.n))

    headers = {
        'X-File-Metadata' : unpadded_urlsafe_b64encode(encMeta),
        'Authorization' : 'send-v1 ' + unpadded_urlsafe_b64encode(keys.authKey),
        'Content-type' : monitor.content_type
    }

    r = requests.post(service, data=monitor, headers=headers, stream=True)
    r.raise_for_status()
    pbar.close()

    body_json = r.json()
    secretUrl = body_json['url'] + '#' + unpadded_urlsafe_b64encode(keys.secretKey)
    fileId = body_json['id']
    fileNonce = unpadded_urlsafe_b64decode(r.headers['WWW-Authenticate'].replace('send-v1 ', ''))
    try:
        owner_token = body_json['owner']
    except:
        owner_token = body_json['delete']
    return secretUrl, fileId, fileNonce, owner_token
Project: nelson    Author: udacity    | Project source | File source
def submit(self):

    self.submit_url = self._get_submit_url()

    mkzip(self.zipfile_root, SUBMISSION_FILENAME, self.filenames, self.max_zip_size)

    fd = open(SUBMISSION_FILENAME, "rb")

    m = MultipartEncoder(fields={'zipfile': ('student.zip', fd, 'application/zip')})
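    # wrap the encoder so upload_progress_callback is invoked as the zip is streamed to the server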
    monitor = MultipartEncoderMonitor(m, self.upload_progress_callback)

    try:
      r = self.s.post(self.submit_url, 
                      data=monitor,
                      headers={'Content-Type': monitor.content_type})
      r.raise_for_status()
    except requests.exceptions.HTTPError as e:
      if r.status_code == 403:
        raise RuntimeError("You don't have access to this quiz.")
      elif r.status_code in [404,429,500]:
        try:
          response_json = r.json()
          message = response_json.get("message") or "An internal server error occurred."
        except:
          message = "An unknown error occurred"
        raise RuntimeError(message)
      else:
        raise

    fd.close()

    self.submission = r.json()
Project: valohai-cli    Author: valohai    | Project source | File source
def create_adhoc_commit(project):
    """
    Create an ad-hoc tarball and commit of the project directory.

    :param project: Project
    :type project: valohai_cli.models.project.Project
    :return: Commit response object from API
    :rtype: dict[str, object]
    """
    tarball = None
    try:
        click.echo('Packaging {dir}...'.format(dir=project.directory))
        tarball = package_directory(project.directory, progress=True)
        # TODO: We could check whether the commit is known already
        size = os.stat(tarball).st_size

        click.echo('Uploading {size:.2f} KiB...'.format(size=size / 1024.))
        upload = MultipartEncoder({'data': ('data.tgz', open(tarball, 'rb'), 'application/gzip')})
        prog = click.progressbar(length=upload.len, width=0)
        prog.is_hidden = (size < 524288)  # Don't bother with the bar if the upload is small
        with prog:
            def callback(upload):
                prog.pos = upload.bytes_read
                prog.update(0)  # Step is 0 because we set pos above

            monitor = MultipartEncoderMonitor(upload, callback)
            resp = request(
                'post',
                '/api/v0/projects/{id}/import-package/'.format(id=project.id),
                data=monitor,
                headers={'Content-Type': monitor.content_type},
            ).json()
        success('Uploaded ad-hoc code {identifier}'.format(identifier=resp['identifier']))
    finally:
        if tarball:
            os.unlink(tarball)
    return resp
Project: floyd-cli    Author: floydhub    | Project source | File source
def create(self, module):
        try:
            upload_files, total_file_size = get_files_in_current_directory(file_type='code')
        except OSError:
            sys.exit("Directory contains too many files to upload. If you have data files in the current directory, "
                     "please upload them separately using \"floyd data\" command and remove them from here.\n"
                     "See http://docs.floydhub.com/faqs/job/#i-get-too-many-open-files-error-when-i-run-my-project "
                     "for more details on how to fix this.")

        if total_file_size > self.MAX_UPLOAD_SIZE:
            sys.exit(("Code size too large to sync, please keep it under %s.\n"
                      "If you have data files in the current directory, please upload them "
                      "separately using \"floyd data\" command and remove them from here.\n"
                      "You may find the following documentation useful:\n\n"
                      "\thttps://docs.floydhub.com/guides/create_and_upload_dataset/\n"
                      "\thttps://docs.floydhub.com/guides/data/mounting_data/\n"
                      "\thttps://docs.floydhub.com/guides/floyd_ignore/") % (sizeof_fmt(self.MAX_UPLOAD_SIZE)))

        floyd_logger.info("Creating project run. Total upload size: %s",
                          sizeof_fmt(total_file_size))
        floyd_logger.debug("Creating module. Uploading: %s files",
                           len(upload_files))
        floyd_logger.info("Syncing code ...")

        # Add request data
        upload_files.append(("json", json.dumps(module.to_dict())))
        multipart_encoder = MultipartEncoder(
            fields=upload_files
        )

        # Attach progress bar
        progress_callback, bar = create_progress_callback(multipart_encoder)
        multipart_encoder_monitor = MultipartEncoderMonitor(multipart_encoder, progress_callback)

        try:
            response = self.request("POST",
                                    self.url,
                                    data=multipart_encoder_monitor,
                                    headers={"Content-Type": multipart_encoder.content_type},
                                    timeout=3600)
        finally:
            # always make sure we clear the console
            bar.done()
        return response.json().get("id")
Project: tmper    Author: mattbierbaum    | Project source | File source
def upload(url, filename, code='', password='', num=1, time='', disp=False):
    """ Upload the file 'filename' to tmper url """
    url = url or conf_read('url')
    password = password or conf_read('pass')

    if not url:
        print("No URL provided! Provide one or set on via conf.", file=sys.stderr)
        sys.exit(1)

    url = url if not code else urlparse.urljoin(url, code)
    arg = {} if not password else {'key': password}
    arg = arg if num == 1 else dict(arg, n=num)
    arg = arg if time == '' else dict(arg, time=time)

    name = os.path.basename(filename)

    if not os.path.exists(filename):
        print("File '{}' does not exist".format(filename), file=sys.stderr)
        sys.exit(1)

    def create_callback(encoder):
        bar = progress.ProgressBar(encoder.len, display=disp)

        def callback(monitor):
            bar.update(monitor.bytes_read)

        return callback

    with open(filename, 'rb') as f:
        mimetype = mimetypes.guess_type(filename)[0] or 'application/unknown'

        # prepare the streaming form uploader (with progress bar)
        encoder = MultipartEncoder(dict(arg, filearg=(filename, f, mimetype)))
        callback = create_callback(encoder)
        monitor = MultipartEncoderMonitor(encoder, callback)

        header = {
            'User-Agent': 'tmper/{}'.format(__version__),
            'Content-Type': monitor.content_type
        }

        r = requests.post(url, data=monitor, headers=header)
        print(r.content.decode('utf-8'))
        r.close()
Project: abrio-cli    Author: Kianenigma    | Project source | File source
def upload(name) :
    '''
    Upload Abrio component to server.
    '''

    if not ensure_abrio_root():
        click.secho('\nAbrio Root Directory Not Detected.\n', fg="red", bold=True)
        return

    if not ensure_component_exists(name):
        click.secho("\nComponent <{0}> does not exist.\n".format(name), bold=True, fg="red")

    build_dir = '/sample/build/libs/'

    os.system('cd {0} && gradle jar && cd ..'.format(name))
    jar_dir = name + build_dir + name + '.jar'
    os.rename(name + build_dir + 'sample.jar', jar_dir)

    encoder = create_upload(jar_dir)
    callback = create_callback(encoder)
    monitor = MultipartEncoderMonitor(encoder, callback)

    component_config = load_component_config(name)
    component_config['last_uploaded'] = str(datetime.datetime.now())
    write_component_config(name, component_config)

    headers = {
        'Content-Type': monitor.content_type,
        'private key': component_config['pkey'],
        'version' : component_config['version']
    }

    upload_response = requests.post(
        config['server']['host'] + "component/upload",
        data=monitor,
        # auth=HTTPBasicAuth(email, pwd),
        headers=headers)

    if upload_response.status_code == 200 :
        click.secho('\n\n\nComponent uploaded\n', bold=True, fg="green")

    else :
        click.secho(errors["UNKNOWN_NETWORK"], bold=True, fg="red")