Python aiohttp 模块,Timeout() 实例源码

我们从Python开源项目中,提取了以下49个代码示例,用于说明如何使用aiohttp.Timeout()。

项目:peony-twitter    作者:odrling    | 项目源码 | 文件源码
async def test_stream_cancel(event_loop):
    """Ensure a running stream-iteration task can be cancelled cleanly.

    The body contains ``await`` expressions, which are a SyntaxError outside
    a coroutine, so the missing ``async`` keyword was restored on the ``def``.
    """
    async def cancel(task):
        # Give the stream task a brief head start before cancelling it.
        await asyncio.sleep(0.001)
        task.cancel()

    async def test_stream_iterations(stream):
        while True:
            await test_stream_iteration(stream)

    with aiohttp.ClientSession(loop=event_loop) as session:
        client = peony.client.BasePeonyClient("", "", session=session)
        context = peony.stream.StreamResponse(method='GET',
                                              url="http://whatever.com",
                                              client=client)

        with context as stream:
            with patch.object(stream, '_connect',
                              side_effect=stream_content):
                coro = test_stream_iterations(stream)
                task = event_loop.create_task(coro)
                cancel_task = event_loop.create_task(cancel(task))

                # Hard 1-second cap so the test cannot hang if cancellation
                # never lands.
                with aiohttp.Timeout(1):
                    await asyncio.wait([task, cancel_task])
项目:aiovk    作者:Fahreeve    | 项目源码 | 文件源码
def test_auth_with_valid_data(self):
        # Integration test: walks the full OAuth authorization-code flow
        # against the live vk.com endpoint using module-level credentials
        # (USER_LOGIN / USER_PASSWORD / APP_ID).
        s = TestAuthSession(login=USER_LOGIN, password=USER_PASSWORD, app_id=APP_ID)
        # NOTE(review): SSL verification is disabled and a custom response
        # class is installed — presumably required by the test harness;
        # confirm against TestAuthSession/CustomClientResponse.
        s.driver.session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False),
                                                 response_class=CustomClientResponse)
        yield from s.authorize()
        params = {'client_id': APP_ID, 'display': 'page', 'redirect_uri': REDIRECT_URI, 'response_type': 'code'}
        # Bound the OAuth redirect round-trip to 10 seconds.
        with aiohttp.Timeout(10):
            response = yield from s.driver.session.get("https://oauth.vk.com/authorize",
                                                       params=params, allow_redirects=True)
        s.close()
        # The authorization code comes back as a query parameter on the
        # final redirect URL.
        code = response.url.query.get('code')
        self.assertIsNotNone(code)

        # Exchange the code for an access token via the code-grant session.
        s = AuthorizationCodeSession(APP_ID, APP_SECRET, REDIRECT_URI, code)
        yield from s.authorize()
        s.close()
        self.assertIsNotNone(s.access_token)
项目:rauc-hawkbit    作者:rauc    | 项目源码 | 文件源码
async def post_resource(self, api_path, data, **kwargs):
        """
        Helper method for HTTP POST API requests.

        The body uses ``async with``/``await`` (illegal in a plain ``def``),
        so the missing ``async`` keyword was restored.

        Args:
            api_path(str): REST API path
            data: JSON data for POST request
        Keyword Args:
            kwargs: keyword args used for replacing items in the API path
        Raises:
            Whatever check_http_status() raises for error responses.
        """
        # JSON defaults first; entries in self.headers take precedence.
        post_headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json',
            **self.headers
        }
        url = self.build_api_url(
                api_path.format(
                    tenant=self.tenant,
                    controllerId=self.controller_id,
                    **kwargs))
        self.logger.debug('POST {}'.format(url))
        # Bound the whole request/response cycle by the configured timeout.
        with aiohttp.Timeout(self.timeout):
            async with self.session.post(url, headers=post_headers,
                                         data=json.dumps(data)) as resp:
                await self.check_http_status(resp)
项目:rauc-hawkbit    作者:rauc    | 项目源码 | 文件源码
async def put_resource(self, api_path, data, **kwargs):
        """
        Helper method for HTTP PUT API requests.

        The body uses ``async with``/``await`` (illegal in a plain ``def``),
        so the missing ``async`` keyword was restored.

        Args:
            api_path(str): REST API path
            data: JSON data for PUT request
        Keyword Args:
            kwargs: keyword args used for replacing items in the API path
        Raises:
            Whatever check_http_status() raises for error responses.
        """
        # JSON default first; entries in self.headers take precedence.
        put_headers = {
            'Content-Type': 'application/json',
            **self.headers
        }
        url = self.build_api_url(
                api_path.format(
                    tenant=self.tenant,
                    controllerId=self.controller_id,
                    **kwargs))
        self.logger.debug('PUT {}'.format(url))
        self.logger.debug(json.dumps(data))
        # Bound the whole request/response cycle by the configured timeout.
        with aiohttp.Timeout(self.timeout):
            async with self.session.put(url, headers=put_headers,
                                        data=json.dumps(data)) as resp:
                await self.check_http_status(resp)
项目:py-restfmclient    作者:pcdummy    | 项目源码 | 文件源码
async def _request(self, method, store_path=None, **kwargs):
        """Issue *method* against self.url(); parse JSON replies, pass others raw.

        The body uses ``async with``/``await``, so the missing ``async``
        keyword was restored on the ``def``.

        Raises:
            RestNotFoundException: on HTTP 404 for JSON endpoints.
            RestDecoderException: when the reply body is not valid JSON.
        """
        with aiohttp.Timeout(self.timeout, loop=self.session.loop):
            url = self.url()
            self.logger.debug('HTTP %s %s' % (method.upper(), url))
            kwargs['headers'] = self.headers
            async with self.session.request(method, url, **kwargs) as response:
                if self.headers['Content-Type'] == 'application/json':
                    result = await response.text()
                    if store_path is not None:
                        # Persist the raw reply for offline replay/testing.
                        await self._store(store_path, method, url, result)

                    if response.status == 404:  # pragma: no cover
                        raise RestNotFoundException("Not found.")

                    try:
                        return json.loads(result)
                    except json.decoder.JSONDecodeError:
                        raise RestDecoderException(result)

                else:  # pragma: no cover
                    return await response.text()
项目:web_develop    作者:dongweiming    | 项目源码 | 文件源码
async def fetch(retry=0):
    """Fetch the caller's IP via a random proxy, retrying up to 5 times.

    ``async`` restored on the ``def`` (the body awaits).  ``aiohttp.Timeout``
    raises ``asyncio.TimeoutError``, which is distinct from the builtin
    ``TimeoutError`` on these Python versions, so it is now caught as well.

    Raises:
        TimeoutError: after more than 5 failed attempts.
    """
    proxy = 'http://{}'.format(Proxy.get_random()['address'])
    headers = {'user-agent': get_user_agent()}
    conn = aiohttp.ProxyConnector(proxy=proxy)

    url = 'http://httpbin.org/ip'

    try:
        with aiohttp.ClientSession(connector=conn) as session:
            with aiohttp.Timeout(TIMEOUT):
                async with session.get(url, headers=headers) as resp:
                    return await resp.json()
    except (ProxyConnectionError, asyncio.TimeoutError, TimeoutError):
        # Drop the broken proxy from the pool before retrying.
        try:
            # NOTE(review): `proxy` carries the 'http://' prefix while the
            # stored address may not — confirm the lookup key matches.
            p = Proxy.objects.get(address=proxy)
            if p:
                p.delete()
        except DoesNotExist:
            pass
        retry += 1
        if retry > 5:
            raise TimeoutError()
        await asyncio.sleep(1)
        return await fetch(retry=retry)
项目:web_develop    作者:dongweiming    | 项目源码 | 文件源码
async def fetch(url, retry=0):
    """Fetch a page and its companion JS data via a random proxy.

    Retries (recursively) up to 5 times on any failure, then raises
    CrawlerError.  ``async`` restored on the ``def``; the bare ``except:``
    was narrowed to ``except Exception`` so KeyboardInterrupt/SystemExit are
    no longer swallowed by the retry loop.
    """
    proxy = 'http://{}'.format(Proxy.get_random()['address'])
    headers = {'user-agent': get_user_agent()}
    conn = aiohttp.ProxyConnector(proxy=proxy)

    js_url = gen_js_url(url)

    try:
        with aiohttp.ClientSession(connector=conn) as session:
            with aiohttp.Timeout(TIMEOUT):
                async with session.get(url, headers=headers) as resp:
                    html_text = await resp.text()

                async with session.get(js_url, headers=headers) as resp:
                    js_data = await resp.json()
    except Exception:  # kinds of error, not only asyncio.TimeoutError
        retry += 1
        if retry > 5:
            raise CrawlerError()
        await asyncio.sleep(1)
        return await fetch(url, retry=retry)
    return html_text, js_data
项目:wsp    作者:wangybnet    | 项目源码 | 文件源码
async def _download(self, request):
        """Perform *request* over HTTP and wrap the reply in an HttpResponse.

        ``async`` restored on the ``def`` (the body awaits).  A proxy
        connector is used only when the request carries a proxy.
        """
        log.debug("Http Request: %s %s" % (request.method, request.url))
        with aiohttp.ClientSession(connector=None if (request.proxy is None) else aiohttp.ProxyConnector(proxy=request.proxy),
                                   cookies=request.cookies) as session:
            with aiohttp.Timeout(self._timeout):
                async with session.request(request.method,
                                           request.url,
                                           headers=request.headers,
                                           data=request.body) as resp:
                    # Read the body inside the context, while the
                    # connection is still open.
                    body = await resp.read()
        response = HttpResponse(resp.url,
                                resp.status,
                                headers=resp.headers,
                                body=body,
                                cookies=resp.cookies)
        return response
项目:wsp    作者:wangybnet    | 项目源码 | 文件源码
async def _download(self, request):
        """Perform *request* over HTTP and wrap the reply in an HttpResponse.

        ``async`` restored on the ``def`` (the body awaits).  A proxy
        connector is used only when the request carries a proxy.
        """
        log.debug("Http Request: %s %s" % (request.method, request.url))
        with aiohttp.ClientSession(connector=None if (request.proxy is None) else aiohttp.ProxyConnector(proxy=request.proxy),
                                   cookies=request.cookies) as session:
            with aiohttp.Timeout(self._timeout):
                async with session.request(request.method,
                                           request.url,
                                           headers=request.headers,
                                           data=request.body) as resp:
                    # Read the body inside the context, while the
                    # connection is still open.
                    body = await resp.read()
        response = HttpResponse(resp.url,
                                resp.status,
                                headers=resp.headers,
                                body=body,
                                cookies=resp.cookies)
        return response
项目:lolme    作者:Isaac-Lozano    | 项目源码 | 文件源码
def request_url_json(self, url, params, limit=True):
        # simple token bucket limiting:
        # the bucket refills continuously at limit_messages / limit_time
        # tokens per second, is capped at limit_messages, and each request
        # spends one token.
        current_time = time.time()
        delta_time = current_time - self.last_time
        self.last_time = current_time
        self.bucket += delta_time * (self.limit_messages / self.limit_time)
        if self.bucket > self.limit_messages:
            self.bucket = self.limit_messages
        if self.bucket < 1:
            # Out of tokens: refuse immediately rather than queue.
            raise RiotApiRateExceededException("Riot Api rate request exceeded. Please wait until making the next request")
        self.bucket -= 1
        # NOTE(review): the `limit` parameter is accepted but never
        # consulted — presumably meant to bypass the bucket; confirm.
        with aiohttp.Timeout(self.timeout):
            response = yield from self.session.get(url, params=params)
            if response.status != 200:
                raise RiotApiHttpException(response.status)
            return (yield from response.json())
项目:lolme    作者:Isaac-Lozano    | 项目源码 | 文件源码
def request_url_json(self, url, limit=True):
        # simple token bucket limiting:
        # the bucket refills continuously at limit_messages / limit_time
        # tokens per second, is capped at limit_messages, and each request
        # spends one token.
        current_time = time.time()
        delta_time = current_time - self.last_time
        self.last_time = current_time
        self.bucket += delta_time * (self.limit_messages / self.limit_time)
        if self.bucket > self.limit_messages:
            self.bucket = self.limit_messages
        if self.bucket < 1:
            # Out of tokens: refuse immediately rather than queue.
            raise OverwatchApiRateExceededException()
        self.bucket -= 1
        # NOTE(review): the `limit` parameter is accepted but never
        # consulted — presumably meant to bypass the bucket; confirm.
        with aiohttp.Timeout(self.timeout):
            response = yield from self.session.get(url)
            if response.status != 200:
                raise OverwatchApiHttpException(response.status)
            return (yield from response.json())
项目:discord_chat_bot    作者:chromaticity    | 项目源码 | 文件源码
async def cmd_setavatar(self, message, url=None):
        """
        Usage:
            {command_prefix}setavatar [url]

        Changes the bot's avatar.
        Attaching a file and leaving the url parameter blank also works.
        """
        # async restored on the def (the body awaits).
        if message.attachments:
            thing = message.attachments[0]['url']
        elif url:
            thing = url.strip('<>')
        else:
            # Previously `url.strip('<>')` raised AttributeError when no URL
            # and no attachment were given; report a usable error instead.
            raise exceptions.CommandError(
                "Please provide a URL or attach an image.", expire_in=20)

        try:
            with aiohttp.Timeout(10):
                async with self.aiosession.get(thing) as res:
                    await self.edit_profile(avatar=await res.read())

        except Exception as e:
            raise exceptions.CommandError("Unable to change avatar: %s" % e, expire_in=20)

        return Response(":ok_hand:", delete_after=20)
项目:freebora    作者:deeplook    | 项目源码 | 文件源码
async def fetch(session, url, dest='.', overwrite=False, verbose=False):
    """Fetch a single PDF file if not already existing.

    ``async`` restored on the ``def`` (the body uses ``async with``/``await``).

    Args:
        session: an aiohttp client session (its loop drives the timeout)
        url: URL of the PDF to download
        dest: directory to save into
        overwrite: re-download even if the file already exists
        verbose: print a note for each saved file
    """
    pdf_name = os.path.basename(url)
    path = os.path.join(dest, pdf_name)
    if not os.path.exists(path) or overwrite:
        # Hard 60-second cap on the download, tied to the session's loop.
        with aiohttp.Timeout(60, loop=session.loop):
            async with session.get(url) as response:
                pdf = await response.read()
                async with aiofiles.open(path, mode='wb') as f:
                    await f.write(pdf)
                    if verbose:
                        print('saved %s (%d bytes)' % (path, len(pdf)))
项目:mlimages    作者:icoxfog417    | 项目源码 | 文件源码
async def fetch_image(self, session, relative, image_url):
        """Download *image_url* under *relative*; return True when saved.

        ``async`` restored on the ``def``.  The timeout is now caught via
        the public ``concurrent.futures.TimeoutError`` alias instead of the
        private ``concurrent.futures._base`` module, and unused exception
        bindings were dropped.
        """
        fname = self.file_api.get_file_name(image_url)
        p = os.path.join(relative, fname)
        fetched = False
        try:
            with aiohttp.Timeout(self.timeout):
                async with session.get(image_url) as r:
                    # Save only when the reply is OK and the final URL still
                    # resolves to the same file name (no redirect swap).
                    if r.status == 200 and self.file_api.get_file_name(r.url) == fname:
                        c = await r.read()
                        if c:
                            with open(self.file_api.to_abs(p), "wb") as f:
                                f.write(c)
                                fetched = True
        except FileNotFoundError:
            self.logger.error("{0} is not found.".format(p))
        except concurrent.futures.TimeoutError:
            self.logger.warning("{0} is timeouted.".format(image_url))
        except Exception as ex:
            self.logger.warning("fetch image is failed. url: {0}, cause: {1}".format(image_url, str(ex)))
        return fetched
项目:python-zeep    作者:mvantellingen    | 项目源码 | 文件源码
def _load_remote_data(self, url):
        """Synchronously fetch *url* by driving the event loop to completion.

        Returns the raw response bytes; raises TransportError on HTTP error
        statuses.
        """
        result = None

        async def _load_remote_data_async():
            # The payload is handed back through the closure variable
            # `result`; the coroutine itself returns nothing.
            nonlocal result
            with aiohttp.Timeout(self.load_timeout):
                response = await self.session.get(url)
                result = await response.read()
                try:
                    response.raise_for_status()
                except aiohttp.ClientError as exc:
                    # Surface HTTP errors as TransportError while keeping
                    # the already-read body for diagnostics.
                    raise TransportError(
                        message=str(exc),
                        status_code=response.status,
                        content=result
                    ).with_traceback(exc.__traceback__) from exc

        # Block until we have the data
        self.loop.run_until_complete(_load_remote_data_async())
        return result
项目:pnu    作者:erasaur    | 项目源码 | 文件源码
async def get_json(self, url, timeout=None):
        """GET *url* and return {'res': <json-or-{}>, 'status': <int>}.

        Returns an empty dict when the request times out.  ``async``
        restored on the ``def``.  The handler now catches
        ``asyncio.TimeoutError`` — what ``aiohttp.Timeout`` actually raises —
        instead of the unrelated builtin ``TimeoutError`` (an OSError
        subclass), which previously let timeouts escape uncaught.
        """
        if timeout is None:
            timeout = pub_config["http"]["timeout_seconds"]

        result = {}
        try:
            with aiohttp.Timeout(timeout):
                async with self._session.get(url) as resp:
                    try:
                        res = await resp.json()
                    except Exception as e:
                        logging.info("failed decoding response, got exception: {}".format(e))
                        res = {}
                    result["res"] = res
                    result["status"] = resp.status
        except asyncio.TimeoutError as e:
            logging.info("HTTP timeout ({}) with error: {}".format(url, e))

        return result
项目:my-spider    作者:time-river    | 项目源码 | 文件源码
async def _fetch_page(self, request):
        """Fetch one page described by *request* and push the content to redis.

        ``async`` restored on the ``def``.  The failure log referenced an
        undefined name ``url`` (NameError); it now logs ``request['url']``.
        The bare ``except:`` was narrowed to ``except Exception``.
        """
        try:
            with aiohttp.Timeout(10):
                async with aiohttp.get(request['url'], params=request['params'], headers=request['headers']) as response:
                    try:
                        assert response.status == 200
                        if request['type'] == 'json':
                            content = await response.json()
                        else:
                            content = await response.text(request['type'])
                        obj = {'order': request['order'], 'content': content}
                        redis_push(self.redis, self.content_key, obj)
                    except AssertionError:
                        logging.warning('{} {}'.format(response.status, request['url']))
        except Exception:  # kinds of error, not only asyncio.TimeoutError
            #redis_push(self.redis, self.request_key, request)
            pass
项目:my-spider    作者:time-river    | 项目源码 | 文件源码
async def _verify_proxy(self, proxy):
        """Probe *proxy* against a random test URL; keep it in redis if usable.

        ``async`` restored on the ``def``.  The session is now created
        before the ``try`` so the ``finally`` clause can never hit an
        unbound ``session`` name, and the bare ``except:`` clauses were
        narrowed to ``except Exception``.
        """
        addr = proxy['protocol'] + '://' + proxy['ip'] + ':' + proxy['port']
        conn = aiohttp.ProxyConnector(proxy=addr)
        session = aiohttp.ClientSession(connector=conn)
        try:
            with aiohttp.Timeout(10):
                # close connection and response, otherwise will tip:
                # Unclosed connection and Unclosed response
                async with session.get(self.test_url[random.randrange(len(self.test_url))]) as response:
                    try:
                        assert response.status == 200
                        redis_sadd(self.redis, self.proxy_key, proxy)
                    except Exception:
                        pass
        except Exception:  # ProxyConnectionError, HttpProxyError and etc?
            pass
        finally:
            session.close()  # close session when timeout
项目:steamsearch    作者:billy-yoyo    | 项目源码 | 文件源码
def exchange(amount, from_curr, to_curr, timeout=10):
    """Converts an amount of money from one currency to another

    Args:
        amount (float): The amount of money you want to convert
        from_curr (str): The currency you want to convert from,
            either country symbol (e.g USD) or currency symbol (e.g. £)
        to_curr (str): The currency you want to convert to, same format as from_curr
        timeout (int, optional): The time in seconds aiohttp will take to timeout the request
    Returns:
        float: the converted amount of money to 2 d.p., or the original amount if the conversion failed.
    """
    try:
        with aiohttp.ClientSession() as session:
            with aiohttp.Timeout(timeout):
                resp = yield from session.get("http://api.fixer.io/latest?symbols=" + from_curr + "," + to_curr)
                data = yield from resp.json()
                if "rates" in data:
                    # Convert via the API's base currency, truncated to 2 d.p.
                    return int((amount / data["rates"][from_curr]) * data["rates"][to_curr] * 100) / 100
    except Exception:
        # Narrowed from a bare `except:` so cancellation and
        # KeyboardInterrupt still propagate; any request/JSON/key error
        # falls back to returning the original amount.
        return amount
项目:steamsearch    作者:billy-yoyo    | 项目源码 | 文件源码
def get_recommendations(appid, timeout=10):
    """Scrape Steam's 'more like this' page for apps similar to *appid*.

    Returns:
        list[str]: the similar appids found (possibly empty).
    """
    appid = str(appid)
    similar = []
    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):
            resp = yield from session.get("http://store.steampowered.com/recommended/morelike/app/" + appid)
            text = yield from resp.text()
            print(text)

            soup = BeautifulSoup(text, "html.parser")

            items = soup.find_all("div", {"class": "similar_grid_item"})
            print("found %s items" % len(items))
            # Guard-clause style: skip malformed entries early.
            for item in items:
                capsule = item.find("div", {"class": "similar_grid_capsule"})
                if capsule is None:
                    print("failed to get item")
                    continue
                found_id = capsule.get("data-ds-appid")
                if found_id is None:
                    print("failed to find appid")
                    continue
                similar.append(found_id)
    return similar
项目:steamsearch    作者:billy-yoyo    | 项目源码 | 文件源码
def get_user(steamid, timeout=10, be_specific=False):
    """Gets some information about a specific steamid

    Args:
        steamid (str): The user's steamid
        timeout (int, optional): The amount of time before aiohttp raises a timeout error
    Returns:
        a UserResult object, or None if the user could not be found
        """
    # Accept either a numeric steamid or a user name; names are resolved
    # to an id first.
    if not is_integer(steamid):
        steamid = yield from search_for_userid(steamid, be_specific=be_specific)
    if steamid is not None:
        _check_key_set()
        with aiohttp.ClientSession() as session:
            with aiohttp.Timeout(timeout):
                resp = yield from session.get("http://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/?key=" + STEAM_KEY + "&steamids=" + steamid)
                data = yield from resp.json()

                # The API nests players under response.players; an empty
                # list means the id was unknown.
                if "response" in data and "players" in data["response"] and len(data["response"]["players"]) > 0:
                    player = data["response"]["players"][0]
                    return UserResult(player)
    return None
项目:steamsearch    作者:billy-yoyo    | 项目源码 | 文件源码
def get_user_id(name, timeout=10):
    """Resolves a username to a steamid, however is limited to ONLY vanity URL's. search_user_id is recommended

    Args:
        name (str): The name of the user to find the steamid of
        timeout (int, optional): The amount of time before aiohttp raises a timeout error
    Returns:
        either None or a steamid (str) if a vanity url matching that name is found
        """
    # Serve repeat lookups straight from the cache.
    if name in userid_cache:
        return userid_cache[name]

    _check_key_set()
    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):
            query_url = ("http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/?key="
                         + STEAM_KEY + "&vanityurl=" + parse.quote(name))
            resp = yield from session.get(query_url)
            data = yield from resp.json()

            payload = data.get("response", {})
            if payload.get("success") == 1:
                steamid = payload["steamid"]
                if STEAM_CACHE:
                    userid_cache[name] = steamid
                return steamid
            return None
项目:steamsearch    作者:billy-yoyo    | 项目源码 | 文件源码
def steam_user_data(timeout=10):
    """Gets information about the amount of users on steam over the past 48 hours

    Args:
        timeout (int, optional): The amount of time before aiohttp raises a timeout error
    Returns:
        A tuple containing (min_users (int), max_users (int), current_users (int))"""
    with aiohttp.ClientSession() as session:
        with aiohttp.Timeout(timeout):
            resp = yield from session.get("http://store.steampowered.com/stats/userdata.json")
            data = yield from resp.json()
            # Payload shape: [{"data": [[timestamp, user_count], ...]}, ...]
            data = data[0]["data"]

            # min()/max() over the user-count column replace the manual
            # scan; -1 is kept as the sentinel for an empty series.
            counts = [pair[1] for pair in data]
            min_users = min(counts) if counts else -1
            max_users = max(counts) if counts else -1
            return min_users, max_users, data[-1][1]
项目:steamsearch    作者:billy-yoyo    | 项目源码 | 文件源码
def get_user_achievements(username, gameid, timeout=10, be_specific=False):
    """Gets information about a specific user's achievements for a specific game

    Args:
        username (str): the id or name of the user you want the achievements for
        gameid (str): the id or name of the game you want the achievements for
        timeout (int): the amount of time before aiohttp raises a timeout error
    Returns:
        UserAchievements: the user achievements found, or None on failure"""
    # Resolve names to numeric ids where necessary.
    if not is_integer(username):
        username = yield from search_for_userid(username, timeout=timeout, be_specific=be_specific)
    if not is_integer(gameid):
        gameid, gamename = yield from get_app(gameid, timeout=timeout)
    else:
        # Numeric gameid given: the display name is unknown at this point.
        gamename = "???"
    _check_key_set()
    if username is not None and gameid is not None:
        with aiohttp.ClientSession() as session:
            with aiohttp.Timeout(timeout):
                resp = yield from session.get("http://api.steampowered.com/ISteamUserStats/GetPlayerAchievements/v0001/?appid=" + gameid + "&key=" + STEAM_KEY + "&steamid=" + username)
                data = yield from resp.json()
                if "playerstats" in data and "achievements" in data["playerstats"]:
                    return UserAchievements(gameid, gamename, data["playerstats"]["achievements"])
    # Implicitly returns None when resolution or the API call fails.
项目:beatrice-witchcogs    作者:PookaMustard    | 项目源码 | 文件源码
async def isitdown(self, url):
        """Checks if a website is down or up."""
        # async restored on the def (the body awaits).
        if url == "":
            await self.bot.say("You haven't entered a website to check.")
            return
        # Bug fix: the old test `"http://" not in url or "https://" not in
        # url` was true for almost every input — including https URLs, which
        # do not contain the substring "http://" — so a scheme got prepended
        # onto already-qualified URLs.  Check the prefix instead.
        if not url.startswith(("http://", "https://")):
            url = "http://" + url
        try:
            with aiohttp.Timeout(15):
                await self.bot.say("Testing " + url + "…")
                try:
                    response = await aiohttp.get(url, headers = { 'user_agent': headers })
                    if response.status == 200:
                        await self.bot.say(url + " is up and running.")
                    else:
                        await self.bot.say(url + " is down.")
                except Exception:
                    await self.bot.say(url + " is down.")
        except asyncio.TimeoutError:
            await self.bot.say(url + " is down.")
项目:PRCDNS    作者:lbp0200    | 项目源码 | 文件源码
async def fetch(session, url, proxy=None):
        """GET *url* through *session*, optionally via an HTTP proxy
        (e.g. http://127.0.0.1:8123), and return the response body text.

        ``async`` restored on the ``def`` (the body awaits).
        """
        with aiohttp.Timeout(10):
            async with session.get(url, proxy=proxy) as response:
                return await response.text()
项目:fingerprint-securedrop    作者:freedomofpress    | 项目源码 | 文件源码
async def fetch(self, url):
        """Load a webpage and return the body as plaintext.

        ``async`` restored on the ``def`` (the body awaits).

        Raises:
            SorterResponseCodeError: on a non-200 status.
            SorterTimeoutError: when the page load times out.
            SorterConnectionError: on SOCKS/disconnect/response errors.
            SorterCertError: on (probable) certificate errors.
        """
        self.logger.info("{url}: loading...".format(**locals()))
        try:
            with aiohttp.Timeout(self.page_load_timeout, loop=self.loop):
                async with self.session.get(url,
                                            allow_redirects=True,
                                            headers=self.headers) as resp:

                    if resp.status != 200:
                        self.logger.warning("{url} was not reachable. HTTP "
                                            "error code {resp.status} was "
                                            "returned".format(**locals()))
                        raise SorterResponseCodeError

                    self.logger.info("{url}: loaded "
                                     "successfully.".format(**locals()))
                    return await resp.text()
        except asyncio.TimeoutError:
            self.logger.warning("{url}: timed out after "
                                "{self.page_load_timeout}.".format(**locals()))
            raise SorterTimeoutError
        except (aiosocks.errors.SocksError,
                aiohttp.errors.ServerDisconnectedError,
                aiohttp.errors.ClientResponseError) as exc:
            self.logger.warning("{url} was not reachable: "
                                "{exc}".format(**locals()))
            raise SorterConnectionError
        except aiohttp.errors.ClientOSError as exception_msg:
            # Bug fix: `"SSL" in exception_msg` performed a membership test
            # on the exception object itself, raising TypeError; test the
            # exception's string form instead.
            if "SSL" in str(exception_msg):
                self.logger.warning("{url}: certificate error (probably due to "
                                    "use of a self-signed "
                                    "cert.".format(**locals()))
                raise SorterCertError
            else:
                raise
        # NOTE(review): ClientOSError is already handled above, so this
        # clause effectively only catches ssl.CertificateError — confirm.
        except (ssl.CertificateError, aiohttp.errors.ClientOSError):
            self.logger.warning("{url}: certificate error (probably due to "
                                "use of a self-signed "
                                "cert.".format(**locals()))
            raise SorterCertError
项目:telegram-uz-bot    作者:vit-    | 项目源码 | 文件源码
async def call(self, endpoint, method='POST', raw=False, *args, **kwargs):
        """Perform an API request against *endpoint* and return parsed JSON
        (or raw bytes when ``raw`` is true).

        ``async`` restored on the ``def``.  The local formerly named
        ``json`` was renamed ``payload`` so it no longer shadows the
        ``json`` module name.

        Raises:
            BadRequest: on HTTP 400.
            HTTPError: on any other non-200 status.
            ResponseError: when a 200 body carries an ``error`` field.
        """
        if 'headers' not in kwargs:
            kwargs['headers'] = await self.get_headers()

        uri = self.uri(endpoint)
        logger.debug('Fetching: %s', uri)
        logger.debug('Headers: %s', kwargs['headers'])
        logger.debug('Cookies: %s', self.session.cookies)

        with aiohttp.Timeout(self.request_timeout):
            async with self.session.request(
                    method, uri, *args, **kwargs) as response:
                body = await response.read()
                if not response.status == 200:
                    try:
                        payload = await response.json()
                    except Exception:  # TODO: narrow exception
                        payload = None
                    ex = BadRequest if response.status == 400 else HTTPError
                    raise ex(response.status, body, kwargs.get('data'), payload)
                if raw:
                    return body
                payload = await response.json()
                if payload.get('error'):
                    raise ResponseError(response.status, body, kwargs.get('data'), payload)
                return payload
项目:WeenieBot    作者:Beefywhale    | 项目源码 | 文件源码
async def fetch(session, url):
    """GET *url* and return (body_text, status_code).

    ``async`` restored on the ``def`` (the body awaits).
    """
    with aiohttp.Timeout(10, loop=session.loop):
        async with session.get(url) as response:
            tmp = await response.text()
            return (tmp, response.status)
项目:peony-twitter    作者:odrling    | 项目源码 | 文件源码
async def connect(self):
        """
            Create the connection

        Returns
        -------
        self

        Raises
        ------
        exception.PeonyException
            On a response status in 4xx that are not status 420 or 429
            Also on statuses in 1xx or 3xx since this should not be the status
            received here
        """
        # async restored on the def (the body awaits).
        # NOTE(review): the docstring says "Returns self" but there is no
        # return statement — confirm the documented contract.
        with aiohttp.Timeout(self.timeout):
            self.response = await self._connect()

        # Map the HTTP status onto the stream's state machine.
        if self.response.status in range(200, 300):
            self._error_timeout = 0
            self.state = NORMAL
        elif self.response.status == 500:
            self.state = DISCONNECTION
        elif self.response.status in range(501, 600):
            self.state = RECONNECTION
        elif self.response.status in (420, 429):
            self.state = ENHANCE_YOUR_CALM
        else:
            logger.debug("raising error during stream connection")
            raise await exceptions.throw(self.response,
                                         loads=self.client._loads,
                                         url=self.kwargs['url'])

        logger.debug("stream state: %d" % self.state)
项目:aiovk    作者:Fahreeve    | 项目源码 | 文件源码
async def json(self, url, params, timeout=None):
        """GET *url* with *params* and return the parsed JSON body.

        ``async`` restored on the ``def``.  Falls back to self.timeout when
        no explicit timeout is given.
        """
        with aiohttp.Timeout(timeout or self.timeout):
            async with self.session.get(url, params=params) as response:
                return await response.json()
项目:aiovk    作者:Fahreeve    | 项目源码 | 文件源码
async def get_text(self, url, params, timeout=None):
        """GET *url* with *params*; return (status_code, body_text).

        ``async`` restored on the ``def``.  Falls back to self.timeout when
        no explicit timeout is given.
        """
        with aiohttp.Timeout(timeout or self.timeout):
            response = await self.session.get(url, params=params)
            return response.status, await response.text()
项目:aiovk    作者:Fahreeve    | 项目源码 | 文件源码
async def get_bin(self, url, params, timeout=None):
        """GET *url* with *params* and return the raw response bytes.

        ``async`` restored on the ``def``.  Falls back to self.timeout when
        no explicit timeout is given.
        """
        with aiohttp.Timeout(timeout or self.timeout):
            response = await self.session.get(url, params=params)
            return await response.read()
项目:aiovk    作者:Fahreeve    | 项目源码 | 文件源码
async def post_text(self, url, data, timeout=None):
        """POST *data* to *url*; return (final_url, body_text).

        ``async`` restored on the ``def``.  Falls back to self.timeout when
        no explicit timeout is given.
        """
        with aiohttp.Timeout(timeout or self.timeout):
            response = await self.session.post(url, data=data)
            return response.url, await response.text()
项目:toshi-reputation-service    作者:toshiapp    | 项目源码 | 文件源码
async def do_push(push_url, body, address, signing_key, reviewee_id):
    """POST *body* to *push_url* with Toshi request signing, retrying with
    linear backoff (5s steps, capped at 30s) for up to 10 attempts.

    ``async`` restored on the ``def`` (the body uses ``async with``/``await``).
    """
    path = '/' + push_url.split('/', 3)[-1]

    method = 'POST'
    backoff = 5
    retries = 10

    terminate = False
    async with aiohttp.ClientSession() as session:
        while not terminate:
            # Each attempt is signed with a fresh timestamp.
            timestamp = int(time.time())
            signature = sign_request(signing_key, method, path, timestamp, body)

            with aiohttp.Timeout(10):
                async with session.post(push_url,
                                        headers={
                                            'content-type': 'application/json',
                                            TOSHI_SIGNATURE_HEADER: signature,
                                            TOSHI_ID_ADDRESS_HEADER: address,
                                            TOSHI_TIMESTAMP_HEADER: str(timestamp)},
                                        data=body) as response:
                    if response.status == 204 or response.status == 200:
                        terminate = True
                    else:
                        log.error("Error updating user details")
                        log.error("URL: {}".format(push_url))
                        log.error("User Address: {}".format(reviewee_id))
                    retries -= 1
                    if retries <= 0:
                        terminate = True
            # NOTE(review): the backoff sleep also runs once after a
            # successful push — confirm whether that trailing delay is
            # intended.
            await asyncio.sleep(backoff)
            backoff = min(backoff + 5, 30)
项目:rauc-hawkbit    作者:rauc    | 项目源码 | 文件源码
async def get_resource(self, api_path, query_params=None, **kwargs):
    """
    Helper method for HTTP GET API requests.

    Args:
        api_path(str): REST API path
    Keyword Args:
        query_params: Query parameters to add to the API URL
        kwargs: Other keyword args used for replacing items in the API path

    Returns:
        Response JSON data
    """
    # Fixes: restored ``async def`` (body awaits); replaced the mutable
    # default argument ``query_params={}`` with None; renamed the local
    # ``json`` which shadowed the builtin module name.
    get_headers = {
        'Accept': 'application/json',
        **self.headers
    }
    url = self.build_api_url(
            api_path.format(
                tenant=self.tenant,
                controllerId=self.controller_id,
                **kwargs))

    self.logger.debug('GET {}'.format(url))
    with aiohttp.Timeout(self.timeout):
        async with self.session.get(url, headers=get_headers,
                                    params=query_params or {}) as resp:
            await self.check_http_status(resp)
            json_data = await resp.json()
            self.logger.debug(json_data)
            return json_data
项目:rauc-hawkbit    作者:rauc    | 项目源码 | 文件源码
async def get_binary(self, url, dl_location,
                     mime='application/octet-stream', chunk_size=512,
                     timeout=3600):
    """
    Actual download method with checksum checking.

    Args:
        url(str): URL of item to download
        dl_location(str): storage path for downloaded artifact
    Keyword Args:
        mime: mimetype of content to retrieve
              (default: 'application/octet-stream')
        chunk_size: size of chunk to retrieve
        timeout: download timeout
                 (default: 3600)

    Returns:
        MD5 hash of downloaded content
    """
    # Fix: restored ``async def`` — the body awaits, so plain ``def`` is a
    # SyntaxError.
    get_bin_headers = {
        'Accept': mime,
        **self.headers
    }
    # MD5 is computed incrementally while streaming so the artifact never
    # has to be held in memory in full.
    hash_md5 = hashlib.md5()

    self.logger.debug('GET binary {}'.format(url))
    # Outer timeout bounds the whole download ...
    with aiohttp.Timeout(timeout, loop=self.session.loop):
        async with self.session.get(url, headers=get_bin_headers) as resp:
            await self.check_http_status(resp)
            with open(dl_location, 'wb') as fd:
                while True:
                    # ... inner timeout aborts if the stream stalls for
                    # more than 60 seconds between chunks.
                    with aiohttp.Timeout(60):
                        chunk = await resp.content.read(chunk_size)
                        if not chunk:
                            break
                        fd.write(chunk)
                        hash_md5.update(chunk)
    return hash_md5.hexdigest()
项目:apm-agent-python    作者:elastic    | 项目源码 | 文件源码
async def send(self, data, headers, timeout=None):
    """POST *data* to the APM Server and return its ``Location`` header.

    Args:
        data: serialized event payload.
        headers: HTTP headers for the request.
        timeout: request timeout in seconds; ``None`` disables the timeout.

    Raises:
        TransportException: on timeout, on a non-202 response (with the
            response body included), or on any other connection error.
    """
    # Fix: restored ``async def`` — the body awaits, so plain ``def`` is a
    # SyntaxError. ("Use synchronous interface, because this is a
    # coroutine" in the original referred to the transport API.)
    try:
        with aiohttp.Timeout(timeout):
            async with self.client.post(self._url,
                                        data=data,
                                        headers=headers) as response:
                # APM Server acknowledges accepted payloads with 202; the
                # AssertionError is translated below.
                assert response.status == 202
    except asyncio.TimeoutError as e:
        print_trace = True
        message = ("Connection to APM Server timed out "
                   "(url: %s, timeout: %s seconds)" % (self._url, timeout))
        raise TransportException(message, data,
                                 print_trace=print_trace) from e
    except AssertionError as e:
        print_trace = True
        # NOTE(review): reading the body after the ``async with`` block has
        # exited relies on aiohttp buffering — confirm against the aiohttp
        # version in use.
        body = await response.read()
        if response.status == 429:
            message = 'Temporarily rate limited: '
            print_trace = False
        else:
            message = 'Unable to reach APM Server: '
        message += '%s (url: %s, body: %s)' % (e, self._url, body)
        raise TransportException(message, data,
                                 print_trace=print_trace) from e
    except Exception as e:
        print_trace = True
        message = 'Unable to reach APM Server: %s (url: %s)' % (
            e, self._url)
        raise TransportException(message, data,
                                 print_trace=print_trace) from e
    else:
        return response.headers.get('Location')
项目:votebot    作者:greut    | 项目源码 | 文件源码
def call(method, file=None, **kwargs):
    r"""
    Perform an API call to Slack.

    This is an old-style (pre-``async def``) generator-based coroutine that
    uses ``yield from``; drive it with an asyncio event loop.

    :param method: name of the `Slack Web API`_ method to call.
    :type method: str
    :param file: File pointer
    :type file: file
    :param \**kwargs: see below

    :Keyword Arguments:
        All the arguments required by the method from the `Slack Web API`_.

    :returns: JSON response.
    :rtype: dict
    """
    # JSON encode any sub-structure...
    for k, w in kwargs.items():
        # keep str as is.
        if not isinstance(w, (bytes, str)):
            kwargs[k] = json.dumps(w)

    form = FormData(kwargs)

    # Handle file upload
    if file:
        form.add_field('file', file)

    logging.debug('POST (m=%s) /api/%s %s', form.is_multipart, method, kwargs)

    with ClientSession() as session:
        with Timeout(10):
            response = yield from session.post('https://{0}/api/{1}'
                                               .format(SLACK_DOMAIN, method),
                                               data=form)
            assert 200 == response.status, response
            try:
                body = yield from response.json()
                logging.debug('Response /api/%s %d %s',
                              method, response.status, body)
                return body
            finally:
                # Always release the connection back to the pool, even when
                # JSON decoding fails.
                yield from response.release()
项目:python3wos_asyncio    作者:cclauss    | 项目源码 | 文件源码
async def fetch(session, url):
    """GET *url* via *session* and return the response body as text.

    The whole request/read is bounded by a 4-minute timeout.
    """
    # Fix: restored ``async def`` — the body awaits, so plain ``def`` is a
    # SyntaxError.
    with aiohttp.Timeout(60 * 4):
        async with session.get(url) as response:
            return await response.text()
项目:python3wos_asyncio    作者:cclauss    | 项目源码 | 文件源码
async def fetch_json(session, url):
    """GET *url* via *session* and return its decoded JSON payload.

    Raises AssertionError on any non-200 status; bounded by a
    10-second timeout.
    """
    # Fix: restored ``async def`` — the body awaits, so plain ``def`` is a
    # SyntaxError.
    with aiohttp.Timeout(10):
        async with session.get(url) as response:
            assert response.status == 200
            return await response.json()
项目:python3wos_asyncio    作者:cclauss    | 项目源码 | 文件源码
async def fetch_json(session, url):
    """GET *url* via *session* and return its decoded JSON payload.

    Raises AssertionError on any non-200 status; bounded by a
    10-second timeout.
    """
    # Fix: restored ``async def`` — the body awaits, so plain ``def`` is a
    # SyntaxError.
    with aiohttp.Timeout(10):
        async with session.get(url) as response:
            assert response.status == 200
            return await response.json()
项目:discordbot.py    作者:rauenzi    | 项目源码 | 文件源码
async def fetchURL(url, loop):
    """GET *url* on the given event loop and return the body text.

    A fresh ClientSession is created per call and closed automatically;
    the request is bounded by a 10-second timeout.
    """
    # Fix: restored ``async def`` — the body awaits, so plain ``def`` is a
    # SyntaxError.
    async with aiohttp.ClientSession(loop=loop) as session:
        with aiohttp.Timeout(10, loop=session.loop):
            async with session.get(url) as response:
                return await response.text()
项目:discordbot.py    作者:rauenzi    | 项目源码 | 文件源码
async def downloadImage(url, folder, name, loop, chunkSize=20):
    """Download *url* into ``folder/name`` if it serves an image.

    Args:
        url: image URL to fetch.
        folder: destination directory (must exist).
        name: destination file name without extension (extension is
            derived from the response Content-Type).
        loop: event loop for the ClientSession.
        chunkSize: bytes to read per iteration (default 20).

    Returns:
        dict of progress flags: ``canAccessURL``, ``isImage``,
        ``fileSaved`` — earlier failures leave later flags False.
    """
    # Fixes: restored ``async def`` (body awaits); use ``headers.get`` so a
    # missing Content-Type header can't raise KeyError; guard against
    # ``guess_extension`` returning None (would TypeError on concatenation).
    result = {'canAccessURL': False, 'isImage': False, 'fileSaved': False}
    # Browser-like headers: some hosts reject requests without them.
    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
        'Accept-Encoding': 'none',
        'Accept-Language': 'en-US,en;q=0.8',
        'Connection': 'keep-alive'}
    async with aiohttp.ClientSession(loop=loop) as session:
        with aiohttp.Timeout(10, loop=session.loop):
            async with session.get(url, headers=headers) as response:
                content_type = response.headers.get('content-type', '')
                if response.status == 200:
                    result['canAccessURL'] = True
                if "image" in content_type:
                    result['isImage'] = True
                if not result['canAccessURL'] or not result['isImage']:
                    return result
                extension = mimetypes.guess_extension(content_type)
                if extension == '.jpe':
                    # mimetypes maps image/jpeg to '.jpe'; prefer '.jpg'.
                    extension = '.jpg'
                if extension is None:
                    # Unknown mime type: save without an extension rather
                    # than crashing on string concatenation.
                    extension = ''

                with open(folder + "/" + name + extension, 'wb') as fd:
                    while True:
                        chunk = await response.content.read(chunkSize)
                        if not chunk:
                            break
                        fd.write(chunk)
                result['fileSaved'] = True
                return result
项目:discordbot.py    作者:rauenzi    | 项目源码 | 文件源码
async def fetchURL(url, loop):
    """GET *url* on the given event loop and return the body text.

    A fresh ClientSession is created per call and closed automatically;
    the request is bounded by a 10-second timeout.
    """
    # Fix: restored ``async def`` — the body awaits, so plain ``def`` is a
    # SyntaxError. Indentation normalized to 4 spaces.
    async with aiohttp.ClientSession(loop=loop) as session:
        with aiohttp.Timeout(10, loop=session.loop):
            async with session.get(url) as response:
                return await response.text()
项目:discordbot.py    作者:rauenzi    | 项目源码 | 文件源码
async def downloadImage(url, folder, name, loop, chunkSize=20):
    """Download *url* into ``folder/name`` if it serves an image.

    Args:
        url: image URL to fetch.
        folder: destination directory (must exist).
        name: destination file name without extension (extension is
            derived from the response Content-Type).
        loop: event loop for the ClientSession.
        chunkSize: bytes to read per iteration (default 20).

    Returns:
        dict of progress flags: ``canAccessURL``, ``isImage``,
        ``fileSaved`` — earlier failures leave later flags False.
    """
    # Fixes: restored ``async def`` (body awaits); use ``headers.get`` so a
    # missing Content-Type header can't raise KeyError; guard against
    # ``guess_extension`` returning None. Indentation normalized.
    result = {'canAccessURL': False, 'isImage': False, 'fileSaved': False}
    # Browser-like headers: some hosts reject requests without them.
    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
        'Accept-Encoding': 'none',
        'Accept-Language': 'en-US,en;q=0.8',
        'Connection': 'keep-alive'}
    async with aiohttp.ClientSession(loop=loop) as session:
        with aiohttp.Timeout(10, loop=session.loop):
            async with session.get(url, headers=headers) as response:
                content_type = response.headers.get('content-type', '')
                if response.status == 200:
                    result['canAccessURL'] = True
                if "image" in content_type:
                    result['isImage'] = True
                if not result['canAccessURL'] or not result['isImage']:
                    return result
                extension = mimetypes.guess_extension(content_type)
                if extension == '.jpe':
                    # mimetypes maps image/jpeg to '.jpe'; prefer '.jpg'.
                    extension = '.jpg'
                if extension is None:
                    # Unknown mime type: save without an extension rather
                    # than crashing on string concatenation.
                    extension = ''

                with open(folder + "/" + name + extension, 'wb') as fd:
                    while True:
                        chunk = await response.content.read(chunkSize)
                        if not chunk:
                            break
                        fd.write(chunk)
                result['fileSaved'] = True
                return result
项目:web_develop    作者:dongweiming    | 项目源码 | 文件源码
async def fetch(url, proxy=None):
    """Fetch http://python.org through an optional proxy and return its JSON.

    NOTE(review): the *url* parameter is unused — the target is hard-coded
    to python.org. Looks intentional in the original demo; confirm with
    callers before changing.
    """
    # Fixes: restored ``async def`` (body uses ``async with``); pass
    # ``headers`` by keyword — positionally it would bind to a different
    # parameter of ``session.get``; await ``resp.json()`` so the decoded
    # body (not a coroutine) is returned before the response is released.
    conn = aiohttp.ProxyConnector(proxy=proxy)
    headers = {'user-agent': get_user_agent()}
    with aiohttp.ClientSession(connector=conn) as session:
        with aiohttp.Timeout(TIMEOUT):
            async with session.get('http://python.org', headers=headers) as resp:
                return await resp.json()
项目:omnic    作者:michaelpb    | 项目源码 | 文件源码
async def _download_async(self, url, f_handle):
    """Stream *url* into the already-open file handle *f_handle*.

    Reads in 1 KiB chunks; the whole transfer is bounded by a
    10-second timeout.
    """
    # Fix: restored ``async def`` — the body awaits, so plain ``def`` is a
    # SyntaxError.
    DOWNLOAD_TIMEOUT = 10
    DOWNLOAD_CHUNK_SIZE = 1024
    with aiohttp.Timeout(DOWNLOAD_TIMEOUT):
        async with self.aiohttp.get(url) as response:
            while True:
                chunk = await response.content.read(DOWNLOAD_CHUNK_SIZE)
                if not chunk:
                    break
                f_handle.write(chunk)
            # release() returns None; returned here to preserve the
            # original's (None) return value.
            return await response.release()
项目:Squid-Plugins    作者:tekulvw    | 项目源码 | 文件源码
async def _get_feed(self, url):
    """Fetch *url* and return the body text, or None on any failure.

    Best-effort by design: callers treat None as "feed unavailable".
    """
    # Fixes: restored ``async def`` (body awaits); narrowed the bare
    # ``except:`` to ``except Exception`` so KeyboardInterrupt and
    # SystemExit still propagate.
    text = None
    try:
        with aiohttp.ClientSession() as session:
            with aiohttp.Timeout(3):
                async with session.get(url) as r:
                    text = await r.text()
    except Exception:
        pass
    return text