Python async_timeout module: timeout() example source code

We have extracted the following 49 code examples from open-source Python projects to illustrate how to use async_timeout.timeout().
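
For orientation, here is a minimal sketch of the pattern shared by the examples below: wrap an awaited operation in async_timeout.timeout() and handle asyncio.TimeoutError. The function names and URL are illustrative, and the plain `with` form assumes an older async_timeout release (pre-4.0, as used throughout these snippets); current releases only accept `async with`.

import asyncio

import aiohttp
import async_timeout


async def fetch_with_timeout(session, url, timeout=10):
    """Fetch a URL, raising asyncio.TimeoutError if it takes longer than `timeout` seconds."""
    # Illustrative example; the URL and names are placeholders.
    # Older async_timeout releases (as in the snippets below) allow the plain
    # ``with`` form inside a coroutine; async_timeout >= 4.0 requires ``async with``.
    with async_timeout.timeout(timeout):
        async with session.get(url) as response:
            return await response.text()


async def main():
    async with aiohttp.ClientSession() as session:
        try:
            text = await fetch_with_timeout(session, 'https://example.com')
            print('fetched {} characters'.format(len(text)))
        except asyncio.TimeoutError:
            print('request timed out')


if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(main())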

Project: ha-ffmpeg    Author: pvizeli
def close(self, timeout=5):
        """Stop a ffmpeg instance."""
        if not self.is_running:
            _LOGGER.warning("FFmpeg isn't running!")
            return

        try:
            # send stop to ffmpeg
            with async_timeout.timeout(timeout, loop=self._loop):
                yield from self._proc.communicate(input=b'q')
            _LOGGER.debug("Close FFmpeg process")

        except (asyncio.TimeoutError, ValueError):
            _LOGGER.warning("Timeout while waiting of FFmpeg")
            self._proc.kill()

        finally:
            self._clear()
Project: async-timeout    Author: aio-libs
def test_timeout(loop):
    canceled_raised = False

    @asyncio.coroutine
    def long_running_task():
        try:
            yield from asyncio.sleep(10, loop=loop)
        except asyncio.CancelledError:
            nonlocal canceled_raised
            canceled_raised = True
            raise

    with pytest.raises(asyncio.TimeoutError):
        with timeout(0.01, loop=loop) as t:
            yield from long_running_task()
            assert t._loop is loop
    assert canceled_raised, 'CancelledError was not raised'
Project: async-timeout    Author: aio-libs
def test_timeout_time(loop):
    foo_running = None

    start = loop.time()
    with pytest.raises(asyncio.TimeoutError):
        with timeout(0.1, loop=loop):
            foo_running = True
            try:
                yield from asyncio.sleep(0.2, loop=loop)
            finally:
                foo_running = False

    dt = loop.time() - start
    if not (0.09 < dt < 0.11) and os.environ.get('APPVEYOR'):
        pytest.xfail('appveyor sometimes is toooo sloooow')
    assert 0.09 < dt < 0.11
    assert not foo_running
Project: async-timeout    Author: aio-libs
def test_timeout_remaining(loop):
    with timeout(None, loop=loop) as cm:
        assert cm.remaining is None

    t = timeout(1.0, loop=loop)
    assert t.remaining is None

    with timeout(1.0, loop=loop) as cm:
        yield from asyncio.sleep(0.1, loop=loop)
        assert cm.remaining < 1.0

    with pytest.raises(asyncio.TimeoutError):
        with timeout(0.1, loop=loop) as cm:
            yield from asyncio.sleep(0.5, loop=loop)

    assert cm.remaining == 0.0
Project: goldmine    Author: Armored-Dragon
def ocr(self, ctx):
        """OCR an image.
        Usage: ocr [attach an image]"""
        or_check_perms(ctx, ('bot_owner',))
        warnings.simplefilter('error', Image.DecompressionBombWarning)
        if ctx.message.attachments:
            with async_timeout.timeout(5):
                async with self.bot.cog_http.get(ctx.message.attachments[0].proxy_url) as r:
                    raw_image = await r.read()
        else:
            await ctx.send(':warning: No attachment found.')
            return
        img_bytes = BytesIO(raw_image)
        image = Image.open(img_bytes)
        text = tesserocr.image_to_text(image)
        if text:
            await ctx.send(text)
        else:
            await ctx.send('No results.')
Project: goldmine    Author: Armored-Dragon
def update_dbots(self):
        if not discord_bots_token:
            self.logger.warning('Tried to contact Discord Bots, but no token set!')
            return False
        data = dict(guild_count=len(self.bot.guilds))
        dest = 'https://bots.discord.pw/api/bots/' + str(self.bot.user.id) + '/stats'
        headers = {
            'Authorization': discord_bots_token,
            'Content-Type': 'application/json'
        }
        with async_timeout.timeout(6):
            async with self.bot.cog_http.post(dest, data=json.dumps(data), headers=headers) as r:
                resp_key = f'(got {r.status} {r.reason})'
                if r.status == 200:
                    self.logger.info('Successfully sent Discord Bots our guild count (got 200 OK)')
                else:
                    self.logger.warning('Failed sending our guild count to Discord Bots! ' + resp_key)
Project: goldmine    Author: Armored-Dragon
def update_discordlist(self):
        if not discordlist_token:
            self.logger.warning('Tried to contact DiscordList, but no token set!')
            return False
        data = {
            'token': discordlist_token,
            'guilds': len(self.bot.guilds)
        }
        dest = 'https://bots.discordlist.net/api'
        headers = {'Content-Type': 'application/json'}
        with async_timeout.timeout(6):
            async with self.bot.cog_http.post(dest, data=json.dumps(data), headers=headers) as r:
                resp_key = f'(got {r.status} {r.reason})'
                if r.status == 200:
                    self.logger.info('Successfully sent DiscordList our guild count! (got 200 OK)')
                else:
                    self.logger.warning('Failed sending our guild count to DiscordList! ' + resp_key)
Project: goldmine    Author: Armored-Dragon
def add_emote(self, ctx, emote: str):
        """Add a Twitch, FrankerFaceZ, BetterTTV, or Discord emote to the current guild.
        Usage: add_emote [name of emote]"""
        echeck_perms(ctx, ('bot_owner',))
        emote = emote.replace(':', '')
        with async_timeout.timeout(12):
            try:
                async with self.bot.cog_http.get('https://static-cdn.jtvnw.net/emoticons/v1/' + str(self.bot.emotes['twitch'][emote]['image_id']) + '/1.0') as resp:
                    emote_img = await resp.read()
            except KeyError: # let's try frankerfacez
                try:
                    async with self.bot.cog_http.get('https://cdn.frankerfacez.com/emoticon/' + str(self.bot.emotes['ffz'][emote]) + '/1') as resp:
                        emote_img = await resp.read()
                except KeyError: # let's try BetterTTV
                    try:
                        async with self.bot.cog_http.get(self.bot.emotes['bttv'][emote]) as resp:
                            emote_img = await resp.read()
                    except KeyError: # let's try Discord
                        await ctx.send('**No such emote!** I can fetch from Twitch, FrankerFaceZ, BetterTTV, or Discord (soon).')
                        return False
        result = ctx.guild.create_custom_emoji(emote, emote_img)
        await ctx.send('Added. ' + str(result))
Project: owllook    Author: howie6879
def fetch(client, url, novels_name):
    with async_timeout.timeout(20):
        try:
            headers = {
                'user-agent': get_random_user_agent(),
                'referer': "https://www.bing.com/"
            }
            params = {'q': novels_name, 'ensearch': 0}
            async with client.get(url, params=params, headers=headers) as response:
                assert response.status == 200
                LOGGER.info('Task url: {}'.format(response.url))
                try:
                    text = await response.text()
                except:
                    text = await response.read()
                return text
        except Exception as e:
            LOGGER.exception(e)
            return None
Project: owllook    Author: howie6879
def fetch(client, url, novels_name):
    with async_timeout.timeout(20):
        try:
            headers = {
                'user-agent': get_random_user_agent(),
                'referer': "https://duckduckgo.com/"
            }
            params = {'q': novels_name}
            async with client.get(url, params=params, headers=headers) as response:
                assert response.status == 200
                LOGGER.info('Task url: {}'.format(response.url))
                try:
                    text = await response.text()
                except:
                    text = await response.read()
                return text
        except Exception as e:
            LOGGER.exception(e)
            return None
Project: owllook    Author: howie6879
def fetch(client, url, name, is_web):
    with async_timeout.timeout(15):
        try:
            headers = {'user-agent': get_random_user_agent()}
            if is_web:
                params = {'wd': name, 'ie': 'utf-8', 'rn': CONFIG.BAIDU_RN, 'vf_bl': 1}
            else:
                params = {'word': name}
            async with client.get(url, params=params, headers=headers) as response:
                assert response.status == 200
                LOGGER.info('Task url: {}'.format(response.url))
                try:
                    text = await response.text()
                except:
                    text = await response.read()
                return text
        except Exception as e:
            LOGGER.exception(e)
            return None
Project: owllook    Author: howie6879
def get_real_url(client, url):
    with async_timeout.timeout(10):
        try:
            headers = {'user-agent': get_random_user_agent()}
            async with client.head(url, headers=headers, allow_redirects=True) as response:
                assert response.status == 200
                LOGGER.info('Parse url: {}'.format(response.url))
                # text = ""
                # try:
                #     text = await response.text()
                # except:
                #     text = await response.read()
                # if text:
                #     print(text)
                #     text = re.findall(r'replace\(\"(.*?)\"\)', str(text))
                #     text = text[0] if text[0] else ""
                url = response.url if response.url else None
                return url
        except Exception as e:
            LOGGER.exception(e)
            return None
Project: owllook    Author: howie6879
def data_extraction_for_phone(html):
    with async_timeout.timeout(10):
        try:
            # Get title
            data_log = eval(html['data-log'])
            url = data_log.get('mu', None)
            if not url:
                return None
            # Get title
            title = html.find('h3').get_text()
            # Get author and update_time (option)
            novel_mess = html.findAll(class_='c-gap-right-large')
            basic_mess = [i.get_text() for i in novel_mess] if novel_mess else None
            return {'title': title, 'url': url, 'basic_mess': basic_mess}
        except Exception as e:
            LOGGER.exception(e)
            return None
Project: owllook    Author: howie6879
def target_fetch(client, url):
    """
    :param client: aiohttp client
    :param url: target url
    :return: text
    """
    with async_timeout.timeout(30):
        try:
            headers = {'user-agent': get_random_user_agent()}
            async with client.get(url, headers=headers) as response:
                assert response.status == 200
                LOGGER.info('Task url: {}'.format(response.url))
                try:
                    text = await response.text()
                except:
                    try:
                        text = await response.read()
                    except aiohttp.ServerDisconnectedError as e:
                        LOGGER.exception(e)
                        text = None
                return text
        except Exception as e:
            LOGGER.exception(e)
            return None
Project: freehp    Author: jadbin
def _update_proxy_list(self):
        try:
            self.params['detail'] = ''
            async with aiohttp.ClientSession(loop=self.loop) as session:
                with async_timeout.timeout(self.timeout, loop=self.loop):
                    async with session.request('GET', self.agent_addr,
                                               auth=self.auth,
                                               params=self.params) as resp:
                        body = await resp.read()
                        proxies = json.loads(body.decode('utf-8'))
                        if len(proxies) > 0:
                            res = []
                            for p in proxies:
                                if self.min_success_rate > 0:
                                    if p['success'] >= self.min_success_rate * (p['success'] + p['fail']):
                                        res.append(p['addr'])
                                    elif self.min_count > 0 and len(res) < self.min_count:
                                        res.append(p['addr'])
                                    else:
                                        break
                                else:
                                    res.append(p['addr'])
                            self.proxies = res
        except Exception:
            log.warning("Error occurred when get proxy list", exc_info=True)
Project: freehp    Author: jadbin
def _update_proxy_list(self):
        try:
            self.params['detail'] = ''
            async with aiohttp.ClientSession(loop=self.loop) as session:
                with async_timeout.timeout(self.timeout, loop=self.loop):
                    async with session.request('GET',
                                               self.agent_addr,
                                               auth=self.auth,
                                               params=self.params) as resp:
                        body = await resp.read()
                        proxies = json.loads(body.decode('utf-8'))

                        self._remove_block()
                        t = time.time()
                        for p in proxies:
                            r = 0.8 * (p['success'] / (p['success'] + p['fail'] + 1.0))
                            proxy = PoolProxyInfo(p['addr'], t, base_rate=r)
                            self._add_new_proxy(proxy)
        except Exception:
            log.warning("Error occurred when get proxy list", exc_info=True)
Project: freehp    Author: jadbin
def check_proxy(self, addr):
        if not addr.startswith("http://"):
            proxy = "http://{0}".format(addr)
        else:
            proxy = addr
        try:
            async with aiohttp.ClientSession(loop=self._loop) as session:
                with async_timeout.timeout(self._timeout, loop=self._loop):
                    seed = str(random.randint(0, 99999999))
                    url = "http://httpbin.org/get?seed={}".format(seed)
                    async with session.request("GET", url, proxy=proxy) as resp:
                        body = await resp.read()
                        data = json.loads(body.decode('utf-8'))
                        if "args" not in data:
                            return False
                        args = data["args"]
                        if "seed" not in args or args["seed"] != seed:
                            return False
        except Exception:
            return False
        log.debug("Proxy {} is OK".format(addr))
        return True
Project: freehp    Author: jadbin
def check_proxy(self, addr):
        if not addr.startswith("http://"):
            proxy = "http://{0}".format(addr)
        else:
            proxy = addr
        try:
            async with aiohttp.ClientSession(loop=self._loop) as session:
                with async_timeout.timeout(self._timeout, loop=self._loop):
                    async with session.request("GET", self._url, proxy=proxy) as resp:
                        url = str(resp.url)
                        if not self.match_status(self._http_status, resp.status):
                            return False
                        if self._url_match and not self._url_match.search(url):
                            return False
                        body = await resp.read()
                        if self._body_match and not self._body_match.search(body):
                            return False
        except Exception:
            return False
        return True
Project: freehp    Author: jadbin
def _update_proxy(self, urls):
        for u in urls:
            retry_cnt = 3
            while retry_cnt > 0:
                retry_cnt -= 1
                try:
                    async with aiohttp.ClientSession(loop=self._loop) as session:
                        with async_timeout.timeout(self._timeout, loop=self._loop):
                            async with session.request("GET", u, headers=self._headers) as resp:
                                url = str(resp.url)
                                body = await resp.read()
                except Exception as e:
                    log.info("{} error occurred when update proxy on url={}: {}".format(type(e), u, e))
                else:
                    retry_cnt = 0
                    addr_list = self._proxy_finder.find_proxy(url, body)
                    log.debug("Find {} proxies on the page '{}'".format(len(addr_list), u))
                    if addr_list:
                        await self._callback(*addr_list)
            await asyncio.sleep(self._sleep_time, loop=self._loop)
Project: aioping    Author: stellarbit
def verbose_ping(dest_addr, timeout=2, count=3):
    """
    Send >count< ping to >dest_addr< with the given >timeout< and display
    the result.
    :param dest_addr:
    :param timeout:
    :param count:
    """
    for i in range(count):
        try:
            delay = await ping(dest_addr, timeout)
        except Exception as e:
            print("%s failed: %s" % (dest_addr, str(e)))
            break

        if delay is None:
            print('%s timed out after %ss' % (dest_addr, timeout))
        else:
            delay *= 1000
            print("%s get ping in %0.4fms" % (dest_addr, delay))

    print()
Project: aiothrift    Author: ryanwang520
def create_connection(service, address=('127.0.0.1', 6000), *,
                      protocol_cls=TBinaryProtocol, timeout=None, loop=None, **kw):
    """Create a thrift connection.
    This function is a :ref:`coroutine <coroutine>`.

    Open a connection to the thrift server by address argument.

    :param service: a thrift service object
    :param address: a (host, port) tuple
    :param protocol_cls: protocol type, default is :class:`TBinaryProtocol`
    :param timeout: if specified, would raise `asyncio.TimeoutError` if an rpc call takes longer than `timeout`
    :param loop: :class:`Eventloop <asyncio.AbstractEventLoop>` instance, if not specified, default loop is used.
    :param kw: params relayed to asyncio.open_connection()
    :return: newly created :class:`ThriftConnection` instance.
    """
    host, port = address
    reader, writer = yield from asyncio.open_connection(
        host, port, loop=loop, **kw)
    iprotocol = protocol_cls(reader)
    oprotocol = protocol_cls(writer)

    return ThriftConnection(service, iprot=iprotocol, oprot=oprotocol,
                            address=address, loop=loop, timeout=timeout)
Project: aiothrift    Author: ryanwang520
def __call__(self, reader, writer):
        iproto = self.protocol_cls(reader)
        oproto = self.protocol_cls(writer)
        while not reader.at_eof():
            try:
                with async_timeout.timeout(self.timeout):
                    yield from self.processor.process(iproto, oproto)
            except ConnectionError:
                logger.debug('client has closed the connection')
                writer.close()
            except asyncio.TimeoutError:
                logger.debug('timeout when processing the client request')
                writer.close()
            except asyncio.IncompleteReadError:
                logger.debug('client has closed the connection')
                writer.close()
            except Exception:
                # app exception
                logger.exception('unhandled app exception')
                writer.close()
        writer.close()
Project: retr    Author: aikipooh
def __init__(self, pp, headers={}, proxy_headers={},
                 max_retries=0, timeout=60, ca_certs=None,
                 discard_timeout=False):
        '''pp - proxypool to use
headers - the headers to use in the underlying requests.Session
max_retries and timeout - the same as in ClientSession.

discard_timeout set to True will discard timed out proxies. Should have some sort of refresh, or we'll run out of proxies
'''
        self.pp = pp # Proxypool
        self.p = None
        self.headers=headers # To use when downloading
        self.proxy_headers=proxy_headers # To use when downloading
        self.max_retries=max_retries
        self.timeout=timeout
        self.discard_timeout=discard_timeout
        self.ca_certs=ca_certs
        self.s=None
Project: hass-config    Author: postlund
def get_url(hass, url):
    websession = async_get_clientsession(hass)
    request = None
    try:
        with async_timeout.timeout(10, loop=hass.loop):
            request = yield from websession.get(url)

            if request.status != 200:
                _LOGGER.error("Error %d on load url %s",
                              request.status, request.url)
                return None

            return (yield from request.read())

    except (asyncio.TimeoutError, aiohttp.errors.ClientError):
        _LOGGER.error('Timeout downloading url.')

    finally:
        if request is not None:
            yield from request.release()

    return None
Project: Charlie    Author: nxintech
def __init__(self, loop=None,
                 host='127.0.0.1', port=80, request_timeout=10,
                 polling_timeout=30, polling_interval=5):
        """
        :param loop: event loop
        :param host: API Server host
        :param port: API Server port
        :param request_timeout: HTTP request timeout
        :param polling_timeout: Async API polling timeout
        :param polling_interval: Async API polling interval
        """
        super().__init__()
        self.loop = loop
        self._host = host
        self._port = port
        self._request_timeout = request_timeout
        self._polling_timeout = polling_timeout
        self._polling_interval = polling_interval

        self.session = None
        self._conn = aiohttp.TCPConnector(
            verify_ssl=False, limit=50, use_dns_cache=True)
Project: Charlie    Author: nxintech
def poll(self, action, body):
        if not action.NEED_POLL:
            return raise_error(202, body)

        location = body['location']

        with async_timeout.timeout(self._polling_timeout):
            count = 0
            while True:
                status, body = await self._do_request('GET', location)
                if status in [200, 503]:
                    return {"value": json.loads(body)}

                count += 1
                await asyncio.sleep(self._polling_interval)

        # polling timeout
        return raise_error(500, "Location {} polling timeout, count: {}".format(location, count))
Project: pydest    Author: jgayfer
def _download_file(self, url, name):
        """Async file download

        Args:
            url (str):
                The URL from which to download the file
            name (str):
                The name to give to the downloaded file
        """
        with async_timeout.timeout(10):
            async with self.api.session.get(url) as response:
                filename = os.path.basename(name)
                with open(filename, 'wb') as f_handle:
                    while True:
                        chunk = await response.content.read(1024)
                        if not chunk:
                            break
                        f_handle.write(chunk)
                return await response.release()
Project: jussi    Author: steemit
def fetch_http(sanic_http_request: HTTPRequest,
                     jsonrpc_request: SingleJsonRpcRequest,
                     url: str,
                     batch_index: int) -> SingleJsonRpcResponse:

    session = sanic_http_request.app.config.aiohttp['session']
    args = sanic_http_request.app.config.args
    headers = {}
    headers['x-amzn-trace_id'] = sanic_http_request.headers.get('x-amzn-trace-id')
    headers['x-jussi-request-id'] = sanic_http_request.headers.get('x-jussi-request-id')

    upstream_request = {k: jsonrpc_request[k] for k in
                        {'jsonrpc', 'method', 'params'} if k in jsonrpc_request}
    upstream_request['id'] = sanic_http_request['request_id_int'] + batch_index

    with async_timeout.timeout(args.upstream_http_timeout):
        async with session.post(url, json=upstream_request, headers=headers) as resp:
            upstream_response = await resp.json()

        del upstream_response['id']
        if 'id' in jsonrpc_request:
            upstream_response['id'] = jsonrpc_request['id']
        return upstream_response
# pylint: enable=no-value-for-parameter
Project: arq    Author: samuelcolvin
def finish(self, timeout=None):
        """
        Cancel all pending tasks and optionally re-enqueue jobs which haven't finished after the timeout.

        :param timeout: how long to wait for tasks to finish, defaults to ``shutdown_delay``
        """
        timeout = timeout or self.shutdown_delay
        self.running = False
        cancelled_tasks = 0
        if self.pending_tasks:
            with await self._finish_lock:
                work_logger.info('drain waiting %0.1fs for %d tasks to finish', timeout, len(self.pending_tasks))
                _, pending = await asyncio.wait(self.pending_tasks, timeout=timeout, loop=self.loop)
                if pending:
                    pipe = self.redis.pipeline()
                    for task in pending:
                        if task.re_enqueue:
                            pipe.rpush(task.job.raw_queue, task.job.raw_data)
                        task.cancel()
                        cancelled_tasks += 1
                    if pipe._results:
                        await pipe.execute()
                self.pending_tasks = set()
        return cancelled_tasks
Project: homeassistant    Author: NAStools
def _send(self, url):
        """Send the url to the Hook API."""
        response = None
        try:
            _LOGGER.debug("Sending: %s", url)
            websession = async_get_clientsession(self.hass)
            with async_timeout.timeout(TIMEOUT, loop=self.hass.loop):
                response = yield from websession.get(
                    url, params={"token": self._token})
            data = yield from response.json()

        except (asyncio.TimeoutError,
                aiohttp.errors.ClientError,
                aiohttp.errors.ClientDisconnectedError) as error:
            _LOGGER.error("Failed setting state: %s", error)
            return False

        finally:
            if response is not None:
                yield from response.release()

        _LOGGER.debug("Got: %s", data)
        return data['return_value'] == '1'
Project: homeassistant    Author: NAStools
def async_camera_image(self):
        """Return a still image response from the camera."""
        image_url = SYNO_API_URL.format(
            self._synology_url, WEBAPI_PATH, self._camera_path)

        image_payload = {
            'api': CAMERA_API,
            'method': 'GetSnapshot',
            'version': '1',
            'cameraId': self._camera_id
        }
        try:
            with async_timeout.timeout(TIMEOUT, loop=self.hass.loop):
                response = yield from self._websession.get(
                    image_url,
                    params=image_payload
                )
        except (asyncio.TimeoutError, aiohttp.errors.ClientError):
            _LOGGER.exception("Error on %s", image_url)
            return None

        image = yield from response.read()
        yield from response.release()

        return image
Project: backend.ai-client-py    Author: lablup
def asend(self, *, sess=None, timeout=10.0):
        '''
        Sends the request to the server.

        This method is a coroutine.
        '''
        assert self.method in self._allowed_methods
        if sess is None:
            sess = aiohttp.ClientSession()
        else:
            assert isinstance(sess, aiohttp.ClientSession)
        with sess:
            if self.content_type == 'multipart/form-data':
                with aiohttp.MultipartWriter('mixed') as mpwriter:
                    for file in self._content:
                        part = mpwriter.append(file.file)
                        part.set_content_disposition('attachment',
                                                     filename=file.filename)
                data = mpwriter
            else:
                data = self._content
            self._sign()
            reqfunc = getattr(sess, self.method.lower())
            try:
                with _timeout(timeout):
                    resp = await reqfunc(self.build_url(),
                                         data=data,
                                         headers=self.headers)
                    async with resp:
                        body = await resp.read()
                        return Response(resp.status, resp.reason, body,
                                        resp.content_type,
                                        len(body))
            except Exception as e:
                msg = 'Request to the API endpoint has failed.\n' \
                      'Check your network connection and/or the server status.'
                raise BackendClientError(msg) from e
Project: Software-Architecture-with-Python    Author: PacktPublishing
def fetch_page(session, url, timeout=60):
    """ Asynchronous URL fetcher """

    with async_timeout.timeout(timeout):
        response = session.get(url)
        return response
Project: Software-Architecture-with-Python    Author: PacktPublishing
def fetch_page(session, url, timeout=60):
    """ Asynchronous URL fetcher """

    with async_timeout.timeout(timeout):
        response = session.get(url)
        return response
Project: home-assistant-dlna-dmr    Author: StevenLooman
def async_http_request(self, method, url, headers=None, body=None):
        websession = async_get_clientsession(self.hass)
        try:
            with async_timeout.timeout(5, loop=self.hass.loop):
                response = yield from websession.request(method, url, headers=headers, data=body)
                response_body = yield from response.text()
        except (asyncio.TimeoutError, aiohttp.ClientError) as ex:
            _LOGGER.debug("Error in %s.async_call_action(): %s", self, ex)
            raise

        return response.status, response.headers, response_body
Project: home-assistant-dlna-dmr    Author: StevenLooman
def async_notify_listeners(self, **kwargs):
        property = str(self.__class__.__name__)
        value = str(self.value)

        LOGGER.debug('async_notify_listeners(): %s -> %s', property, value)

        event_base = '<Event xmlns="urn:schemas-upnp-org:metadata-1-0/RCS/">' \
                     '<InstanceID val="0" />' \
                     '</Event>'
        el_event = ET.fromstring(event_base)
        el_instance_id = el_event.find('.//rcs:InstanceID', NS)
        args = kwargs.copy()
        args.update({'val': value})
        ET.SubElement(el_instance_id, 'rcs:' + property, **args)

        notify_base = '<?xml version="1.0" encoding="utf-8"?>' \
                      '<e:propertyset xmlns:e="urn:schemas-upnp-org:event-1-0">' \
                      '<e:property>' \
                      '<LastChange />' \
                      '</e:property>' \
                      '</e:propertyset>'
        el_notify = ET.fromstring(notify_base)
        el_last_change = el_notify.find('.//LastChange', NS)
        el_last_change.text = ET.tostring(el_event).decode('utf-8')

        global SUBSCRIBED_CLIENTS
        service_name = self.SERVICE_NAME
        for sid, url in SUBSCRIBED_CLIENTS[service_name].items():
            headers = {
                'SID': sid
            }
            with ClientSession(loop=asyncio.get_event_loop()) as session:
                with async_timeout.timeout(10):
                    data = ET.tostring(el_notify)
                    LOGGER.debug('Calling: %s', url)
                    yield from session.request('NOTIFY', url, headers=headers, data=data)
Project: ha-ffmpeg    Author: pvizeli
def run_test(self, input_source, timeout=15):
        """Start a test and give a TRUE or FALSE."""
        command = [
            "-frames:v",
            "1",
            "-frames:a",
            "1",
        ]

        # Run a short test with input
        is_open = yield from self.open(
            cmd=command, input_source=input_source, stderr_pipe=True,
            output=None)

        # error after open?
        if not is_open:
            return False

        try:
            with async_timeout.timeout(timeout, loop=self._loop):
                out, error = yield from self._proc.communicate()

        except (OSError, asyncio.TimeoutError, ValueError):
            _LOGGER.warning("Timeout/Error reading test.")
            self._proc.kill()
            return False

        # check error code
        if self._proc.returncode == 0:
            _LOGGER.debug("STD: %s / ERR: %s", out, error)
            return True

        # error state
        _LOGGER.error("ReturnCode: %i / STD: %s / ERR: %s",
                      self._proc.returncode, out, error)
        return False
Project: ha-ffmpeg    Author: pvizeli
def get_image(self, input_source, output_format=IMAGE_JPEG, extra_cmd=None,
                  timeout=15):
        """Open FFmpeg process as capture 1 frame."""
        command = [
            "-an",
            "-frames:v",
            "1",
            "-c:v",
            output_format,
        ]

        # open input for capture 1 frame
        is_open = yield from self.open(
            cmd=command, input_source=input_source, output="-f image2pipe -",
            extra_cmd=extra_cmd)

        # error after open?
        if not is_open:
            _LOGGER.warning("Error starting FFmpeg.")
            return None

        # read image
        try:
            with async_timeout.timeout(timeout, loop=self._loop):
                image, _ = yield from self._proc.communicate()

            return image

        except (asyncio.TimeoutError, ValueError):
            _LOGGER.warning("Timeout reading image.")
            self._proc.kill()
            return None
Project: ha-ffmpeg    Author: pvizeli
def close(self, timeout=5):
        """Stop a ffmpeg instance.

        Return a coroutine
        """
        if self._read_task is not None and not self._read_task.cancelled():
            self._read_task.cancel()

        return super().close(timeout)
Project: async-timeout    Author: aio-libs
def test_async_timeout(loop):
    with pytest.raises(asyncio.TimeoutError):
        async with timeout(0.01, loop=loop) as cm:
            await asyncio.sleep(10, loop=loop)
    assert cm.expired
Project: async-timeout    Author: aio-libs
def test_async_no_timeout(loop):
    async with timeout(1, loop=loop) as cm:
        await asyncio.sleep(0, loop=loop)
    assert not cm.expired
Project: async-timeout    Author: aio-libs
def test_async_zero(loop):
    with pytest.raises(asyncio.TimeoutError):
        async with timeout(0, loop=loop) as cm:
            await asyncio.sleep(10, loop=loop)
    assert cm.expired
Project: async-timeout    Author: aio-libs
def test_async_zero_coro_not_started(loop):
    coro_started = False

    async def coro():
        nonlocal coro_started
        coro_started = True

    with pytest.raises(asyncio.TimeoutError):
        async with timeout(0, loop=loop) as cm:
            await asyncio.sleep(0, loop=loop)
            await coro()

    assert cm.expired
    assert coro_started is False
Project: async-timeout    Author: aio-libs
def test_timeout_global_loop(loop):
    asyncio.set_event_loop(loop)

    @asyncio.coroutine
    def run():
        with timeout(10) as t:
            yield from asyncio.sleep(0.01)
            assert t._loop is loop

    loop.run_until_complete(run())
Project: async-timeout    Author: aio-libs
def test_timeout_disable(loop):
    @asyncio.coroutine
    def long_running_task():
        yield from asyncio.sleep(0.1, loop=loop)
        return 'done'

    t0 = loop.time()
    with timeout(None, loop=loop):
        resp = yield from long_running_task()
    assert resp == 'done'
    dt = loop.time() - t0
    assert 0.09 < dt < 0.13, dt
Project: async-timeout    Author: aio-libs
def test_timeout_is_none_no_task(loop):
    with timeout(None, loop=loop) as cm:
        assert cm._task is None
Project: async-timeout    Author: aio-libs
def test_timeout_enable_zero(loop):
    with pytest.raises(asyncio.TimeoutError):
        with timeout(0, loop=loop) as cm:
            yield from asyncio.sleep(0.1, loop=loop)

    assert cm.expired
Project: async-timeout    Author: aio-libs
def test_timeout_enable_zero_coro_not_started(loop):
    coro_started = False

    @asyncio.coroutine
    def coro():
        nonlocal coro_started
        coro_started = True

    with pytest.raises(asyncio.TimeoutError):
        with timeout(0, loop=loop) as cm:
            yield from asyncio.sleep(0, loop=loop)
            yield from coro()

    assert cm.expired
    assert coro_started is False
Project: async-timeout    Author: aio-libs
def test_timeout_canceled_error_is_not_converted_to_timeout(loop):
    yield from asyncio.sleep(0, loop=loop)
    with pytest.raises(asyncio.CancelledError):
        with timeout(0.001, loop=loop):
            raise asyncio.CancelledError