Python aiohttp module: TCPConnector() code examples

We have extracted the following 50 code examples from open-source Python projects to illustrate how to use aiohttp.TCPConnector().

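Most of the examples below follow the same basic pattern: build a TCPConnector (often with a connection limit or a custom SSL context), hand it to a ClientSession, and make requests through that session. As a quick orientation, here is a minimal sketch of that pattern; the URL and the connection limit are placeholder values, and note that current aiohttp releases spell the SSL toggle ssl=False rather than the verify_ssl=False seen in many of the older snippets.

import asyncio
import aiohttp

async def fetch_example(url='https://example.com'):
    # placeholder URL; limit=10 caps the pool at ten concurrent connections
    connector = aiohttp.TCPConnector(limit=10)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as response:
            return await response.text()

if __name__ == '__main__':
    print(asyncio.run(fetch_example()))
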
Project: rci    Author: seecloud    | project source | file source
def __init__(self, auth_url, username, tenant, loop=None, log=None,
                 cafile=None, token_renew_delay=3300):
        self.auth_url = auth_url
        self.username = username
        self.tenant = tenant
        self.log = log
        self.token_renew_delay = token_renew_delay
        self.loop = loop or asyncio.get_event_loop()
        self.headers = {"content-type": "application/json",
                        "accept": "application/json"}
        if cafile:
            sslcontext = ssl.create_default_context(cafile=cafile)
            conn = aiohttp.TCPConnector(ssl_context=sslcontext)
            self.session = aiohttp.ClientSession(connector=conn, loop=self.loop)
        else:
            self.session = aiohttp.ClientSession(loop=self.loop)
Project: aiovk    Author: Fahreeve    | project source | file source
def test_auth_with_valid_data(self):
        s = TestAuthSession(login=USER_LOGIN, password=USER_PASSWORD, app_id=APP_ID)
        s.driver.session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False),
                                                 response_class=CustomClientResponse)
        yield from s.authorize()
        params = {'client_id': APP_ID, 'display': 'page', 'redirect_uri': REDIRECT_URI, 'response_type': 'code'}
        with aiohttp.Timeout(10):
            response = yield from s.driver.session.get("https://oauth.vk.com/authorize",
                                                       params=params, allow_redirects=True)
        s.close()
        code = response.url.query.get('code')
        self.assertIsNotNone(code)

        s = AuthorizationCodeSession(APP_ID, APP_SECRET, REDIRECT_URI, code)
        yield from s.authorize()
        s.close()
        self.assertIsNotNone(s.access_token)
Project: scriptworker    Author: mozilla-releng    | project source | file source
def main():
    """Scriptworker entry point: get everything set up, then enter the main loop."""
    context, credentials = get_context_from_cmdln(sys.argv[1:])
    log.info("Scriptworker starting up at {} UTC".format(arrow.utcnow().format()))
    cleanup(context)
    conn = aiohttp.TCPConnector(limit=context.config['aiohttp_max_connections'])
    loop = asyncio.get_event_loop()
    with aiohttp.ClientSession(connector=conn) as session:
        context.session = session
        context.credentials = credentials
        while True:
            try:
                loop.run_until_complete(async_main(context))
            except Exception:
                log.critical("Fatal exception", exc_info=1)
                raise
Project: python_api    Author: DomainTools    | project source | file source
async def __awaitable__(self):
        if self._data is None:
            with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=self.api.verify_ssl)) as session:
                wait_time = self._wait_time()
                if wait_time is None and self.api:
                    try:
                        await self._make_async_request(session)
                    except ServiceUnavailableException:
                        await asyncio.sleep(60)
                        self._wait_time()
                        await self._make_async_request(session)
                else:
                    await asyncio.sleep(wait_time)
                    await self._make_async_request(session)

        return self
Project: hangoutsbot    Author: das7pad    | project source | file source
def _send_to_external_chat(self, bot, event, config):
        if event.from_bot:
            # don't send my own messages
            return

        conversation_id = event.conv_id
        conversation_text = event.text

        user_id = event.user_id

        url = config["HUBOT_URL"] + conversation_id
        payload = {"from" : str(user_id.chat_id), "message" : conversation_text}
        headers = {'content-type': 'application/json'}

        connector = aiohttp.TCPConnector(verify_ssl=False)
        asyncio.ensure_future(
            aiohttp.request('post', url, data=json.dumps(payload),
                            headers=headers, connector=connector)
        )
Project: ProxyPool    Author: Python3WebSpider    | project source | file source
async def test_single_proxy(self, proxy):
        """
        Test whether a single proxy is usable.
        :param proxy:
        :return:
        """
        conn = aiohttp.TCPConnector(verify_ssl=False)
        async with aiohttp.ClientSession(connector=conn) as session:
            try:
                if isinstance(proxy, bytes):
                    proxy = proxy.decode('utf-8')
                real_proxy = 'http://' + proxy
                print('Testing', proxy)
                async with session.get(TEST_URL, proxy=real_proxy, timeout=15, allow_redirects=False) as response:
                    if response.status in VALID_STATUS_CODES:
                        self.redis.max(proxy)
                        print('Proxy is valid', proxy)
                    else:
                        self.redis.decrease(proxy)
                        print('Invalid response status code', response.status, 'IP', proxy)
            except (ClientError, aiohttp.client_exceptions.ClientConnectorError, asyncio.TimeoutError, AttributeError):
                self.redis.decrease(proxy)
                print('Proxy request failed', proxy)
Project: aiovault    Author: terrycain    | project source | file source
def __init__(self, vault_url: str = VAUTL_URL, token: Optional[str] = None, verify: bool = True, timeout: int = 10,
                 session: Optional[aiohttp.ClientSession]=None, loop: asyncio.AbstractEventLoop=None):
        self.loop = loop
        if loop is None:
            self.loop = asyncio.get_event_loop()

        self.vault_url = vault_url.rstrip('/')

        self.session = session
        if session is None:
            if not verify:
                connector = aiohttp.TCPConnector(verify_ssl=False, loop=self.loop)
            else:
                connector = None

            self.session = aiohttp.ClientSession(connector=connector, read_timeout=timeout, conn_timeout=timeout, loop=self.loop)  # pylint: disable=unexpected-keyword-arg

        self._auth_token = token
        self.timeout = timeout
Project: async-fetcher    Author: night-crawler    | project source | file source
def get_tcp_connector(self) -> aiohttp.TCPConnector:
        if self._connector_owner:
            # return valid connector
            if self._tcp_connector and not self._tcp_connector.closed:
                return self._tcp_connector
            # create ssl context if no valid connector is present
            ssl_context = ssl.create_default_context(cafile=self.cafile)

            # memoize tcp_connector for reuse
            # noinspection PyAttributeOutsideInit
            self._tcp_connector = aiohttp.TCPConnector(
                loop=self.loop,
                ssl_context=ssl_context,
                keepalive_timeout=self.keepalive_timeout,
            )
            return self._tcp_connector

        return self._tcp_connector
Project: powershift    Author: getwarped    | project source | file source
async def _async_request_(self, method, url, verify, params, headers, data):
        connector = aiohttp.TCPConnector(verify_ssl=verify)
        async with aiohttp.ClientSession(connector=connector) as session:
            if data is not None:
                async with getattr(session, method)(url, params=params,
                        headers=headers, data=data) as response:
                    data = await response.read()
                    result = resources.loads(data.decode('UTF-8'))
            else:
                async with getattr(session, method)(url, params=params,
                        headers=headers) as response:
                    data = await response.read()
                    result = resources.loads(data.decode('UTF-8'))
        if result.__kind__ != getattr(self, '_%s_type_' % method):
            raise Exception(str(result))
        return result
Project: powershift    Author: getwarped    | project source | file source
async def __aenter__(self):
        connector = aiohttp.TCPConnector(verify_ssl=self._verify)

        self._session_cm = aiohttp.ClientSession(connector=connector)
        self._session = await self._session_cm.__aenter__()

        if self._data is not None:
            self._response_cm = getattr(self._session, self._method)(
                    self._url, params=self._params, headers=self._headers,
                    data=self._data)
        else:
            self._response_cm = getattr(self._session, self._method)(
                    self._url, params=self._params, headers=self._headers)

        self._response = await self._response_cm.__aenter__()

        if self._response.status != 200:
            raise Exception(await self._response.text())

        return WatcherSession(self)
Project: tictax    Author: bede    | project source | file source
async def oc_classify(records, one_codex_api_key, progress=False, stdout=False):
    oc_auth = aiohttp.BasicAuth(one_codex_api_key)
    conn = aiohttp.TCPConnector(limit=10)
    with aiohttp.ClientSession(auth=oc_auth, connector=conn) as oc_session:
        with aiohttp.ClientSession(connector=conn) as ebi_session:
            tasks = [classify_taxify(oc_session, ebi_session, r.id, str(r.seq)) for r in records]
            # No async generators in 3.5... :'(
            # return [await f for f in tqdm.tqdm(asyncio.as_completed(tasks), total=len(tasks))]
            records = []
            for f in tqdm.tqdm(asyncio.as_completed(tasks),
                               disable=not progress,
                               total=len(tasks)):
                response = await f
                record = build_record(response[0], response[1])
                if stdout:
                    print(record.format('fasta'), end='')
                records.append(record)
            return records


# --------------------------------------------------------------------------------------------------
Project: checkip    Author: china-shang    | project source | file source
async def Server(self):
        self.Running = asyncio.Future()
        self.startindexIndex = self.ipfactory.getIndex()
        context = ssl.create_default_context()
        context.check_hostname = False

        if(not self.scan):
            self.loop.create_task(self.SaveIp())

        async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=context, force_close=True),
                                         conn_timeout=0.7, read_timeout=0.8) as self.session:
            self.start_time = time.time()
            # print("create session Success")
            # print("startindex Scan Ip")
            while self._running:
                if self.now < self.max:
                    self.now += 1
                    # print("create task at", self.now)
                    # print("startindex Task Sum: ", self.now)
                    self.loop.create_task(self.worker())
                    if self.now == self.max:
                        self.future = asyncio.Future()
                else:
                    await self.future
Project: mlimages    Author: icoxfog417    | project source | file source
def create_session(self, loop):
        conn = None

        if self.proxy and self.proxy_user:
            conn = aiohttp.ProxyConnector(
                loop=loop,
                limit=self.parallel,
                proxy=self.proxy,
                proxy_auth=aiohttp.BasicAuth(self.proxy_user, self.proxy_password)
            )
        elif self.proxy:
            conn = aiohttp.ProxyConnector(loop=loop, limit=self.parallel, proxy=self.proxy)
        else:
            conn = aiohttp.TCPConnector(loop=loop, limit=self.parallel)

        session = aiohttp.ClientSession(connector=conn)
        return session
Project: goblin-legacy    Author: ZEROFAIL    | project source | file source
def _get_connector(ssl_context):
    if _scheme in SECURE_SCHEMES:
        if ssl_context is None:
            raise ValueError("Please pass ssl_context for secure protocol")

        if _client_module == AIOHTTP_CLIENT_MODULE:
            import aiohttp
            connector = aiohttp.TCPConnector(ssl_context=ssl_context,
                                             loop=loop)

        elif _client_module == TORNADO_CLIENT_MODULE:
            from functools import partial
            from tornado import httpclient
            connector = partial(
                httpclient.HTTPRequest, ssl_options=ssl_context)
        else:
            raise ValueError("Unknown client module")
    elif _scheme in INSECURE_SCHEMES:
        connector = None
    else:
        raise ValueError("Unknown protocol")
    return connector
Project: zenchmarks    Author: squeaky-pl    | project source | file source
def ws_connect(url, *, protocols=(), timeout=10.0, connector=None, auth=None,
               ws_response_class=ClientWebSocketResponse, autoclose=True,
               autoping=True, loop=None, origin=None, headers=None):

    warnings.warn("Use ClientSession().ws_connect() instead",
                  DeprecationWarning)
    if loop is None:
        loop = asyncio.get_event_loop()

    if connector is None:
        connector = aiohttp.TCPConnector(loop=loop, force_close=True)

    session = aiohttp.ClientSession(loop=loop, connector=connector, auth=auth,
                                    ws_response_class=ws_response_class,
                                    headers=headers)

    return _DetachedWSRequestContextManager(
        session._ws_connect(url,
                            protocols=protocols,
                            timeout=timeout,
                            autoclose=autoclose,
                            autoping=autoping,
                            origin=origin),
        session=session)
Project: scripts    Author: vulnersCom    | project source | file source
def wait_with_progress(urlList, concurency = 30, timeout = 120, rawResults = False, cloudflare = False, headers = None):
    sem = asyncio.Semaphore(concurency)
    # Client session worker
    headers = headers or {}
    headers.update({
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36 vulners.com/bot'})
    if cloudflare:
        sessionClient = CloudflareScraper
    else:
        sessionClient = aiohttp.ClientSession
    urlToResultDict = {}
    with sessionClient(connector=aiohttp.TCPConnector(verify_ssl=False), headers=headers) as session:
        coros = [parseUrl(url = d, semaphore = sem, session = session, timeout = timeout, rawResults=rawResults) for d in urlList]
        for f in tqdm.tqdm(asyncio.as_completed(coros), total=len(coros)):
            result = yield from f
            urlToResultDict.update(result)
    return urlToResultDict
Project: PaddoCogs    Author: PaddoInWonderland    | project source | file source
async def _youtube(self, context, *, query: str):
        """Search on Youtube"""
        try:
            url = 'https://www.youtube.com/results?'
            payload = {'search_query': ''.join(query)}
            headers = {'user-agent': 'Red-cog/1.0'}
            conn = aiohttp.TCPConnector()
            session = aiohttp.ClientSession(connector=conn)
            async with session.get(url, params=payload, headers=headers) as r:
                result = await r.text()
            session.close()
            yt_find = re.findall(r'href=\"\/watch\?v=(.{11})', result)
            url = 'https://www.youtube.com/watch?v={}'.format(yt_find[0])
            await self.bot.say(url)
        except Exception as e:
            message = 'Something went terribly wrong! [{}]'.format(e)
            await self.bot.say(message)
Project: talkbot    Author: nimnull    | project source | file source
async def on_startup(app):
    connector = aiohttp.TCPConnector(limit=5, use_dns_cache=True, loop=app.loop)
    session = aiohttp.ClientSession(connector=connector, raise_for_status=True)
    bot = TelegramBot(app['config'].token, session)
    image_model = fit_model(app['config'].sample_df)

    def config_injections(binder):
        # injection bindings
        binder.bind(Config, app['config'])
        binder.bind(TelegramBot, bot)
        binder.bind(GradientBoostingClassifier, image_model)
        binder.bind_to_constructor(AsyncIOMotorDatabase, init_database)


    try:
        inject.configure(config_injections)
    except inject.InjectorException:
        log.error("Injector already configured", exc_info=True)

    setup_logging(log)

    app.loop.create_task(bot.set_hook())
Project: Charlie    Author: nxintech    | project source | file source
def __init__(self, loop=None,
                 host='127.0.0.1', port=80, request_timeout=10,
                 polling_timeout=30, polling_interval=5):
        """
        :param loop: event loop
        :param host: API Server host
        :param port: API Server port
        :param request_timeout: HTTP request timeout
        :param polling_timeout: Async API polling timeout
        :param polling_interval: Async API polling interval
        """
        super().__init__()
        self.loop = loop
        self._host = host
        self._port = port
        self._request_timeout = request_timeout
        self._polling_timeout = polling_timeout
        self._polling_interval = polling_interval

        self.session = None
        self._conn = aiohttp.TCPConnector(
            verify_ssl=False, limit=50, use_dns_cache=True)
Project: PrivateSolution    Author: AdaJass    | project source | file source
def fetchData(url, callback = pd.Xm, params=None):
    # set the request URL and parameters here, or pass them in from outside.


    con = aiohttp.TCPConnector(limit=config.REQ_AMOUNTS)    
    s = aiohttp.ClientSession(headers = config.HEADERS, connector=con)
    # requests made through s.* keep the connection alive automatically,
    # so you can make multiple requests here without closing the connection
    # while staying on the same domain.
    #i.e. 
    #await s.get('***/page1')
    #await s.get('***/page2')
    ########################################################################
    r=yield from s.get(url, params = params)
    # here the connection is closed automatically.
    data = yield from r.text(encoding='utf-8')    
    yield from callback(data)
    r.close()
Project: plumeria    Author: sk89q    | project source | file source
async def render(url, width=1024, max_height=4096, trim_image=False):
    with DefaultClientSession(connector=TCPConnector()) as session:
        async with session.request(method="post", url=render_url(), data=json.dumps({
            "url": url,
            "key": api_key(),
            "width": str(width),
            "max_height": str(max_height),
            "trim": "true" if trim_image else "false",
        })) as r:
            if r.status == 200:
                buffer = io.BytesIO()
                buffer.write(await r.read())
                return Response("", attachments=[MemoryAttachment(buffer, "screenshot.jpg", "image/jpeg")])
            else:
                try:
                    data = await r.json()
                    raise CommandError("error occurred: {}".format(data['error']))
                except JSONDecodeError:
                    raise CommandError("error occurred with status code {}".format(r.status))
Project: okami    Author: ambrozic    | project source | file source
async def process(self, request):
        """
        Processes passed :class:`Request <okami.api.Request>` object.
        Exceptions should be caught otherwise entire pipeline terminates.

        :param request: :class:`Request <okami.api.Request>` object
        :returns: altered passed :class:`Request <okami.api.Request>` object
        """
        if not self.controller.session or self.controller.session.closed:
            try:
                self.controller.session = self.controller.spider.session()
            except NotImplementedError:
                connector = aiohttp.TCPConnector(
                    limit=settings.CONN_MAX_CONCURRENT_CONNECTIONS,
                    verify_ssl=settings.CONN_VERIFY_SSL,
                )
                self.controller.session = aiohttp.ClientSession(connector=connector)
        return request
Project: mach9    Author: silver-castle    | project source | file source
async def _local_request(self, method, uri, cookies=None, *args, **kwargs):
        import aiohttp
        if uri.startswith(('http:', 'https:', 'ftp:', 'ftps:', '//')):
            url = uri
        else:
            url = 'http://{host}:{port}{uri}'.format(
                host=HOST, port=PORT, uri=uri)

        log.info(url)
        conn = aiohttp.TCPConnector(verify_ssl=False)
        async with aiohttp.ClientSession(
                cookies=cookies, connector=conn) as session:
            async with getattr(
                    session, method.lower())(url, *args, **kwargs) as response:
                try:
                    response.text = await response.text()
                except UnicodeDecodeError as e:
                    response.text = None
                response.body = await response.read()
                return response
Project: instawow    Author: layday    | project source | file source
def __init__(self,
                 *,
                 config: Config,
                 loop: asyncio.BaseEventLoop=_init_loop(),
                 show_progress: bool=True):
        self.config = config
        self.show_progress = show_progress

        db_engine = create_engine(f'sqlite:///{config.config_dir/config.db_name}')
        ModelBase.metadata.create_all(db_engine)
        self.db = sessionmaker(bind=db_engine)()
        self.client = ClientSession(connector=TCPConnector(limit_per_host=10, loop=loop),
                                    headers={'User-Agent': _UA_STRING}, loop=loop)
        self.resolvers = {n: r(manager=self)
                          for n, r in BaseResolver.__members__.items()}
        self.runner = _Runner(self)

        self._loop = loop
        self._resolver_lock = asyncio.Lock(loop=loop)
        self._tpes = [ThreadPoolExecutor(max_workers=1), ThreadPoolExecutor(max_workers=1)]
Project: hips    Author: hipspy    | project source | file source
async def fetch_all_tiles_aiohttp(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyProperties,
                                  progress_bar: bool, n_parallel: int, timeout: float) -> List[HipsTile]:
    """Generator function to fetch HiPS tiles from a remote URL using aiohttp."""
    import aiohttp

    connector = aiohttp.TCPConnector(limit=n_parallel)
    async with aiohttp.ClientSession(connector=connector) as session:
        futures = []
        for meta in tile_metas:
            url = hips_survey.tile_url(meta)
            future = asyncio.ensure_future(fetch_tile_aiohttp(url, meta, session, timeout))
            futures.append(future)

        futures = asyncio.as_completed(futures)
        if progress_bar:
            from tqdm import tqdm
            futures = tqdm(futures, total=len(tile_metas), desc='Fetching tiles')

        tiles = []
        for future in futures:
            tiles.append(await future)

    return tiles
Project: homeassistant    Author: NAStools    | project source | file source
def test_create_clientsession_with_ssl_and_cookies(self):
        """Test create clientsession with ssl."""
        def _async_helper():
            return client.async_create_clientsession(
                self.hass,
                cookies={'bla': True}
            )

        session = run_callback_threadsafe(
            self.hass.loop,
            _async_helper,
        ).result()

        assert isinstance(
            session, aiohttp.ClientSession)
        assert isinstance(
            self.hass.data[client.DATA_CONNECTOR], aiohttp.TCPConnector)
Project: homeassistant    Author: NAStools    | project source | file source
def test_create_clientsession_without_ssl_and_cookies(self):
        """Test create clientsession without ssl."""
        def _async_helper():
            return client.async_create_clientsession(
                self.hass,
                False,
                cookies={'bla': True}
            )

        session = run_callback_threadsafe(
            self.hass.loop,
            _async_helper,
        ).result()

        assert isinstance(
            session, aiohttp.ClientSession)
        assert isinstance(
            self.hass.data[client.DATA_CONNECTOR_NOTVERIFY],
            aiohttp.TCPConnector)
Project: homeassistant    Author: NAStools    | project source | file source
def _async_get_connector(hass, verify_ssl=True):
    """Return the connector pool for aiohttp.

    This method must be run in the event loop.
    """
    if verify_ssl:
        if DATA_CONNECTOR not in hass.data:
            connector = aiohttp.TCPConnector(loop=hass.loop)
            hass.data[DATA_CONNECTOR] = connector

            _async_register_connector_shutdown(hass, connector)
        else:
            connector = hass.data[DATA_CONNECTOR]
    else:
        if DATA_CONNECTOR_NOTVERIFY not in hass.data:
            connector = aiohttp.TCPConnector(loop=hass.loop, verify_ssl=False)
            hass.data[DATA_CONNECTOR_NOTVERIFY] = connector

            _async_register_connector_shutdown(hass, connector)
        else:
            connector = hass.data[DATA_CONNECTOR_NOTVERIFY]

    return connector
Project: socketshark    Author: closeio    | project source | file source
async def http_post(shark, url, data):
    log = shark.log.bind(url=url)
    opts = shark.config['HTTP']
    if opts.get('ssl_cafile'):
        ssl_context = ssl.create_default_context(cafile=opts['ssl_cafile'])
    else:
        ssl_context = None
    conn = aiohttp.TCPConnector(ssl_context=ssl_context)
    async with aiohttp.ClientSession(connector=conn) as session:
        wait = opts['wait']
        for n in range(opts['tries']):
            if n > 0:
                await asyncio.sleep(wait)
            try:
                log.debug('http request', data=data)
                async with session.post(url, json=data,
                                        timeout=opts['timeout']) as resp:
                    if resp.status == 429:  # Too many requests.
                        wait = _get_rate_limit_wait(log, resp, opts)
                        continue
                    else:
                        wait = opts['wait']
                    resp.raise_for_status()
                    data = await resp.json()
                    log.debug('http response', data=data)
                    return data
            except aiohttp.ClientError:
                log.exception('unhandled exception in http_post')
            except asyncio.TimeoutError:
                log.exception('timeout in http_post')
        return {'status': 'error', 'error': c.ERR_SERVICE_UNAVAILABLE}
Project: hdx-data-freshness    Author: OCHA-DAP    | project source | file source
async def check_urls(urls, loop):
    tasks = list()

    conn = aiohttp.TCPConnector(limit=100, limit_per_host=2, loop=loop)
    async with aiohttp.ClientSession(connector=conn, read_timeout=300, conn_timeout=10, loop=loop) as session:
        for metadata in urls:
            task = fetch(metadata, session)
            tasks.append(task)
        responses = dict()
        for f in tqdm.tqdm(asyncio.as_completed(tasks), total=len(tasks)):
            resource_id, url, err, http_last_modified, hash, force_hash = await f
            responses[resource_id] = (url, err, http_last_modified, hash, force_hash)
        return responses
Project: peony-twitter    Author: odrling    | project source | file source
def fixture_medias(event_loop):
    if os.environ.get('FORCE_IPV4', False):
        connector = aiohttp.TCPConnector(family=socket.AF_INET)
    else:
        connector = aiohttp.TCPConnector()
    with aiohttp.ClientSession(loop=event_loop,
                               connector=connector) as session:
        task = asyncio.gather(*[media.download(session=session)
                                for media in medias.values()])
        event_loop.run_until_complete(task)

    return medias
Project: xmppwb    Author: saqura    | project source | file source
def _parse_outgoing_webhooks(self, bridge_cfg):
        """Parses the `outgoing webhooks` from this bridge's config file
        section.

        This also sets up the HTTP client session for each webhook."""
        if 'outgoing_webhooks' not in bridge_cfg:
            # No outgoing webhooks in this bridge.
            return

        outgoing_webhooks = bridge_cfg['outgoing_webhooks']

        for outgoing_webhook in outgoing_webhooks:
            if 'url' not in outgoing_webhook:
                raise InvalidConfigError("Error in config file: "
                                         "'url' is missing from an "
                                         "outgoing webhook definition.")

            # Set up SSL context for certificate pinning.
            if 'cafile' in outgoing_webhook:
                cafile = os.path.abspath(outgoing_webhook['cafile'])
                sslcontext = ssl.create_default_context(cafile=cafile)
                conn = aiohttp.TCPConnector(ssl_context=sslcontext)
                session = aiohttp.ClientSession(loop=self.main_bridge.loop,
                                                connector=conn)
            else:
                session = aiohttp.ClientSession(loop=self.main_bridge.loop)
            # TODO: Handle ConnectionRefusedError.
            outgoing_webhook['session'] = session

            self.outgoing_webhooks.append(outgoing_webhook)
Project: py-restfmclient    Author: pcdummy    | project source | file source
def session(self):
        if self._session is None:
            conn = aiohttp.TCPConnector(
                loop=self._loop, verify_ssl=self.verify_ssl
            )
            self._session = aiohttp.ClientSession(
                loop=self._loop, connector=conn
            )

        return self._session
Project: apm-agent-python    Author: elastic    | project source | file source
def __init__(self, parsed_url, **kwargs):
        super(AsyncioHTTPTransport, self).__init__(parsed_url, **kwargs)
        loop = asyncio.get_event_loop()
        session_kwargs = {'loop': loop}
        if not self._verify_server_cert:
            session_kwargs['connector'] = aiohttp.TCPConnector(verify_ssl=False)
        self.client = aiohttp.ClientSession(**session_kwargs)
Project: hangoutsbot    Author: das7pad    | project source | file source
async def telegram_api_request(self, configuration, method, data):
        connector = aiohttp.TCPConnector(verify_ssl=True)
        headers = {'content-type': 'application/x-www-form-urlencoded'}

        BOT_API_KEY = configuration["bot_api_key"]

        url = "https://api.telegram.org/bot{}/{}".format(BOT_API_KEY, method)

        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.post(url, data=data, headers=headers) as response:
                results = await response.text()

        return results
Project: sanic    Author: channelcat    | project source | file source
async def _local_request(self, method, uri, cookies=None, *args, **kwargs):
        import aiohttp
        if uri.startswith(('http:', 'https:', 'ftp:', 'ftps:', '//')):
            url = uri
        else:
            url = 'http://{host}:{port}{uri}'.format(
                host=HOST, port=self.port, uri=uri)

        logger.info(url)
        conn = aiohttp.TCPConnector(verify_ssl=False)
        async with aiohttp.ClientSession(
                cookies=cookies, connector=conn) as session:
            async with getattr(
                    session, method.lower())(url, *args, **kwargs) as response:
                try:
                    response.text = await response.text()
                except UnicodeDecodeError as e:
                    response.text = None

                try:
                    response.json = await response.json()
                except (JSONDecodeError,
                        UnicodeDecodeError,
                        aiohttp.ClientResponseError):
                    response.json = None

                response.body = await response.read()
                return response
Project: twtxt    Author: buckket    | project source | file source
def get_remote_tweets(sources, limit=None, timeout=5.0, cache=None):
    conn = aiohttp.TCPConnector(use_dns_cache=True)
    headers = generate_user_agent()
    with aiohttp.ClientSession(connector=conn, headers=headers, conn_timeout=timeout) as client:
        loop = asyncio.get_event_loop()

        def start_loop(client, sources, limit, cache=None):
            return loop.run_until_complete(process_sources_for_file(client, sources, limit, cache))

        tweets = start_loop(client, sources, limit, cache)

    return tweets
Project: twtxt    Author: buckket    | project source | file source
def get_remote_status(sources, timeout=5.0):
    conn = aiohttp.TCPConnector(use_dns_cache=True)
    headers = generate_user_agent()
    with aiohttp.ClientSession(connector=conn, headers=headers, conn_timeout=timeout) as client:
        loop = asyncio.get_event_loop()
        result = loop.run_until_complete(process_sources_for_status(client, sources))
    return result
Project: client    Author: syncrypt    | project source | file source
def retrieve_available_version(platform_id):
    sslcontext = ssl.create_default_context(cafile=certifi.where())
    conn = aiohttp.TCPConnector(ssl_context=sslcontext)
    with aiohttp.ClientSession(connector=conn) as c:
        r = yield from c.get(CURRENT_ENDPOINT)
        content = yield from r.json()
        return content[platform_id]
Project: client    Author: syncrypt    | project source | file source
def init_client(self, client, headers={}):
        sslcontext = ssl.create_default_context(cafile=certifi.where())
        conn = aiohttp.TCPConnector(ssl_context=sslcontext)
        if client:
            self.client_owned, self.client = False, client
        else:
            self.client_owned, self.client = True, aiohttp.ClientSession(
                    connector=conn,
                    headers=headers,
                    skip_auto_headers=["Content-Type", "User-Agent"]
                    )
Project: async-fetcher    Author: night-crawler    | project source | file source
def __init__(self,
                 task_map: dict, timeout: int = 10, num_retries: int = 0,
                 retry_timeout: float = 1.0,
                 service_name: str = 'api',
                 cafile: str = None,
                 loop: t.Optional[asyncio.AbstractEventLoop] = None,
                 tcp_connector: t.Union[aiohttp.TCPConnector, None] = None,
                 keepalive_timeout: int = 60):
        """
        :param task_map: dict, task bundle mapping like {'task_name': <task_bundle>}
        :param timeout: int, request timeout
        :param num_retries: int, max retry count before exception rising
        :param retry_timeout: float, wait before retry
        :param service_name: str, service name label for verbose logging
        :param keepalive_timeout: int, keepalive timeout for TCPConnector created __internally__
        """
        self.task_map = OrderedDict(task_map.items())
        self.timeout = timeout
        self.num_retries = num_retries
        self.max_retries = num_retries
        self.retry_timeout = retry_timeout
        self.service_name = service_name

        self.cafile = cafile
        self.loop = loop or get_or_create_event_loop()
        self._tcp_connector = tcp_connector
        self._connector_owner = not bool(tcp_connector)

        # keepalive_timeout for __internally__ created connector
        self.keepalive_timeout = keepalive_timeout
Project: build-deploy-stats    Author: luigiberrettini    | project source | file source
async def __aenter__(self):
        tcp_connector = None if self.verify_ssl_certs else aiohttp.TCPConnector(verify_ssl = False)
        self.session = aiohttp.ClientSession(auth = self.basic_auth_credentials, headers = self.headers, connector = tcp_connector)
        return self
Project: overwatch-api    Author: anthok    | project source | file source
async def testing(loop):
    # Instantiating the api
    client = AsyncOWAPI()

    data = {}

    # We use our own clientsession to demonstrate that it's possible to pool connections in that way
    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
        # We await an api method and get a dict back as a result
        # We pass our session, and we pass the platform we want results for, in this case it's PC and we don't actually need to pass, since it's a default
        print('Testing......[get_profile]')
        data[PC] = await client.get_profile("Danielfrogs#2552", session=session, platform=PC)
        print('Testing......[get_profile]')
        data[XBOX] = await client.get_profile("Danielfrogs#2552", session=session, platform=XBOX)
        print('Testing......[get_profile]')
        data[PLAYSTATION] = await client.get_profile("Danielfrogs#2552", session=session, platform=PLAYSTATION)
        print('Testing......[get_stats]')
        data[PC] = await client.get_stats("Danielfrogs#2552", session=session, platform=PC)
        print('Testing......[get_stats]')
        data[XBOX] = await client.get_stats("Danielfrogs#2552", session=session, platform=XBOX)
        print('Testing......[get_stats]')
        data[PC] = await client.get_stats("Danielfrogs#2552", session=session, platform=PLAYSTATION)
        print('Testing......[get_achievements]')
        data[PC] = await client.get_achievements("Danielfrogs#2552", session=session, platform=PC)
        print('Testing......[get_hero_stats]')
        data[PC] = await client.get_hero_stats("Danielfrogs#2552", session=session, platform=PC)

    print(data)
Project: aiosolr    Author: TigorC    | project source | file source
def __init__(self, url, decoder=None, timeout=60, results_cls=Results, loop=None):
        if loop is None:
            loop = asyncio.get_event_loop()
        self.loop = loop
        self.decoder = decoder or json.JSONDecoder()
        self.url = url
        self.timeout = timeout
        self.log = self._get_log()
        self.session = aiohttp.ClientSession(
            connector=aiohttp.TCPConnector(use_dns_cache=True, loop=loop),
            loop=loop)
        self.results_cls = results_cls
Project: PaddoCogs    Author: PaddoInWonderland    | project source | file source
async def _api_request(self, location):
        payload = {'q': location, 'appid': self.settings['WEATHER_API_KEY']}
        url = 'http://api.openweathermap.org/data/2.5/weather?'
        conn = aiohttp.TCPConnector()
        session = aiohttp.ClientSession(connector=conn)
        async with session.get(url, params=payload) as r:
            data = await r.json()
        session.close()
        return data
Project: PaddoCogs    Author: PaddoInWonderland    | project source | file source
async def _get_query(self, payload, gateway):
        headers = {'user-agent': 'Red-cog/1.0'}
        conn = aiohttp.TCPConnector(verify_ssl=False)
        session = aiohttp.ClientSession(connector=conn)
        async with session.get(gateway, params=payload, headers=headers) as r:
            data = await r.text()
        session.close()
        return data
Project: PaddoCogs    Author: PaddoInWonderland    | project source | file source
async def _api_request(self, payload):
        url = 'http://ws.audioscrobbler.com/2.0/'
        headers = {'user-agent': 'Red-cog/1.0'}
        conn = aiohttp.TCPConnector()
        session = aiohttp.ClientSession(connector=conn)
        async with session.get(url, params=payload, headers=headers) as r:
            data = await r.json()
        session.close()
        return data
Project: PaddoCogs    Author: PaddoInWonderland    | project source | file source
async def _update_apps(self):
        payload = {}
        url = 'http://api.steampowered.com/ISteamApps/GetAppList/v0001/'
        headers = {'user-agent': 'Red-cog/1.0'}
        conn = aiohttp.TCPConnector(verify_ssl=False)
        session = aiohttp.ClientSession(connector=conn)
        async with session.get(url, params=payload, headers=headers) as r:
            data = await r.json()
        session.close()
        self.games = data['applist']['apps']['app']
        dataIO.save_json('data/steam/games.json', data)
Project: PaddoCogs    Author: PaddoInWonderland    | project source | file source
async def _app_info(self, gid):
        url = 'http://store.steampowered.com/api/appdetails?'
        payload = {}
        payload['appids'] = gid
        headers = {'user-agent': 'Red-cog/1.0'}
        conn = aiohttp.TCPConnector(verify_ssl=False)
        session = aiohttp.ClientSession(connector=conn)
        async with session.get(url, params=payload, headers=headers) as r:
            data = await r.json()
        session.close()
        if data[str(gid)]['success']:
            data = data[str(gid)]['data']
            info = {}
            info['name'] = data['name']
            info['developers'] = data['developers']
            info['publishers'] = data['publishers']

            if data['is_free']:
                info['price'] = 'Free to Play'
            elif 'price_overview' not in data:
                info['price'] = 'Not available'
            else:
                info['price'] = '{} {}'.format(str(data['price_overview']['final'] / 100), (data['price_overview']['currency']))
                if data['price_overview']['discount_percent'] > 0:
                    info['price'] = '{} {} ({} -{}%)'.format(str(data['price_overview']['final'] / 100), data['price_overview']['currency'], str(data['price_overview']['initial'] / 100), str(data['price_overview']['discount_percent']))
            if data['release_date']['coming_soon']:
                info['release_date'] = 'Coming Soon'
            else:
                info['release_date'] = data['release_date']['date']
            info['genres'] = data['genres']
            info['recommendations'] = ''
            if 'recommendations' in data:
                info['recommendations'] = 'Recommendations: {}\n\n'.format(str(data['recommendations']['total']))
            info['about_the_game'] = re.sub("<.*?>", " ", data['about_the_game'].replace('  ', '').replace('\r', '').replace('<br>', '\n').replace('\t', ''))
            if len(info['about_the_game']) > 500:
                info['about_the_game'] = '{}...'.format(info['about_the_game'][:500-3])
            return info
        return False
Project: PaddoCogs    Author: PaddoInWonderland    | project source | file source
async def _wikipedia(self, context, *, query: str):
        """
        Get information from Wikipedia
        """
        try:
            url = 'https://en.wikipedia.org/w/api.php?'
            payload = {}
            payload['action'] = 'query'
            payload['format'] = 'json'
            payload['prop'] = 'extracts'
            payload['titles'] = ''.join(query).replace(' ', '_')
            payload['exsentences'] = '5'
            payload['redirects'] = '1'
            payload['explaintext'] = '1'
            headers = {'user-agent': 'Red-cog/1.0'}
            conn = aiohttp.TCPConnector(verify_ssl=False)
            session = aiohttp.ClientSession(connector=conn)
            async with session.get(url, params=payload, headers=headers) as r:
                result = await r.json()
            session.close()
            if '-1' not in result['query']['pages']:
                for page in result['query']['pages']:
                    title = result['query']['pages'][page]['title']
                    description = result['query']['pages'][page]['extract'].replace('\n', '\n\n')
                em = discord.Embed(title='Wikipedia: {}'.format(title), description=u'\u2063\n{}...\n\u2063'.format(description[:-3]), color=discord.Color.blue(), url='https://en.wikipedia.org/wiki/{}'.format(title.replace(' ', '_')))
                em.set_footer(text='Information provided by Wikimedia', icon_url='https://upload.wikimedia.org/wikipedia/commons/thumb/5/53/Wikimedia-logo.png/600px-Wikimedia-logo.png')
                await self.bot.say(embed=em)
            else:
                message = 'I\'m sorry, I can\'t find {}'.format(''.join(query))
                await self.bot.say('```{}```'.format(message))
        except Exception as e:
            message = 'Something went terribly wrong! [{}]'.format(e)
            await self.bot.say('```{}```'.format(message))