Python requests.exceptions module: ProxyError() example source code

The following 20 code examples, extracted from open-source Python projects, illustrate how to use requests.exceptions.ProxyError().
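
For orientation, a minimal self-contained sketch of catching ProxyError around a proxied request is shown below; the proxy address and URL are placeholders, not working endpoints.

import requests
from requests.exceptions import ProxyError

# Placeholder proxy and URL, for illustration only.
proxies = {'http': 'http://10.0.0.1:3128', 'https': 'http://10.0.0.1:3128'}

try:
    resp = requests.get('http://example.com', proxies=proxies, timeout=5)
    print(resp.status_code)
except ProxyError as exc:
    # Raised when the proxy itself refuses, drops, or cannot complete the connection.
    print('proxy failed:', exc)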

Project: Ugly-Distributed-Crawler    Author: A1014280203    | Project source | File source
def get_url(url):
    headers['Referer'] = url
    count = 0
    while True:
        count += 1
        if count < settings['maxtries']:
            proxy = get_proxy()
        else:
            proxy = None
        try:
            resp = request('get', url, headers=headers, proxies={'http': proxy})
            return resp
        except ProxyError:
            if count > settings['maxtries']+2:
                print('Exit: Can not get url.<@get_url>')
                exit(1)
            continue
Project: Ugly-Distributed-Crawler    Author: A1014280203    | Project source | File source
def get_url(url):
    headers['Referer'] = url
    count = 0
    while True:
        count += 1
        if count < settings['maxtries']:
            proxy = get_proxy()
        else:
            proxy = None
        try:
            resp = request('get', url, headers=headers, proxies={'http': proxy})
            return resp
        except ProxyError:
            if count > settings['maxtries']+2:
                print('Exit: Could not get url.<@get_url>')
                exit(1)
            continue
Project: netease-dl    Author: ziwenxie    | Project source | File source
def exception_handle(method):
    """Handle exception raised by requests library."""

    def wrapper(*args, **kwargs):
        try:
            result = method(*args, **kwargs)
            return result
        except ProxyError:
            LOG.exception('ProxyError when try to get %s.', args)
            raise ProxyError('A proxy error occurred.')
        except ConnectionException:
            LOG.exception('ConnectionError when try to get %s.', args)
            raise ConnectionException('DNS failure, refused connection, etc.')
        except Timeout:
            LOG.exception('Timeout when try to get %s', args)
            raise Timeout('The request timed out.')
        except RequestException:
            LOG.exception('RequestException when try to get %s.', args)
            raise RequestException('Please check out your network.')

    return wrapper
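
As a usage note, the decorator above is meant to wrap a request helper. The function below is a hypothetical illustration (fetch_song_page and its URL are not part of netease-dl) and assumes the same imports as the snippet above.

# Hypothetical helper wrapped with the exception_handle decorator defined above.
@exception_handle
def fetch_song_page(song_id):
    return requests.get('http://music.163.com/song?id={}'.format(song_id))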
Project: Ugly-Distributed-Crawler    Author: A1014280203    | Project source | File source
def check_and_save(self, proxy):
        self.headers['Referer'] = self.query_address
        try:
            resp = requests.get(self.query_address, proxies={'http': proxy}, headers=self.headers)
            html = PyQuery(resp.content.decode())
        except ProxyError:
            print('Expired:', proxy)
            return
        except UnicodeDecodeError:
            return
        result = html('code').eq(0).text()
        if result != self.primary_ip:
            self.redis_handler.sadd(r_server['s_name'], proxy)
Project: ProxyPool    Author: Germey    | Project source | File source
def main():
    count = 0
    while True:
        # print('attempt', count)
        count = count + 1
        try:
            # generate randomized request headers
            global headers,count_proxys
            headers = {'User-Agent': ua.random}
            count_proxys = get_count_proxys()
            print('available proxies:', count_proxys, '  current proxy:', proxy, '\n', headers)
            start_time = time.clock()
            html = crawl('http://www.baidu.com', proxy)
            end_time = time.clock()
            print('request took', (str(end_time-start_time))[:4], 's')
            if html.status_code==200:
                print(html)
                return count
                break
            elif count>=10:
                print('too many retries, giving up')
                break

        except (ChunkedEncodingError,ConnectionError,Timeout,UnboundLocalError,UnicodeError,ProxyError):
            global proxy
            proxy = get_proxy()
            print('proxy failed, switching to a new one', '\n')
            # print(' ')
Project: STF    Author: nokia    | Project source | File source
def updateCaseStatus(self, caseName, Status):
        if self.isValidCaseInput(caseName, Status):
            caseList = self.buildValidList(caseName)
            statusList = self.buildValidList(Status)
            if len(caseList) == 0:
                logger.error(WARNING_NO_RESULT % 'Case')
                return False
        else:
            return False

        result = True
        try:
            cycleId = self.createCycle(self.cycleName)
            if cycleId == '':
                logger.error("Error: create clcyle.")
                return False
            existCaseList = self.getExistedTestNames(cycleId)
            for index, case in enumerate(caseList):
                if case not in existCaseList:
                    self.addTestToCycle(cycleId, case)
                executionId = self.getExecutionId(case)
                try:
                    number = self.statusNumber[statusList[index]]
                except KeyError:
                    logger.error(ERROR_NO_STATUS % statusList[index])
                    return False
                url = self.zapiBaseUrl + '/zapi/latest/execution/' + str(executionId) + '/execute'
                headers = {'Content-Type': 'application/json'}
                payload = json.dumps({'status': str(number)})
                response = requests.put(url, data=payload, headers=headers, auth=self.authentication, verify=False)
                result = result and (response.status_code == 200)
        except ProxyError:
            logger.error(ERROR_CANNOT_CONNECT_JIRA)
            return False
        return result
Project: STF    Author: nokia    | Project source | File source
def updateStepStatus(self, caseName, TestStep, Status):
        if caseName is None or TestStep is None or Status is None:
            return
        try:
            executionId = self.getExecutionId(caseName)
            if executionId == '':
                logger.error(WARNING_NO_RESULT % 'Case')
                return False
            stepResultId = self.getStepResultId(executionId, caseName, TestStep)
            if stepResultId == '':
                logger.error(WARNING_NO_RESULT % 'Step')
                return False

            try:
                number = self.statusNumber[Status]
            except KeyError:
                logger.error(ERROR_NO_STATUS % Status)
                return False
            url = self.zapiBaseUrl + '/zapi/latest/stepResult/' + str(stepResultId)
            headers = {'Content-Type': 'application/json'}
            result = json.dumps({'status': str(number)})
            response = requests.put(url, data=result, headers=headers, auth=self.authentication, verify=False)
            return response.status_code == 200
        except ProxyError:
            logger.error(ERROR_CANNOT_CONNECT_JIRA)
            return False
Project: STF    Author: nokia    | Project source | File source
def getCaseList(self, featureName = None, campaignType = None, title = None):
        result = []

        baseURL = self.zapiBaseUrl + '/api/2/search?jql='
        projectURL = 'project="' + self.projectName + '"'
        featureURL = ''
        if featureName is not None:
            featureURL = '&"epic link"="' + str(featureName) + '"'
        campaignURL = ''
        if campaignType is not None:
            campaignURL = '&cf[11720]["value"]="' + str(campaignType) + '"'
        titleURL = ''
        if title is not None and self.useRegularExpression == 'False':
            titleURL = '&summary~"' + str(title) + '"'
        testURL = '&issueType="Test"'

        try:
            url = baseURL + urllib.quote(projectURL + featureURL + campaignURL + titleURL + testURL, ':/?')
            response = requests.get(url, auth=self.authentication, verify=False)
            response_body = json.loads(response.text)
            #print json.dumps(response_body)
            for case in response_body['issues']:
                result += [case['key']]

            if self.useRegularExpression == 'True':
                result = self.filterTestsInIllegibilityTitle(result, title)
            if len(result) == 0:
                logger.error(WARNING_NO_RESULT % 'Case')
            return result
        except ProxyError:
            logger.error(ERROR_CANNOT_CONNECT_JIRA)
            return False
Project: ScrapyPythonAnalysis    Author: IMYin    | Project source | File source
def conn(self,url,ipList,userAgent):
        for num in range(8):
            proxies = {}
            headers = {}
            # randomly pick one IP from the list to use as the proxy IP
            proxies['http'] = ip = random.choice(ipList)
            self.log.info("select "+proxies['http']+" as the proxy IP.")
            headers['User-Agent'] = random.choice(userAgent)
            self.log.info("select "+headers['User-Agent']+" as the user-agent.")
            session = requests.Session()
            # try to connect through the chosen proxy

            try:
                req = session.get(url,headers=headers,proxies=proxies)
                bsObj = BeautifulSoup(req.text,"html.parser",from_encoding='utf-8')
                self.log.info("It connected with the url.")
                break
            except ProxyError as e:
                self.log.warn("The address is not work,it will try again...\n\n"+str(e.message))
                ipList.remove(ip)
                continue
            except ConnectionError as e:
                self.log.warn("The address is not work,it will try again...\n\n"+str(e.message))
                ipList.remove(ip)
                continue
        return bsObj
Project: MrMime    Author: sLoPPydrive    | Project source | File source
def exception_caused_by_proxy_error(ex):
    if not ex.args:
        return False

    for arg in ex.args:
        if isinstance(arg, ProxyError) or isinstance(arg, SSLError) or isinstance(arg, ConnectionError):
            return True
        if isinstance(arg, Exception):
            return exception_caused_by_proxy_error(arg)

    return False
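
A brief hypothetical check of the helper above: because Exception.args holds the constructor arguments, a requests exception that wraps a ProxyError is still detected by the recursive walk.

# Hypothetical illustration; assumes the same requests imports as the snippet above.
from requests.exceptions import RequestException, ProxyError

wrapped = RequestException(ProxyError('proxy refused the connection'))
print(exception_caused_by_proxy_error(wrapped))                  # True
print(exception_caused_by_proxy_error(ValueError('unrelated')))  # False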
Project: pypac    Author: carsonyl    | Project source | File source
def default_proxy_fail_exception_filter(req_exc):
    return isinstance(req_exc, (ProxyError, ConnectTimeout))
Project: pypac    Author: carsonyl    | Project source | File source
def test_unreachable_proxy(self, proxy_host):
        session = requests.Session()
        with pytest.raises(ProxyError):
            session.get(arbitrary_url, proxies=proxy_parameter_for_requests('http://' + proxy_host))
Project: pypac    Author: carsonyl    | Project source | File source
def test_timeout_proxy(self):
        # Travis can refuse quickly, and trigger ProxyError instead.
        session = requests.Session()
        with pytest.raises(ConnectTimeout):
            session.get(arbitrary_url, timeout=0.001, proxies=proxy_parameter_for_requests('http://localhost'))
Project: pypac    Author: carsonyl    | Project source | File source
def test_bad_proxy_no_failover(self, proxy_host):
        """Verify that Requests returns ProxyError when given a non-existent proxy."""
        sess = PACSession(pac=PACFile(proxy_pac_js_tpl % 'PROXY %s:80' % proxy_host))
        with pytest.raises(ProxyError):
            sess.get(arbitrary_url)
Project: pypac    Author: carsonyl    | Project source | File source
def test_pac_failover_to_direct(self):
        """Proxy fails. Next in line is DIRECT keyword."""
        sess = PACSession(pac=PACFile(proxy_pac_js))

        def fake_request_reject_proxy(method, url, proxies=None, **kwargs):
            if proxies and proxies['http'] is not None:
                raise ProxyError()

        with _patch_request_base(side_effect=fake_request_reject_proxy) as request:
            sess.get(arbitrary_url)
            request.assert_has_calls([
                get_call(arbitrary_url, fake_proxy_url),
                get_call(arbitrary_url, 'DIRECT'),
            ])
Project: pypac    Author: carsonyl    | Project source | File source
def test_pac_failover_to_direct_also_fails(self):
        """Proxy fails. Next in line is DIRECT keyword, but direct connection also fails. Error should bubble up.
        Subsequent requests go straight to DIRECT, despite DIRECT failing."""
        sess = PACSession(pac=PACFile(proxy_pac_js))
        with _patch_request_base(side_effect=ProxyError()) as request:
            for _ in range(2):
                with pytest.raises(ProxyError):
                    sess.get(arbitrary_url)
        request.assert_has_calls([
            get_call(arbitrary_url, fake_proxy_url),
            get_call(arbitrary_url, 'DIRECT'),
            get_call(arbitrary_url, 'DIRECT'),
        ])
Project: pypac    Author: carsonyl    | Project source | File source
def test_pac_no_failover_available_exc_case(self):
        """Special case where proxy fails but there's no DIRECT fallback. Error should bubble up,
        and all applicable proxies should be tried again in the next request. Proxy failure from exception."""
        sess = PACSession(pac=PACFile(proxy_pac_js_tpl % 'PROXY a:80; PROXY b:80'))
        for _ in range(2):
            with _patch_request_base(side_effect=ProxyError()) as request, \
                    pytest.raises(ProxyError):
                sess.get(arbitrary_url)
            request.assert_has_calls([
                get_call(arbitrary_url, 'http://a:80'),
                get_call(arbitrary_url, 'http://b:80'),
            ])
Project: STF    Author: nokia    | Project source | File source
def getCaseInfo(self, CaseId):
        try:
            url = self.zapiBaseUrl + '/api/2/issue/' + CaseId.strip('\n')
            response = requests.get(url, auth=self.authentication, verify=False)
            response_body = json.loads(response.text)
            if not response_body.has_key('errorMessages') and response_body['fields']['issuetype']['name'] == 'Test':
                case = TmsCase()

                case.id = response_body['id']
                case.key = response_body['key']
                case.summary = response_body['fields']['summary']
                case.description = response_body['fields']['description']
                case.reference = response_body['fields']['customfield_15308']

                case.creator = Worker()
                case.creator.name = response_body['fields']['creator']['name']
                case.creator.key = response_body['fields']['creator']['key']
                case.creator.displayName = response_body['fields']['creator']['displayName']
                case.creator.emailAddress = response_body['fields']['creator']['emailAddress']

                case.reporter = Worker()
                case.reporter.name = response_body['fields']['reporter']['name']
                case.reporter.key = response_body['fields']['reporter']['key']
                case.reporter.displayName = response_body['fields']['reporter']['displayName']
                case.reporter.emailAddress = response_body['fields']['reporter']['emailAddress']

                case.assignee = Worker()
                case.assignee.name = response_body['fields']['assignee']['name']
                case.assignee.key = response_body['fields']['assignee']['key']
                case.assignee.displayName = response_body['fields']['assignee']['displayName']
                case.assignee.emailAddress = response_body['fields']['assignee']['emailAddress']

                case.project = Project()
                case.project.id = response_body['fields']['project']['id']
                case.project.key = response_body['fields']['project']['key']
                case.project.name = response_body['fields']['project']['name']

                return case
            else:
                return None
        except ProxyError:
            logger.error(ERROR_CANNOT_CONNECT_JIRA)
        return WARNING_NO_RESULT % 'Case'
Project: dpdownload    Author: doupengs    | Project source | File source
def download(self, method, url, proxyEnable=False, **kwargs):
        '''
        :param method: 'GET','POST','PUT','DELETE','HEAD','OPTIONS'
        :param url: url
        :param proxyEnable: use proxy or not
        :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`
        :param json: (optional) json data to send in the body of the :class:`Request`
        :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`
        :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`
        :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``)
                      for multipart encoding upload.``file-tuple`` can be a 2-tuple ``('filename', fileobj)``,
                      3-tuple ``('filename', fileobj, 'content_type')`` or a 4-tuple ``('filename', fileobj,
                      'content_type', custom_headers)``, where ``'content-type'`` is a string defining the
                      content type of the given file and ``custom_headers`` a dict-like object containing
                      additional headers to add for the file
        :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth
        :param timeout: (optional) How long to wait for the server to send data
                        before giving up, as a float, or a :ref:`(connect timeout, read
                        timeout) <timeouts>` tuple <float or tuple>
        :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed <class bool>
        :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy
        :param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``
        :param stream: (optional) if ``False``, the response content will be immediately downloaded
        :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair
        :return: Response if failed Response=None
        '''
        if (not proxyEnable) or (proxyEnable and not self.proxies):
            if proxyEnable and not self.proxies:
                logger.warning('No initialization proxy file or proxy file is not available')
            try:
                return requests.request(method, url, **kwargs)
            except Exception as e:
                logger.warning(e)
        else:
            try:
                oneProxy = self.proxies.pop(0)
                self.proxies.append(oneProxy)
                key = oneProxy.split(":")[0]
                oneProxy = {key: oneProxy}
                logger.debug('USE PROXY [-] %s' % oneProxy.values()[0])
                return requests.request(method, url, proxies=oneProxy, **kwargs)
            except ProxyError:
                return self.download(method, url, proxyEnable, **kwargs)
            except Exception as e:
                logger.warning(e)
Project: pypac    Author: carsonyl    | Project source | File source
def __init__(self, pac=None, proxy_auth=None, pac_enabled=True,
                 response_proxy_fail_filter=None, exception_proxy_fail_filter=None,
                 socks_scheme='socks5', recursion_limit=ARBITRARY_HIGH_RECURSION_LIMIT):
        """
        :param PACFile pac: The PAC file to consult for proxy configuration info.
            If not provided, then upon the first request, :func:`get_pac` is called with default arguments
            in order to find a PAC file.
        :param requests.auth.HTTPProxyAuth proxy_auth: Username and password proxy authentication.
        :param bool pac_enabled: Set to ``False`` to disable all PAC functionality, including PAC auto-discovery.
        :param response_proxy_fail_filter: Callable that takes a ``requests.Response`` and returns
            a boolean for whether the response means the proxy used for the request should no longer be used.
            By default, the response is not inspected.
        :param exception_proxy_fail_filter: Callable that takes an exception and returns
            a boolean for whether the exception means the proxy used for the request should no longer be used.
            By default, :class:`requests.exceptions.ConnectTimeout` and
            :class:`requests.exceptions.ProxyError` are matched.
        :param int recursion_limit: Python recursion limit when executing JavaScript.
            PAC files are often complex enough to need this to be higher than the interpreter default.
            This value is passed to auto-discovered :class:`PACFile` only.
        :param str socks_scheme: Scheme to use when PAC file returns a SOCKS proxy. `socks5` by default.
        """
        super(PACSession, self).__init__()
        self._tried_get_pac = False

        self._proxy_resolver = None
        self._proxy_auth = proxy_auth
        self._socks_scheme = socks_scheme
        self._recursion_limit = recursion_limit

        #: Set to ``False`` to disable all PAC functionality, including PAC auto-discovery.
        self.pac_enabled = pac_enabled

        if pac:
            self._tried_get_pac = True
            self._proxy_resolver = self._get_proxy_resolver(pac)

        self._response_proxy_failure_filter = default_proxy_fail_response_filter
        if response_proxy_fail_filter:
            self._response_proxy_failure_filter = response_proxy_fail_filter

        self._exc_proxy_failure_filter = default_proxy_fail_exception_filter
        if exception_proxy_fail_filter:
            self._exc_proxy_failure_filter = exception_proxy_fail_filter
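
Tying the constructor back to the filter functions shown earlier, here is a minimal sketch (assuming pypac is installed; the URL is a placeholder) of passing a custom exception filter:

# Minimal sketch: treat only ProxyError (not ConnectTimeout) as a proxy failure.
from pypac import PACSession
from requests.exceptions import ProxyError

session = PACSession(exception_proxy_fail_filter=lambda exc: isinstance(exc, ProxyError))
response = session.get('http://example.com')  # placeholder URL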