Python pycurl module: RESPONSE_CODE example source code

The following 16 code examples, extracted from open-source Python projects, illustrate how to use pycurl.RESPONSE_CODE.
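As a quick orientation before the project snippets: the shared pattern is to perform a transfer and then call getinfo(pycurl.RESPONSE_CODE) on the same handle to read the HTTP status of the last response. The minimal sketch below is not taken from any of the projects that follow; the URL is a placeholder.

import io
import pycurl

# Collect the response body in memory; any object with a write() method works.
buffer = io.BytesIO()

curl = pycurl.Curl()
curl.setopt(pycurl.URL, 'https://example.com/')   # placeholder URL, assumption for illustration
curl.setopt(pycurl.WRITEDATA, buffer)
curl.perform()

# RESPONSE_CODE holds the HTTP status of the last transfer, e.g. 200 or 404.
status = curl.getinfo(pycurl.RESPONSE_CODE)
curl.close()

if status != 200:
    raise RuntimeError('unexpected HTTP status: {}'.format(status))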

Project: ceiba-dl    Author: lantw44
def file(self, path, output, args={}, progress_callback=lambda *x: None):
        self.logger.debug('Preparing to download the file')
        self.web_cache[path] = dict(args)
        url = urllib.parse.urljoin(self.file_url, urllib.parse.quote(path))
        if len(args) > 0:
            url += '?' + urllib.parse.urlencode(args)
        self.logger.debug('HTTP request URL: {}'.format(url))
        self.curl.setopt(pycurl.URL, url)
        self.curl.setopt(pycurl.COOKIE, self.web_cookie)
        self.curl.setopt(pycurl.NOBODY, False)
        self.curl.setopt(pycurl.NOPROGRESS, False)
        self.curl.setopt(pycurl.WRITEDATA, output)
        self.curl.setopt(pycurl.HEADERFUNCTION, lambda *x: None)
        self.curl.setopt(pycurl.XFERINFOFUNCTION, progress_callback)
        self.curl.perform()
        status = self.curl.getinfo(pycurl.RESPONSE_CODE)
        if status != 200:
            raise ServerError(status)
Project: ceiba-dl    Author: lantw44
def file_size(self, path, args={}):
        self.logger.debug('Preparing to get the file size')
        self.web_cache[path] = dict(args)
        url = urllib.parse.urljoin(self.file_url, urllib.parse.quote(path))
        if len(args) > 0:
            url += '?' + urllib.parse.urlencode(args)
        self.logger.debug('HTTP request URL: {}'.format(url))
        self.curl.setopt(pycurl.URL, url)
        self.curl.setopt(pycurl.COOKIE, self.web_cookie)
        self.curl.setopt(pycurl.NOBODY, True)
        self.curl.setopt(pycurl.NOPROGRESS, True)
        self.curl.setopt(pycurl.WRITEDATA, io.BytesIO())
        self.curl.setopt(pycurl.HEADERFUNCTION, lambda *x: None)
        self.curl.setopt(pycurl.XFERINFOFUNCTION, lambda *x: None)
        self.curl.perform()
        status = self.curl.getinfo(pycurl.RESPONSE_CODE)
        if status != 200:
            raise ServerError(status)
        return self.curl.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD)
Project: ceiba-dl    Author: lantw44
def web_redirect(self, path, args={}):
        self.logger.debug('Preparing to get the redirect URL')
        self.web_cache[path] = dict(args)
        url = urllib.parse.urljoin(self.web_url, urllib.parse.quote(path))
        if len(args) > 0:
            url += '?' + urllib.parse.urlencode(args)
        self.logger.debug('HTTP request URL: {}'.format(url))
        headers = io.BytesIO()
        self.curl.setopt(pycurl.URL, url)
        self.curl.setopt(pycurl.COOKIE, self.web_cookie)
        self.curl.setopt(pycurl.NOBODY, False)
        self.curl.setopt(pycurl.NOPROGRESS, True)
        self.curl.setopt(pycurl.WRITEDATA, NoneIO())
        self.curl.setopt(pycurl.HEADERFUNCTION, headers.write)
        self.curl.setopt(pycurl.XFERINFOFUNCTION, lambda *x: None)
        self.curl.perform()
        status = self.curl.getinfo(pycurl.RESPONSE_CODE)
        if status != 302:
            raise ServerError(status)
        for header_line in headers.getvalue().split(b'\r\n'):
            if header_line.startswith(b'Location:'):
                return header_line.split(b':', maxsplit=1)[1].strip().decode()
        return None
Project: ceiba-dl    Author: lantw44
def api(self, args, encoding='utf-8', allow_return_none=False):
        self.logger.debug('Preparing to send an API request')
        if args.get('mode', '') == 'semester':
            semester = args.get('semester', '')
            if allow_return_none and self.api_cache == semester:
                self.logger.debug('Using cached API data for semester {}'.format(semester))
                return
            self.api_cache = semester
        query_args = dict()
        query_args.update(self.api_args)
        query_args.update(args)
        url = self.api_url + '?' + urllib.parse.urlencode(query_args)
        data = io.BytesIO()
        self.logger.debug('HTTP request URL: {}'.format(url))
        self.curl.setopt(pycurl.URL, url)
        self.curl.setopt(pycurl.COOKIE, self.api_cookie)
        self.curl.setopt(pycurl.NOBODY, False)
        self.curl.setopt(pycurl.NOPROGRESS, True)
        self.curl.setopt(pycurl.WRITEDATA, data)
        self.curl.setopt(pycurl.HEADERFUNCTION, lambda *x: None)
        self.curl.setopt(pycurl.XFERINFOFUNCTION, lambda *x: None)
        self.curl.perform()
        status = self.curl.getinfo(pycurl.RESPONSE_CODE)
        if status != 200:
            raise ServerError(status)
        try:
            value = data.getvalue()
            return json.loads(value.decode(encoding))
        except json.decoder.JSONDecodeError:
            raise NotJSONError(value.decode(encoding))
Project: ceiba-dl    Author: lantw44
def web(self, path, args={}, encoding=None, allow_return_none=False):
        self.logger.debug('Preparing to fetch the web page')
        if allow_return_none:
            if path in self.web_cache and self.web_cache[path] == args:
                self.logger.debug('Using cached data for page {}'.format(path))
                self.logger.debug('Arguments: {}'.format(args))
                return
        self.web_cache[path] = dict(args)
        url = urllib.parse.urljoin(self.web_url, urllib.parse.quote(path))
        if len(args) > 0:
            url += '?' + urllib.parse.urlencode(args)
        self.logger.debug('HTTP request URL: {}'.format(url))
        data = io.BytesIO()
        self.curl.setopt(pycurl.URL, url)
        self.curl.setopt(pycurl.COOKIE, self.web_cookie)
        self.curl.setopt(pycurl.NOBODY, False)
        self.curl.setopt(pycurl.NOPROGRESS, True)
        self.curl.setopt(pycurl.WRITEDATA, data)
        self.curl.setopt(pycurl.HEADERFUNCTION, lambda *x: None)
        self.curl.setopt(pycurl.XFERINFOFUNCTION, lambda *x: None)
        self.curl.perform()
        status = self.curl.getinfo(pycurl.RESPONSE_CODE)
        if status != 200:
            raise ServerError(status)
        data.seek(io.SEEK_SET)
        return etree.parse(data, etree.HTMLParser(
            encoding=encoding, remove_comments=True))
Project: download-manager    Author: thispc
def verifyHeader(self):
        """ raise an exceptions on bad headers """
        code = int(self.c.getinfo(pycurl.RESPONSE_CODE))
        if code in bad_headers:
            #404 will NOT raise an exception
            raise BadHeader(code, self.getResponse())
        return code
Project: download-manager    Author: thispc
def checkHeader(self):
        """ check if header indicates failure"""
        return int(self.c.getinfo(pycurl.RESPONSE_CODE)) not in bad_headers
Project: xtdpy    Author: psycofdj
def __init__(self, p_client):
    super(HTTPResponse, self).__init__()
    self.m_error       = p_client.m_handle.errstr()
    self.m_data        = None
    self.m_rawdata     = p_client.m_data.getvalue()
    self.m_headers     = p_client.m_headers
    self.m_statusCode  = p_client.m_handle.getinfo(pycurl.RESPONSE_CODE)
    self.m_mimetype    = "text/plain"
    self.m_encoding    = "iso-8859-1"
    self._read()
Project: sogaQuant    Author: idoplay
def curl_get(self, url, refUrl=None):
        buf = cStringIO.StringIO()
        curl = pycurl.Curl()
        curl.setopt(curl.URL, url)
        curl.setopt(curl.WRITEFUNCTION, buf.write)
        curl.setopt(pycurl.SSL_VERIFYPEER, 0)
        #curl.setopt(pycurl.SSL_VERIFYHOST, 0)
        #curl.setopt(pycurl.HEADERFUNCTION, self.headerCookie)
        curl.setopt(pycurl.VERBOSE, 0)
        curl.setopt(pycurl.USERAGENT, 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:46.0) Gecko/20100101 Firefox/46.0')
        #curl.setopt(pycurl.HTTPGET,1)
        #curl.setopt(pycurl.COOKIE, Cookie)
        #curl.setopt(pycurl.POSTFIELDS, 'j_username={ngnms_user}&j_password={ngnms_password}'.format(**self.ngnms_login))
        curl.setopt(pycurl.COOKIEJAR, '/htdocs/logs/py_cookie.txt')
        curl.setopt(pycurl.COOKIEFILE, '/htdocs/logs/py_cookie.txt')
        if refUrl:
            curl.setopt(pycurl.REFERER, refUrl)
        #curl.setopt(c.CONNECTTIMEOUT, 5)
        #curl.setopt(c.TIMEOUT, 8)
        curl.perform()
        backinfo = ''
        if curl.getinfo(pycurl.RESPONSE_CODE) == 200:
            backinfo = buf.getvalue()
        curl.close()
        return backinfo
Project: tus-py-client    Author: tus
def _finish_request(self):
        self.status_code = self.handle.getinfo(pycurl.RESPONSE_CODE)
Project: easyCurl    Author: MosVerkstad
def runCurl(requestObj):
    global rcId, url, method, requestHeaders, requestBodyStr
    rcId, url, method, requestHeaders, requestBodyStr = requestObj
    requestBody = BytesIO(requestBodyStr)
    responseHeaders = BytesIO()
    responseBody = BytesIO()
    responseCode, responseHeaderStr, responseBodyStr = None, None, None

    cType = method if optCurlMethods.get(method, None) != None else 'OTHERS'
    c = setOptCurl(setOptCurl(pycurl.Curl(), optCurlMethods['COMMON']), optCurlMethods[cType])

    c.setopt(c.URL, url)
    c.setopt(c.HTTPHEADER, requestHeaders)
    c.setopt(c.READFUNCTION, requestBody.read)
    c.setopt(c.HEADERFUNCTION, responseHeaders.write)
    c.setopt(c.WRITEFUNCTION, responseBody.write)

    try:
        startTime = datetime.now()
        c.perform()
        endTime = datetime.now()
    except Exception as e:
        responseCode, responseHeaderStr, responseBodyStr, startTime, endTime, error = None, None, None, None, None, e
    else:
        responseCode, responseHeaderStr, responseBodyStr, error = c.getinfo(pycurl.RESPONSE_CODE), responseHeaders.getvalue(), responseBody.getvalue(), None
    finally:
        c.close()
        requestBody.close()
        responseHeaders.close()
        responseBody.close()

    return responseCode, responseHeaderStr, responseBodyStr, startTime, endTime, error
Project: pyload-requests    Author: pyload
def verify_header(self):
        """
        Raise an exception on bad headers.
        """
        code = int(self.c.getinfo(pycurl.RESPONSE_CODE))
        if code in BAD_HEADERS:
            raise ResponseException(
                code, responses.get(code, "Unknown statuscode"))
        return code
Project: stash-scanner    Author: senuido
def info(c):
    "Return a dictionary with all info on the last response."
    m = {}
    m['effective-url'] = c.getinfo(pycurl.EFFECTIVE_URL)
    m['http-code'] = c.getinfo(pycurl.HTTP_CODE)
    m['total-time'] = c.getinfo(pycurl.TOTAL_TIME)
    m['namelookup-time'] = c.getinfo(pycurl.NAMELOOKUP_TIME)
    m['connect-time'] = c.getinfo(pycurl.CONNECT_TIME)
    m['pretransfer-time'] = c.getinfo(pycurl.PRETRANSFER_TIME)
    m['redirect-time'] = c.getinfo(pycurl.REDIRECT_TIME)
    m['redirect-count'] = c.getinfo(pycurl.REDIRECT_COUNT)
    # m['size-upload'] = c.getinfo(pycurl.SIZE_UPLOAD)
    m['size-download'] = c.getinfo(pycurl.SIZE_DOWNLOAD)
    # m['speed-upload'] = c.getinfo(pycurl.SPEED_UPLOAD)
    m['header-size'] = c.getinfo(pycurl.HEADER_SIZE)
    m['request-size'] = c.getinfo(pycurl.REQUEST_SIZE)
    m['content-length-download'] = c.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD)
    m['content-length-upload'] = c.getinfo(pycurl.CONTENT_LENGTH_UPLOAD)
    m['content-type'] = c.getinfo(pycurl.CONTENT_TYPE)
    m['response-code'] = c.getinfo(pycurl.RESPONSE_CODE)
    m['speed-download'] = c.getinfo(pycurl.SPEED_DOWNLOAD)
    # m['ssl-verifyresult'] = c.getinfo(pycurl.SSL_VERIFYRESULT)
    m['filetime'] = c.getinfo(pycurl.INFO_FILETIME)
    m['starttransfer-time'] = c.getinfo(pycurl.STARTTRANSFER_TIME)
    m['redirect-time'] = c.getinfo(pycurl.REDIRECT_TIME)
    m['redirect-count'] = c.getinfo(pycurl.REDIRECT_COUNT)
    m['http-connectcode'] = c.getinfo(pycurl.HTTP_CONNECTCODE)
    # m['httpauth-avail'] = c.getinfo(pycurl.HTTPAUTH_AVAIL)
    # m['proxyauth-avail'] = c.getinfo(pycurl.PROXYAUTH_AVAIL)
    # m['os-errno'] = c.getinfo(pycurl.OS_ERRNO)
    m['num-connects'] = c.getinfo(pycurl.NUM_CONNECTS)
    # m['ssl-engines'] = c.getinfo(pycurl.SSL_ENGINES)
    # m['cookielist'] = c.getinfo(pycurl.INFO_COOKIELIST)
    # m['lastsocket'] = c.getinfo(pycurl.LASTSOCKET)
    # m['ftp-entry-path'] = c.getinfo(pycurl.FTP_ENTRY_PATH)
    return m
Project: alfred-10000ft-scripts    Author: jceelen
def info(self):
        "Return a dictionary with all info on the last response."
        m = {}
        m['effective-url'] = self.handle.getinfo(pycurl.EFFECTIVE_URL)
        m['http-code'] = self.handle.getinfo(pycurl.HTTP_CODE)
        m['total-time'] = self.handle.getinfo(pycurl.TOTAL_TIME)
        m['namelookup-time'] = self.handle.getinfo(pycurl.NAMELOOKUP_TIME)
        m['connect-time'] = self.handle.getinfo(pycurl.CONNECT_TIME)
        m['pretransfer-time'] = self.handle.getinfo(pycurl.PRETRANSFER_TIME)
        m['redirect-time'] = self.handle.getinfo(pycurl.REDIRECT_TIME)
        m['redirect-count'] = self.handle.getinfo(pycurl.REDIRECT_COUNT)
        m['size-upload'] = self.handle.getinfo(pycurl.SIZE_UPLOAD)
        m['size-download'] = self.handle.getinfo(pycurl.SIZE_DOWNLOAD)
        m['speed-upload'] = self.handle.getinfo(pycurl.SPEED_UPLOAD)
        m['header-size'] = self.handle.getinfo(pycurl.HEADER_SIZE)
        m['request-size'] = self.handle.getinfo(pycurl.REQUEST_SIZE)
        m['content-length-download'] = self.handle.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD)
        m['content-length-upload'] = self.handle.getinfo(pycurl.CONTENT_LENGTH_UPLOAD)
        m['content-type'] = self.handle.getinfo(pycurl.CONTENT_TYPE)
        m['response-code'] = self.handle.getinfo(pycurl.RESPONSE_CODE)
        m['speed-download'] = self.handle.getinfo(pycurl.SPEED_DOWNLOAD)
        m['ssl-verifyresult'] = self.handle.getinfo(pycurl.SSL_VERIFYRESULT)
        m['filetime'] = self.handle.getinfo(pycurl.INFO_FILETIME)
        m['starttransfer-time'] = self.handle.getinfo(pycurl.STARTTRANSFER_TIME)
        m['redirect-time'] = self.handle.getinfo(pycurl.REDIRECT_TIME)
        m['redirect-count'] = self.handle.getinfo(pycurl.REDIRECT_COUNT)
        m['http-connectcode'] = self.handle.getinfo(pycurl.HTTP_CONNECTCODE)
        m['httpauth-avail'] = self.handle.getinfo(pycurl.HTTPAUTH_AVAIL)
        m['proxyauth-avail'] = self.handle.getinfo(pycurl.PROXYAUTH_AVAIL)
        m['os-errno'] = self.handle.getinfo(pycurl.OS_ERRNO)
        m['num-connects'] = self.handle.getinfo(pycurl.NUM_CONNECTS)
        m['ssl-engines'] = self.handle.getinfo(pycurl.SSL_ENGINES)
        m['cookielist'] = self.handle.getinfo(pycurl.INFO_COOKIELIST)
        m['lastsocket'] = self.handle.getinfo(pycurl.LASTSOCKET)
        m['ftp-entry-path'] = self.handle.getinfo(pycurl.FTP_ENTRY_PATH)
        return m
Project: oriskami-python    Author: oriskami
def request(self, method, url, headers, post_data=None):
        s = util.StringIO.StringIO()
        rheaders = util.StringIO.StringIO()
        curl = pycurl.Curl()

        proxy = self._get_proxy(url)
        if proxy:
            if proxy.hostname:
                curl.setopt(pycurl.PROXY, proxy.hostname)
            if proxy.port:
                curl.setopt(pycurl.PROXYPORT, proxy.port)
            if proxy.username or proxy.password:
                curl.setopt(
                    pycurl.PROXYUSERPWD,
                    "%s:%s" % (proxy.username, proxy.password))

        if method == 'get':
            curl.setopt(pycurl.HTTPGET, 1)
        elif method == 'post':
            curl.setopt(pycurl.POST, 1)
            curl.setopt(pycurl.POSTFIELDS, post_data)
        else:
            curl.setopt(pycurl.CUSTOMREQUEST, method.upper())

        # pycurl doesn't like unicode URLs
        curl.setopt(pycurl.URL, util.utf8(url))

        curl.setopt(pycurl.WRITEFUNCTION, s.write)
        curl.setopt(pycurl.HEADERFUNCTION, rheaders.write)
        curl.setopt(pycurl.NOSIGNAL, 1)
        curl.setopt(pycurl.CONNECTTIMEOUT, 30)
        curl.setopt(pycurl.TIMEOUT, 80)
        curl.setopt(pycurl.HTTPHEADER, ['%s: %s' % (k, v)
                                        for k, v in headers.items()])
        if self._verify_ssl_certs:
            curl.setopt(pycurl.CAINFO, os.path.join(
                os.path.dirname(__file__), 'data/ca-certificates.crt'))
        else:
            curl.setopt(pycurl.SSL_VERIFYHOST, False)

        try:
            curl.perform()
        except pycurl.error as e:
            self._handle_request_error(e)
        rbody = s.getvalue()
        rcode = curl.getinfo(pycurl.RESPONSE_CODE)

        return rbody, rcode, self.parse_headers(rheaders.getvalue())
Project: falsy    Author: pingf
def curl_result(c):
    effective_url = c.getinfo(pycurl.EFFECTIVE_URL)
    primary_ip = c.getinfo(pycurl.PRIMARY_IP)
    primary_port = c.getinfo(pycurl.PRIMARY_PORT)
    local_ip = c.getinfo(pycurl.LOCAL_IP)
    local_port = c.getinfo(pycurl.LOCAL_PORT)
    speed_download = c.getinfo(pycurl.SPEED_DOWNLOAD)
    size_download = c.getinfo(pycurl.SIZE_DOWNLOAD)
    redirect_time = c.getinfo(pycurl.REDIRECT_TIME)
    redirect_count = c.getinfo(pycurl.REDIRECT_COUNT)
    redirect_url = c.getinfo(pycurl.REDIRECT_URL)
    http_code = c.getinfo(pycurl.HTTP_CODE)
    response_code = c.getinfo(pycurl.RESPONSE_CODE)
    total_time = c.getinfo(pycurl.TOTAL_TIME)
    content_type = c.getinfo(pycurl.CONTENT_TYPE)
    namelookup_time = c.getinfo(pycurl.NAMELOOKUP_TIME)
    info_filetime = c.getinfo(pycurl.INFO_FILETIME)
    http_connectcode = c.getinfo(pycurl.HTTP_CONNECTCODE)
    starttransfer_time = c.getinfo(pycurl.STARTTRANSFER_TIME)
    pretransfer_time = c.getinfo(pycurl.PRETRANSFER_TIME)
    header_size = c.getinfo(pycurl.HEADER_SIZE)
    request_size = c.getinfo(pycurl.REQUEST_SIZE)
    ssl_verifyresult = c.getinfo(pycurl.SSL_VERIFYRESULT)
    num_connects = c.getinfo(pycurl.NUM_CONNECTS)

    return {
        'effective_url': effective_url,
        'primary_ip': primary_ip,
        'primary_port': primary_port,
        'local_ip': local_ip,
        'local_port': local_port,
        'speed_download': speed_download,
        'size_download': size_download,
        'redirect_time': redirect_time,
        'redirect_count': redirect_count,
        'redirect_url': redirect_url,
        'http_code': http_code,
        'response_code': response_code,
        'total_time': total_time,
        'content_type': content_type,
        'namelookup_time': namelookup_time,
        'info_filetime': info_filetime,
        'http_connectcode': http_connectcode,
        'starttransfer_time': starttransfer_time,
        'pretransfer_time': pretransfer_time,
        'header_size': header_size,
        'request_size': request_size,
        'ssl_verifyresult': ssl_verifyresult,
        'num_connects': num_connects,
        # 'proxy_ssl_verifyresult': proxy_ssl_verifyresult,
        # 'app_connecttime': app_connecttime,

    }