Python pycurl module: COOKIEFILE code examples

We have extracted the following 24 code examples from open source Python projects to illustrate how to use pycurl.COOKIEFILE.
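
Before the project snippets, here is a minimal, self-contained sketch of the pattern most of them share (the fetch_with_cookies helper name and the cookies.txt path are illustrative placeholders, not taken from any project below): COOKIEFILE enables libcurl's cookie engine and reads cookies from the named file, while COOKIEJAR names the file cookies are written back to when the handle is closed. Passing an empty string to COOKIEFILE enables the engine without reading anything from disk.

import pycurl
from io import BytesIO

def fetch_with_cookies(url, cookie_path="cookies.txt"):
    # cookie_path is an arbitrary placeholder; use "" to keep cookies in memory only
    buf = BytesIO()
    c = pycurl.Curl()
    c.setopt(pycurl.URL, url)
    c.setopt(pycurl.WRITEFUNCTION, buf.write)
    c.setopt(pycurl.FOLLOWLOCATION, True)
    c.setopt(pycurl.COOKIEFILE, cookie_path)  # read cookies from this file and turn the cookie engine on
    c.setopt(pycurl.COOKIEJAR, cookie_path)   # write cookies back to the same file when the handle closes
    c.perform()
    c.close()
    return buf.getvalue()

Pointing both options at the same path, as many of the examples below do, gives a simple persistent session across requests and runs.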

Project: Instagram-API    Author: danleyb2    | Project Source | File Source
def request(self, endpoint, post=None):
        buffer = BytesIO()

        ch = pycurl.Curl()
        ch.setopt(pycurl.URL, Constants.API_URL + endpoint)
        ch.setopt(pycurl.USERAGENT, self.userAgent)
        ch.setopt(pycurl.WRITEFUNCTION, buffer.write)
        ch.setopt(pycurl.FOLLOWLOCATION, True)
        ch.setopt(pycurl.HEADER, True)
        ch.setopt(pycurl.VERBOSE, False)
        ch.setopt(pycurl.COOKIEFILE, os.path.join(self.IGDataPath, self.username, self.username + "-cookies.dat"))
        ch.setopt(pycurl.COOKIEJAR, os.path.join(self.IGDataPath, self.username, self.username + "-cookies.dat"))

        if post is not None:
            ch.setopt(pycurl.POST, True)
            ch.setopt(pycurl.POSTFIELDS, post)

        if self.proxy:
            ch.setopt(pycurl.PROXY, self.proxyHost)
            if self.proxyAuth:
                ch.setopt(pycurl.PROXYUSERPWD, self.proxyAuth)

        ch.perform()
        resp = buffer.getvalue()
        header_len = ch.getinfo(pycurl.HEADER_SIZE)
        header = resp[0: header_len]
        body = resp[header_len:]

        ch.close()

        if self.debug:
            print("REQUEST: " + endpoint)
            if post is not None:
                if not isinstance(post, list):
                    print("DATA: " + str(post))
            print("RESPONSE: " + body)

        return [header, json_decode(body)]
Project: QQBot    Author: springhack    | Project Source | File Source
def CurlPOST(url, data, cookie):
    c = pycurl.Curl()
    b = StringIO.StringIO()
    c.setopt(pycurl.URL, url)
    c.setopt(pycurl.POST, 1)
    c.setopt(pycurl.HTTPHEADER,['Content-Type: application/json'])
    # c.setopt(pycurl.TIMEOUT, 10)
    c.setopt(pycurl.WRITEFUNCTION, b.write)
    c.setopt(pycurl.COOKIEFILE, cookie)
    c.setopt(pycurl.COOKIEJAR, cookie)
    c.setopt(pycurl.POSTFIELDS, data)
    c.perform()
    html = b.getvalue()
    b.close()
    c.close()
    return html
Project: Zipatoapi    Author: ggruner    | Project Source | File Source
def get_user_login(self, token):
        '''
        Description:
         login
        '''
        self.token = token

        uri = "user/login?username="+self.login+"&token="+self.token

        api_url = self.url + uri
        c = pycurl.Curl()
        output = BytesIO()

        c.setopt(c.URL, api_url)
        ### Read the cookie File
        c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(c.WRITEFUNCTION, output.write)
        c.perform()

        return json.loads(output.getvalue())
Project: Zipatoapi    Author: ggruner    | Project Source | File Source
def get_all_virtual_endpoints(self):
        '''
        Description:
         get all virtual endpoints
        '''
        uri = "virtualEndpoints"
        api_url = self.url + uri
        c = pycurl.Curl()
        output_init = BytesIO()

        c.setopt(c.URL, api_url)
        ### Read the cookie file
        c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(c.WRITEFUNCTION, output_init.write)
        c.perform()

        return json.loads(output_init.getvalue())
Project: Zipatoapi    Author: ggruner    | Project Source | File Source
def create_virtual_endpoints(self, data, category):
        '''
        Description:
         create a virtual endpoints
         category:
          SENSOR
          METER
          GAUGE
          ONOFF
          LEVEL_CONTROL
        '''
        self.data = data
        self.category = category

        uri = "virtualEndpoints/?category=" + self.category
        api_url = self.url + uri

        c = pycurl.Curl()
        c.setopt(pycurl.URL, api_url)
        c.setopt(pycurl.HTTPHEADER, ['Accept: application/json','Content-Type: application/json','charset=UTF-8'])
        c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(pycurl.POST, 1)
        c.setopt(pycurl.POSTFIELDS, self.data)
        c.setopt(pycurl.VERBOSE, 1)
        c.perform()
Project: Zipatoapi    Author: ggruner    | Project Source | File Source
def get_virtual_endpoints_config(self, uuid):
        '''
        Description:
         get virtual endpoints config
        '''
        self.uuid = uuid

        uri = "virtualEndpoints/"+self.uuid+"/config"
        api_url = self.url + uri

        c = pycurl.Curl()
        output_init = BytesIO()

        c.setopt(c.URL, api_url)
        ### Read the cookie file
        c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(c.WRITEFUNCTION, output_init.write)
        c.perform()

        return json.loads(output_init.getvalue())
Project: Zipatoapi    Author: ggruner    | Project Source | File Source
def get_virtual_endpoint(self, uuid, network="false", device="false", clusterEndpoints="false", config="false", icons="true",
                             bindings="false", descriptor="false", room="false", info="false", full="false", attributes="false"):
        '''
        Description:
         get virtual endpoint
        '''
        self.uuid = uuid

        uri = ("virtualEndpoints/"+self.uuid+"?network="+ network +"&device=" + device + "&clusterEndpoints=" + clusterEndpoints + "&config=" + config + "&icons=" + icons + 
              "&type=false&bindings=" + bindings + "&descriptor=" + descriptor + "&room=" + room + "&info=" + info + "&full=" + full + "&attributes=" + attributes)
        api_url = self.url + uri
        c = pycurl.Curl()
        output_init = BytesIO()

        c.setopt(c.URL, api_url)
        ### Read the cookie file
        c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(c.WRITEFUNCTION, output_init.write)
        c.perform()

        return json.loads(output_init.getvalue())
Project: Zipatoapi    Author: ggruner    | Project Source | File Source
def put_attributes_config(self, data, uuid):
        '''
        Description:
         modify an attribute
        '''
        self.data = data
        self.uuid = uuid

        uri = "attributes/" + self.uuid + "/config"
        api_url = self.url + uri

        c = pycurl.Curl()
        c.setopt(pycurl.URL, api_url)
        c.setopt(pycurl.HTTPHEADER, ['Accept: application/json','Content-Type: application/json','charset=UTF-8'])
        c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(pycurl.POST, 1)
        c.setopt(pycurl.POSTFIELDS, self.data)
        c.setopt(pycurl.VERBOSE, 1)
        c.perform()
Project: Zipatoapi    Author: ggruner    | Project Source | File Source
def save_and_synchronize(self, wait="false", timeout=30):
        '''
        Description:
         synchronize Zipato with the Server
        '''
        self.wait = wait
        self.timeout = timeout

        uri = "box/saveAndSynchronize?wait=" + self.wait + "&timeout=" + str(self.timeout)

        api_url = self.url + uri

        c = pycurl.Curl()
        output_init = BytesIO()

        c.setopt(c.URL, api_url)
        ### Read the cookie file
        c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(c.WRITEFUNCTION, output_init.write)
        c.perform()
        c.close()

        return json.loads(output_init.getvalue())
Project: Zipatoapi    Author: ggruner    | Project Source | File Source
def synchronize(self, ifneeded="false", wait="false", timeout=30):
        '''
        Description:
         synchronize Zipato with the Server
        '''
        self.ifneeded = ifneeded
        self.wait = wait
        self.timeout = timeout

        uri = "box/synchronize?ifNeeded=" + self.ifneeded + "wait=" + self.wait + "&timeout=" + str(self.timeout)

        api_url = self.url + uri

        c = pycurl.Curl()
        output_init = BytesIO()

        c.setopt(c.URL, api_url)
        ### Read the cookie file
        c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(c.WRITEFUNCTION, output_init.write)
        c.perform()
        c.close()

        return json.loads(output_init.getvalue())
Project: Zipatoapi    Author: ggruner    | Project Source | File Source
def put_attributes(self, data, uuid):
        '''
        Description:
         set attribute value with application/json content
        '''
        self.data = data
        self.uuid = uuid

        uri = "attributes/" + self.uuid + "/value"
        api_url = self.url + uri

        c = pycurl.Curl()
        c.setopt(pycurl.URL, api_url)
        c.setopt(pycurl.HTTPHEADER, ['Accept: application/json','Content-Type: application/json','charset=UTF-8'])
        c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(pycurl.CUSTOMREQUEST, "PUT")
        c.setopt(pycurl.POSTFIELDS, self.data)
        c.setopt(pycurl.VERBOSE, 1)
        c.perform()
Project: recipebook    Author: dpapathanasiou    | Project Source | File Source
def get (url, user_agent=UA, referrer=None):
    """Make a GET request of the url using pycurl and return the data
    (which is None if unsuccessful)"""

    data = None
    databuffer = StringIO()

    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.CONNECTTIMEOUT, 5)
    curl.setopt(pycurl.TIMEOUT, 8)
    curl.setopt(pycurl.WRITEFUNCTION, databuffer.write)
    curl.setopt(pycurl.COOKIEFILE, '')
    if user_agent:
        curl.setopt(pycurl.USERAGENT, user_agent)
    if referrer is not None:
        curl.setopt(pycurl.REFERER, referrer)
    try:
        curl.perform()
        data = databuffer.getvalue()
    except Exception:
        pass
    curl.close()

    return data
Project: Instagram-API    Author: danleyb2    | Project Source | File Source
def request(self, endpoint, headers=None, post=None, first=True):
        buffer = BytesIO()

        ch = pycurl.Curl()

        ch.setopt(pycurl.URL, endpoint)
        ch.setopt(pycurl.USERAGENT, self.userAgent)
        ch.setopt(pycurl.WRITEFUNCTION, buffer.write)
        ch.setopt(pycurl.FOLLOWLOCATION, True)
        ch.setopt(pycurl.HEADER, True)
        if headers:
            ch.setopt(pycurl.HTTPHEADER, headers)

        ch.setopt(pycurl.VERBOSE, self.debug)
        ch.setopt(pycurl.SSL_VERIFYPEER, False)
        ch.setopt(pycurl.SSL_VERIFYHOST, False)
        ch.setopt(pycurl.COOKIEFILE, self.settingsPath + self.username + '-cookies.dat')
        ch.setopt(pycurl.COOKIEJAR, self.settingsPath + self.username + '-cookies.dat')

        if post:
            import urllib
            ch.setopt(pycurl.POST, len(post))
            ch.setopt(pycurl.POSTFIELDS, urllib.urlencode(post))

        ch.perform()
        resp = buffer.getvalue()
        header_len = ch.getinfo(pycurl.HEADER_SIZE)
        header = resp[0: header_len]
        body = resp[header_len:]
        ch.close()

        if self.debug:
            import urllib
            print("REQUEST: " + endpoint)
            if post is not None:
                if not isinstance(post, list):
                    print('DATA: ' + urllib.unquote_plus(json.dumps(post)))
            print("RESPONSE: " + body + "\n")

        return [header, json_decode(body)]
Project: QQBot    Author: springhack    | Project Source | File Source
def CurlGET(url, cookie):
    c = pycurl.Curl()
    b = StringIO.StringIO()
    c.setopt(pycurl.URL, url)
    # c.setopt(pycurl.TIMEOUT, 10)
    # c.setopt(pycurl.POST, 1)
    c.setopt(pycurl.WRITEFUNCTION, b.write)
    c.setopt(pycurl.COOKIEFILE, cookie)
    c.setopt(pycurl.COOKIEJAR, cookie)
    c.perform()
    html = b.getvalue()
    b.close()
    c.close()
    return html
Project: baidu-dropbox-sniffer    Author: cls1991    | Project Source | File Source
def get_download_link(fs_id):
    """
    Get the download link for the given file.
    :param fs_id:
    :return:
    """
    curl = pycurl.Curl()
    curl.setopt(pycurl.USERAGENT, const.USER_AGENT)
    curl.setopt(pycurl.REFERER, const.PAN_REFER_URL)

    buffers = StringIO()
    request_dict = {
        'channel': 'chunlei',
        'timestamp': '1473685224',
        'fidlist': [fs_id],
        'type': 'dlink',
        'web': 1,
        'clienttype': 0,
        'bdstoken': 'e0e895bb3ef7b0cb70899ee66b74e809',
        'sign': decode_sign(parse_sign2('d76e889b6aafd3087ac3bd56f4d4053a', '3545d271c5d07ba27355d39da0c62a4ee06d2d25'))
    }
    target_url = const.PAN_API_URL + 'download?' + urllib.urlencode(request_dict)
    curl.setopt(pycurl.URL, target_url)
    curl.setopt(pycurl.WRITEDATA, buffers)
    curl.setopt(pycurl.COOKIEFILE, "cookie.txt")
    curl.perform()
    body = buffers.getvalue()
    buffers.close()
    curl.close()
    data = json.loads(body)
    if data['errno']:
        return None

    return data['dlink'][0]['dlink']
Project: download-manager    Author: thispc    | Project Source | File Source
def setRequestContext(self, url, get, post, referer, cookies, multipart=False):
        """ sets everything needed for the request """

        url = myquote(url)

        if get:
            get = urlencode(get)
            url = "%s?%s" % (url, get)

        self.c.setopt(pycurl.URL, url)
        self.c.lastUrl = url

        if post:
            self.c.setopt(pycurl.POST, 1)
            if not multipart:
                if type(post) == unicode:
                    post = str(post) #unicode not allowed
                elif type(post) == str:
                    pass
                else:
                    post = myurlencode(post)

                self.c.setopt(pycurl.POSTFIELDS, post)
            else:
                post = [(x, y.encode('utf8') if type(y) == unicode else y ) for x, y in post.iteritems()]
                self.c.setopt(pycurl.HTTPPOST, post)
        else:
            self.c.setopt(pycurl.POST, 0)

        if referer and self.lastURL:
            self.c.setopt(pycurl.REFERER, str(self.lastURL))

        if cookies:
            self.c.setopt(pycurl.COOKIEFILE, "")
            self.c.setopt(pycurl.COOKIEJAR, "")
            self.getCookies()
Project: sogaQuant    Author: idoplay    | Project Source | File Source
def curl_get(self, url, refUrl=None):
        buf = cStringIO.StringIO()
        curl = pycurl.Curl()
        curl.setopt(curl.URL, url)
        curl.setopt(curl.WRITEFUNCTION, buf.write)
        curl.setopt(pycurl.SSL_VERIFYPEER, 0)
        #curl.setopt(pycurl.SSL_VERIFYHOST, 0)
        #curl.setopt(pycurl.HEADERFUNCTION, self.headerCookie)
        curl.setopt(pycurl.VERBOSE, 0)
        curl.setopt(pycurl.USERAGENT, 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:46.0) Gecko/20100101 Firefox/46.0')
        #curl.setopt(pycurl.HTTPGET,1)
        #curl.setopt(pycurl.COOKIE, Cookie)
        #curl.setopt(pycurl.POSTFIELDS, 'j_username={ngnms_user}&j_password={ngnms_password}'.format(**self.ngnms_login))
        curl.setopt(pycurl.COOKIEJAR, '/htdocs/logs/py_cookie.txt')
        curl.setopt(pycurl.COOKIEFILE, '/htdocs/logs/py_cookie.txt')
        if refUrl:
            curl.setopt(pycurl.REFERER, refUrl)
        #curl.setopt(c.CONNECTTIMEOUT, 5)
        #curl.setopt(c.TIMEOUT, 8)
        curl.perform()
        backinfo = ''
        if curl.getinfo(pycurl.RESPONSE_CODE) == 200:
            backinfo = buf.getvalue()
        curl.close()
        return backinfo
Project: pyload-requests    Author: pyload    | Project Source | File Source
def init_handle(self):
        """
        Sets common options to curl handle.
        """
        self.setopt(pycurl.FOLLOWLOCATION, 1)
        self.setopt(pycurl.MAXREDIRS, 5)
        self.setopt(pycurl.CONNECTTIMEOUT, 30)
        self.setopt(pycurl.NOSIGNAL, 1)
        self.setopt(pycurl.NOPROGRESS, 1)
        if hasattr(pycurl, "AUTOREFERER"):
            self.setopt(pycurl.AUTOREFERER, 1)
        self.setopt(pycurl.SSL_VERIFYPEER, 0)
        # Interval for low speed, detects connection loss, but can abort dl if
        # hoster stalls the download
        self.setopt(pycurl.LOW_SPEED_TIME, 45)
        self.setopt(pycurl.LOW_SPEED_LIMIT, 5)

        # do not save the cookies
        self.setopt(pycurl.COOKIEFILE, '')
        self.setopt(pycurl.COOKIEJAR, '')

        # self.setopt(pycurl.VERBOSE, 1)

        self.setopt(
            pycurl.USERAGENT,
            'Mozilla/5.0 (Windows NT 10.0; Win64; rv:53.0) '
            'Gecko/20100101 Firefox/53.0')
        if pycurl.version_info()[7]:
            self.setopt(pycurl.ENCODING, 'gzip,deflate')

        self.headers.update(
            {'Accept': "*/*",
             'Accept-Language': "en-US,en",
             'Accept-Charset': "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
             'Connection': "keep-alive",
             'Keep-Alive': "300",
             'Expect': ""})
Project: multiplierz    Author: BlaisProteomics    | Project Source | File Source
def __init__(self, data_file, verbose=False, **kwargs):
        self.file_type = 'mzurl'
        # strip off the final slash, if it exists
        if data_file[-1] == '/':
            data_file = data_file[:-1]
        # Likewise, html or other madness.
        if any([data_file.lower().endswith(x) for x in ['html', 'raw', 'wiff']]):
            data_file = ".".join(data_file.split(".")[:-1])
        self.data_file = data_file # actually a URL to a file
        self.verbose = verbose

        self._scans = None # cache of scan_info results for the whole file

        # A string with the name and path of an appropriate temp file
        # (varies by platform)
        fd, self.cookie_file_name = tempfile.mkstemp(text=True)
        os.close(fd)

        # Handle to libcurl object
        self.crl = pycurl.Curl()

        # set some general options
        self.crl.setopt(pycurl.COOKIEFILE, self.cookie_file_name)
        self.crl.setopt(pycurl.COOKIEJAR, self.cookie_file_name)
        self.crl.setopt(pycurl.FOLLOWLOCATION, True)
        self.crl.setopt(pycurl.VERBOSE, verbose)

        self.output = cStringIO.StringIO()
        self.crl.setopt(pycurl.WRITEFUNCTION, self.output.write)

        # how would you store an info file?
        #if os.path.exists(data_file + '.mzi'):
            #self._info_file = data_file + '.mzi'
            #info_fh = open(self._info_file)
            #self._info_scans = cPickle.load(info_fh)
            #info_fh.close()
        #else:
            #self._info_file = None
Project: taller-de-scraping    Author: mekler    | Project Source | File Source
def pideURL(url,cookie=False,cookie_name='cookie.txt', contador_curl = 0):
    time.sleep(2)

    print ("\n"+url+"\n")
    c = pycurl.Curl()
    if cookie:
        c.setopt(pycurl.COOKIEJAR, 'cookies/'+cookie_name)
        c.setopt(pycurl.COOKIEFILE, 'cookies/'+cookie_name)
    c.setopt(pycurl.URL, url)       
    c.setopt(pycurl.CONNECTTIMEOUT, 15) 
    c.setopt(pycurl.TIMEOUT, 25) 
    c.setopt(pycurl.HTTPHEADER, ['Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8' ,'Accept-Language: en-US,en;q=0.5' ,'Connection: keep-alive' ,'Content-Type: application/x-www-form-urlencoded' ,'Host: services6.arcgis.com' ,'Origin: https://sig-ruv.maps.arcgis.com' ,'Referer: https://sig-ruv.maps.arcgis.com/apps/webappviewer/index.html?id=1e3873d1c01749929457c7a7b9315cda'])
    #c.setopt(pycurl.VERBOSE, 1)

    b = BytesIO()
    c.setopt(pycurl.WRITEFUNCTION, b.write)

    try:
        c.perform()
        response_string = b.getvalue()
        #print (response_string)
        b.close()
        return response_string
    except Exception as e:
        #log ('Razon:',e)
        response_string = None
        if contador_curl<=10:
            time.sleep(5)
            return pideURL(url, cookie, cookie_name, contador_curl + 1)
        else:
            print ('Error: ',url)
            print ('Error log: ',e)
Project: taller-de-scraping    Author: mekler    | Project Source | File Source
def pideURL(self,url,compressed = False, cookie=False, contador_curl = 0):
        time.sleep(3)
        Scrape.contador+=1
        print ("\n"+url)
        print ("\n\t.l."+str(Scrape.contador))
        c = pycurl.Curl()
        if cookie:
            c.setopt(pycurl.COOKIEJAR, 'cookie.txt')
            c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(pycurl.URL, url)       
        c.setopt(pycurl.CONNECTTIMEOUT, 15) 
        c.setopt(pycurl.TIMEOUT, 25) 
        c.setopt(pycurl.HTTPHEADER, self.headers)

        c.setopt( pycurl.PROXY, '127.0.0.1' )
        c.setopt( pycurl.PROXYPORT, 9050 )
        c.setopt( pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME )

        b = BytesIO()
        c.setopt(pycurl.WRITEFUNCTION, b.write)
        self.url = url
        try:
            c.perform()
            self.response_string = b.getvalue()
            #print (self.response_string)
            b.close()
        except Exception as e:
            #self.log ('Razon:',e)

            self.response_string = None
            if contador_curl<=10:
                time.sleep(5)
                self.pideURL(url, compressed, cookie, contador_curl + 1)
            else:
                print ('Error: ',url)
                print ('Error log: ',e)
Project: taller-de-scraping    Author: mekler    | Project Source | File Source
def pidePOST(self,url,data,compressed = False,cookie=False, contador_curl = 0, debug=False):
        time.sleep(3)
        Scrape.contador+=1
        print ("\n"+url)
        print ("\n\t.l."+str(Scrape.contador))
        c = pycurl.Curl()
        if cookie:
            c.setopt(pycurl.COOKIEJAR, 'cookie.txt')
            c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(pycurl.URL, url)
        c.setopt(pycurl.CONNECTTIMEOUT, 15)
        c.setopt(pycurl.TIMEOUT, 25)
        c.setopt(pycurl.HTTPHEADER, self.headers)

        if compressed:
            c.setopt(pycurl.ENCODING, 'gzip,deflate')

        c.setopt(c.POSTFIELDS, data)

        if debug:
            c.setopt(c.VERBOSE, True)

        c.setopt( pycurl.PROXY, '127.0.0.1' )
        c.setopt( pycurl.PROXYPORT, 9050 )
        c.setopt( pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME )

        b = BytesIO()
        c.setopt(pycurl.WRITEFUNCTION, b.write)
        self.url = url
        try:
            c.perform()
            self.response_string = b.getvalue()
            #print (self.response_string)
            b.close()
        except Exception as e:
            #print ('Razon:',e)
            self.response_string = None
Project: alfred-10000ft-scripts    Author: jceelen    | Project Source | File Source
def __init__(self, base_url="", fakeheaders=[]):
        self.handle = pycurl.Curl()
        # These members might be set.
        self.set_url(base_url)
        self.verbosity = 0
        self.fakeheaders = fakeheaders
        # Nothing past here should be modified by the caller.
        self.payload = None
        self.payload_io = BytesIO()
        self.hdr = ""
        # Verify that we've got the right site; harmless on a non-SSL connect.
        self.set_option(pycurl.SSL_VERIFYHOST, 2)
        # Follow redirects in case it wants to take us to a CGI...
        self.set_option(pycurl.FOLLOWLOCATION, 1)
        self.set_option(pycurl.MAXREDIRS, 5)
        self.set_option(pycurl.NOSIGNAL, 1)
        # Setting this option with even a nonexistent file makes libcurl
        # handle cookie capture and playback automatically.
        self.set_option(pycurl.COOKIEFILE, "/dev/null")
        # Set timeouts to avoid hanging too long
        self.set_timeout(30)
        # Use password identification from .netrc automatically
        self.set_option(pycurl.NETRC, 1)
        self.set_option(pycurl.WRITEFUNCTION, self.payload_io.write)
        def header_callback(x):
            self.hdr += x.decode('ascii')
        self.set_option(pycurl.HEADERFUNCTION, header_callback)
Project: baidu-dropbox-sniffer    Author: cls1991    | Project Source | File Source
def list_dir(dir_name):
    """
    List the contents of the given directory.
    :param dir_name: directory name
    :return:
    """
    result = list()
    curl = pycurl.Curl()
    curl.setopt(pycurl.USERAGENT, const.USER_AGENT)
    curl.setopt(pycurl.REFERER, const.PAN_REFER_URL)

    buffers = StringIO()
    request_dict = {
        'channel': 'chunlei',
        'clienttype': 0,
        'showempty': 0,
        'web': 1,
        'order': 'time',
        'desc': 1,
        'page': 1,
        'num': 100,
        'dir': dir_name,
        'bdstoken': 'e0e895bb3ef7b0cb70899ee66b74e809'
    }
    target_url = const.PAN_API_URL + 'list?' + urllib.urlencode(request_dict)
    curl.setopt(pycurl.URL, target_url)
    curl.setopt(pycurl.WRITEDATA, buffers)
    curl.setopt(pycurl.COOKIEFILE, "cookie.txt")
    curl.perform()
    body = buffers.getvalue()
    print body
    buffers.close()
    curl.close()
    data = json.loads(body)
    if data['errno'] == 0:
        for a_list in data['list']:
            dlink = get_download_link(a_list['fs_id'])
            if dlink:
                dlink = dlink.replace('\\', '')
                result.append(dlink)

    return result