Python pycurl module: PROXYTYPE code examples

The following 11 code examples, extracted from open-source Python projects, illustrate how to use pycurl.PROXYTYPE.
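For orientation, here is a minimal, self-contained sketch (not taken from any of the projects below; the target URL and the proxy address 127.0.0.1:8080 are placeholders) showing how PROXYTYPE selects the proxy protocol once PROXY and PROXYPORT are set:

import pycurl
from io import BytesIO

buf = BytesIO()
c = pycurl.Curl()
c.setopt(pycurl.URL, "http://example.com/")   # placeholder target URL
c.setopt(pycurl.PROXY, "127.0.0.1")           # placeholder proxy host
c.setopt(pycurl.PROXYPORT, 8080)              # placeholder proxy port
# Other values include PROXYTYPE_SOCKS4, PROXYTYPE_SOCKS5 and
# PROXYTYPE_SOCKS5_HOSTNAME; HTTP is libcurl's default.
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_HTTP)
c.setopt(pycurl.WRITEFUNCTION, buf.write)
c.perform()
c.close()
print(buf.getvalue()[:200])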

Project: wfuzz | Author: gwen001
def _set_proxy(self, c, freq):
    ip, port, ptype = self._proxies.next()

    freq.wf_proxy = (("%s:%s" % (ip, port)), ptype)

    c.setopt(pycurl.PROXY, "%s:%s" % (ip, port))
    if ptype == "SOCKS5":
        c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
    elif ptype == "SOCKS4":
        c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
    elif ptype == "HTML":
        # "HTML" selects a plain HTTP proxy; pycurl's default PROXYTYPE is
        # HTTP, so no option needs to be set here.
        pass
    else:
        raise FuzzException(FuzzException.FATAL, "Bad proxy type specified, correct values are HTML, SOCKS4 or SOCKS5.")

    return c
Project: defcon-workshop | Author: devsecops
def _set_proxy(self, c, freq):
    ip, port, ptype = self._proxies.next()

    freq.wf_proxy = (("%s:%s" % (ip, port)), ptype)

    c.setopt(pycurl.PROXY, "%s:%s" % (ip, port))
    if ptype == "SOCKS5":
        c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
    elif ptype == "SOCKS4":
        c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
    elif ptype == "HTML":
        pass
    else:
        raise FuzzException(FuzzException.FATAL, "Bad proxy type specified, correct values are HTML, SOCKS4 or SOCKS5.")

    return c
Project: download-manager | Author: thispc
def setInterface(self, options):

        interface, proxy, ipv6 = options["interface"], options["proxies"], options["ipv6"]

        if interface and interface.lower() != "none":
            self.c.setopt(pycurl.INTERFACE, str(interface))

        if proxy:
            if proxy["type"] == "socks4":
                self.c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
            elif proxy["type"] == "socks5":
                self.c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
            else:
                self.c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_HTTP)

            self.c.setopt(pycurl.PROXY, str(proxy["address"]))
            self.c.setopt(pycurl.PROXYPORT, proxy["port"])

            if proxy["username"]:
                self.c.setopt(pycurl.PROXYUSERPWD, str("%s:%s" % (proxy["username"], proxy["password"])))

        if ipv6:
            self.c.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
        else:
            self.c.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)

        if "auth" in options:
            self.c.setopt(pycurl.USERPWD, str(options["auth"]))

        if "timeout" in options:
            self.c.setopt(pycurl.LOW_SPEED_TIME, options["timeout"])
Project: pyload-requests | Author: pyload
def set_interface(self, options):
        interface, proxy, ipv6 = options[
            'interface'], options['proxies'], options['ipv6']

        if interface and interface.lower() != "none":
            self.setopt(pycurl.INTERFACE, interface)

        if proxy:
            if proxy['type'] == "socks4":
                self.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
            elif proxy['type'] == "socks5":
                self.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
            else:
                self.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_HTTP)

            self.setopt(pycurl.PROXY, proxy['host'])
            self.setopt(pycurl.PROXYPORT, proxy['port'])

            if proxy['username']:
                userpwd = "{0}:{1}".format(
                    proxy['username'], proxy['password'])
                self.setopt(pycurl.PROXYUSERPWD, userpwd)

        if ipv6:
            self.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
        else:
            self.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)

        if "timeout" in options:
            self.setopt(pycurl.LOW_SPEED_TIME, options['timeout'])

        if "auth" in options:
            self.setopt(pycurl.USERPWD, self.options['auth'])
Project: smart-realestate | Author: stevensshi
def query(url):

  output = io.BytesIO()

  query = pycurl.Curl()
  query.setopt(pycurl.URL, url)
  query.setopt(pycurl.HTTPHEADER, getHeaders())
  query.setopt(pycurl.PROXY, 'localhost')
  query.setopt(pycurl.PROXYPORT, SOCKS_PORT)
  query.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
  query.setopt(pycurl.WRITEFUNCTION, output.write)

  through = False
  while not through:
    try:
      query.perform()
      http_code = query.getinfo(pycurl.HTTP_CODE)
      if http_code == 200:
        through = True
      else:
        # renew tor to retry
        print 'error httpcode:' +str(http_code)
        renew_tor()
        # time.sleep(3)
    except pycurl.error as exc:
      print "pycurl error in tor.py %s" % exc
      # return "Unable to reach %s (%s)" % (url, exc)

  return output.getvalue()
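The query() example above routes requests through a local Tor SOCKS port. PROXYTYPE_SOCKS5_HOSTNAME differs from PROXYTYPE_SOCKS5 in that the proxy, not the client, resolves the hostname, so DNS lookups also go through Tor. A minimal sketch of just that proxy setup, assuming Tor is listening on its default SOCKS port 9050 (the target URL is a placeholder):

import pycurl

c = pycurl.Curl()
c.setopt(pycurl.URL, "http://example.com/")   # placeholder target URL
c.setopt(pycurl.PROXY, "127.0.0.1")
c.setopt(pycurl.PROXYPORT, 9050)              # Tor's default SOCKS port
# Let the proxy resolve the hostname so DNS queries are not leaked locally.
c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)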
Project: trough | Author: internetarchive
def _do_write(self, query):
        # If self._write_url is not yet set, first send a provision query to
        # the sync master and record the write URL it returns; then send the
        # query to that URL and check the response.
        rethinker = doublethink.Rethinker(db="trough_configuration", servers=self.rethinkdb)
        services = doublethink.ServiceRegistry(rethinker)
        master_node = services.unique_service('trough-sync-master')
        logging.info('master_node=%r', master_node)
        if not master_node:
            raise Exception('no healthy trough-sync-master in service registry')
        if not self._write_url:
            buffer = BytesIO()
            c = pycurl.Curl()
            c.setopt(c.URL, master_node.get('url'))
            c.setopt(c.POSTFIELDS, self.database)
            if self.proxy:
                c.setopt(pycurl.PROXY, self.proxy)
                c.setopt(pycurl.PROXYPORT, int(self.proxy_port))
                c.setopt(pycurl.PROXYTYPE, self.proxy_type)
            c.setopt(c.WRITEDATA, buffer)
            c.perform()
            c.close()
            self._write_url = buffer.getvalue()
            logging.info('self._write_url=%r', self._write_url)
        buffer = BytesIO()
        c = pycurl.Curl()
        c.setopt(c.URL, self._write_url)
        c.setopt(c.POSTFIELDS, query)
        if self.proxy:
            c.setopt(pycurl.PROXY, self.proxy)
            c.setopt(pycurl.PROXYPORT, int(self.proxy_port))
            c.setopt(pycurl.PROXYTYPE, self.proxy_type)
        c.setopt(c.WRITEDATA, buffer)
        c.perform()
        c.close()
        response = buffer.getvalue()
        if response.strip() != b'OK':
            raise Exception('Trough Query Failed: Database: %r Response: %r Query: %.200r' % (self.database, response, query))
        self._last_results = None
Project: taller-de-scraping | Author: mekler
def pideURL(self,url,compressed = False, cookie=False, contador_curl = 0):
        time.sleep(3)
        Scrape.contador+=1
        print ("\n"+url)
        print ("\n\t.l."+str(Scrape.contador))
        c = pycurl.Curl()
        if cookie:
            c.setopt(pycurl.COOKIEJAR, 'cookie.txt')
            c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(pycurl.URL, url)       
        c.setopt(pycurl.CONNECTTIMEOUT, 15) 
        c.setopt(pycurl.TIMEOUT, 25) 
        c.setopt(pycurl.HTTPHEADER, self.headers)

        c.setopt( pycurl.PROXY, '127.0.0.1' )
        c.setopt( pycurl.PROXYPORT, 9050 )
        c.setopt( pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME )

        b = BytesIO()
        c.setopt(pycurl.WRITEFUNCTION, b.write)
        self.url = url
        try:
            c.perform()
            self.response_string = b.getvalue()
            #print (self.response_string)
            b.close()
        except Exception as e:
            # self.log('Reason:', e)
            self.response_string = None
            if contador_curl <= 10:
                time.sleep(5)
                # retry; pass the counter by keyword so it is not mistaken
                # for the `compressed` argument
                self.pideURL(url, contador_curl=contador_curl + 1)
            else:
                print('Error: ', url)
                print('Error log: ', e)
Project: taller-de-scraping | Author: mekler
def pidePOST(self,url,data,compressed = False,cookie=False, contador_curl = 0, debug=False):
        time.sleep(3)
        Scrape.contador+=1
        print ("\n"+url)
        print ("\n\t.l."+str(Scrape.contador))
        c = pycurl.Curl()
        if cookie:
            c.setopt(pycurl.COOKIEJAR, 'cookie.txt')
            c.setopt(pycurl.COOKIEFILE, 'cookie.txt')
        c.setopt(pycurl.URL, url)
        c.setopt(pycurl.CONNECTTIMEOUT, 15)
        c.setopt(pycurl.TIMEOUT, 25)
        c.setopt(pycurl.HTTPHEADER, self.headers)

        if compressed:
            c.setopt(pycurl.ENCODING, 'gzip,deflate')

        c.setopt(c.POSTFIELDS, data)

        if debug:
            c.setopt(c.VERBOSE, True)

        c.setopt( pycurl.PROXY, '127.0.0.1' )
        c.setopt( pycurl.PROXYPORT, 9050 )
        c.setopt( pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME )

        b = BytesIO()
        c.setopt(pycurl.WRITEFUNCTION, b.write)
        self.url = url
        try:
            c.perform()
            self.response_string = b.getvalue()
            #print (self.response_string)
            b.close()
        except Exception as e:
            # print('Reason:', e)
            self.response_string = None
Project: pathspider | Author: mami-project
def connect_tor_http(controller,
                     circuit_path,
                     job,
                     conn_timeout,
                     curlopts=None,
                     curlinfos=None):
    """
    This helper function will perform an HTTP request over Tor. It will not
    perform any special action in the event that this is the experimental flow,
    but can be customised on a per-call basis through the curlopts argument.
    """

    if curlopts is None:
        curlopts = {}

    curlopts[pycurl.PROXY] = "localhost"
    curlopts[pycurl.PROXYPORT] = 9050
    curlopts[pycurl.PROXYTYPE] = pycurl.PROXYTYPE_SOCKS5_HOSTNAME

    attach_error = []

    try:
        if circuit_path is not None:
            circuit_path = circuit_path.split(",")
        circuit_id = controller.new_circuit(circuit_path, await_build=True)
    except stem.CircuitExtensionFailed:
        return {"spdr_state": CONN_DISCARD}

    def attach_stream(stream):
        try:
            if stream.status == 'NEW':
                if (stream.target_address == job['dip'] and
                        stream.target_port == job['dp']):
                    controller.attach_stream(stream.id, circuit_id)
        except stem.OperationFailed:
            attach_error.append(None)

    controller.add_event_listener(attach_stream, stem.control.EventType.STREAM) # pylint: disable=no-member
    result = connect_http(None, job, conn_timeout, curlopts, curlinfos)
    controller.remove_event_listener(attach_stream)

    if len(attach_error) > 0:
        return {"spdr_state": CONN_DISCARD}

    return result
Project: defcon-workshop | Author: devsecops
def to_pycurl_object(c, req):

    c.setopt(pycurl.MAXREDIRS, 5)

    c.setopt(pycurl.WRITEFUNCTION, req.body_callback)
    c.setopt(pycurl.HEADERFUNCTION, req.header_callback)

    c.setopt(pycurl.NOSIGNAL, 1)
    c.setopt(pycurl.SSL_VERIFYPEER, False)
    c.setopt(pycurl.SSL_VERIFYHOST, 0)

    c.setopt(pycurl.URL, req.completeUrl)

    if req.getConnTimeout():
        c.setopt(pycurl.CONNECTTIMEOUT, req.getConnTimeout())

    if req.getTotalTimeout():
        c.setopt(pycurl.TIMEOUT, req.getTotalTimeout())

    authMethod, userpass = req.getAuth()
    if authMethod or userpass:
        if authMethod == "basic":
            c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
        elif authMethod == "ntlm":
            c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_NTLM)
        elif authMethod == "digest":
            c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
        c.setopt(pycurl.USERPWD, userpass)

    c.setopt(pycurl.HTTPHEADER, req.getHeaders())
    if req.method == "POST":
        c.setopt(pycurl.POSTFIELDS, req.postdata)

    if req.method != "GET" and req.method != "POST":
        c.setopt(pycurl.CUSTOMREQUEST, req.method)
    if req.method == "HEAD":
        c.setopt(pycurl.NOBODY, True)

    if req.followLocation:
        c.setopt(pycurl.FOLLOWLOCATION, 1)

    proxy = req.getProxy()
    if proxy is not None:
        c.setopt(pycurl.PROXY, proxy)
        if req.proxytype == "SOCKS5":
            c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
        elif req.proxytype == "SOCKS4":
            c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
        req.delHeader("Proxy-Connection")

    return c
Project: wfuzz | Author: gwen001
def to_pycurl_object(c, req):

    c.setopt(pycurl.MAXREDIRS, 5)

    c.setopt(pycurl.WRITEFUNCTION, req.body_callback)
    c.setopt(pycurl.HEADERFUNCTION, req.header_callback)

    c.setopt(pycurl.NOSIGNAL, 1)
    c.setopt(pycurl.SSL_VERIFYPEER, False)
    c.setopt(pycurl.SSL_VERIFYHOST, 0)

    c.setopt(pycurl.URL, req.completeUrl)

    if req.getConnTimeout():
        c.setopt(pycurl.CONNECTTIMEOUT, req.getConnTimeout())

    if req.getTotalTimeout():
        c.setopt(pycurl.TIMEOUT, req.getTotalTimeout())

    authMethod, userpass = req.getAuth()
    if authMethod or userpass:
        if authMethod == "basic":
            c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
        elif authMethod == "ntlm":
            c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_NTLM)
        elif authMethod == "digest":
            c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
        c.setopt(pycurl.USERPWD, userpass)

    c.setopt(pycurl.HTTPHEADER, req.getHeaders())
    if req.method == "POST":
        c.setopt(pycurl.POSTFIELDS, req.postdata)

    if req.method != "GET" and req.method != "POST":
        c.setopt(pycurl.CUSTOMREQUEST, req.method)
    if req.method == "HEAD":
        c.setopt(pycurl.NOBODY, True)

    if req.followLocation:
        c.setopt(pycurl.FOLLOWLOCATION, 1)

    proxy = req.getProxy()
    if proxy is not None:
        c.setopt(pycurl.PROXY, proxy)
        if req.proxytype == "SOCKS5":
            c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
        elif req.proxytype == "SOCKS4":
            c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
        req.delHeader("Proxy-Connection")

    return c