Python mechanize module: Request() code examples

We have extracted the following 50 code examples from open-source Python projects to illustrate how to use mechanize.Request().

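Before the per-project snippets, here is a minimal sketch of how mechanize.Request() is typically constructed and opened, mirroring the calls that appear throughout this page; example.com, the header values, and the form data below are placeholders rather than code from any of the listed projects:

import mechanize

# A GET request: just a URL plus optional headers
req = mechanize.Request("http://example.com/", headers={"Accept": "application/json"})
req.add_header("User-Agent", "example-agent")  # headers can also be added afterwards

# As in the urllib2 API, supplying a data argument turns the request into a POST
post_req = mechanize.Request(url="http://example.com/api",
                             data=b"spam=eggs",
                             headers={"Content-Type": "application/x-www-form-urlencoded"})

# A Request can be passed anywhere a URL string is accepted, e.g. Browser.open()
browser = mechanize.Browser()
response = browser.open(req)
print(response.read(2048))
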
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_open_novisit(self):
        def test_state(br):
            self.assert_(br.request is None)
            self.assert_(br.response() is None)
            self.assertRaises(mechanize.BrowserStateError, br.back)
        test_state(self.browser)
        uri = urljoin(self.uri, "test_fixtures")
        # note this involves a redirect, which should itself be non-visiting
        r = self.browser.open_novisit(uri)
        test_state(self.browser)
        self.assert_("GeneralFAQ.html" in r.read(2048))

        # Request argument instead of URL
        r = self.browser.open_novisit(mechanize.Request(uri))
        test_state(self.browser)
        self.assert_("GeneralFAQ.html" in r.read(2048))
Project: mechanize    Author: python-mechanize    | Project source | File source
def parse_file_ex(file,
                  base_uri,
                  select_default=False,
                  request_class=mechanize.Request,
                  encoding=None,
                  backwards_compat=False,
                  add_global=True):
    raw = file.read()
    root = content_parser(raw, transport_encoding=encoding)
    forms, global_form = _form.parse_forms(
        root,
        base_uri,
        select_default=select_default,
        request_class=request_class)
    if not add_global:
        return list(forms)
    return [global_form] + list(forms)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_request_host_lc(self):
        from mechanize._clientcookie import request_host_lc
        # this request is illegal (RFC2616, 14.2.3)
        req = Request("http://1.1.1.1/", headers={"Host": "www.acme.com:80"})
        # libwww-perl wants this response, but that seems wrong (RFC 2616,
        # section 5.2, point 1., and RFC 2965 section 1, paragraph 3)
        #assert request_host_lc(req) == "www.acme.com"
        assert request_host_lc(req) == "1.1.1.1"
        req = Request(
            "http://www.acme.com/", headers={"Host": "irrelevant.com"})
        assert request_host_lc(req) == "www.acme.com"
        # not actually sure this one is valid Request object, so maybe should
        # remove test for no host in url in request_host_lc function?
        req = Request("/resource.html", headers={"Host": "www.acme.com"})
        assert request_host_lc(req) == "www.acme.com"
        # port shouldn't be in request-host
        req = Request(
            "http://www.acme.com:2345/resource.html",
            headers={"Host": "www.acme.com:5432"})
        assert request_host_lc(req) == "www.acme.com"
        # the _lc function lower-cases the result
        req = Request("http://EXAMPLE.com")
        assert request_host_lc(req) == "example.com"
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_netscape_misc(self):
        # Some additional Netscape cookies tests.
        from mechanize import CookieJar, Request

        c = CookieJar()
        headers = []
        req = Request("http://foo.bar.acme.com/foo")

        # Netscape allows a host part that contains dots
        headers.append("Set-Cookie: Customer=WILE_E_COYOTE; domain=.acme.com")
        res = FakeResponse(headers, "http://www.acme.com/foo")
        c.extract_cookies(res, req)

        # and that the domain is the same as the host without adding a leading
        # dot to the domain.  Should not quote even if strange chars are used
        # in the cookie value.
        headers.append("Set-Cookie: PART_NUMBER=3,4; domain=foo.bar.acme.com")
        res = FakeResponse(headers, "http://www.acme.com/foo")
        c.extract_cookies(res, req)

        req = Request("http://foo.bar.acme.com/foo")
        c.add_cookie_header(req)
        assert (req.get_header("Cookie").find("PART_NUMBER=3,4") != -1 and
                req.get_header("Cookie").find("Customer=WILE_E_COYOTE") != -1)
Project: mechanize    Author: python-mechanize    | Project source | File source
def handle(self, fn_name, action, *args, **kwds):
        self.parent.calls.append((self, fn_name, args, kwds))
        if action is None:
            return None
        elif action == "return self":
            return self
        elif action == "return response":
            res = MockResponse(200, "OK", {}, "")
            return res
        elif action == "return request":
            return Request("http://blah/")
        elif action.startswith("error"):
            code = action[action.rfind(" ") + 1:]
            try:
                code = int(code)
            except ValueError:
                pass
            res = MockResponse(200, "OK", {}, "")
            return self.parent.error("http", args[0], res, code, "", {})
        elif action == "raise":
            raise mechanize.URLError("blah")
        assert False
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_raise(self):
        # raising URLError stops processing of request
        o = OpenerDirector()
        meth_spec = [
            [("http_open", "raise")],
            [("http_open", "return self")],
        ]
        handlers = add_ordered_mock_handlers(o, meth_spec)

        req = Request("http://example.com/")
        self.assertRaises(mechanize.URLError, o.open, req)
        self.assertEqual(o.calls, [(handlers[0], "http_open", (req, ), {})])

# def test_error(self):
# XXX this doesn't actually seem to be used in standard library,
# but should really be tested anyway...
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_http_error(self):
        # XXX http_error_default
        # http errors are a special case
        o = OpenerDirector()
        meth_spec = [
            [("http_open", "error 302")],
            [("http_error_400", "raise"), "http_open"],
            [("http_error_302", "return response"), "http_error_303",
             "http_error"],
            [("http_error_302")],
        ]
        handlers = add_ordered_mock_handlers(o, meth_spec)

        req = Request("http://example.com/")
        r = o.open(req)
        assert len(o.calls) == 2
        calls = [(handlers[0], "http_open", (req, )), (
            handlers[2], "http_error_302", (req, AlwaysEqual(), 302, "", {}))]
        for expected, got in zip(calls, o.calls):
            handler, method_name, args = expected
            self.assertEqual((handler, method_name), got[:2])
            self.assertEqual(args, got[2])
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_errors(self):
        h = HTTPErrorProcessor()
        o = h.parent = MockOpener()

        req = Request("http://example.com")
        # all 2xx are passed through
        r = mechanize._response.test_response()
        newr = h.http_response(req, r)
        self.assertTrue(r is newr)
        self.assertTrue(not hasattr(o, "proto"))  # o.error not called
        r = mechanize._response.test_response(code=202, msg="Accepted")
        newr = h.http_response(req, r)
        self.assertTrue(r is newr)
        self.assertTrue(not hasattr(o, "proto"))  # o.error not called
        r = mechanize._response.test_response(code=206, msg="Partial content")
        newr = h.http_response(req, r)
        self.assertTrue(r is newr)
        self.assertTrue(not hasattr(o, "proto"))  # o.error not called
        # anything else calls o.error (and MockOpener returns None, here)
        r = mechanize._response.test_response(code=502, msg="Bad gateway")
        self.assertTrue(h.http_response(req, r) is None)
        self.assertEqual(o.proto, "http")  # o.error called
        self.assertEqual(o.args, (req, r, 502, "Bad gateway", AlwaysEqual()))
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_referer(self):
        h = HTTPRefererProcessor()
        o = h.parent = MockOpener()

        # normal case
        url = "http://example.com/"
        req = Request(url)
        r = MockResponse(200, "OK", {}, "", url)
        newr = h.http_response(req, r)
        self.assert_(r is newr)
        self.assert_(h.referer == url)
        newreq = h.http_request(req)
        self.assert_(req is newreq)
        self.assert_(req.unredirected_hdrs["Referer"] == url)
        # don't clobber existing Referer
        ref = "http://set.by.user.com/"
        req.add_unredirected_header("Referer", ref)
        newreq = h.http_request(req)
        self.assert_(req is newreq)
        self.assert_(req.unredirected_hdrs["Referer"] == ref)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_http_equiv(self):
        h = HTTPEquivProcessor()
        o = h.parent = MockOpener()

        data = ('<html><head>'
                '<meta http-equiv="Refresh" content="spam&amp;eggs">'
                '</head></html>')
        headers = [
            ("Foo", "Bar"),
            ("Content-type", "text/html"),
            ("Refresh", "blah"),
        ]
        url = "http://example.com/"
        req = Request(url)
        r = mechanize._response.make_response(data, headers, url, 200, "OK")
        newr = h.http_response(req, r)

        new_headers = newr.info()
        self.assertEqual(new_headers["Foo"], "Bar")
        self.assertEqual(new_headers["Refresh"], "spam&eggs")
        self.assertEqual(
            new_headers.getheaders("Refresh"), ["blah", "spam&eggs"])
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_redirect_bad_uri(self):
        # bad URIs should be cleaned up before redirection
        from mechanize._response import test_html_response
        from_url = "http://example.com/a.html"
        bad_to_url = "http://example.com/b. |html"
        good_to_url = "http://example.com/b.%20%7Chtml"

        h = HTTPRedirectHandler()
        o = h.parent = MockOpener()

        req = Request(from_url)
        h.http_error_302(
            req,
            test_html_response(),
            302,
            "Blah",
            http_message({
                "location": bad_to_url
            }), )
        self.assertEqual(o.req.get_full_url(), good_to_url)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_refresh_bad_uri(self):
        # bad URIs should be cleaned up before redirection
        from mechanize._response import test_html_response
        from_url = "http://example.com/a.html"
        bad_to_url = "http://example.com/b. |html"
        good_to_url = "http://example.com/b.%20%7Chtml"

        h = HTTPRefreshProcessor(max_time=None, honor_time=False)
        o = h.parent = MockOpener()

        req = Request("http://example.com/")
        r = test_html_response(
            headers=[("refresh", '0; url="%s"' % bad_to_url)])
        newr = h.http_response(req, r)
        headers = o.args[-1]
        self.assertEqual(headers["Location"], good_to_url)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_proxy(self):
        o = OpenerDirector()
        ph = mechanize.ProxyHandler(dict(http="proxy.example.com:3128"))
        o.add_handler(ph)
        meth_spec = [[("http_open", "return response")]]
        handlers = add_ordered_mock_handlers(o, meth_spec)

        o._maybe_reindex_handlers()

        req = Request("http://acme.example.com/")
        self.assertEqual(req.get_host(), "acme.example.com")
        r = o.open(req)
        self.assertEqual(req.get_host(), "proxy.example.com:3128")

        self.assertEqual([(handlers[0], "http_open")],
                         [tup[0:2] for tup in o.calls])
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_proxy_https_proxy_authorization(self):
        o = OpenerDirector()
        ph = mechanize.ProxyHandler(dict(https='proxy.example.com:3128'))
        o.add_handler(ph)
        https_handler = MockHTTPSHandler()
        o.add_handler(https_handler)
        req = Request("https://www.example.com/")
        req.add_header("Proxy-Authorization", "FooBar")
        req.add_header("User-Agent", "Grail")
        self.assertEqual(req.get_host(), "www.example.com")
        self.assertIsNone(req._tunnel_host)
        r = o.open(req)
        # Verify Proxy-Authorization gets tunneled to request.
        # httpsconn req_headers do not have the Proxy-Authorization header but
        # the req will have.
        self.assertFalse(("Proxy-Authorization",
                          "FooBar") in https_handler.httpconn.req_headers)
        self.assertTrue(
            ("User-Agent", "Grail") in https_handler.httpconn.req_headers)
        self.assertIsNotNone(req._tunnel_host)
        self.assertEqual(req.get_host(), "proxy.example.com:3128")
        self.assertEqual(req.get_header("Proxy-authorization"), "FooBar")
Project: qxf2-page-object-model    Author: qxf2    | Project source | File source
def get(self, url, headers={}):
        "Mechanize Get request"
        browser = self.get_browser()
        request_headers = []
        response = {}
        error = {}
        for key, value in headers.iteritems():
            request_headers.append((key, value))
            browser.addheaders = request_headers
        try:
            response = browser.open(mechanize.Request(url))
            response = json.loads(response.read())
        except (mechanize.HTTPError, mechanize.URLError) as e:
            error = e
            if isinstance(e, mechanize.HTTPError):
                error_message = e.read()
                print("\n******\nGET Error: %s %s" %
                      (url, error_message))
            else:
                print(e.reason.args)
            # bubble error back up after printing relevant details
            raise e

        return {'response': response, 'error': error}
Project: qxf2-page-object-model    Author: qxf2    | Project source | File source
def post(self, url, data=None, headers={}):
        "Mechanize Post request"
        browser = self.get_browser()
        response = {}
        error = {}
        try:
            response = browser.open(mechanize.Request(
                url=url, data=data, headers=headers))
        except (mechanize.HTTPError, mechanize.URLError) as e:
            error = e
            if isinstance(e, mechanize.HTTPError):
                error_message = e.read()
                print("\n******\nPOST Error: %s %s %s" %
                      (url, error_message, str(data)))
            else:
                print(e.reason.args)
            # bubble error back up after printing relevant details
            raise e

        return {'response': response, 'error': error}
Project: pelisalacarta-ce    Author: pelisalacarta-ce    | Project source | File source
def add_cookie_header(self, request):
        """Add correct Cookie: header to request (mechanize.Request object).

        The Cookie2 header is also added unless policy.hide_cookie2 is true.

        The request object (usually a mechanize.Request instance) must support
        the methods get_full_url, get_host, is_unverifiable, get_type,
        has_header, get_header, header_items and add_unredirected_header, as
        documented by urllib2.
        """
        debug("add_cookie_header")
        cookies = self.cookies_for_request(request)

        attrs = self._cookie_attrs(cookies)
        if attrs:
            if not request.has_header("Cookie"):
                request.add_unredirected_header("Cookie", "; ".join(attrs))

        # if necessary, advertise that we know RFC 2965
        if self._policy.rfc2965 and not self._policy.hide_cookie2:
            for cookie in cookies:
                if cookie.version != 1 and not request.has_header("Cookie2"):
                    request.add_unredirected_header("Cookie2", '$Version="1"')
                    break

        self.clear_expired_cookies()
Project: pelisalacarta-ce    Author: pelisalacarta-ce    | Project source | File source
def extract_cookies(self, response, request):
        """Extract cookies from response, where allowable given the request.

        Look for allowable Set-Cookie: and Set-Cookie2: headers in the response
        object passed as argument.  Any of these headers that are found are
        used to update the state of the object (subject to the policy.set_ok
        method's approval).

        The response object (usually the result of a call to
        mechanize.urlopen, or similar) should support an info method, which
        returns a mimetools.Message object (in fact, the 'mimetools.Message
        object' may be any object that provides a getheaders method).

        The request object (usually a mechanize.Request instance) must support
        the methods get_full_url, get_type, get_host, and is_unverifiable, as
        documented by mechanize, and the port attribute (the port number).  The
        request is used to set default values for cookie-attributes as well as
        for checking that the cookie is OK to be set.

        """
        debug("extract_cookies: %s", response.info())
        self._policy._now = self._now = int(time.time())

        for cookie in self._make_cookies(response, request):
            if cookie.expires is not None and cookie.expires <= self._now:
                # Expiry date in past is request to delete cookie.  This can't be
                # in DefaultCookiePolicy, because can't delete cookies there.
                try:
                    self.clear(cookie.domain, cookie.path, cookie.name)
                except KeyError:
                    pass
                debug("Expiring cookie, domain='%s', path='%s', name='%s'",
                      cookie.domain, cookie.path, cookie.name)
            elif self._policy.set_ok(cookie, request):
                debug(" setting cookie: %s", cookie)
                self.set_cookie(cookie)
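
The two CookieJar methods shown above are normally used as a pair around a mechanize.Request. A rough usage sketch follows; the example.com URLs are placeholders, and the sketch assumes a reachable server (mechanize.urlopen is the convenience opener the docstring refers to):

import mechanize

cj = mechanize.CookieJar()
req = mechanize.Request("http://example.com/")
response = mechanize.urlopen(req)   # any response object with an info() method works
cj.extract_cookies(response, req)   # record allowable Set-Cookie / Set-Cookie2 headers

next_req = mechanize.Request("http://example.com/other")
cj.add_cookie_header(next_req)      # replay matching cookies on a follow-up request
if next_req.has_header("Cookie"):
    print(next_req.get_header("Cookie"))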
Project: plugin.video.streamondemand-pureita    Author: orione7    | Project source | File source
def add_cookie_header(self, request):
        """Add correct Cookie: header to request (mechanize.Request object).

        The Cookie2 header is also added unless policy.hide_cookie2 is true.

        The request object (usually a mechanize.Request instance) must support
        the methods get_full_url, get_host, is_unverifiable, get_type,
        has_header, get_header, header_items and add_unredirected_header, as
        documented by urllib2.
        """
        debug("add_cookie_header")
        cookies = self.cookies_for_request(request)

        attrs = self._cookie_attrs(cookies)
        if attrs:
            if not request.has_header("Cookie"):
                request.add_unredirected_header("Cookie", "; ".join(attrs))

        # if necessary, advertise that we know RFC 2965
        if self._policy.rfc2965 and not self._policy.hide_cookie2:
            for cookie in cookies:
                if cookie.version != 1 and not request.has_header("Cookie2"):
                    request.add_unredirected_header("Cookie2", '$Version="1"')
                    break

        self.clear_expired_cookies()
Project: plugin.video.streamondemand-pureita    Author: orione7    | Project source | File source
def extract_cookies(self, response, request):
        """Extract cookies from response, where allowable given the request.

        Look for allowable Set-Cookie: and Set-Cookie2: headers in the response
        object passed as argument.  Any of these headers that are found are
        used to update the state of the object (subject to the policy.set_ok
        method's approval).

        The response object (usually the result of a call to
        mechanize.urlopen, or similar) should support an info method, which
        returns a mimetools.Message object (in fact, the 'mimetools.Message
        object' may be any object that provides a getheaders method).

        The request object (usually a mechanize.Request instance) must support
        the methods get_full_url, get_type, get_host, and is_unverifiable, as
        documented by mechanize, and the port attribute (the port number).  The
        request is used to set default values for cookie-attributes as well as
        for checking that the cookie is OK to be set.

        """
        debug("extract_cookies: %s", response.info())
        self._policy._now = self._now = int(time.time())

        for cookie in self._make_cookies(response, request):
            if cookie.expires is not None and cookie.expires <= self._now:
                # Expiry date in past is request to delete cookie.  This can't be
                # in DefaultCookiePolicy, because can't delete cookies there.
                try:
                    self.clear(cookie.domain, cookie.path, cookie.name)
                except KeyError:
                    pass
                debug("Expiring cookie, domain='%s', path='%s', name='%s'",
                      cookie.domain, cookie.path, cookie.name)
            elif self._policy.set_ok(cookie, request):
                debug(" setting cookie: %s", cookie)
                self.set_cookie(cookie)
Project: kodi-tk_del    Author: hubsif    | Project source | File source
def add_cookie_header(self, request):
        """Add correct Cookie: header to request (mechanize.Request object).

        The Cookie2 header is also added unless policy.hide_cookie2 is true.

        The request object (usually a mechanize.Request instance) must support
        the methods get_full_url, get_host, is_unverifiable, get_type,
        has_header, get_header, header_items and add_unredirected_header, as
        documented by urllib2.
        """
        debug("add_cookie_header")
        cookies = self.cookies_for_request(request)

        attrs = self._cookie_attrs(cookies)
        if attrs:
            if not request.has_header("Cookie"):
                request.add_unredirected_header("Cookie", "; ".join(attrs))

        # if necessary, advertise that we know RFC 2965
        if self._policy.rfc2965 and not self._policy.hide_cookie2:
            for cookie in cookies:
                if cookie.version != 1 and not request.has_header("Cookie2"):
                    request.add_unredirected_header("Cookie2", '$Version="1"')
                    break

        self.clear_expired_cookies()
Project: kodi-tk_del    Author: hubsif    | Project source | File source
def extract_cookies(self, response, request):
        """Extract cookies from response, where allowable given the request.

        Look for allowable Set-Cookie: and Set-Cookie2: headers in the response
        object passed as argument.  Any of these headers that are found are
        used to update the state of the object (subject to the policy.set_ok
        method's approval).

        The response object (usually the result of a call to
        mechanize.urlopen, or similar) should support an info method, which
        returns a mimetools.Message object (in fact, the 'mimetools.Message
        object' may be any object that provides a getheaders method).

        The request object (usually a mechanize.Request instance) must support
        the methods get_full_url, get_type, get_host, and is_unverifiable, as
        documented by mechanize, and the port attribute (the port number).  The
        request is used to set default values for cookie-attributes as well as
        for checking that the cookie is OK to be set.

        """
        debug("extract_cookies: %s", response.info())
        self._policy._now = self._now = int(time.time())

        for cookie in self._make_cookies(response, request):
            if cookie.expires is not None and cookie.expires <= self._now:
                # Expiry date in past is request to delete cookie.  This can't be
                # in DefaultCookiePolicy, because can't delete cookies there.
                try:
                    self.clear(cookie.domain, cookie.path, cookie.name)
                except KeyError:
                    pass
                debug("Expiring cookie, domain='%s', path='%s', name='%s'",
                      cookie.domain, cookie.path, cookie.name)
            elif self._policy.set_ok(cookie, request):
                debug(" setting cookie: %s", cookie)
                self.set_cookie(cookie)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_redirect_with_timeout(self):
        timeout_log = self._monkey_patch_socket()
        timeout = 10.
        # 301 redirect due to missing final '/'
        req = mechanize.Request(urljoin(self.test_uri, "test_fixtures"),
                                timeout=timeout)
        r = self.browser.open(req)
        self.assert_("GeneralFAQ.html" in r.read(2048))
        timeout_log.verify(timeout)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_retrieve(self):
        # not passing an explicit filename downloads to a temporary file
        # using a Request object instead of a URL works
        url = urljoin(self.uri, "/mechanize/")
        opener = self.build_opener()
        verif = CallbackVerifier(self)
        request = mechanize.Request(url)
        filename, headers = opener.retrieve(request, reporthook=verif.callback)
        self.assertEquals(request.visit, False)
        self._check_retrieve(url, filename, headers)
        opener.close()
        # closing the opener removed the temporary file
        self.failIf(os.path.isfile(filename))
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_domain_return_ok(self):
        # test optimization: .domain_return_ok() should filter out most
        # domains in the CookieJar before we try to access them (because that
        # may require disk access -- in particular, with MSIECookieJar)
        # This is only a rough check for performance reasons, so it's not too
        # critical as long as it's sufficiently liberal.
        import mechanize
        pol = mechanize.DefaultCookiePolicy()
        for url, domain, ok in [
            ("http://foo.bar.com/", "blah.com", False),
            ("http://foo.bar.com/", "rhubarb.blah.com", False),
            ("http://foo.bar.com/", "rhubarb.foo.bar.com", False),
            ("http://foo.bar.com/", ".foo.bar.com", True),
            ("http://foo.bar.com/", "foo.bar.com", True),
            ("http://foo.bar.com/", ".bar.com", True),
            ("http://foo.bar.com/", "com", True),
            ("http://foo.com/", "rhubarb.foo.com", False),
            ("http://foo.com/", ".foo.com", True),
            ("http://foo.com/", "foo.com", True),
            ("http://foo.com/", "com", True),
            ("http://foo/", "rhubarb.foo", False),
            ("http://foo/", ".foo", True),
            ("http://foo/", "foo", True),
            ("http://foo/", "foo.local", True),
            ("http://foo/", ".local", True),
        ]:
            request = mechanize.Request(url)
            r = pol.domain_return_ok(domain, request)
            if ok:
                self.assert_(r)
            else:
                self.assert_(not r)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_request_path(self):
        from mechanize._clientcookie import request_path
        # with parameters
        req = Request("http://www.example.com/rheum/rhaponticum;"
                      "foo=bar;sing=song?apples=pears&spam=eggs#ni")
        self.assertEquals(
            request_path(req), "/rheum/rhaponticum;foo=bar;sing=song")
        # without parameters
        req = Request("http://www.example.com/rheum/rhaponticum?"
                      "apples=pears&spam=eggs#ni")
        self.assertEquals(request_path(req), "/rheum/rhaponticum")
        # missing final slash
        req = Request("http://www.example.com")
        self.assert_(request_path(req) == "/")
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_request_port(self):
        from mechanize._clientcookie import request_port, DEFAULT_HTTP_PORT
        req = Request(
            "http://www.acme.com:1234/", headers={"Host": "www.acme.com:4321"})
        assert request_port(req) == "1234"
        req = Request(
            "http://www.acme.com/", headers={"Host": "www.acme.com:4321"})
        assert request_port(req) == DEFAULT_HTTP_PORT
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_effective_request_host(self):
        from mechanize import effective_request_host
        self.assertEquals(
            effective_request_host(Request("http://www.EXAMPLE.com/spam")),
            "www.example.com")
        self.assertEquals(
            effective_request_host(Request("http://bob/spam")), "bob.local")
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_domain_block(self):
        from mechanize import CookieJar, DefaultCookiePolicy

        #import logging; logging.getLogger("mechanize").setLevel(logging.DEBUG)

        pol = DefaultCookiePolicy(rfc2965=True, blocked_domains=[".acme.com"])
        c = CookieJar(policy=pol)
        headers = ["Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/"]

        req = Request("http://www.acme.com/")
        res = FakeResponse(headers, "http://www.acme.com/")
        c.extract_cookies(res, req)
        assert len(c) == 0

        pol.set_blocked_domains(["acme.com"])
        c.extract_cookies(res, req)
        assert len(c) == 1

        c.clear()
        req = Request("http://www.roadrunner.net/")
        res = FakeResponse(headers, "http://www.roadrunner.net/")
        c.extract_cookies(res, req)
        assert len(c) == 1
        req = Request("http://www.roadrunner.net/")
        c.add_cookie_header(req)
        assert (req.has_header("Cookie") and req.has_header("Cookie2"))

        c.clear()
        pol.set_blocked_domains([".acme.com"])
        c.extract_cookies(res, req)
        assert len(c) == 1

        # set a cookie with blocked domain...
        req = Request("http://www.acme.com/")
        res = FakeResponse(headers, "http://www.acme.com/")
        cookies = c.make_cookies(res, req)
        c.set_cookie(cookies[0])
        assert len(c) == 2
        # ... and check it doesn't get returned
        c.add_cookie_header(req)
        assert not req.has_header("Cookie")
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_missing_final_slash(self):
        # Missing slash from request URL's abs_path should be assumed present.
        from mechanize import CookieJar, Request, DefaultCookiePolicy
        url = "http://www.acme.com"
        c = CookieJar(DefaultCookiePolicy(rfc2965=True))
        interact_2965(c, url, "foo=bar; Version=1")
        req = Request(url)
        assert len(c) == 1
        c.add_cookie_header(req)
        assert req.has_header("Cookie")
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_bad_cookie_header(self):
        def cookiejar_from_cookie_headers(headers):
            from mechanize import CookieJar, Request
            c = CookieJar()
            req = Request("http://www.example.com/")
            r = FakeResponse(headers, "http://www.example.com/")
            c.extract_cookies(r, req)
            return c

        # none of these bad headers should cause an exception to be raised
        for headers in [
            ["Set-Cookie: "],  # actually, nothing wrong with this
            ["Set-Cookie2: "],  # ditto
                # missing domain value
            ["Set-Cookie2: a=foo; path=/; Version=1; domain"],
                # bad max-age
            ["Set-Cookie: b=foo; max-age=oops"],
                # bad version
            ["Set-Cookie: b=foo; version=spam"],
        ]:
            c = cookiejar_from_cookie_headers(headers)
            # these bad cookies shouldn't be set
            assert len(c) == 0

        # cookie with invalid expires is treated as session cookie
        headers = ["Set-Cookie: c=foo; expires=Foo Bar 12 33:22:11 2000"]
        c = cookiejar_from_cookie_headers(headers)
        cookie = c._cookies["www.example.com"]["/"]["c"]
        assert cookie.expires is None

        # cookie with unset path should have path=/
        headers = ["Set-Cookie: c=foo; path; expires=Foo Bar 12 33:22:11 2000"]
        c = cookiejar_from_cookie_headers(headers)
        assert ('www.example.com' in c._cookies and
                '/' in c._cookies['www.example.com'])
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_cookies_for_request(self):
        from mechanize import CookieJar, Request

        cj = CookieJar()
        interact_netscape(cj, "http://example.com/", "short=path")
        interact_netscape(cj, "http://example.com/longer/path", "longer=path")
        for_short_path = cj.cookies_for_request(Request("http://example.com/"))
        self.assertEquals([cookie.name for cookie in for_short_path],
                          ["short"])
        for_long_path = cj.cookies_for_request(
            Request("http://example.com/longer/path"))
        self.assertEquals([cookie.name for cookie in for_long_path],
                          ["longer", "short"])
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_firefox3_cookiejar_add_cookie_header(self):
        try:
            from mechanize import Firefox3CookieJar
        except ImportError:
            pass
        else:
            filename = self.mktemp()
            hide_experimental_warnings()
            try:
                cj = Firefox3CookieJar(filename)
            finally:
                reset_experimental_warnings()
            cj.connect()
            # Session cookies (true .discard) and persistent cookies (false
            # .discard) are stored differently.  Check they both get sent.
            year_plus_one = time.localtime(time.time())[0] + 1
            expires = "expires=09-Nov-%d 23:12:40 GMT" % (year_plus_one, )
            interact_netscape(cj, "http://www.foo.com/", "fooa=bar")
            interact_netscape(cj, "http://www.foo.com/",
                              "foob=bar; %s" % expires)
            ca, cb = cj
            self.assert_(ca.discard)
            self.assertFalse(cb.discard)
            request = Request("http://www.foo.com/")
            cj.add_cookie_header(request)
            self.assertEquals(
                request.get_header("Cookie"), "fooa=bar; foob=bar")
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_gzip(self):
        p = HTTPGzipProcessor()
        url = "https://www.example.com/"
        req = p.https_request(mechanize.Request(url))
        self.assertIsNone(req.get_header('Accept-Encoding'))
        p.request_gzip = True
        req = p.https_request(mechanize.Request(url))
        self.assertEqual(req.get_header('Accept-Encoding'), 'gzip')
        req = mechanize.Request(url)
        req.add_header('Accept-Encoding', 'moo, *')
        req = p.https_request(req)
        self.assertEqual(req.get_header('Accept-Encoding'), 'moo, *, gzip')
        data = os.urandom(1024 * 1024)
        cdata = b''.join(compress_readable_output(BytesIO(data)))
        r = MockResponse(
            url,
            data=cdata,
            info={
                'Content-Encoding': 'gzip',
                'Content-Length': str(len(cdata))
            })
        r = p.https_response(req, r)
        self.assertEqual(r.read(), data)
        h = r.info()
        self.assertFalse(h.getheaders('content-encoding'))
        self.assertFalse(h.getheaders('content-length'))
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_add_host_header(self):
        headers = []

        def putheader(self_, header, value):
            headers.append((header, value))

        self.monkey_patch_httplib(putheader)
        browser = self.make_browser()
        request = mechanize.Request("http://example.com/")
        browser.addheaders = [("Host", "myway.example.com")]
        browser.open(request)
        self.assertIn(("Host", "myway.example.com"), headers)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_pickle_cookie(self):
        from mechanize._clientcookie import cookies_equal
        cookiejar = mechanize.CookieJar()
        url = "http://example.com/"
        request = mechanize.Request(url)
        response = mechanize._response.test_response(
            headers=[("Set-Cookie", "spam=eggs")], url=url)
        [cookie] = cookiejar.make_cookies(response, request)

        def check_equality(b):
            self.assertTrue(cookies_equal(cookie, b))

        test_pickling(cookie, check_equality)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_request_headers_dict():
    """
    The Request.headers dictionary is not a documented interface.  It should
    stay that way, because the complete set of headers are only accessible
    through the .get_header(), .has_header(), .header_items() interface.
    However, .headers pre-dates those methods, and so real code will be using
    the dictionary.

    The introduction in 2.4 of those methods was a mistake for the same reason:
    code that previously saw all (urllib2 user)-provided headers in .headers
    now sees only a subset (and the function interface is ugly and incomplete).
    A better change would have been to replace .headers dict with a dict
    subclass (or UserDict.DictMixin instance?)  that preserved the .headers
    interface and also provided access to the "unredirected" headers.  It's
    probably too late to fix that, though.


    Check .capitalize() case normalization:

    >>> url = "http://example.com"
    >>> Request(url, headers={"Spam-eggs": "blah"}).headers["Spam-eggs"]
    'blah'
    >>> Request(url, headers={"spam-EggS": "blah"}).headers["Spam-eggs"]
    'blah'

    Currently, Request(url, "Spam-eggs").headers["Spam-Eggs"] raises KeyError,
    but that could be changed in future.

    """
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_request_headers_methods():
    """
    Note the case normalization of header names here, to .capitalize()-case.
    This should be preserved for backwards-compatibility.  (In the HTTP case,
    normalization to .title()-case is done by urllib2 before sending headers to
    httplib).

    >>> url = "http://example.com"
    >>> r = Request(url, headers={"Spam-eggs": "blah"})
    >>> r.has_header("Spam-eggs")
    True
    >>> r.header_items()
    [('Spam-eggs', 'blah')]
    >>> r.add_header("Foo-Bar", "baz")
    >>> items = r.header_items()
    >>> items.sort()
    >>> items
    [('Foo-bar', 'baz'), ('Spam-eggs', 'blah')]

    Note that e.g. r.has_header("spam-EggS") is currently False, and
    r.get_header("spam-EggS") returns None, but that could be changed in
    future.

    >>> r.has_header("Not-there")
    False
    >>> print(r.get_header("Not-there"))
    None
    >>> r.get_header("Not-there", "default")
    'default'

    """
Project: mechanize    Author: python-mechanize    | Project source | File source
def add_ordered_mock_handlers(opener, meth_spec):
    """Create MockHandlers and add them to an OpenerDirector.

    meth_spec: list of lists of tuples and strings defining methods to define
    on handlers.  eg:

    [["http_error", "ftp_open"], ["http_open"]]

    defines methods .http_error() and .ftp_open() on one handler, and
    .http_open() on another.  These methods just record their arguments and
    return None.  Using a tuple instead of a string causes the method to
    perform some action (see MockHandler.handle()), eg:

    [["http_error"], [("http_open", "return request")]]

    defines .http_error() on one handler (which simply returns None), and
    .http_open() on another handler, which returns a Request object.

    """
    handlers = []
    count = 0
    for meths in meth_spec:

        class MockHandlerSubclass(MockHandler):
            pass

        h = MockHandlerSubclass(meths)
        h.handler_order += count
        h.add_parent(opener)
        count = count + 1
        handlers.append(h)
        opener.add_handler(h)
    return handlers
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_processors(self):
        # *_request / *_response methods get called appropriately
        o = OpenerDirector()
        meth_spec = [
            [("http_request", "return request"),
             ("http_response", "return response")],
            [("http_request", "return request"),
             ("http_response", "return response")],
        ]
        handlers = add_ordered_mock_handlers(o, meth_spec)

        req = Request("http://example.com/")
        r = o.open(req)
        # processor methods are called on *all* handlers that define them,
        # not just the first handler that handles the request
        calls = [(handlers[0], "http_request"), (handlers[1], "http_request"),
                 (handlers[0], "http_response"),
                 (handlers[1], "http_response")]

        self.assertEqual(len(o.calls), len(calls))
        for i, (handler, name, args, kwds) in enumerate(o.calls):
            if i < 2:
                # *_request
                self.assertEqual((handler, name), calls[i])
                self.assertEqual(len(args), 1)
                self.assertTrue(isinstance(args[0], Request))
            else:
                # *_response
                self.assertEqual((handler, name), calls[i])
                self.assertEqual(len(args), 2)
                self.assertTrue(isinstance(args[0], Request))
                # response from opener.open is None, because there's no
                # handler that defines http_open to handle it
                self.assertTrue(args[1] is None or
                                isinstance(args[1], MockResponse))
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_raise_http_errors(self):
        # HTTPDefaultErrorHandler should raise HTTPError if no error handler
        # handled the error response
        from mechanize import _response
        h = mechanize.HTTPDefaultErrorHandler()

        url = "http://example.com"
        code = 500
        msg = "Error"
        request = mechanize.Request(url)
        response = _response.test_response(url=url, code=code, msg=msg)

        # case 1. it's not an HTTPError
        try:
            h.http_error_default(request, response, code, msg, response.info())
        except mechanize.HTTPError as exc:
            self.assert_(exc is not response)
            self.assert_(exc.fp is response)
        else:
            self.assert_(False)

        # case 2. response object is already an HTTPError, so just re-raise it
        error = mechanize.HTTPError(url, code, msg, "fake headers", response)
        try:
            h.http_error_default(request, error, code, msg, error.info())
        except mechanize.HTTPError as exc:
            self.assert_(exc is error)
        else:
            self.assert_(False)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_cookies(self):
        cj = MockCookieJar()
        h = HTTPCookieProcessor(cj)
        o = h.parent = MockOpener()

        req = Request("http://example.com/")
        r = MockResponse(200, "OK", {}, "")
        newreq = h.http_request(req)
        self.assertTrue(cj.ach_req is req is newreq)
        self.assertEquals(req.get_origin_req_host(), "example.com")
        self.assertFalse(cj.ach_u)
        newr = h.http_response(req, r)
        self.assertTrue(cj.ec_req is req)
        self.assertTrue(cj.ec_r is r is newr)
        self.assertFalse(cj.ec_u)
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_proxy_no_proxy(self):
        self.monkey_patch_environ("no_proxy", "python.org")
        o = OpenerDirector()
        ph = mechanize.ProxyHandler(dict(http="proxy.example.com"))
        o.add_handler(ph)
        req = Request("http://www.perl.org/")
        self.assertEqual(req.get_host(), "www.perl.org")
        r = o.open(req)
        self.assertEqual(req.get_host(), "proxy.example.com")
        req = Request("http://www.python.org")
        self.assertEqual(req.get_host(), "www.python.org")
        r = o.open(req)
        if sys.version_info >= (2, 6):
            # no_proxy environment variable not supported in python 2.5
            self.assertEqual(req.get_host(), "www.python.org")
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_proxy_https(self):
        o = OpenerDirector()
        ph = mechanize.ProxyHandler(dict(https='proxy.example.com:3128'))
        o.add_handler(ph)
        meth_spec = [[("https_open", "return response")]]
        handlers = add_ordered_mock_handlers(o, meth_spec)
        req = Request("https://www.example.com/")
        self.assertEqual(req.get_host(), "www.example.com")
        r = o.open(req)
        self.assertEqual(req.get_host(), "proxy.example.com:3128")
        self.assertEqual([(handlers[0], "https_open")],
                         [tup[0:2] for tup in o.calls])
Project: mechanize    Author: python-mechanize    | Project source | File source
def setUp(self):
        self.get = Request("http://www.python.org/~jeremy/")
        self.post = Request(
            "http://www.python.org/~jeremy/",
            "data",
            headers={"X-Test": "test"})
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_selector(self):
        self.assertEqual("/~jeremy/", self.get.get_selector())
        req = Request("http://www.python.org/")
        self.assertEqual("/", req.get_selector())
Project: mechanize    Author: python-mechanize    | Project source | File source
def test_get_host_unquote(self):
        req = Request("http://www.%70ython.org/")
        self.assertEqual("www.python.org", req.get_host())
Project: qxf2-page-object-model    Author: qxf2    | Project source | File source
def get(self, url, headers={}):
        "Mechanize Get request"
        response = self.browser.open(mechanize.Request(url, headers=headers))
        return response
Project: qxf2-page-object-model    Author: qxf2    | Project source | File source
def post(self, url, data=None, headers={}):
        "Mechanize Post request"
        response = self.browser.open(mechanize.Request(url=url, data=data, headers=headers))
        return response
Project: addon    Author: alfa-addon    | Project source | File source
def add_cookie_header(self, request):
        """Add correct Cookie: header to request (mechanize.Request object).

        The Cookie2 header is also added unless policy.hide_cookie2 is true.

        The request object (usually a mechanize.Request instance) must support
        the methods get_full_url, get_host, is_unverifiable, get_type,
        has_header, get_header, header_items and add_unredirected_header, as
        documented by urllib2.
        """
        debug("add_cookie_header")
        cookies = self.cookies_for_request(request)

        attrs = self._cookie_attrs(cookies)
        if attrs:
            if not request.has_header("Cookie"):
                request.add_unredirected_header("Cookie", "; ".join(attrs))

        # if necessary, advertise that we know RFC 2965
        if self._policy.rfc2965 and not self._policy.hide_cookie2:
            for cookie in cookies:
                if cookie.version != 1 and not request.has_header("Cookie2"):
                    request.add_unredirected_header("Cookie2", '$Version="1"')
                    break

        self.clear_expired_cookies()