Python cookielib module: MozillaCookieJar() example source code

We extracted the following 50 code examples from open-source Python projects to illustrate how to use cookielib.MozillaCookieJar().
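
Before the per-project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the typical Python 2 workflow: bind a MozillaCookieJar to a Netscape-format cookie file, load it if the file already exists, attach it to a urllib2 opener, and save it again after a request. The file name cookies.txt and the URL are placeholder values.

import os
import cookielib
import urllib2

COOKIE_FILE = 'cookies.txt'  # placeholder path for this sketch

# Bind the jar to a file; load existing cookies if the file is present.
cj = cookielib.MozillaCookieJar(COOKIE_FILE)
if os.path.exists(COOKIE_FILE):
    cj.load(ignore_discard=True, ignore_expires=True)

# Attach the jar to an opener so cookies are sent and captured automatically.
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
response = opener.open('http://example.com/')
response.read()

# Persist whatever cookies the server set, including session cookies.
cj.save(ignore_discard=True, ignore_expires=True)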

Project: google_scholar_paper_finder    Author: maikelronnau
def __init__(self):
        self.articles = []
        self.query = None
        self.cjar = MozillaCookieJar()

        # If we have a cookie file, load it:
        if ScholarConf.COOKIE_JAR_FILE and \
           os.path.exists(ScholarConf.COOKIE_JAR_FILE):
            try:
                self.cjar.load(ScholarConf.COOKIE_JAR_FILE,
                               ignore_discard=True)
                ScholarUtils.log('info', 'loaded cookies file')
            except Exception as msg:
                ScholarUtils.log('warn', 'could not load cookies file: %s' % msg)
                self.cjar = MozillaCookieJar() # Just to be safe

        self.opener = build_opener(HTTPCookieProcessor(self.cjar))
        self.settings = None # Last settings object, if any
Project: citations    Author: frederick0329
def __init__(self):
        self.articles = []
        self.query = None
        self.cjar = MozillaCookieJar()

        # If we have a cookie file, load it:
        if ScholarConf.COOKIE_JAR_FILE and \
           os.path.exists(ScholarConf.COOKIE_JAR_FILE):
            try:
                self.cjar.load(ScholarConf.COOKIE_JAR_FILE,
                               ignore_discard=True)
                ScholarUtils.log('info', 'loaded cookies file')
            except Exception as msg:
                ScholarUtils.log('warn', 'could not load cookies file: %s' % msg)
                self.cjar = MozillaCookieJar() # Just to be safe

        self.opener = build_opener(HTTPCookieProcessor(self.cjar))
        self.settings = None # Last settings object, if any
Project: auto-laod-hosts    Author: yanjinyi1987
def urlopen_test(host):
    headers = [('Host',host),
    ('Connection', 'keep-alive'),
    ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'),
    ('User-Agent', 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0'),
    #('Accept-Encoding','gzip,deflate'), 
    ('Accept-Language', 'en-US,en;q=0.5')]

    # Create a MozillaCookieJar instance to hold the cookies
    cookie=cookielib.MozillaCookieJar()
    handler=urllib2.HTTPCookieProcessor(cookie)

    req=urllib2.Request(u'https://'+host)
    first_opener = urllib2.build_opener(handler)
    first_opener.addheaders = headers
    try:
        result=first_opener.open(req,timeout=60) # 60s timeout
        if result.read()!=None:
            return True
    except Exception,e:
        print e
        return False
Project: oil    Author: oilshell
def test_bad_magic(self):
        from cookielib import LWPCookieJar, MozillaCookieJar, LoadError
        # IOErrors (eg. file doesn't exist) are allowed to propagate
        filename = test_support.TESTFN
        for cookiejar_class in LWPCookieJar, MozillaCookieJar:
            c = cookiejar_class()
            try:
                c.load(filename="for this test to work, a file with this "
                                "filename should not exist")
            except IOError, exc:
                # exactly IOError, not LoadError
                self.assertEqual(exc.__class__, IOError)
            else:
                self.fail("expected IOError for invalid filename")
        # Invalid contents of cookies file (eg. bad magic string)
        # causes a LoadError.
        try:
            f = open(filename, "w")
            f.write("oops\n")
            for cookiejar_class in LWPCookieJar, MozillaCookieJar:
                c = cookiejar_class()
                self.assertRaises(LoadError, c.load, filename)
        finally:
            try: os.unlink(filename)
            except OSError: pass
Project: python2-tracer    Author: extremecoders-re
def test_bad_magic(self):
        from cookielib import LWPCookieJar, MozillaCookieJar, LoadError
        # IOErrors (eg. file doesn't exist) are allowed to propagate
        filename = test_support.TESTFN
        for cookiejar_class in LWPCookieJar, MozillaCookieJar:
            c = cookiejar_class()
            try:
                c.load(filename="for this test to work, a file with this "
                                "filename should not exist")
            except IOError, exc:
                # exactly IOError, not LoadError
                self.assertEqual(exc.__class__, IOError)
            else:
                self.fail("expected IOError for invalid filename")
        # Invalid contents of cookies file (eg. bad magic string)
        # causes a LoadError.
        try:
            f = open(filename, "w")
            f.write("oops\n")
            for cookiejar_class in LWPCookieJar, MozillaCookieJar:
                c = cookiejar_class()
                self.assertRaises(LoadError, c.load, filename)
        finally:
            try: os.unlink(filename)
            except OSError: pass
Project: Medium-crawler-with-data-analyzer    Author: lifei96
def get_following(user_id):
    url = 'https://medium.com/_/api/users/' + user_id + '/following'
    cj = cookielib.MozillaCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    req = urllib2.Request(url)
    req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                    Chrome/50.0.2661.102 Safari/537.36')
    response = opener.open(req, timeout=10)
    data = response.read()
    following = re.findall('"username":"(.*?)","createdAt"', data)
    following_set = set(following)
    to = re.findall('"to":"(.*?)"}}},"v"', data)
    while to:
        url = 'https://medium.com/_/api/users/' + user_id + '/following?to=' + to[0]
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                        Chrome/50.0.2661.102 Safari/537.36')
        response = opener.open(req, timeout=10)
        data = response.read()
        following = re.findall('"username":"(.*?)","createdAt"', data)
        following_set.update(following)
        to = re.findall('"to":"(.*?)"}}},"v"', data)
    return list(following_set)
Project: Medium-crawler-with-data-analyzer    Author: lifei96
def get_followers(user_id):
    url = 'https://medium.com/_/api/users/' + user_id + '/followers'
    cj = cookielib.MozillaCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    req = urllib2.Request(url)
    req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                    Chrome/50.0.2661.102 Safari/537.36')
    response = opener.open(req, timeout=10)
    data = response.read()
    followers = re.findall('"username":"(.*?)","createdAt"', data)
    followers_set = set(followers)
    to = re.findall('"to":"(.*?)"}}},"v"', data)
    while to:
        url = 'https://medium.com/_/api/users/' + user_id + '/followers?to=' + to[0]
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                        Chrome/50.0.2661.102 Safari/537.36')
        response = opener.open(req, timeout=10)
        data = response.read()
        followers = re.findall('"username":"(.*?)","createdAt"', data)
        followers_set.update(followers)
        to = re.findall('"to":"(.*?)"}}},"v"', data)
    return list(followers_set)
Project: Medium-crawler-with-data-analyzer    Author: lifei96
def get_latest(user_id):
    url = 'https://medium.com/_/api/users/' + user_id + '/profile/stream?source=latest'
    cj = cookielib.MozillaCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    req = urllib2.Request(url)
    req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                    Chrome/50.0.2661.102 Safari/537.36')
    response = opener.open(req, timeout=10)
    data = response.read()
    latest = re.findall('"postId":"(.*?)"},"randomId"', data)
    latest_set = set(latest)
    to = re.findall('"to":"(.*?)","source":"latest"', data)
    while to:
        url = 'https://medium.com/_/api/users/' + user_id + '/profile/stream?source=latest&to=' + to[0]
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                        Chrome/50.0.2661.102 Safari/537.36')
        response = opener.open(req, timeout=10)
        data = response.read()
        latest = re.findall('"postId":"(.*?)"},"randomId"', data)
        latest_set.update(latest)
        to = re.findall('"to":"(.*?)","source":"latest"', data)
    return list(latest_set)
Project: Medium-crawler-with-data-analyzer    Author: lifei96
def get_recommends(user_id):
    url = 'https://medium.com/_/api/users/' + user_id + '/profile/stream?source=has-recommended'
    cj = cookielib.MozillaCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    req = urllib2.Request(url)
    req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                    Chrome/50.0.2661.102 Safari/537.36')
    response = opener.open(req, timeout=10)
    data = response.read()
    recommends = re.findall('w":{"postId":"(.*?)"},"randomId"', data)
    recommends_set = set(recommends)
    to = re.findall('"to":"(.*?)","source":"has-recommended"', data)
    while to:
        url = 'https://medium.com/_/api/users/' + user_id + '/profile/stream?source=has-recommended&to=' + to[0]
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                        Chrome/50.0.2661.102 Safari/537.36')
        response = opener.open(req, timeout=10)
        data = response.read()
        recommends = re.findall('w":{"postId":"(.*?)"},"randomId"', data)
        recommends_set.update(recommends)
        to = re.findall('"to":"(.*?)","source":"has-recommended"', data)
    return list(recommends_set)
Project: Medium-crawler-with-data-analyzer    Author: lifei96
def get_highlights(user_id):
    url = 'https://medium.com/_/api/users/' + user_id + '/profile/stream?source=quotes'
    cj = cookielib.MozillaCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    req = urllib2.Request(url)
    req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                    Chrome/50.0.2661.102 Safari/537.36')
    response = opener.open(req, timeout=10)
    data = response.read()
    highlights = re.findall('","postId":"(.*?)","userId":"', data)
    highlights_set = set(highlights)
    to = re.findall('"to":"(.*?)","source":"quotes"', data)
    while to:
        url = 'https://medium.com/_/api/users/' + user_id + '/profile/stream?source=quotes&to=' + to[0]
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                        Chrome/50.0.2661.102 Safari/537.36')
        response = opener.open(req, timeout=10)
        data = response.read()
        highlights = re.findall('","postId":"(.*?)","userId":"', data)
        highlights_set.update(highlights)
        to = re.findall('"to":"(.*?)","source":"quotes"', data)
    return list(highlights_set)
Project: Medium-crawler-with-data-analyzer    Author: lifei96
def get_twitter_profile(username, twitter_id):
    url = "https://twitter.com/" + str(twitter_id) + "?lang=en"
    cj = cookielib.MozillaCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    req = urllib2.Request(url)
    req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                    Chrome/50.0.2661.102 Safari/537.36')
    response = opener.open(req, timeout=10)
    data = response.read()
    profile_data = re.findall('class="json-data" value="(.*?)">', data)
    profile = json.loads(profile_data[0].replace('&quot;', '"'))
    profile.pop("promptbirdData", None)
    profile.pop("wtfOptions", None)
    profile.pop("typeaheadData", None)
    profile.pop("dm", None)
    profile.pop("initialState", None)
    profile.pop("activeHashflags", None)
    profile.pop("keyboardShortcuts", None)
    profile.pop("deciders", None)
    out = codecs.open("./Twitter/%s_t.json" % username, 'w', 'utf-8')
    out.write(json.dumps(profile, indent=4))
    out.close()
Project: snowballing    Author: JoaoFelipe
def __init__(self):
        self.articles = []
        self.query = None
        self.cjar = MozillaCookieJar()

        # If we have a cookie file, load it:
        if ScholarConf.COOKIE_JAR_FILE and \
           os.path.exists(ScholarConf.COOKIE_JAR_FILE):
            try:
                self.cjar.load(ScholarConf.COOKIE_JAR_FILE,
                               ignore_discard=True)
                ScholarUtils.log('info', 'loaded cookies file')
            except Exception as msg:
                ScholarUtils.log('warn', 'could not load cookies file: %s' % msg)
                self.cjar = MozillaCookieJar() # Just to be safe

        self.opener = build_opener(HTTPCookieProcessor(self.cjar))
        self.settings = None # Last settings object, if any
Project: KDDCUP2016    Author: hugochan
def __init__(self):
        self.articles = []
        self.query = None
        self.cjar = MozillaCookieJar()

        # If we have a cookie file, load it:
        if ScholarConf.COOKIE_JAR_FILE and \
           os.path.exists(ScholarConf.COOKIE_JAR_FILE):
            try:
                self.cjar.load(ScholarConf.COOKIE_JAR_FILE,
                               ignore_discard=True)
                ScholarUtils.log('info', 'loaded cookies file')
            except Exception as msg:
                ScholarUtils.log('warn', 'could not load cookies file: %s' % msg)
                self.cjar = MozillaCookieJar() # Just to be safe

        self.opener = build_opener(HTTPCookieProcessor(self.cjar))
        self.settings = None # Last settings object, if any
Project: pefile.pypy    Author: cloudtracer
def test_bad_magic(self):
        from cookielib import LWPCookieJar, MozillaCookieJar, LoadError
        # IOErrors (eg. file doesn't exist) are allowed to propagate
        filename = test_support.TESTFN
        for cookiejar_class in LWPCookieJar, MozillaCookieJar:
            c = cookiejar_class()
            try:
                c.load(filename="for this test to work, a file with this "
                                "filename should not exist")
            except IOError, exc:
                # exactly IOError, not LoadError
                self.assertEqual(exc.__class__, IOError)
            else:
                self.fail("expected IOError for invalid filename")
        # Invalid contents of cookies file (eg. bad magic string)
        # causes a LoadError.
        try:
            f = open(filename, "w")
            f.write("oops\n")
            for cookiejar_class in LWPCookieJar, MozillaCookieJar:
                c = cookiejar_class()
                self.assertRaises(LoadError, c.load, filename)
        finally:
            try: os.unlink(filename)
            except OSError: pass
Project: ndk-python    Author: gittor
def test_bad_magic(self):
        from cookielib import LWPCookieJar, MozillaCookieJar, LoadError
        # IOErrors (eg. file doesn't exist) are allowed to propagate
        filename = test_support.TESTFN
        for cookiejar_class in LWPCookieJar, MozillaCookieJar:
            c = cookiejar_class()
            try:
                c.load(filename="for this test to work, a file with this "
                                "filename should not exist")
            except IOError, exc:
                # exactly IOError, not LoadError
                self.assertEqual(exc.__class__, IOError)
            else:
                self.fail("expected IOError for invalid filename")
        # Invalid contents of cookies file (eg. bad magic string)
        # causes a LoadError.
        try:
            f = open(filename, "w")
            f.write("oops\n")
            for cookiejar_class in LWPCookieJar, MozillaCookieJar:
                c = cookiejar_class()
                self.assertRaises(LoadError, c.load, filename)
        finally:
            try: os.unlink(filename)
            except OSError: pass
Project: slack_scholar    Author: xLeitix
def __init__(self):
        self.articles = []
        self.query = None
        self.cjar = MozillaCookieJar()

        # If we have a cookie file, load it:
        if ScholarConf.COOKIE_JAR_FILE and \
           os.path.exists(ScholarConf.COOKIE_JAR_FILE):
            try:
                self.cjar.load(ScholarConf.COOKIE_JAR_FILE,
                               ignore_discard=True)
                print "Using cookie file"
                ScholarUtils.log('info', 'loaded cookies file')
            except Exception as msg:
                print "Ignoring cookie file: %s" % msg
                ScholarUtils.log('warn', 'could not load cookies file: %s' % msg)
                self.cjar = MozillaCookieJar() # Just to be safe

        self.opener = build_opener(HTTPCookieProcessor(self.cjar))
        self.settings = None # Last settings object, if any
Project: Airbnb-superhosts-crawler-with-data-analyzer    Author: lifei96
def get_name(uid):
    url = 'https://www.airbnb.com/users/show/' + uid + '?locale=en'
    cj = cookielib.MozillaCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    req = urllib2.Request(url)
    req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                    Chrome/50.0.2661.102 Safari/537.36')
    try:
        response = opener.open(req, timeout=10)
    except Exception as inst:
        print type(inst)
        print inst.args
        print inst
        print('-----fail to get name data')
        return ''
    data = response.read()
    name = re.findall('Hey, I’m (.*?)!', data)
    if len(name):
        return name[0]
    else:
        return ''
Project: Airbnb-superhosts-crawler-with-data-analyzer    Author: lifei96
def get_name(uid):
    url = 'https://www.airbnb.com/users/show/' + uid + '?locale=en'
    cj = cookielib.MozillaCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    req = urllib2.Request(url)
    req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                    Chrome/50.0.2661.102 Safari/537.36')
    try:
        response = opener.open(req, timeout=10)
    except Exception as inst:
        print type(inst)
        print inst.args
        print inst
        print('-----fail to get name data')
        return ''
    data = response.read()
    name = re.findall('Hey, I’m (.*?)!', data)
    if len(name):
        return name[0]
    else:
        return ''
Project: crossplatform_iptvplayer    Author: j00zek
def clearCookie(self, cookiefile, leaveNames=[], removeNames=None, ignore_discard = True):
        try:
            toRemove = []
            if not self.useMozillaCookieJar:
                cj = cookielib.LWPCookieJar()
            else:
                cj = cookielib.MozillaCookieJar()
            cj.load(cookiefile, ignore_discard = ignore_discard)
            for cookie in cj:
                if cookie.name not in leaveNames and (None == removeNames or cookie.name in removeNames):
                    toRemove.append(cookie)
            for cookie in toRemove:
                cj.clear(cookie.domain, cookie.path, cookie.name)
            cj.save(cookiefile, ignore_discard = ignore_discard)
        except Exception:
            printExc()
            return False
        return True
Project: OpenCouture-Dev    Author: 9-9-0
def __init__(self):
        self.br = mechanize.Browser()
        #self.cj = cookielib.LWPCookieJar()
        self.cj = cookielib.MozillaCookieJar()

        self.br.set_cookiejar(self.cj)
        self.br.set_handle_equiv(True)
        self.br.set_handle_referer(True)
        self.br.set_handle_robots(False)
        self.br.addheaders = [('User-agent', 'Firefox')]

        self.item_url = 'http://shop.bdgastore.com/collections/footwear/products/y-3-pureboost-zg'

        # Create variables for user credentials and a function to import them
Project: WeiboPictureWorkflow    Author: cielpy
def login(form_data):
    url = 'http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.4.18)'
    headers = ('User-Agent', 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0')
    cookie = cookielib.MozillaCookieJar(cookie_file)
    handler = urllib2.HTTPCookieProcessor(cookie)
    opener = urllib2.build_opener(handler)
    opener.addheaders.append(headers)
    req = opener.open(url, form_data)
    redirect_result = req.read()
    login_pattern = r'location.replace\(\'(.*?)\'\)'
    login_url = re.search(login_pattern, redirect_result).group(1)
    opener.open(login_url).read()
    cookie.save(cookie_file, ignore_discard=True, ignore_expires=True)
Project: WeiboPictureWorkflow    Author: cielpy
def request_image_url(image_path):
    cookie = cookielib.MozillaCookieJar()
    cookie.load(cookie_file, ignore_expires=False, ignore_discard=True)
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
    image_url = 'http://picupload.service.weibo.com/interface/pic_upload.php?mime=image%2Fjpeg&data=base64&url=0&markpos=1&logo=&nick=0&marks=1&app=miniblog'
    b = base64.b64encode(file(image_path).read())
    data = urllib.urlencode({'b64_data': b})
    result = opener.open(image_url, data).read()
    result = re.sub(r"<meta.*</script>", "", result, flags=re.S)
    image_result = json.loads(result)
    image_id = image_result.get('data').get('pics').get('pic_1').get('pid')
    return 'https://ws3.sinaimg.cn/large/%s.jpg' % image_id
Project: Daily-code    Author: rui7157
def login(self,login_url="http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.4.18)"):
        j_data = self.get_sso()
        postdata = {
            'entry': "weibo",
            'gateway': '1',
            'from': '',
            'savestate': '7',
            'userticket': '1',
            'pagerefer': "",
            'vsnf': '1',
            'su': self.get_user(),
            'service': 'miniblog',
            'servertime': j_data.get("servertime"),
            'nonce': j_data.get("nonce"),
            'pwencode': 'rsa2',
            'rsakv': j_data.get("rsakv"),
            'sp': self.get_passwd(j_data.get("pubkey"), j_data.get("servertime"), j_data.get("nonce")),
            'sr': "1440*900",
            'encoding': 'UTF-8',
            'prelt': '503',
            'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack',
            'returntype': 'META'
        }



        cookie=cookielib.MozillaCookieJar("Cookie.txt")
        opener=urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
        result=opener.open(login_url,urllib.urlencode(postdata))
        cookie.save(ignore_discard=True, ignore_expires=True)
        with open(os.path.join(os.path.dirname(__file__),"test.html"),"wb") as f:
            f.write(result.read())
        print u"????"
Project: Daily-code    Author: rui7157
def login(self, login_url="http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.4.18)"):
        j_data = self.get_sso()
        postdata = {
            'entry': "weibo",
            'gateway': '1',
            'from': '',
            'savestate': '7',
            'userticket': '1',
            'pagerefer': "",
            'vsnf': '1',
            'su': self.get_user(),
            'service': 'miniblog',
            'servertime': j_data.get("servertime"),
            'nonce': j_data.get("nonce"),
            'pwencode': 'rsa2',
            'rsakv': j_data.get("rsakv"),
            'sp': self.get_passwd(j_data.get("pubkey"), j_data.get("servertime"), j_data.get("nonce")),
            'sr': "1440*900",
            'encoding': 'UTF-8',
            'prelt': '503',
            'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack',
            'returntype': 'META'
        }

        cookie = cookielib.MozillaCookieJar("Cookie.txt")
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
        result = opener.open(login_url, urllib.urlencode(postdata))
        cookie.save(ignore_discard=True, ignore_expires=True)

        html = opener.open(
            "http://weibo.com/p/1005055887581312").read()
        with open(os.path.join(os.path.dirname(__file__), "text.html"), "wb") as f:
            f.write(html)
        print "????"
Project: oil    Author: oilshell
def test_missing_value(self):
        from cookielib import MozillaCookieJar, lwp_cookie_str

        # missing = sign in Cookie: header is regarded by Mozilla as a missing
        # name, and by cookielib as a missing value
        filename = test_support.TESTFN
        c = MozillaCookieJar(filename)
        interact_netscape(c, "http://www.acme.com/", 'eggs')
        interact_netscape(c, "http://www.acme.com/", '"spam"; path=/foo/')
        cookie = c._cookies["www.acme.com"]["/"]["eggs"]
        self.assertIsNone(cookie.value)
        self.assertEqual(cookie.name, "eggs")
        cookie = c._cookies["www.acme.com"]['/foo/']['"spam"']
        self.assertIsNone(cookie.value)
        self.assertEqual(cookie.name, '"spam"')
        self.assertEqual(lwp_cookie_str(cookie), (
            r'"spam"; path="/foo/"; domain="www.acme.com"; '
            'path_spec; discard; version=0'))
        old_str = repr(c)
        c.save(ignore_expires=True, ignore_discard=True)
        try:
            c = MozillaCookieJar(filename)
            c.revert(ignore_expires=True, ignore_discard=True)
        finally:
            os.unlink(c.filename)
        # cookies unchanged apart from lost info re. whether path was specified
        self.assertEqual(
            repr(c),
            re.sub("path_specified=%s" % True, "path_specified=%s" % False,
                   old_str)
            )
        self.assertEqual(interact_netscape(c, "http://www.acme.com/foo/"),
                         '"spam"; eggs')
Project: python2-tracer    Author: extremecoders-re
def test_missing_value(self):
        from cookielib import MozillaCookieJar, lwp_cookie_str

        # missing = sign in Cookie: header is regarded by Mozilla as a missing
        # name, and by cookielib as a missing value
        filename = test_support.TESTFN
        c = MozillaCookieJar(filename)
        interact_netscape(c, "http://www.acme.com/", 'eggs')
        interact_netscape(c, "http://www.acme.com/", '"spam"; path=/foo/')
        cookie = c._cookies["www.acme.com"]["/"]["eggs"]
        self.assertIsNone(cookie.value)
        self.assertEqual(cookie.name, "eggs")
        cookie = c._cookies["www.acme.com"]['/foo/']['"spam"']
        self.assertIsNone(cookie.value)
        self.assertEqual(cookie.name, '"spam"')
        self.assertEqual(lwp_cookie_str(cookie), (
            r'"spam"; path="/foo/"; domain="www.acme.com"; '
            'path_spec; discard; version=0'))
        old_str = repr(c)
        c.save(ignore_expires=True, ignore_discard=True)
        try:
            c = MozillaCookieJar(filename)
            c.revert(ignore_expires=True, ignore_discard=True)
        finally:
            os.unlink(c.filename)
        # cookies unchanged apart from lost info re. whether path was specified
        self.assertEqual(
            repr(c),
            re.sub("path_specified=%s" % True, "path_specified=%s" % False,
                   old_str)
            )
        self.assertEqual(interact_netscape(c, "http://www.acme.com/foo/"),
                         '"spam"; eggs')
Project: Medium-crawler-with-data-analyzer    Author: lifei96
def get_top_stories():
    current_date = START_DATE
    while current_date <= END_DATE:
        top_stories = TopStories()
        date_string = current_date.strftime("%B-%d-%Y").lower()
        url = "https://medium.com/browse/top/" + date_string
        top_stories.data['date'] = current_date.isoformat()
        top_stories.data['url'] = url

        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
                        Chrome/50.0.2661.102 Safari/537.36')
        response = opener.open(req, timeout=10)
        data = response.read()

        stories = []
        story_url = re.findall('<a class="link link--darken" href="(.*?)\?source=top_stories---------[0-9]*-" data-action="open-post"', data)
        num = len(story_url)
        for i in range(num):
            story_data = get_story(story_url[i]).data
            if story_data['success']:
                stories.append(story_data)
            print(i)
        top_stories.data['stories'] = stories

        out = codecs.open("./TopStories/%s.json" % current_date.isoformat(), 'w', 'utf-8')
        out.write(top_stories.getstr())
        out.close()
        print("-----%s obtained" % current_date.isoformat())
        current_date = current_date + datetime.timedelta(days=1)
Project: free-rider-killer    Author: YukiSora
def baiduInitialization(filename):
    global _cj
    _cj = cookielib.MozillaCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(_cj))
    urllib2.install_opener(opener)

    if os.path.exists(filename):
        _cj.load(filename, True)
Project: jd-tools    Author: gogodick
def load_cookie(self, filename):
        try:
            load_cookiejar = cookielib.MozillaCookieJar()
            try:
                load_cookiejar.load(self.cookie_dir+'/'+filename, ignore_discard=True)
            except:
                pass
            self.sess.cookies = load_cookiejar
            return True
        except Exception, e:
            logging.error('Exp {0} : {1}'.format(FuncName(), e))
            return False
Project: Anki-Youdao    Author: megachweng
def saveCookies(self, cookiejar):
        MozillaCookieJar = cookielib.MozillaCookieJar()
        for c in cookiejar:
            args = dict(vars(c).items())
            args['rest'] = args['_rest']
            del args['_rest']
            c = cookielib.Cookie(**args)
            MozillaCookieJar.set_cookie(c)
        MozillaCookieJar.save('youdaoCookies', ignore_discard=True)
Project: Anki-Youdao    Author: megachweng
def loadCookies(self):
        if os.path.exists('youdaoCookies'):
            self.window.debug.appendPlainText('625: Cookie exists!')
            MozillaCookieJar = cookielib.MozillaCookieJar()
            MozillaCookieJar.load('youdaoCookies', ignore_discard=True)
            return MozillaCookieJar
        else:
            return False
Project: UCAS_GET_Course    Author: Hurray0
def __init__(self):
        self.cookie = cookielib.MozillaCookieJar("")
        self.header = [(
            'User-agent',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_1) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0.1 Safari/602.2.14'
        ), ('DNT', '1'), ('Accept', '*/*'), ('X-Requested-With',
                                             'XMLHttpRequest')]
        handler = urllib2.HTTPCookieProcessor(self.cookie)
        self.opener = urllib2.build_opener(handler)
        self.opener.addheaders = self.header
Project: request2doc    Author: kongxinchi
def set_cookie_jar(self, cookie_jar_path):
        self.cookie = cookielib.MozillaCookieJar()
        self.cookie.load(cookie_jar_path, ignore_discard=True, ignore_expires=True)
Project: relational-social-media-search-engine    Author: indervirbanipal
def __init__(self, login, password):
        """ Start up... """
        self.login = login
        self.password = password
        # Simulate browser with cookies enabled
        self.cj = cookielib.MozillaCookieJar(cookie_filename)
        '''
        Creating settings for the proxy
        '''
        # proxy_handler = urllib2.ProxyHandler({'http':'209.222.25.83:3128'})
        # 216.58.194.113
        # proxy_handler = urllib2.ProxyHandler({'http':'8.8.8.8'})

        proxy_handler = urllib2.ProxyHandler({'http':'notional-sign-110911.appspot.com'})
        # proxy_auth_handler = urllib2.ProxyBasicAuthHandler()
        if os.access(cookie_filename, os.F_OK):
            self.cj.load()
        self.opener = urllib2.build_opener(
            urllib2.HTTPRedirectHandler(),
            urllib2.HTTPHandler(debuglevel=0),
            urllib2.HTTPSHandler(debuglevel=0),
            proxy_handler,
            urllib2.HTTPCookieProcessor(self.cj)
        )

        self.opener.addheaders = [
            ('User-agent', ('Mozilla/4.0 (compatible; MSIE 6.0; '
                           'Windows NT 5.2; .NET CLR 1.1.4322)'))
        ]
Project: relational-social-media-search-engine    Author: indervirbanipal
def performFullSearch(self, searchParams, dbHost, dbPort, dbName):
        """ Performs search and Saves the information gathered into DB. This method almost performs everything this class is created for """
        print "inside Perform Search ... "
        try:
            #self.login = login
            #self.password = password
            # Simulate browser with cookies enabled
            self.cj = cookielib.MozillaCookieJar(cookie_filename)
            if os.access(cookie_filename, os.F_OK):
                self.cj.load()
            self.opener = urllib2.build_opener(
                urllib2.HTTPRedirectHandler(),
                urllib2.HTTPHandler(debuglevel=0),
                urllib2.HTTPSHandler(debuglevel=0),
                urllib2.HTTPCookieProcessor(self.cj)
            )
            self.opener.addheaders = [
                ('User-agent', ('Mozilla/4.0 (compatible; MSIE 6.0; '
                               'Windows NT 5.2; .NET CLR 1.1.4322)'))
            ]
            self.checkLogin(url1)
            fName = searchParams['firstName']
            mailId = searchParams['email']
            if fName == 'EMPTY' or mailId == 'EMPTY':
                raise Exception('Info: Search has to be performed from Search page only, Please try again', 'Info')
            fSrchURL = self.formSearchURL(searchParams)
            linkedJSON = self.loadSearch(fSrchURL, fName)
            recordJSON = self.formTrimmedJSON(linkedJSON)
            dbRecord = self.formDBRecord(recordJSON, mailId)
            client = self.connect2DB(dbHost, dbPort)
            print "Client details : "+client.__str__()
            self.store2DB(dbRecord, mailId, client)
            return 'Success'
        except Exception as e:
            x,y = e.args
            return x
Project: relational-social-media-search-engine    Author: indervirbanipal
def filterResult(self, filterParams, dbHost, dbPort, dbName):
        """Performs a filter based on the filter parameters """
        print "Inside Filter Result view ..."
        try:
            self.cj = cookielib.MozillaCookieJar(cookie_filename)
            if os.access(cookie_filename, os.F_OK):
                self.cj.load()
            self.opener = urllib2.build_opener(
                urllib2.HTTPRedirectHandler(),
                urllib2.HTTPHandler(debuglevel=0),
                urllib2.HTTPSHandler(debuglevel=0),
                urllib2.HTTPCookieProcessor(self.cj)
            )
            self.opener.addheaders = [
                ('User-agent', ('Mozilla/4.0 (compatible; MSIE 6.0; '
                               'Windows NT 5.2; .NET CLR 1.1.4322)'))
            ]
            self.checkLogin(url1)

            ## start here ##
            print " Data So Far : \n"+Person.objects.all()
            return 'Success'

        except Exception as e:
            x,y = e.args
            return x
Project: cpp-boilerplate    Author: xwvvvvwx
def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
Project: cpp-boilerplate    Author: xwvvvvwx
def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
Project: Eagle    Author: magerx
def _urllib2Opener():
    """
    This function creates the urllib2 OpenerDirector.
    """

    debugMsg = "creating HTTP requests opener object"
    logger.debug(debugMsg)

    handlers = [proxyHandler, authHandler, redirectHandler, rangeHandler, httpsHandler]

    if not conf.dropSetCookie:
        if not conf.loadCookies:
            conf.cj = cookielib.CookieJar()
        else:
            conf.cj = cookielib.MozillaCookieJar()
            resetCookieJar(conf.cj)

        handlers.append(urllib2.HTTPCookieProcessor(conf.cj))

    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
    if conf.keepAlive:
        warnMsg = "persistent HTTP(s) connections, Keep-Alive, has "
        warnMsg += "been disabled because of its incompatibility "

        if conf.proxy:
            warnMsg += "with HTTP(s) proxy"
            logger.warn(warnMsg)
        elif conf.authType:
            warnMsg += "with authentication methods"
            logger.warn(warnMsg)
        else:
            handlers.append(keepAliveHandler)

    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
Project: pefile.pypy    Author: cloudtracer
def test_missing_value(self):
        from cookielib import MozillaCookieJar, lwp_cookie_str

        # missing = sign in Cookie: header is regarded by Mozilla as a missing
        # name, and by cookielib as a missing value
        filename = test_support.TESTFN
        c = MozillaCookieJar(filename)
        interact_netscape(c, "http://www.acme.com/", 'eggs')
        interact_netscape(c, "http://www.acme.com/", '"spam"; path=/foo/')
        cookie = c._cookies["www.acme.com"]["/"]["eggs"]
        self.assertIsNone(cookie.value)
        self.assertEqual(cookie.name, "eggs")
        cookie = c._cookies["www.acme.com"]['/foo/']['"spam"']
        self.assertIsNone(cookie.value)
        self.assertEqual(cookie.name, '"spam"')
        self.assertEqual(lwp_cookie_str(cookie), (
            r'"spam"; path="/foo/"; domain="www.acme.com"; '
            'path_spec; discard; version=0'))
        old_str = repr(c)
        c.save(ignore_expires=True, ignore_discard=True)
        try:
            c = MozillaCookieJar(filename)
            c.revert(ignore_expires=True, ignore_discard=True)
        finally:
            os.unlink(c.filename)
        # cookies unchanged apart from lost info re. whether path was specified
        self.assertEqual(
            repr(c),
            re.sub("path_specified=%s" % True, "path_specified=%s" % False,
                   old_str)
            )
        self.assertEqual(interact_netscape(c, "http://www.acme.com/foo/"),
                         '"spam"; eggs')
Project: ndk-python    Author: gittor
def test_missing_value(self):
        from cookielib import MozillaCookieJar, lwp_cookie_str

        # missing = sign in Cookie: header is regarded by Mozilla as a missing
        # name, and by cookielib as a missing value
        filename = test_support.TESTFN
        c = MozillaCookieJar(filename)
        interact_netscape(c, "http://www.acme.com/", 'eggs')
        interact_netscape(c, "http://www.acme.com/", '"spam"; path=/foo/')
        cookie = c._cookies["www.acme.com"]["/"]["eggs"]
        self.assertTrue(cookie.value is None)
        self.assertEqual(cookie.name, "eggs")
        cookie = c._cookies["www.acme.com"]['/foo/']['"spam"']
        self.assertTrue(cookie.value is None)
        self.assertEqual(cookie.name, '"spam"')
        self.assertEqual(lwp_cookie_str(cookie), (
            r'"spam"; path="/foo/"; domain="www.acme.com"; '
            'path_spec; discard; version=0'))
        old_str = repr(c)
        c.save(ignore_expires=True, ignore_discard=True)
        try:
            c = MozillaCookieJar(filename)
            c.revert(ignore_expires=True, ignore_discard=True)
        finally:
            os.unlink(c.filename)
        # cookies unchanged apart from lost info re. whether path was specified
        self.assertEqual(
            repr(c),
            re.sub("path_specified=%s" % True, "path_specified=%s" % False,
                   old_str)
            )
        self.assertEqual(interact_netscape(c, "http://www.acme.com/foo/"),
                         '"spam"; eggs')
Project: GoodBooks    Author: moverzp
def _set_cookie(self, fileName):
        cookie = cookielib.MozillaCookieJar()
        cookie.load(fileName, ignore_discard=True, ignore_expires=True)
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
        urllib2.install_opener(opener)
Project: GoodBooks    Author: moverzp
def save_cookie(self, fileName, url): # fetch url and save the cookies it sets to fileName
        # Use a MozillaCookieJar instance to save cookies to a file
        cookie = cookielib.MozillaCookieJar(fileName)
        # build an opener that uses this cookie jar
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
        urllib2.install_opener(opener)

        request = urllib2.Request(url)
        request.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36')
        response = urllib2.urlopen(request)
        print response.getcode()
        cookie.save(ignore_discard=True, ignore_expires=True)
        print 'Successfully saved'
Project: Chromium_DepotTools    Author: p07r0457
def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0o600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0o600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
Project: node-gn    Author: Shouqun
def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0o600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0o600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
Project: autoscan    Author: b01u
def _urllib2Opener():
    """
    This function creates the urllib2 OpenerDirector.
    """

    debugMsg = "creating HTTP requests opener object"
    logger.debug(debugMsg)

    handlers = [proxyHandler, authHandler, redirectHandler, rangeHandler, httpsHandler]

    if not conf.dropSetCookie:
        if not conf.loadCookies:
            conf.cj = cookielib.CookieJar()
        else:
            conf.cj = cookielib.MozillaCookieJar()
            resetCookieJar(conf.cj)

        handlers.append(urllib2.HTTPCookieProcessor(conf.cj))

    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
    if conf.keepAlive:
        warnMsg = "persistent HTTP(s) connections, Keep-Alive, has "
        warnMsg += "been disabled because of its incompatibility "

        if conf.proxy:
            warnMsg += "with HTTP(s) proxy"
            logger.warn(warnMsg)
        elif conf.authType:
            warnMsg += "with authentication methods"
            logger.warn(warnMsg)
        else:
            handlers.append(keepAliveHandler)

    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
Project: dftimewolf    Author: log2timeline
def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
Project: django-learning    Author: adoggie
def __init__(self,name=''):
        self.name = name
        self.cookie = cookielib.CookieJar()
        cookie_file = 'cookie.txt'
        # cookie = cookielib.MozillaCookieJar(cookie_file)
        # self.cookie = cookielib.FileCookieJar(cookie_file)
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor( self.cookie))
        self.headers ={"User-agent":"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1"}
        for line in HEADERS.split('\n'):
            if not line.strip(): continue
            k,v = line.split(':')
            self.headers[k] = v

        self.method = 'GET'
Project: django-learning    Author: adoggie
def __init__(self,name=''):
        self.name = name
        self.cookie = cookielib.CookieJar()
        cookie_file = 'cookie.txt'
        # cookie = cookielib.MozillaCookieJar(cookie_file)
        # self.cookie = cookielib.FileCookieJar(cookie_file)
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor( self.cookie))
        self.headers ={"User-agent":"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1"}
        for line in HEADERS.split('\n'):
            if not line.strip(): continue
            k,v = line.split(':')
            self.headers[k] = v

        self.method = 'GET'
Project: shoes    Author: colakang
def saveCookies (self,uName,uPass):
        cookiefile = "./log/"+uName+"_cookies.txt" 
        self.username = uName
        self.password = uPass
        self.rememberMe = "false"
        self.url = "https://www.nike.com/profile/login?Content-Locale=en_US"
        self.request_body = urllib.urlencode({
            'login':self.username,
            'rememberMe':self.rememberMe,
            'password':self.password
            })
        self.hdr = {'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36'}
        #self.cookie = cookielib.CookieJar()
        self.cookie = cookielib.MozillaCookieJar(cookiefile)
        self.cookie_support = urllib2.HTTPCookieProcessor(self.cookie)
        #opener = urllib2.build_opener(self.proxy_support,self.cookie_support,urllib2.HTTPHandler)
        opener = urllib2.build_opener(self.cookie_support)
        urllib2.install_opener(opener)
        req = urllib2.Request(
            self.url,
            self.request_body,
            #self.hdr
            )
        self.result = opener.open(req).read()
        file_object = open('./log/'+self.username+'_login.txt', 'w')
        file_object.write(self.result)
        file_object.close( )
        #print (self.cookie)
        cs = self.parse("NIKE_COMMERCE_COUNTRY=US; NIKE_COMMERCE_LANG_LOCALE=en_US; mt.m=%7B%22membership%22%3A%5B%22us_aa-el1-ae%22%5D%7D; CONSUMERCHOICE_SESSION=t; CONSUMERCHOICE=us/en_us; nike_locale=us/en_us; cookies.js=1;",".nike.com")
        for c in cs:
            self.cookie.set_cookie(c)
        req_test = urllib2.Request('https://secure-store.nike.com/us/checkout/html/cart.jsp')
        req_test.add_header('Referer', 'http://store.nike.com/us/en_us/?ipp=120')
        #req_test.add_header('User-agent', 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36')
        req2 = opener.open(req_test)
        #file_object = open(self.username+'_cart.txt', 'w')
        #file_object.write(req2.read())
        #file_object.close( )
        #print(self.cookie)
        self.cookie.save(ignore_discard=True, ignore_expires=True)