我们从Python开源项目中提取了以下50个代码示例,用于说明如何使用cookielib.MozillaCookieJar()。
def __init__(self):
    """Set up an empty querier backed by a cookie-aware URL opener."""
    self.articles = []
    self.query = None
    self.cjar = MozillaCookieJar()
    # Reload a previously saved cookie jar when one is configured and present.
    if ScholarConf.COOKIE_JAR_FILE and \
       os.path.exists(ScholarConf.COOKIE_JAR_FILE):
        try:
            self.cjar.load(ScholarConf.COOKIE_JAR_FILE, ignore_discard=True)
            ScholarUtils.log('info', 'loaded cookies file')
        except Exception as msg:
            ScholarUtils.log('warn', 'could not load cookies file: %s' % msg)
            self.cjar = MozillaCookieJar()  # Just to be safe
    self.opener = build_opener(HTTPCookieProcessor(self.cjar))
    self.settings = None  # Last settings object, if any
def urlopen_test(host): headers = [('Host',host), ('Connection', 'keep-alive'), ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'), ('User-Agent', 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0'), #('Accept-Encoding','gzip,deflate'), ('Accept-Language', 'en-US,en;q=0.5')] #????MozillaCookieJar???????cookie cookie=cookielib.MozillaCookieJar() handler=urllib2.HTTPCookieProcessor(cookie) req=urllib2.Request(u'https://'+host) first_opener = urllib2.build_opener(handler) first_opener.addheaders = headers try: result=first_opener.open(req,timeout=60) #60s?? if result.read()!=None: return True except Exception,e: print e return False
def test_bad_magic(self):
    """Loading a missing file raises IOError; bad magic raises LoadError."""
    from cookielib import LWPCookieJar, MozillaCookieJar, LoadError
    # IOErrors (eg. file doesn't exist) are allowed to propagate
    filename = test_support.TESTFN
    for cookiejar_class in LWPCookieJar, MozillaCookieJar:
        c = cookiejar_class()
        try:
            c.load(filename="for this test to work, a file with this "
                            "filename should not exist")
        except IOError as exc:
            # exactly IOError, not LoadError
            self.assertEqual(exc.__class__, IOError)
        else:
            self.fail("expected IOError for invalid filename")
    # Invalid contents of cookies file (eg. bad magic string)
    # causes a LoadError.
    try:
        f = open(filename, "w")
        f.write("oops\n")
        # Close before loading: the original left the handle open, so the
        # write was not guaranteed to be flushed when load() read the file.
        f.close()
        for cookiejar_class in LWPCookieJar, MozillaCookieJar:
            c = cookiejar_class()
            self.assertRaises(LoadError, c.load, filename)
    finally:
        try:
            os.unlink(filename)
        except OSError:
            pass
def get_following(user_id):
    """Return the usernames that *user_id* follows on Medium.

    Walks the paginated API via the "to" cursor until exhausted.  The
    original duplicated the whole request sequence inside the loop; it is
    factored into a local helper here.
    """
    def fetch(url):
        """GET *url* with a fresh cookie jar and return the raw body."""
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent",
                       'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
                       '(KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36')
        return opener.open(req, timeout=10).read()

    base = 'https://medium.com/_/api/users/' + user_id + '/following'
    data = fetch(base)
    following_set = set(re.findall('"username":"(.*?)","createdAt"', data))
    to = re.findall('"to":"(.*?)"}}},"v"', data)
    while to:
        data = fetch(base + '?to=' + to[0])
        following_set.update(re.findall('"username":"(.*?)","createdAt"', data))
        to = re.findall('"to":"(.*?)"}}},"v"', data)
    return list(following_set)
def get_followers(user_id):
    """Return the usernames following *user_id* on Medium.

    Walks the paginated API via the "to" cursor; the duplicated request
    sequence of the original is factored into a local helper.
    """
    def fetch(url):
        """GET *url* with a fresh cookie jar and return the raw body."""
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent",
                       'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
                       '(KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36')
        return opener.open(req, timeout=10).read()

    base = 'https://medium.com/_/api/users/' + user_id + '/followers'
    data = fetch(base)
    followers_set = set(re.findall('"username":"(.*?)","createdAt"', data))
    to = re.findall('"to":"(.*?)"}}},"v"', data)
    while to:
        data = fetch(base + '?to=' + to[0])
        followers_set.update(re.findall('"username":"(.*?)","createdAt"', data))
        to = re.findall('"to":"(.*?)"}}},"v"', data)
    return list(followers_set)
def get_latest(user_id):
    """Return post IDs from *user_id*'s "latest" stream on Medium.

    Pagination uses the "to" cursor appended with '&to=' since the base
    URL already carries a query string.  The duplicated request sequence
    of the original is factored into a local helper.
    """
    def fetch(url):
        """GET *url* with a fresh cookie jar and return the raw body."""
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent",
                       'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
                       '(KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36')
        return opener.open(req, timeout=10).read()

    base = 'https://medium.com/_/api/users/' + user_id + '/profile/stream?source=latest'
    data = fetch(base)
    latest_set = set(re.findall('"postId":"(.*?)"},"randomId"', data))
    to = re.findall('"to":"(.*?)","source":"latest"', data)
    while to:
        data = fetch(base + '&to=' + to[0])
        latest_set.update(re.findall('"postId":"(.*?)"},"randomId"', data))
        to = re.findall('"to":"(.*?)","source":"latest"', data)
    return list(latest_set)
def get_recommends(user_id):
    """Return post IDs that *user_id* has recommended on Medium.

    Pagination uses the "to" cursor appended with '&to='.  The duplicated
    request sequence of the original is factored into a local helper.
    """
    def fetch(url):
        """GET *url* with a fresh cookie jar and return the raw body."""
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent",
                       'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
                       '(KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36')
        return opener.open(req, timeout=10).read()

    base = 'https://medium.com/_/api/users/' + user_id + '/profile/stream?source=has-recommended'
    data = fetch(base)
    recommends_set = set(re.findall('w":{"postId":"(.*?)"},"randomId"', data))
    to = re.findall('"to":"(.*?)","source":"has-recommended"', data)
    while to:
        data = fetch(base + '&to=' + to[0])
        recommends_set.update(re.findall('w":{"postId":"(.*?)"},"randomId"', data))
        to = re.findall('"to":"(.*?)","source":"has-recommended"', data)
    return list(recommends_set)
def get_highlights(user_id):
    """Return post IDs that *user_id* highlighted (quoted) on Medium.

    Pagination uses the "to" cursor appended with '&to='.  The duplicated
    request sequence of the original is factored into a local helper.
    """
    def fetch(url):
        """GET *url* with a fresh cookie jar and return the raw body."""
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent",
                       'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
                       '(KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36')
        return opener.open(req, timeout=10).read()

    base = 'https://medium.com/_/api/users/' + user_id + '/profile/stream?source=quotes'
    data = fetch(base)
    highlights_set = set(re.findall('","postId":"(.*?)","userId":"', data))
    to = re.findall('"to":"(.*?)","source":"quotes"', data)
    while to:
        data = fetch(base + '&to=' + to[0])
        highlights_set.update(re.findall('","postId":"(.*?)","userId":"', data))
        to = re.findall('"to":"(.*?)","source":"quotes"', data)
    return list(highlights_set)
def get_twitter_profile(username, twitter_id):
    """Fetch a Twitter profile page and dump its embedded JSON to disk.

    Writes ./Twitter/<username>_t.json after stripping UI-only sections.
    """
    url = "https://twitter.com/" + str(twitter_id) + "?lang=en"
    cj = cookielib.MozillaCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    req = urllib2.Request(url)
    req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
Chrome/50.0.2661.102 Safari/537.36')
    response = opener.open(req, timeout=10)
    data = response.read()
    profile_data = re.findall('class="json-data" value="(.*?)">', data)
    profile = json.loads(profile_data[0].replace('&quot;', '"'))
    # Drop sections that are UI state rather than profile content.
    for key in ("promptbirdData", "wtfOptions", "typeaheadData", "dm",
                "initialState", "activeHashflags", "keyboardShortcuts",
                "deciders"):
        profile.pop(key, None)
    out = codecs.open("./Twitter/%s_t.json" % username, 'w', 'utf-8')
    out.write(json.dumps(profile, indent=4))
    out.close()
def __init__(self): self.articles = [] self.query = None self.cjar = MozillaCookieJar() # If we have a cookie file, load it: if ScholarConf.COOKIE_JAR_FILE and \ os.path.exists(ScholarConf.COOKIE_JAR_FILE): try: self.cjar.load(ScholarConf.COOKIE_JAR_FILE, ignore_discard=True) print "Using cookie file" ScholarUtils.log('info', 'loaded cookies file') except Exception as msg: print "Ignoring cookie file: %s" % msg ScholarUtils.log('warn', 'could not load cookies file: %s' % msg) self.cjar = MozillaCookieJar() # Just to be safe self.opener = build_opener(HTTPCookieProcessor(self.cjar)) self.settings = None # Last settings object, if any
def get_name(uid): url = 'https://www.airbnb.com/users/show/' + uid + '?locale=en' cj = cookielib.MozillaCookieJar() opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj)) req = urllib2.Request(url) req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \ Chrome/50.0.2661.102 Safari/537.36') try: response = opener.open(req, timeout=10) except Exception as inst: print type(inst) print inst.args print inst print('-----fail to get name data') return '' data = response.read() name = re.findall('Hey, I’m (.*?)!', data) if len(name): return name[0] else: return ''
def clearCookie(self, cookiefile, leaveNames=[], removeNames=None, ignore_discard=True):
    """Delete cookies from *cookiefile*, keeping any named in *leaveNames*.

    When *removeNames* is given, only those names are deleted.  The jar
    format (LWP vs Mozilla) follows self.useMozillaCookieJar.  Returns
    True on success, False on any error.
    """
    try:
        jar = cookielib.MozillaCookieJar() if self.useMozillaCookieJar \
            else cookielib.LWPCookieJar()
        jar.load(cookiefile, ignore_discard=ignore_discard)
        # Collect first, then clear: don't mutate the jar while iterating.
        doomed = [c for c in jar
                  if c.name not in leaveNames
                  and (None == removeNames or c.name in removeNames)]
        for c in doomed:
            jar.clear(c.domain, c.path, c.name)
        jar.save(cookiefile, ignore_discard=ignore_discard)
    except Exception:
        printExc()
        return False
    return True
def __init__(self):
    """Create a mechanize browser with a Mozilla-format cookie jar."""
    self.br = mechanize.Browser()
    #self.cj = cookielib.LWPCookieJar()
    self.cj = cookielib.MozillaCookieJar()
    self.br.set_cookiejar(self.cj)
    # Behave like a regular browser, but ignore robots.txt.
    self.br.set_handle_equiv(True)
    self.br.set_handle_referer(True)
    self.br.set_handle_robots(False)
    self.br.addheaders = [('User-agent', 'Firefox')]
    self.item_url = 'http://shop.bdgastore.com/collections/footwear/products/y-3-pureboost-zg'
# Create variables for user credentials and a function to import them
def login(form_data):
    """POST *form_data* to Sina SSO, follow the redirect, persist cookies."""
    url = 'http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.4.18)'
    headers = ('User-Agent', 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0')
    cookie = cookielib.MozillaCookieJar(cookie_file)
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
    opener.addheaders.append(headers)
    req = opener.open(url, form_data)
    redirect_result = req.read()
    # The reply embeds the real login URL inside location.replace('...').
    login_pattern = r'location.replace\(\'(.*?)\'\)'
    login_url = re.search(login_pattern, redirect_result).group(1)
    opener.open(login_url).read()
    cookie.save(cookie_file, ignore_discard=True, ignore_expires=True)
def request_image_url(image_path):
    """Upload the image at *image_path* to Weibo and return its public URL."""
    cookie = cookielib.MozillaCookieJar()
    cookie.load(cookie_file, ignore_expires=False, ignore_discard=True)
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
    image_url = 'http://picupload.service.weibo.com/interface/pic_upload.php?mime=image%2Fjpeg&data=base64&url=0&markpos=1&logo=&nick=0&marks=1&app=miniblog'
    # Close the handle deterministically: the original used the deprecated
    # file() builtin and leaked the descriptor.
    with open(image_path, 'rb') as fh:
        b = base64.b64encode(fh.read())
    data = urllib.urlencode({'b64_data': b})
    result = opener.open(image_url, data).read()
    # Strip the HTML wrapper so only the JSON payload remains.
    result = re.sub(r"<meta.*</script>", "", result, flags=re.S)
    image_result = json.loads(result)
    image_id = image_result.get('data').get('pics').get('pic_1').get('pid')
    return 'https://ws3.sinaimg.cn/large/%s.jpg' % image_id
def login(self,login_url="http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.4.18)"): j_data = self.get_sso() postdata = { 'entry': "weibo", 'gateway': '1', 'from': '', 'savestate': '7', 'userticket': '1', 'pagerefer': "", 'vsnf': '1', 'su': self.get_user(), 'service': 'miniblog', 'servertime': j_data.get("servertime"), 'nonce': j_data.get("nonce"), 'pwencode': 'rsa2', 'rsakv': j_data.get("rsakv"), 'sp': self.get_passwd(j_data.get("pubkey"), j_data.get("servertime"), j_data.get("nonce")), 'sr': "1440*900", 'encoding': 'UTF-8', 'prelt': '503', 'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack', 'returntype': 'META' } cookie=cookielib.MozillaCookieJar("Cookie.txt") opener=urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie)) result=opener.open(login_url,urllib.urlencode(postdata)) cookie.save(ignore_discard=True, ignore_expires=True) with open(os.path.join(os.path.dirname(__file__),"test.html"),"wb") as f: f.write(result.read()) print u"????"
def login(self, login_url="http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.4.18)"): j_data = self.get_sso() postdata = { 'entry': "weibo", 'gateway': '1', 'from': '', 'savestate': '7', 'userticket': '1', 'pagerefer': "", 'vsnf': '1', 'su': self.get_user(), 'service': 'miniblog', 'servertime': j_data.get("servertime"), 'nonce': j_data.get("nonce"), 'pwencode': 'rsa2', 'rsakv': j_data.get("rsakv"), 'sp': self.get_passwd(j_data.get("pubkey"), j_data.get("servertime"), j_data.get("nonce")), 'sr': "1440*900", 'encoding': 'UTF-8', 'prelt': '503', 'url': 'http://weibo.com/ajaxlogin.php?framelogin=1&callback=parent.sinaSSOController.feedBackUrlCallBack', 'returntype': 'META' } cookie = cookielib.MozillaCookieJar("Cookie.txt") opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie)) result = opener.open(login_url, urllib.urlencode(postdata)) cookie.save(ignore_discard=True, ignore_expires=True) html = opener.open( "http://weibo.com/p/1005055887581312").read() with open(os.path.join(os.path.dirname(__file__), "text.html"), "wb") as f: f.write(html) print "????"
def test_missing_value(self):
    """Cookies with no '=' keep their name and get a None value."""
    from cookielib import MozillaCookieJar, lwp_cookie_str
    # missing = sign in Cookie: header is regarded by Mozilla as a missing
    # name, and by cookielib as a missing value
    filename = test_support.TESTFN
    c = MozillaCookieJar(filename)
    interact_netscape(c, "http://www.acme.com/", 'eggs')
    interact_netscape(c, "http://www.acme.com/", '"spam"; path=/foo/')
    cookie = c._cookies["www.acme.com"]["/"]["eggs"]
    self.assertIsNone(cookie.value)
    self.assertEqual(cookie.name, "eggs")
    cookie = c._cookies["www.acme.com"]['/foo/']['"spam"']
    self.assertIsNone(cookie.value)
    self.assertEqual(cookie.name, '"spam"')
    self.assertEqual(lwp_cookie_str(cookie), (
        r'"spam"; path="/foo/"; domain="www.acme.com"; '
        'path_spec; discard; version=0'))
    old_str = repr(c)
    c.save(ignore_expires=True, ignore_discard=True)
    try:
        c = MozillaCookieJar(filename)
        c.revert(ignore_expires=True, ignore_discard=True)
    finally:
        os.unlink(c.filename)
    # cookies unchanged apart from lost info re. whether path was specified
    self.assertEqual(
        repr(c),
        re.sub("path_specified=%s" % True, "path_specified=%s" % False,
               old_str)
    )
    self.assertEqual(interact_netscape(c, "http://www.acme.com/foo/"),
                     '"spam"; eggs')
def get_top_stories():
    """Crawl Medium's daily top-story listings from START_DATE to END_DATE.

    One JSON file per day is written under ./TopStories/.
    """
    current_date = START_DATE
    while current_date <= END_DATE:
        top_stories = TopStories()
        date_string = current_date.strftime("%B-%d-%Y").lower()
        url = "https://medium.com/browse/top/" + date_string
        top_stories.data['date'] = current_date.isoformat()
        top_stories.data['url'] = url
        cj = cookielib.MozillaCookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        req = urllib2.Request(url)
        req.add_header("User-agent", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) \
Chrome/50.0.2661.102 Safari/537.36')
        data = opener.open(req, timeout=10).read()
        stories = []
        story_url = re.findall('<a class="link link--darken" href="(.*?)\?source=top_stories---------[0-9]*-" data-action="open-post"', data)
        for i, link in enumerate(story_url):
            story_data = get_story(link).data
            if story_data['success']:
                stories.append(story_data)
            print(i)
        top_stories.data['stories'] = stories
        out = codecs.open("./TopStories/%s.json" % current_date.isoformat(), 'w', 'utf-8')
        out.write(top_stories.getstr())
        out.close()
        print("-----%s obtained" % current_date.isoformat())
        current_date = current_date + datetime.timedelta(days=1)
def baiduInitialization(filename):
    """Install a global urllib2 opener backed by the module-wide cookie jar."""
    global _cj
    _cj = cookielib.MozillaCookieJar()
    urllib2.install_opener(
        urllib2.build_opener(urllib2.HTTPCookieProcessor(_cj)))
    # Reload persisted cookies when the jar file already exists.
    if os.path.exists(filename):
        _cj.load(filename, True)  # second positional arg: ignore_discard
def load_cookie(self, filename):
    """Attach cookies from *filename* (Mozilla format) to the session.

    Returns True on success, False on unexpected errors.  A missing or
    unreadable file is tolerated and simply leaves the jar empty.
    """
    try:
        jar = cookielib.MozillaCookieJar()
        try:
            jar.load(self.cookie_dir + '/' + filename, ignore_discard=True)
        except:
            pass  # best effort: keep going with an empty jar
        self.sess.cookies = jar
        return True
    except Exception as e:
        logging.error('Exp {0} : {1}'.format(FuncName(), e))
        return False
def saveCookies(self, cookiejar):
    """Persist *cookiejar* to the 'youdaoCookies' file in Mozilla format.

    Each cookie is rebuilt as a cookielib.Cookie because the source jar
    may hold cookies of another class.  The original bound the jar to a
    local named MozillaCookieJar, shadowing the cookielib class.
    """
    jar = cookielib.MozillaCookieJar()
    for c in cookiejar:
        args = dict(vars(c).items())
        # cookielib.Cookie's constructor expects 'rest', not '_rest'.
        args['rest'] = args.pop('_rest')
        jar.set_cookie(cookielib.Cookie(**args))
    jar.save('youdaoCookies', ignore_discard=True)
def loadCookies(self):
    """Return a jar loaded from 'youdaoCookies', or False when the file is absent.

    The original shadowed the cookielib.MozillaCookieJar class with a
    local of the same name; a distinct name is used here.
    """
    if not os.path.exists('youdaoCookies'):
        return False
    self.window.debug.appendPlainText('625: Cookie exists!')
    jar = cookielib.MozillaCookieJar()
    jar.load('youdaoCookies', ignore_discard=True)
    return jar
def __init__(self):
    """Build a cookie-aware opener with Safari-like default headers."""
    self.cookie = cookielib.MozillaCookieJar("")
    self.header = [
        ('User-agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_1) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0.1 Safari/602.2.14'),
        ('DNT', '1'),
        ('Accept', '*/*'),
        ('X-Requested-With', 'XMLHttpRequest'),
    ]
    self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookie))
    self.opener.addheaders = self.header
def set_cookie_jar(self, cookie_jar_path):
    """Load the Mozilla-format cookie file at *cookie_jar_path* into self.cookie."""
    jar = cookielib.MozillaCookieJar()
    jar.load(cookie_jar_path, ignore_discard=True, ignore_expires=True)
    self.cookie = jar
def __init__(self, login, password):
    """Start up: remember credentials and build a cookie/proxy-aware opener."""
    self.login = login
    self.password = password
    # Simulate browser with cookies enabled
    self.cj = cookielib.MozillaCookieJar(cookie_filename)
    # Proxy settings — earlier attempts kept for reference:
    # proxy_handler = urllib2.ProxyHandler({'http':'209.222.25.83:3128'})  # 216.58.194.113
    # proxy_handler = urllib2.ProxyHandler({'http':'8.8.8.8'})
    proxy_handler = urllib2.ProxyHandler({'http':'notional-sign-110911.appspot.com'})
    # proxy_auth_handler = urllib2.ProxyBasicAuthHandler()
    if os.access(cookie_filename, os.F_OK):
        self.cj.load()
    self.opener = urllib2.build_opener(
        urllib2.HTTPRedirectHandler(),
        urllib2.HTTPHandler(debuglevel=0),
        urllib2.HTTPSHandler(debuglevel=0),
        proxy_handler,
        urllib2.HTTPCookieProcessor(self.cj),
    )
    self.opener.addheaders = [
        ('User-agent', ('Mozilla/4.0 (compatible; MSIE 6.0; '
                        'Windows NT 5.2; .NET CLR 1.1.4322)')),
    ]
def performFullSearch(self, searchParams, dbHost, dbPort, dbName): """ Performs search and Saves the information gathered into DB. This method almost performs everything this class is created for """ print "inside Perform Search ... " try: #self.login = login #self.password = password # Simulate browser with cookies enabled self.cj = cookielib.MozillaCookieJar(cookie_filename) if os.access(cookie_filename, os.F_OK): self.cj.load() self.opener = urllib2.build_opener( urllib2.HTTPRedirectHandler(), urllib2.HTTPHandler(debuglevel=0), urllib2.HTTPSHandler(debuglevel=0), urllib2.HTTPCookieProcessor(self.cj) ) self.opener.addheaders = [ ('User-agent', ('Mozilla/4.0 (compatible; MSIE 6.0; ' 'Windows NT 5.2; .NET CLR 1.1.4322)')) ] self.checkLogin(url1) fName = searchParams['firstName'] mailId = searchParams['email'] if fName == 'EMPTY' or mailId == 'EMPTY': raise Exception('Info: Search has to be performed from Search page only, Please try again', 'Info') fSrchURL = self.formSearchURL(searchParams) linkedJSON = self.loadSearch(fSrchURL, fName) recordJSON = self.formTrimmedJSON(linkedJSON) dbRecord = self.formDBRecord(recordJSON, mailId) client = self.connect2DB(dbHost, dbPort) print "Client details : "+client.__str__() self.store2DB(dbRecord, mailId, client) return 'Success' except Exception as e: x,y = e.args return x
def filterResult(self, filterParams, dbHost, dbPort, dbName): """Performs a filter based on the filter parameters """ print "Inside Filter Result view ..." try: self.cj = cookielib.MozillaCookieJar(cookie_filename) if os.access(cookie_filename, os.F_OK): self.cj.load() self.opener = urllib2.build_opener( urllib2.HTTPRedirectHandler(), urllib2.HTTPHandler(debuglevel=0), urllib2.HTTPSHandler(debuglevel=0), urllib2.HTTPCookieProcessor(self.cj) ) self.opener.addheaders = [ ('User-agent', ('Mozilla/4.0 (compatible; MSIE 6.0; ' 'Windows NT 5.2; .NET CLR 1.1.4322)')) ] self.checkLogin(url1) ## start here ## print " Data So Far : \n"+Person.objects.all() return 'Success' except Exception as e: x,y = e.args return x
def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
        self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
        self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
        if os.path.exists(self.cookie_file):
            try:
                self.cookie_jar.load()
                self.authenticated = True
                StatusUpdate("Loaded authentication cookies from %s" %
                             self.cookie_file)
            except (cookielib.LoadError, IOError):
                # Failed to load cookies - just ignore them.
                pass
        else:
            # Create an empty cookie file with mode 600.  0o600 is the
            # portable octal spelling (the original's 0600 is Python-2-only
            # syntax, inconsistent with the twin of this method elsewhere
            # in the file).
            fd = os.open(self.cookie_file, os.O_CREAT, 0o600)
            os.close(fd)
        # Always chmod the cookie file
        os.chmod(self.cookie_file, 0o600)
    else:
        # Don't save cookies across runs of update.py.
        self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
def _urllib2Opener():
    """
    This function creates the urllib2 OpenerDirector.
    """
    logger.debug("creating HTTP requests opener object")
    handlers = [proxyHandler, authHandler, redirectHandler, rangeHandler, httpsHandler]
    if not conf.dropSetCookie:
        # MozillaCookieJar only when cookies must round-trip through a file.
        conf.cj = cookielib.MozillaCookieJar() if conf.loadCookies \
            else cookielib.CookieJar()
        resetCookieJar(conf.cj)
        handlers.append(urllib2.HTTPCookieProcessor(conf.cj))
    # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
    if conf.keepAlive:
        warnMsg = "persistent HTTP(s) connections, Keep-Alive, has "
        warnMsg += "been disabled because of its incompatibility "
        if conf.proxy:
            warnMsg += "with HTTP(s) proxy"
            logger.warn(warnMsg)
        elif conf.authType:
            warnMsg += "with authentication methods"
            logger.warn(warnMsg)
        else:
            handlers.append(keepAliveHandler)
    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
def test_missing_value(self):
    """Cookies with no '=' keep their name and get a None value."""
    from cookielib import MozillaCookieJar, lwp_cookie_str
    # missing = sign in Cookie: header is regarded by Mozilla as a missing
    # name, and by cookielib as a missing value
    filename = test_support.TESTFN
    c = MozillaCookieJar(filename)
    interact_netscape(c, "http://www.acme.com/", 'eggs')
    interact_netscape(c, "http://www.acme.com/", '"spam"; path=/foo/')
    cookie = c._cookies["www.acme.com"]["/"]["eggs"]
    # assertIsNone instead of assertTrue(... is None): clearer failure
    # message and consistent with the other copy of this test in the file.
    self.assertIsNone(cookie.value)
    self.assertEqual(cookie.name, "eggs")
    cookie = c._cookies["www.acme.com"]['/foo/']['"spam"']
    self.assertIsNone(cookie.value)
    self.assertEqual(cookie.name, '"spam"')
    self.assertEqual(lwp_cookie_str(cookie), (
        r'"spam"; path="/foo/"; domain="www.acme.com"; '
        'path_spec; discard; version=0'))
    old_str = repr(c)
    c.save(ignore_expires=True, ignore_discard=True)
    try:
        c = MozillaCookieJar(filename)
        c.revert(ignore_expires=True, ignore_discard=True)
    finally:
        os.unlink(c.filename)
    # cookies unchanged apart from lost info re. whether path was specified
    self.assertEqual(
        repr(c),
        re.sub("path_specified=%s" % True, "path_specified=%s" % False,
               old_str)
    )
    self.assertEqual(interact_netscape(c, "http://www.acme.com/foo/"),
                     '"spam"; eggs')
def _set_cookie(self, fileName):
    """Globally install an opener whose cookies come from *fileName*."""
    jar = cookielib.MozillaCookieJar()
    jar.load(fileName, ignore_discard=True, ignore_expires=True)
    urllib2.install_opener(
        urllib2.build_opener(urllib2.HTTPCookieProcessor(jar)))
def save_cookie(self, fileName, url): #????????????? #????MozillaCookieJar????cookie cookie = cookielib.MozillaCookieJar(fileName) #??opener opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie)) urllib2.install_opener(opener) request = urllib2.Request(url) request.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36') response = urllib2.urlopen(request) print response.getcode() cookie.save(ignore_discard=True, ignore_expires=True) print 'Successfully saved'
def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    for handler in (urllib2.ProxyHandler(),
                    urllib2.UnknownHandler(),
                    urllib2.HTTPHandler(),
                    urllib2.HTTPDefaultErrorHandler(),
                    urllib2.HTTPSHandler(),
                    urllib2.HTTPErrorProcessor()):
        opener.add_handler(handler)
    if self.save_cookies:
        self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
        self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
        if os.path.exists(self.cookie_file):
            try:
                self.cookie_jar.load()
                self.authenticated = True
                StatusUpdate("Loaded authentication cookies from %s" %
                             self.cookie_file)
            except (cookielib.LoadError, IOError):
                # Failed to load cookies - just ignore them.
                pass
        else:
            # Create an empty cookie file with mode 600
            fd = os.open(self.cookie_file, os.O_CREAT, 0o600)
            os.close(fd)
        # Always chmod the cookie file
        os.chmod(self.cookie_file, 0o600)
    else:
        # Don't save cookies across runs of update.py.
        self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
def __init__(self, name=''):
    """Prepare a cookie-backed opener and default request headers."""
    self.name = name
    self.cookie = cookielib.CookieJar()
    cookie_file = 'cookie.txt'
    # cookie = cookielib.MozillaCookieJar(cookie_file)
    # self.cookie = cookielib.FileCookieJar(cookie_file)
    self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(
        self.cookie))
    self.headers = {"User-agent": "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1"}
    for line in HEADERS.split('\n'):
        if not line.strip():
            continue
        # Split on the FIRST colon only: header values such as URLs can
        # themselves contain ':' and crashed the original 2-way unpack.
        k, v = line.split(':', 1)
        self.headers[k] = v
    self.method = 'GET'
def saveCookies(self, uName, uPass):
    """Log *uName* into nike.com, dump the login reply, and persist cookies."""
    cookiefile = "./log/" + uName + "_cookies.txt"
    self.username = uName
    self.password = uPass
    self.rememberMe = "false"
    self.url = "https://www.nike.com/profile/login?Content-Locale=en_US"
    self.request_body = urllib.urlencode({
        'login': self.username,
        'rememberMe': self.rememberMe,
        'password': self.password
    })
    self.hdr = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36'}
    #self.cookie = cookielib.CookieJar()
    self.cookie = cookielib.MozillaCookieJar(cookiefile)
    self.cookie_support = urllib2.HTTPCookieProcessor(self.cookie)
    #opener = urllib2.build_opener(self.proxy_support,self.cookie_support,urllib2.HTTPHandler)
    opener = urllib2.build_opener(self.cookie_support)
    urllib2.install_opener(opener)
    req = urllib2.Request(
        self.url,
        self.request_body,
        #self.hdr
    )
    self.result = opener.open(req).read()
    # Keep a copy of the raw login response for debugging.
    with open('./log/' + self.username + '_login.txt', 'w') as log_file:
        log_file.write(self.result)
    #print (self.cookie)
    # Seed region/locale cookies the storefront expects before hitting the cart.
    cs = self.parse("NIKE_COMMERCE_COUNTRY=US; NIKE_COMMERCE_LANG_LOCALE=en_US; mt.m=%7B%22membership%22%3A%5B%22us_aa-el1-ae%22%5D%7D; CONSUMERCHOICE_SESSION=t; CONSUMERCHOICE=us/en_us; nike_locale=us/en_us; cookies.js=1;", ".nike.com")
    for c in cs:
        self.cookie.set_cookie(c)
    req_test = urllib2.Request('https://secure-store.nike.com/us/checkout/html/cart.jsp')
    req_test.add_header('Referer', 'http://store.nike.com/us/en_us/?ipp=120')
    #req_test.add_header('User-agent', 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36')
    req2 = opener.open(req_test)
    #file_object = open(self.username+'_cart.txt', 'w')
    #file_object.write(req2.read())
    #file_object.close( )
    #print(self.cookie)
    self.cookie.save(ignore_discard=True, ignore_expires=True)