我们从Python开源项目中,提取了以下39个代码示例,用于说明如何使用cPickle.UnpicklingError()。
def read_snapshots(filename):
    """Sequentially read the sets of signatures from a file.

    For each set of signatures, a GCSnapshot is created with the stored
    name.  Reading stops at end-of-file or at the first corrupt record.

    Parameters:
        filename -- path of the snapshot file to read.

    Returns:
        dict mapping snapshot name -> GCSnapshot object.
    """
    result = {}
    # 'with' guarantees the handle is closed even if GCSnapshot raises
    # something other than the two expected end conditions; the original
    # leaked the file object in that case.
    with open(filename, 'r') as f:
        while True:
            try:
                snap = GCSnapshot(f)
            except (EOFError, pickle.UnpicklingError):
                # EOFError: clean end of file.  UnpicklingError: truncated
                # or corrupt trailing record.  Both terminate the scan.
                break
            result[snap.name] = snap
    return result


#### BEGIN: ONLY FOR THE TESTS
def secure_loads(data, encryption_key, hash_key=None, compression_level=None):
    """Verify, decrypt and unpickle a payload produced by the matching
    secure_dumps counterpart.

    data              -- 'signature:base64(IV + AES ciphertext)' string
    encryption_key    -- secret; first 32 bytes (padded) form the AES key
    hash_key          -- HMAC key; derived from encryption_key when absent
    compression_level -- if truthy, payload is zlib-decompressed before
                         unpickling

    Returns the unpickled object, or None on any verification/decoding
    failure (malformed input, bad signature, undecryptable payload).
    """
    # No separator means the payload cannot contain a signature.
    if not ':' in data:
        return None
    if not hash_key:
        # Derive the HMAC key from the encryption key when not supplied.
        hash_key = hashlib.sha1(encryption_key).hexdigest()
    signature, encrypted_data = data.split(':', 1)
    actual_signature = hmac.new(hash_key, encrypted_data).hexdigest()
    # 'compare' is presumably a constant-time comparison -- verify; a plain
    # '==' here would leak timing information about the signature.
    if not compare(signature, actual_signature):
        return None
    key = pad(encryption_key[:32])
    encrypted_data = base64.urlsafe_b64decode(encrypted_data)
    # First 16 bytes of the decoded blob are the AES IV.
    IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
    cipher, _ = AES_new(key, IV=IV)
    try:
        data = cipher.decrypt(encrypted_data)
        # Strip the space padding added at encryption time.
        data = data.rstrip(' ')
        if compression_level:
            data = zlib.decompress(data)
        return pickle.loads(data)
    except (TypeError, pickle.UnpicklingError):
        # Garbage after decryption (wrong key / tampered data) -> None.
        return None


### compute constant CTOKENS
def load_models(models_dir):
    """Load saved models from disk.

    This will attempt to unpickle all files in a directory; any files that
    give errors on unpickling (such as README.txt) will be skipped.

    Inputs:
    - models_dir: String giving the path to a directory containing model
      files. Each model file is a pickled dictionary with a 'model' field.

    Returns:
    A dictionary mapping model file names to models.
    """
    models = {}
    for model_file in os.listdir(models_dir):
        with open(os.path.join(models_dir, model_file), 'rb') as f:
            try:
                models[model_file] = pickle.load(f)['model']
            except (pickle.UnpicklingError, EOFError, KeyError):
                # Original caught only UnpicklingError, so an empty file
                # (EOFError) or a pickle without a 'model' key (KeyError)
                # crashed instead of being skipped as documented.
                continue
    return models
def waitForNextObs(obsNum, statusUrl, sessionId, maxWaitTime, checkInterval = 60, isgleam = False):
    """Poll the delivery-status service until the observation is delivered,
    fails, or the wait budget is exhausted.

    obsNum        -- observation identifier used for status bookkeeping
    statusUrl     -- base URL; sessionId is appended to form the query URL
    sessionId     -- session identifier to query
    maxWaitTime   -- overall timeout budget in seconds
    checkInterval -- seconds to sleep between polls (default 60)
    isgleam       -- forwarded to markObsDeliveredStatus as isGleam
    """
    max_time = 0
    while (max_time <= maxWaitTime):
        time.sleep(checkInterval)
        max_time += checkInterval
        try:
            strRes = urllib2.urlopen(statusUrl + sessionId).read()
            # NOTE(review): pickle.loads on bytes fetched over HTTP is
            # unsafe unless the status service is fully trusted -- confirm.
            myRes = pickle.loads(strRes)
            if (0 == myRes.number_files_to_be_delivered):
                # modify database
                markObsDeliveredStatus(obsNum, isGleam = isgleam)
                break
            elif (myRes.errorcode):
                # Mover reported an error: record delivery failure (-1).
                markObsDeliveredStatus(obsNum, -1, isGleam = isgleam)
                break
        except (UnpicklingError, socket.timeout) as uerr:
            # Transient failure: log and keep polling until maxWaitTime.
            logger.error("Something wrong while getting status for obsNum %s, %s" % (obsNum, str(uerr)))
            continue
def load_link_list(self):
    """Restore saved links and bookmarks from ``link_list.dat``.

    The file holds a pickled ``(disc, bkmark)`` pair of dicts.  Missing,
    incomplete, or corrupt files are logged and otherwise ignored; on
    success the entries are re-registered via add_link_list/add_bookmark.
    """
    try:
        # 'with' closes the handle even on error (the original leaked it),
        # and 'infile' avoids shadowing the builtin input().
        with open('link_list.dat', 'rb') as infile:
            data = cPickle.load(infile)
        disc, bkmark = data
    except IOError:
        log.msg("No link_list.dat found.")
    except ValueError:
        log.msg("File link_list.dat incomplete.")
    except cPickle.UnpicklingError:
        log.msg("File link_list.dat bad or corrupt.")
    else:
        for key, value in disc.iteritems():
            self.add_link_list(value, key)
        for key, value in bkmark.iteritems():
            self.add_bookmark(key)
def __init__(self, index):
    """Initialize the TokI object from a MongoDB or load from disk.

    index -- owning index object; provides ``mongo_db`` (when pymongo is
             available) or ``base_fname`` for the on-disk pickle.

    Raises IndexLoadError when the backing collection/file is missing,
    empty, unreadable, or corrupt.
    """
    self.index = index
    if pymongo:
        # MongoDB-backed storage path.
        if 'toki' in self.index.mongo_db.collection_names():
            self.mongo_toki = self.index.mongo_db['toki']
            if self.mongo_toki.count() == 0:
                # Collection exists but holds nothing usable.
                raise IndexLoadError
        else:
            raise IndexLoadError
    else:
        # Load into memory (not suitable for large corpora!)
        try:
            with open(self.index.base_fname + '.toki', mode='rb') as f:
                self.toki = pickle.load(f)
                if not self.toki:
                    # An empty table is treated the same as a bad file.
                    raise IndexLoadError
        except (IOError, pickle.UnpicklingError):
            raise IndexLoadError
def __init__(self, log, journaledService, path, loadedCallback):
    """Load journal state from *path* (if it exists) and hand the
    recovered object to *loadedCallback* before delegating to Journal.

    The file holds a pickled ``(lastSync, obj)`` pair; any read or
    unpickling failure falls back to ``(0, None)``.
    """
    self.path = path
    if os.path.exists(path):
        try:
            # 'with' closes the handle; the original pickle.load(open(...))
            # leaked the file object.
            with open(path, "rb") as f:
                self.lastSync, obj = pickle.load(f)
        except (IOError, OSError, pickle.UnpicklingError):
            # Unreadable or corrupt journal: start from a clean slate.
            self.lastSync, obj = 0, None
        loadedCallback(obj)
    else:
        self.lastSync = 0
        loadedCallback(None)
    Journal.__init__(self, log, journaledService)
def get(self, *args, **kw):
    """Fetch a record from the underlying db and transparently unpickle it.

    All arguments are forwarded untouched to ``self.db.get`` so that
    omitting the default value still lets the extension module raise when
    set_get_returns_none is turned off.  Values that fail to unpickle
    (the default value, or None) are returned as-is.
    """
    raw = self.db.get(*args, **kw)
    try:
        value = cPickle.loads(raw)
    except (EOFError, TypeError, cPickle.UnpicklingError):
        # Not pickled data -- we may be looking at the caller's default
        # value or None, so hand it back unmodified.
        value = raw
    return value
def load_object(file_path):
    """Unpickle and return the object stored at *file_path*.

    ``~`` in the path is expanded.  The file is slurped into memory first:
    reading to string and loads is 2.5x faster than using the file handle
    and load.

    Raises:
        ValueError: if the data cannot be unpickled.
    """
    full_path = os.path.expanduser(file_path)
    with open(full_path, 'rb') as fh:
        raw = fh.read()
    try:
        return pickle.loads(raw, encoding='bytes')
    except pickle.UnpicklingError as exc:
        raise ValueError from exc
def get(self, timeout=None):
    """Receive, decode and return data from the pipe. Block
    gevent-cooperatively until data is available or timeout expires. The
    default decoder is ``pickle.loads``.

    :arg timeout: ``None`` (default) or a ``gevent.Timeout`` instance. The
        timeout must be started to take effect and is canceled when the
        first byte of a new message arrives (i.e. providing a timeout does
        not guarantee that the method completes within the timeout
        interval).

    :returns: a Python object.

    Raises:
        - :exc:`gevent.Timeout` (if provided)
        - :exc:`GIPCError`
        - :exc:`GIPCClosed`
        - :exc:`pickle.UnpicklingError`

    Recommended usage for silent timeout control::

        with gevent.Timeout(TIME_SECONDS, False) as t:
            reader.get(timeout=t)

    .. warning::

        The timeout control is currently not available on Windows, because
        Windows can't apply select() to pipe handles. An ``OSError`` is
        expected to be raised in case you set a timeout.
    """
    self._validate()
    with self._lock:
        if timeout:
            # Wait for ready-to-read event.
            h = gevent.get_hub()
            h.wait(h.loop.io(self._fd, 1))
            # First byte is about to arrive: disarm the timeout.
            timeout.cancel()
        # 4-byte big-endian size header, then the payload itself.
        msize, = struct.unpack("!i", self._recv_in_buffer(4).getvalue())
        bindata = self._recv_in_buffer(msize).getvalue()
    # Decode outside the lock so a slow decoder does not block other users.
    return self._decoder(bindata)
def getMessage(self):
    """Try to extract one complete message from the receive buffer.

    Wire format: a 4-byte native-endian length prefix followed by a
    zlib-compressed pickle.  Returns the unpickled message, or None when
    the buffer does not yet hold a complete frame (caller should retry
    after more data arrives).  A frame that fails to decompress or
    unpickle marks the connection as disconnected.
    """
    if len(self.__readBuffer) < 4:
        # Length prefix not fully received yet.
        return None
    l = struct.unpack('i', self.__readBuffer[:4])[0]
    if len(self.__readBuffer) - 4 < l:
        # Payload not fully received yet; keep buffering.
        return None
    data = self.__readBuffer[4:4 + l]
    try:
        message = cPickle.loads(zlib.decompress(data))
    except (zlib.error, cPickle.UnpicklingError):
        # Corrupt frame: no way to resynchronize, treat the peer as gone.
        self.__disconnected = True
        return None
    # Consume the frame only after a successful decode.
    self.__readBuffer = self.__readBuffer[4 + l:]
    return message
def load_status(self):
    """Read the persistent state file and load the state it contains.

    Maps every low-level failure (corrupt pickle, I/O error, missing or
    incompatible fields) to RingmasterError with a descriptive message,
    then hands the competition sub-state to the competition object.
    Sets self.status_is_loaded on success.  (Python 2 syntax.)
    """
    try:
        status_format_version, status = self._load_status()
        # Both the ringmaster's and the competition's format versions must
        # match; otherwise fall through to the generic Exception handler.
        if (status_format_version != self.status_format_version or
                status['comp_vn'] != self.competition.status_format_version):
            raise StandardError
        self.void_game_count = status['void_game_count']
        # In-progress/replay bookkeeping is rebuilt fresh, not restored.
        self.games_in_progress = {}
        self.games_to_replay = {}
        competition_status = status['comp']
    except pickle.UnpicklingError:
        raise RingmasterError("corrupt status file")
    except EnvironmentError, e:
        raise RingmasterError("error loading status file:\n%s" % e)
    except KeyError, e:
        raise RingmasterError("incompatible status file: missing %s" % e)
    except Exception, e:
        # Probably an exception from __setstate__ somewhere
        raise RingmasterError("incompatible status file")
    try:
        self.competition.set_status(competition_status)
    except CompetitionError, e:
        raise RingmasterError("error loading competition state: %s" % e)
    except KeyError, e:
        raise RingmasterError(
            "error loading competition state: missing %s" % e)
    except Exception, e:
        raise RingmasterError("error loading competition state:\n%s" %
                              compact_tracebacks.format_traceback(skip=1))
    self.status_is_loaded = True
def pollmessage(self, wait):
    """Poll for one packet and unpickle it into a message.

    wait -- forwarded to pollpacket (polling behavior defined there).

    Returns None when no packet is available.  On unpickling failure the
    raw packet is dumped for debugging and the exception is re-raised.
    (Python 2 syntax.)
    """
    packet = self.pollpacket(wait)
    if packet is None:
        return None
    try:
        message = pickle.loads(packet)
    except pickle.UnpicklingError:
        # Write to the original stderr (sys.__stderr__) so diagnostics
        # survive any redirection of sys.stderr.
        print >>sys.__stderr__, "-----------------------"
        print >>sys.__stderr__, "cannot unpickle packet:", repr(packet)
        traceback.print_stack(file=sys.__stderr__)
        print >>sys.__stderr__, "-----------------------"
        raise
    return message
def unpickle(pickled_string):
    """Unpickles a string, but raises a unified UnpickleError in case anything
    fails. This is a helper method to not have to deal with the fact that
    `loads()` potentially raises many types of exceptions (e.g. AttributeError,
    IndexError, TypeError, KeyError, etc.)
    """
    try:
        obj = loads(pickled_string)
    # StandardError is Python 2's base of all non-exit exceptions, so this
    # catches the whole zoo loads() can raise; UnpicklingError is listed
    # explicitly in case the pickle implementation raises it directly.
    except (StandardError, UnpicklingError):
        raise UnpickleError('Could not unpickle.', pickled_string)
    return obj
def test_bad_input(self):
    """Malformed pickles must fail with the documented exceptions."""
    # Test issue4298: truncated data raises EOFError.
    truncated = '\x58\0\0\0\x54'
    self.assertRaises(EOFError, self.module.loads, truncated)
    # Test issue7455: a lone POP opcode on an empty stack.
    # XXX Why doesn't pickle raise UnpicklingError?
    lone_pop = '0'
    self.assertRaises((IndexError, cPickle.UnpicklingError),
                      self.module.loads, lone_pop)
def _restore_drover(workdir):
    """Restores a saved drover state contained within a workdir.

    Args:
        workdir: A string containing the path to the workdir used by drover.

    Raises:
        Error: if the workdir holds no readable drover state file.
    """
    state_path = os.path.join(workdir, '.git', 'drover')
    try:
        with open(state_path, 'rb') as state_file:
            restored = cPickle.load(state_file)
        restored._process_options()
        return restored
    except (IOError, cPickle.UnpicklingError):
        raise Error('%r is not git drover workdir' % workdir)
def _load_binstring(self): len, = unpack('<i', self.read(4)) if len < 0: raise pickle.UnpicklingError("BINSTRING pickle has negative byte count") data = self.read(len) try: data = str(data, self.encoding, self.errors) except: pass self.append(data)
def init_from_string(self, s):
    """Initialize values from string. Return 0 if problem.

    *s* is a pickled dict carrying 'index', 'testfile_type',
    'testfile_option' and 'temp_index'; on success the corresponding
    attributes are set and 1 is returned.
    """
    try:
        val_dict = cPickle.loads(s)
    except cPickle.UnpicklingError:
        return 0
    try:
        self.index = val_dict['index']
        self.testfile_type = val_dict['testfile_type']
        self.testfile_option = val_dict['testfile_option']
        self.temp_index = val_dict['temp_index']
    except (TypeError, KeyError):
        # BUG FIX: the original 'except TypeError, KeyError:' only caught
        # TypeError (binding the instance to the name KeyError), so a
        # pickle missing one of the keys escaped instead of returning 0.
        return 0
    return 1
def load_tokc(self):
    """Load the pickled token table from ``<base_fname>.tokc``.

    Raises:
        IndexLoadError: if the file is unreadable, corrupt, or empty.
    """
    tokc_path = self.base_fname + '.tokc'
    try:
        with open(tokc_path, mode='rb') as f:
            self.tokc = pickle.load(f)
    except (IOError, pickle.UnpicklingError):
        raise IndexLoadError
    if not self.tokc:
        # An empty table is as unusable as a missing file.
        raise IndexLoadError
def findGlobal(self, module, klass):
    """Find class name.

    Resolves (module, klass) to the actual class object, but only after
    checking the pair against allowedGlobals() -- this is the hook that
    stops unpickling from instantiating arbitrary classes.

    Raises UnpicklingError for any pair not in the allow-list.
    (Python 2 syntax.)
    """
    if (module, klass) not in self.allowedGlobals():
        raise UnpicklingError("For security reasons, you can\'t unpickle"
                              " objects from module %s with type %s." % (module, klass))
    g = {}
    # Python 2 exec statement: import into the scratch namespace g only,
    # keeping this function's own namespace untouched.
    exec 'from %s import %s as theClass' % (module, klass) in g
    return g['theClass']
def decode(self, data):
    """Deserialize *data* with cPickle.

    Raises:
        CodingError: if the payload is not a valid pickle.
    """
    try:
        decoded = cPickle.loads(data)
    except cPickle.UnpicklingError:
        raise CodingError()
    return decoded
def loads(self, s): up = Unpickler(BytesIO(s)) up.persistent_load = self._get_object try: return up.load() except KeyError, e: raise UnpicklingError("Could not find Node class for %s" % e)
def load(pickle_file):
    """output: is_exist, value

    Returns (False, None) when the cache file does not exist, otherwise
    (True, unpickled_value).  Any other failure is treated as fatal via
    assert False, preserving the original fail-hard contract.
    """
    try:
        # 'rb', not 'r': pickle data is binary.
        pickle_fd = open(pickle_file, "rb")
    except IOError as err:
        if errno.ENOENT == err.errno:
            debug("cache file does not exist: %s" % pickle_file)
            return False, None
        # Any other I/O failure (permissions, etc.) is unexpected.
        assert False
    try:
        value = cPickle.load(pickle_fd)
        return True, value
    except (ValueError, UnpicklingError, EOFError):
        error("cannot read pickle file: %s, suggest re-fetch the pickle file" % pickle_file)
        assert False
    finally:
        # The original never closed the descriptor; always release it.
        pickle_fd.close()
def main():
    """Entry point: for each requested observation, work out which files
    MIT is still missing and ask the data mover to asynchronously stage
    and deliver them.  (Python 2 syntax.)
    """
    opts = parseOptions()
    if (not opts):
        exit(1)
    #pushUrl = opts.push_url
    obsList = opts.obs_list.split(',')
    host = opts.push_host
    port = int(opts.port)
    client = ngamsPClient.ngamsPClient(host, port, timeOut = NGAMS_SOCK_TIMEOUT_DEF)
    toUrl = getPushURL("%s:%d" % (host, port), gateway = proxy_archive)
    stageUrl = 'http://%s/ASYNCLISTRETRIEVE' % opts.data_mover
    for obsNum in obsList:
        print "Checking observation: %s" % obsNum
        files = getFileIdsByObsNum(obsNum)
        # Only deliver files that MIT does not already hold.
        deliverFileIds = []
        for fileId in files:
            # first check if MIT has it or not
            if (not hasMITGotIt(client, fileId)):
                deliverFileIds.append(fileId)
                """
                fileName = getFileFullPath(fileId)
                if (not os.path.exists(fileName)):
                    print "\tFile %s does not exist" % fileName
                    continue
                onTape = ngamsMWACortexTapeApi.isFileOnTape(fileName)
                if (1 == onTape):
                    stageFile(fileName)
                print "\tPushing file %s to MIT" % fileId
                archiveFile(fileName, client)
                """
            else:
                print "\tFile %s is already at MIT. Skip it." % fileId
        myReq = AsyncListRetrieveRequest(deliverFileIds, toUrl)
        strReq = pickle.dumps(myReq)
        try:
            print "Sending async retrieve request to the data mover %s" % opts.data_mover
            request = urllib2.Request(stageUrl)
            # NOTE(review): credentials are hardcoded here -- should be
            # moved to configuration/secret storage.
            base64string = base64.encodestring('ngasmgr:ngas$dba').replace('\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)
            strRes = urllib2.urlopen(request, data = strReq, timeout = NGAMS_SOCK_TIMEOUT_DEF).read()
            # Response is itself a pickled status object.
            myRes = pickle.loads(strRes)
            #strRes = urllib2.urlopen(stageUrl, data = strReq, timeout = NGAMS_SOCK_TIMEOUT_DEF).read()
            #myRes = pickle.loads(strRes)
            if (myRes):
                print myRes.errorcode
            else:
                print 'Response is None when async staging files for obsNum %s' % obsNum
        except (UnpicklingError, socket.timeout) as uerr:
            print "Something wrong while sending async retrieve request for obsNum %s, %s" % (obsNum, str(uerr))
def loadpickle(fln):
    """
    load a pickle and return content as dictionary

    Parameters
    ==========
    fln : string
        filename

    Returns
    =======
    out : dict
        dictionary with content from file

    See Also
    ========
    savepickle

    Examples
    ========
    **note**: If fln is not found, but the same filename with '.gz'
    is found, will attempt to open the .gz as a gzipped file.

    >>> d = loadpickle('test.pbin')
    """
    if not os.path.exists(fln) and os.path.exists(fln + '.gz'):
        # Plain file missing but a .gz twin exists: go straight to gzip.
        gzip = True
        fln += '.gz'
    else:
        try:
            with open(fln, 'rb') as fh:
                try:  # Py3k: latin1 lets Py2-written pickles load on Py3
                    return pickle.load(fh, encoding='latin1')
                except TypeError:
                    # Py2's pickle.load has no 'encoding' kwarg.
                    return pickle.load(fh)
        except pickle.UnpicklingError:  # maybe it's a gzip?
            gzip = True
        else:
            # NOTE(review): unreachable in practice -- the try either
            # returns or raises -- kept for fidelity to the original.
            gzip = False
    if gzip:
        try:
            import zlib
            with open(fln, 'rb') as fh:
                # 16 + MAX_WBITS tells zlib to expect a gzip header.
                stream = zlib.decompress(fh.read(), 16 + zlib.MAX_WBITS)
                try:  # Py3k
                    return pickle.loads(stream, encoding='latin1')
                except TypeError:
                    return pickle.loads(stream)
        except MemoryError:
            # Whole-buffer decompress was too big: fall back to streaming
            # decompression via the gzip module (rebinds the local name).
            import gzip
            with open(fln) as fh:
                gzh = gzip.GzipFile(fileobj=fh)
                try:  # Py3k
                    contents = pickle.load(gzh, encoding='latin1')
                except TypeError:
                    contents = pickle.load(gzh)
                gzh.close()
            return contents


# -----------------------------------------------