我们从Python开源项目中提取了以下50个代码示例,用于说明如何使用 cPickle.Pickler()。
def __init__(self, request, servers, debug=0, pickleProtocol=0,
             pickler=pickle.Pickler, unpickler=pickle.Unpickler,
             pload=None, pid=None,
             default_time_expire=DEFAULT_TIME_EXPIRE):
    """Per-application cache client.

    Args:
        request: current request; its ``application`` attribute keys the
            per-application storage (a falsy request maps to the '' key).
        servers: memcache server list, forwarded to ``Client.__init__``.
        debug, pickleProtocol, pickler, unpickler, pload, pid: forwarded
            verbatim to the underlying ``Client``.
        default_time_expire: default expiry (seconds) for cached items.
    """
    self.request = request
    self.default_time_expire = default_time_expire
    # Key the shared meta storage by application name ('' when no request).
    app = request.application if request else ''
    Client.__init__(self, servers, debug, pickleProtocol,
                    pickler, unpickler, pload, pid)
    if app not in self.meta_storage:  # idiomatic 'not in' (was 'not app in')
        # First use for this app: seed the per-app stats bucket.
        self.storage = self.meta_storage[app] = {
            CacheAbstract.cache_stats_name: {
                'hit_total': 0,
                'misses': 0,
            }}
    else:
        self.storage = self.meta_storage[app]
def save(self, file_path, **kwargs):
    """Save variables to *file_path* using pickle.

    File layout: the number of variables, then for each variable its
    name followed by its value.

    Args:
        file_path: destination path, opened in binary mode.
        **kwargs: name -> value pairs to persist.

    Exits the process with status 1 if any value is a dict (the loader
    does not support them).
    """
    # Validate up front; the original counted variables in a manual loop.
    for key, value in kwargs.items():
        if isinstance(value, dict):
            # Typo fix: was 'Opps!'.
            sys.stderr.write('Oops! Cannot write a dictionary into pickle')
            sys.exit(1)
    # 'with' guarantees the file is closed even if pickling fails.
    with open(file_path, 'wb') as f:
        pickler = pickle.Pickler(f, -1)  # -1 = highest available protocol
        pickler.dump(len(kwargs))
        for key, value in kwargs.items():
            pickler.dump(key)
            pickler.dump(value)
def Write(self):
    """Write search indexes to the index file.

    This method is a no-op if index_file is set to None.
    """
    if not self.__index_file:
        return
    # Pickle into a temp file in the destination directory first so the
    # final os.rename() is atomic on POSIX (same filesystem).
    descriptor, tmp_filename = tempfile.mkstemp(
        dir=os.path.dirname(self.__index_file))
    tmpfile = os.fdopen(descriptor, 'wb')
    pickler = pickle.Pickler(tmpfile, protocol=1)
    # Fast mode skips the memo; assumes the index structure has no
    # shared/recursive references.
    pickler.fast = True
    pickler.dump((self._VERSION, self.__indexes))
    tmpfile.close()
    self.__index_file_lock.acquire()
    try:
        try:
            os.rename(tmp_filename, self.__index_file)
        except OSError:
            # Windows cannot rename over an existing file: remove the
            # target and retry.
            os.remove(self.__index_file)
            os.rename(tmp_filename, self.__index_file)
    finally:
        self.__index_file_lock.release()
def loadData(self, filename, verbose=True, replace_missing=True):
    """Get the data from a text file in one of 3 formats: matrix,
    sparse, binary_sparse.

    Args:
        filename: path of the data file to read.
        verbose: print progress and timing information.
        replace_missing: replace NaNs by 0 in dense data.

    Returns:
        The loaded data (shape/type depends on self.info['format']).
    """
    if verbose:
        print("========= Reading " + filename)
    start = time.time()
    # Hoisted: the cache-file path was rebuilt four times in the original.
    pickle_path = os.path.join(
        self.tmp_dir, os.path.basename(filename) + ".pickle")
    if self.use_pickle and os.path.exists(pickle_path):
        # BUG FIX: the cache is written in binary mode ('wb') below, so
        # it must be read back in binary mode ('rb'), not text ('r').
        with open(pickle_path, "rb") as pickle_file:
            vprint(verbose, "Loading pickle file : " + pickle_path)
            return pickle.load(pickle_file)
    if 'format' not in self.info.keys():
        self.getFormatData(filename)
    if 'feat_num' not in self.info.keys():
        self.getNbrFeatures(filename)
    data_func = {'dense': data_io.data,
                 'sparse': data_io.data_sparse,
                 'sparse_binary': data_io.data_binary_sparse}
    data = data_func[self.info['format']](filename, self.info['feat_num'])
    # IMPORTANT: when we replace missing values we double the number of variables
    # BUG FIX: np.any(map(...)) is unreliable on Python 3 (map returns an
    # iterator); np.isnan over the whole array is equivalent and portable.
    if self.info['format'] == 'dense' and replace_missing and np.any(np.isnan(data)):
        vprint(verbose, "Replace missing values by 0 (slow, sorry)")
        data = data_converter.replace_missing(data)
    if self.use_pickle:
        with open(pickle_path, "wb") as pickle_file:
            vprint(verbose, "Saving pickle file : " + pickle_path)
            p = pickle.Pickler(pickle_file)
            p.fast = True  # no memo; assumes no recursive structures
            p.dump(data)
    end = time.time()
    if verbose:
        print("[+] Success in %5.2f sec" % (end - start))
    return data
def loadLabel(self, filename, verbose=True):
    """Get the solution/truth values.

    Args:
        filename: path of the label file to read.
        verbose: print progress and timing information.

    Returns:
        The labels, shaped according to self.info['task'].
    """
    if verbose:
        print("========= Reading " + filename)
    start = time.time()
    # Hoisted: the cache-file path was rebuilt four times in the original.
    pickle_path = os.path.join(
        self.tmp_dir, os.path.basename(filename) + ".pickle")
    if self.use_pickle and os.path.exists(pickle_path):
        # BUG FIX: the cache is written in binary mode ('wb') below, so
        # it must be read back in binary mode ('rb'), not text ('r').
        with open(pickle_path, "rb") as pickle_file:
            vprint(verbose, "Loading pickle file : " + pickle_path)
            return pickle.load(pickle_file)
    if 'task' not in self.info.keys():
        self.getTypeProblem(filename)
    # IG: Here change to accommodate the new multiclass label format
    if self.info['task'] == 'multilabel.classification':
        label = data_io.data(filename)
    elif self.info['task'] == 'multiclass.classification':
        label = data_converter.convert_to_num(data_io.data(filename))
    else:
        label = np.ravel(data_io.data(filename))  # get a column vector
        # label = np.array([np.ravel(data_io.data(filename))]).transpose()  # get a column vector
    if self.use_pickle:
        with open(pickle_path, "wb") as pickle_file:
            vprint(verbose, "Saving pickle file : " + pickle_path)
            p = pickle.Pickler(pickle_file)
            p.fast = True  # no memo; assumes no recursive structures
            p.dump(label)
    end = time.time()
    if verbose:
        print("[+] Success in %5.2f sec" % (end - start))
    return label
def __setitem__(self, key, value):
    """Store *value* under *key*, pickled into the backing dict.

    With writeback enabled the live object is also kept in the
    in-memory cache so later mutations can be flushed on sync().
    """
    if self.writeback:
        self.cache[key] = value
    buffer = StringIO()
    Pickler(buffer, self._protocol).dump(value)
    self.dict[key] = buffer.getvalue()
def _SaveDicts():
    """Persist the pickle version and the clsid->typelib map to dicts.dat.

    Raises:
        RuntimeError: if the gencache directory is read-only.
    """
    if is_readonly:
        raise RuntimeError("Trying to write to a readonly gencache ('%s')!"
                           % win32com.__gen_path__)
    # 'with' closes the file even if pickling raises (replaces the
    # explicit try/finally around f.close()).
    with open(os.path.join(GetGeneratePath(), "dicts.dat"), "wb") as f:
        p = pickle.Pickler(f)
        p.dump(pickleVersion)
        p.dump(clsidToTypelib)
def __init__(self, servers=None, debug=0,
             pickleProtocol=cPickle.HIGHEST_PROTOCOL,
             pickler=cPickle.Pickler,
             unpickler=cPickle.Unpickler,
             pload=None, pid=None, make_sync_call=None, _app_id=None):
    """Create a new Client object.

    No parameters are required.

    Arguments:
      servers: Ignored; only for compatibility.
      debug: Ignored; only for compatibility.
      pickleProtocol: Pickle protocol to use for pickling the object.
      pickler: pickle.Pickler sub-class to use for pickling.
      unpickler: pickle.Unpickler sub-class to use for unpickling.
      pload: Callable to use for retrieving objects by persistent id.
      pid: Callable to use for determine the persistent id for objects,
        if any.
      make_sync_call: Ignored; only for compatibility with an earlier
        version.
    """
    # Pickling configuration.
    self._pickle_protocol = pickleProtocol
    self._pickler_factory = pickler
    self._unpickler_factory = unpickler
    # Optional persistent-id hooks.
    self._persistent_id = pid
    self._persistent_load = pload
    # App identity and compare-and-set bookkeeping.
    self._app_id = _app_id
    self._cas_ids = {}
def dumps(self, arg, proto=0):
    """Pickle *arg* into a file from self.output() and return its bytes.

    The file is always handed back to self.close(), even on error.
    """
    out = self.output()
    try:
        cPickle.Pickler(out, proto).dump(arg)
        out.seek(0)
        return out.read()
    finally:
        self.close(out)
def dumps(self, arg, proto=0):
    # cPickle-only API: Pickler() may be constructed without a file, in
    # which case the first positional argument is the protocol and the
    # pickled bytes accumulate in an internal buffer that getvalue()
    # returns. (The pure-Python pickle module has no such form.)
    p = cPickle.Pickler(proto)
    p.dump(arg)
    return p.getvalue()
def dumps(self, arg, proto=0):
    """Pickle *arg* in fast mode and return the resulting bytes.

    Fast mode disables the memo, so *arg* must not contain recursive
    references. The file from self.output() is always returned to
    self.close(), even on error.
    """
    out = self.output()
    try:
        pickler = cPickle.Pickler(out, proto)
        pickler.fast = 1
        pickler.dump(arg)
        out.seek(0)
        return out.read()
    finally:
        self.close(out)
def save(self, filename=""):
    """Save the network state (self.__dict__) to *filename* via pickle.

    Args:
        filename: destination path; when empty, a timestamped
            'reservoir-<YYYY-MM-DD-HHMMSS>.bin' name is generated.
    """
    from cPickle import Pickler
    if filename == "":
        timestamp = time.strftime("%Y-%m-%d-%H%M%S")
        filename = "reservoir-%s.bin" % timestamp
    # 'with' closes the file even if pickling raises (the original
    # leaked the handle on error and never reached f.close()).
    with open(filename, 'wb') as f:
        Pickler(f, 2).dump(self.__dict__)
############################################################
# load network (restore from file)
# @classmethod
def is_module_accelerated(module):
    """Return True when pickle.Pickler reports the pure-Python module.

    NOTE(review): the *module* argument is ignored; the check always
    inspects the global ``pickle`` module. The '<jython>' fallback
    covers interpreters whose Pickler lacks ``__module__``.
    """
    implementation = getattr(pickle.Pickler, '__module__', '<jython>')
    return implementation == 'pickle'
def serial(result, fname="temp.bin"):
    """Pickle *result* to *fname* inside this module's directory.

    Args:
        result: object to pickle.
        fname: file name; UTF-8 encoded names are first run through
            convert().
    """
    if charade.detect(fname)['encoding'] == 'utf-8':
        fname = convert(fname)
    root_dir = os.path.dirname(__file__)
    # BUG FIX: the original concatenated with a hard-coded '\\', which
    # produces broken paths on non-Windows systems.
    fname = os.path.join(root_dir, fname)
    # 'with' closes the file even if pickling raises.
    with open(fname, "wb") as f:
        p = cPickle.Pickler(f)
        p.clear_memo()
        p.fast = True  # no memo; assumes no recursive structures
        p.dump(result)
def __WritePickled(self, obj, filename):
    """Pickles the object and writes it to the given file.
    """
    # Skip when there is nowhere (or nothing) to write.
    if not filename or filename == '/dev/null' or not obj:
        return
    # Pickle into a temp file in the destination directory first so the
    # final os.rename() is atomic on POSIX (same filesystem).
    descriptor, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename))
    tmpfile = os.fdopen(descriptor, 'wb')
    pickler = pickle.Pickler(tmpfile, protocol=1)
    # Fast mode skips the memo; assumes obj has no shared/recursive refs.
    pickler.fast = True
    pickler.dump(obj)
    tmpfile.close()
    self.__file_lock.acquire()
    try:
        try:
            os.rename(tmp_filename, filename)
        except OSError:
            # Windows cannot rename over an existing file: best-effort
            # removal of the target, then retry. The bare except is
            # deliberate — a failed remove is surfaced by the retried
            # rename below.
            try:
                os.remove(filename)
            except:
                pass
            os.rename(tmp_filename, filename)
    finally:
        self.__file_lock.release()
def __setitem__(self, key, value):
    # Store *value* in the in-memory cache and, pickled, in the backing
    # redis hash. The write lock serializes concurrent writers, matching
    # sync(), which holds the same lock across its pipeline.
    with self._cache_write_lock:
        self._cache[key] = value
        f = StringIO()
        p = Pickler(f, self._protocol)
        p.dump(value)
        self._storage.redis.hset(self._hash_key, key, f.getvalue())
def sync(self):
    """Flush every cached entry to the redis hash in a single pipeline.

    No-op when the cache is empty; the cache is cleared after the
    pipeline executes.
    """
    if not self._cache:
        return
    with self._cache_write_lock, self._storage.redis.pipeline() as pipeline:
        for cache_key, cached in self._cache.items():
            buf = StringIO()
            Pickler(buf, self._protocol).dump(cached)
            pipeline.hset(self._hash_key, cache_key, buf.getvalue())
        pipeline.execute()
        self._cache.clear()
def dumps(self, obj, protocol=None, bin=None):
    """Pickle *obj* to a byte string, using self._get_ids as the
    persistent_id hook.

    Args:
        obj: object to pickle.
        protocol: pickle protocol; None selects the pickler's default.
            BUG FIX: the original accepted this argument but never
            passed it to the Pickler, so it was silently ignored.
        bin: unused; kept for backward compatibility.

    Returns:
        The pickled bytes.
    """
    src = BytesIO()
    p = Pickler(src, protocol)
    p.persistent_id = self._get_ids
    p.dump(obj)
    return src.getvalue()