The following are 50 code examples, extracted from open-source Python projects, that illustrate how to use cPickle.dumps().
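Before the project examples, a minimal round-trip sketch (assuming Python 2, where cPickle is the C implementation of the pickle module): dumps() serializes an object to a byte string, and loads() reconstructs it.

import cPickle

record = {'id': 42, 'tags': ('a', 'b')}
# protocol -1 selects the highest protocol available
data = cPickle.dumps(record, -1)
# round-trip: loads() rebuilds an equal object from the byte string
assert cPickle.loads(data) == record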
def apply(self, callback, route):
    dumps = self.json_dumps
    if not self.json_dumps:
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPResponse as resp:
            rv = resp

        if isinstance(rv, dict):
            # Attempt to serialize, raises exception on failure
            json_response = dumps(rv)
            # Set content type only if serialization successful
            response.content_type = 'application/json'
            return json_response
        elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def memoized(func):
    """Decorator that caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned, and
    the function is not re-evaluated.

    Based upon http://wiki.python.org/moin/PythonDecoratorLibrary#Memoize

    Nota bene: this decorator memoizes /all/ calls to the function. For a
    memoization decorator with limited cache size, consider:
    http://code.activestate.com/recipes/496879-memoize-decorator-function-with-cache-size-limit/
    """
    cache = {}

    @wraps(func)
    def func_wrapper(*args, **kwargs):
        key = cPickle.dumps((args, kwargs))
        if key not in cache:
            cache[key] = func(*args, **kwargs)
        return cache[key]
    return func_wrapper
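A brief usage sketch of the decorator above (the function name slow_add is hypothetical; wraps comes from functools, and cPickle must be imported where the decorator is defined):

@memoized
def slow_add(a, b):
    print 'computing %d + %d' % (a, b)
    return a + b

slow_add(1, 2)  # computes and caches the result
slow_add(1, 2)  # served from the cache; the body does not run again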
def _testStruct(self, Struct, values={}, delattrs=()):
    schema = mapped_struct.Schema.from_typed_slots(Struct)
    x = Struct()

    for k in delattrs:
        delattr(x, k)
    for k, v in values.iteritems():
        setattr(x, k, v)

    px = schema.pack(x)

    old_schema = schema
    schema = cPickle.loads(cPickle.dumps(schema, 2))
    self.assertTrue(old_schema.compatible(schema))
    self.assertTrue(schema.compatible(old_schema))

    dx = schema.unpack(px)
    for k in Struct.__slots__:
        if k in values or k not in delattrs:
            self.assertEquals(getattr(dx, k, None), getattr(x, k, None))
        else:
            self.assertFalse(hasattr(dx, k))
def testPackPickleUnpack(self):
    # hack - unregister subschema (can't register twice)
    mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct, None)
    mapped_struct.mapped_object.OBJ_PACKERS.pop('}', None)

    for TEST_VALUES in self.TEST_VALUES:
        # re-register subschema
        mapped_struct.mapped_object.register_schema(self.SubStruct, self.subschema, '}')

        x = self.Struct(**{k: v for k, v in TEST_VALUES.iteritems()})
        pschema = cPickle.dumps(self.schema)

        # Unregister schema to force the need for auto-register
        mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct, None)
        mapped_struct.mapped_object.OBJ_PACKERS.pop('}', None)

        pschema = cPickle.loads(pschema)
        dx = pschema.unpack(self.schema.pack(x))
        for k, v in TEST_VALUES.iteritems():
            self.assertTrue(hasattr(dx, k))
            self.assertEqual(getattr(dx, k), v)
        for k in self.Struct.__slots__:
            if k not in TEST_VALUES:
                self.assertFalse(hasattr(dx, k))
def news():
    """Get news from different ATOM RSS feeds."""
    import feedparser
    from pybossa.core import sentinel
    from pybossa.news import get_news, notify_news_admins, FEED_KEY
    try:
        import cPickle as pickle
    except ImportError:  # pragma: no cover
        import pickle
    urls = ['https://github.com/pybossa/pybossa/releases.atom',
            'http://scifabric.com/blog/all.atom.xml']
    score = 0
    notify = False
    if current_app.config.get('NEWS_URL'):
        urls += current_app.config.get('NEWS_URL')
    for url in urls:
        d = feedparser.parse(url)
        tmp = get_news(score)
        if (len(tmp) == 0) or (tmp[0]['updated'] != d.entries[0]['updated']):
            sentinel.master.zadd(FEED_KEY, float(score),
                                 pickle.dumps(d.entries[0]))
            notify = True
        score += 1
    if notify:
        notify_news_admins()
def cache(key_prefix, timeout=300):
    """
    Decorator for caching functions.

    Returns the function value from cache, or the function if cache disabled.
    """
    if timeout is None:
        timeout = 300

    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            key = "%s::%s" % (settings.REDIS_KEYPREFIX, key_prefix)
            if os.environ.get('PYBOSSA_REDIS_CACHE_DISABLED') is None:
                output = sentinel.slave.get(key)
                if output:
                    return pickle.loads(output)
                output = f(*args, **kwargs)
                sentinel.master.setex(key, timeout, pickle.dumps(output))
                return output
            output = f(*args, **kwargs)
            sentinel.master.setex(key, timeout, pickle.dumps(output))
            return output
        return wrapper
    return decorator
def distribute_encode(socks):
    writable = []

    while True:
        to_all, msg = yield idiokit.next()

        msg_bytes = cPickle.dumps(msg, cPickle.HIGHEST_PROTOCOL)
        data = struct.pack("!I", len(msg_bytes)) + msg_bytes

        if to_all:
            for sock in socks:
                yield sock.sendall(data)
            writable = []
        else:
            while not writable:
                _, writable, _ = yield select.select((), socks, ())
                writable = list(writable)
            yield writable.pop().sendall(data)
def save_weights(fname, params, metadata=None):
    """ assumes all params have unique names. """
    # Includes batchnorm params now
    names = [par.name for par in params]
    if len(names) != len(set(names)):
        raise ValueError('need unique param names')
    param_dict = {param.name: param.get_value(borrow=False) for param in params}
    if metadata is not None:
        param_dict['metadata'] = pickle.dumps(metadata)
    logging.info('saving {} parameters to {}'.format(len(params), fname))
    # try to avoid half-written files
    fname = Path(fname)
    if fname.exists():
        tmp_fname = Path(fname.stripext() + '.tmp.npz')  # TODO yes, this is a hack
        np.savez_compressed(str(tmp_fname), **param_dict)
        tmp_fname.rename(fname)
    else:
        np.savez_compressed(str(fname), **param_dict)
def testNonIdentityHash(self):
    global ClassWithCustomHash

    class ClassWithCustomHash(styles.Versioned):
        def __init__(self, unique, hash):
            self.unique = unique
            self.hash = hash

        def __hash__(self):
            return self.hash

    v1 = ClassWithCustomHash('v1', 0)
    v2 = ClassWithCustomHash('v2', 0)
    pkl = pickle.dumps((v1, v2))
    del v1, v2
    ClassWithCustomHash.persistenceVersion = 1
    ClassWithCustomHash.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
    v1, v2 = pickle.loads(pkl)
    styles.doUpgrade()
    self.assertEquals(v1.unique, 'v1')
    self.assertEquals(v2.unique, 'v2')
    self.failUnless(v1.upgraded)
    self.failUnless(v2.upgraded)
def testUpgradeDeserializesObjectsRequiringUpgrade(self):
    global ToyClassA, ToyClassB

    class ToyClassA(styles.Versioned):
        pass

    class ToyClassB(styles.Versioned):
        pass

    x = ToyClassA()
    y = ToyClassB()
    pklA, pklB = pickle.dumps(x), pickle.dumps(y)
    del x, y
    ToyClassA.persistenceVersion = 1

    def upgradeToVersion1(self):
        self.y = pickle.loads(pklB)
        styles.doUpgrade()

    ToyClassA.upgradeToVersion1 = upgradeToVersion1
    ToyClassB.persistenceVersion = 1
    ToyClassB.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)

    x = pickle.loads(pklA)
    styles.doUpgrade()
    self.failUnless(x.y.upgraded)
def makePickle(self, record):
    """
    Pickles the record in binary format with a length prefix, and
    returns it ready for transmission across the socket.
    """
    ei = record.exc_info
    if ei:
        # just to get traceback text into record.exc_text ...
        dummy = self.format(record)
        record.exc_info = None  # to avoid Unpickleable error
    # See issue #14436: If msg or args are objects, they may not be
    # available on the receiving end. So we convert the msg % args
    # to a string, save it as msg and zap the args.
    d = dict(record.__dict__)
    d['msg'] = record.getMessage()
    d['args'] = None
    s = cPickle.dumps(d, 1)
    if ei:
        record.exc_info = ei  # for next handler
    slen = struct.pack(">L", len(s))
    return slen + s
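For context, the receiving side of this framing reads the 4-byte big-endian length prefix, then unpickles exactly that many bytes. A minimal sketch (the helper names and socket handling are illustrative, not part of the logging module):

import struct
import cPickle

def _recv_exact(sock, n):
    # read exactly n bytes, or raise if the peer closes early
    buf = ''
    while len(buf) < n:
        chunk = sock.recv(n - len(buf))
        if not chunk:
            raise EOFError('socket closed mid-record')
        buf += chunk
    return buf

def read_pickled_record(sock):
    # 4-byte big-endian length prefix, as written by makePickle()
    slen = struct.unpack(">L", _recv_exact(sock, 4))[0]
    # the payload is a pickled dict of LogRecord attributes
    return cPickle.loads(_recv_exact(sock, slen))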
def picklecompiler(sourcefile):
    """
    Usually pickle can only be used to (de)serialize objects. This tiny
    snippet will allow you to transform arbitrary python source code into
    a pickle string. Unpickling this string with pickle.loads() will
    execute the given source code.

    The trick is actually pretty easy: usually eval() will only accept
    expressions, so class and function declarations do not work. Using
    the work-around of code objects (returned by compile()), we can
    execute real python source code :)
    """
    sourcecode = file(sourcefile).read()
    payload = "c__builtin__\neval\n(c__builtin__\ncompile\n(%sS'<payload>'\nS'exec'\ntRtR." \
        % (pickle.dumps(sourcecode)[:-4],)
    print payload
    fp = open("poc.pickle", "w")
    fp.write(payload)
    fp.close()
def _update_list(self, name, data, func):
    if not self.valid(name):
        return
    if self._is_iterable(data):
        result = [self.dumps(i) for i in data]
    else:
        result = [self.dumps(data)]
    if not result:
        return
    name = str(name)
    try:
        func(name, *result)
    except Exception:
        # fall back to passing the items one at a time
        for i in result:
            func(name, i)
def loads(strg):
    """
    Load a pickle from the current string.

    The result of ``cPickle.loads(strg)`` is returned.

    Parameters
    ----------
    strg : str
        The string to load.

    See Also
    --------
    dumps : Return a string corresponding to the pickling of a masked array.
    """
    return pickle.loads(strg)
def apply(self, callback, route):
    dumps = self.json_dumps
    if not dumps:
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPError:
            rv = _e()

        if isinstance(rv, dict):
            # Attempt to serialize, raises exception on failure
            json_response = dumps(rv)
            # Set content type only if serialization successful
            response.content_type = 'application/json'
            return json_response
        elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def pack(self):
    fullpayload = []
    fullpayload.append("import pupyimporter")

    all_packages = []
    for sl in self.scriptlets:
        all_packages.extend(sl.dependencies)
    all_packages = list(set(all_packages))

    for p, n in all_packages:
        modules_dic = gen_package_pickled_dic(
            os.path.join(ROOT, p.replace("/", os.sep)), n)
        fullpayload.append(
            "pupyimporter.pupy_add_package(%s)" % repr(cPickle.dumps(modules_dic)))

    for sl in self.scriptlets:
        if self.debug:
            fullpayload.append(sl.generate())
        else:
            # when not in debug mode, catch all exceptions so we still get a
            # session even if a scriptlet raises one
            fullpayload.append(wrap_try_except(sl.generate()))

    return compress_encode_obfs('\n'.join(fullpayload))
def flush(self):
    """
    Save storage contents to disk.

    This method saves new and changed :class:`Storage` contents to disk
    and invalidates the Storage instance. Unchanged Storage is not saved
    but simply invalidated.
    """
    contents = pickle.dumps(self._storage)
    if self._hash is None or md5(contents).hexdigest() != self._hash:
        tmp = self._filename + '.tmp'
        try:
            with open(tmp, 'wb') as fo:
                fo.write(contents)
        except:
            os.remove(tmp)
            raise
        move(tmp, self._filename)  # Atomic save
    del self._storage
def dump_stream(self, iterator, stream):
    batch, best = 1, self.bestSize
    iterator = iter(iterator)
    while True:
        vs = list(itertools.islice(iterator, batch))
        if not vs:
            break

        bytes = self.serializer.dumps(vs)
        write_int(len(bytes), stream)
        stream.write(bytes)

        size = len(bytes)
        if size < best:
            batch *= 2
        elif size > best * 10 and batch > 1:
            batch //= 2
def apply(self, callback, _):
    dumps = self.json_dumps
    if not dumps:
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPError:
            rv = _e()

        if isinstance(rv, dict):
            # Attempt to serialize, raises exception on failure
            json_response = dumps(rv)
            # Set content type only if serialization successful
            response.content_type = 'application/json'
            return json_response
        elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def cache_it(self, key, f, time_expire):
    if self.debug:
        self.r_server.incr('web2py_cache_statistics:misses')
    cache_set_key = self.cache_set_key
    expire_at = int(time.time() + time_expire) + 120
    bucket_key = "%s:%s" % (cache_set_key, expire_at / 60)
    value = f()
    value_ = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
    if time_expire == 0:
        time_expire = 1
    self.r_server.setex(key, time_expire, value_)
    # print '%s will expire on %s: it goes in bucket %s' % (
    #     key, time.ctime(expire_at), bucket_key)
    # print 'that bucket will expire on %s' % time.ctime(((expire_at / 60) + 1) * 60)
    p = self.r_server.pipeline()
    # add bucket to the fixed set
    p.sadd(cache_set_key, bucket_key)
    # sets the key
    p.setex(key, time_expire, value_)
    # add the key to the bucket
    p.sadd(bucket_key, key)
    # expire the bucket properly
    p.expireat(bucket_key, ((expire_at / 60) + 1) * 60)
    p.execute()
    return value
def test_text_dataset():
    with temporary_content_path(TEST_TEXT) as path:
        dataset = TextDataset(path, 100)
        stream = dataset.get_example_stream()
        it = stream.get_epoch_iterator()

        d = next(it)
        assert d == (['abc', 'abc', 'def'],)

        pickled_it = cPickle.dumps(it)
        d = next(it)
        assert d == (['def', 'def', 'xyz'],)

        it = cPickle.loads(pickled_it)
        d = next(it)
        assert d == (['def', 'def', 'xyz'],)
        d = next(it)
        assert d == (['xyz'],)
def new(self, c):
    """
    Insert a new creature in the DB, and set c.id accordingly.
    """
    assert c.id is None
    # create a new row in the DB, to generate an ID
    self.cur.execute("INSERT INTO creatures(id) VALUES(NULL)")
    c.id = self.cur.lastrowid
    born_at = self.generation
    pickled = pickle.dumps(c)
    # save the updated c
    self.cur.execute("""
        UPDATE creatures
        SET born_at = ?,
            pickled = ?
        WHERE id = ?
    """, (born_at, pickled, c.id))
def save_track_proto_to_zip(track_proto, save_file):
    zf = zipfile.ZipFile(save_file, 'w', allowZip64=True)
    print "Writing to zip file {}...".format(save_file)
    track_id = 0
    for track in track_proto['tracks']:
        track_obj = {}
        for key in track[0]:
            try:
                track_obj[key] = np.asarray([box[key] for box in track])
            except KeyError:
                continue
        zf.writestr('{:06d}.pkl'.format(track_id),
                    cPickle.dumps(track_obj, cPickle.HIGHEST_PROTOCOL))
        track_id += 1
        if track_id % 1000 == 0:
            print "\t{} tracks written.".format(track_id)
    print "\tTotally {} tracks written.".format(track_id)
    zf.close()
def apply(self, callback, _):
    dumps = self.json_dumps
    if not dumps:
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPError as error:
            rv = error

        if isinstance(rv, dict):
            # Attempt to serialize, raises exception on failure
            json_response = dumps(rv)
            # Set content type only if serialization successful
            response.content_type = 'application/json'
            return json_response
        elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
        return rv

    return wrapper
def serialize(obj):
    return pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)
def cookie_encode(data, key, digestmod=None):
    """ Encode and sign a pickle-able object. Return a (byte) string """
    depr(0, 13, "cookie_encode() will be removed soon.",
         "Do not use this API directly.")
    digestmod = digestmod or hashlib.sha256
    msg = base64.b64encode(pickle.dumps(data, -1))
    sig = base64.b64encode(hmac.new(tob(key), msg, digestmod=digestmod).digest())
    return tob('!') + sig + tob('?') + msg
def makePickle(self, record):
    """
    Pickles the record in binary format with a length prefix, and
    returns it ready for transmission across the socket.
    """
    ei = record.exc_info
    if ei:
        dummy = self.format(record)  # just to get traceback text into record.exc_text
        record.exc_info = None  # to avoid Unpickleable error
    s = cPickle.dumps(record.__dict__, 1)
    if ei:
        record.exc_info = ei  # for next handler
    slen = struct.pack(">L", len(s))
    return slen + s
def value_encode(self, val):
    return val, _quote(dumps(val))
# end SerialCookie
def value_encode(self, val):
    if type(val) == type(""):
        return val, _quote(val)
    else:
        return val, _quote(dumps(val))
# end SmartCookie

###########################################################
# Backwards Compatibility: Don't break any existing code!

# We provide Cookie() as an alias for SmartCookie()
def fetch(self, spider):
    while True:
        item_dict = spider.fetch()
        item = []
        for attr_name in spider.attr:
            item.append(item_dict[attr_name])
        while self.redis.llen(config.solver_prototxt) >= self.max_cache_item_num:
            time.sleep(self.wait_time)
        self.redis.rpush(config.solver_prototxt,
                         cPickle.dumps(item, cPickle.HIGHEST_PROTOCOL))
def pack(self, item):
    if isinstance(item, np.ndarray):
        return item
    else:
        return 'OBJ_' + cPickle.dumps(item, -1)
def picklechops(chops):
    """Pickles and base64-encodes its argument chops"""
    value = zlib.compress(dumps(chops))
    encoded = base64.encodestring(value)
    return encoded.strip()
def testPackPickleUnpack(self):
    for TEST_VALUES in self.TEST_VALUES:
        x = self.Struct(**{k: v for k, v in TEST_VALUES.iteritems()})
        pschema = cPickle.loads(cPickle.dumps(self.schema))
        dx = pschema.unpack(self.schema.pack(x))
        for k, v in TEST_VALUES.iteritems():
            self.assertTrue(hasattr(dx, k))
            self.assertEqual(getattr(dx, k), v)
        for k in self.Struct.__slots__:
            if k not in TEST_VALUES:
                self.assertFalse(hasattr(dx, k))
def _handle_serialization(func):
    def wrapped(session, params):
        params = pickle.loads(params['params'])
        rv = func(session, *params['args'], **params['kwargs'])
        return pickle.dumps(rv)
    return wrapped
def call_plugin_serialized(self, plugin, fn, *args, **kwargs):
    params = {'params': pickle.dumps(dict(args=args, kwargs=kwargs))}
    rv = self.call_plugin(plugin, fn, params)
    return pickle.loads(rv)
def set(self, key, value, timeout=None):
    expires = self._get_expiration(timeout)
    self._prune()
    self._cache[key] = (expires, pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
    return True
def add(self, key, value, timeout=None):
    expires = self._get_expiration(timeout)
    self._prune()
    item = (expires, pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
    if key in self._cache:
        return False
    self._cache.setdefault(key, item)
    return True
def dump_object(self, value):
    """Dumps an object into a string for redis.  By default it serializes
    integers as regular strings and pickle dumps everything else.
    """
    t = type(value)
    if t in integer_types:
        return str(value).encode('ascii')
    return b'!' + pickle.dumps(value)
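A plausible reversal of this encoding, as a sketch only (not necessarily the library's exact deserializer): check the b'!' marker to decide between unpickling and integer parsing.

def load_object(self, value):
    # inverse of dump_object, assuming the b'!' marker convention above
    if value is None:
        return None
    if value.startswith(b'!'):
        return pickle.loads(value[1:])
    # no marker: the value was stored as an ASCII integer string
    return int(value)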
def _encode_request(self, request):
    """Encode a request object"""
    return pickle.dumps(request_to_dict(request, self.spider), protocol=-1)
def send(self, obj):
    'Send one object.'
    string = _base255.encode(_cPickle.dumps(obj, _cPickle.HIGHEST_PROTOCOL)) + '\0'
    self.__send.acquire()
    try:
        self.__sock.sendall(string)
    finally:
        self.__send.release()
def __call__(self, *args):
    import cPickle
    key = cPickle.dumps(args)
    if key not in self.memo:
        self.memo[key] = self.fn(*args)
    return self.memo[key]
def put(self, msg):
    if self.is_open():
        data = cPickle.dumps(msg, 1)
        self.out.write("%d\n" % len(data))
        self.out.write(data)
        self.out.flush()
    else:
        raise Exception("Pipe closed")
def store(self):
    """
    Store the data for next runs, sets the attributes listed in
    :py:const:`waflib.Build.SAVED_ATTRS`. Uses a temporary file to
    avoid problems on ctrl+c.
    """
    data = {}
    for x in SAVED_ATTRS:
        data[x] = getattr(self, x)
    db = os.path.join(self.variant_dir, Context.DBFILE)

    try:
        waflib.Node.pickle_lock.acquire()
        waflib.Node.Nod3 = self.node_class
        x = cPickle.dumps(data, PROTOCOL)
    finally:
        waflib.Node.pickle_lock.release()

    Utils.writef(db + '.tmp', x, m='wb')

    try:
        st = os.stat(db)
        os.remove(db)
        if not Utils.is_win32:  # win32 has no chown but we're paranoid
            os.chown(db + '.tmp', st.st_uid, st.st_gid)
    except (AttributeError, OSError):
        pass

    # do not use shutil.move (copy is not thread-safe)
    os.rename(db + '.tmp', db)
def send_response(conn, ret, out, err, exc):
    if out or err or exc:
        data = (out, err, exc)
        data = cPickle.dumps(data, -1)
    else:
        data = ''

    params = [RES, str(ret), str(len(data))]

    # no need for the cookie in the response
    conn.send(make_header(params))
    if data:
        conn.send(data)
def send_response(self, ret, out, err, exc):
    if out or err or exc:
        data = (out, err, exc)
        data = cPickle.dumps(data, -1)
    else:
        data = ''

    params = [RES, str(ret), str(len(data))]

    # no need for the cookie in the response
    self.wfile.write(make_header(params))
    if data:
        self.wfile.write(data)
    self.wfile.flush()