Python pickle 模块,dumps() 实例源码

我们从Python开源项目中,提取了以下50个代码示例,用于说明如何使用pickle.dumps()

项目:psycopg2-for-aws-lambda    作者:iwitaly    | 项目源码 | 文件源码
def test_adapt_dumps(self):
        """A custom ``dumps`` callable passed to Json should drive serialization."""
        from psycopg2.extras import json, Json

        class DecimalEncoder(json.JSONEncoder):
            # Render Decimal instances as plain JSON floats.
            def default(self, obj):
                if isinstance(obj, Decimal):
                    return float(obj)
                return json.JSONEncoder.default(self, obj)

        def dumps(obj):
            return json.dumps(obj, cls=DecimalEncoder)

        curs = self.conn.cursor()
        obj = Decimal('123.45')
        self.assertEqual(
            curs.mogrify("%s", (Json(obj, dumps=dumps),)),
            b"'123.45'")
项目:txt2evernote    作者:Xunius    | 项目源码 | 文件源码
def setUserprop(self, key, value):
        """
        Store one user property as a pickled value under *key*.

        Updates the row in place when the key already exists, otherwise
        inserts a fresh Userprop row. Returns True on success.
        """
        pickled = pickle.dumps(value)

        existing = self.session.query(Userprop).filter_by(key=key).first()
        if existing is None:
            self.session.add(Userprop(key, pickled))
        else:
            existing.value = pickled

        self.session.commit()
        return True
项目:txt2evernote    作者:Xunius    | 项目源码 | 文件源码
def setSettings(self, settings):
        """
        Persist every entry of the *settings* dict as a pickled Setting row.

        Raises Exception when *settings* is not a dict or any value is falsy.
        Returns True once all rows are committed.
        """
        if not isinstance(settings, dict):
            raise Exception("Wrong settings")

        for key, value in settings.items():
            if not value:
                raise Exception("Wrong setting's item")

            pickled = pickle.dumps(value)
            row = self.session.query(Setting).filter_by(key=key).first()
            if row is None:
                self.session.add(Setting(key, pickled))
            else:
                row.value = pickled

        self.session.commit()
        return True
项目:txt2evernote    作者:Xunius    | 项目源码 | 文件源码
def setSearch(self, search_obj):
        """
        Replace any previously stored search with a pickled *search_obj*.

        Returns True once the new Search row is committed.
        """
        # Drop every stale search row before storing the new one.
        for stale in self.session.query(Search).all():
            self.session.delete(stale)

        self.session.add(Search(pickle.dumps(search_obj)))
        self.session.commit()
        return True
项目:AVSR-Deep-Speech    作者:pandeydivesh15    | 项目源码 | 文件源码
def do_GET(self):
            """Serve coordinator GET requests: next data index or next job."""
            if not COORD.started:
                # Coordinator not ready - tell the client to retry later.
                self.send_response(202)
                self.end_headers()
                return
            if self.path.startswith(PREFIX_NEXT_INDEX):
                index = COORD.get_next_index(self.path[len(PREFIX_NEXT_INDEX):])
                if index >= 0:
                    self._send_answer(str(index))
                    return
            elif self.path.startswith(PREFIX_GET_JOB):
                job = COORD.get_job(worker=int(self.path[len(PREFIX_GET_JOB):]))
                if job:
                    # Jobs travel pickled over the wire.
                    self._send_answer(pickle.dumps(job))
                    return
            self.send_response(404)
            self.end_headers()
项目:AVSR-Deep-Speech    作者:pandeydivesh15    | 项目源码 | 文件源码
def do_GET(self):
            """Answer GET requests for the next index or the next worker job."""
            if not COORD.started:
                # 202: coordinator still starting up.
                self.send_response(202)
                self.end_headers()
                return
            if self.path.startswith(PREFIX_NEXT_INDEX):
                next_index = COORD.get_next_index(self.path[len(PREFIX_NEXT_INDEX):])
                if next_index >= 0:
                    self._send_answer(str(next_index))
                    return
            elif self.path.startswith(PREFIX_GET_JOB):
                worker_id = int(self.path[len(PREFIX_GET_JOB):])
                job = COORD.get_job(worker=worker_id)
                if job:
                    self._send_answer(pickle.dumps(job))
                    return
            self.send_response(404)
            self.end_headers()
项目:AVSR-Deep-Speech    作者:pandeydivesh15    | 项目源码 | 文件源码
def do_GET(self):
            """Dispatch coordinator GET requests by URL prefix."""
            if COORD.started:
                path = self.path
                if path.startswith(PREFIX_NEXT_INDEX):
                    index = COORD.get_next_index(path[len(PREFIX_NEXT_INDEX):])
                    if index >= 0:
                        self._send_answer(str(index))
                        return
                elif path.startswith(PREFIX_GET_JOB):
                    job = COORD.get_job(worker=int(path[len(PREFIX_GET_JOB):]))
                    if job:
                        self._send_answer(pickle.dumps(job))
                        return
                # Unknown path or nothing to hand out.
                self.send_response(404)
            else:
                # Not started yet - signal "accepted, retry".
                self.send_response(202)
            self.end_headers()
项目:dabdabrevolution    作者:harryparkdotio    | 项目源码 | 文件源码
def apply(self, callback, route):
        """Wrap *callback* so dict return values are serialized to JSON."""
        dumps = self.json_dumps
        if not dumps:
            # JSON support disabled - leave the route callback untouched.
            return callback

        def wrapper(*args, **kwargs):
            try:
                rv = callback(*args, **kwargs)
            except HTTPResponse as resp:
                rv = resp

            if isinstance(rv, dict):
                # Serialize first: a failure must propagate before the
                # response content type is switched to JSON.
                body = dumps(rv)
                response.content_type = 'application/json'
                return body
            if isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
                rv.body = dumps(rv.body)
                rv.content_type = 'application/json'
            return rv

        return wrapper
项目:sensu_drive    作者:ilavender    | 项目源码 | 文件源码
def resolve(request):
    """Resolve an alert for the posted entity and broadcast it; respond JSON."""
    mimetype = 'application/json'
    data = {}

    if request.method == 'POST':
        entity = request.POST.get('entity', '')
        if entity != '':
            data['entity'] = entity
            data['status'] = 0
            data['timestamp'] = datetime.datetime.now().timestamp()
            data['output'] = "resolve request by %s" % (request.user.username)
            data['result'] = 'okay'

            sensu_event_resolve(data)
            # Hand the event to the background worker channel as well.
            Channel('background-alert').send(dict(data))

    return HttpResponse(json.dumps(data), mimetype)



#@login_required(login_url=reverse_lazy('login'))
项目:sensu_drive    作者:ilavender    | 项目源码 | 文件源码
def rmResult(request):
    """Delete a sensu result for the posted 'client:check' entity; respond JSON."""
    mimetype = 'application/json'
    data = {}

    if request.method == 'POST':
        entity = request.POST.get('entity', '')
        if entity != '':
            data['client'], data['check'] = entity.split(':')
            data['status'] = 0
            data['timestamp'] = datetime.datetime.now().timestamp()

            if sensu_result_delete(data):
                data['result'] = 'okay'
            else:
                data['result'] = 'failed deleting result using sensu api for: ' + entity

    return HttpResponse(json.dumps(data), mimetype)



#@login_required(login_url=reverse_lazy('login'))
项目:sensu_drive    作者:ilavender    | 项目源码 | 文件源码
def user_settings(request):
    """
    Persist the posted ContactForm for the current user.

    Returns 200 'Done' on success, otherwise 409 with the form errors
    encoded as JSON.
    """
    logger.debug('settings view triggered by %s' % (request.user.username))

    form = ContactForm(request.POST, instance=Contact.objects.get(user=request.user.id))

    # BUG FIX: the original tested the bound method ``form.is_valid``
    # (always truthy) instead of calling it, so invalid forms reached
    # ``form.save()`` and were only rejected via the exception path.
    if form.is_valid():
        try:
            form.save()
            return HttpResponse('Done', status=200)
        except Exception:
            # NOTE(review): kept broad to preserve the original behavior of
            # answering 409 on any save failure.
            return HttpResponse(json.dumps(form.errors), status=409)
    else:
        return HttpResponse(json.dumps(form.errors), status=409)
    # (Removed: an unreachable trailing ``render`` call that referenced an
    # undefined variable ``data``.)
项目:sensu_drive    作者:ilavender    | 项目源码 | 文件源码
def entity_history(request):
    """Return up to 100 pickled history entries for the posted entity as JSON."""
    data = []
    mimetype = 'application/json'

    if request.method == 'POST':
        entity = request.POST.get('entity', '')
        if entity != '':
            logger.debug("view entity_history user: %s entity: %s" % (request.user.username, entity))

            # History entries are stored pickled in a redis list.
            for raw in r.lrange('history_entity_' + entity, 0, 100):
                data.append(pickle.loads(raw))

    return HttpResponse(json.dumps(data), mimetype)



#@login_required(login_url=reverse_lazy('login'))
项目:sensu_drive    作者:ilavender    | 项目源码 | 文件源码
def check_config(request):
    """Return the cached check definition for the posted entity as JSON."""
    mimetype = 'application/json'
    data = {}

    if request.method == 'POST':
        entity = request.POST.get('entity', '')
        if entity != '':
            # The split doubles as a 'client:check' format check; the parts
            # themselves are unused because the cache key uses the full
            # entity string.
            client_name, check_name = entity.split(':')
            data = cache.get('check_' + entity)

    return HttpResponse(json.dumps(data), mimetype)



#@login_required(login_url=reverse_lazy('login'))
项目:sensu_drive    作者:ilavender    | 项目源码 | 文件源码
def sensu_event_resolve(message):
    """POST a resolve request for ``message['entity']`` to the sensu API."""
    API_URL = settings.SENSU_API_URL + '/resolve'
    userAndPass = base64.b64encode(str.encode("%s:%s" % (settings.SENSU_API_USER, settings.SENSU_API_PASSWORD))).decode("ascii")
    headers = {
        'X_REQUESTED_WITH': 'XMLHttpRequest',
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'Authorization': 'Basic %s' % userAndPass,
    }

    try:
        client_name, check_name = message['entity'].split(':')
        body = json.dumps({"client": client_name, "check": check_name})
        request = http.request('POST', API_URL, body=body, headers=headers)

        if request.status == 202:
            # Accepted: nothing to read, just release the pooled connection.
            request.release_conn()
        else:
            logger.error('response: %s' % str(request.status))
    except:
        # Bare except kept deliberately: log the entity, then re-raise.
        logger.error("sensu_event_resolve failed resolving entity: %s" % message['entity'])
        raise
项目:wee-discord    作者:BlitzKraft    | 项目源码 | 文件源码
def command_openweb(current_buffer, args):
    """Temporarily swap the channel title for a web URL, restoring it via timer."""
    trigger = w.config_get_plugin('trigger_value')
    if trigger == "0":
        return w.WEECHAT_RC_OK
    if args is None:
        channel = channels.find(current_buffer)
        url = "{}/messages/{}".format(channel.server.server_buffer_name, channel.name)
        topic = w.buffer_get_string(channel.channel_buffer, "title")
        w.buffer_set(channel.channel_buffer, "title", "{}:{}".format(trigger, url))
        # Schedule the restore; state rides along as a JSON payload.
        w.hook_timer(1000, 0, 1, "command_openweb", json.dumps({"topic": topic, "buffer": current_buffer}))
    else:
        #TODO: fix this dirty hack because i don't know the right way to send multiple args.
        # Timer callback path: current_buffer actually carries the JSON state.
        data = json.loads(current_buffer)
        channel_buffer = channels.find(data["buffer"]).channel_buffer
        w.buffer_set(channel_buffer, "title", data["topic"])
    return w.WEECHAT_RC_OK
项目:live-plotter    作者:anandtrex    | 项目源码 | 文件源码
def run(self):
        """
        Entry point for the live-plotting process: subscribe to the publisher
        over ZMQ, then start the matplotlib animation loop.
        """
        self.entity_name = current_process().name
        plogger.info("Starting new thread %s", self.entity_name)

        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.SUB)
        self.socket.connect("tcp://localhost:%d" % self.port)

        # The subscription topic is the pickled variable name, which must
        # match how the publishing side pickles it.
        topic = pickle.dumps(self.var_name, protocol=pickle.HIGHEST_PROTOCOL)
        self.socket.setsockopt(zmq.SUBSCRIBE, topic)
        plogger.info("Subscribed to topic %s on port %d", self.var_name, self.port)

        self.init(**self.init_kwargs)
        # Keep a local reference so the animation isn't garbage-collected;
        # removing it stops the animation.
        # See: http://matplotlib.org/api/animation_api.html
        ani = animation.FuncAnimation(self.fig, self.loop, interval=100)
        self.plt.show()
项目:oscars2016    作者:0x0ece    | 项目源码 | 文件源码
def _make_flow(request, scopes, return_url=None):
    """Creates a Web Server Flow and stores it (pickled) in the session."""
    # The CSRF token binds the eventual OAuth callback to this session.
    csrf_token = hashlib.sha256(os.urandom(1024)).hexdigest()
    request.session[_CSRF_KEY] = csrf_token

    state = json.dumps({
        'csrf_token': csrf_token,
        'return_url': return_url,
    })

    redirect_uri = request.build_absolute_uri(
        urlresolvers.reverse("google_oauth:callback"))
    flow = client.OAuth2WebServerFlow(
        client_id=django_util.oauth2_settings.client_id,
        client_secret=django_util.oauth2_settings.client_secret,
        scope=scopes,
        state=state,
        redirect_uri=redirect_uri)

    # Pickle the flow so the callback view can resume it later.
    request.session[_FLOW_KEY.format(csrf_token)] = pickle.dumps(flow)
    return flow
项目:sharedbuffers    作者:jampp    | 项目源码 | 文件源码
def _testStruct(self, Struct, values = {}, delattrs = ()):
        """Pack an instance, pickle-roundtrip the schema, unpack, verify attrs."""
        schema = mapped_struct.Schema.from_typed_slots(Struct)

        x = Struct()
        for attr in delattrs:
            delattr(x, attr)
        for attr, val in values.iteritems():
            setattr(x, attr, val)
        px = schema.pack(x)

        # Round-trip the schema itself through pickle (protocol 2) and make
        # sure both directions stay compatible.
        old_schema = schema
        schema = cPickle.loads(cPickle.dumps(schema, 2))
        self.assertTrue(old_schema.compatible(schema))
        self.assertTrue(schema.compatible(old_schema))

        dx = schema.unpack(px)
        for attr in Struct.__slots__:
            if attr in values or attr not in delattrs:
                self.assertEquals(getattr(dx, attr, None), getattr(x, attr, None))
            else:
                self.assertFalse(hasattr(dx, attr))
项目:sharedbuffers    作者:jampp    | 项目源码 | 文件源码
def testPackPickleUnpack(self):
        """Unpickling a schema must auto-register its subschema packer."""
        # hack - unregister subschema (can't register twice)
        mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct,None)
        mapped_struct.mapped_object.OBJ_PACKERS.pop('}',None)

        for TEST_VALUES in self.TEST_VALUES:
            # re-register subschema
            mapped_struct.mapped_object.register_schema(self.SubStruct, self.subschema, '}')

            x = self.Struct(**{k:v for k,v in TEST_VALUES.iteritems()})
            pschema = cPickle.dumps(self.schema)

            # Unregister schema to force the need for auto-register
            mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct,None)
            mapped_struct.mapped_object.OBJ_PACKERS.pop('}',None)

            # Loading the pickled schema should transparently re-register the
            # subschema, so unpacking below works again.
            pschema = cPickle.loads(pschema)

            # Pack with the live schema, unpack with the unpickled one.
            dx = pschema.unpack(self.schema.pack(x))
            for k,v in TEST_VALUES.iteritems():
                self.assertTrue(hasattr(dx, k))
                self.assertEqual(getattr(dx, k), v)
            for k in self.Struct.__slots__:
                if k not in TEST_VALUES:
                    self.assertFalse(hasattr(dx, k))
项目:NeoAnalysis    作者:neoanalysis    | 项目源码 | 文件源码
def save(self):
        """Serialize this mesh to a string appropriate for disk storage"""
        import pickle
        # Pick whichever representation is populated; the *IndexedByFaces
        # variants are alternatives to the direct arrays.
        if self._faces is None:
            names = ['_vertexesIndexedByFaces']
        else:
            names = ['_vertexes', '_faces']

        if self._vertexColors is not None:
            names.append('_vertexColors')
        elif self._vertexColorsIndexedByFaces is not None:
            names.append('_vertexColorsIndexedByFaces')

        if self._faceColors is not None:
            names.append('_faceColors')
        elif self._faceColorsIndexedByFaces is not None:
            names.append('_faceColorsIndexedByFaces')

        state = {name: getattr(self, name) for name in names}
        return pickle.dumps(state)
项目:NeoAnalysis    作者:neoanalysis    | 项目源码 | 文件源码
def save(self):
        """Serialize this mesh to a string appropriate for disk storage"""
        import pickle
        # Collect only the attributes that are actually populated.
        names = (['_vertexes', '_faces'] if self._faces is not None
                 else ['_vertexesIndexedByFaces'])

        if self._vertexColors is not None:
            names.append('_vertexColors')
        elif self._vertexColorsIndexedByFaces is not None:
            names.append('_vertexColorsIndexedByFaces')

        if self._faceColors is not None:
            names.append('_faceColors')
        elif self._faceColorsIndexedByFaces is not None:
            names.append('_faceColorsIndexedByFaces')

        return pickle.dumps({name: getattr(self, name) for name in names})
项目:code    作者:ActiveState    | 项目源码 | 文件源码
def distinct(self):
        "Return copy of table having only distinct rows."
        copy = type(self)()
        copy.__columns = self.__columns
        copy.__data_area = self.__data_area.copy()
        copy.__row_index = self.__row_index
        seen = set()
        keep = set()
        for row in copy.__data_area:
            # Fingerprint the row by pickling its column values in schema
            # order; identical fingerprints mean duplicate rows.
            fingerprint = pickle.dumps(tuple(copy.__data_area[row][index]
                                             for index, name, data_type
                                             in self.__columns))
            if fingerprint not in seen:
                keep.add(row)
                seen.add(fingerprint)
        # Drop every row index that wasn't the first of its kind.
        for row in tuple(copy.__data_area):
            if row not in keep:
                del copy.__data_area[row]
        return copy
项目:v2ex-tornado-2    作者:coderyy    | 项目源码 | 文件源码
def get(self, key):
        """Add member *key* to the current user's blocked list, then redirect."""
        go = '/'
        member = CheckAuth(self)
        if member:
            member = Member.get(member.id)
            one = Member.get(key)
            if one and one.num != member.num:
                # blocked is stored as a pickled list of member numbers.
                try:
                    blocked = pickle.loads(member.blocked.encode('utf-8'))
                except:
                    blocked = []
                if len(blocked) == 0:
                    blocked = []
                if one.num not in blocked:
                    blocked.append(one.num)
                member.blocked = pickle.dumps(blocked)
                member.sync()
                store.commit()  #jon add
                memcache.set('Member_' + str(member.num), member, 86400)
        self.redirect(go)
项目:v2ex-tornado-2    作者:coderyy    | 项目源码 | 文件源码
def get(self, key):
        """Remove member *key* from the current user's blocked list, then redirect."""
        go = '/'
        member = CheckAuth(self)
        if member:
            member = Member.get(member.id)
            one = Member.get(key)
            if one and one.num != member.num:
                # blocked is a pickled list of member numbers; fall back to
                # an empty list when the stored value is missing/corrupt.
                try:
                    blocked = pickle.loads(member.blocked.encode('utf-8'))
                except:
                    blocked = []
                if len(blocked) == 0:
                    blocked = []
                if one.num in blocked:
                    blocked.remove(one.num)
                member.blocked = pickle.dumps(blocked)
                member.sync()
                store.commit()  #jon add
                memcache.set('Member_' + str(member.num), member, 86400)
        self.redirect(go)
项目:FRG-Crowdsourcing    作者:97amarnathk    | 项目源码 | 文件源码
def news():
    """Get news from different ATOM RSS feeds."""
    import feedparser
    from pybossa.core import sentinel
    from pybossa.news import get_news, notify_news_admins, FEED_KEY
    try:
        import cPickle as pickle
    except ImportError:  # pragma: no cover
        import pickle

    urls = ['https://github.com/pybossa/pybossa/releases.atom',
            'http://scifabric.com/blog/all.atom.xml']
    if current_app.config.get('NEWS_URL'):
        urls += current_app.config.get('NEWS_URL')

    notify = False
    # Each feed keeps its own score slot in the sorted set.
    for score, url in enumerate(urls):
        d = feedparser.parse(url)
        tmp = get_news(score)
        # Store only when nothing is cached yet or the newest entry changed.
        if (len(tmp) == 0) or (tmp[0]['updated'] != d.entries[0]['updated']):
            sentinel.master.zadd(FEED_KEY, float(score),
                                 pickle.dumps(d.entries[0]))
            notify = True
    if notify:
        notify_news_admins()
项目:FRG-Crowdsourcing    作者:97amarnathk    | 项目源码 | 文件源码
def cache(key_prefix, timeout=300):
    """
    Decorator for caching functions.

    Returns the function value from cache, or the function if cache disabled

    """
    if timeout is None:
        timeout = 300

    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            key = "%s::%s" % (settings.REDIS_KEYPREFIX, key_prefix)
            if os.environ.get('PYBOSSA_REDIS_CACHE_DISABLED') is not None:
                # Cache disabled: compute, but still write the value so the
                # cache is warm if re-enabled (mirrors original behavior).
                output = f(*args, **kwargs)
                sentinel.master.setex(key, timeout, pickle.dumps(output))
                return output
            cached = sentinel.slave.get(key)
            if cached:
                return pickle.loads(cached)
            output = f(*args, **kwargs)
            sentinel.master.setex(key, timeout, pickle.dumps(output))
            return output
        return wrapper
    return decorator
项目:transfer    作者:viur-framework    | 项目源码 | 文件源码
def listCursors(self, backupkey=None, cursor=None, kind=None, *args, **kwargs):
        """Walk *kind* in ~1000-entity strides, returning pickled hex cursors."""
        assert safeStringComparison(backupKey, backupkey)
        c = datastore_query.Cursor(urlsafe=cursor) if cursor else None
        r = []
        # Take up to ten strides; Get(1, offset=999) skips ~1000 entities
        # and leaves a resume cursor behind.
        for _ in range(10):
            q = datastore.Query(kind, cursor=c)
            q.Get(1, offset=999)
            newCursor = q.GetCursor()
            if newCursor == c:
                # No progress - end of the kind reached.
                break
            c = newCursor
            r.append(c.urlsafe())
        return (pickle.dumps({"cursors": r}).encode("HEX"))
项目:transfer    作者:viur-framework    | 项目源码 | 文件源码
def exportDb(self, cursor=None, backupkey=None, endcursor=None, kind=None, *args, **kwargs):
        """Dump up to five entities of *kind* between two cursors as pickled hex."""
        global backupKey
        assert safeStringComparison(backupKey, backupkey)
        c = datastore_query.Cursor(urlsafe=cursor) if cursor else None
        endCursor = datastore_query.Cursor(urlsafe=endcursor) if endcursor else None
        q = datastore.Query(kind, cursor=c, end_cursor=endCursor)
        # NOTE(review): progress info logged at *error* level - presumably to
        # surface it in the default log view; confirm before changing.
        logging.error((cursor, backupkey, endcursor, kind))
        r = [self.genDict(res) for res in q.Run(limit=5)]
        return (pickle.dumps({"cursor": str(q.GetCursor().urlsafe()), "values": r}).encode("HEX"))
项目:transfer    作者:viur-framework    | 项目源码 | 文件源码
def storeEntry(self, modul, entry):
        """
        Upload one datastore entity dict to /dbtransfer/storeEntry.

        Validates that the entry's key matches the expected kind (*modul*)
        and the current app id before sending. No-op for a falsy *entry*.
        """
        if not entry:
            return

        # FIX: the original bound the datastore Key to ``k`` and then reused
        # ``k`` as the dict-iteration variable below, clobbering it (and also
        # shadowed the ``id`` builtin); distinct names avoid both.
        entry_id = entry["id"]
        key = Key(encoded=entry_id)
        if key.kind() != modul:
            raise ValueError("Invalid key! Key's kind should be %s, is %s" % (modul, key.kind()))
        if key.app() != self.getAppId():
            raise ValueError("Invalid key! Key's app should be %s, is %s" % (self.getAppId(), key.app()))
        try:
            encoded = {}
            for prop, val in entry.items():
                # Python 2: normalize unicode values to UTF-8 bytes.
                if isinstance(val, unicode):
                    val = val.encode("UTF-8")
                encoded[prop] = val
            self.ns.request("/dbtransfer/storeEntry", {"e": pickle.dumps(encoded).encode("HEX"), "key": self.importKey})
        except:
            # Dump the offending entry for debugging, then re-raise.
            print("------")
            print( entry )
            raise
项目:DomainDependencyMemeJsai2017    作者:GINK03    | 项目源码 | 文件源码
def step3():
  """Build a text->vector map from fasttext sentence vectors and pickle it.

  Streams ./dataset/bind.txt in 10000-line windows through fasttext; each
  output line ends with a 100-float vector, the rest is the sentence text.
  """
  key_vec = {}
  maxx    = 12505807
  size    = 10000
  for i in range(size, maxx, size):
    print(i, maxx)
    cmd = "head -n {i} ./dataset/bind.txt | tail -n {size} | ./fasttext print-sentence-vectors ./models/model.bin".format(i=i, size=size)
    res = os.popen(cmd).read()
    for line in res.split("\n"):
      if line == "":
        continue
      fields = line.split()
      # Last 100 whitespace-separated fields are the vector; the rest text.
      vec = list(map(float, fields[-100:]))
      key = " ".join(fields[:-100])
      # First occurrence wins.
      if key_vec.get(key) is None:
        key_vec[key] = vec
  # FIX: close the output file deterministically (original leaked the handle).
  with open("key_vec.pkl", "wb") as f:
    f.write(pickle.dumps(key_vec))
项目:DomainDependencyMemeJsai2017    作者:GINK03    | 项目源码 | 文件源码
def step4():
  """Cluster the stored sentence vectors into 128 KMeans groups and pickle the model."""
  # FIX: read/write files via context managers (original leaked both handles).
  with open("key_vec.pkl", "rb") as f:
    key_vec = pickle.loads(f.read())
  vecs = []
  for vec in key_vec.values():
    x = np.array(vec)
    # Skip vectors containing NaN (fasttext can emit malformed lines).
    if np.isnan(x).any():
      continue
    vecs.append(x)
  vecs   = np.array(vecs)
  kmeans = KMeans(n_clusters=128, init='k-means++', n_init=10, max_iter=300,
                       tol=0.0001, precompute_distances='auto', verbose=0,
                       random_state=None, copy_x=True, n_jobs=1)
  print("now fitting...")
  kmeans.fit(vecs)

  with open("kmeans.model", "wb") as f:
    f.write(pickle.dumps(kmeans))
  for p in kmeans.predict(vecs):
    print(p)
项目:DomainDependencyMemeJsai2017    作者:GINK03    | 项目源码 | 文件源码
def _step5(arr):
  """Predict a KMeans cluster per sentence and write txt+cluster JSON lines.

  *arr* is a (key, lines, tipe) tuple; output goes to tmp/tmp.{tipe}.{key}.json.
  """
  with open("kmeans.model", "rb") as f:
    kmeans = pickle.loads(f.read())
  key, lines, tipe = arr
  print(key)
  with open("./tmp/tmp.{tipe}.{key}.txt".format(tipe=tipe, key=key), "w") as f:
    f.write("\n".join(lines))
  res = os.popen("./fasttext print-sentence-vectors ./models/model.bin < tmp/tmp.{tipe}.{key}.txt".format(tipe=tipe, key=key)).read()
  # FIX: context manager so the JSON output is flushed and closed
  # (the original never closed ``w``).
  with open("tmp/tmp.{tipe}.{key}.json".format(tipe=tipe, key=key), "w") as w:
    for line in res.split("\n"):
      try:
        # Last 100 fields are the vector; short/garbage lines fail here.
        vec = list(map(float, line.split()[-100:]))
      except:
        print(line)
        print(res)
        continue
      x = np.array(vec)
      if np.isnan(x).any():
        continue
      cluster = kmeans.predict([vec])
      txt = line.split()[:-100]
      obj = {"txt": txt, "cluster": cluster.tolist()}
      data = json.dumps(obj, ensure_ascii=False)
      w.write(data + "\n")
项目:DomainDependencyMemeJsai2017    作者:GINK03    | 项目源码 | 文件源码
def step6():
  """For each corpus, count the clusters of the 3 sentences on either side
  of every term's sentence, building a 128-bin histogram per term."""

  for tipe in ["news", "nocturne"]:
    names = [name for name in reversed(sorted(glob.glob("./tmp/tmp.{tipe}.*.json".format(tipe=tipe))))]
    size  = len(names)
    for en, name in enumerate(names):
      # NOTE(review): term_clus is re-initialized for every file, but the
      # pickle below is written only after this loop ends - so only the
      # LAST file's counts survive. Looks unintentional; confirm before
      # relying on the output.
      term_clus = {}
      oss = []
      with open(name) as f:
        for line in f:
          line = line.strip()
          oss.append(json.loads(line))
      # Skip the first/last 3 sentences so the +-3 window stays in range.
      for i in range(3, len(oss) - 3):
        terms = set( oss[i]["txt"] )
        for term in terms:
          if term_clus.get(term) is None:
             term_clus[term] = [0.0]*128
          # Clusters of the 3 sentences before and after sentence i.
          cd = [oss[i+d]["cluster"][0] for d in [-3, -2, -1, 1, 2, 3]]
          for c in cd: 
            term_clus[term][c] += 1.0
      print("{}/{} finished {}".format(en, size, name))
    open("{tipe}.term_clus.pkl".format(tipe=tipe), "wb").write( pickle.dumps(term_clus) )
项目:DomainDependencyMemeJsai2017    作者:GINK03    | 项目源码 | 文件源码
def step7():
  """Normalize per-term cluster counts into distributions for both corpora.

  Reads {corpus}.term_clus.pkl, keeps terms with more than 30 total counts,
  and writes the row-normalized result to {corpus}.term_dist.pkl.
  """
  # FIX: the two corpus passes were copy-pasted; factored into one helper,
  # and file handles are now closed deterministically.
  def _normalize(path_in, path_out):
    with open(path_in, "rb") as f:
      term_clus = pickle.loads(f.read())
    # Keep only terms observed in more than 30 context windows.
    term_clus = {term: clus for term, clus in filter(lambda x: sum(x[1]) > 30, term_clus.items())}
    for term in term_clus.keys():
      vec = term_clus[term]
      acc = sum(vec)
      # Turn raw counts into a probability distribution.
      term_clus[term] = list(map(lambda x: x / acc, vec))
    with open(path_out, "wb") as f:
      f.write(pickle.dumps(term_clus))

  _normalize("./news.term_clus.pkl", "news.term_dist.pkl")
  _normalize("./nocturne.term_clus.pkl", "nocturne.term_dist.pkl")
项目:YoutubeTV    作者:dude56987    | 项目源码 | 文件源码
def setProtected(self,name):
        '''
        Mark *name* as protected so limit-based cleanup never removes it.
        '''
        # protected names live as a pickled list in protected.table
        # (``pickle``/``unpickle`` here are this project's helper functions)
        filePath=pathJoin(self.path,'protected.table')
        if pathExists(filePath):
            # load the existing protected list from disk
            protectedList=unpickle(loadFile(filePath))
        else:
            # first protected entry for this table
            protectedList=[]
        protectedList.append(name)
        # persist the updated list back to disk
        writeFile(filePath,pickle(protectedList))
    ################################################################################
项目:YoutubeTV    作者:dude56987    | 项目源码 | 文件源码
def deleteValue(self,name):
        '''
        Remove the value stored under *name*.

        Returns True when both the metadata entry and the backing file were
        removed, False when the name is unknown or its file is missing.
        '''
        debug.add('deleting value ',name)
        if name not in self.names:
            return False
        # drop the metadata entry first, then persist the name table
        filePath=self.namePaths.pop(name)
        writeFile(pathJoin(self.path,'names.table'),pickle(self.namePaths))
        # refresh the cached name list and length
        self.names=self.namePaths.keys()
        self.length=len(self.names)
        if not pathExists(filePath):
            return False
        # delete the file that held the value itself
        removeFile(filePath)
        return True
################################################################################
项目:psycopg2-for-aws-lambda    作者:iwitaly    | 项目源码 | 文件源码
def test_adapt_subclass(self):
        """A Json subclass overriding dumps() should control serialization."""
        from psycopg2.extras import json, Json

        class DecimalEncoder(json.JSONEncoder):
            def default(self, obj):
                # Decimals become plain floats in the JSON output.
                if isinstance(obj, Decimal):
                    return float(obj)
                return json.JSONEncoder.default(self, obj)

        class MyJson(Json):
            def dumps(self, obj):
                return json.dumps(obj, cls=DecimalEncoder)

        curs = self.conn.cursor()
        obj = Decimal('123.45')
        self.assertEqual(curs.mogrify("%s", (MyJson(obj),)),
                         b"'123.45'")
项目:psycopg2-for-aws-lambda    作者:iwitaly    | 项目源码 | 文件源码
def test_adapt_dumps(self):
        """Json should delegate serialization to the supplied dumps callable."""
        from psycopg2.extras import json, Json

        class DecimalEncoder(json.JSONEncoder):
            def default(self, obj):
                # Encode Decimal as float; defer everything else.
                if isinstance(obj, Decimal):
                    return float(obj)
                return json.JSONEncoder.default(self, obj)

        def dumps(obj):
            return json.dumps(obj, cls=DecimalEncoder)

        obj = Decimal('123.45')
        curs = self.conn.cursor()
        self.assertEqual(
            curs.mogrify("%s", (Json(obj, dumps=dumps),)),
            b"'123.45'")
项目:CodingDojo    作者:ComputerSocietyUNB    | 项目源码 | 文件源码
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """Atomically write *value* to the file cache, expiry header first."""
        self._createdir()  # Cache dir can be deleted at any time.
        fname = self._key_to_file(key, version)
        self._cull()  # make some room if necessary
        fd, tmp_path = tempfile.mkstemp(dir=self._dir)
        renamed = False
        try:
            with io.open(fd, 'wb') as f:
                # File layout: pickled expiry timestamp, then the
                # zlib-compressed pickled value.
                expiry = self.get_backend_timeout(timeout)
                f.write(pickle.dumps(expiry, -1))
                f.write(zlib.compress(pickle.dumps(value), -1))
            # Atomic replace: readers never observe a partial file.
            file_move_safe(tmp_path, fname, allow_overwrite=True)
            renamed = True
        finally:
            # Clean up the temp file if the move never happened.
            if not renamed:
                os.remove(tmp_path)
项目:pycos    作者:pgiri    | 项目源码 | 文件源码
def serialize(obj):
    """Pickle *obj* using the highest protocol this interpreter supports."""
    return pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
项目:flora    作者:Lamden    | 项目源码 | 文件源码
def post(self):
        """
        Upload a package: decrypt the posted payload, check that the templated
        solidity compiles against the provided example, then store it.
        """
        # NOTE(review): payload is assembled but never used afterwards
        # (dead store).
        payload = {
            'owner' : request.form['owner'],
            'package' : request.form['package'],
            'data' : request.form['data']
        }

        owner = request.form['owner']
        package = request.form['package']
        data = request.form['data']
        b = ENGINE.get_named_secret(owner)
        print(b)
        # SECURITY: eval() on values originating from storage / the request
        # allows arbitrary code execution if either source is attacker-
        # controlled. Presumably ``b`` and ``data`` hold repr()'d byte
        # tuples - confirm the format and switch to ast.literal_eval.
        secret = rsa.decrypt(eval(b), KEY[1])

        # data is a python tuple of the templated solidity at index 0 and an example payload at index 1
        # compilation of this code should return true
        # if there are errors, don't commit it to the db
        # otherwise, commit it
        raw_data = decrypt(secret, eval(data))
        package_data = json.loads(raw_data.decode('utf8'))
        '''
        payload = {
            'tsol' : open(code_path[0]).read(),
            'example' : example
        }
        '''

        # assert that the code compiles with the provided example
        tsol.compile(StringIO(package_data['tsol']), package_data['example'])

        # Template and example are stored pickled in the package db.
        template = pickle.dumps(package_data['tsol'])
        example = pickle.dumps(package_data['example'])

        if ENGINE.add_package(owner, package, template, example) == True:
            return success_payload(None, 'Package successfully uploaded.')
        return error_payload('Problem uploading package. Try again.')
项目:python-    作者:secondtonone1    | 项目源码 | 文件源码
def dump(type, exc):
        """Pickle ``type`` and ``exc``, always returning picklable results.

        If ``exc`` cannot be pickled, it is replaced with an
        ``UnpickleableException`` built from its repr() first.
        """
        try:
            pickled = (pickle.dumps(type), pickle.dumps(exc))
        except Exception:
            # get UnpickleableException inside the sandbox
            from setuptools.sandbox import UnpickleableException as cls
            pickled = cls.dump(cls, cls(repr(exc)))
        return pickled
项目:AVSR-Deep-Speech    作者:pandeydivesh15    | 项目源码 | 文件源码
def do_POST(self):
    """Accept a finished job from a worker and answer with the next one.

    Replies 202 while the coordinator is not started, 404 when no further
    job is available, and otherwise sends back the pickled next job.
    """
    if not COORD.started:
        # Coordinator not ready yet - tell the worker to retry later.
        self.send_response(202)
        self.end_headers()
        return
    body = self.rfile.read(int(self.headers['content-length']))
    next_one = COORD.next_job(pickle.loads(body))
    if next_one:
        self._send_answer(pickle.dumps(next_one))
        return
    self.send_response(404)
    self.end_headers()
项目:AVSR-Deep-Speech    作者:pandeydivesh15    | 项目源码 | 文件源码
def next_job(self, job):
        '''Sends a finished job back to the coordinator and retrieves in exchange the next one.

        Kwargs:
            job (WorkerJob): job that was finished by a worker and whose results are to be
                digested by the coordinator

        Returns:
            WorkerJob. next job of one of the running epochs that will get
                associated with the worker from the finished job and put into state 'running'
        '''
        if is_chief:
            # Chief path: digest the finished job locally and hand out the next one.
            # Try to find the epoch the job belongs to
            epoch = next((epoch for epoch in self._epochs_running if epoch.id == job.epoch_id), None)
            if epoch:
                # We are going to manipulate things - let's avoid undefined state
                # NOTE(review): the epoch lookup above happens outside self._lock;
                # assumes _epochs_running is only mutated under this same lock
                # elsewhere - verify.
                with self._lock:
                    # Let the epoch finish the job
                    epoch.finish_job(job)
                    # Check, if epoch is done now
                    if epoch.done():
                        # If it declares itself done, move it from 'running' to 'done' collection
                        self._epochs_running.remove(epoch)
                        self._epochs_done.append(epoch)
                        # Show the short and/or full WER report
                        log_info(epoch)
            else:
                # There was no running epoch found for this job - this should never happen.
                log_error('There is no running epoch of id %d for job with ID %d.' % (job.epoch_id, job.id))
            return self.get_job(job.worker)

        # We are a remote worker and have to hand over to the chief worker by HTTP
        result = self._talk_to_chief('', data=pickle.dumps(job))
        # An empty/None HTTP response means no job is currently available.
        if result:
            result = pickle.loads(result)
        return result
项目:AVSR-Deep-Speech    作者:pandeydivesh15    | 项目源码 | 文件源码
def do_POST(self):
    """Accept a finished job from a worker and answer with the next one.

    Replies 202 while the coordinator is not started, 404 when no further
    job is available, and otherwise sends back the pickled next job.
    """
    if not COORD.started:
        # Coordinator not ready yet - tell the worker to retry later.
        self.send_response(202)
        self.end_headers()
        return
    body = self.rfile.read(int(self.headers['content-length']))
    next_one = COORD.next_job(pickle.loads(body))
    if next_one:
        self._send_answer(pickle.dumps(next_one))
        return
    self.send_response(404)
    self.end_headers()
项目:AVSR-Deep-Speech    作者:pandeydivesh15    | 项目源码 | 文件源码
def do_POST(self):
    """Accept a finished job from a worker and answer with the next one.

    Replies 202 while the coordinator is not started, 404 when no further
    job is available, and otherwise sends back the pickled next job.
    """
    if not COORD.started:
        # Coordinator not ready yet - tell the worker to retry later.
        self.send_response(202)
        self.end_headers()
        return
    body = self.rfile.read(int(self.headers['content-length']))
    next_one = COORD.next_job(pickle.loads(body))
    if next_one:
        self._send_answer(pickle.dumps(next_one))
        return
    self.send_response(404)
    self.end_headers()
项目:AVSR-Deep-Speech    作者:pandeydivesh15    | 项目源码 | 文件源码
def next_job(self, job):
        '''Sends a finished job back to the coordinator and retrieves in exchange the next one.

        Kwargs:
            job (WorkerJob): job that was finished by a worker and whose results are to be
                digested by the coordinator

        Returns:
            WorkerJob. next job of one of the running epochs that will get
                associated with the worker from the finished job and put into state 'running'
        '''
        if is_chief:
            # Chief path: digest the finished job locally and hand out the next one.
            # Try to find the epoch the job belongs to
            epoch = next((epoch for epoch in self._epochs_running if epoch.id == job.epoch_id), None)
            if epoch:
                # We are going to manipulate things - let's avoid undefined state
                # NOTE(review): the epoch lookup above happens outside self._lock;
                # assumes _epochs_running is only mutated under this same lock
                # elsewhere - verify.
                with self._lock:
                    # Let the epoch finish the job
                    epoch.finish_job(job)
                    # Check, if epoch is done now
                    if epoch.done():
                        # If it declares itself done, move it from 'running' to 'done' collection
                        self._epochs_running.remove(epoch)
                        self._epochs_done.append(epoch)
                        # Show the short and/or full WER report
                        log_info(epoch)
            else:
                # There was no running epoch found for this job - this should never happen.
                log_error('There is no running epoch of id %d for job with ID %d.' % (job.epoch_id, job.id))
            return self.get_job(job.worker)

        # We are a remote worker and have to hand over to the chief worker by HTTP
        result = self._talk_to_chief('', data=pickle.dumps(job))
        # An empty/None HTTP response means no job is currently available.
        if result:
            result = pickle.loads(result)
        return result
项目:my-first-blog    作者:AnkurBegining    | 项目源码 | 文件源码
def dump(type, exc):
        """Pickle ``type`` and ``exc``, always returning picklable results.

        If ``exc`` cannot be pickled, it is replaced with an
        ``UnpickleableException`` built from its repr() first.
        """
        try:
            pickled = (pickle.dumps(type), pickle.dumps(exc))
        except Exception:
            # get UnpickleableException inside the sandbox
            from setuptools.sandbox import UnpickleableException as cls
            pickled = cls.dump(cls, cls(repr(exc)))
        return pickled
项目:human-rl    作者:gsastry    | 项目源码 | 文件源码
def fix_episode(episode_path):
    """Upgrade a recorded episode file in place to version 2.

    Version 2 stores on each frame the action taken *from* it, which in
    the v1 layout was recorded on the following frame. The last frame is
    dropped because it has no successor action. Unreadable (truncated)
    episode files are deleted instead of upgraded.
    """
    try:
        episode = load_episode(episode_path)
    except EOFError:
        # Truncated/corrupt recording - remove it rather than keep a file
        # that can never be loaded.
        print("Error reading: {}".format(episode_path))
        os.remove(episode_path)
        return
    if episode.version == 2:
        print("Version 2 already: {}".format(episode_path))
        return

    old_frames = episode.frames
    episode.frames = []
    # Pair each frame with its successor; the frame adopts the successor's
    # action. This naturally drops the final frame (no following action).
    for frame, successor in zip(old_frames, old_frames[1:]):
        frame.action = successor.action
        episode.frames.append(frame)

    episode.version = 2

    # Serialize once and write the gzip-compressed blob back in place.
    s = pickle.dumps(episode)
    with gzip.open(episode_path, "wb") as f:
        f.write(s)
项目:human-rl    作者:gsastry    | 项目源码 | 文件源码
def fix_episode(episode_path):
    """Upgrade a recorded episode file in place to version 2.

    Version 2 stores on each frame the action taken *from* it, which in
    the v1 layout was recorded on the following frame. The last frame is
    dropped because it has no successor action. Unreadable (truncated)
    episode files are deleted instead of upgraded.
    """
    try:
        episode = load_episode(episode_path)
    except EOFError:
        # Truncated/corrupt recording - remove it rather than keep a file
        # that can never be loaded.
        print("Error reading: {}".format(episode_path))
        os.remove(episode_path)
        return
    if episode.version == 2:
        print("Version 2 already: {}".format(episode_path))
        return

    old_frames = episode.frames
    episode.frames = []
    # Pair each frame with its successor; the frame adopts the successor's
    # action. This naturally drops the final frame (no following action).
    for frame, successor in zip(old_frames, old_frames[1:]):
        frame.action = successor.action
        episode.frames.append(frame)

    episode.version = 2

    # Serialize once and write the gzip-compressed blob back in place.
    s = pickle.dumps(episode)
    with gzip.open(episode_path, "wb") as f:
        f.write(s)