Python redis 模块,from_url() 实例源码

我们从 Python 开源项目中,提取了以下 50 个代码示例,用于说明如何使用 redis.from_url()。

项目:zinc    作者:PressLabs    | 项目源码 | 文件源码
def reconcile_zones(bind=True):
    """
    Periodic task that reconciles everything zone-related (zone deletion, policy record updates)
    """
    redis_client = redis.from_url(settings.LOCK_SERVER_URL)
    # Distributed lock so only one reconcile task runs cluster-wide at a time.
    lock = redis_client.lock('reconcile_zones', timeout=60)

    if not lock.acquire(blocking=False):
        # FIX: corrected "aquire"/"Probaly" typos in the log message.
        logger.info('Cannot acquire task lock. Probably another task is running. Bailing out.')
        return

    try:
        for zone in models.Zone.need_reconciliation():
            try:
                zone.reconcile()
                lock.extend(5)  # extend the lease each time we rebuild a tree
            except Exception:
                # One failing zone must not prevent the others from reconciling.
                logger.exception(
                    "reconcile failed for Zone %s.%s", zone, zone.root
                )
    finally:
        lock.release()
项目:zinc    作者:PressLabs    | 项目源码 | 文件源码
def update_ns_propagated(bind=True):
    """Periodic task that refreshes the ns_propagated flag on all zones."""
    redis_client = redis.from_url(settings.LOCK_SERVER_URL)

    # make this lock timeout big enough to cover updating about 1000 zones
    # ns_propagated flag and small enough to update the flag in an acceptable
    # time frame. 5 minutes sound good at the moment.
    lock = redis_client.lock('update_ns_propagated', timeout=300)
    if not lock.acquire(blocking=False):
        logger.info('Cannot acquire task lock. Probably another task is running. Bailing out.')
        return
    try:
        models.Zone.update_ns_propagated(delay=getattr(settings, 'ZINC_NS_UPDATE_DELAY', 0.3))
    except Exception:
        logger.exception("Could not update ns_propagated flag")
    finally:
        # FIX: release inside finally so the lock is always freed; previously
        # release sat outside the try/except and could be skipped, leaving the
        # lock held until its timeout expired.
        lock.release()
项目:map-of-innovation    作者:AnanseGroup    | 项目源码 | 文件源码
def addfromcsv(self):
        """Bulk-load spaces from the merge-ready CSV file into Redis.

        Each CSV row becomes a Redis hash keyed by the space name (spaces
        stripped) plus a creation timestamp; rows are stored unarchived and
        pre-verified.
        """
        # REDIS_URL environment variable wins; fall back to a local server.
        if os.environ.get("REDIS_URL"):
            redis_url = os.environ.get("REDIS_URL")
        else:
            redis_url = "localhost"
        r_server = redis.from_url(redis_url)
        with open('mapofinnovation/public/spaces_ready_for_merge.csv', 'rb') as csv_file:
            # Sniff the dialect from the whole file, then rewind to parse it.
            dialect = csv.Sniffer().sniff(csv_file.read(), delimiters=',')
            csv_file.seek(0)
            csv_reader = csv.DictReader(csv_file, dialect=dialect)
            for row in csv_reader:
                # FIX: the loop body was dedented out of the for loop (a
                # SyntaxError as scraped); restored the intended nesting so
                # every row is keyed and written.
                key = row['name'] + str(datetime.now())
                row.update({'archived': False})
                row.update({'verified': True})
                r_server.hmset(re.sub(' ', '', key), row)
        return {'success': 'true'}
项目:map-of-innovation    作者:AnanseGroup    | 项目源码 | 文件源码
def wikipage(self,id=None):
      """Render the wiki page for the space stored under Redis key *id*.

      Returns a plain error string when *id* is missing or unknown.
      """
      #Return a wiki for the given space
      if os.environ.get("REDIS_URL") :
        redis_url = os.environ.get("REDIS_URL")
      else:
        redis_url = "localhost"
      r = redis.from_url(redis_url)
      if id is None :
        return 'Provide a valid space id'
      elif r.exists(id):
        data = r.hgetall(id)
        # NOTE(review): assumes street_address is Latin-1 bytes and
        # primary_website is a percent-encoded UTF-8 string (Python 2
        # str.decode / urllib.unquote) — confirm against the writer side.
        addresstext = str(data['street_address']).decode("ISO-8859-1")
        websitetext = urllib.unquote(data['primary_website']).decode('utf8')
        return render('/wikipage.html',extra_vars={'last_updated':str(data['last_updated']),'name':str(data['name']),'status':str(data['status']),'website_url':websitetext,'primarytype':str(data['primary_type']),'secondarytype':'','space_description':str(data['description']),'address':addresstext})
      else :
        return 'There is no space with this id. Please recheck and submit'
项目:map-of-innovation    作者:AnanseGroup    | 项目源码 | 文件源码
def getAllSpaces(self):
    """Return every space hash stored in Redis as a list of dicts."""
        #return all spaces in json format
    spaceslist = []
    if os.environ.get("REDIS_URL") :
        redis_url = os.environ.get("REDIS_URL")
    else:
        redis_url = "localhost"
    r = redis.from_url(redis_url)
    # scan_iter walks every key incrementally (unlike blocking KEYS)
    for key in r.scan_iter():
        row = r.hgetall(key)
        space={}
        for i in row:
            print i
            # skip fields that are not part of the public listing
            if i in ("image_url", "g_place_id"):
                pass
            else:
                # replace undecodable bytes rather than raising (Python 2)
                space[i]=unicode(row[i], errors='replace')
        spaceslist.append(space)
    return spaceslist
项目:map-of-innovation    作者:AnanseGroup    | 项目源码 | 文件源码
def addSpace(self):
    """Add a new space built from the request params.

    Skips the write when a space with the same primary website already
    exists. Always returns {'sucess': 'true'} (key spelling kept for
    backward compatibility with existing callers).
    """
    #add a space
    if os.environ.get("REDIS_URL") :
        redis_url = os.environ.get("REDIS_URL")
    else:
        redis_url = "localhost"
    r = redis.from_url(redis_url)
    surl = request.params.get("primary_website")
    exists = False
    if surl is not None:
        exists = self._search_space(surl)
    if exists is False:
        # FIX: the whole write path now lives inside this branch; the old
        # code referenced dparams after the if-block, raising NameError
        # whenever a duplicate website was found (exists is True).
        dparams = {}
        for k, v in request.params.items():
            dparams.update({k: v})
        dparams.update({'archived': False})
        dparams.update({'verified': False})
        skey = request.params.get("name") + str(datetime.now())
        r.hmset(re.sub(' ', '', skey), dparams)
    return {'sucess':'true'}
项目:oa_qian    作者:sunqb    | 项目源码 | 文件源码
def redis(app, config, args, kwargs):
        """Build a RedisCache from the app's CACHE_* configuration keys."""
        kwargs['host'] = config.get('CACHE_REDIS_HOST', 'localhost')
        kwargs['port'] = config.get('CACHE_REDIS_PORT', 6379)

        # Optional settings are only forwarded when they are set and truthy.
        for conf_name, kwarg_name in (('CACHE_REDIS_PASSWORD', 'password'),
                                      ('CACHE_KEY_PREFIX', 'key_prefix'),
                                      ('CACHE_REDIS_DB', 'db')):
            value = config.get(conf_name)
            if value:
                kwargs[kwarg_name] = value

        redis_url = config.get('CACHE_REDIS_URL')
        if redis_url:
            # A full URL overrides host/port: hand a ready client in as "host".
            kwargs['host'] = redis_from_url(redis_url, db=kwargs.pop('db', None))

        return RedisCache(*args, **kwargs)
项目:turingtweets    作者:jjfeore    | 项目源码 | 文件源码
def gen_markov():  # pragma: no cover
    """Compile all tweets into a Markov chain and pickle it into Redis."""
    host_url = os.environ.get('REDIS_URL')
    access_dict = {'sqlalchemy.url': os.environ.get('DATABASE_URL')}
    session = sessionmaker(bind=get_engine(access_dict))()

    # One tweet per line, newline-terminated, as markovify.NewlineText expects.
    corpus = ''.join(tweet.tweet + '\n' for tweet in session.query(Tweet).all())

    chain = markovify.NewlineText(corpus, state_size=3)
    redis.from_url(host_url).set('markov_tweets', pickle.dumps(chain))
项目:turingtweets    作者:jjfeore    | 项目源码 | 文件源码
def gen_markov():
    """Build a Markov chain from every stored tweet and cache it in Redis."""
    redis_url = os.environ.get('REDIS_URL')
    engine = get_engine({'sqlalchemy.url': os.environ.get('DATABASE_URL')})
    session = sessionmaker(bind=engine)()

    # Concatenate all tweets, one per line, each terminated with a newline.
    lines = []
    for tweet in session.query(Tweet).all():
        lines.append(tweet.tweet + '\n')
    corpus = ''.join(lines)

    chain = markovify.NewlineText(corpus, state_size=3)
    pickled = pickle.dumps(chain)
    redis.from_url(redis_url).set('markov_tweets', pickled)
项目:bqueryd    作者:visualfabriq    | 项目源码 | 文件源码
def __init__(self, data_dir=bqueryd.DEFAULT_DATA_DIR, redis_url='redis://127.0.0.1:6379/0', loglevel=logging.DEBUG):
        """Worker node: a ZMQ ROUTER socket with a random 8-byte identity,
        registered against the controllers tracked in Redis.

        Raises Exception when data_dir does not exist or is not a directory.
        """
        if not os.path.exists(data_dir) or not os.path.isdir(data_dir):
            raise Exception("Datadir %s is not a valid directory" % data_dir)
        self.worker_id = binascii.hexlify(os.urandom(8))  # unique zmq identity
        self.node_name = socket.gethostname()
        self.data_dir = data_dir
        self.data_files = set()
        context = zmq.Context()
        self.socket = context.socket(zmq.ROUTER)
        self.socket.setsockopt(zmq.LINGER, 500)  # cap close() blocking at 500ms
        self.socket.identity = self.worker_id
        self.poller = zmq.Poller()
        self.poller.register(self.socket, zmq.POLLIN | zmq.POLLOUT)
        self.redis_server = redis.from_url(redis_url)
        self.controllers = {}  # Keep a dict of timestamps when you last spoke to controllers
        self.check_controllers()
        self.last_wrm = 0
        self.start_time = time.time()
        self.logger = bqueryd.logger.getChild('worker ' + self.worker_id)
        self.logger.setLevel(loglevel)
        self.msg_count = 0
        # NOTE(review): term_signal() is called here, so it presumably returns
        # the actual handler callable — confirm it isn't meant to be passed
        # uncalled.
        signal.signal(signal.SIGTERM, self.term_signal())
项目:bqueryd    作者:visualfabriq    | 项目源码 | 文件源码
def __init__(self, address=None, timeout=120, redis_url='redis://127.0.0.1:6379/0', loglevel=logging.INFO, retries=3):
        """RPC client.

        When *address* is given, connect to that controller only; otherwise
        pick a random controller from the Redis registry set.

        Raises Exception when no controller is registered in Redis.
        """
        self.logger = bqueryd.logger.getChild('rpc')
        self.logger.setLevel(loglevel)
        self.context = zmq.Context()
        self.redis_url = redis_url
        redis_server = redis.from_url(redis_url)
        self.retries = retries
        self.timeout = timeout
        self.identity = binascii.hexlify(os.urandom(8))  # unique zmq identity

        if not address:
            # Bind to a random controller
            controllers = list(redis_server.smembers(bqueryd.REDIS_SET_KEY))
            if len(controllers) < 1:
                raise Exception('No Controllers found in Redis set: ' + bqueryd.REDIS_SET_KEY)
            random.shuffle(controllers)
        else:
            controllers = [address]
        self.controllers = controllers
        self.connect_socket()
项目:crm    作者:Incubaid    | 项目源码 | 文件源码
def _get_cache_client():
    """
    # we use bare python redis client to get list of all keys
    # since this is not supported in the flask cache
    :return: redis connection
    """

    config = app.cache.config
    cache_type = config['CACHE_TYPE']
    if cache_type != 'redis':
        print('NOT SUPPORTED CACHE BACKEND, ONLY SUPPORTED IS (redis)')
        exit(1)
    try:
        return redis_from_url(app.cache.config['CACHE_REDIS_URL'])
    except Exception:
        # FIX: narrowed the bare except, which also swallowed SystemExit and
        # KeyboardInterrupt; behaviour for bad URLs is unchanged.
        print('BAD REDIS URL PROVIDED BY (CACHE_BACKEND_URI)')
        exit(1)
项目:FileStoreGAE    作者:liantian-cn    | 项目源码 | 文件源码
def redis(app, config, args, kwargs):
        """Instantiate a RedisCache backend from the CACHE_* config."""
        base = dict(
            host=config.get('CACHE_REDIS_HOST', 'localhost'),
            port=config.get('CACHE_REDIS_PORT', 6379),
        )
        kwargs.update(base)

        password = config.get('CACHE_REDIS_PASSWORD')
        key_prefix = config.get('CACHE_KEY_PREFIX')
        db_number = config.get('CACHE_REDIS_DB')

        if password:
            kwargs['password'] = password
        if key_prefix:
            kwargs['key_prefix'] = key_prefix
        if db_number:
            kwargs['db'] = db_number

        redis_url = config.get('CACHE_REDIS_URL')
        if redis_url:
            # When a URL is configured it takes precedence: the cache accepts
            # a pre-built client in place of a hostname.
            db_arg = kwargs.pop('db', None)
            kwargs['host'] = redis_from_url(redis_url, db=db_arg)

        return RedisCache(*args, **kwargs)
项目:steppy    作者:ygravrand    | 项目源码 | 文件源码
def __init__(self, config):
        """Set up the Flask app plus optional Redis-backed sockets/console."""
        server_cfg = config['server']

        self.redis = None
        self.backend = None
        if server_cfg.get('redis_url'):
            self.redis = redis.from_url(server_cfg['redis_url'])
            self.redis_chan = server_cfg['redis_chan']
            self.backend = ServerBackend(self.redis, self.redis_chan)
        else:
            print('No redis configured, disabling Websockets and remote web console')

        self.flask_host = server_cfg['host']
        self.flask_port = server_cfg['port']
        self.flask_app = Flask(__name__)
        self.flask_app.add_url_rule('/', 'index', self._index)
        sockets = Sockets(self.flask_app)
        # sockets.add_url_rule('/submit', 'submit', self._inbox)
        sockets.add_url_rule('/status', 'status', self._status)

        # Remote console only makes sense when redis is available.
        if self.redis:
            self.console = PushingConsole(self.redis, self.redis_chan, server_cfg['terse'])
        else:
            self.console = None
项目:mWorkerService    作者:smices    | 项目源码 | 文件源码
def main(msg, config, silent=False):
    """
    Job enqueue
    :param msg:str message payload handed to push_messenger
    :param config: mapping with the queue DSN at config["queue"]["dsn"]
    :param silent: when True, return the job instead of printing it
    :return: the enqueued rq job (only when silent is True)
    """
    queue_dsn = config["queue"]["dsn"]
    redis_conn = redis.from_url(queue_dsn)

    # Low-priority queue; the job result is kept for 60 seconds.
    q = Queue('low', connection=redis_conn)

    ret = q.enqueue(push_messenger, msg, result_ttl=60)

    if silent is True:
        return ret
    else:
        print ret
项目:domain-discovery-crawler    作者:TeamHG-Memex    | 项目源码 | 文件源码
def make_crawler(spider_cls=ATestBaseSpider, **extra_settings):
    """Create a crawler for *spider_cls* after wiping its redis queue state."""
    # clean up queue before starting spider
    assert spider_cls.name.startswith('test_'), 'pass a special test spider'
    spider_name = spider_cls.name
    server = redis.from_url('redis://localhost')
    queue_keys = server.keys(SCHEDULER_QUEUE_KEY % {'spider': spider_name} + '*')
    server.delete(SCHEDULER_DUPEFILTER_KEY % {'spider': spider_name}, *queue_keys)

    settings = Settings()
    settings.setmodule(dd_crawler.settings)
    settings['ITEM_PIPELINES']['tests.utils.CollectorPipeline'] = 100
    settings.update(extra_settings)
    return CrawlerRunner(settings).create_crawler(spider_cls)
项目:mes    作者:osess    | 项目源码 | 文件源码
def __init__(self):
        """Resolve the redis connection for this backend.

        Resolution order: an explicit CONNECTION_CLASS factory, then a URL
        string in REDIS_CONNECTION, then a kwargs dict for redis.Redis.

        Raises ImproperlyConfigured when redis-py is not installed.
        """
        super(RedisBackend, self).__init__()
        self._prefix = settings.REDIS_PREFIX
        connection_cls = settings.CONNECTION_CLASS
        if connection_cls is not None:
            # A dotted-path factory takes precedence over REDIS_CONNECTION.
            self._rd = utils.import_module_attr(connection_cls)()
        else:
            try:
                import redis
            except ImportError:
                raise ImproperlyConfigured(
                    "The Redis backend requires redis-py to be installed.")
            # basestring: this module targets Python 2.
            if isinstance(settings.REDIS_CONNECTION, basestring):
                self._rd = redis.from_url(settings.REDIS_CONNECTION)
            else:
                self._rd = redis.Redis(**settings.REDIS_CONNECTION)
项目:AlexaBot    作者:jacobajit    | 项目源码 | 文件源码
def gettoken(uid):
    """Return a valid Amazon access token for *uid*.

    Uses the cached access token when present; otherwise exchanges the
    stored refresh token for a new one. Returns False when no usable
    token can be produced.
    """
    red = redis.from_url(redis_url)
    token = red.get(uid+"-access_token")
    refresh = red.get(uid+"-refresh_token")
    if token:
        return token
    elif refresh:
        #good refresh token
        try:
            payload = {"client_id" : Client_ID, "client_secret" : Client_Secret, "refresh_token" : refresh, "grant_type" : "refresh_token", }
            url = "https://api.amazon.com/auth/o2/token"
            r = requests.post(url, data = payload)
            resp = json.loads(r.text)
            red.set(uid+"-access_token", resp['access_token'])
            red.expire(uid+"-access_token", 3600)  # token is valid for one hour
            return resp['access_token']
        #bad refresh token (HTTP failure, non-JSON body, missing key)
        except Exception:
            # FIX: narrowed the bare except so SystemExit/KeyboardInterrupt
            # are no longer swallowed; failure behaviour is unchanged.
            return False
    else:
        return False

#function version of getting Alexa's response in text
项目:AlexaBot    作者:jacobajit    | 项目源码 | 文件源码
def get(self):
        """OAuth callback: exchange the authorization code for tokens.

        Stores the tokens in redis keyed by the 'user' cookie when present;
        otherwise redirects home carrying the refresh token.
        """
        code=self.get_argument("code")
        mid=self.get_cookie("user")
        # Rebuild the redirect_uri exactly as registered (https://host/code).
        path = "https" + "://" + self.request.host
        callback = path+"/code"
        payload = {"client_id" : Client_ID, "client_secret" : Client_Secret, "code" : code, "grant_type" : "authorization_code", "redirect_uri" : callback }
        url = "https://api.amazon.com/auth/o2/token"
        r = requests.post(url, data = payload)
        red = redis.from_url(redis_url)
        resp = json.loads(r.text)
        if mid != None:
            print("fetched MID: ",mid)
            # Access token expires in an hour; refresh token has no expiry.
            red.set(mid+"-access_token", resp['access_token'])
            red.expire(mid+"-access_token", 3600)
            red.set(mid+"-refresh_token", resp['refresh_token'])
            self.render("static/return.html")
            bot.send_text_message(mid, "Great, you're logged in. Start talking to Alexa!")
        else:
            self.redirect("/?refreshtoken="+resp['refresh_token'])
项目:sbdspider    作者:onecer    | 项目源码 | 文件源码
def from_settings(settings):
    """Build a redis client from *settings*.

    REDIS_URL takes precedence over the host/port pair.
    """
    url = settings.get('REDIS_URL', REDIS_URL)
    host = settings.get('REDIS_HOST', REDIS_HOST)
    port = settings.get('REDIS_PORT', REDIS_PORT)

    return redis.from_url(url) if url else redis.Redis(host=host, port=port)
项目:sbdspider    作者:onecer    | 项目源码 | 文件源码
def from_settings_filter(settings):
    """Build the dupe-filter redis client; FILTER_URL wins over host/port/db."""
    url = settings.get('FILTER_URL', FILTER_URL)
    host = settings.get('FILTER_HOST', FILTER_HOST)
    port = settings.get('FILTER_PORT', FILTER_PORT)
    db = settings.get('FILTER_DB', FILTER_DB)

    if url:
        return redis.from_url(url)
    return redis.Redis(host=host, port=port, db=db)
项目:scrapy-mq-redis    作者:rdcprojects    | 项目源码 | 文件源码
def from_settings(settings):
    """
    :param: settings object
    :return: (channel, redis_server) tuple
    """
    conn_type = settings.get('RABBITMQ_CONNECTION_TYPE', RABBITMQ_CONNECTION_TYPE)
    conn_params = settings.get('RABBITMQ_CONNECTION_PARAMETERS', RABBITMQ_CONNECTION_PARAMETERS)

    # Dispatch table mapping the configured type to the pika connection class.
    conn_classes = {
        'blocking': pika.BlockingConnection,
        'libev': pika.LibevConnection,
        'select': pika.SelectConnection,
        'tornado': pika.TornadoConnection,
        'twisted': pika.TwistedConnection,
    }
    connection = conn_classes[conn_type](pika.ConnectionParameters(**conn_params))

    channel = connection.channel()
    channel.basic_qos(prefetch_count=1)  # deliver one message at a time

    url = settings.get('REDIS_URL', REDIS_URL)
    host = settings.get('REDIS_HOST', REDIS_HOST)
    port = settings.get('REDIS_PORT', REDIS_PORT)

    # REDIS_URL takes precedence over host/port specification.
    redis_server = redis.from_url(url) if url else redis.Redis(host=host, port=port)

    return channel, redis_server
项目:posm-imagery-api    作者:mojodna    | 项目源码 | 文件源码
def redis_connect(self):
        """Open a redis client from $REDIS_URL (defaults to redis://)."""
        url = os.environ.get('REDIS_URL', 'redis://')
        return redis.from_url(url)
项目:rq-scheduler-dashboard    作者:lamflam    | 项目源码 | 文件源码
def setup_rq_connection():
    """Attach a redis connection to current_app, from URL or host parts."""
    cfg = current_app.config
    redis_url = cfg.get('REDIS_URL')
    if redis_url:
        current_app.redis_conn = from_url(redis_url)
        return
    current_app.redis_conn = Redis(
        host=cfg.get('REDIS_HOST'),
        port=cfg.get('REDIS_PORT'),
        password=cfg.get('REDIS_PASSWORD'),
        db=cfg.get('REDIS_DB'),
    )
项目:salesforce-stripe    作者:texastribune    | 项目源码 | 文件源码
def __init__(self, key):
        """Bind this object to *key* and open a connection to REDIS_URL."""
        self.connection = redis.from_url(REDIS_URL)
        self.key = key
项目:toshi-reputation-service    作者:toshiapp    | 项目源码 | 文件源码
def main():
    """Start the application with an rq Queue on the configured redis."""
    app = Application(urls)
    redis_conn = redis.from_url(app.config['redis']['url'])
    app.q = Queue(connection=redis_conn)
    app.start()
项目:coscup-line-bot    作者:ncuoolab    | 项目源码 | 文件源码
def teardown_method(self, test_method):
        """Wipe the whole test redis after every test method."""
        redis.from_url(REDIS_URL).flushall()
项目:coscup-line-bot    作者:ncuoolab    | 项目源码 | 文件源码
def test_add_command_one_lang(self):
        """Adding 10 zh_TW commands yields exactly 10 keys for that language."""
        get_dao().add_commands(gen_test_commands(10, 'zh_TW'))
        conn = redis.from_url(REDIS_URL)
        assert len(conn.keys('COMMAND::zh_TW::*')) == 10
项目:coscup-line-bot    作者:ncuoolab    | 项目源码 | 文件源码
def test_add_command_two_lang(self):
        """Commands for two languages are stored under separate key prefixes."""
        get_dao().add_commands(gen_test_commands(10, 'zh_TW'))
        get_dao().add_commands(gen_test_commands(20, 'en'))
        conn = redis.from_url(REDIS_URL)
        assert len(conn.keys('COMMAND::zh_TW::*')) == 10
        assert len(conn.keys('COMMAND::en::*')) == 20
        assert len(conn.keys('COMMAND::*')) == 30
项目:coscup-line-bot    作者:ncuoolab    | 项目源码 | 文件源码
def test_delete_command(self):
        """clear_all_command removes every COMMAND::* key."""
        self.test_add_command_two_lang()
        get_dao().clear_all_command()
        conn = redis.from_url(REDIS_URL)
        assert len(conn.keys('COMMAND::*')) == 0
项目:coscup-line-bot    作者:ncuoolab    | 项目源码 | 文件源码
def test_update_command(self):
        """update_commands replaces the existing set instead of merging."""
        conn = redis.from_url(REDIS_URL)
        get_dao().add_commands(gen_test_commands(10, 'zh_TW'))
        assert len(conn.keys('COMMAND::zh_TW::*')) == 10
        get_dao().update_commands(gen_test_commands(20, 'en'))
        # the zh_TW entries must be gone after the update
        assert len(conn.keys('COMMAND::zh_TW::*')) == 0
        assert len(conn.keys('COMMAND::en::*')) == 20
        assert len(conn.keys('COMMAND::*')) == 20
项目:map-of-innovation    作者:AnanseGroup    | 项目源码 | 文件源码
def index(self):
        """Render the front-page map with one marker per space in Redis."""
        # Return a rendered front page  template
        markers = []
        # Maps marker field name -> redis hash field name.
        indices = {
          "name": "name",
          "city": "city",
          "country": "country",
          "website": "primary_website",
          "primarytype": "primary_type",
          "multitypes": "types_multiple",
          "description": "description",
          "latitude": "latitude",
          "longitude":"longitude",
          "services": "services"
        }
        if os.environ.get("REDIS_URL") :
          redis_url = os.environ.get("REDIS_URL")
        else:
          redis_url = "localhost"
        r = redis.from_url(redis_url)
        i = 0
        for key in r.scan_iter():
          marker = {}
          row = r.hgetall(key)
          for header in indices.keys():
            # replace undecodable bytes instead of raising (Python 2 unicode)
            marker[header] = unicode(row[str(indices[header])], errors='replace')
          markers.append(marker)
        c.markers = json.dumps(markers)
        return render('/makermap.html')
项目:map-of-innovation    作者:AnanseGroup    | 项目源码 | 文件源码
def archiveSpace(self):
    """Set the 'archived' flag on the space named by the request's id param."""
    #archive a space
    space_key = request.params.get("id")
    redis_url = os.environ.get("REDIS_URL") or "localhost"
    connection = redis.from_url(redis_url)
    connection.hset(space_key, 'archived', True)
    return {'sucess':'true'}
项目:turingtweets    作者:jjfeore    | 项目源码 | 文件源码
def get_markov():
    """Return a redis connection."""
    from turingtweets.scripts.builddict import gen_markov
    # Regenerate the chain first so the returned connection has fresh data.
    gen_markov()
    return redis.from_url(os.environ.get('REDIS_URL'))
项目:turingtweets    作者:jjfeore    | 项目源码 | 文件源码
def gen_tweet():
    """Read the redis, and build a fake tweet from that."""
    chains = redis.from_url(os.environ.get('REDIS_URL'))
    # NOTE: pickle.loads on redis data — only safe because we wrote it ourselves.
    markov_chains = pickle.loads(chains.get('markov_tweets'))

    the_tweet = None
    while not the_tweet:
        # at most 140 chars, aiming for at least 70; retries until one fits
        the_tweet = markov_chains.make_short_sentence(140, 70)
    return the_tweet
项目:bqueryd    作者:visualfabriq    | 项目源码 | 文件源码
def __init__(self, redis_url='redis://127.0.0.1:6379/0', loglevel=logging.INFO):
        """Controller node: binds a ZMQ ROUTER on a random port, records its
        address and pid to run files, and tracks workers/peers in memory.
        """
        self.redis_url = redis_url
        self.redis_server = redis.from_url(redis_url)
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.ROUTER)
        self.socket.setsockopt(zmq.LINGER, 500)
        self.socket.setsockopt(zmq.ROUTER_MANDATORY, 1)  # Paranoid for debugging purposes
        self.socket.setsockopt(zmq.SNDTIMEO, 1000)  # Short timeout
        self.poller = zmq.Poller()
        self.poller.register(self.socket, zmq.POLLIN | zmq.POLLOUT)

        self.node_name = socket.gethostname()
        self.address = bind_to_random_port(self.socket, 'tcp://' + get_my_ip(), min_port=14300, max_port=14399,
                                           max_tries=100)
        # Record the bound address and pid so external tooling can find us.
        with open(os.path.join(RUNFILES_LOCATION, 'bqueryd_controller.address'), 'w') as F:
            F.write(self.address)
        with open(os.path.join(RUNFILES_LOCATION, 'bqueryd_controller.pid'), 'w') as F:
            F.write(str(os.getpid()))

        self.logger = bqueryd.logger.getChild('controller').getChild(self.address)
        self.logger.setLevel(loglevel)

        self.msg_count_in = 0
        self.rpc_results = []  # buffer of results that are ready to be returned to callers
        self.rpc_segments = {}  # Certain RPC calls get split and divided over workers, this dict tracks the original RPCs
        self.worker_map = {}  # maintain a list of connected workers TODO get rid of unresponsive ones...
        self.files_map = {}  # shows on which workers a file is available on
        self.worker_out_messages = {None: []}  # A dict of buffers, used to round-robin based on message affinity
        self.worker_out_messages_sequence = [None]  # used to round-robin the outgoing messages
        self.is_running = True
        self.last_heartbeat = 0
        self.others = {}  # A dict of other Controllers running on other DQE nodes
        self.start_time = time.time()
项目:bqueryd    作者:visualfabriq    | 项目源码 | 文件源码
def get_download_data(self):
        """Return {ticket: hash-contents} for every download ticket in redis."""
        conn = redis.from_url(self.redis_url)
        ticket_keys = set(conn.keys(bqueryd.REDIS_TICKET_KEY_PREFIX + '*'))
        return {ticket: conn.hgetall(ticket) for ticket in ticket_keys}
项目:bqueryd    作者:visualfabriq    | 项目源码 | 文件源码
def delete_download(self, ticket):
        """Delete every field of *ticket*'s hash; return how many were removed."""
        conn = redis.from_url(self.redis_url)
        fields = conn.hgetall(ticket)
        deleted = 0
        for field in fields:
            deleted += conn.hdel(ticket, field)
        return deleted
项目:scrummasterbot    作者:ariyorinoari    | 项目源码 | 文件源码
def setUp(self):
        """Fresh redis connection and mutex for each test."""
        connection = redis.from_url(TestLock.REDIS_URL)
        self.redis = connection
        self.mutex = Mutex(connection, TestLock.KEY)
项目:scrummasterbot    作者:ariyorinoari    | 项目源码 | 文件源码
def test_multi_process(self):
        """Two processes race for the mutex: one locks, the other must fail."""
        mutex = self.mutex

        def lock_success(self):
            # FIX: use TestLock.REDIS_URL / TestLock.KEY — the attribute names
            # setUp uses; the old _REDIS_URL/_KEY names raised AttributeError.
            # Also build the Mutex on this child process's own connection (r)
            # instead of the one inherited across fork, which r was created
            # for but never used.
            r = redis.from_url(TestLock.REDIS_URL)
            m = Mutex(r, TestLock.KEY)
            m.lock()
            self.assertTrue(m.is_lock())
            time.sleep(10)  # hold the lock long enough for the sibling to fail
            m.unlock()

        def lock_error(self):
            r = redis.from_url(TestLock.REDIS_URL)
            m = Mutex(r, TestLock.KEY)
            m.lock()
            self.assertFalse(m.is_lock())

        jobs = [
            Process(target=lock_success, args=(self, )),
            Process(target=lock_error, args=(self, ))
        ]

        for i in jobs:
            i.start()

        for i in jobs:
            i.join()
项目:scrappy    作者:DormyMo    | 项目源码 | 文件源码
def __init__(self, crawler):
        """Redis-backed stats collector configured from crawler settings."""
        settings = crawler.settings
        self._dump = settings.getbool('STATS_DUMP')  # default: STATS_DUMP = True
        self.stats_key = settings.get('STATS_KEY', 'scrappy:stats')
        self.server = redis.from_url(settings.get('REDIS_URL', REDIS_URL))
项目:scrappy    作者:DormyMo    | 项目源码 | 文件源码
def from_settings(settings):
    """Return a redis client; a configured REDIS_URL beats host/port."""
    url = settings.get('REDIS_URL', REDIS_URL)
    host = settings.get('REDIS_HOST', REDIS_HOST)
    port = settings.get('REDIS_PORT', REDIS_PORT)

    if url:
        return redis.from_url(url)
    return redis.Redis(host=host, port=port)
项目:mWorkerService    作者:smices    | 项目源码 | 文件源码
def main(msg, config=None, silent=False):
    """
    Job enqueue
    :param msg:str message payload handed to push_messenger
    :param config:object mapping with the queue DSN at config["queue"]["dsn"]
    :param silent: accepted for interface symmetry; the job is printed and
        returned regardless
    :return: the enqueued rq job
    """
    queue_dsn = config["queue"]["dsn"]
    redis_conn = redis.from_url(queue_dsn)

    # High-priority queue; the job result is kept for 60 seconds.
    q = Queue('high', connection=redis_conn)
    ret = q.enqueue(push_messenger, msg, result_ttl=60)
    print ret
    return ret
项目:mWorkerService    作者:smices    | 项目源码 | 文件源码
def main(config):
    """Run an rq worker over every queue listed in config["listen"]."""
    global worker_config
    worker_config = config

    listen = config["listen"].values()
    conn = redis.from_url(config["queue"]["dsn"])

    with Connection(conn):
        Worker(map(Queue, listen)).work()
项目:mWorkerService    作者:smices    | 项目源码 | 文件源码
def main(config=None):
    """Start an rq worker listening on the configured queues."""
    queue_names = config["listen"].values()
    connection = redis.from_url(config["queue"]["dsn"])

    with Connection(connection):
        Worker(map(Queue, queue_names)).work()
项目:zeus    作者:getsentry    | 项目源码 | 文件源码
def init_app(self, app):
        """Bind this extension to *app*: open redis and adopt its logger."""
        self.logger = app.logger
        self.redis = redis.from_url(app.config['REDIS_URL'])
项目:posm-opendronemap-api    作者:posm    | 项目源码 | 文件源码
def redis_connect(self):
        """Return a client for $REDIS_URL, defaulting to redis://localhost."""
        redis_url = os.environ.get('REDIS_URL', 'redis://')
        return redis.from_url(redis_url)
项目:rqalpha-mod-fxdayu-source    作者:xingetouzi    | 项目源码 | 文件源码
def __init__(self, redis_url):
        """Open a redis client for *redis_url*."""
        # Deferred import keeps redis an optional dependency of this module.
        import redis
        self._client = redis.from_url(redis_url)
项目:rqalpha    作者:ricequant    | 项目源码 | 文件源码
def __init__(self, path, redis_uri):
        """Data source backed by the redis instance at *redis_uri*."""
        super(RedisDataSource, self).__init__(path)
        self._env = Environment.get_instance()
        # Deferred import keeps redis an optional dependency.
        import redis
        self._redis_client = redis.from_url(redis_uri)
项目:flask-caching    作者:sh4nks    | 项目源码 | 文件源码
def redis(app, config, args, kwargs):
    """Factory for a RedisCache wired from the app's CACHE_* settings."""
    try:
        from redis import from_url as redis_from_url
    except ImportError:
        raise RuntimeError('no redis module found')

    kwargs['host'] = config.get('CACHE_REDIS_HOST', 'localhost')
    kwargs['port'] = config.get('CACHE_REDIS_PORT', 6379)

    # Optional settings are forwarded only when configured and truthy.
    optional = (
        ('CACHE_REDIS_PASSWORD', 'password'),
        ('CACHE_KEY_PREFIX', 'key_prefix'),
        ('CACHE_REDIS_DB', 'db'),
    )
    for conf_name, kwarg_name in optional:
        value = config.get(conf_name)
        if value:
            kwargs[kwarg_name] = value

    redis_url = config.get('CACHE_REDIS_URL')
    if redis_url:
        # A full URL wins: hand RedisCache a ready-made client as "host".
        kwargs['host'] = redis_from_url(redis_url, db=kwargs.pop('db', None))

    return RedisCache(*args, **kwargs)