Python asyncio 模块:PriorityQueue() 实例源码

我们从 Python 开源项目中提取了以下 18 个代码示例,用于说明如何使用 asyncio.PriorityQueue()。

项目:aioelasticsearch    作者:wikibusiness    | 项目源码 | 文件源码
def __init__(
        self,
        connections,
        dead_timeout=60,
        timeout_cutoff=5,
        selector_class=RoundRobinSelector,
        randomize_hosts=True,
        *,
        loop,
        **kwargs
    ):
        """Set up live/dead connection bookkeeping for the pool.

        ``connections`` is a sequence of ``(connection, opts)`` pairs.  The
        dead queue is bounded by the number of connections so that every
        connection can be marked dead at the same time.
        """
        self.loop = loop
        self._dead_timeout = dead_timeout
        self.timeout_cutoff = timeout_cutoff
        self.connection_opts = connections
        live = [conn for conn, _ in connections]
        self.connections = live
        self.orig_connections = set(live)
        # NOTE(review): the ``loop`` keyword of asyncio queues was removed in
        # Python 3.10 — this code targets an older asyncio.
        self.dead = asyncio.PriorityQueue(len(live), loop=loop)
        self.dead_count = collections.Counter()

        if randomize_hosts:
            # Shuffle in place so hosts are not always tried in config order.
            random.shuffle(live)

        self.selector = selector_class(dict(connections))
项目:Daniel-Arbuckles-Mastering-Python    作者:PacktPublishing    | 项目源码 | 文件源码
async def using_queues():
    """Demonstrate the three asyncio queue flavours.

    Fix: the original declared this with a plain ``def`` while using
    ``await`` in the body, which is a SyntaxError — a function that awaits
    must be a coroutine (``async def``).
    """
    q = asyncio.Queue()

    q.put_nowait('Hello')

    # The queue is non-empty, so this resolves immediately.
    await q.get()

    await q.put('world')

    q.get_nowait()

    # Demo-only instances of the other queue types; intentionally unused.
    pq = asyncio.PriorityQueue()

    stack = asyncio.LifoQueue()
项目:annotated-py-asyncio    作者:hhstore    | 项目源码 | 文件源码
def test_order(self):
        """A PriorityQueue yields its items smallest-first regardless of
        insertion order."""
        q = asyncio.PriorityQueue(loop=self.loop)
        q.put_nowait(1)
        q.put_nowait(3)
        q.put_nowait(2)

        drained = [q.get_nowait(), q.get_nowait(), q.get_nowait()]
        self.assertEqual([1, 2, 3], drained)
项目:asyncio-mongo-reflection    作者:isanich    | 项目源码 | 文件源码
def __init__(self, loop=None, external_cb=None):
        """Set up the task dispatcher's queues and completion callback.

        ``external_cb``, when callable, is invoked with two positional
        arguments: the task result and the task exception.
        """
        # NOTE(review): asyncio._get_running_loop is a private API; unlike the
        # public get_running_loop it returns None when no loop is running.
        self.loop = loop or asyncio._get_running_loop()
        self.tasks_queue = asyncio.PriorityQueue()
        # Bounded so producers back off when results are not consumed.
        self.results_queue = asyncio.Queue(maxsize=10)
        self._process_next = asyncio.Event()
        self._dispatcher_task = None
        # Pass cb weakref to prevent gc in some cases and let dispatcher
        # finish all tasks.
        if callable(external_cb):
            self._external_cb = external_cb
        else:
            self._external_cb = None
项目:plone.server    作者:plone    | 项目源码 | 文件源码
def __init__(self, settings):
        """Initialise an empty priority work queue and its counters.

        ``settings`` is accepted for interface compatibility but is not
        stored by this initialiser.
        """
        self._total_queued = 0
        self._exceptions = False
        self._queue = asyncio.PriorityQueue()
项目:PSpiderDemos    作者:xianhu    | 项目源码 | 文件源码
def __init__(self, fetcher, parser, saver, url_filter=None, loop=None):
        """Construct the crawl pool on top of BasePool.

        Falls back to ``asyncio.get_event_loop()`` when no loop is supplied.
        """
        BasePool.__init__(self, fetcher, parser, saver, url_filter=url_filter)

        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        # Queue items are (priority, url, keys, deep, repeat) tuples.
        self._queue = asyncio.PriorityQueue(loop=self._loop)

        self._start_time = None     # start time of this pool
        self._running_tasks = 0     # the count of running tasks
项目:Squid-Plugins    作者:tekulvw    | 项目源码 | 文件源码
def __init__(self, bot):
        """Set up the scheduler's state and replay persisted events.

        Events are loaded from ``data/scheduler/events.json`` and re-queued
        by ``_load_events`` as the final step.
        """
        self.bot = bot
        self.to_kill = {}
        self.queue_lock = asyncio.Lock()
        self.queue = asyncio.PriorityQueue(loop=bot.loop)
        self.events = fileIO('data/scheduler/events.json', 'load')
        # Must run last: it consumes self.events to populate the queue.
        self._load_events()
项目:ribosome    作者:tek    | 项目源码 | 文件源码
def __init__(self) -> None:
        """Create the machine with its message queue unset."""
        # NOTE(review): annotated as PriorityQueue but initialised to None —
        # effectively Optional; the real queue is created later once an event
        # loop is available (see _init_asyncio in this file).
        self._messages: asyncio.PriorityQueue = None
项目:ribosome    作者:tek    | 项目源码 | 文件源码
def _init_asyncio(self):
        """Pick or create an event loop and bind the message queue to it."""
        # NOTE(review): asyncio._get_running_loop is a private API that
        # returns None when no loop is running; in that case a fresh loop
        # is created instead of reusing the thread's default loop.
        running = asyncio._get_running_loop()  # type: ignore
        if running is None:
            self._loop = asyncio.new_event_loop()
        else:
            self._loop = asyncio.get_event_loop()
        self._messages = asyncio.PriorityQueue(loop=self._loop)
项目:ribosome    作者:tek    | 项目源码 | 文件源码
def __init__(self, sub: List[MachineBase]=List(), parent=None, name=None, debug=False) -> None:
        """Set up a modular machine with optional sub-machines.

        The message queue is left unset here; it is created later once an
        event loop exists.
        """
        # NOTE(review): List() as a default is evaluated once at definition
        # time — presumably this is the project's immutable List type, in
        # which case the shared-mutable-default pitfall does not apply;
        # confirm against the List implementation.
        self.sub = sub
        self.debug = debug
        self.data = None
        # Placeholder until the queue is bound to a loop (annotated type is
        # aspirational; the value is None until then).
        self._messages: asyncio.PriorityQueue = None
        self.message_log = List()
        ModularMachine.__init__(self, parent, name=name)
项目:cocrawler    作者:cocrawler    | 项目源码 | 文件源码
def __init__(self, loop):
        """Set up the crawl scheduler's queue, rate limiting and budget.

        Fix: the original assigned ``maxhostqps``, ``delta_t`` and
        ``remaining_url_budget`` to None and then unconditionally overwrote
        them a few lines later — the None assignments were dead code and
        have been removed.
        """
        self.q = asyncio.PriorityQueue(loop=loop)
        self.ridealong = {}
        self.awaiting_work = 0
        self.next_fetch = cachetools.ttl.TTLCache(10000, 10)  # 10 seconds good enough for QPS=0.1 and up
        self.frozen_until = cachetools.ttl.TTLCache(10000, 10)  # 10 seconds is longer than our typical delay
        # Per-host rate limit and the corresponding minimum gap between fetches.
        self.maxhostqps = float(config.read('Crawl', 'MaxHostQPS'))
        self.delta_t = 1. / self.maxhostqps
        self.remaining_url_budget = int(config.read('Crawl', 'MaxCrawledUrls') or 0) or None  # 0 => None
项目:cocrawler    作者:cocrawler    | 项目源码 | 文件源码
def load(self, crawler, f):
        """Restore scheduler state from a pickled checkpoint stream ``f``.

        Reads, in order: header, ridealong map, crawler seeds, the queue
        entry count, then that many queue entries.

        SECURITY: ``pickle.load`` can execute arbitrary code from the
        stream — only load checkpoint files from a trusted source.
        """
        header = pickle.load(f)  # XXX check that this is a good header... log it
        self.ridealong = pickle.load(f)
        crawler._seeds = pickle.load(f)
        restored = asyncio.PriorityQueue()
        for _ in range(pickle.load(f)):
            restored.put_nowait(pickle.load(f))
        self.q = restored
项目:csss-minion    作者:henrymzhao    | 项目源码 | 文件源码
def __init__(self, bot):
        """Set up the music player's playback state and background tasks.

        Creates two asyncio tasks as a side effect: the audio player task on
        the bot's loop and a cooldown loop on the default event loop.
        """
        self.current = None            # song currently playing, if any
        self.voice = None              # voice connection, set up elsewhere
        self.bot = bot
        self.play_next_song = asyncio.Event()
        self.skip_votes = set()  # a set of user_ids that voted to skip
        # Started immediately; runs for the lifetime of this player.
        self.audio_player = self.bot.loop.create_task(self.audio_player_task())
        self.playerheat = {}  # keep track of how often each user requests
        self.queue = []  # easily track the songs without messing with threads
        # NOTE(review): queue type is chosen from a bot-level flag —
        # presumably a config toggle for prioritised playback; confirm.
        if self.bot.music_priorityqueue:
            self.songs = asyncio.PriorityQueue()  # gotta keep priority
        else:
            self.songs = asyncio.Queue()
        # NOTE(review): uses the default loop here rather than bot.loop —
        # assumed to be the same loop; verify.
        main_loop = asyncio.get_event_loop()
        main_loop.create_task(self.loop_cooldown())
项目:Chiaki-Nanami    作者:Ikusaba-san    | 项目源码 | 文件源码
def __init__(self, **kwargs):
        """Initialise the parent and create the pending-entry priority queue.

        Fix: the original called ``super().__init(**kwargs)`` — missing the
        trailing underscores — which raises AttributeError at runtime; it
        must be ``super().__init__(**kwargs)``.
        """
        super().__init__(**kwargs)
        self._pending = asyncio.PriorityQueue()

    # We have to override _restart as well because _get removes the entry.
项目:ouroboros    作者:pybee    | 项目源码 | 文件源码
def test_order(self):
        """Verify PriorityQueue returns items in ascending order, not
        insertion order: 1, 3, 2 in — 1, 2, 3 out."""
        q = asyncio.PriorityQueue(loop=self.loop)
        for i in [1, 3, 2]:
            q.put_nowait(i)

        # Drain synchronously; all three items are already available.
        items = [q.get_nowait() for _ in range(3)]
        self.assertEqual([1, 2, 3], items)
项目:kbe_server    作者:xiaohaoppy    | 项目源码 | 文件源码
def test_order(self):
        """Items inserted as 1, 3, 2 must come back sorted: 1, 2, 3."""
        q = asyncio.PriorityQueue(loop=self.loop)
        for value in (1, 3, 2):
            q.put_nowait(value)

        self.assertEqual([1, 2, 3], [q.get_nowait() for _ in range(3)])
项目:asynces    作者:fabregas    | 项目源码 | 文件源码
def __init__(self, connections, *, dead_timeout=60, timeout_cutoff=5,
                 selector_factory=RoundRobinSelector,
                 loop):
        """Set up the pool's dead-connection tracking.

        The dead queue is sized to hold every connection at once.
        """
        self._loop = loop
        self._connections = connections
        self._dead_timeout = dead_timeout
        self._timeout_cutoff = timeout_cutoff
        self._selector = selector_factory(None)
        # NOTE(review): ``loop=`` keyword was removed from asyncio queues in
        # Python 3.10 — this code targets an older asyncio.
        self._dead = asyncio.PriorityQueue(len(connections), loop=loop)
        self._dead_count = collections.Counter()
项目:fcompile    作者:azag0    | 项目源码 | 文件源码
def build(tasks: Dict[Source, Task], dry: bool = False, njobs: int = 1) -> None:
    """Build tasks.

    This is the main entry point.

    :param tasks: mapping of source files to their build tasks
    :param dry: report changed files but do not build
    :param njobs: number of concurrent compile workers

    Fix: the bare ``except:`` has been replaced by an explicit
    ``except BaseException:`` — behaviour is identical (it re-raises),
    but the intent of also catching KeyboardInterrupt/SystemExit is now
    visible and lint-clean.
    """
    print('Scanning files...')
    tree = get_tree(tasks)
    # Load the hash cache from the previous run; a missing or corrupt cache
    # file just means everything gets rebuilt.
    try:
        with open(cachefile) as f:
            hashes: Dict[Filename, Hash] = json.load(f)['hashes']
    except (ValueError, FileNotFoundError):
        hashes = {}
    changed_files = [src for src in tasks if tree.hashes[src] != hashes.get(src)]
    print(f'Changed files: {len(changed_files)}/{len(tasks)}.')
    if not changed_files or dry:
        return
    task_queue: TaskQueue = PriorityQueue()
    result_queue: ResultQueue = Queue()
    loop = asyncio.get_event_loop()
    workers = [
        loop.create_task(worker(task_queue, result_queue))
        for _ in range(njobs)
    ]
    try:
        loop.run_until_complete(
            scheduler(tasks, task_queue, result_queue, tree, hashes, changed_files)
        )
    except CompilationError as e:
        print(f'error: Compilation of {e.source} returned {e.retcode}.')
        raise
    except BaseException:
        # Print a newline so the progress line is terminated before the
        # traceback, then propagate (covers KeyboardInterrupt etc.).
        print()
        raise
    else:
        print()
    finally:
        # Always cancel workers and persist whatever hashes were updated,
        # even when the build failed or was interrupted.
        for tsk in workers:
            tsk.cancel()
        with open(cachefile, 'w') as f:
            json.dump({'hashes': hashes}, f)
        if DEBUG:
            print_clocks()