Python asyncio module: StreamWriter() example source code

The following 31 code examples, extracted from open-source Python projects, illustrate how to use asyncio.StreamWriter().
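Most of the examples below share one low-level pattern: create an asyncio.StreamReader, wrap it in an asyncio.StreamReaderProtocol, open a transport with that protocol factory, and finally combine transport, protocol and reader into an asyncio.StreamWriter. A minimal sketch of that pattern over a plain TCP connection (host and port are placeholders); this is essentially what asyncio.open_connection() does internally:

import asyncio

async def open_tcp_stream(host, port, *, limit=2 ** 16):
    # Build the (reader, writer) pair by hand instead of calling
    # asyncio.open_connection().
    loop = asyncio.get_event_loop()
    reader = asyncio.StreamReader(limit=limit, loop=loop)
    protocol = asyncio.StreamReaderProtocol(reader, loop=loop)
    transport, _ = await loop.create_connection(lambda: protocol, host, port)
    writer = asyncio.StreamWriter(transport, protocol, reader, loop)
    return reader, writer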

Project: azmq    Author: ereOn
async def open_pipe_connection(
    path=None,
    *,
    loop=None,
    limit=DEFAULT_LIMIT,
    **kwargs
):
    """
    Connect to a server using a Windows named pipe.
    """
    path = path.replace('/', '\\')
    loop = loop or asyncio.get_event_loop()

    reader = asyncio.StreamReader(limit=limit, loop=loop)
    protocol = asyncio.StreamReaderProtocol(reader, loop=loop)
    transport, _ = await loop.create_pipe_connection(
        lambda: protocol,
        path,
        **kwargs
    )
    writer = asyncio.StreamWriter(transport, protocol, reader, loop)

    return reader, writer
Project: halite-starter-python3-alt    Author: dvdotsenko
async def get_stdout(loop, exit_callbacks):
    """
    :param loop: the event loop used to create the write-pipe transport
    :param exit_callbacks: A list of async callables to run before loop.close()
    :return: a writer wrapping a duplicate of ``sys.stdout``
    :rtype: asyncio.StreamWriter
    """

    stdout_fio = os.fdopen(os.dup(sys.stdout.fileno()), 'wb')
    writer_transport, writer_protocol = await loop.connect_write_pipe(
        asyncio.streams.FlowControlMixin,
        stdout_fio
    )

    # async def _close_fio():
    #     stdout_fio.close()
    #
    # exit_callbacks.append(
    #     _close_fio
    # )

    # async def _close_transport():
    #     writer_transport.close()
    #
    # exit_callbacks.append(
    #     _close_transport
    # )

    stdout = asyncio.StreamWriter(writer_transport, writer_protocol, None, loop)
    return stdout
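A hedged usage sketch for the helper above: write a line to the wrapped stdout and drain it so the transport's flow control is respected (the surrounding event-loop setup is assumed):

async def demo_stdout(loop):
    exit_callbacks = []                    # async callables to run before loop.close()
    stdout = await get_stdout(loop, exit_callbacks)
    stdout.write(b'hello from the asyncio stdout writer\n')
    await stdout.drain()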
Project: py-evm    Author: ethereum
async def connect(self) -> Tuple[asyncio.StreamReader, asyncio.StreamWriter]:
        return await asyncio.open_connection(
            host=self.remote.address.ip, port=self.remote.address.tcp_port)
Project: big-buck-asyncio    Author: qntln
async def _handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
        '''
        Handle new client and create session for it
        '''
        ip_address = writer.get_extra_info('peername')
        session_id = uuid.uuid1().hex
        session = player.session.Session(session_id, reader, writer, self.filename)

        self._logger.info('New session = %s is created, from = %s', session_id, ip_address)
        self._sessions[session_id] = session

        try:
            await session.run()
        except (ConnectionResetError, BrokenPipeError,):
            pass
        finally:
            await self._killSession(session_id)
            self._logger.info('Session = %s was closed from = %s', session_id, ip_address)
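For context, a hedged sketch of how such a handler is typically wired up; the method name and attribute are assumptions, but asyncio.start_server() does call the handler with a (StreamReader, StreamWriter) pair for every accepted connection:

async def start(self, host: str, port: int) -> None:
    # Every accepted connection is handed to _handle_client as (reader, writer).
    self._server = await asyncio.start_server(self._handle_client, host, port)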
Project: big-buck-asyncio    Author: qntln
def __init__(self, session_id: str, reader: asyncio.StreamReader, writer: asyncio.StreamWriter, filename: str):
        self._logger = logging.getLogger('{}[{}]'.format(self.__class__.__name__, session_id))
        self.session_id = session_id
        self.filename = filename
        self._writer = writer
        self._reader = reader

        self._run_future = None # type: asyncio.Future
Project: simple_ws    Author: WSnettverksprog
def __init__(self, server: WebSocket, reader: asyncio.StreamReader, writer: asyncio.StreamWriter, buffer_size: int):
        self.server = server
        self.reader = reader
        self.writer = writer
        self.buffer_size = buffer_size
        self.status = Client.CONNECTING
        self.sending_continuous = False
        self._close_sent = False
        self.__close_received = False
        self.__frame_reader = FrameReader()
        self.__pong_received = False
        self.__last_frame_received = time.time()
        self.rec = 0

        # Create async task to handle client data
        loop.create_task(self.__wait_for_data())
        # Create async task to send pings
        if self.server.ping:
            loop.create_task(self.__send_ping())
Project: aiotk    Author: AndreLouisCaron
def mempipe(loop=None, limit=None):
    """In-memory pipe, returns a ``(reader, writer)`` pair.

    .. versionadded:: 0.1
    """

    loop = loop or asyncio.get_event_loop()
    limit = limit or _DEFAULT_LIMIT

    reader = asyncio.StreamReader(loop=loop, limit=limit)
    writer = asyncio.StreamWriter(
        transport=_MemoryTransport(reader),
        protocol=asyncio.StreamReaderProtocol(reader, loop=loop),
        reader=reader,
        loop=loop,
    )
    return reader, writer
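A possible usage sketch, assuming _MemoryTransport forwards written bytes straight into the paired reader's buffer (which is what an in-memory transport of this kind is expected to do):

async def demo_mempipe():
    reader, writer = mempipe()
    writer.write(b'ping\n')              # bytes land directly in the reader
    line = await reader.readline()
    assert line == b'ping\n'
    writer.close()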
Project: halite-starter-python3-alt    Author: dvdotsenko
async def get_stdout(self):
        """
        Call this **before** `get_stdin`
        :return:
        :rtype: asyncio.StreamWriter
        """

        if not self._stdout:
            self._stdout = await get_stdout(self.loop, self.exit_callbacks)

        return self._stdout
Project: photon-pump    Author: madedotcom
def __init__(
            self,
            writer: asyncio.StreamWriter,
            pending: Dict[UUID, Operation], loop):
        self.queue = asyncio.Queue(maxsize=100, loop=loop)
        self.pending_operations = pending
        self.writer = writer
        self.running = True
        self.write_loop = asyncio.ensure_future(self._process())
Project: photon-pump    Author: madedotcom
def close(self):
        """Close the underlying StreamWriter and cancel pending Operations."""
        self.writer.close()
        self.running = False
        self.write_loop.cancel()
Project: MoMMI    Author: PJB3005
def __init__(self):
        self.server: asyncio.Server = None
        self.clients: Dict[asyncio.Task, Tuple[asyncio.StreamReader, asyncio.StreamWriter]] = {}
Project: MoMMI    Author: PJB3005
def accept_client(self, client_reader: asyncio.StreamReader, client_writer: asyncio.StreamWriter):
        logger.info("Accepting new client!")
        task: asyncio.Task = asyncio.Task(self.handle_client(client_reader, client_writer))
        self.clients[task] = (client_reader, client_writer)

        def client_done(task):
            logger.info("Dropping client connection.")
            del self.clients[task]

        task.add_done_callback(client_done)
Project: mt7687-serial-uploader    Author: will127534
def open_serial_connection(**kwargs):
    """A wrapper for create_serial_connection() returning a (reader,
    writer) pair.

    The reader returned is a StreamReader instance; the writer is a
    StreamWriter instance.

    The arguments are all the usual arguments to Serial(). Additional
    optional keyword arguments are loop (to set the event loop instance
    to use) and limit (to set the buffer limit passed to the
    StreamReader).

    This function is a coroutine.
    """
    # in order to avoid errors when pySerial is installed under Python 2,
    # avoid Python 3 syntax here. So do not use this function as a good
    # example!
    loop = kwargs.get('loop', asyncio.get_event_loop())
    limit = kwargs.get('limit', asyncio.streams._DEFAULT_LIMIT)
    reader = asyncio.StreamReader(limit=limit, loop=loop)
    protocol = asyncio.StreamReaderProtocol(reader, loop=loop)
    # in Python 3 we would write "yield transport, _ from c()"
    for transport, _ in create_serial_connection(
            loop=loop,
            protocol_factory=lambda: protocol,
            **kwargs):
        yield transport, _
    writer = asyncio.StreamWriter(transport, protocol, reader, loop)
    # in Python 3 we would write "return reader, writer"
    raise StopIteration(reader, writer)
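A hedged usage sketch for the wrapper above, assuming it is decorated with @asyncio.coroutine as in upstream pyserial-asyncio (the decorator is not shown in this listing); the serial settings are placeholders:

async def read_one_line():
    reader, writer = await open_serial_connection(url='/dev/ttyUSB0',
                                                  baudrate=115200)
    line = await reader.readline()       # read until b'\n' from the serial port
    writer.close()
    return line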


Project: ammoo    Author: mwfrojdman
async def _drain_writer(self) -> None:
        # need to wrap StreamWriter.drain in a lock to avoid AssertionError in FlowControlMixin._drain_helper
        # https://groups.google.com/forum/#!topic/python-tulip/JA0-FC_pliA
        async with self._drain_lock:
            await self._writer.drain()
        return
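A minimal sketch of the setup this method relies on: a single asyncio.Lock shared by every coroutine that drains the same writer, so only one drain() call is in flight at a time (class and attribute names here are illustrative):

import asyncio

class LockedWriter:
    def __init__(self, writer: asyncio.StreamWriter):
        self._writer = writer
        self._drain_lock = asyncio.Lock()

    async def send(self, data: bytes) -> None:
        self._writer.write(data)
        async with self._drain_lock:     # serialize drain() calls
            await self._writer.drain()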
Project: ammoo    Author: mwfrojdman
def build_frame(writer: StreamWriter, frame_type: int, channel_id: int, payload):
    payload_size = len(payload)
    writer.write(pack('!BHI', frame_type, channel_id, payload_size))
    writer.write(payload)
    writer.write(FRAME_END)
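The header written by pack('!BHI', ...) is 7 bytes: a 1-byte frame type, a 2-byte channel id and a 4-byte payload size, followed by the payload and a single frame-end octet. For reference, a hedged sketch of the matching read side (the FRAME_END value, 0xCE as in AMQP, is an assumption here):

from struct import unpack

FRAME_END = b'\xce'                      # assumed AMQP frame-end octet

async def read_frame(reader):
    frame_type, channel_id, payload_size = unpack('!BHI', await reader.readexactly(7))
    payload = await reader.readexactly(payload_size)
    if await reader.readexactly(1) != FRAME_END:
        raise ValueError('missing frame-end octet')
    return frame_type, channel_id, payload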
Project: bit-torrent    Author: borzunov
def __init__(self, our_peer_id: bytes, peer: Peer):
        self._our_peer_id = our_peer_id
        self._peer = peer

        self._logger = logging.getLogger('[{}]'.format(peer))
        self._logger.setLevel(PeerTCPClient.LOGGER_LEVEL)

        self._download_info = None   # type: DownloadInfo
        self._file_structure = None  # type: FileStructure
        self._piece_owned = None     # type: bitarray

        self._am_choking = True
        self._am_interested = False
        self._peer_choking = True
        self._peer_interested = False

        self._downloaded = 0
        self._uploaded = 0

        self._reader = None               # type: asyncio.StreamReader
        self._writer = None               # type: asyncio.StreamWriter
        self._connected = False
Project: bit-torrent    Author: borzunov
async def accept(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> bytes:
        self._reader = reader
        self._writer = writer

        self._send_protocol_data()

        await self._receive_protocol_data()
        return await self._receive_info()
Project: bit-torrent    Author: borzunov
async def _accept(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
        addr = writer.get_extra_info('peername')
        peer = Peer(addr[0], addr[1])

        client = PeerTCPClient(self._our_peer_id, peer)

        try:
            info_hash = await client.accept(reader, writer)
            if info_hash not in self._torrent_managers:
                raise ValueError('Unknown info_hash')
        except Exception as e:
            client.close()

            if isinstance(e, asyncio.CancelledError):
                raise
            else:
                logger.debug("%s wasn't accepted because of %r", peer, e)
        else:
            self._torrent_managers[info_hash].accept_client(peer, client)
Project: bit-torrent    Author: borzunov
def send_object(obj: Any, writer: asyncio.StreamWriter):
        data = pickle.dumps(obj)
        length_data = struct.pack(ControlServer.LENGTH_FMT, len(data))
        writer.write(length_data)
        writer.write(data)
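A hedged sketch of the counterpart receive_object() (the real one lives on ControlServer); it assumes LENGTH_FMT is a fixed-size struct format such as '!I', matching the length prefix written above:

import asyncio
import pickle
import struct

LENGTH_FMT = '!I'                        # assumed 4-byte big-endian length prefix

async def receive_object(reader: asyncio.StreamReader):
    prefix = await reader.readexactly(struct.calcsize(LENGTH_FMT))
    (length,) = struct.unpack(LENGTH_FMT, prefix)
    return pickle.loads(await reader.readexactly(length))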
Project: bit-torrent    Author: borzunov
async def _accept(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
        addr_repr = ':'.join(map(str, writer.get_extra_info('peername')))
        logger.info('accepted connection from %s', addr_repr)

        try:
            writer.write(ControlServer.HANDSHAKE_MESSAGE)

            while True:
                # FIXME: maybe do not allow to execute arbitrary object
                action = cast(Callable[[ControlManager], Any], await ControlServer.receive_object(reader))

                try:
                    result = action(self._control)
                    if asyncio.iscoroutine(result):
                        result = await result
                except asyncio.CancelledError:
                    raise
                except Exception as e:
                    result = e

                ControlServer.send_object(result, writer)

                if isinstance(result, DaemonExit):
                    logger.info('stop command received')
                    if self._daemon_stop_handler is not None:
                        self._daemon_stop_handler(self)
                    return
        except asyncio.IncompleteReadError:
            pass
        except asyncio.CancelledError:
            raise
        except Exception as e:
            logger.warning('%s disconnected because of %r', addr_repr, e)
        finally:
            writer.close()
Project: rbd2qcow2    Author: socketpair
def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter, flags: int,
                 process: Process):
        self._writer = writer
        self._handle = 0
        self._flags = flags
        self._tasks = dict()  # type: Dict[int, Tuple(asyncio.Future, int)]
        self._disconnect_was_sent = False
        self._process = process  # steal reference counter

        if not (self._flags & NBD_FLAG_SEND_WRITE_ZEROES):
            log.warning('Writing of zeroes is not supported by NBD server. Simulating using generic write.')

        self._reader_task = asyncio.ensure_future(self._response_reader(reader))
Project: imouto    Author: Hanaasagi
async def _parse_request(self, request_reader: asyncio.StreamReader,
                             response_writer: asyncio.StreamWriter) -> Request:
        """parse data from StreamReader and build the request object
        """
        limit = 2 ** 16
        req = Request()
        parser = HttpRequestParser(req)

        while True:
            data = await request_reader.read(limit)
            parser.feed_data(data)
            if req.finished or not data:
                break
            elif req.needs_write_continue:
                response_writer.write(b'HTTP/1.1 100 (Continue)\r\n\r\n')
                req.reset_state()

        req.method = touni(parser.get_method()).upper()
        return req
Project: imouto    Author: Hanaasagi
async def __call__(self, request_reader: asyncio.StreamReader,
                       response_writer: asyncio.StreamWriter):
        try:
            req = await self._parse_request(request_reader, response_writer)
            handler_class, args, kwargs = self._find_handler(req.path)
            try:
                res = await self._execute(handler_class, req, args, kwargs)
            except HTTPError as e:
                res = self._handle_error(e)
        except Exception as e:
            res = self._handle_error(e)

        # output the access log
        log(status_code=res.status_code, method=req.method,
            path=req.path, query_string=req.query_string)
        self._write_response(res, response_writer)
        await response_writer.drain()
        response_writer.close()
Project: imouto    Author: Hanaasagi
def _write_response(self, res, writer: asyncio.StreamWriter):
        """get chunk from Response object and build http resposne"""
        writer.write(res.output())
        writer.write_eof()
Project: urban-journey    Author: urbanjourney
def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter, loop=None):
        self.loop = loop or event_loop.get()  #: Event loop onto which the host is running.
        self.close_event
        self.reader = reader  #: :class:`asyncio.StreamReader` object of the connection.
        self.writer = writer  #: :class:`asyncio.StreamWriter` object of the connection.
        self.decoder = Decoder(self.close)  #: Decoder object.

        self.running = True  #: True if the connection is listening for packages.

        self.closing_semaphore = asyncio.Semaphore(0)
        self.ping_semaphore = asyncio.Semaphore(0)

        # Create and load in commands dictionary.
        self.__command_dictionary = {}
        for member_name in dir(self):
            member = inspect.getattr_static(self, member_name)
            if isinstance(member, NetworkCommandBase):
                if member.command_id in self.__command_dictionary:
                    raise ValueError("Network command ids must be unique. '{}' and '{}' have the same id '{}'.".format(
                        member.name,
                        self.__command_dictionary[member.command_id].func.__name__,
                        member.command_id
                    ))
                else:
                    self.__command_dictionary[member.command_id] = functools.partial(member.handler_func, self)
                    self.__dict__["transmit_" + member.name] = functools.partial(self.transmit, member.command_id)

        self.handshake_data = None

        self.__ready_condition = asyncio.Condition()
        self.ready = False

        self.__closed_condition = asyncio.Condition()

        self.__identify_semaphore = asyncio.Semaphore(0)

        self.hostname = socket.gethostname()
        self.remote_hostname = None

        # Start co-routines
        asyncio.run_coroutine_threadsafe(self.package_handler(), self.loop)
        asyncio.run_coroutine_threadsafe(self.data_reader(), self.loop)
        asyncio.run_coroutine_threadsafe(self.handshake(), self.loop)
Project: urban-journey    Author: urbanjourney
def create_connection(self, reader, writer):
        """
        Create a new connection.

        :param asyncio.StreamReader reader: Reader object
        :param asyncio.StreamWriter writer: Writer object
        :return: Connection object.
        """
        connection = Connection(reader, writer)
        # nlog.debug(self.log_prefix + "New connection '{}'".format(connection.name))

        self.connections(connection)
Project: py-evm    Author: ethereum
def __init__(self,
                 remote: Node,
                 privkey: datatypes.PrivateKey,
                 reader: asyncio.StreamReader,
                 writer: asyncio.StreamWriter,
                 aes_secret: bytes,
                 mac_secret: bytes,
                 egress_mac: sha3.keccak_256,
                 ingress_mac: sha3.keccak_256,
                 chaindb: BaseChainDB,
                 network_id: int,
                 received_msg_callback: Optional[_ReceivedMsgCallbackType] = None
                 ) -> None:
        self._finished = asyncio.Event()
        self._pending_replies = {}  # type: Dict[int, Callable[[protocol._DecodedMsgType], None]]
        self.remote = remote
        self.privkey = privkey
        self.reader = reader
        self.writer = writer
        self.base_protocol = P2PProtocol(self)
        self.chaindb = chaindb
        self.network_id = network_id
        self.received_msg_callback = received_msg_callback
        # The sub protocols that have been enabled for this peer; will be populated when
        # we receive the initial hello msg.
        self.enabled_sub_protocols = []  # type: List[protocol.Protocol]

        self.egress_mac = egress_mac
        self.ingress_mac = ingress_mac
        # FIXME: Yes, the encryption is insecure, see: https://github.com/ethereum/devp2p/issues/32
        iv = b"\x00" * 16
        aes_cipher = Cipher(algorithms.AES(aes_secret), modes.CTR(iv), default_backend())
        self.aes_enc = aes_cipher.encryptor()
        self.aes_dec = aes_cipher.decryptor()
        mac_cipher = Cipher(algorithms.AES(mac_secret), modes.ECB(), default_backend())
        self.mac_enc = mac_cipher.encryptor().update
Project: py-evm    Author: ethereum
async def handshake(remote: kademlia.Node, privkey: datatypes.PrivateKey) -> Tuple[
        bytes, bytes, sha3.keccak_256, sha3.keccak_256, asyncio.StreamReader, asyncio.StreamWriter]:
    """
    Perform the auth handshake with given remote.

    Returns the established secrets and the StreamReader/StreamWriter pair already connected to
    the remote.
    """
    initiator = HandshakeInitiator(remote, privkey)
    reader, writer = await initiator.connect()
    aes_secret, mac_secret, egress_mac, ingress_mac = await _handshake(
        initiator, reader, writer)
    return aes_secret, mac_secret, egress_mac, ingress_mac, reader, writer
Project: py-evm    Author: ethereum
async def _handshake(initiator: 'HandshakeInitiator', reader: asyncio.StreamReader,
                     writer: asyncio.StreamWriter
                     ) -> Tuple[bytes, bytes, sha3.keccak_256, sha3.keccak_256]:
    """See the handshake() function above.

    This code was factored out into this helper so that we can create Peers with directly
    connected readers/writers for our tests.
    """
    initiator_nonce = keccak(os.urandom(HASH_LEN))
    auth_msg = initiator.create_auth_message(initiator_nonce)
    auth_init = initiator.encrypt_auth_message(auth_msg)
    writer.write(auth_init)

    auth_ack = await reader.read(ENCRYPTED_AUTH_ACK_LEN)

    ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message(auth_ack)
    aes_secret, mac_secret, egress_mac, ingress_mac = initiator.derive_secrets(
        initiator_nonce,
        responder_nonce,
        ephemeral_pubkey,
        auth_init,
        auth_ack
    )

    return aes_secret, mac_secret, egress_mac, ingress_mac
Project: aiotk    Author: AndreLouisCaron
def mock_subprocess(run, loop=None):
    """Calls ``run()`` instead of spawning a sub-process.

    :param run: A coroutine function that simulates the sub-process.  Can
     return ``None`` or ``0`` to simulate successful process execution or a
     non-zero error code to simulate sub-process terminate with a non-zero exit
     code.  If an exception is raised, the result is 1 (non-zero).  This
     function can accept a variable number of arguments, see below.

    Dependency injection is used with the ``run()`` coroutine function to pass
    only arguments that are declared in the function's signature.  Omit all but
    the arguments you intend to use.  Here are all the available arguments:

    - ``argv``: a list of strings passed as positional arguments to
      ``asyncio.create_subprocess_exec()``.
    - ``stdin``: an ``asyncio.StreamReader`` instance.  When output is not
      redirected, this reads from the "real" ``sys.stdin``.
    - ``stdout``: an ``asyncio.StreamWriter`` instance.  When output is not
      redirected, this writes to the "real" ``sys.stdout``.
    - ``stderr``: an ``asyncio.StreamWriter`` instance.  When output is not
      redirected, this writes to the "real" ``sys.stderr``.
    - ``env``: a ``dict`` containing environment variables passed to
      ``asyncio.create_subprocess_exec()``.
    - ``signals``: an ``asyncio.Queue`` object that receives integers passed to
      ``asyncio.Process.send_signal()``.
    - ``kwds``: extra keyword arguments passed to
      ``asyncio.create_subprocess_exec()``.

    .. versionadded: 0.1

    """

    loop = loop or asyncio.get_event_loop()

    pid = count(start=1)

    def create_subprocess_exec(*args, stdin=None, stdout=None, env=None,
                               stderr=None, loop=None, limit=None, **kwds):
        """Mock for ``asyncio.create_subprocess_exec()``."""
        loop = loop or asyncio.get_event_loop()
        f = asyncio.Future()
        process = Process(
            pid=next(pid),
            run=run,
            loop=loop,
            argv=list(args),
            stdin=stdin,
            stdout=stdout,
            stderr=stderr,
            env=env,
            limit=limit,
            kwds=kwds,
        )
        loop.call_soon(f.set_result, process)
        return f

    with mock.patch('asyncio.create_subprocess_exec', create_subprocess_exec):
        yield
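A hedged usage sketch for mock_subprocess(): the simulated run() function only declares the injected arguments it needs, and any code spawning a sub-process inside the with-block calls it instead. The mock process object is assumed to expose the usual wait() coroutine, and the tool name is a placeholder:

async def fake_tool(argv, stdout):
    # Only the parameters named in the signature are injected.
    stdout.write(' '.join(argv).encode() + b'\n')
    return 0                             # simulated successful exit code

async def demo(loop):
    with mock_subprocess(fake_tool, loop=loop):
        process = await asyncio.create_subprocess_exec('some-tool', '--flag')
        await process.wait()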
Project: MoMMI    Author: PJB3005
async def handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
        data: bytes = await reader.read(2)  # Read ID.
        if data != b"\x30\x05":
            writer.write(ERROR_ID)
            return

        logger.info(f"Got ID packets: {data}.")

        auth: bytes = await reader.read(DIGEST_SIZE)
        logger.info(f"Got digest: {auth}.")

        length: int = struct.unpack("!I", await reader.read(4))[0]
        data = b""
        while len(data) < length:
            newdata = await reader.read(length - len(data))
            if len(newdata) == 0:
                break
            data += newdata
        logger.info(f"Got message ength: {length}, data: {data}.")
        try:
            logger.info(f"Decoded: {data.decode('UTF-8')}")
            message: Dict[str, Any] = json.loads(data.decode("UTF-8"))
            logger.info(f"Loaded: {message}")
            # Any of these will throw a KeyError with broken packets.
            message["type"], message["meta"], message["cont"]
        except:
            logger.exception("hrrm")
            writer.write(ERROR_PACK)
            return

        stomach: hmac.HMAC = hmac.new(AUTHKEY, data, sha512)
        if not hmac.compare_digest(stomach.digest(), auth):
            writer.write(ERROR_HMAC)
            return

        logger.info(message)
        writer.write(ERROR_OK)

        for event in events:
            try:
                await event(message)
            except:
                logger.exception("Caught exception inside commloop event handler.")
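For completeness, a hedged sketch of a client speaking the wire format expected above: the two magic bytes b'\x30\x05', an HMAC-SHA512 digest of the JSON payload keyed with the shared AUTHKEY, a 4-byte big-endian length prefix, and then the payload, whose object must contain the 'type', 'meta' and 'cont' keys. The key value, server address and reply size are assumptions:

import asyncio
import hmac
import json
import struct
from hashlib import sha512

AUTHKEY = b'shared-secret'               # must match the server's key (assumption)

async def send_commloop_message(host, port, message):
    reader, writer = await asyncio.open_connection(host, port)
    payload = json.dumps(message).encode('UTF-8')
    writer.write(b'\x30\x05')                          # ID packet
    writer.write(hmac.new(AUTHKEY, payload, sha512).digest())
    writer.write(struct.pack('!I', len(payload)))      # length prefix
    writer.write(payload)
    await writer.drain()
    status = await reader.read(1)                      # reply size is an assumption
    writer.close()
    return status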