2018-11-12 06:15:55 +08:00
|
|
|
import asyncio
from typing import Dict, Optional, Tuple

from multiaddr import Multiaddr

from libp2p.network.connection.raw_connection_interface import IRawConnection
from libp2p.network.typing import GenericProtocolHandlerFn
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.stream_muxer.abc import IMuxedConn, IMuxedStream

from .constants import HeaderTags
from .mplex_stream import MplexStream
from .utils import decode_uvarint_from_stream, encode_uvarint
|
2018-11-01 05:31:00 +08:00
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
|
2018-11-21 09:28:41 +08:00
|
|
|
class Mplex(IMuxedConn):
    """
    Mplex stream multiplexer: multiplexes many logical streams over a single
    secured connection, framing each message with a (stream_id, flag) header.

    reference: https://github.com/libp2p/go-mplex/blob/master/multiplex.go
    """

    # The secured (authenticated/encrypted) connection this muxer runs over.
    secured_conn: ISecureConn
    # Underlying raw connection extracted from secured_conn; all reads/writes
    # in this class go through its reader/writer.
    raw_conn: IRawConnection
    # True if the local side initiated the underlying raw connection.
    initiator: bool
    # Peer ID of the remote peer this connection is to.
    peer_id: ID
    # Mapping from stream ID -> queue of inbound message payloads for that
    # stream, filled by handle_incoming and drained by read_buffer.
    buffers: Dict[int, "asyncio.Queue[bytes]"]
    # Queue of stream IDs for inbound streams awaiting accept_stream().
    stream_queue: "asyncio.Queue[int]"
|
|
    def __init__(
        self,
        secured_conn: ISecureConn,
        generic_protocol_handler: GenericProtocolHandlerFn,
        peer_id: ID,
    ) -> None:
        """
        create a new muxed connection

        :param secured_conn: an instance of secured connection wrapping the
            raw connection this muxer multiplexes over
        :param generic_protocol_handler: generic protocol handler
            for new muxed streams
        :param peer_id: peer_id of peer the connection is to
        """
        super().__init__(secured_conn, generic_protocol_handler, peer_id)

        self.secured_conn = secured_conn
        self.raw_conn = secured_conn.get_conn()
        self.initiator = self.raw_conn.initiator

        # Store generic protocol handler
        self.generic_protocol_handler = generic_protocol_handler

        # Set peer_id
        self.peer_id = peer_id

        # Mapping from stream ID -> buffer of messages for that stream
        self.buffers = {}

        self.stream_queue = asyncio.Queue()

        # Kick off reading; buffers and stream_queue must exist before this
        # task starts, since handle_incoming writes into both.
        asyncio.ensure_future(self.handle_incoming())
2019-08-02 17:53:51 +08:00
|
|
|
    def close(self) -> None:
        """
        close the stream muxer and underlying raw connection
        """
        # NOTE(review): only the raw connection is closed here; per-stream
        # buffers and the handle_incoming task are not cleaned up.
        self.raw_conn.close()
|
|
2019-08-02 17:53:51 +08:00
|
|
|
    def is_closed(self) -> bool:
        """
        check connection is fully closed

        :return: true if successful
        :raises NotImplementedError: always — not yet implemented
        """
        raise NotImplementedError()
|
2018-11-01 05:31:00 +08:00
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
async def read_buffer(self, stream_id: int) -> bytes:
|
2018-11-29 01:58:16 +08:00
|
|
|
"""
|
|
|
|
Read a message from stream_id's buffer, check raw connection for new messages
|
|
|
|
:param stream_id: stream id of stream to read from
|
|
|
|
:return: message read
|
|
|
|
"""
|
2018-11-13 00:00:43 +08:00
|
|
|
# TODO: propagate up timeout exception and catch
|
2019-02-25 09:58:23 +08:00
|
|
|
# TODO: pass down timeout from user and use that
|
2018-11-29 01:58:16 +08:00
|
|
|
if stream_id in self.buffers:
|
2019-02-25 09:58:23 +08:00
|
|
|
try:
|
2019-03-24 01:52:02 +08:00
|
|
|
data = await asyncio.wait_for(self.buffers[stream_id].get(), timeout=8)
|
2019-02-25 09:58:23 +08:00
|
|
|
return data
|
|
|
|
except asyncio.TimeoutError:
|
|
|
|
return None
|
2018-11-13 00:00:43 +08:00
|
|
|
|
2019-02-25 09:58:23 +08:00
|
|
|
# Stream not created yet
|
2018-11-29 01:58:16 +08:00
|
|
|
return None
|
|
|
|
|
2019-08-05 11:21:20 +08:00
|
|
|
async def open_stream(self, protocol_id: str, multi_addr: Multiaddr) -> IMuxedStream:
|
2018-11-01 05:31:00 +08:00
|
|
|
"""
|
|
|
|
creates a new muxed_stream
|
2018-11-13 02:02:49 +08:00
|
|
|
:param protocol_id: protocol_id of stream
|
|
|
|
:param multi_addr: multi_addr that stream connects to
|
2018-11-01 05:31:00 +08:00
|
|
|
:return: a new stream
|
|
|
|
"""
|
2018-11-30 02:42:05 +08:00
|
|
|
stream_id = self.raw_conn.next_stream_id()
|
2018-11-21 09:28:41 +08:00
|
|
|
stream = MplexStream(stream_id, multi_addr, self)
|
2018-11-29 01:58:16 +08:00
|
|
|
self.buffers[stream_id] = asyncio.Queue()
|
2019-08-02 17:14:43 +08:00
|
|
|
await self.send_message(HeaderTags.NewStream, None, stream_id)
|
2018-11-12 06:15:55 +08:00
|
|
|
return stream
|
2018-11-12 00:52:26 +08:00
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
    async def accept_stream(self) -> None:
        """
        accepts a muxed stream opened by the other end
        """
        # Block until handle_incoming announces a new inbound stream id.
        stream_id = await self.stream_queue.get()
        # NOTE(review): ``False`` is passed where open_stream passes a
        # Multiaddr — inbound streams have no known multiaddr here; confirm
        # MplexStream tolerates a non-Multiaddr value in that position.
        stream = MplexStream(stream_id, False, self)
        # Hand the new stream to the protocol handler on its own task.
        asyncio.ensure_future(self.generic_protocol_handler(stream))
|
2018-11-12 06:15:55 +08:00
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
async def send_message(self, flag: HeaderTags, data: bytes, stream_id: int) -> int:
|
2018-11-12 06:15:55 +08:00
|
|
|
"""
|
|
|
|
sends a message over the connection
|
|
|
|
:param header: header to use
|
|
|
|
:param data: data to send in the message
|
2018-11-12 06:38:11 +08:00
|
|
|
:param stream_id: stream the message is in
|
2018-11-12 06:15:55 +08:00
|
|
|
"""
|
2018-11-12 06:38:11 +08:00
|
|
|
# << by 3, then or with flag
|
2019-08-02 17:14:43 +08:00
|
|
|
header = (stream_id << 3) | flag.value
|
2018-11-12 06:38:11 +08:00
|
|
|
header = encode_uvarint(header)
|
2018-11-29 01:58:16 +08:00
|
|
|
|
2018-11-19 00:22:17 +08:00
|
|
|
if data is None:
|
|
|
|
data_length = encode_uvarint(0)
|
|
|
|
_bytes = header + data_length
|
|
|
|
else:
|
|
|
|
data_length = encode_uvarint(len(data))
|
|
|
|
_bytes = header + data_length + data
|
2018-11-13 00:00:43 +08:00
|
|
|
|
|
|
|
return await self.write_to_stream(_bytes)
|
2018-11-12 06:38:11 +08:00
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
    async def write_to_stream(self, _bytes: bytes) -> int:
        """
        writes a byte array to the underlying raw connection

        :param _bytes: bytes to write
        :return: length written
        """
        self.raw_conn.writer.write(_bytes)
        # Wait until the transport buffer has drained before reporting success.
        await self.raw_conn.writer.drain()
        return len(_bytes)
|
2018-11-12 06:15:55 +08:00
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
    async def handle_incoming(self) -> None:
        """
        Read a message off of the raw connection and add it to the corresponding message buffer
        """
        # TODO Deal with other types of messages using flag (currently _)

        while True:
            stream_id, flag, message = await self.read_message()

            # read_message returns (None, None, None) on timeout; skip those.
            if stream_id is not None and flag is not None and message is not None:
                if stream_id not in self.buffers:
                    # First frame for an unknown stream: create its buffer and
                    # announce the inbound stream id to accept_stream().
                    self.buffers[stream_id] = asyncio.Queue()
                    await self.stream_queue.put(stream_id)

                if flag == HeaderTags.NewStream.value:
                    # new stream detected on connection
                    await self.accept_stream()

                if message:
                    await self.buffers[stream_id].put(message)

            # Force context switch
            await asyncio.sleep(0)
|
2018-11-29 01:58:16 +08:00
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
async def read_message(self) -> Tuple[int, int, bytes]:
|
2018-11-29 01:58:16 +08:00
|
|
|
"""
|
|
|
|
Read a single message off of the raw connection
|
|
|
|
:return: stream_id, flag, message contents
|
|
|
|
"""
|
2019-01-29 05:15:22 +08:00
|
|
|
|
|
|
|
# Timeout is set to a relatively small value to alleviate wait time to exit
|
|
|
|
# loop in handle_incoming
|
2019-02-25 09:58:23 +08:00
|
|
|
timeout = 0.1
|
2018-11-29 01:58:16 +08:00
|
|
|
try:
|
2019-01-29 05:15:22 +08:00
|
|
|
header = await decode_uvarint_from_stream(self.raw_conn.reader, timeout)
|
|
|
|
length = await decode_uvarint_from_stream(self.raw_conn.reader, timeout)
|
2019-08-04 02:25:25 +08:00
|
|
|
message = await asyncio.wait_for(self.raw_conn.reader.read(length), timeout=timeout)
|
2018-11-29 01:58:16 +08:00
|
|
|
except asyncio.TimeoutError:
|
|
|
|
return None, None, None
|
|
|
|
|
|
|
|
flag = header & 0x07
|
|
|
|
stream_id = header >> 3
|
|
|
|
|
|
|
|
return stream_id, flag, message
|