import asyncio
from typing import Any  # noqa: F401
from typing import Dict, List, Optional, Tuple

from libp2p.network.typing import GenericProtocolHandlerFn
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.stream_muxer.abc import IMuxedConn, IMuxedStream
from libp2p.typing import TProtocol
from libp2p.utils import (
    decode_uvarint_from_stream,
    encode_uvarint,
    encode_varint_prefixed,
    read_varint_prefixed_bytes,
)

from .constants import HeaderTags
from .datastructures import StreamID
from .mplex_stream import MplexStream

MPLEX_PROTOCOL_ID = TProtocol("/mplex/6.7.0")


class Mplex(IMuxedConn):
    """
    reference: https://github.com/libp2p/go-mplex/blob/master/multiplex.go
    """

    secured_conn: ISecureConn
    peer_id: ID
    # TODO: `dataIn` in the go implementation has a buffer size of 8; ours should too.
    # TODO: Also, `dataIn` is closed to indicate EOF in Go. We don't have a similar
    #   mechanism to let the `MplexStream`s know that EOF arrived (#235).
    next_channel_id: int
    streams: Dict[StreamID, MplexStream]
    streams_lock: asyncio.Lock
    shutdown: asyncio.Event
    _tasks: List["asyncio.Future[Any]"]

    # TODO: `generic_protocol_handler` should be refactored out of mplex conn.
    def __init__(
        self,
        secured_conn: ISecureConn,
        generic_protocol_handler: GenericProtocolHandlerFn,
        peer_id: ID,
    ) -> None:
        """
        create a new muxed connection
        :param secured_conn: an instance of ``ISecureConn``
        :param generic_protocol_handler: generic protocol handler
            for new muxed streams
        :param peer_id: peer_id of the peer the connection is to
        """
        self.secured_conn = secured_conn
        self.next_channel_id = 0

        # Store generic protocol handler
        self.generic_protocol_handler = generic_protocol_handler

        # Set peer_id
        self.peer_id = peer_id

        # Mapping from stream ID -> MplexStream; each stream buffers its own
        # incoming messages
        self.streams = {}
        self.streams_lock = asyncio.Lock()
        self.shutdown = asyncio.Event()

        self._tasks = []

        # Kick off reading
        self._tasks.append(asyncio.ensure_future(self.handle_incoming()))

    @property
    def initiator(self) -> bool:
        return self.secured_conn.initiator

    async def close(self) -> None:
        """
        close the stream muxer and underlying secured connection
        """
        for task in self._tasks:
            task.cancel()
        await self.secured_conn.close()

    def is_closed(self) -> bool:
        """
        check if the connection is fully closed
        :return: True if the connection is fully closed
        """
        raise NotImplementedError()

    def _get_next_channel_id(self) -> int:
        """
        Get next available stream id
        :return: next available stream id for the connection
        """
        next_id = self.next_channel_id
        self.next_channel_id += 1
        return next_id

    async def _initialize_stream(self, stream_id: StreamID, name: str) -> MplexStream:
        async with self.streams_lock:
            stream = MplexStream(name, stream_id, self)
            self.streams[stream_id] = stream
            return stream

    async def open_stream(self) -> IMuxedStream:
        """
        creates a new muxed_stream
        :return: a new ``MplexStream``
        """
        channel_id = self._get_next_channel_id()
        stream_id = StreamID(channel_id=channel_id, is_initiator=True)
        # Default stream name is the `channel_id`
        name = str(channel_id)
        stream = await self._initialize_stream(stream_id, name)
        await self.send_message(HeaderTags.NewStream, name.encode(), stream_id)
        return stream

    async def accept_stream(self, stream_id: StreamID, name: str) -> None:
        """
        accepts a muxed stream opened by the other end
        """
        stream = await self._initialize_stream(stream_id, name)
        # Perform protocol negotiation for the stream.
        self._tasks.append(asyncio.ensure_future(self.generic_protocol_handler(stream)))

    async def send_message(
        self, flag: HeaderTags, data: Optional[bytes], stream_id: StreamID
    ) -> int:
        """
        sends a message over the connection
        :param flag: header flag to use
        :param data: data to send in the message
        :param stream_id: stream the message is in
        """
        # Shift the channel id left by 3 bits, then OR in the flag.
        header = encode_uvarint((stream_id.channel_id << 3) | flag.value)

        if data is None:
            data = b""

        _bytes = header + encode_varint_prefixed(data)

        return await self.write_to_stream(_bytes)
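
    # Worked example of the frame layout produced above (illustrative only;
    # assumes `HeaderTags.NewStream` has value 0, as in the mplex spec):
    # opening channel 2 with the default name "2" writes
    #     header  = encode_uvarint((2 << 3) | 0)    # -> b"\x10"
    #     payload = encode_varint_prefixed(b"2")    # -> b"\x01" + b"2"
    # i.e. the bytes b"\x10\x01" + b"2" go out over the secured connection.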

    async def write_to_stream(self, _bytes: bytes) -> int:
        """
        writes a byte array to a secured connection
        :param _bytes: byte array to write
        :return: length written
        """
        await self.secured_conn.write(_bytes)
        return len(_bytes)

    async def handle_incoming(self) -> None:
        """
        Read a message off of the secured connection and add it to the
        corresponding message buffer
        """
        # TODO: Deal with other types of messages using flag (currently _)

        while True:
            channel_id, flag, message = await self.read_message()
            if channel_id is not None and flag is not None and message is not None:
                stream_id = StreamID(channel_id=channel_id, is_initiator=bool(flag & 1))
                is_stream_id_seen: bool
                async with self.streams_lock:
                    is_stream_id_seen = stream_id in self.streams
                # Subsequent messages on a stream should wait until the stream is accepted
                # TODO: Handle more tags, and refactor `HeaderTags`
                if flag == HeaderTags.NewStream.value:
                    if is_stream_id_seen:
                        # `NewStream` for the same id is received twice...
                        # TODO: Shutdown
                        pass
                    await self.accept_stream(stream_id, message.decode())
                elif flag in (
                    HeaderTags.MessageInitiator.value,
                    HeaderTags.MessageReceiver.value,
                ):
                    if not is_stream_id_seen:
                        # We received a message for stream `stream_id` before it was
                        # accepted. This is abnormal. Possibly disconnect?
                        # TODO: Warn and emit logs about this.
                        continue
                    async with self.streams_lock:
                        stream = self.streams[stream_id]
                    await stream.incoming_data.put(message)
                elif flag in (
                    HeaderTags.CloseInitiator.value,
                    HeaderTags.CloseReceiver.value,
                ):
                    if not is_stream_id_seen:
                        continue
                    stream: MplexStream
                    async with self.streams_lock:
                        stream = self.streams[stream_id]
                    is_local_closed: bool
                    async with stream.close_lock:
                        stream.event_remote_closed.set()
                        is_local_closed = stream.event_local_closed.is_set()
                    # If local is also closed, both sides are closed and we should
                    # clean up this stream.
                    if is_local_closed:
                        async with self.streams_lock:
                            del self.streams[stream_id]
                elif flag in (
                    HeaderTags.ResetInitiator.value,
                    HeaderTags.ResetReceiver.value,
                ):
                    if not is_stream_id_seen:
                        # This is *ok*. We forget the stream on reset.
                        continue
                    stream: MplexStream
                    async with self.streams_lock:
                        stream = self.streams[stream_id]
                    async with stream.close_lock:
                        if not stream.event_remote_closed.is_set():
                            stream.event_reset.set()
                            stream.event_remote_closed.set()
                        if not stream.event_local_closed.is_set():
                            stream.event_local_closed.set()
                    async with self.streams_lock:
                        del self.streams[stream_id]
                else:
                    # TODO: logging
                    print(f"message with unknown header on stream {stream_id}")
                    if is_stream_id_seen:
                        async with self.streams_lock:
                            stream = self.streams[stream_id]
                        await stream.reset()

            # Force context switch
            await asyncio.sleep(0)
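
    # Summary of the dispatch above (descriptive only, derived from the branches):
    #   NewStream                          -> accept_stream() registers a new MplexStream
    #   MessageInitiator / MessageReceiver -> payload is queued on stream.incoming_data
    #   CloseInitiator / CloseReceiver     -> remote side marked closed; the stream is
    #                                         dropped once both sides are closed
    #   ResetInitiator / ResetReceiver     -> stream marked reset and closed, then dropped
    #   unknown flag                       -> the stream (if known) is reset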

    async def read_message(self) -> Tuple[Optional[int], Optional[int], Optional[bytes]]:
        """
        Read a single message off of the secured connection
        :return: channel_id, flag, message contents
        """

        # FIXME: No timeout is used in the Go implementation.
        # Timeout is set to a relatively small value to alleviate wait time to exit
        # loop in handle_incoming
        header = await decode_uvarint_from_stream(self.secured_conn)
        # TODO: Handle the case of EOF and other exceptions?
        try:
            message = await asyncio.wait_for(
                read_varint_prefixed_bytes(self.secured_conn), timeout=5
            )
        except asyncio.TimeoutError:
            # TODO: Investigate what we should do if the read times out.
            return None, None, None

        flag = header & 0x07
        channel_id = header >> 3

        return channel_id, flag, message
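
    # Worked decoding example (illustrative only): a header value of 18 (0x12)
    # read off the wire splits into
    #     flag       = 18 & 0x07 = 2
    #     channel_id = 18 >> 3   = 2
    # so, assuming flag 2 is `HeaderTags.MessageInitiator` (as in the mplex spec),
    # this frame carries data for channel 2 sent by the stream's initiator.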