2018-11-12 06:15:55 +08:00
|
|
|
import asyncio
|
2019-11-08 12:57:43 +08:00
|
|
|
import logging
|
2019-08-29 21:38:06 +08:00
|
|
|
from typing import Any # noqa: F401
|
2019-09-13 15:29:24 +08:00
|
|
|
from typing import Awaitable, Dict, List, Optional, Tuple
|
2019-08-02 18:28:04 +08:00
|
|
|
|
2019-09-15 16:58:08 +08:00
|
|
|
from libp2p.exceptions import ParseError
|
2019-09-13 15:32:10 +08:00
|
|
|
from libp2p.io.exceptions import IncompleteReadError
|
2019-09-21 18:17:00 +08:00
|
|
|
from libp2p.network.connection.exceptions import RawConnError
|
2019-08-05 10:20:30 +08:00
|
|
|
from libp2p.peer.id import ID
|
2019-08-05 11:22:44 +08:00
|
|
|
from libp2p.security.secure_conn_interface import ISecureConn
|
2019-08-05 11:17:38 +08:00
|
|
|
from libp2p.stream_muxer.abc import IMuxedConn, IMuxedStream
|
2019-08-20 11:02:21 +08:00
|
|
|
from libp2p.typing import TProtocol
|
2019-08-26 20:35:17 +08:00
|
|
|
from libp2p.utils import (
|
|
|
|
decode_uvarint_from_stream,
|
|
|
|
encode_uvarint,
|
2019-09-05 18:18:08 +08:00
|
|
|
encode_varint_prefixed,
|
2019-08-26 20:35:17 +08:00
|
|
|
read_varint_prefixed_bytes,
|
|
|
|
)
|
2019-01-10 02:38:56 +08:00
|
|
|
|
2019-08-02 17:14:43 +08:00
|
|
|
from .constants import HeaderTags
|
2019-08-28 21:43:34 +08:00
|
|
|
from .datastructures import StreamID
|
2019-09-13 15:29:24 +08:00
|
|
|
from .exceptions import MplexUnavailable
|
2019-08-03 13:36:19 +08:00
|
|
|
from .mplex_stream import MplexStream
|
2018-11-01 05:31:00 +08:00
|
|
|
|
2019-08-20 11:02:21 +08:00
|
|
|
# Protocol ID this muxer is advertised under during protocol negotiation.
MPLEX_PROTOCOL_ID = TProtocol("/mplex/6.7.0")
|
|
|
|
|
2019-11-08 12:57:43 +08:00
|
|
|
# Module-level logger, namespaced under this module's dotted path.
logger = logging.getLogger("libp2p.stream_muxer.mplex.mplex")
|
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
|
2018-11-21 09:28:41 +08:00
|
|
|
class Mplex(IMuxedConn):
    """
    Mplex stream multiplexer: multiplexes many logical streams over a
    single secured connection.

    reference: https://github.com/libp2p/go-mplex/blob/master/multiplex.go
    """

    # Underlying encrypted/authenticated connection frames are read from
    # and written to.
    secured_conn: ISecureConn
    # ID of the remote peer this muxed connection is to.
    peer_id: ID
    # Monotonically increasing counter used to allocate channel ids for
    # locally-opened streams.
    next_channel_id: int
    # All live streams, keyed by StreamID (channel id + initiator flag).
    streams: Dict[StreamID, MplexStream]
    # Guards every read/write of `streams`.
    streams_lock: asyncio.Lock
    # Streams opened by the remote side, waiting to be accepted.
    new_stream_queue: "asyncio.Queue[IMuxedStream]"
    # Set when shutdown begins; unblocks pending waiters.
    event_shutting_down: asyncio.Event
    # Set once cleanup has finished and the muxer is fully closed.
    event_closed: asyncio.Event

    # Background tasks owned by this muxer (currently the reader loop).
    _tasks: List["asyncio.Future[Any]"]
|
|
|
|
|
2019-09-12 00:38:12 +08:00
|
|
|
def __init__(self, secured_conn: ISecureConn, peer_id: ID) -> None:
    """
    Create a new muxed connection and start its reader loop.

    :param secured_conn: an instance of ``ISecureConn``
    :param peer_id: peer_id of peer the connection is to
    """
    self.secured_conn = secured_conn

    # Channel ids for locally-opened streams start at 0.
    self.next_channel_id = 0

    # Set peer_id
    self.peer_id = peer_id

    # Mapping from stream ID -> buffer of messages for that stream
    self.streams = {}
    self.streams_lock = asyncio.Lock()
    self.new_stream_queue = asyncio.Queue()
    self.event_shutting_down = asyncio.Event()
    self.event_closed = asyncio.Event()

    self._tasks = []

    # Kick off reading
    self._tasks.append(asyncio.ensure_future(self.handle_incoming()))
|
2018-11-01 05:31:00 +08:00
|
|
|
|
2019-08-16 17:01:27 +08:00
|
|
|
@property
|
2019-10-25 01:25:34 +08:00
|
|
|
def is_initiator(self) -> bool:
|
2019-10-25 01:28:19 +08:00
|
|
|
return self.secured_conn.is_initiator
|
2019-08-16 17:01:27 +08:00
|
|
|
|
2019-08-25 04:06:24 +08:00
|
|
|
async def close(self) -> None:
|
2019-10-24 14:41:10 +08:00
|
|
|
"""close the stream muxer and underlying secured connection."""
|
2019-09-13 15:29:24 +08:00
|
|
|
if self.event_shutting_down.is_set():
|
|
|
|
return
|
2019-09-12 17:07:41 +08:00
|
|
|
# Set the `event_shutting_down`, to allow graceful shutdown.
|
|
|
|
self.event_shutting_down.set()
|
2019-09-13 15:29:24 +08:00
|
|
|
await self.secured_conn.close()
|
2019-09-12 17:07:41 +08:00
|
|
|
# Blocked until `close` is finally set.
|
2019-09-13 15:29:24 +08:00
|
|
|
await self.event_closed.wait()
|
2018-11-01 05:31:00 +08:00
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
def is_closed(self) -> bool:
|
2018-11-01 05:31:00 +08:00
|
|
|
"""
|
2019-10-25 02:10:45 +08:00
|
|
|
check connection is fully closed.
|
2019-10-24 14:41:10 +08:00
|
|
|
|
2018-11-01 05:31:00 +08:00
|
|
|
:return: true if successful
|
|
|
|
"""
|
2019-09-12 17:07:41 +08:00
|
|
|
return self.event_closed.is_set()
|
2018-11-01 05:31:00 +08:00
|
|
|
|
2019-08-28 21:43:34 +08:00
|
|
|
def _get_next_channel_id(self) -> int:
|
2019-08-25 03:12:08 +08:00
|
|
|
"""
|
2019-10-25 02:10:45 +08:00
|
|
|
Get next available stream id.
|
2019-10-24 14:41:10 +08:00
|
|
|
|
2019-08-25 03:12:08 +08:00
|
|
|
:return: next available stream id for the connection
|
|
|
|
"""
|
2019-08-28 21:43:34 +08:00
|
|
|
next_id = self.next_channel_id
|
|
|
|
self.next_channel_id += 1
|
2019-08-25 03:12:08 +08:00
|
|
|
return next_id
|
|
|
|
|
2019-09-05 22:29:33 +08:00
|
|
|
async def _initialize_stream(self, stream_id: StreamID, name: str) -> MplexStream:
    """Create an ``MplexStream`` for *stream_id* and register it in
    ``streams`` under the registry lock."""
    new_stream = MplexStream(name, stream_id, self)
    async with self.streams_lock:
        self.streams[stream_id] = new_stream
    return new_stream
|
|
|
|
|
2019-08-26 20:26:22 +08:00
|
|
|
async def open_stream(self) -> IMuxedStream:
    """
    Open a new outbound muxed stream.

    Registers the stream locally, then announces it to the remote side
    with a ``NewStream`` message.

    :return: a new ``MplexStream``
    """
    new_channel_id = self._get_next_channel_id()
    # Locally-opened streams carry the initiator flag in their id.
    stream_id = StreamID(channel_id=new_channel_id, is_initiator=True)
    # Default stream name is the `channel_id`
    stream_name = str(new_channel_id)
    stream = await self._initialize_stream(stream_id, stream_name)
    await self.send_message(HeaderTags.NewStream, stream_name.encode(), stream_id)
    return stream
|
2018-11-12 00:52:26 +08:00
|
|
|
|
2019-09-13 15:29:24 +08:00
|
|
|
async def _wait_until_shutting_down_or_closed(self, coro: Awaitable[Any]) -> Any:
    """
    Await *coro*, but bail out early if the muxer closes or begins
    shutting down while it is still pending.

    :param coro: the awaitable to race against the shutdown events
    :return: the result of *coro* if it finishes first
    :raise MplexUnavailable: if the muxer closed or began shutting down
        before *coro* completed
    """
    task_coro = asyncio.ensure_future(coro)
    task_wait_closed = asyncio.ensure_future(self.event_closed.wait())
    task_wait_shutting_down = asyncio.ensure_future(self.event_shutting_down.wait())
    # Race the operation against both shutdown signals.
    done, pending = await asyncio.wait(
        [task_coro, task_wait_closed, task_wait_shutting_down],
        return_when=asyncio.FIRST_COMPLETED,
    )
    # Cancel whichever tasks did not finish; their results are unused.
    for fut in pending:
        fut.cancel()
    # Terminal states win over a completed operation: check `closed`
    # first, then `shutting down`, before touching the coro's result.
    if task_wait_closed in done:
        raise MplexUnavailable("Mplex is closed")
    if task_wait_shutting_down in done:
        raise MplexUnavailable("Mplex is shutting down")
    # Only `task_coro` completed; propagate its result (or exception).
    return task_coro.result()
|
|
|
|
|
2019-09-12 00:38:12 +08:00
|
|
|
async def accept_stream(self) -> IMuxedStream:
|
2019-10-24 14:41:10 +08:00
|
|
|
"""accepts a muxed stream opened by the other end."""
|
2019-09-13 15:29:24 +08:00
|
|
|
return await self._wait_until_shutting_down_or_closed(
|
|
|
|
self.new_stream_queue.get()
|
|
|
|
)
|
2018-11-12 06:15:55 +08:00
|
|
|
|
2019-08-28 21:43:34 +08:00
|
|
|
async def send_message(
    self, flag: HeaderTags, data: Optional[bytes], stream_id: StreamID
) -> int:
    """
    Send a single mplex message over the connection.

    :param flag: header tag (message type) to use
    :param data: data to send in the message (``None`` is sent as empty)
    :param stream_id: stream the message is in
    :return: number of bytes written to the secured connection
    :raise MplexUnavailable: if the muxer closes or shuts down mid-send
    """
    # Mplex frame header: (channel_id << 3) | flag, uvarint-encoded.
    header = encode_uvarint((stream_id.channel_id << 3) | flag.value)

    if data is None:
        data = b""

    # Frame = header + varint-length-prefixed payload.
    _bytes = header + encode_varint_prefixed(data)

    return await self._wait_until_shutting_down_or_closed(
        self.write_to_stream(_bytes)
    )
|
2018-11-12 06:38:11 +08:00
|
|
|
|
2019-09-05 18:18:08 +08:00
|
|
|
async def write_to_stream(self, _bytes: bytes) -> int:
|
2018-11-29 01:58:16 +08:00
|
|
|
"""
|
2019-10-25 02:10:45 +08:00
|
|
|
writes a byte array to a secured connection.
|
2019-10-24 14:41:10 +08:00
|
|
|
|
2018-11-29 01:58:16 +08:00
|
|
|
:param _bytes: byte array to write
|
|
|
|
:return: length written
|
|
|
|
"""
|
2019-08-25 14:42:44 +08:00
|
|
|
await self.secured_conn.write(_bytes)
|
2018-11-12 06:48:31 +08:00
|
|
|
return len(_bytes)
|
2018-11-12 06:15:55 +08:00
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
async def handle_incoming(self) -> None:
    """Reader loop: read messages off the secured connection and add them
    to the corresponding message buffers until the muxer becomes
    unavailable, then clean up."""
    while True:
        try:
            await self._handle_incoming_message()
        except MplexUnavailable as e:
            # Fatal read error or shutdown: leave the loop and clean up.
            logger.debug("mplex unavailable while waiting for incoming: %s", e)
            break
        # Force context switch
        await asyncio.sleep(0)
    # If we enter here, it means this connection is shutting down.
    # We should clean things up.
    await self._cleanup()
|
2018-11-29 01:58:16 +08:00
|
|
|
|
2019-08-02 17:53:51 +08:00
|
|
|
async def read_message(self) -> Tuple[int, int, bytes]:
    """
    Read a single message off of the secured connection.

    :return: channel_id, flag, message contents
    :raise MplexUnavailable: if the underlying connection fails, returns
        incomplete data, or the message body does not arrive in time
    """
    # FIXME: No timeout is used in Go implementation.
    try:
        # Header is a uvarint encoding (channel_id << 3) | flag.
        header = await decode_uvarint_from_stream(self.secured_conn)
        # NOTE(review): only the body read below is bounded by a timeout;
        # the header read above can block indefinitely (see FIXME).
        message = await asyncio.wait_for(
            read_varint_prefixed_bytes(self.secured_conn), timeout=5
        )
    except (ParseError, RawConnError, IncompleteReadError) as error:
        raise MplexUnavailable(
            "failed to read messages correctly from the underlying connection"
        ) from error
    except asyncio.TimeoutError as error:
        raise MplexUnavailable(
            "failed to read more message body within the timeout"
        ) from error

    # Low 3 bits carry the flag; the remaining bits are the channel id.
    flag = header & 0x07
    channel_id = header >> 3

    return channel_id, flag, message
|
2019-09-13 15:29:24 +08:00
|
|
|
|
2019-09-14 14:59:07 +08:00
|
|
|
async def _handle_incoming_message(self) -> None:
    """
    Read and handle a new incoming message.

    :raise MplexUnavailable: `Mplex` encounters fatal error or is shutting down.
    """
    channel_id, flag, message = await self._wait_until_shutting_down_or_closed(
        self.read_message()
    )
    # NOTE(review): relies on the HeaderTags encoding where the low bit of
    # the flag distinguishes the *Receiver/*Initiator variants — confirm
    # against `.constants`.
    stream_id = StreamID(channel_id=channel_id, is_initiator=bool(flag & 1))

    # Dispatch on the message type.
    if flag == HeaderTags.NewStream.value:
        await self._handle_new_stream(stream_id, message)
    elif flag in (
        HeaderTags.MessageInitiator.value,
        HeaderTags.MessageReceiver.value,
    ):
        await self._handle_message(stream_id, message)
    elif flag in (HeaderTags.CloseInitiator.value, HeaderTags.CloseReceiver.value):
        await self._handle_close(stream_id)
    elif flag in (HeaderTags.ResetInitiator.value, HeaderTags.ResetReceiver.value):
        await self._handle_reset(stream_id)
    else:
        # Receives messages with an unknown flag: reset the stream if we
        # know it, otherwise drop the message.
        # TODO: logging
        async with self.streams_lock:
            if stream_id in self.streams:
                stream = self.streams[stream_id]
                await stream.reset()
|
|
|
|
|
|
|
|
async def _handle_new_stream(self, stream_id: StreamID, message: bytes) -> None:
    """Handle a ``NewStream`` message: register the remote-opened stream
    and enqueue it for ``accept_stream``.

    :param stream_id: id of the newly announced stream
    :param message: payload, interpreted as the stream's name
    :raise MplexUnavailable: on a duplicate stream id, or if the muxer
        shuts down before the stream can be enqueued
    """
    async with self.streams_lock:
        if stream_id in self.streams:
            # `NewStream` for the same id is received twice...
            raise MplexUnavailable(
                f"received NewStream message for existing stream: {stream_id}"
            )
    # NOTE(review): `streams_lock` is released here and re-acquired inside
    # `_initialize_stream`; a concurrent duplicate NewStream could race in
    # between — confirm the reader loop is the only caller.
    mplex_stream = await self._initialize_stream(stream_id, message.decode())
    await self._wait_until_shutting_down_or_closed(
        self.new_stream_queue.put(mplex_stream)
    )
|
|
|
|
|
|
|
|
async def _handle_message(self, stream_id: StreamID, message: bytes) -> None:
    """Deliver a data message to the matching stream's incoming buffer.

    Messages for unknown streams, or for streams whose remote side is
    already closed, are silently dropped.
    """
    async with self.streams_lock:
        if stream_id not in self.streams:
            # We receive a message of the stream `stream_id` which is not accepted
            # before. It is abnormal. Possibly disconnect?
            # TODO: Warn and emit logs about this.
            return
        stream = self.streams[stream_id]
    async with stream.close_lock:
        if stream.event_remote_closed.is_set():
            # TODO: Warn "Received data from remote after stream was closed by them. (len = %d)"  # noqa: E501
            return
    # NOTE(review): the enqueue happens outside `close_lock`; a close
    # racing in between would still deliver this message — confirm.
    await self._wait_until_shutting_down_or_closed(
        stream.incoming_data.put(message)
    )
|
|
|
|
|
|
|
|
async def _handle_close(self, stream_id: StreamID) -> None:
    """Handle a ``Close`` message: mark the stream remote-closed, and
    drop it from the registry once both directions are closed."""
    async with self.streams_lock:
        if stream_id not in self.streams:
            # Ignore unmatched messages for now.
            return
        stream = self.streams[stream_id]
    # NOTE: If remote is already closed, then return: Technically a bug
    # on the other side. We should consider killing the connection.
    async with stream.close_lock:
        if stream.event_remote_closed.is_set():
            return
    is_local_closed: bool
    # NOTE(review): `close_lock` is released and re-acquired between the
    # check above and the set below — confirm no concurrent close can
    # slip in between.
    async with stream.close_lock:
        stream.event_remote_closed.set()
        is_local_closed = stream.event_local_closed.is_set()
    # If local is also closed, both sides are closed. Then, we should clean up
    # the entry of this stream, to avoid others from accessing it.
    if is_local_closed:
        async with self.streams_lock:
            del self.streams[stream_id]
|
|
|
|
|
|
|
|
async def _handle_reset(self, stream_id: StreamID) -> None:
    """Handle a ``Reset`` message: mark the stream reset and closed in
    both directions, then forget it."""
    async with self.streams_lock:
        if stream_id not in self.streams:
            # This is *ok*. We forget the stream on reset.
            return
        stream = self.streams[stream_id]

    async with stream.close_lock:
        if not stream.event_remote_closed.is_set():
            stream.event_reset.set()
            stream.event_remote_closed.set()
        # If local is not closed, we should close it.
        if not stream.event_local_closed.is_set():
            stream.event_local_closed.set()
    # The stream is gone after a reset; drop its registry entry.
    async with self.streams_lock:
        del self.streams[stream_id]
|
|
|
|
|
2019-09-13 15:29:24 +08:00
|
|
|
async def _cleanup(self) -> None:
    """Tear everything down after the reader loop exits: mark every
    stream reset/closed, drop the registry, and signal ``event_closed``
    so `close()` callers unblock."""
    if not self.event_shutting_down.is_set():
        self.event_shutting_down.set()
    async with self.streams_lock:
        for stream in self.streams.values():
            async with stream.close_lock:
                if not stream.event_remote_closed.is_set():
                    stream.event_remote_closed.set()
                    stream.event_reset.set()
                    stream.event_local_closed.set()
        # NOTE(review): `streams` is set to None rather than cleared; any
        # later access raises TypeError instead of KeyError — confirm
        # this is the intended "muxer is dead" signal.
        self.streams = None
    self.event_closed.set()
|