py-libp2p/stream_muxer/mplex/mplex.py


import asyncio

from .utils import encode_uvarint, decode_uvarint
from .mplex_stream import MplexStream
from ..muxed_connection_interface import IMuxedConn


class Mplex(IMuxedConn):
"""
reference: https://github.com/libp2p/go-mplex/blob/master/multiplex.go
"""
def __init__(self, conn, initiator):
"""
create a new muxed connection
:param conn: an instance of raw connection
:param initiator: boolean to prevent multiplex with self
"""
self.raw_conn = conn
self.initiator = initiator
2018-11-26 00:05:56 +08:00
# Mapping from stream ID -> buffer of messages for that stream
self.buffers = {}
2018-11-26 00:05:56 +08:00
2018-11-13 00:00:43 +08:00
self.stream_queue = asyncio.Queue()
2018-11-21 10:46:18 +08:00
self.conn_lock = asyncio.Lock()
2018-11-26 00:05:56 +08:00
self._next_id = 0
2018-11-19 05:04:52 +08:00
# The initiator need not read upon construction time.
# It should read when the user decides that it wants to read from the constructed stream.
if not initiator:
asyncio.ensure_future(self.handle_incoming())
2018-11-01 05:31:00 +08:00
2018-11-26 00:05:56 +08:00
def _next_stream_id(self):
        next_id = self._next_id
        self._next_id += 1
        return next_id

    def close(self):
"""
close the stream muxer and underlying raw connection
"""
self.raw_conn.close()
2018-11-01 05:31:00 +08:00
def is_closed(self):
"""
check connection is fully closed
:return: true if successful
"""
2018-11-13 00:00:43 +08:00
async def read_buffer(self, stream_id):
        # Empty buffer or nonexistent stream
        # TODO: propagate up timeout exception and catch
        if stream_id not in self.buffers or not self.buffers[stream_id]:
            await self.handle_incoming()

        data = self.buffers[stream_id]
        self.buffers[stream_id] = bytearray()
        return data

    async def open_stream(self, protocol_id, peer_id, multi_addr):
"""
creates a new muxed_stream
2018-11-13 02:02:49 +08:00
:param protocol_id: protocol_id of stream
:param stream_id: stream_id of stream
:param peer_id: peer_id that stream connects to
:param multi_addr: multi_addr that stream connects to
2018-11-01 05:31:00 +08:00
:return: a new stream
"""
2018-11-26 00:05:56 +08:00
stream_id = self._next_stream_id()
2018-11-21 09:28:41 +08:00
stream = MplexStream(stream_id, multi_addr, self)
2018-11-26 00:05:56 +08:00
self.buffers[stream_id] = bytearray()
return stream
2018-11-13 00:00:43 +08:00
async def accept_stream(self):
2018-11-01 05:31:00 +08:00
"""
accepts a muxed stream opened by the other end
:return: the accepted stream
"""
2018-11-12 09:32:23 +08:00
# TODO update to pull out protocol_id from message
protocol_id = "/echo/1.0.0"
2018-11-13 00:00:43 +08:00
stream_id = await self.stream_queue.get()
2018-11-21 09:28:41 +08:00
stream = MplexStream(stream_id, False, self)
2018-11-12 09:32:23 +08:00
return stream, stream_id, protocol_id
2018-11-13 00:00:43 +08:00
async def send_message(self, flag, data, stream_id):
"""
sends a message over the connection
:param header: header to use
:param data: data to send in the message
2018-11-12 06:38:11 +08:00
:param stream_id: stream the message is in
:return: True if success
"""
2018-11-12 06:38:11 +08:00
# << by 3, then or with flag
header = (stream_id << 3) | flag
header = encode_uvarint(header)
2018-11-19 00:22:17 +08:00
if data is None:
            data_length = encode_uvarint(0)
            _bytes = header + data_length
        else:
            data_length = encode_uvarint(len(data))
            _bytes = header + data_length + data

        return await self.write_to_stream(_bytes)

    async def write_to_stream(self, _bytes):
        self.raw_conn.writer.write(_bytes)
        await self.raw_conn.writer.drain()
        return len(_bytes)

    async def handle_incoming(self):
        data = bytearray()
        try:
            chunk = await asyncio.wait_for(self.raw_conn.reader.read(1024), timeout=5)
            data += chunk

            header, end_index = decode_uvarint(data, 0)
            length, end_index = decode_uvarint(data, end_index)
            # The payload is exactly `length` bytes following the length varint
            message = data[end_index:end_index + length]
            # Deal with other types of messages
            # TODO use flag
            # flag = header & 0x07

            stream_id = header >> 3
            if stream_id not in self.buffers:
                self.buffers[stream_id] = message
                await self.stream_queue.put(stream_id)
            else:
                self.buffers[stream_id] = self.buffers[stream_id] + message
        except asyncio.TimeoutError:
            print('timeout!')
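

if __name__ == "__main__":
    # Hedged usage sketch, not part of the original module: it drives Mplex
    # against an in-memory stand-in for the raw connection so the framing
    # produced by send_message can be inspected. _StubWriter and _StubRawConn
    # are hypothetical helpers; the real raw connection only needs to expose
    # the reader/writer/close surface that Mplex uses above. Because of the
    # relative imports, run it as a module (python -m ...) from the package
    # root rather than as a standalone script.
    class _StubWriter:
        """Collects written bytes instead of sending them over a socket."""

        def __init__(self):
            self.sent = bytearray()

        def write(self, data):
            self.sent += data

        async def drain(self):
            pass

    class _StubRawConn:
        """Bare-bones object with only the attributes Mplex touches."""

        def __init__(self):
            self.reader = asyncio.StreamReader()
            self.writer = _StubWriter()

        def close(self):
            pass

    async def _demo():
        # initiator=True, so no background reader task is scheduled.
        conn = Mplex(_StubRawConn(), True)

        # The first open_stream call assigns stream ID 0 (see _next_stream_id).
        stream = await conn.open_stream("/echo/1.0.0", None, None)
        print("opened stream:", stream)

        # Flag 2 is assumed to be the initiator's data-message flag, following
        # the go-mplex flag scheme referenced in the class docstring.
        n_written = await conn.send_message(2, b"hello", 0)

        # Expected frame (assuming encode_uvarint emits minimal varint bytes):
        # header 0x02 ((0 << 3) | 2), length 0x05, then the payload --
        # i.e. b'\x02\x05hello', 7 bytes in total.
        print(n_written, bytes(conn.raw_conn.writer.sent))

    asyncio.run(_demo())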