py-libp2p/libp2p/stream_muxer/mplex/mplex.py


import asyncio
from ..muxed_connection_interface import IMuxedConn
from .mplex_stream import MplexStream
from .utils import decode_uvarint_from_stream, encode_uvarint, get_flag
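
# Wire format sketch (per the mplex spec / the go-mplex implementation
# referenced in the class docstring below): each frame is
#   <header uvarint> <data length uvarint> <data bytes>
# where header == (stream_id << 3) | flag and the 3-bit flag is the frame
# type (NewStream = 0, MessageReceiver = 1, MessageInitiator = 2, with Close
# and Reset variants following). get_flag in .utils is assumed to resolve the
# string names used below ("NEW_STREAM", ...) to these numeric values.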


class Mplex(IMuxedConn):
    """
    reference: https://github.com/libp2p/go-mplex/blob/master/multiplex.go
    """

    def __init__(self, secured_conn, generic_protocol_handler, peer_id):
        """
        create a new muxed connection
        :param secured_conn: an instance of secured connection
        :param generic_protocol_handler: generic protocol handler
            for new muxed streams
        :param peer_id: peer_id of peer the connection is to
        """
        super(Mplex, self).__init__(secured_conn, generic_protocol_handler, peer_id)
        self.secured_conn = secured_conn
        self.raw_conn = secured_conn.get_conn()
        self.initiator = self.raw_conn.initiator

        # Store generic protocol handler
        self.generic_protocol_handler = generic_protocol_handler

        # Set peer_id
        self.peer_id = peer_id

        # Mapping from stream ID -> buffer of messages for that stream
        self.buffers = {}

        self.stream_queue = asyncio.Queue()

        # Kick off reading
        asyncio.ensure_future(self.handle_incoming())

    def close(self):
        """
        close the stream muxer and underlying raw connection
        """
        self.raw_conn.close()

    def is_closed(self):
        """
        check connection is fully closed
        :return: true if successful
        """

    async def read_buffer(self, stream_id):
        """
        Read a message from stream_id's buffer, which handle_incoming fills
        with messages read off the raw connection
        :param stream_id: stream id of stream to read from
        :return: message read, or None on timeout or unknown stream
        """
        # TODO: propagate up timeout exception and catch
        # TODO: pass down timeout from user and use that
        if stream_id in self.buffers:
            try:
                data = await asyncio.wait_for(self.buffers[stream_id].get(), timeout=8)
                return data
            except asyncio.TimeoutError:
                return None

        # Stream not created yet
        return None

    async def open_stream(self, protocol_id, multi_addr):
        """
        creates a new muxed_stream
        :param protocol_id: protocol_id of stream
        :param multi_addr: multi_addr that stream connects to
        :return: a new stream
        """
        stream_id = self.raw_conn.next_stream_id()
        stream = MplexStream(stream_id, multi_addr, self)
        self.buffers[stream_id] = asyncio.Queue()
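        # Announce the stream to the remote peer with an empty NEW_STREAM
        # frame; get_flag in .utils is assumed to map the ("NEW_STREAM",
        # initiator) pair onto the numeric mplex flag (0 for NewStream).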
        await self.send_message(get_flag(self.initiator, "NEW_STREAM"), None, stream_id)
        return stream

    async def accept_stream(self):
        """
        accepts a muxed stream opened by the other end and hands it off
        to the generic protocol handler
        """
        stream_id = await self.stream_queue.get()
        stream = MplexStream(stream_id, False, self)
        asyncio.ensure_future(self.generic_protocol_handler(stream))

    async def send_message(self, flag, data, stream_id):
        """
        sends a message over the connection
        :param flag: flag (frame type) to use in the header
        :param data: data to send in the message
        :param stream_id: stream the message is in
        :return: number of bytes written
        """
        # Pack the header: stream id shifted left by 3, OR'd with the 3-bit flag
        header = (stream_id << 3) | flag
        header = encode_uvarint(header)
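        # Worked example (illustrative values): stream_id 4 with flag 2
        # (MESSAGE_INITIATOR in the mplex spec) gives (4 << 3) | 2 == 34,
        # encoded as the single uvarint byte 0x22. The complete frame is
        # <header uvarint><data length uvarint><data bytes>.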

        if data is None:
            data_length = encode_uvarint(0)
            _bytes = header + data_length
        else:
            data_length = encode_uvarint(len(data))
            _bytes = header + data_length + data

        return await self.write_to_stream(_bytes)

    async def write_to_stream(self, _bytes):
        """
        writes a byte array to a raw connection
        :param _bytes: byte array to write
        :return: length written
        """
        self.raw_conn.writer.write(_bytes)
        await self.raw_conn.writer.drain()
        return len(_bytes)

    async def handle_incoming(self):
        """
        Read a message off of the raw connection and add it to the corresponding message buffer
        """
        # TODO Deal with other types of messages using flag (currently _)
        while True:
            stream_id, flag, message = await self.read_message()

            if stream_id is not None and flag is not None and message is not None:
                if stream_id not in self.buffers:
                    self.buffers[stream_id] = asyncio.Queue()
                    await self.stream_queue.put(stream_id)
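
                # A NEW_STREAM frame means the remote side opened this stream:
                # accept_stream pulls the id off stream_queue and passes a new
                # MplexStream to the protocol handler (flag value resolved via
                # get_flag, assumed to follow the mplex spec).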
                if flag == get_flag(True, "NEW_STREAM"):
                    # new stream detected on connection
                    await self.accept_stream()

                if message:
                    await self.buffers[stream_id].put(message)

            # Force context switch
            await asyncio.sleep(0)

    async def read_message(self):
        """
        Read a single message off of the raw connection
        :return: stream_id, flag, message contents
        """
        # Timeout is set to a relatively small value to alleviate wait time to exit
        # loop in handle_incoming
        timeout = 0.1
        try:
            header = await decode_uvarint_from_stream(self.raw_conn.reader, timeout)
            length = await decode_uvarint_from_stream(self.raw_conn.reader, timeout)
            message = await asyncio.wait_for(self.raw_conn.reader.read(length), timeout=timeout)
        except asyncio.TimeoutError:
            return None, None, None

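        # Undo the packing done in send_message: the low 3 bits are the flag,
        # the remaining bits are the stream id. E.g. a header of 0x22 (34)
        # decodes to flag 2 on stream 4, mirroring the example above.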
        flag = header & 0x07
        stream_id = header >> 3

        return stream_id, flag, message