Refactor floodsub.publish

Passed the first test of floodsub
This commit is contained in:
mhchia 2019-07-25 14:08:16 +08:00
parent 93cf5a2c32
commit cae4f34034
No known key found for this signature in database
GPG Key ID: 389EFBEA1362589A
5 changed files with 87 additions and 72 deletions

View File

@ -1,10 +1,10 @@
from typing import (
Generator,
Sequence,
Iterable,
)
from libp2p.peer.id import (
ID,
id_b58_decode,
)
from .pb import rpc_pb2
@ -51,7 +51,7 @@ class FloodSub(IPubsubRouter):
:param rpc: rpc message
"""
async def publish(self, from_peer: ID, pubsub_message: rpc_pb2.Message) -> None:
async def publish(self, src: ID, pubsub_msg: rpc_pb2.Message) -> None:
"""
Invoked to forward a new message that has been validated.
This is where the "flooding" part of floodsub happens
@ -62,68 +62,36 @@ class FloodSub(IPubsubRouter):
so that seen messages are not further forwarded.
It also never forwards a message back to the source
or the peer that forwarded the message.
:param sender_peer_id: peer_id of message sender
:param rpc_message: pubsub message in RPC string format
:param src: the peer id of the peer who forwarded the message to us.
:param pubsub_msg: pubsub message in protobuf.
"""
peers_gen = self._get_peers_to_send(
pubsub_message.topicIDs,
from_peer_id=from_peer,
src_peer_id=ID(pubsub_message.from_id),
pubsub_msg.topicIDs,
src=src,
origin=ID(pubsub_msg.from_id),
)
rpc_msg = rpc_pb2.RPC(
publish=[pubsub_message],
publish=[pubsub_msg],
)
for peer_id in peers_gen:
stream = self.pubsub.peers[str(peer_id)]
await stream.write(rpc_msg.SerializeToString())
# packet = rpc_pb2.RPC()
# packet.ParseFromString(rpc_message)
# from_peer_str = str(from_peer)
# for topic in pubsub_message.topicIDs:
# if topic not in self.pubsub.topics:
# continue
# peers = self.pubsub.peer_topics[topic]
# # Deliver to self if self was origin
# # Note: handle_talk checks if self is subscribed to topics in message
# for message in packet.publish:
# decoded_from_id = message.from_id.decode('utf-8')
# if msg_sender == decoded_from_id and msg_sender == str(self.pubsub.host.get_id()):
# id_in_seen_msgs = (message.seqno, message.from_id)
# if id_in_seen_msgs not in self.pubsub.seen_messages:
# self.pubsub.seen_messages[id_in_seen_msgs] = 1
# await self.pubsub.handle_talk(message)
# # Deliver to self and peers
# for topic in message.topicIDs:
# if topic in self.pubsub.peer_topics:
# for peer_id_in_topic in self.pubsub.peer_topics[topic]:
# # Forward to all known peers in the topic that are not the
# # message sender and are not the message origin
# if peer_id_in_topic not in (msg_sender, decoded_from_id):
# stream = self.pubsub.peers[peer_id_in_topic]
# # Create new packet with just publish message
# new_packet = rpc_pb2.RPC()
# new_packet.publish.extend([message])
# # Publish the packet
# await stream.write(new_packet.SerializeToString())
def _get_peers_to_send(
self,
topic_ids: Sequence[str],
from_peer_id: ID,
src_peer_id: ID) -> Generator[ID]:
# TODO: should send to self if `src_peer_id` is ourself?
topic_ids: Iterable[str],
src: ID,
origin: ID) -> Iterable[ID]:
"""
:return: a generator of the peer ids to which we should forward the message
"""
for topic in topic_ids:
if topic not in self.pubsub.topics:
if topic not in self.pubsub.peer_topics:
continue
for peer_id in self.pubsub.peer_topics[topic]:
if peer_id in (from_peer_id, src_peer_id):
for peer_id_str in self.pubsub.peer_topics[topic]:
peer_id = id_b58_decode(peer_id_str)
if peer_id in (src, origin):
continue
# FIXME: Should change `self.pubsub.peers` to Dict[PeerID, ...]
if str(peer_id) not in self.pubsub.peers:

View File

@ -45,7 +45,8 @@ class Pubsub:
outgoing_messages: asyncio.Queue()
seen_messages: LRU
my_topics: Dict[str, asyncio.Queue]
peer_topics: Dict[str, List[ID]]
# FIXME: Should be changed to `Dict[str, List[ID]]`
peer_topics: Dict[str, List[str]]
# FIXME: Should be changed to `Dict[ID, INetStream]`
peers: Dict[str, INetStream]
# NOTE: Be sure it is increased atomically every time.
@ -320,23 +321,34 @@ class Pubsub:
# Write message to stream
await stream.write(rpc_msg)
def list_peers(self, topic_id: str) -> Tuple[ID]:
def list_peers(self, topic_id: str) -> Tuple[ID, ...]:
return
async def publish(self, topic_id: str, data: bytes) -> None:
"""
Publish data to a topic
:param topic_id: topic which we are going to publish the data to
:param data: data which we are publishing
"""
msg = rpc_pb2.Message(
data=data,
topicIDs=[topic_id],
# Origin is myself.
from_id=self.host.get_id().to_bytes(),
seqno=self._next_seqno(),
)
# TODO: Sign with our signing key
self.push_msg(self.host.get_id(), msg)
await self.push_msg(self.host.get_id(), msg)
async def push_msg(self, src: ID, msg: rpc_pb2.Message):
# TODO: - Check if the source is in the blacklist. If yes, reject.
async def push_msg(self, src: ID, msg: rpc_pb2.Message) -> None:
"""
Push a pubsub message to others.
:param src: the peer who forwarded the message to us.
:param msg: the message we are going to push out.
"""
# TODO: - Check if the `source` is in the blacklist. If yes, reject.
# TODO: - Check if the `from` is in the blacklist. If yes, reject.
@ -352,6 +364,9 @@ class Pubsub:
await self.router.publish(src, msg)
def _next_seqno(self) -> bytes:
    """
    Advance the local message counter and return the new value
    encoded as an 8-byte big-endian sequence number.
    """
    bumped = self.counter + 1
    self.counter = bumped
    return bumped.to_bytes(8, 'big')

View File

@ -42,11 +42,11 @@ class IPubsubRouter(ABC):
"""
@abstractmethod
def publish(self, sender_peer_id, rpc_message):
async def publish(self, src, pubsub_msg) -> None:
"""
Invoked to forward a new message that has been validated
:param sender_peer_id: peer_id of message sender
:param rpc_message: message to forward
:param src: peer_id of message sender
:param pubsub_msg: pubsub message to forward
"""
@abstractmethod

View File

@ -8,7 +8,12 @@ from libp2p.peer.peerinfo import info_from_p2p_addr
from libp2p.pubsub.pb import rpc_pb2
from libp2p.pubsub.pubsub import Pubsub
from libp2p.pubsub.floodsub import FloodSub
from utils import message_id_generator, generate_RPC_packet
from .utils import (
make_pubsub_msg,
message_id_generator,
generate_RPC_packet,
)
# pylint: disable=too-many-locals
@ -20,6 +25,7 @@ async def connect(node1, node2):
info = info_from_p2p_addr(addr)
await node1.connect(info)
@pytest.mark.asyncio
async def test_simple_two_nodes():
node_a = await new_node(transport_opt=["/ip4/127.0.0.1/tcp/0"])
@ -29,6 +35,8 @@ async def test_simple_two_nodes():
await node_b.get_network().listen(multiaddr.Multiaddr("/ip4/127.0.0.1/tcp/0"))
supported_protocols = ["/floodsub/1.0.0"]
topic = "my_topic"
data = b"some data"
floodsub_a = FloodSub(supported_protocols)
pubsub_a = Pubsub(node_a, floodsub_a, "a")
@ -38,26 +46,30 @@ async def test_simple_two_nodes():
await connect(node_a, node_b)
await asyncio.sleep(0.25)
qb = await pubsub_b.subscribe("my_topic")
sub_b = await pubsub_b.subscribe(topic)
await asyncio.sleep(0.25)
node_a_id = str(node_a.get_id())
next_msg_id_func = message_id_generator(0)
msg = generate_RPC_packet(node_a_id, ["my_topic"], "some data", next_msg_id_func())
await floodsub_a.publish(node_a_id, msg.SerializeToString())
msg = make_pubsub_msg(
origin_id=node_a.get_id(),
topic_ids=[topic],
data=data,
seqno=next_msg_id_func(),
)
await floodsub_a.publish(node_a.get_id(), msg)
await asyncio.sleep(0.25)
res_b = await qb.get()
res_b = await sub_b.get()
# Check that the msg received by node_b is the same
# as the message sent by node_a
assert res_b.SerializeToString() == msg.publish[0].SerializeToString()
assert res_b.SerializeToString() == msg.SerializeToString()
# Success, terminate pending tasks.
await cleanup()
@pytest.mark.asyncio
async def test_lru_cache_two_nodes():
# two nodes with cache_size of 4
@ -100,7 +112,7 @@ async def test_lru_cache_two_nodes():
messages = [first_message]
# for the next 5 messages
for i in range(2, 6):
# write first message
# write first message
await floodsub_a.publish(node_a_id, first_message.SerializeToString())
await asyncio.sleep(0.25)
@ -127,7 +139,7 @@ async def test_lru_cache_two_nodes():
res_b = await qb.get()
assert res_b.SerializeToString() == first_message.publish[0].SerializeToString()
assert qb.empty()
# Success, terminate pending tasks.
await cleanup()
@ -136,7 +148,7 @@ async def perform_test_from_obj(obj):
"""
Perform a floodsub test from a test obj.
test obj are composed as follows:
{
"supported_protocols": ["supported/protocol/1.0.0",...],
"adj_list": {
@ -190,7 +202,7 @@ async def perform_test_from_obj(obj):
if neighbor_id not in node_map:
neighbor_node = await new_node(transport_opt=["/ip4/127.0.0.1/tcp/0"])
await neighbor_node.get_network().listen(multiaddr.Multiaddr("/ip4/127.0.0.1/tcp/0"))
node_map[neighbor_id] = neighbor_node
floodsub = FloodSub(supported_protocols)

View File

@ -3,9 +3,14 @@ import multiaddr
import uuid
import random
import struct
from typing import (
Sequence,
)
from libp2p import new_node
from libp2p.pubsub.pb import rpc_pb2
from libp2p.peer.peerinfo import info_from_p2p_addr
from libp2p.peer.id import ID
from libp2p.pubsub.pubsub import Pubsub
from libp2p.pubsub.gossipsub import GossipSub
@ -29,6 +34,20 @@ def message_id_generator(start_val):
return generator
def make_pubsub_msg(
        origin_id: ID,
        topic_ids: Sequence[str],
        data: bytes,
        seqno: bytes) -> rpc_pb2.Message:
    """
    Build a pubsub ``Message`` protobuf whose origin is ``origin_id``,
    published to ``topic_ids`` with the given payload and sequence number.
    """
    fields = {
        "from_id": origin_id.to_bytes(),
        "seqno": seqno,
        "data": data,
        "topicIDs": list(topic_ids),
    }
    return rpc_pb2.Message(**fields)
def generate_RPC_packet(origin_id, topics, msg_content, msg_id):
"""
Generate RPC packet to send over wire
@ -42,7 +61,7 @@ def generate_RPC_packet(origin_id, topics, msg_content, msg_id):
from_id=origin_id.encode('utf-8'),
seqno=msg_id,
data=msg_content.encode('utf-8'),
)
)
for topic in topics:
message.topicIDs.extend([topic.encode('utf-8')])
@ -50,6 +69,7 @@ def generate_RPC_packet(origin_id, topics, msg_content, msg_id):
packet.publish.extend([message])
return packet
async def connect(node1, node2):
"""
Connect node1 to node2