Refactor floodsub.publish
Passed the first test of floodsub
parent 93cf5a2c32
commit cae4f34034
@@ -1,10 +1,10 @@
 from typing import (
-    Generator,
-    Sequence,
+    Iterable,
 )

 from libp2p.peer.id import (
     ID,
+    id_b58_decode,
 )

 from .pb import rpc_pb2
@@ -51,7 +51,7 @@ class FloodSub(IPubsubRouter):
         :param rpc: rpc message
         """

-    async def publish(self, from_peer: ID, pubsub_message: rpc_pb2.Message) -> None:
+    async def publish(self, src: ID, pubsub_msg: rpc_pb2.Message) -> None:
         """
         Invoked to forward a new message that has been validated.
         This is where the "flooding" part of floodsub happens
@@ -62,68 +62,36 @@ class FloodSub(IPubsubRouter):
         so that seen messages are not further forwarded.
         It also never forwards a message back to the source
         or the peer that forwarded the message.
-        :param sender_peer_id: peer_id of message sender
-        :param rpc_message: pubsub message in RPC string format
+        :param src: the peer id of the peer who forwards the message to me.
+        :param pubsub_msg: pubsub message in protobuf.
         """

         peers_gen = self._get_peers_to_send(
-            pubsub_message.topicIDs,
-            from_peer_id=from_peer,
-            src_peer_id=ID(pubsub_message.from_id),
+            pubsub_msg.topicIDs,
+            src=src,
+            origin=ID(pubsub_msg.from_id),
         )
         rpc_msg = rpc_pb2.RPC(
-            publish=[pubsub_message],
+            publish=[pubsub_msg],
         )
         for peer_id in peers_gen:
             stream = self.pubsub.peers[str(peer_id)]
             await stream.write(rpc_msg.SerializeToString())

-        # packet = rpc_pb2.RPC()
-        # packet.ParseFromString(rpc_message)
-        # from_peer_str = str(from_peer)
-        # for topic in pubsub_message.topicIDs:
-        # if topic not in self.pubsub.topics:
-        # continue
-        # peers = self.pubsub.peer_topics[topic]
-        # # Deliver to self if self was origin
-        # # Note: handle_talk checks if self is subscribed to topics in message
-        # for message in packet.publish:
-        # decoded_from_id = message.from_id.decode('utf-8')
-        # if msg_sender == decoded_from_id and msg_sender == str(self.pubsub.host.get_id()):
-        # id_in_seen_msgs = (message.seqno, message.from_id)
-
-        # if id_in_seen_msgs not in self.pubsub.seen_messages:
-        # self.pubsub.seen_messages[id_in_seen_msgs] = 1
-
-        # await self.pubsub.handle_talk(message)
-
-        # # Deliver to self and peers
-        # for topic in message.topicIDs:
-        # if topic in self.pubsub.peer_topics:
-        # for peer_id_in_topic in self.pubsub.peer_topics[topic]:
-        # # Forward to all known peers in the topic that are not the
-        # # message sender and are not the message origin
-        # if peer_id_in_topic not in (msg_sender, decoded_from_id):
-        # stream = self.pubsub.peers[peer_id_in_topic]
-        # # Create new packet with just publish message
-        # new_packet = rpc_pb2.RPC()
-        # new_packet.publish.extend([message])
-
-        # # Publish the packet
-        # await stream.write(new_packet.SerializeToString())

     def _get_peers_to_send(
             self,
-            topic_ids: Sequence[str],
-            from_peer_id: ID,
-            src_peer_id: ID) -> Generator[ID]:
-        # TODO: should send to self if `src_peer_id` is ourself?
+            topic_ids: Iterable[str],
+            src: ID,
+            origin: ID) -> Iterable[ID]:
+        """
+        :return: the list of protocols supported by the router
+        """
         for topic in topic_ids:
-            if topic not in self.pubsub.topics:
+            if topic not in self.pubsub.peer_topics:
                 continue
-            for peer_id in self.pubsub.peer_topics[topic]:
-                if peer_id in (from_peer_id, src_peer_id):
+            for peer_id_str in self.pubsub.peer_topics[topic]:
+                peer_id = id_b58_decode(peer_id_str)
+                if peer_id in (src, origin):
                     continue
                 # FIXME: Should change `self.pubsub.peers` to Dict[PeerID, ...]
                 if str(peer_id) not in self.pubsub.peers:
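Read together, these hunks reduce `publish` to: wrap the message in one `rpc_pb2.RPC` and write it to every peer yielded by `_get_peers_to_send`. Since the hunk above cuts off inside the generator, here is a sketch of what it effectively yields; the trailing `continue`/`yield` lines are an assumption based on the `Iterable[ID]` return type, and the shapes of `self.pubsub.peer_topics` and `self.pubsub.peers` are taken from the FIXME comments in the diff.

    # Illustrative sketch, not the committed implementation verbatim.
    def _get_peers_to_send(self, topic_ids, src, origin):
        for topic in topic_ids:
            # Skip topics no known peer is subscribed to.
            if topic not in self.pubsub.peer_topics:
                continue
            for peer_id_str in self.pubsub.peer_topics[topic]:
                peer_id = id_b58_decode(peer_id_str)
                # Never forward back to the forwarder or the original author.
                if peer_id in (src, origin):
                    continue
                # Skip peers we have no open stream to.
                if str(peer_id) not in self.pubsub.peers:
                    continue
                yield peer_id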
@@ -45,7 +45,8 @@ class Pubsub:
     outgoing_messages: asyncio.Queue()
     seen_messages: LRU
     my_topics: Dict[str, asyncio.Queue]
-    peer_topics: Dict[str, List[ID]]
+    # FIXME: Should be changed to `Dict[str, List[ID]]`
+    peer_topics: Dict[str, List[str]]
     # FIXME: Should be changed to `Dict[ID, INetStream]`
     peers: Dict[str, INetStream]
     # NOTE: Be sure it is increased atomically everytime.
@@ -320,23 +321,34 @@ class Pubsub:
         # Write message to stream
         await stream.write(rpc_msg)

-    def list_peers(self, topic_id: str) -> Tuple[ID]:
+    def list_peers(self, topic_id: str) -> Tuple[ID, ...]:
         return

     async def publish(self, topic_id: str, data: bytes) -> None:
+        """
+        Publish data to a topic
+        :param topic_id: topic which we are going to publish the data to
+        :param data: data which we are publishing
+        """
         msg = rpc_pb2.Message(
             data=data,
             topicIDs=[topic_id],
+            # Origin is myself.
             from_id=self.host.get_id().to_bytes(),
             seqno=self._next_seqno(),
         )

         # TODO: Sign with our signing key

-        self.push_msg(self.host.get_id(), msg)
+        await self.push_msg(self.host.get_id(), msg)

-    async def push_msg(self, src: ID, msg: rpc_pb2.Message):
-        # TODO: - Check if the source is in the blacklist. If yes, reject.
+    async def push_msg(self, src: ID, msg: rpc_pb2.Message) -> None:
+        """
+        Push a pubsub message to others.
+        :param src: the peer who forward us the message.
+        :param msg: the message we are going to push out.
+        """
+        # TODO: - Check if the `source` is in the blacklist. If yes, reject.

         # TODO: - Check if the `from` is in the blacklist. If yes, reject.
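Two things to note in this hunk: `publish` now awaits `push_msg` (previously the coroutine was created but never awaited, so the message was never actually pushed), and the `from_id`/`seqno` fields are filled in centrally. A minimal caller-side sketch, assuming a `pubsub` instance wired to a router the way the test further down constructs one:

    # Hypothetical application code, not part of the commit.
    await pubsub.publish("my_topic", b"some data")
    # Internally this builds rpc_pb2.Message(data=..., topicIDs=["my_topic"],
    # from_id=<our peer id bytes>, seqno=<next 8-byte counter value>) and then
    # awaits push_msg(self.host.get_id(), msg), which forwards via the router.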
@@ -352,6 +364,9 @@ class Pubsub:
         await self.router.publish(src, msg)

     def _next_seqno(self) -> bytes:
+        """
+        Make the next message sequence id.
+        """
         self.counter += 1
         return self.counter.to_bytes(8, 'big')

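`_next_seqno` is simply the instance counter encoded as eight big-endian bytes, for example:

    # Encoding used by _next_seqno, shown for counter values 1 and 2:
    assert (1).to_bytes(8, 'big') == b'\x00\x00\x00\x00\x00\x00\x00\x01'
    assert (2).to_bytes(8, 'big') == b'\x00\x00\x00\x00\x00\x00\x00\x02'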
@@ -42,11 +42,11 @@ class IPubsubRouter(ABC):
         """

     @abstractmethod
-    def publish(self, sender_peer_id, rpc_message):
+    async def publish(self, src, pubsub_msg) -> None:
         """
         Invoked to forward a new message that has been validated
-        :param sender_peer_id: peer_id of message sender
-        :param rpc_message: message to forward
+        :param src: peer_id of message sender
+        :param pubsub_msg: pubsub message to forward
         """

     @abstractmethod
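Because the abstract method is now a coroutine, every concrete router has to declare `publish` with `async def`, and callers have to `await` it. A minimal conforming stub, purely for illustration and omitting the interface's other abstract methods:

    class DropAllRouter(IPubsubRouter):
        async def publish(self, src, pubsub_msg) -> None:
            # Conforms to the new signature but simply drops the message; the
            # remaining abstract methods would still need implementations
            # before this class could actually be instantiated.
            return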
@@ -8,7 +8,12 @@ from libp2p.peer.peerinfo import info_from_p2p_addr
 from libp2p.pubsub.pb import rpc_pb2
 from libp2p.pubsub.pubsub import Pubsub
 from libp2p.pubsub.floodsub import FloodSub
-from utils import message_id_generator, generate_RPC_packet
+from .utils import (
+    make_pubsub_msg,
+    message_id_generator,
+    generate_RPC_packet,
+)

 # pylint: disable=too-many-locals
@@ -20,6 +25,7 @@ async def connect(node1, node2):
     info = info_from_p2p_addr(addr)
     await node1.connect(info)

+
 @pytest.mark.asyncio
 async def test_simple_two_nodes():
     node_a = await new_node(transport_opt=["/ip4/127.0.0.1/tcp/0"])
@@ -29,6 +35,8 @@ async def test_simple_two_nodes():
     await node_b.get_network().listen(multiaddr.Multiaddr("/ip4/127.0.0.1/tcp/0"))

     supported_protocols = ["/floodsub/1.0.0"]
+    topic = "my_topic"
+    data = b"some data"

     floodsub_a = FloodSub(supported_protocols)
     pubsub_a = Pubsub(node_a, floodsub_a, "a")
@@ -38,26 +46,30 @@ async def test_simple_two_nodes():
     await connect(node_a, node_b)

     await asyncio.sleep(0.25)
-    qb = await pubsub_b.subscribe("my_topic")
+    sub_b = await pubsub_b.subscribe(topic)

     await asyncio.sleep(0.25)

-    node_a_id = str(node_a.get_id())
-
     next_msg_id_func = message_id_generator(0)
-    msg = generate_RPC_packet(node_a_id, ["my_topic"], "some data", next_msg_id_func())
-    await floodsub_a.publish(node_a_id, msg.SerializeToString())
+    msg = make_pubsub_msg(
+        origin_id=node_a.get_id(),
+        topic_ids=[topic],
+        data=data,
+        seqno=next_msg_id_func(),
+    )
+    await floodsub_a.publish(node_a.get_id(), msg)
     await asyncio.sleep(0.25)

-    res_b = await qb.get()
+    res_b = await sub_b.get()

     # Check that the msg received by node_b is the same
     # as the message sent by node_a
-    assert res_b.SerializeToString() == msg.publish[0].SerializeToString()
+    assert res_b.SerializeToString() == msg.SerializeToString()

     # Success, terminate pending tasks.
     await cleanup()


 @pytest.mark.asyncio
 async def test_lru_cache_two_nodes():
     # two nodes with cache_size of 4
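The final assertion changes because the helper changed shape: `generate_RPC_packet` returned a full `rpc_pb2.RPC` wrapper, so the old test had to dig out `msg.publish[0]`, while `make_pubsub_msg` returns the bare `rpc_pb2.Message`, which is exactly what the subscriber's queue hands back:

    # Old helper:  msg = RPC(publish=[Message(...)])  ->  compare msg.publish[0]
    # New helper:  msg = Message(...)                 ->  compare msg directly
    assert res_b.SerializeToString() == msg.SerializeToString()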
@@ -3,9 +3,14 @@ import multiaddr
 import uuid
 import random
 import struct
+from typing import (
+    Sequence,
+)

 from libp2p import new_node
 from libp2p.pubsub.pb import rpc_pb2
 from libp2p.peer.peerinfo import info_from_p2p_addr
+from libp2p.peer.id import ID
 from libp2p.pubsub.pubsub import Pubsub
 from libp2p.pubsub.gossipsub import GossipSub
@@ -29,6 +34,20 @@ def message_id_generator(start_val):

     return generator


+def make_pubsub_msg(
+        origin_id: ID,
+        topic_ids: Sequence[str],
+        data: bytes,
+        seqno: bytes) -> rpc_pb2.Message:
+    return rpc_pb2.Message(
+        from_id=origin_id.to_bytes(),
+        seqno=seqno,
+        data=data,
+        topicIDs=list(topic_ids),
+    )
+
+
 def generate_RPC_packet(origin_id, topics, msg_content, msg_id):
     """
     Generate RPC packet to send over wire
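A quick sanity check of the new helper; the raw `ID(b"QmPeerA")` construction here is only for illustration (the tests obtain IDs via `node.get_id()` instead):

    msg = make_pubsub_msg(
        origin_id=ID(b"QmPeerA"),  # hypothetical peer id bytes
        topic_ids=["my_topic"],
        data=b"some data",
        seqno=(1).to_bytes(8, 'big'),
    )
    assert list(msg.topicIDs) == ["my_topic"]
    # The byte-level round trip is what the test's assertion relies on:
    assert rpc_pb2.Message.FromString(msg.SerializeToString()) == msg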
@@ -50,6 +69,7 @@ def generate_RPC_packet(origin_id, topics, msg_content, msg_id):
     packet.publish.extend([message])
     return packet

+
 async def connect(node1, node2):
     """
     Connect node1 to node2