import functools
import logging
import time
from typing import (
    TYPE_CHECKING,
    Callable,
    Dict,
    KeysView,
    List,
    NamedTuple,
    Optional,
    Set,
    Tuple,
    cast,
)

from async_service import Service
import base58
from lru import LRU
import trio

from libp2p.crypto.keys import PrivateKey
from libp2p.exceptions import ParseError, ValidationError
from libp2p.host.host_interface import IHost
from libp2p.io.exceptions import IncompleteReadError
from libp2p.network.exceptions import SwarmException
from libp2p.network.stream.exceptions import StreamClosed, StreamEOF, StreamReset
from libp2p.network.stream.net_stream_interface import INetStream
from libp2p.peer.id import ID
from libp2p.typing import TProtocol
from libp2p.utils import encode_varint_prefixed, read_varint_prefixed_bytes

from .abc import IPubsub, ISubscriptionAPI
from .pb import rpc_pb2
from .pubsub_notifee import PubsubNotifee
from .subscription import TrioSubscriptionAPI
from .typing import AsyncValidatorFn, SyncValidatorFn, ValidatorFn
from .validators import PUBSUB_SIGNING_PREFIX, signature_validator

if TYPE_CHECKING:
    from .abc import IPubsubRouter  # noqa: F401
    from typing import Any  # noqa: F401


# Ref: https://github.com/libp2p/go-libp2p-pubsub/blob/40e1c94708658b155f30cf99e4574f384756d83c/topic.go#L97  # noqa: E501
SUBSCRIPTION_CHANNEL_SIZE = 32

logger = logging.getLogger("libp2p.pubsub")


def get_peer_and_seqno_msg_id(msg: rpc_pb2.Message) -> bytes:
    # NOTE: `string(from, seqno)` in Go
    return msg.seqno + msg.from_id
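

# The `msg_id_constructor` parameter of `Pubsub` lets callers swap in other
# message-ID schemes. For illustration only (this helper is hypothetical, not
# part of the module), a content-addressed variant could hash the payload
# instead of combining `seqno` and `from_id`:
#
#     import hashlib
#
#     def get_content_addressed_msg_id(msg: rpc_pb2.Message) -> bytes:
#         return hashlib.sha256(msg.data).digest()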


class TopicValidator(NamedTuple):
    validator: ValidatorFn
    is_async: bool
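

# Validator sketch (illustrative assumption, not shipped with this module):
# a validator receives the forwarding peer and the message and returns a bool.
#
#     def deny_empty(msg_forwarder: ID, msg: rpc_pb2.Message) -> bool:
#         return len(msg.data) > 0
#
#     async def check_async(msg_forwarder: ID, msg: rpc_pb2.Message) -> bool:
#         await trio.sleep(0)  # e.g. consult an external service first
#         return True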


class Pubsub(Service, IPubsub):

    host: IHost
    router: "IPubsubRouter"

    peer_receive_channel: "trio.MemoryReceiveChannel[ID]"
    dead_peer_receive_channel: "trio.MemoryReceiveChannel[ID]"

    seen_messages: LRU

    subscribed_topics_send: Dict[str, "trio.MemorySendChannel[rpc_pb2.Message]"]
    subscribed_topics_receive: Dict[str, "TrioSubscriptionAPI"]

    peer_topics: Dict[str, Set[ID]]
    peers: Dict[ID, INetStream]

    topic_validators: Dict[str, TopicValidator]

    counter: int  # uint64

    # Indicates whether we should enforce signature verification
    strict_signing: bool
    sign_key: Optional[PrivateKey]

    event_handle_peer_queue_started: trio.Event
    event_handle_dead_peer_queue_started: trio.Event

    def __init__(
        self,
        host: IHost,
        router: "IPubsubRouter",
        cache_size: Optional[int] = None,
        strict_signing: bool = True,
        msg_id_constructor: Callable[
            [rpc_pb2.Message], bytes
        ] = get_peer_and_seqno_msg_id,
    ) -> None:
        """
        Construct a new Pubsub object, which is responsible for handling all
        pubsub-related messages and relaying them as appropriate to the
        pubsub router (which is responsible for choosing the peers to send
        messages to).

        Since the logic for choosing peers to send pubsub messages to lives
        in the router, the same Pubsub implementation can back floodsub,
        gossipsub, etc.
        """
        self.host = host
        self.router = router

        self._msg_id_constructor = msg_id_constructor

        # Attach this new Pubsub object to the router
        self.router.attach(self)

        peer_send, peer_receive = trio.open_memory_channel[ID](0)
        dead_peer_send, dead_peer_receive = trio.open_memory_channel[ID](0)
        # Only keep the receive channels in `Pubsub`;
        # therefore, we can only close from the receive side.
        self.peer_receive_channel = peer_receive
        self.dead_peer_receive_channel = dead_peer_receive
        # Register a notifee
        self.host.get_network().register_notifee(
            PubsubNotifee(peer_send, dead_peer_send)
        )

        # Register stream handlers for each pubsub router protocol to handle
        # the pubsub streams opened on those protocols
        for protocol in router.get_protocols():
            self.host.set_stream_handler(protocol, self.stream_handler)

        # Keep track of seen messages in an LRU cache
        if cache_size is None:
            self.cache_size = 128
        else:
            self.cache_size = cache_size

        self.strict_signing = strict_signing
        if strict_signing:
            self.sign_key = self.host.get_private_key()
        else:
            self.sign_key = None

        self.seen_messages = LRU(self.cache_size)

        # Map from each topic we are subscribed to, to the channel used to
        # deliver messages received on that topic
        self.subscribed_topics_send = {}
        self.subscribed_topics_receive = {}

        # Map of topic to peers, to keep track of what peers are subscribed to
        self.peer_topics = {}

        # Create peers map, which maps peer id to stream (to a given peer)
        self.peers = {}

        # Map of topic to topic validator
        self.topic_validators = {}

        self.counter = int(time.time())

        self.event_handle_peer_queue_started = trio.Event()
        self.event_handle_dead_peer_queue_started = trio.Event()

    async def run(self) -> None:
        self.manager.run_daemon_task(self.handle_peer_queue)
        self.manager.run_daemon_task(self.handle_dead_peer_queue)
        await self.manager.wait_finished()

    @property
    def my_id(self) -> ID:
        return self.host.get_id()

    @property
    def protocols(self) -> Tuple[TProtocol, ...]:
        return tuple(self.router.get_protocols())

    @property
    def topic_ids(self) -> KeysView[str]:
        return self.subscribed_topics_receive.keys()
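
    # Usage sketch (assuming a constructed `host` and `router`): as a
    # `Service`, `Pubsub` is typically run under an async-service manager:
    #
    #     from async_service import background_trio_service
    #
    #     pubsub = Pubsub(host, router)
    #     async with background_trio_service(pubsub):
    #         await pubsub.wait_until_ready()
    #         ...  # subscribe/publish here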

    def get_hello_packet(self) -> rpc_pb2.RPC:
        """Generate a subscription message listing all topics we are
        subscribed to; the hello packet is only sent if we have subscribed
        topics."""
        packet = rpc_pb2.RPC()
        for topic_id in self.topic_ids:
            packet.subscriptions.extend(
                [rpc_pb2.RPC.SubOpts(subscribe=True, topicid=topic_id)]
            )
        return packet
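
    # Wire note: RPC packets, including this hello packet, are written to
    # streams varint-length-prefixed, as in `_handle_new_peer` below:
    #
    #     await stream.write(encode_varint_prefixed(packet.SerializeToString()))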

    async def continuously_read_stream(self, stream: INetStream) -> None:
        """
        Read from the input stream in an infinite loop and process messages
        from other nodes.

        :param stream: stream to continuously read from
        """
        peer_id = stream.muxed_conn.peer_id

        while self.manager.is_running:
            incoming: bytes = await read_varint_prefixed_bytes(stream)
            rpc_incoming: rpc_pb2.RPC = rpc_pb2.RPC()
            rpc_incoming.ParseFromString(incoming)
            if rpc_incoming.publish:
                # Deal with RPC.publish
                for msg in rpc_incoming.publish:
                    if not self._is_subscribed_to_msg(msg):
                        continue
                    logger.debug(
                        "received `publish` message %s from peer %s", msg, peer_id
                    )
                    self.manager.run_task(self.push_msg, peer_id, msg)

            if rpc_incoming.subscriptions:
                # Deal with RPC.subscriptions.
                # We don't need to relay the subscription to our peers because
                # a given node only needs its peers to know that it is
                # subscribed to the topic (it doesn't need everyone to know).
                for message in rpc_incoming.subscriptions:
                    logger.debug(
                        "received `subscriptions` message %s from peer %s",
                        message,
                        peer_id,
                    )
                    self.handle_subscription(peer_id, message)

            # NOTE: Check if `rpc_incoming.control` is set through `HasField`.
            # This is necessary because `control` is an optional field in pb2.
            # Ref: https://developers.google.com/protocol-buffers/docs/reference/python-generated#singular-fields-proto2  # noqa: E501
            if rpc_incoming.HasField("control"):
                # Pass the rpc to the router so it can perform custom logic
                logger.debug(
                    "received `control` message %s from peer %s",
                    rpc_incoming.control,
                    peer_id,
                )
                await self.router.handle_rpc(rpc_incoming, peer_id)

    def set_topic_validator(
        self, topic: str, validator: ValidatorFn, is_async_validator: bool
    ) -> None:
        """
        Register a validator under the given topic. One topic can only have
        one validator.

        :param topic: the topic to register the validator under
        :param validator: the validator used to validate messages published to the topic
        :param is_async_validator: indicate if the validator is an asynchronous validator
        """
        self.topic_validators[topic] = TopicValidator(validator, is_async_validator)
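
    # Usage sketch (hypothetical validator, for illustration): register a
    # synchronous validator that rejects oversized messages on topic "blocks":
    #
    #     def max_size_validator(peer: ID, msg: rpc_pb2.Message) -> bool:
    #         return len(msg.data) <= 1024 * 1024
    #
    #     pubsub.set_topic_validator("blocks", max_size_validator, False)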

    def remove_topic_validator(self, topic: str) -> None:
        """
        Remove the validator from the given topic.

        :param topic: the topic to remove the validator from
        """
        self.topic_validators.pop(topic, None)

    def get_msg_validators(self, msg: rpc_pb2.Message) -> Tuple[TopicValidator, ...]:
        """
        Get all validators corresponding to the topics in the message.

        :param msg: the message published to the topic
        """
        return tuple(
            self.topic_validators[topic]
            for topic in msg.topicIDs
            if topic in self.topic_validators
        )

    async def stream_handler(self, stream: INetStream) -> None:
        """
        Stream handler for pubsub. Gets invoked whenever a new stream is
        created on one of the supported pubsub protocols.

        :param stream: newly created stream
        """
        peer_id = stream.muxed_conn.peer_id

        try:
            await self.continuously_read_stream(stream)
        except (StreamEOF, StreamReset, ParseError, IncompleteReadError) as error:
            logger.debug(
                "failed to read from peer %s, error=%s; "
                "closing the stream and removing the peer from record",
                peer_id,
                error,
            )
            await stream.reset()
            self._handle_dead_peer(peer_id)

    async def wait_until_ready(self) -> None:
        await self.event_handle_peer_queue_started.wait()
        await self.event_handle_dead_peer_queue_started.wait()

    async def _handle_new_peer(self, peer_id: ID) -> None:
        try:
            stream: INetStream = await self.host.new_stream(peer_id, self.protocols)
        except SwarmException as error:
            logger.debug("failed to add new peer %s, error %s", peer_id, error)
            return

        # Send hello packet
        hello = self.get_hello_packet()
        try:
            await stream.write(encode_varint_prefixed(hello.SerializeToString()))
        except StreamClosed:
            logger.debug("failed to add new peer %s: stream closed", peer_id)
            return
        # TODO: Check if the peer is in the blacklist.
        try:
            self.router.add_peer(peer_id, stream.get_protocol())
        except Exception as error:
            logger.debug("failed to add new peer %s, error %s", peer_id, error)
            return

        self.peers[peer_id] = stream

        logger.debug("added new peer %s", peer_id)

    def _handle_dead_peer(self, peer_id: ID) -> None:
        if peer_id not in self.peers:
            return
        del self.peers[peer_id]

        for topic in self.peer_topics:
            if peer_id in self.peer_topics[topic]:
                self.peer_topics[topic].discard(peer_id)

        self.router.remove_peer(peer_id)

        logger.debug("removed dead peer %s", peer_id)

    async def handle_peer_queue(self) -> None:
        """Continuously read from the peer queue and, each time a new peer is
        found, open a stream to the peer using one of the pubsub protocols we
        support."""
        async with self.peer_receive_channel:
            self.event_handle_peer_queue_started.set()
            async for peer_id in self.peer_receive_channel:
                # Add Peer
                self.manager.run_task(self._handle_new_peer, peer_id)

    async def handle_dead_peer_queue(self) -> None:
        """Continuously read from the dead peer channel and, for each dead
        peer, close the stream to that peer and remove its info from the
        pubsub and the pubsub router."""
        async with self.dead_peer_receive_channel:
            self.event_handle_dead_peer_queue_started.set()
            async for peer_id in self.dead_peer_receive_channel:
                # Remove Peer
                self._handle_dead_peer(peer_id)

    def handle_subscription(
        self, origin_id: ID, sub_message: rpc_pb2.RPC.SubOpts
    ) -> None:
        """
        Handle an incoming subscription message from a peer. Update the
        internal mapping to mark the peer as subscribed or unsubscribed to
        topics as defined in the subscription message.

        :param origin_id: id of the peer who sent the subscription message
        :param sub_message: RPC.SubOpts
        """
        if sub_message.subscribe:
            if sub_message.topicid not in self.peer_topics:
                self.peer_topics[sub_message.topicid] = {origin_id}
            elif origin_id not in self.peer_topics[sub_message.topicid]:
                # Add peer to topic
                self.peer_topics[sub_message.topicid].add(origin_id)
        else:
            if sub_message.topicid in self.peer_topics:
                if origin_id in self.peer_topics[sub_message.topicid]:
                    self.peer_topics[sub_message.topicid].discard(origin_id)

    def notify_subscriptions(self, publish_message: rpc_pb2.Message) -> None:
        """
        Put an incoming message from a peer onto the subscription channels of
        the topics we are subscribed to.

        :param publish_message: RPC.Message format
        """
        # Check if this message has any topics that we are subscribed to
        for topic in publish_message.topicIDs:
            if topic in self.topic_ids:
                # We are subscribed to a topic this message was sent for,
                # so add the message to the subscription output queue
                # for each such topic
                try:
                    self.subscribed_topics_send[topic].send_nowait(publish_message)
                except trio.WouldBlock:
                    # Channel is full, ignore this message.
                    logger.warning(
                        "failed to deliver message to subscription for topic %s", topic
                    )

    async def subscribe(self, topic_id: str) -> ISubscriptionAPI:
        """
        Subscribe ourselves to a topic.

        :param topic_id: topic_id to subscribe to
        """

        logger.debug("subscribing to topic %s", topic_id)

        # Already subscribed
        if topic_id in self.topic_ids:
            return self.subscribed_topics_receive[topic_id]

        send_channel, receive_channel = trio.open_memory_channel[rpc_pb2.Message](
            SUBSCRIPTION_CHANNEL_SIZE
        )

        subscription = TrioSubscriptionAPI(
            receive_channel,
            unsubscribe_fn=functools.partial(self.unsubscribe, topic_id),
        )
        self.subscribed_topics_send[topic_id] = send_channel
        self.subscribed_topics_receive[topic_id] = subscription

        # Create subscribe message
        packet: rpc_pb2.RPC = rpc_pb2.RPC()
        packet.subscriptions.extend(
            [rpc_pb2.RPC.SubOpts(subscribe=True, topicid=topic_id)]
        )

        # Send out subscribe message to all peers
        await self.message_all_peers(packet.SerializeToString())

        # Tell router we are joining this topic
        await self.router.join(topic_id)

        # Return the subscription for messages on this topic
        return subscription
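
    # Usage sketch (assuming the subscription API supports async iteration):
    #
    #     subscription = await pubsub.subscribe("my-topic")
    #     async with subscription:
    #         async for message in subscription:
    #             print(message.data)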

    async def unsubscribe(self, topic_id: str) -> None:
        """
        Unsubscribe ourselves from a topic.

        :param topic_id: topic_id to unsubscribe from
        """

        logger.debug("unsubscribing from topic %s", topic_id)

        # Return if we already unsubscribed from the topic
        if topic_id not in self.topic_ids:
            return
        # Remove topic_id from the maps before yielding to the event loop
        send_channel = self.subscribed_topics_send[topic_id]
        del self.subscribed_topics_send[topic_id]
        del self.subscribed_topics_receive[topic_id]
        # Only close the send side
        await send_channel.aclose()

        # Create unsubscribe message
        packet: rpc_pb2.RPC = rpc_pb2.RPC()
        packet.subscriptions.extend(
            [rpc_pb2.RPC.SubOpts(subscribe=False, topicid=topic_id)]
        )

        # Send out unsubscribe message to all peers
        await self.message_all_peers(packet.SerializeToString())

        # Tell router we are leaving this topic
        await self.router.leave(topic_id)

    async def message_all_peers(self, raw_msg: bytes) -> None:
        """
        Broadcast a message to peers.

        :param raw_msg: raw contents of the message to broadcast
        """

        # Broadcast message. Iterate over a snapshot of the streams, since
        # `_handle_dead_peer` may remove entries from `self.peers` mid-loop.
        for stream in list(self.peers.values()):
            # Write message to stream
            try:
                await stream.write(encode_varint_prefixed(raw_msg))
            except StreamClosed:
                peer_id = stream.muxed_conn.peer_id
                logger.debug("failed to message peer %s: stream closed", peer_id)
                self._handle_dead_peer(peer_id)

    async def publish(self, topic_id: str, data: bytes) -> None:
        """
        Publish data to a topic.

        :param topic_id: topic to which we are publishing the data
        :param data: data which we are publishing
        """
        msg = rpc_pb2.Message(
            data=data,
            topicIDs=[topic_id],
            # We are the origin of this message.
            from_id=self.my_id.to_bytes(),
            seqno=self._next_seqno(),
        )

        if self.strict_signing:
            priv_key = self.sign_key
            signature = priv_key.sign(
                PUBSUB_SIGNING_PREFIX.encode() + msg.SerializeToString()
            )
            msg.key = self.host.get_public_key().serialize()
            msg.signature = signature

        await self.push_msg(self.my_id, msg)

        logger.debug("successfully published message %s", msg)

    async def validate_msg(self, msg_forwarder: ID, msg: rpc_pb2.Message) -> None:
        """
        Validate the received message.

        :param msg_forwarder: the peer who forwarded us the message.
        :param msg: the message.
        """
        sync_topic_validators: List[SyncValidatorFn] = []
        async_topic_validators: List[AsyncValidatorFn] = []
        for topic_validator in self.get_msg_validators(msg):
            if topic_validator.is_async:
                async_topic_validators.append(
                    cast(AsyncValidatorFn, topic_validator.validator)
                )
            else:
                sync_topic_validators.append(
                    cast(SyncValidatorFn, topic_validator.validator)
                )

        for validator in sync_topic_validators:
            if not validator(msg_forwarder, msg):
                raise ValidationError(f"Validation failed for msg={msg}")

        # TODO: Implement throttling of async validators

        if len(async_topic_validators) > 0:
            # TODO: Use a better pattern
            final_result: bool = True

            async def run_async_validator(func: AsyncValidatorFn) -> None:
                nonlocal final_result
                result = await func(msg_forwarder, msg)
                final_result = final_result and result

            # Run all async validators concurrently and AND their results.
            async with trio.open_nursery() as nursery:
                for async_validator in async_topic_validators:
                    nursery.start_soon(run_async_validator, async_validator)

            if not final_result:
                raise ValidationError(f"Validation failed for msg={msg}")

    async def push_msg(self, msg_forwarder: ID, msg: rpc_pb2.Message) -> None:
        """
        Push a pubsub message to others.

        :param msg_forwarder: the peer who forwarded us the message.
        :param msg: the message we are going to push out.
        """
        logger.debug("attempting to publish message %s", msg)

        # TODO: Check if the `source` is in the blacklist. If yes, reject.

        # TODO: Check if the `from` is in the blacklist. If yes, reject.

        # If the message has been seen before, return (i.e., don't process it further).
        if self._is_msg_seen(msg):
            return

        # Check if signing is required and, if so, validate the signature
        if self.strict_signing:
            # Validate the signature of the message
            if not signature_validator(msg):
                logger.debug("Signature validation failed for msg: %s", msg)
                return

        # Validate the message with registered topic validators.
        # If the validation fails, return (i.e., don't process the message further).
        try:
            await self.validate_msg(msg_forwarder, msg)
        except ValidationError:
            logger.debug(
                "Topic validation failed: sender %s sent data %s under topic IDs %s, "
                "origin %s:%s",
                msg_forwarder,
                msg.data.hex(),
                msg.topicIDs,
                base58.b58encode(msg.from_id).decode(),
                msg.seqno.hex(),
            )
            return

        self._mark_msg_seen(msg)
        self.notify_subscriptions(msg)
        await self.router.publish(msg_forwarder, msg)

    def _next_seqno(self) -> bytes:
        """Make the next message sequence id."""
        self.counter += 1
        return self.counter.to_bytes(8, "big")

    def _is_msg_seen(self, msg: rpc_pb2.Message) -> bool:
        msg_id = self._msg_id_constructor(msg)
        return msg_id in self.seen_messages

    def _mark_msg_seen(self, msg: rpc_pb2.Message) -> None:
        msg_id = self._msg_id_constructor(msg)
        # FIXME: Mapping `msg_id` to `1` is quite awkward. Should investigate
        # if there is a more appropriate way.
        self.seen_messages[msg_id] = 1

    def _is_subscribed_to_msg(self, msg: rpc_pb2.Message) -> bool:
        return any(topic in self.topic_ids for topic in msg.topicIDs)