Add some logging to pubsub

NIC619 2019-09-14 21:54:26 +08:00
parent 786a03544c
commit 9bad7a61f0
3 changed files with 56 additions and 8 deletions

libp2p/pubsub/floodsub.py

@@ -1,3 +1,4 @@
+import logging
 from typing import Iterable, List, Sequence
 
 from libp2p.peer.id import ID
@@ -10,6 +11,9 @@ from .pubsub_router_interface import IPubsubRouter
 PROTOCOL_ID = TProtocol("/floodsub/1.0.0")
 
+logger = logging.getLogger("libp2p.pubsub.floodsub")
+logger.setLevel(logging.DEBUG)
+
 
 class FloodSub(IPubsubRouter):
@@ -75,6 +79,9 @@ class FloodSub(IPubsubRouter):
             origin=ID(pubsub_msg.from_id),
         )
         rpc_msg = rpc_pb2.RPC(publish=[pubsub_msg])
+
+        logger.debug("publishing message %s", pubsub_msg)
+
         for peer_id in peers_gen:
             stream = self.pubsub.peers[peer_id]
             # FIXME: We should add a `WriteMsg` similar to write delimited messages.
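
The routers only call `logger.setLevel(logging.DEBUG)` and attach no handler, so the new records stay invisible until the embedding application configures one. A minimal sketch of how a consumer might surface them (the handler and format below are illustrative choices, not part of this commit):

import logging

# Records from "libp2p.pubsub.floodsub" and "libp2p.pubsub.gossipsub"
# propagate to the parent "libp2p.pubsub" logger, so a single handler
# attached there surfaces all of the new debug output.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s"))
logging.getLogger("libp2p.pubsub").addHandler(handler)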

libp2p/pubsub/gossipsub.py

@@ -1,5 +1,6 @@
 from ast import literal_eval
 import asyncio
+import logging
 import random
 from typing import Any, Dict, Iterable, List, Sequence, Set
@@ -15,6 +16,9 @@ from .pubsub_router_interface import IPubsubRouter
 PROTOCOL_ID = TProtocol("/meshsub/1.0.0")
 
+logger = logging.getLogger("libp2p.pubsub.gossipsub")
+logger.setLevel(logging.DEBUG)
+
 
 class GossipSub(IPubsubRouter):
@@ -98,6 +102,8 @@ class GossipSub(IPubsubRouter):
         """
         self.pubsub = pubsub
 
+        logger.debug("attached to pubsub")
+
         # Start heartbeat now that we have a pubsub instance
         # TODO: Start after delay
         asyncio.ensure_future(self.heartbeat())
@@ -108,7 +114,7 @@ class GossipSub(IPubsubRouter):
         :param peer_id: id of peer to add
         :param protocol_id: router protocol the peer speaks, e.g., floodsub, gossipsub
         """
-        self.peers_to_protocol[peer_id] = protocol_id
+        logger.debug("adding peer %s with protocol %s", peer_id, protocol_id)
 
         if protocol_id == PROTOCOL_ID:
             self.peers_gossipsub.append(peer_id)
@@ -121,19 +127,23 @@
             # in multistream-select, or wrong versions.
             # TODO: Better handling
             raise Exception(f"protocol is not supported: protocol_id={protocol_id}")
+        self.peers_to_protocol[peer_id] = protocol_id
 
     def remove_peer(self, peer_id: ID) -> None:
         """
         Notifies the router that a peer has been disconnected
         :param peer_id: id of peer to remove
         """
-        del self.peers_to_protocol[peer_id]
+        logger.debug("removing peer %s", peer_id)
 
         if peer_id in self.peers_gossipsub:
             self.peers_gossipsub.remove(peer_id)
-        if peer_id in self.peers_gossipsub:
+        elif peer_id in self.peers_floodsub:
             self.peers_floodsub.remove(peer_id)
+
+        if peer_id in self.peers_to_protocol:
+            del self.peers_to_protocol[peer_id]
 
     async def handle_rpc(self, rpc: rpc_pb2.RPC, sender_peer_id: ID) -> None:
         """
         Invoked to process control messages in the RPC envelope.
@@ -169,6 +179,9 @@ class GossipSub(IPubsubRouter):
             origin=ID(pubsub_msg.from_id),
         )
         rpc_msg = rpc_pb2.RPC(publish=[pubsub_msg])
+
+        logger.debug("publishing message %s", pubsub_msg)
+
         for peer_id in peers_gen:
             stream = self.pubsub.peers[peer_id]
             # FIXME: We should add a `WriteMsg` similar to write delimited messages.
@@ -227,6 +240,8 @@
         subscription announcement
         :param topic: topic to join
         """
+        logger.debug("joining topic %s", topic)
+
         if topic in self.mesh:
             return
         # Create mesh[topic] if it does not yet exist
@@ -262,6 +277,8 @@
         It is invoked after the unsubscription announcement.
         :param topic: topic to leave
         """
+        logger.debug("leaving topic %s", topic)
+
         if topic not in self.mesh:
             return
         # Notify the peers in mesh[topic] with a PRUNE(topic) message
@@ -269,7 +286,7 @@
             await self.emit_prune(topic, peer)
 
         # Forget mesh[topic]
-        self.mesh.pop(topic, None)
+        del self.mesh[topic]
 
     # Heartbeat
 
     async def heartbeat(self) -> None:
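
These logging calls pass the message object as a %-style argument instead of interpolating it into the string. With the standard library logging module that substitution is lazy: the potentially large protobuf message is only rendered to text if a handler actually emits the record. A small illustration, using a made-up stand-in for the protobuf message:

import logging

logger = logging.getLogger("libp2p.pubsub.gossipsub")

class FakeMsg:
    def __str__(self) -> str:
        print("str() called")  # only runs when a handler formats the record
        return "<protobuf dump>"

msg = FakeMsg()
logger.debug("publishing message %s", msg)   # lazy: str(msg) is deferred
logger.debug("publishing message %s" % msg)  # eager: str(msg) runs even if the record is dropped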

libp2p/pubsub/pubsub.py

@@ -31,7 +31,8 @@ if TYPE_CHECKING:
     from .pubsub_router_interface import IPubsubRouter  # noqa: F401
 
-log = logging.getLogger(__name__)
+logger = logging.getLogger("libp2p.pubsub")
+logger.setLevel(logging.DEBUG)
 
 
 def get_msg_id(msg: rpc_pb2.Message) -> Tuple[bytes, bytes]:
@@ -161,6 +162,9 @@ class Pubsub:
                 for msg in rpc_incoming.publish:
                     if not self._is_subscribed_to_msg(msg):
                         continue
+                    logger.debug(
+                        "received `publish` message %s from peer %s", msg, peer_id
+                    )
                     asyncio.ensure_future(self.push_msg(msg_forwarder=peer_id, msg=msg))
 
             if rpc_incoming.subscriptions:
@@ -170,6 +174,11 @@ class Pubsub:
                 # to know that it is subscribed to the topic (doesn't
                 # need everyone to know)
                 for message in rpc_incoming.subscriptions:
+                    logger.debug(
+                        "received `subscriptions` message %s from peer %s",
+                        message,
+                        peer_id,
+                    )
                     self.handle_subscription(peer_id, message)
 
             # NOTE: Check if `rpc_incoming.control` is set through `HasField`.
@@ -177,6 +186,7 @@ class Pubsub:
             # Ref: https://developers.google.com/protocol-buffers/docs/reference/python-generated#singular-fields-proto2  # noqa: E501
             if rpc_incoming.HasField("control"):
                 # Pass rpc to router so router could perform custom logic
+                logger.debug("received `control` message %s from peer %s", rpc_incoming.control, peer_id)
                 await self.router.handle_rpc(rpc_incoming, peer_id)
 
             # Force context switch
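
Each of the three loggers in this commit pins its own level to DEBUG, so raising the level on the parent logger alone does not quiet the children. An application that wants less verbose pubsub output would raise every logger explicitly (or filter at its handler), for example:

import logging

# Child loggers with an explicit level ignore the parent's level,
# so each pubsub logger has to be adjusted individually.
for name in ("libp2p.pubsub", "libp2p.pubsub.floodsub", "libp2p.pubsub.gossipsub"):
    logging.getLogger(name).setLevel(logging.INFO)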
@@ -230,7 +240,13 @@ class Pubsub:
         await stream.write(encode_varint_prefixed(hello.SerializeToString()))
         # TODO: Check EOF of this stream.
         # TODO: Check if the peer in black list.
-        self.router.add_peer(peer_id, stream.get_protocol())
+        try:
+            self.router.add_peer(peer_id, stream.get_protocol())
+        except Exception as error:
+            logger.debug("fail to add new peer %s, error %s", peer_id, error)
+            return
+
+        logger.debug("added new peer %s", peer_id)
 
     async def handle_peer_queue(self) -> None:
         """
@@ -291,6 +307,8 @@ class Pubsub:
         :param topic_id: topic_id to subscribe to
         """
+        logger.debug("subscribing to topic %s", topic_id)
+
         # Already subscribed
         if topic_id in self.my_topics:
             return self.my_topics[topic_id]
@@ -319,6 +337,8 @@ class Pubsub:
         :param topic_id: topic_id to unsubscribe from
         """
+        logger.debug("unsubscribing from topic %s", topic_id)
+
         # Return if we already unsubscribed from the topic
         if topic_id not in self.my_topics:
             return
@@ -366,6 +386,8 @@ class Pubsub:
         await self.push_msg(self.host.get_id(), msg)
 
+        logger.debug("successfully published message %s", msg)
+
     async def validate_msg(self, msg_forwarder: ID, msg: rpc_pb2.Message) -> None:
         """
         Validate the received message
@@ -401,6 +423,8 @@ class Pubsub:
         :param msg_forwarder: the peer who forward us the message.
         :param msg: the message we are going to push out.
         """
+        logger.debug("attempting to publish message %s", msg)
+
         # TODO: Check if the `source` is in the blacklist. If yes, reject.
         # TODO: Check if the `from` is in the blacklist. If yes, reject.
@@ -415,14 +439,14 @@ class Pubsub:
         # Validate the signature of the message
         # FIXME: `signature_validator` is currently a stub.
         if not signature_validator(msg.key, msg.SerializeToString()):
-            log.debug("Signature validation failed for msg: %s", msg)
+            logger.debug("Signature validation failed for msg: %s", msg)
             return
 
         # Validate the message with registered topic validators.
         # If the validation failed, return(i.e., don't further process the message).
         try:
             await self.validate_msg(msg_forwarder, msg)
         except ValidationError:
-            log.debug(
+            logger.debug(
                 "Topic validation failed: sender %s sent data %s under topic IDs: %s",
                 f"{base58.b58encode(msg.from_id).decode()}:{msg.seqno.hex()}",
                 msg.data.hex(),