import logging
from typing import Dict, List, Optional

from async_service import Service
from multiaddr import Multiaddr
import trio

from libp2p.io.abc import ReadWriteCloser
from libp2p.network.connection.net_connection_interface import INetConn
from libp2p.peer.id import ID
from libp2p.peer.peerstore import PeerStoreError
from libp2p.peer.peerstore_interface import IPeerStore
from libp2p.stream_muxer.abc import IMuxedConn
from libp2p.transport.exceptions import (
    MuxerUpgradeFailure,
    OpenConnectionError,
    SecurityUpgradeFailure,
)
from libp2p.transport.listener_interface import IListener
from libp2p.transport.transport_interface import ITransport
from libp2p.transport.upgrader import TransportUpgrader
from libp2p.typing import StreamHandlerFn

from ..exceptions import MultiError
from .connection.raw_connection import RawConnection
from .connection.swarm_connection import SwarmConn
from .exceptions import SwarmException
from .network_interface import INetworkService
from .notifee_interface import INotifee
from .stream.net_stream_interface import INetStream

logger = logging.getLogger("libp2p.network.swarm")


def create_default_stream_handler(network: INetworkService) -> StreamHandlerFn:
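    """Return the fallback stream handler: it holds an inbound stream open,
    without reading from it, until the network service has finished."""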
    async def stream_handler(stream: INetStream) -> None:
        await network.get_manager().wait_finished()

    return stream_handler


class Swarm(Service, INetworkService):
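    """The default `INetworkService` implementation.

    A `Swarm` dials peers through its `ITransport` and `TransportUpgrader`,
    keeps at most one muxed connection per peer in `self.connections`, and fans
    network events out to the registered `INotifee` objects.
    """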

    self_id: ID
    peerstore: IPeerStore
    upgrader: TransportUpgrader
    transport: ITransport
    # TODO: Connection and `peer_id` are in a 1-1 mapping in our implementation,
    #   whereas in Go one `peer_id` may point to multiple connections.
    connections: Dict[ID, INetConn]
    listeners: Dict[str, IListener]
    common_stream_handler: StreamHandlerFn
    listener_nursery: Optional[trio.Nursery]
    event_listener_nursery_created: trio.Event

    notifees: List[INotifee]

    def __init__(
        self,
        peer_id: ID,
        peerstore: IPeerStore,
        upgrader: TransportUpgrader,
        transport: ITransport,
    ):
        self.self_id = peer_id
        self.peerstore = peerstore
        self.upgrader = upgrader
        self.transport = transport
        self.connections = dict()
        self.listeners = dict()

        # Create the notifee list
        self.notifees = []

        # Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
        self.common_stream_handler = create_default_stream_handler(self)  # type: ignore

        self.listener_nursery = None
        self.event_listener_nursery_created = trio.Event()

    async def run(self) -> None:
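        """Run the swarm service: open the nursery that listener tasks are
        spawned into and keep it alive until the service is finished."""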
        async with trio.open_nursery() as nursery:
            # Create a nursery for listener tasks.
            self.listener_nursery = nursery
            self.event_listener_nursery_created.set()
            try:
                await self.manager.wait_finished()
            finally:
                # The service ended. Cancel listener tasks.
                nursery.cancel_scope.cancel()
                # Indicate that the nursery has been cancelled.
                self.listener_nursery = None

    def get_peer_id(self) -> ID:
        return self.self_id

    def set_stream_handler(self, stream_handler: StreamHandlerFn) -> None:
        # Ignore type here since mypy complains: https://github.com/python/mypy/issues/2427
        self.common_stream_handler = stream_handler  # type: ignore

    async def dial_peer(self, peer_id: ID) -> INetConn:
        """
        dial_peer tries to create a connection to peer_id.

        :param peer_id: id of the peer we want to dial
        :raises SwarmException: raised when an error occurs
        :return: network connection to the peer
        """

        if peer_id in self.connections:
            # If a muxed connection already exists for peer_id,
            # return the existing connection.
            return self.connections[peer_id]

        logger.debug("attempting to dial peer %s", peer_id)

        try:
            # Get peer info from peer store
            addrs = self.peerstore.addrs(peer_id)
        except PeerStoreError as error:
            raise SwarmException(f"No known addresses to peer {peer_id}") from error

        if not addrs:
            raise SwarmException(f"No known addresses to peer {peer_id}")

        exceptions: List[SwarmException] = []

        # Try all known addresses
        for multiaddr in addrs:
            try:
                return await self.dial_addr(multiaddr, peer_id)
            except SwarmException as e:
                exceptions.append(e)
                logger.debug(
                    "encountered swarm exception when trying to connect to %s, "
                    "trying next address...",
                    multiaddr,
                    exc_info=e,
                )

        # Tried all addresses, raising exception.
        raise SwarmException(
            f"unable to connect to {peer_id}, no addresses established a successful connection "
            "(with exceptions)"
        ) from MultiError(exceptions)

    async def dial_addr(self, addr: Multiaddr, peer_id: ID) -> INetConn:
        """
        dial_addr tries to create a connection to peer_id with addr.

        :param addr: the address we want to connect with
        :param peer_id: the peer we want to connect to
        :raises SwarmException: raised when an error occurs
        :return: network connection
        """

        # Dial peer (connection to peer does not yet exist)
        # Transport dials peer (gets back a raw conn)
        try:
            raw_conn = await self.transport.dial(addr)
        except OpenConnectionError as error:
            logger.debug("failed to dial peer %s over base transport", peer_id)
            raise SwarmException(
                f"failed to open connection to peer {peer_id}"
            ) from error

        logger.debug("dialed peer %s over base transport", peer_id)

        # Per https://discuss.libp2p.io/t/multistream-security/130, we first secure
        # the conn and then mux the conn
        try:
            secured_conn = await self.upgrader.upgrade_security(raw_conn, peer_id, True)
        except SecurityUpgradeFailure as error:
            logger.debug("failed to upgrade security for peer %s", peer_id)
            await raw_conn.close()
            raise SwarmException(
                f"failed to upgrade security for peer {peer_id}"
            ) from error

        logger.debug("upgraded security for peer %s", peer_id)

        try:
            muxed_conn = await self.upgrader.upgrade_connection(secured_conn, peer_id)
        except MuxerUpgradeFailure as error:
            logger.debug("failed to upgrade mux for peer %s", peer_id)
            await secured_conn.close()
            raise SwarmException(f"failed to upgrade mux for peer {peer_id}") from error

        logger.debug("upgraded mux for peer %s", peer_id)

        swarm_conn = await self.add_conn(muxed_conn)

        logger.debug("successfully dialed peer %s", peer_id)

        return swarm_conn

    async def new_stream(self, peer_id: ID) -> INetStream:
        """
        :param peer_id: peer_id of destination
        :raises SwarmException: raised when an error occurs
        :return: net stream instance
        """
        logger.debug("attempting to open a stream to peer %s", peer_id)

        swarm_conn = await self.dial_peer(peer_id)

        net_stream = await swarm_conn.new_stream()
        logger.debug("successfully opened a stream to peer %s", peer_id)
        return net_stream

    async def listen(self, *multiaddrs: Multiaddr) -> bool:
        """
        :param multiaddrs: one or many multiaddrs to start listening on
        :return: True if listening succeeded for at least one multiaddr

        For each multiaddr

            - Check if a listener for multiaddr exists already
            - If listener already exists, continue
            - Otherwise:

                - Capture multiaddr in conn handler
                - Have conn handler delegate to stream handler
                - Call listener listen with the multiaddr
                - Map multiaddr to listener
        """
        # We need to wait until `self.listener_nursery` is created.
        await self.event_listener_nursery_created.wait()

        for maddr in multiaddrs:
            if str(maddr) in self.listeners:
                return True

            async def conn_handler(read_write_closer: ReadWriteCloser) -> None:
                raw_conn = RawConnection(read_write_closer, False)

                # Per https://discuss.libp2p.io/t/multistream-security/130, we first secure
                # the conn and then mux the conn
                try:
                    # FIXME: This dummy `ID(b"")` for the remote peer is useless.
                    secured_conn = await self.upgrader.upgrade_security(
                        raw_conn, ID(b""), False
                    )
                except SecurityUpgradeFailure as error:
                    logger.debug("failed to upgrade security for peer at %s", maddr)
                    await raw_conn.close()
                    raise SwarmException(
                        f"failed to upgrade security for peer at {maddr}"
                    ) from error
                peer_id = secured_conn.get_remote_peer()

                try:
                    muxed_conn = await self.upgrader.upgrade_connection(
                        secured_conn, peer_id
                    )
                except MuxerUpgradeFailure as error:
                    logger.debug("failed to upgrade mux for peer %s", peer_id)
                    await secured_conn.close()
                    raise SwarmException(
                        f"failed to upgrade mux for peer {peer_id}"
                    ) from error
                logger.debug("upgraded mux for peer %s", peer_id)

                await self.add_conn(muxed_conn)
                logger.debug("successfully opened connection to peer %s", peer_id)

                # NOTE: This is an intentional barrier to prevent the handler from
                #   exiting and closing the connection.
                await self.manager.wait_finished()

            try:
                # Success
                listener = self.transport.create_listener(conn_handler)
                self.listeners[str(maddr)] = listener
                # TODO: `listener.listen` is not bound to the nursery. If we want to be
                #   I/O agnostic, we should change the API.
                if self.listener_nursery is None:
                    raise SwarmException("swarm instance hasn't been run")
                await listener.listen(maddr, self.listener_nursery)

                # Call notifiers since event occurred
                await self.notify_listen(maddr)

                return True
            except IOError:
                # Failed. Continue looping.
                logger.debug("failed to listen on %s", maddr)

        # No maddr succeeded
        return False

    async def close(self) -> None:
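        """Close the swarm by stopping its service manager, which cancels the
        listener nursery and the connection tasks this swarm has started."""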
        await self.manager.stop()
        logger.debug("swarm successfully closed")

    async def close_peer(self, peer_id: ID) -> None:
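        """Close the connection to peer_id, if one exists."""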
        if peer_id not in self.connections:
            return
        connection = self.connections[peer_id]
        # NOTE: `connection.close` will delete `peer_id` from `self.connections`
        #   and call `notify_disconnected` for us.
        await connection.close()

        logger.debug("successfully closed the connection to peer %s", peer_id)

    async def add_conn(self, muxed_conn: IMuxedConn) -> SwarmConn:
        """Add an `IMuxedConn` to `Swarm` as a `SwarmConn`, notify "connected",
        and start to monitor the connection for its new streams and
        disconnection."""
        swarm_conn = SwarmConn(muxed_conn, self)
        self.manager.run_task(muxed_conn.start)
        await muxed_conn.event_started.wait()
        self.manager.run_task(swarm_conn.start)
        await swarm_conn.event_started.wait()
        # Store muxed_conn with peer id
        self.connections[muxed_conn.peer_id] = swarm_conn
        # Call notifiers since event occurred
        await self.notify_connected(swarm_conn)
        return swarm_conn

    def remove_conn(self, swarm_conn: SwarmConn) -> None:
        """Simply remove the connection from Swarm's records, without closing
        the connection."""
        peer_id = swarm_conn.muxed_conn.peer_id
        if peer_id not in self.connections:
            return
        del self.connections[peer_id]

    # Notifee
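    # Each implemented notify_* method below fans the event out to every
    # registered notifee concurrently in a trio nursery and returns once all
    # notifees have handled it.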

    def register_notifee(self, notifee: INotifee) -> None:
        """
        :param notifee: object implementing the INotifee interface
        """
        self.notifees.append(notifee)

    async def notify_opened_stream(self, stream: INetStream) -> None:
        async with trio.open_nursery() as nursery:
            for notifee in self.notifees:
                nursery.start_soon(notifee.opened_stream, self, stream)

    async def notify_connected(self, conn: INetConn) -> None:
        async with trio.open_nursery() as nursery:
            for notifee in self.notifees:
                nursery.start_soon(notifee.connected, self, conn)

    async def notify_disconnected(self, conn: INetConn) -> None:
        async with trio.open_nursery() as nursery:
            for notifee in self.notifees:
                nursery.start_soon(notifee.disconnected, self, conn)

    async def notify_listen(self, multiaddr: Multiaddr) -> None:
        async with trio.open_nursery() as nursery:
            for notifee in self.notifees:
                nursery.start_soon(notifee.listen, self, multiaddr)

    async def notify_closed_stream(self, stream: INetStream) -> None:
        raise NotImplementedError

    async def notify_listen_close(self, multiaddr: Multiaddr) -> None:
        raise NotImplementedError