2019-02-25 09:58:23 +08:00
|
|
|
import asyncio
|
2019-09-11 07:02:29 +08:00
|
|
|
import logging
|
2019-09-17 21:54:20 +08:00
|
|
|
from typing import Dict, List, Optional
|
2019-02-25 09:58:23 +08:00
|
|
|
|
2019-07-28 14:06:29 +08:00
|
|
|
from multiaddr import Multiaddr
|
2019-01-10 02:38:56 +08:00
|
|
|
|
2019-09-12 00:38:12 +08:00
|
|
|
from libp2p.network.connection.net_connection_interface import INetConn
|
2019-07-31 19:31:58 +08:00
|
|
|
from libp2p.peer.id import ID
|
2019-09-03 16:49:00 +08:00
|
|
|
from libp2p.peer.peerstore import PeerStoreError
|
2019-08-11 16:47:54 +08:00
|
|
|
from libp2p.peer.peerstore_interface import IPeerStore
|
2019-09-12 00:38:12 +08:00
|
|
|
from libp2p.stream_muxer.abc import IMuxedConn
|
2019-09-20 16:17:13 +08:00
|
|
|
from libp2p.transport.exceptions import (
|
|
|
|
MuxerUpgradeFailure,
|
|
|
|
OpenConnectionError,
|
|
|
|
SecurityUpgradeFailure,
|
|
|
|
)
|
2019-07-28 14:06:29 +08:00
|
|
|
from libp2p.transport.listener_interface import IListener
|
2019-08-03 13:36:19 +08:00
|
|
|
from libp2p.transport.transport_interface import ITransport
|
|
|
|
from libp2p.transport.upgrader import TransportUpgrader
|
2019-09-17 21:54:20 +08:00
|
|
|
from libp2p.typing import StreamHandlerFn
|
2019-07-28 14:06:29 +08:00
|
|
|
|
2019-08-03 13:36:19 +08:00
|
|
|
from .connection.raw_connection import RawConnection
|
2019-09-12 00:38:12 +08:00
|
|
|
from .connection.swarm_connection import SwarmConn
|
2019-08-21 23:04:59 +08:00
|
|
|
from .exceptions import SwarmException
|
2019-07-28 22:30:51 +08:00
|
|
|
from .network_interface import INetwork
|
|
|
|
from .notifee_interface import INotifee
|
2019-07-30 15:31:02 +08:00
|
|
|
from .stream.net_stream_interface import INetStream
|
2019-07-28 22:30:51 +08:00
|
|
|
|
2019-09-11 07:02:29 +08:00
|
|
|
logger = logging.getLogger("libp2p.network.swarm")
|
|
|
|
|
2019-07-29 12:42:13 +08:00
|
|
|
|
2018-10-15 13:52:25 +08:00
|
|
|
class Swarm(INetwork):
    """The libp2p "network": owns the transport and its listeners, tracks the
    muxed connections to remote peers, and fans network events out to
    registered notifees."""

    # ID of the local peer.
    self_id: ID
    # Address book / metadata store for known peers.
    peerstore: IPeerStore
    # Upgrades raw transport conns to secured conns, then to muxed conns.
    upgrader: TransportUpgrader
    transport: ITransport
    # TODO: Connection and `peer_id` are 1-1 mapping in our implementation,
    # whereas in Go one `peer_id` may point to multiple connections.
    connections: Dict[ID, INetConn]
    # Active listeners, keyed by the string form of their multiaddr.
    listeners: Dict[str, IListener]
    # Handler applied to newly opened streams; `None` until one is set.
    common_stream_handler: Optional[StreamHandlerFn]

    # Objects notified of network events (connect, disconnect, listen, ...).
    notifees: List[INotifee]
|
|
|
|
|
2019-08-01 06:00:12 +08:00
|
|
|
def __init__(
|
|
|
|
self,
|
|
|
|
peer_id: ID,
|
2019-08-11 16:47:54 +08:00
|
|
|
peerstore: IPeerStore,
|
2019-08-01 06:00:12 +08:00
|
|
|
upgrader: TransportUpgrader,
|
|
|
|
transport: ITransport,
|
|
|
|
):
|
2018-11-29 23:06:40 +08:00
|
|
|
self.self_id = peer_id
|
2018-11-12 01:36:15 +08:00
|
|
|
self.peerstore = peerstore
|
2018-11-12 05:42:10 +08:00
|
|
|
self.upgrader = upgrader
|
2019-04-25 10:36:09 +08:00
|
|
|
self.transport = transport
|
2018-11-12 06:10:37 +08:00
|
|
|
self.connections = dict()
|
|
|
|
self.listeners = dict()
|
2018-10-15 13:52:25 +08:00
|
|
|
|
2019-03-01 07:18:58 +08:00
|
|
|
# Create Notifee array
|
|
|
|
self.notifees = []
|
|
|
|
|
2019-09-12 14:30:39 +08:00
|
|
|
self.common_stream_handler = None
|
2019-02-25 09:58:23 +08:00
|
|
|
|
2019-07-28 14:06:29 +08:00
|
|
|
    def get_peer_id(self) -> ID:
        """Return the peer ID of the local node this swarm belongs to."""
        return self.self_id
|
2018-11-19 00:22:56 +08:00
|
|
|
|
2019-09-12 14:30:39 +08:00
|
|
|
    def set_stream_handler(self, stream_handler: StreamHandlerFn) -> None:
        """Register the handler invoked for every newly opened stream.

        :param stream_handler: callback applied to each new ``INetStream``
        """
        self.common_stream_handler = stream_handler
|
2018-10-15 13:52:25 +08:00
|
|
|
|
2019-09-12 00:38:12 +08:00
|
|
|
async def dial_peer(self, peer_id: ID) -> INetConn:
|
2019-10-25 02:10:45 +08:00
|
|
|
"""
|
|
|
|
dial_peer try to create a connection to peer_id.
|
2019-10-24 14:41:10 +08:00
|
|
|
|
2018-11-29 23:06:40 +08:00
|
|
|
:param peer_id: peer if we want to dial
|
2019-08-21 23:04:59 +08:00
|
|
|
:raises SwarmException: raised when an error occurs
|
2018-11-29 23:06:40 +08:00
|
|
|
:return: muxed connection
|
2018-11-01 05:40:01 +08:00
|
|
|
"""
|
2018-11-29 23:06:40 +08:00
|
|
|
|
2019-09-03 14:12:16 +08:00
|
|
|
if peer_id in self.connections:
|
|
|
|
# If muxed connection already exists for peer_id,
|
|
|
|
# set muxed connection equal to existing muxed connection
|
|
|
|
return self.connections[peer_id]
|
|
|
|
|
2019-09-11 07:02:29 +08:00
|
|
|
logger.debug("attempting to dial peer %s", peer_id)
|
|
|
|
|
2019-09-03 14:12:16 +08:00
|
|
|
try:
|
|
|
|
# Get peer info from peer store
|
|
|
|
addrs = self.peerstore.addrs(peer_id)
|
|
|
|
except PeerStoreError:
|
|
|
|
raise SwarmException(f"No known addresses to peer {peer_id}")
|
2018-11-13 00:00:43 +08:00
|
|
|
|
|
|
|
if not addrs:
|
2019-09-03 14:12:16 +08:00
|
|
|
raise SwarmException(f"No known addresses to peer {peer_id}")
|
2018-11-13 00:00:43 +08:00
|
|
|
|
2019-10-14 06:29:28 +08:00
|
|
|
multiaddr = addrs[0]
|
2019-09-03 14:12:16 +08:00
|
|
|
# Dial peer (connection to peer does not yet exist)
|
|
|
|
# Transport dials peer (gets back a raw conn)
|
2019-09-19 21:24:01 +08:00
|
|
|
try:
|
|
|
|
raw_conn = await self.transport.dial(multiaddr)
|
|
|
|
except OpenConnectionError as error:
|
|
|
|
logger.debug("fail to dial peer %s over base transport", peer_id)
|
|
|
|
raise SwarmException(
|
|
|
|
"fail to open connection to peer %s", peer_id
|
|
|
|
) from error
|
2019-09-03 14:12:16 +08:00
|
|
|
|
2019-09-11 07:02:29 +08:00
|
|
|
logger.debug("dialed peer %s over base transport", peer_id)
|
|
|
|
|
2019-09-03 14:12:16 +08:00
|
|
|
# Per, https://discuss.libp2p.io/t/multistream-security/130, we first secure
|
|
|
|
# the conn and then mux the conn
|
|
|
|
try:
|
|
|
|
secured_conn = await self.upgrader.upgrade_security(raw_conn, peer_id, True)
|
|
|
|
except SecurityUpgradeFailure as error:
|
2019-09-15 15:09:58 +08:00
|
|
|
error_msg = "fail to upgrade security for peer %s"
|
|
|
|
logger.debug(error_msg, peer_id)
|
2019-09-03 14:12:16 +08:00
|
|
|
await raw_conn.close()
|
2019-09-15 15:09:58 +08:00
|
|
|
raise SwarmException(error_msg % peer_id) from error
|
2019-09-11 07:02:29 +08:00
|
|
|
|
|
|
|
logger.debug("upgraded security for peer %s", peer_id)
|
|
|
|
|
2019-09-03 14:12:16 +08:00
|
|
|
try:
|
2019-09-12 00:38:12 +08:00
|
|
|
muxed_conn = await self.upgrader.upgrade_connection(secured_conn, peer_id)
|
2019-09-03 14:12:16 +08:00
|
|
|
except MuxerUpgradeFailure as error:
|
2019-09-15 15:09:58 +08:00
|
|
|
error_msg = "fail to upgrade mux for peer %s"
|
|
|
|
logger.debug(error_msg, peer_id)
|
2019-09-03 14:12:16 +08:00
|
|
|
await secured_conn.close()
|
2019-09-15 15:09:58 +08:00
|
|
|
raise SwarmException(error_msg % peer_id) from error
|
2019-09-03 14:12:16 +08:00
|
|
|
|
2019-09-11 07:02:29 +08:00
|
|
|
logger.debug("upgraded mux for peer %s", peer_id)
|
|
|
|
|
2019-09-12 00:38:12 +08:00
|
|
|
swarm_conn = await self.add_conn(muxed_conn)
|
2019-03-01 07:18:58 +08:00
|
|
|
|
2019-09-11 07:02:29 +08:00
|
|
|
logger.debug("successfully dialed peer %s", peer_id)
|
|
|
|
|
2019-09-12 00:38:12 +08:00
|
|
|
return swarm_conn
|
2018-11-29 23:06:40 +08:00
|
|
|
|
2019-09-17 21:54:20 +08:00
|
|
|
async def new_stream(self, peer_id: ID) -> INetStream:
|
2018-11-29 23:06:40 +08:00
|
|
|
"""
|
|
|
|
:param peer_id: peer_id of destination
|
|
|
|
:param protocol_id: protocol id
|
2019-09-19 22:19:36 +08:00
|
|
|
:raises SwarmException: raised when an error occurs
|
2018-11-29 23:06:40 +08:00
|
|
|
:return: net stream instance
|
|
|
|
"""
|
2019-09-19 14:10:50 +08:00
|
|
|
logger.debug("attempting to open a stream to peer %s", peer_id)
|
2018-11-29 23:06:40 +08:00
|
|
|
|
2019-09-12 00:38:12 +08:00
|
|
|
swarm_conn = await self.dial_peer(peer_id)
|
2018-11-29 23:06:40 +08:00
|
|
|
|
2019-09-12 00:38:12 +08:00
|
|
|
net_stream = await swarm_conn.new_stream()
|
2019-09-12 14:30:39 +08:00
|
|
|
logger.debug("successfully opened a stream to peer %s", peer_id)
|
2018-11-12 05:42:10 +08:00
|
|
|
return net_stream
|
2018-10-22 01:51:55 +08:00
|
|
|
|
2019-08-11 16:47:54 +08:00
|
|
|
    async def listen(self, *multiaddrs: Multiaddr) -> bool:
        """
        :param multiaddrs: one or many multiaddrs to start listening on
        :return: true if at least one success

        For each multiaddr

            - Check if a listener for multiaddr exists already
            - If listener already exists, continue
            - Otherwise:

                - Capture multiaddr in conn handler
                - Have conn handler delegate to stream handler
                - Call listener listen with the multiaddr
                - Map multiaddr to listener
        """
        for maddr in multiaddrs:
            if str(maddr) in self.listeners:
                # A listener for this multiaddr already exists; count as success.
                return True

            # Handles each inbound transport connection: secure it, mux it,
            # then hand it to the swarm via `add_conn`.
            async def conn_handler(
                reader: asyncio.StreamReader, writer: asyncio.StreamWriter
            ) -> None:
                connection_info = writer.get_extra_info("peername")
                # TODO make a proper multiaddr
                peer_addr = f"/ip4/{connection_info[0]}/tcp/{connection_info[1]}"
                logger.debug("inbound connection at %s", peer_addr)
                # `False` marks this as the non-initiating (inbound) side.
                raw_conn = RawConnection(reader, writer, False)

                # Per, https://discuss.libp2p.io/t/multistream-security/130, we first secure
                # the conn and then mux the conn
                try:
                    # FIXME: This dummy `ID(b"")` for the remote peer is useless.
                    secured_conn = await self.upgrader.upgrade_security(
                        raw_conn, ID(b""), False
                    )
                except SecurityUpgradeFailure as error:
                    error_msg = "fail to upgrade security for peer at %s"
                    logger.debug(error_msg, peer_addr)
                    # Don't leak the raw connection on failure.
                    await raw_conn.close()
                    raise SwarmException(error_msg % peer_addr) from error
                # The security handshake reveals the remote peer's real ID.
                peer_id = secured_conn.get_remote_peer()

                logger.debug("upgraded security for peer at %s", peer_addr)
                logger.debug("identified peer at %s as %s", peer_addr, peer_id)

                try:
                    muxed_conn = await self.upgrader.upgrade_connection(
                        secured_conn, peer_id
                    )
                except MuxerUpgradeFailure as error:
                    error_msg = "fail to upgrade mux for peer %s"
                    logger.debug(error_msg, peer_id)
                    await secured_conn.close()
                    raise SwarmException(error_msg % peer_id) from error
                logger.debug("upgraded mux for peer %s", peer_id)

                # Record the connection and start serving its streams.
                await self.add_conn(muxed_conn)

                logger.debug("successfully opened connection to peer %s", peer_id)

            try:
                # Success
                listener = self.transport.create_listener(conn_handler)
                self.listeners[str(maddr)] = listener
                await listener.listen(maddr)

                # Call notifiers since event occurred
                self.notify_listen(maddr)

                return True
            except IOError:
                # Failed. Continue looping.
                logger.debug("fail to listen on: " + str(maddr))

        # No maddr succeeded
        return False
|
2018-11-12 05:42:10 +08:00
|
|
|
|
2019-08-29 21:38:06 +08:00
|
|
|
async def close(self) -> None:
|
|
|
|
# TODO: Prevent from new listeners and conns being added.
|
|
|
|
# Reference: https://github.com/libp2p/go-libp2p-swarm/blob/8be680aef8dea0a4497283f2f98470c2aeae6b65/swarm.go#L124-L134 # noqa: E501
|
2019-07-22 18:12:54 +08:00
|
|
|
|
2019-08-29 21:38:06 +08:00
|
|
|
# Close listeners
|
|
|
|
await asyncio.gather(
|
|
|
|
*[listener.close() for listener in self.listeners.values()]
|
|
|
|
)
|
|
|
|
|
|
|
|
# Close connections
|
|
|
|
await asyncio.gather(
|
|
|
|
*[connection.close() for connection in self.connections.values()]
|
|
|
|
)
|
|
|
|
|
2019-09-14 21:47:49 +08:00
|
|
|
logger.debug("swarm successfully closed")
|
|
|
|
|
2019-08-29 21:38:06 +08:00
|
|
|
async def close_peer(self, peer_id: ID) -> None:
|
2019-08-31 22:37:59 +08:00
|
|
|
if peer_id not in self.connections:
|
|
|
|
return
|
2019-09-15 20:44:48 +08:00
|
|
|
# TODO: Should be changed to close multisple connections,
|
|
|
|
# if we have several connections per peer in the future.
|
2019-08-29 21:38:06 +08:00
|
|
|
connection = self.connections[peer_id]
|
2019-11-20 23:06:37 +08:00
|
|
|
# NOTE: `connection.close` will delete `peer_id` from `self.connections`
|
2019-11-03 12:31:20 +08:00
|
|
|
# and `notify_disconnected` for us.
|
2019-08-29 21:38:06 +08:00
|
|
|
await connection.close()
|
2019-07-22 18:12:54 +08:00
|
|
|
|
2019-09-14 21:47:49 +08:00
|
|
|
logger.debug("successfully close the connection to peer %s", peer_id)
|
|
|
|
|
2019-09-12 00:38:12 +08:00
|
|
|
async def add_conn(self, muxed_conn: IMuxedConn) -> SwarmConn:
|
2019-10-24 14:41:10 +08:00
|
|
|
"""Add a `IMuxedConn` to `Swarm` as a `SwarmConn`, notify "connected",
|
|
|
|
and start to monitor the connection for its new streams and
|
|
|
|
disconnection."""
|
2019-09-12 00:38:12 +08:00
|
|
|
swarm_conn = SwarmConn(muxed_conn, self)
|
|
|
|
# Store muxed_conn with peer id
|
|
|
|
self.connections[muxed_conn.peer_id] = swarm_conn
|
|
|
|
# Call notifiers since event occurred
|
2019-09-23 15:01:58 +08:00
|
|
|
self.notify_connected(swarm_conn)
|
2019-09-12 00:38:12 +08:00
|
|
|
await swarm_conn.start()
|
|
|
|
return swarm_conn
|
2019-09-13 15:29:24 +08:00
|
|
|
|
|
|
|
def remove_conn(self, swarm_conn: SwarmConn) -> None:
|
2019-10-24 14:41:10 +08:00
|
|
|
"""Simply remove the connection from Swarm's records, without closing
|
|
|
|
the connection."""
|
2019-09-23 15:46:50 +08:00
|
|
|
peer_id = swarm_conn.muxed_conn.peer_id
|
2019-11-21 14:48:03 +08:00
|
|
|
if peer_id not in self.connections:
|
|
|
|
return
|
2019-09-13 15:29:24 +08:00
|
|
|
# TODO: Should be changed to remove the exact connection,
|
|
|
|
# if we have several connections per peer in the future.
|
2019-11-21 14:48:03 +08:00
|
|
|
del self.connections[peer_id]
|
2019-09-23 15:01:58 +08:00
|
|
|
|
|
|
|
# Notifee
|
|
|
|
|
|
|
|
# TODO: Remember the spawned notifying tasks and clean them up when closing.
|
|
|
|
|
|
|
|
    def register_notifee(self, notifee: INotifee) -> None:
        """
        Subscribe ``notifee`` to this swarm's network events.

        :param notifee: object implementing Notifee interface
        """
        self.notifees.append(notifee)
|
|
|
|
|
|
|
|
    def notify_opened_stream(self, stream: INetStream) -> None:
        # Fire-and-forget: `asyncio.gather` schedules the notifee coroutines
        # on the running loop; we deliberately do not await them here.
        asyncio.gather(
            *[notifee.opened_stream(self, stream) for notifee in self.notifees]
        )
|
|
|
|
|
|
|
|
# TODO: `notify_closed_stream`
|
|
|
|
|
|
|
|
    def notify_connected(self, conn: INetConn) -> None:
        # Fire-and-forget fan-out of the "connected" event to all notifees.
        asyncio.gather(*[notifee.connected(self, conn) for notifee in self.notifees])
|
|
|
|
|
|
|
|
    def notify_disconnected(self, conn: INetConn) -> None:
        # Fire-and-forget fan-out of the "disconnected" event to all notifees.
        asyncio.gather(*[notifee.disconnected(self, conn) for notifee in self.notifees])
|
|
|
|
|
|
|
|
    def notify_listen(self, multiaddr: Multiaddr) -> None:
        # Fire-and-forget fan-out of the "listen" event to all notifees.
        asyncio.gather(*[notifee.listen(self, multiaddr) for notifee in self.notifees])
|
|
|
|
|
|
|
|
# TODO: `notify_listen_close`
|