2019-12-06 17:06:37 +08:00
|
|
|
import functools
|
2019-08-01 00:09:09 +08:00
|
|
|
|
2019-03-24 01:52:02 +08:00
|
|
|
import pytest
|
2019-12-06 17:06:37 +08:00
|
|
|
import trio
|
2019-03-24 01:52:02 +08:00
|
|
|
|
2019-07-25 16:58:00 +08:00
|
|
|
from libp2p.peer.id import ID
|
2019-12-06 17:06:37 +08:00
|
|
|
from libp2p.tools.factories import PubsubFactory
|
2019-11-21 11:47:54 +08:00
|
|
|
from libp2p.tools.pubsub.floodsub_integration_test_settings import (
|
2019-07-27 11:49:03 +08:00
|
|
|
floodsub_protocol_pytest_params,
|
2019-08-03 13:36:19 +08:00
|
|
|
perform_test_from_obj,
|
2019-07-25 14:08:16 +08:00
|
|
|
)
|
2019-11-21 11:47:54 +08:00
|
|
|
from libp2p.tools.utils import connect
|
2019-03-24 01:52:02 +08:00
|
|
|
|
2019-07-25 16:58:00 +08:00
|
|
|
|
2019-12-06 17:06:37 +08:00
|
|
|
@pytest.mark.trio
async def test_simple_two_nodes():
    """A message published on a topic reaches the single subscribed peer intact."""
    async with PubsubFactory.create_batch_with_floodsub(2) as pubsubs_fsub:
        topic = "my_topic"
        payload = b"some data"

        await connect(pubsubs_fsub[0].host, pubsubs_fsub[1].host)
        await trio.sleep(0.25)

        subscription = await pubsubs_fsub[1].subscribe(topic)
        # Sleep to let a know of b's subscription
        await trio.sleep(0.25)

        await pubsubs_fsub[0].publish(topic, payload)

        message = await subscription.get()

        # The message node_b received must match what node_a sent:
        # originator id, payload bytes, and topic list.
        assert ID(message.from_id) == pubsubs_fsub[0].host.get_id()
        assert message.data == payload
        assert message.topicIDs == [topic]
|
2019-04-02 04:23:20 +08:00
|
|
|
|
2019-03-24 01:52:02 +08:00
|
|
|
|
2019-12-06 17:06:37 +08:00
|
|
|
@pytest.mark.trio
async def test_lru_cache_two_nodes(monkeypatch):
    """Seen-message cache of size 4 suppresses repeats until they are evicted."""
    # two nodes with cache_size of 4
    async with PubsubFactory.create_batch_with_floodsub(
        2, cache_size=4
    ) as pubsubs_fsub:
        # node_a publishes these indices in order; node_b should only see an
        # index the first time it appears while it is absent from the cache.
        published_indices = [1, 1, 2, 1, 3, 1, 4, 1, 5, 1]
        expected_indices = [1, 2, 3, 4, 5, 1]

        topic = "my_topic"

        # Key the cache by message data (originally `(msg.seqno, msg.from_id)`)
        # so the test can steer `msg_id` directly through the payload.
        def fake_msg_id(msg):
            return (msg.data, msg.from_id)

        import libp2p.pubsub.pubsub

        monkeypatch.setattr(libp2p.pubsub.pubsub, "get_msg_id", fake_msg_id)

        await connect(pubsubs_fsub[0].host, pubsubs_fsub[1].host)
        await trio.sleep(0.25)

        subscription = await pubsubs_fsub[1].subscribe(topic)
        await trio.sleep(0.25)

        def _encode(i: int) -> bytes:
            # Serialize the index as a fixed-width 4-byte big-endian suffix.
            width = 4
            if i >= 2 ** (width * 8):
                raise ValueError("integer is too large to be serialized")
            return b"data" + i.to_bytes(width, "big")

        for idx in published_indices:
            await pubsubs_fsub[0].publish(topic, _encode(idx))
            await trio.sleep(0.25)

        for idx in expected_indices:
            received = await subscription.get()
            assert received.data == _encode(idx)
|
2019-07-25 14:08:16 +08:00
|
|
|
|
2019-04-06 09:46:18 +08:00
|
|
|
|
2019-08-01 06:00:12 +08:00
|
|
|
@pytest.mark.parametrize("test_case_obj", floodsub_protocol_pytest_params)
@pytest.mark.trio
@pytest.mark.slow
async def test_gossipsub_run_with_floodsub_tests(test_case_obj, is_host_secure):
    """Run each shared floodsub protocol scenario against a floodsub batch factory."""
    # Bind the security setting up front; the scenario runner supplies the
    # remaining factory arguments (e.g. the number of nodes) itself.
    make_pubsubs = functools.partial(
        PubsubFactory.create_batch_with_floodsub, is_secure=is_host_secure
    )
    await perform_test_from_obj(test_case_obj, make_pubsubs)
|