Compare commits
13 Commits
master
...
bee-movie-
Author | SHA1 | Date | |
---|---|---|---|
|
fa292ae7c8 | ||
|
4c2bf6873a | ||
|
7cc9ddda75 | ||
|
3b3ff61755 | ||
|
853be062a2 | ||
|
7b67c9cb2f | ||
|
f4fb71e0cf | ||
|
c1d011ac64 | ||
|
41b2a0c6d5 | ||
|
437e9e9ee6 | ||
|
b31773f00b | ||
|
2a5aa14c42 | ||
|
21bcf742a4 |
|
@ -1,23 +0,0 @@
|
|||
[bumpversion]
|
||||
current_version = 0.1.5
|
||||
commit = True
|
||||
tag = True
|
||||
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(-(?P<stage>[^.]*)\.(?P<devnum>\d+))?
|
||||
serialize =
|
||||
{major}.{minor}.{patch}-{stage}.{devnum}
|
||||
{major}.{minor}.{patch}
|
||||
|
||||
[bumpversion:part:stage]
|
||||
optional_value = stable
|
||||
first_value = stable
|
||||
values =
|
||||
alpha
|
||||
beta
|
||||
stable
|
||||
|
||||
[bumpversion:part:devnum]
|
||||
|
||||
[bumpversion:file:setup.py]
|
||||
search = version="{current_version}",
|
||||
replace = version="{new_version}",
|
||||
|
|
@ -1,77 +0,0 @@
|
|||
version: 2.0
|
||||
|
||||
# heavily inspired by https://raw.githubusercontent.com/pinax/pinax-wiki/6bd2a99ab6f702e300d708532a6d1d9aa638b9f8/.circleci/config.yml
|
||||
|
||||
common: &common
|
||||
working_directory: ~/repo
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: merge pull request base
|
||||
command: ./.circleci/merge_pr.sh
|
||||
- run:
|
||||
name: merge pull request base (2nd try)
|
||||
command: ./.circleci/merge_pr.sh
|
||||
when: on_fail
|
||||
- run:
|
||||
name: merge pull request base (3nd try)
|
||||
command: ./.circleci/merge_pr.sh
|
||||
when: on_fail
|
||||
- restore_cache:
|
||||
keys:
|
||||
- cache-{{ .Environment.CIRCLE_JOB }}-{{ checksum "setup.py" }}-{{ checksum "tox.ini" }}
|
||||
- run:
|
||||
name: install dependencies
|
||||
command: pip install --user tox
|
||||
- run:
|
||||
name: run tox
|
||||
command: ~/.local/bin/tox -r
|
||||
- save_cache:
|
||||
paths:
|
||||
- .hypothesis
|
||||
- .tox
|
||||
- ~/.cache/pip
|
||||
- ~/.local
|
||||
- ./eggs
|
||||
key: cache-{{ .Environment.CIRCLE_JOB }}-{{ checksum "setup.py" }}-{{ checksum "tox.ini" }}
|
||||
|
||||
jobs:
|
||||
docs:
|
||||
<<: *common
|
||||
docker:
|
||||
- image: circleci/python:3.6
|
||||
environment:
|
||||
TOXENV: docs
|
||||
lint:
|
||||
<<: *common
|
||||
docker:
|
||||
- image: circleci/python:3.6
|
||||
environment:
|
||||
TOXENV: lint
|
||||
py36-core:
|
||||
<<: *common
|
||||
docker:
|
||||
- image: circleci/python:3.6
|
||||
environment:
|
||||
TOXENV: py36-core
|
||||
py37-core:
|
||||
<<: *common
|
||||
docker:
|
||||
- image: circleci/python:3.7
|
||||
environment:
|
||||
TOXENV: py37-core
|
||||
pypy3-core:
|
||||
<<: *common
|
||||
docker:
|
||||
- image: pypy
|
||||
environment:
|
||||
TOXENV: pypy3-core
|
||||
workflows:
|
||||
version: 2
|
||||
test:
|
||||
jobs:
|
||||
- docs
|
||||
- lint
|
||||
- py36-core
|
||||
- py37-core
|
||||
- pypy3-core
|
|
@ -1,12 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
if [[ -n "${CIRCLE_PR_NUMBER}" ]]; then
|
||||
PR_INFO_URL=https://api.github.com/repos/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/pulls/$CIRCLE_PR_NUMBER
|
||||
PR_BASE_BRANCH=$(curl -L "$PR_INFO_URL" | python -c 'import json, sys; obj = json.load(sys.stdin); sys.stdout.write(obj["base"]["ref"])')
|
||||
git fetch origin +"$PR_BASE_BRANCH":circleci/pr-base
|
||||
# We need these config values or git complains when creating the
|
||||
# merge commit
|
||||
git config --global user.name "Circle CI"
|
||||
git config --global user.email "circleci@example.com"
|
||||
git merge --no-edit circleci/pr-base
|
||||
fi
|
38
.github/ISSUE_TEMPLATE.md
vendored
38
.github/ISSUE_TEMPLATE.md
vendored
|
@ -1,38 +0,0 @@
|
|||
_If this is a bug report, please fill in the following sections.
|
||||
If this is a feature request, delete and describe what you would like with examples._
|
||||
|
||||
## What was wrong?
|
||||
|
||||
### Code that produced the error
|
||||
|
||||
```py
|
||||
CODE_TO_REPRODUCE
|
||||
```
|
||||
|
||||
### Full error output
|
||||
|
||||
```sh
|
||||
ERROR_HERE
|
||||
```
|
||||
|
||||
### Expected Result
|
||||
|
||||
_This section may be deleted if the expectation is "don't crash"._
|
||||
|
||||
```sh
|
||||
EXPECTED_RESULT
|
||||
```
|
||||
|
||||
### Environment
|
||||
|
||||
```sh
|
||||
# run this:
|
||||
$ python -m eth_utils
|
||||
|
||||
# then copy the output here:
|
||||
OUTPUT_HERE
|
||||
```
|
||||
|
||||
## How can it be fixed?
|
||||
|
||||
Fill this section in if you know how this could or should be fixed.
|
21
.github/PULL_REQUEST_TEMPLATE.md
vendored
21
.github/PULL_REQUEST_TEMPLATE.md
vendored
|
@ -1,21 +0,0 @@
|
|||
## What was wrong?
|
||||
|
||||
Issue #
|
||||
|
||||
## How was it fixed?
|
||||
|
||||
Summary of approach.
|
||||
|
||||
### To-Do
|
||||
|
||||
[//]: # (Stay ahead of things, add list items here!)
|
||||
- [ ] Clean up commit history
|
||||
|
||||
[//]: # (For important changes that should go into the release notes please add a newsfragment file as explained here: https://github.com/libp2p/py-libp2p/blob/master/newsfragments/README.md)
|
||||
|
||||
[//]: # (See: https://py-libp2p.readthedocs.io/en/latest/contributing.html#pull-requests)
|
||||
- [ ] Add entry to the [release notes](https://github.com/libp2p/py-libp2p/blob/master/newsfragments/README.md)
|
||||
|
||||
#### Cute Animal Picture
|
||||
|
||||
![put a cute animal picture link inside the parentheses]()
|
157
.gitignore
vendored
157
.gitignore
vendored
|
@ -1,126 +1,29 @@
|
|||
# Byte-compiled / optimized / DLL files
|
||||
*.py[cod]
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
*.egg
|
||||
*.egg-info
|
||||
dist
|
||||
build
|
||||
eggs
|
||||
.eggs
|
||||
parts
|
||||
bin
|
||||
var
|
||||
sdist
|
||||
develop-eggs
|
||||
.installed.cfg
|
||||
lib
|
||||
lib64
|
||||
venv*
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
pip-wheel-metadata
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
.coverage
|
||||
.tox
|
||||
nosetests.xml
|
||||
htmlcov/
|
||||
.coverage.*
|
||||
coverage.xml
|
||||
*.cover
|
||||
.pytest_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Mr Developer
|
||||
.mr.developer.cfg
|
||||
.project
|
||||
.pydevproject
|
||||
|
||||
# Complexity
|
||||
output/*.html
|
||||
output/*/index.html
|
||||
|
||||
# Sphinx
|
||||
docs/_build
|
||||
docs/modules.rst
|
||||
docs/*.internal.rst
|
||||
docs/*._utils.*
|
||||
|
||||
# Hypothese Property base testing
|
||||
.hypothesis
|
||||
|
||||
# tox/pytest cache
|
||||
.cache
|
||||
|
||||
# Test output logs
|
||||
logs
|
||||
|
||||
### JetBrains template
|
||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
|
||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||
|
||||
# User-specific stuff:
|
||||
.idea/workspace.xml
|
||||
.idea/tasks.xml
|
||||
.idea/dictionaries
|
||||
.idea/vcs.xml
|
||||
.idea/jsLibraryMappings.xml
|
||||
|
||||
# Sensitive or high-churn files:
|
||||
.idea/dataSources.ids
|
||||
.idea/dataSources.xml
|
||||
.idea/dataSources.local.xml
|
||||
.idea/sqlDataSources.xml
|
||||
.idea/dynamic.xml
|
||||
.idea/uiDesigner.xml
|
||||
|
||||
# Gradle:
|
||||
.idea/gradle.xml
|
||||
.idea/libraries
|
||||
|
||||
# Mongo Explorer plugin:
|
||||
.idea/mongoSettings.xml
|
||||
|
||||
# VIM temp files
|
||||
*.sw[op]
|
||||
|
||||
# mypy
|
||||
.mypy_cache
|
||||
|
||||
## File-based project format:
|
||||
*.iws
|
||||
|
||||
## Plugin-specific files:
|
||||
|
||||
# IntelliJ
|
||||
/out/
|
||||
|
||||
# mpeltonen/sbt-idea plugin
|
||||
.idea_modules/
|
||||
|
||||
# JIRA plugin
|
||||
atlassian-ide-plugin.xml
|
||||
|
||||
# Crashlytics plugin (for Android Studio and IntelliJ)
|
||||
com_crashlytics_export_strings.xml
|
||||
crashlytics.properties
|
||||
crashlytics-build.properties
|
||||
fabric.properties
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
|
@ -128,6 +31,26 @@ fabric.properties
|
|||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
|
@ -140,6 +63,9 @@ instance/
|
|||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
|
@ -159,8 +85,10 @@ celerybeat-schedule
|
|||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
|
@ -172,5 +100,8 @@ env.bak/
|
|||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# vscode
|
||||
.vscode/
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
||||
# pycharm
|
||||
.idea/
|
||||
|
|
|
@ -1,48 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -o errexit
|
||||
set -o nounset
|
||||
set -o pipefail
|
||||
|
||||
PROJECT_ROOT=$(dirname $(dirname $(python -c 'import os, sys; sys.stdout.write(os.path.realpath(sys.argv[1]))' "$0")))
|
||||
|
||||
echo "What is your python module name?"
|
||||
read MODULE_NAME
|
||||
|
||||
echo "What is your pypi package name? (default: $MODULE_NAME)"
|
||||
read PYPI_INPUT
|
||||
PYPI_NAME=${PYPI_INPUT:-$MODULE_NAME}
|
||||
|
||||
echo "What is your github project name? (default: $PYPI_NAME)"
|
||||
read REPO_INPUT
|
||||
REPO_NAME=${REPO_INPUT:-$PYPI_NAME}
|
||||
|
||||
echo "What is your readthedocs.org project name? (default: $PYPI_NAME)"
|
||||
read RTD_INPUT
|
||||
RTD_NAME=${RTD_INPUT:-$PYPI_NAME}
|
||||
|
||||
echo "What is your project name (ex: at the top of the README)? (default: $REPO_NAME)"
|
||||
read PROJECT_INPUT
|
||||
PROJECT_NAME=${PROJECT_INPUT:-$REPO_NAME}
|
||||
|
||||
echo "What is a one-liner describing the project?"
|
||||
read SHORT_DESCRIPTION
|
||||
|
||||
_replace() {
|
||||
local find_cmd=(find "$PROJECT_ROOT" ! -perm -u=x ! -path '*/.git/*' -type f)
|
||||
|
||||
if [[ $(uname) == Darwin ]]; then
|
||||
"${find_cmd[@]}" -exec sed -i '' "$1" {} +
|
||||
else
|
||||
"${find_cmd[@]}" -exec sed -i "$1" {} +
|
||||
fi
|
||||
}
|
||||
_replace "s/<MODULE_NAME>/$MODULE_NAME/g"
|
||||
_replace "s/<PYPI_NAME>/$PYPI_NAME/g"
|
||||
_replace "s/<REPO_NAME>/$REPO_NAME/g"
|
||||
_replace "s/<RTD_NAME>/$RTD_NAME/g"
|
||||
_replace "s/<PROJECT_NAME>/$PROJECT_NAME/g"
|
||||
_replace "s/<SHORT_DESCRIPTION>/$SHORT_DESCRIPTION/g"
|
||||
|
||||
mkdir -p "$PROJECT_ROOT/$MODULE_NAME"
|
||||
touch "$PROJECT_ROOT/$MODULE_NAME/__init__.py"
|
|
@ -1,2 +0,0 @@
|
|||
TEMPLATE_DIR=$(dirname $(readlink -f "$0"))
|
||||
<"$TEMPLATE_DIR/template_vars.txt" "$TEMPLATE_DIR/fill_template_vars.sh"
|
|
@ -1,6 +0,0 @@
|
|||
libp2p
|
||||
libp2p
|
||||
py-libp2p
|
||||
py-libp2p
|
||||
py-libp2p
|
||||
The Python implementation of the libp2p networking stack
|
|
@ -1,30 +0,0 @@
|
|||
[pydocstyle]
|
||||
; All error codes found here:
|
||||
; http://www.pydocstyle.org/en/3.0.0/error_codes.html
|
||||
;
|
||||
; Ignored:
|
||||
; D1 - Missing docstring error codes
|
||||
;
|
||||
; Selected:
|
||||
; D2 - Whitespace error codes
|
||||
; D3 - Quote error codes
|
||||
; D4 - Content related error codes
|
||||
select=D2,D3,D4
|
||||
|
||||
; Extra ignores:
|
||||
; D200 - One-line docstring should fit on one line with quotes
|
||||
; D203 - 1 blank line required before class docstring
|
||||
; D204 - 1 blank line required after class docstring
|
||||
; D205 - 1 blank line required between summary line and description
|
||||
; D212 - Multi-line docstring summary should start at the first line
|
||||
; D302 - Use u""" for Unicode docstrings
|
||||
; D400 - First line should end with a period
|
||||
; D401 - First line should be in imperative mood
|
||||
; D412 - No blank lines allowed between a section header and its content
|
||||
add-ignore=D200,D203,D204,D205,D212,D302,D400,D401,D412
|
||||
|
||||
; Explanation:
|
||||
; D400 - Enabling this error code seems to make it a requirement that the first
|
||||
; sentence in a docstring is not split across two lines. It also makes it a
|
||||
; requirement that no docstring can have a multi-sentence description without a
|
||||
; summary line. Neither one of those requirements seem appropriate.
|
533
.pylintrc
Normal file
533
.pylintrc
Normal file
|
@ -0,0 +1,533 @@
|
|||
[MASTER]
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regex patterns to the blacklist. The
|
||||
# regex matches against base names, not paths.
|
||||
ignore-patterns=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Use multiple processes to speed up Pylint.
|
||||
jobs=1
|
||||
|
||||
# List of plugins (as comma separated values of python modules names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Specify a configuration file.
|
||||
rcfile=.pylintrc
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages
|
||||
suggestion-mode=yes
|
||||
|
||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
|
||||
confidence=
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once).You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
disable=print-statement,
|
||||
parameter-unpacking,
|
||||
unpacking-in-except,
|
||||
old-raise-syntax,
|
||||
backtick,
|
||||
long-suffix,
|
||||
old-ne-operator,
|
||||
old-octal-literal,
|
||||
import-star-module-level,
|
||||
non-ascii-bytes-literal,
|
||||
raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
locally-enabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
apply-builtin,
|
||||
basestring-builtin,
|
||||
buffer-builtin,
|
||||
cmp-builtin,
|
||||
coerce-builtin,
|
||||
execfile-builtin,
|
||||
file-builtin,
|
||||
long-builtin,
|
||||
raw_input-builtin,
|
||||
reduce-builtin,
|
||||
standarderror-builtin,
|
||||
unicode-builtin,
|
||||
xrange-builtin,
|
||||
coerce-method,
|
||||
delslice-method,
|
||||
getslice-method,
|
||||
setslice-method,
|
||||
no-absolute-import,
|
||||
old-division,
|
||||
dict-iter-method,
|
||||
dict-view-method,
|
||||
next-method-called,
|
||||
metaclass-assignment,
|
||||
indexing-exception,
|
||||
raising-string,
|
||||
reload-builtin,
|
||||
oct-method,
|
||||
hex-method,
|
||||
nonzero-method,
|
||||
cmp-method,
|
||||
input-builtin,
|
||||
round-builtin,
|
||||
intern-builtin,
|
||||
unichr-builtin,
|
||||
map-builtin-not-iterating,
|
||||
zip-builtin-not-iterating,
|
||||
range-builtin-not-iterating,
|
||||
filter-builtin-not-iterating,
|
||||
using-cmp-argument,
|
||||
eq-without-hash,
|
||||
div-method,
|
||||
idiv-method,
|
||||
rdiv-method,
|
||||
exception-message-attribute,
|
||||
invalid-str-codec,
|
||||
sys-max-int,
|
||||
bad-python3-import,
|
||||
deprecated-string-function,
|
||||
deprecated-str-translate-call,
|
||||
deprecated-itertools-function,
|
||||
deprecated-types-field,
|
||||
next-method-defined,
|
||||
dict-items-not-iterating,
|
||||
dict-keys-not-iterating,
|
||||
dict-values-not-iterating,
|
||||
missing-docstring,
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a note less than 10 (10 is the highest
|
||||
# note). You have access to the variables errors warning, statement which
|
||||
# respectively contain the number of errors / warnings messages and the total
|
||||
# number of statements analyzed. This is used by the global evaluation report
|
||||
# (RP0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details
|
||||
#msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio).You can also give a reporter class, eg
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=text
|
||||
|
||||
# Tells whether to display a full report or only the messages
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=no
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. Available dictionaries: none. To make it working
|
||||
# install python-enchant package.
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to indicated private dictionary in
|
||||
# --spelling-private-dict-file option instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis. It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid to define new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expectedly
|
||||
# not used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored. Default to name
|
||||
# with leading underscore
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=" "
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module
|
||||
max-module-lines=1000
|
||||
|
||||
# List of optional constructs for which whitespace checking is disabled. `dict-
|
||||
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
|
||||
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
|
||||
# `empty-line` allows space-only lines.
|
||||
no-space-check=trailing-comma,
|
||||
dict-separator
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Ignore imports when computing similarities.
|
||||
ignore-imports=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=8
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Naming style matching correct argument names
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style
|
||||
#attr-rgx=
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma
|
||||
bad-names=foo,
|
||||
bar,
|
||||
baz,
|
||||
toto,
|
||||
tutu,
|
||||
tata
|
||||
|
||||
# Naming style matching correct class attribute names
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class names
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-style
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style
|
||||
#const-rgx=
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
# Naming style matching correct function names
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style
|
||||
#function-rgx=
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma
|
||||
good-names=i,
|
||||
j,
|
||||
k,
|
||||
ex,
|
||||
Run,
|
||||
_
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name
|
||||
include-naming-hint=no
|
||||
|
||||
# Naming style matching correct inline iteration names
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style
|
||||
#method-rgx=
|
||||
|
||||
# Naming style matching correct module names
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=^_
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Naming style matching correct variable names
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style
|
||||
#variable-rgx=
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma
|
||||
deprecated-modules=optparse,tkinter.tix
|
||||
|
||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
||||
# not be disabled)
|
||||
ext-import-graph=
|
||||
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report RP0402 must not be disabled)
|
||||
import-graph=
|
||||
|
||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||
# not be disabled)
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,
|
||||
_fields,
|
||||
_replace,
|
||||
_source,
|
||||
_make
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# Maximum number of arguments for function / method
|
||||
max-args=5
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=7
|
||||
|
||||
# Maximum number of boolean expressions in a if statement
|
||||
max-bool-expr=5
|
||||
|
||||
# Maximum number of branch for function / method body
|
||||
max-branches=12
|
||||
|
||||
# Maximum number of locals for function / method body
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=7
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=20
|
||||
|
||||
# Maximum number of return / yield for function / method body
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of statements in function / method body
|
||||
max-statements=50
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
# "Exception"
|
||||
overgeneral-exceptions=Exception
|
30
.travis.yml
30
.travis.yml
|
@ -2,36 +2,22 @@ language: python
|
|||
|
||||
matrix:
|
||||
include:
|
||||
- python: 3.6-dev
|
||||
dist: xenial
|
||||
env: TOXENV=py36-test
|
||||
- python: 3.7
|
||||
dist: xenial
|
||||
env: TOXENV=py37-test
|
||||
- python: 3.7
|
||||
dist: xenial
|
||||
env: TOXENV=lint
|
||||
- python: 3.7
|
||||
dist: xenial
|
||||
env: TOXENV=docs
|
||||
- python: 3.7
|
||||
dist: xenial
|
||||
env: TOXENV=py37-interop GOBINPKG=go1.13.8.linux-amd64.tar.gz
|
||||
sudo: true
|
||||
before_install:
|
||||
- wget https://dl.google.com/go/$GOBINPKG
|
||||
- sudo tar -C /usr/local -xzf $GOBINPKG
|
||||
- export GOPATH=$HOME/go
|
||||
- export GOROOT=/usr/local/go
|
||||
- export PATH=$GOROOT/bin:$GOPATH/bin:$PATH
|
||||
- ./tests_interop/go_pkgs/install_interop_go_pkgs.sh
|
||||
|
||||
install:
|
||||
- pip install --upgrade pip
|
||||
- pip install tox
|
||||
- pip install -r requirements_dev.txt
|
||||
- python setup.py develop
|
||||
|
||||
script:
|
||||
- tox
|
||||
- pytest --cov=./libp2p tests/
|
||||
- pylint --rcfile=.pylintrc libp2p/!(kademlia) tests
|
||||
|
||||
after_success:
|
||||
- codecov
|
||||
|
||||
notifications:
|
||||
slack: py-libp2p:RK0WVoQZhQXLgIKfHNPL1TR2
|
||||
|
||||
|
|
21
LICENSE
21
LICENSE
|
@ -1,21 +0,0 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2019 The Ethereum Foundation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
113
Makefile
113
Makefile
|
@ -1,113 +0,0 @@
|
|||
CURRENT_SIGN_SETTING := $(shell git config commit.gpgSign)
|
||||
|
||||
.PHONY: clean-pyc clean-build docs
|
||||
|
||||
help:
|
||||
@echo "clean-build - remove build artifacts"
|
||||
@echo "clean-pyc - remove Python file artifacts"
|
||||
@echo "lint - check style with flake8, etc"
|
||||
@echo "lint-roll - auto-correct styles with isort, black, docformatter, etc"
|
||||
@echo "test - run tests quickly with the default Python"
|
||||
@echo "testall - run tests on every Python version with tox"
|
||||
@echo "release - package and upload a release"
|
||||
@echo "dist - package"
|
||||
|
||||
FILES_TO_LINT = libp2p tests tests_interop examples setup.py
|
||||
PB = libp2p/crypto/pb/crypto.proto \
|
||||
libp2p/pubsub/pb/rpc.proto \
|
||||
libp2p/security/insecure/pb/plaintext.proto \
|
||||
libp2p/security/secio/pb/spipe.proto \
|
||||
libp2p/security/noise/pb/noise.proto \
|
||||
libp2p/identity/identify/pb/identify.proto
|
||||
PY = $(PB:.proto=_pb2.py)
|
||||
PYI = $(PB:.proto=_pb2.pyi)
|
||||
|
||||
# Set default to `protobufs`, otherwise `format` is called when typing only `make`
|
||||
all: protobufs
|
||||
|
||||
protobufs: $(PY)
|
||||
|
||||
%_pb2.py: %.proto
|
||||
protoc --python_out=. --mypy_out=. $<
|
||||
|
||||
clean-proto:
|
||||
rm -f $(PY) $(PYI)
|
||||
|
||||
clean: clean-build clean-pyc
|
||||
|
||||
clean-build:
|
||||
rm -fr build/
|
||||
rm -fr dist/
|
||||
rm -fr *.egg-info
|
||||
|
||||
clean-pyc:
|
||||
find . -name '*.pyc' -exec rm -f {} +
|
||||
find . -name '*.pyo' -exec rm -f {} +
|
||||
find . -name '*~' -exec rm -f {} +
|
||||
find . -name '__pycache__' -exec rm -rf {} +
|
||||
|
||||
lint:
|
||||
mypy -p libp2p -p examples --config-file mypy.ini
|
||||
flake8 $(FILES_TO_LINT)
|
||||
black --check $(FILES_TO_LINT)
|
||||
isort --recursive --check-only --diff $(FILES_TO_LINT)
|
||||
docformatter --pre-summary-newline --check --recursive $(FILES_TO_LINT)
|
||||
tox -e lint # This is probably redundant, but just in case...
|
||||
|
||||
lint-roll:
|
||||
isort --recursive $(FILES_TO_LINT)
|
||||
black $(FILES_TO_LINT)
|
||||
docformatter -ir --pre-summary-newline $(FILES_TO_LINT)
|
||||
$(MAKE) lint
|
||||
|
||||
test:
|
||||
pytest tests
|
||||
|
||||
test-all:
|
||||
tox
|
||||
|
||||
build-docs:
|
||||
sphinx-apidoc -o docs/ . setup.py "*conftest*" "libp2p/tools/interop*"
|
||||
$(MAKE) -C docs clean
|
||||
$(MAKE) -C docs html
|
||||
$(MAKE) -C docs doctest
|
||||
./newsfragments/validate_files.py
|
||||
towncrier --draft --version preview
|
||||
|
||||
docs: build-docs
|
||||
open docs/_build/html/index.html
|
||||
|
||||
linux-docs: build-docs
|
||||
xdg-open docs/_build/html/index.html
|
||||
|
||||
package: clean
|
||||
python setup.py sdist bdist_wheel
|
||||
python scripts/release/test_package.py
|
||||
|
||||
notes:
|
||||
# Let UPCOMING_VERSION be the version that is used for the current bump
|
||||
$(eval UPCOMING_VERSION=$(shell bumpversion $(bump) --dry-run --list | grep new_version= | sed 's/new_version=//g'))
|
||||
# Now generate the release notes to have them included in the release commit
|
||||
towncrier --yes --version $(UPCOMING_VERSION)
|
||||
# Before we bump the version, make sure that the towncrier-generated docs will build
|
||||
make build-docs
|
||||
git commit -m "Compile release notes"
|
||||
|
||||
release: clean
|
||||
# require that you be on a branch that's linked to upstream/master
|
||||
git status -s -b | head -1 | grep "\.\.upstream/master"
|
||||
# verify that docs build correctly
|
||||
./newsfragments/validate_files.py is-empty
|
||||
make build-docs
|
||||
CURRENT_SIGN_SETTING=$(git config commit.gpgSign)
|
||||
git config commit.gpgSign true
|
||||
bumpversion $(bump)
|
||||
git push upstream && git push upstream --tags
|
||||
python setup.py sdist bdist_wheel
|
||||
twine upload dist/*
|
||||
git config commit.gpgSign "$(CURRENT_SIGN_SETTING)"
|
||||
|
||||
|
||||
dist: clean
|
||||
python setup.py sdist bdist_wheel
|
||||
ls -l dist
|
100
README.md
100
README.md
|
@ -1,86 +1,42 @@
|
|||
# py-libp2p
|
||||
# py-libp2p [![Build Status](https://travis-ci.com/libp2p/py-libp2p.svg?branch=master)](https://travis-ci.com/libp2p/py-libp2p) [![codecov](https://codecov.io/gh/libp2p/py-libp2p/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/py-libp2p) [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/py-libp2p/Lobby)[![Freenode](https://img.shields.io/badge/freenode-%23libp2p-yellow.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23libp2p)
|
||||
|
||||
[![Join the chat at https://gitter.im/py-libp2p/Lobby](https://badges.gitter.im/py-libp2p/Lobby.png)](https://gitter.im/py-libp2p/Lobby)
|
||||
[![Build Status](https://travis-ci.com/libp2p/py-libp2p.svg?branch=master)](https://travis-ci.com/libp2p/py-libp2p)
|
||||
[![PyPI version](https://badge.fury.io/py/libp2p.svg)](https://badge.fury.io/py/libp2p)
|
||||
[![Python versions](https://img.shields.io/pypi/pyversions/libp2p.svg)](https://pypi.python.org/pypi/libp2p)
|
||||
[![Docs build](https://readthedocs.org/projects/py-libp2p/badge/?version=latest)](http://py-libp2p.readthedocs.io/en/latest/?badge=latest)
|
||||
[![Freenode](https://img.shields.io/badge/freenode-%23libp2p-yellow.svg)](https://webchat.freenode.net/?channels=%23libp2p)
|
||||
[![Matrix](https://img.shields.io/badge/matrix-%23libp2p%3Apermaweb.io-blue.svg)](https://riot.permaweb.io/#/room/#libp2p:permaweb.io)
|
||||
[![Discord](https://img.shields.io/discord/475789330380488707?color=blueviolet&label=discord)](https://discord.gg/66KBrm2)
|
||||
|
||||
|
||||
<h1 align="center">
|
||||
<img width="250" align="center" src="https://github.com/libp2p/py-libp2p/blob/master/assets/py-libp2p-logo.png?raw=true" alt="py-libp2p hex logo" />
|
||||
<img width="250" align="center" src="https://github.com/libp2p/py-libp2p/blob/master/assets/py-libp2p-logo.png?raw=true" alt="py-libp2p hex logo" />
|
||||
</h1>
|
||||
|
||||
## WARNING
|
||||
py-libp2p is an experimental and work-in-progress repo under heavy development. We do not yet recommend using py-libp2p in production environments.
|
||||
|
||||
The Python implementation of the libp2p networking stack
|
||||
|
||||
Read more in the [documentation on ReadTheDocs](https://py-libp2p.readthedocs.io/). [View the release notes](https://py-libp2p.readthedocs.io/en/latest/release_notes.html).
|
||||
|
||||
## Sponsorship
|
||||
This project is graciously sponsored by the Ethereum Foundation through [Wave 5 of their Grants Program](https://blog.ethereum.org/2019/02/21/ethereum-foundation-grants-program-wave-5/).
|
||||
|
||||
## Maintainers
|
||||
The py-libp2p team consists of:
|
||||
|
||||
[@zixuanzh](https://github.com/zixuanzh) [@alexh](https://github.com/alexh) [@stuckinaboot](https://github.com/stuckinaboot) [@robzajac](https://github.com/robzajac) [@carver](https://github.com/carver)
|
||||
[@zixuanzh](https://github.com/zixuanzh) [@alexh](https://github.com/alexh) [@stuckinaboot](https://github.com/stuckinaboot) [@robzajac](https://github.com/robzajac)
|
||||
|
||||
## Development
|
||||
|
||||
py-libp2p requires Python 3.7 and the best way to guarantee a clean Python 3.7 environment is with [`virtualenv`](https://virtualenv.pypa.io/en/stable/)
|
||||
|
||||
```sh
|
||||
git clone git@github.com:libp2p/py-libp2p.git
|
||||
cd py-libp2p
|
||||
virtualenv -p python3.7 venv
|
||||
. venv/bin/activate
|
||||
pip install -e .[dev]
|
||||
pip3 install -r requirements_dev.txt
|
||||
python setup.py develop
|
||||
```
|
||||
|
||||
### Testing Setup
|
||||
|
||||
During development, you might like to have tests run on every file save.
|
||||
|
||||
Show flake8 errors on file change:
|
||||
## Testing
|
||||
|
||||
After installing our requirements (see above), you can:
|
||||
```sh
|
||||
# Test flake8
|
||||
when-changed -v -s -r -1 libp2p/ tests/ -c "clear; flake8 libp2p tests && echo 'flake8 success' || echo 'error'"
|
||||
cd tests
|
||||
pytest
|
||||
```
|
||||
|
||||
Run multi-process tests in one command, but without color:
|
||||
|
||||
```sh
|
||||
# in the project root:
|
||||
pytest --numprocesses=4 --looponfail --maxfail=1
|
||||
# the same thing, succinctly:
|
||||
pytest -n 4 -f --maxfail=1
|
||||
```
|
||||
|
||||
Run in one thread, with color and desktop notifications:
|
||||
|
||||
```sh
|
||||
cd venv
|
||||
ptw --onfail "notify-send -t 5000 'Test failure ⚠⚠⚠⚠⚠' 'python 3 test on py-libp2p failed'" ../tests ../libp2p
|
||||
```
|
||||
|
||||
Note that tests/libp2p/test_libp2p.py contains an end-to-end messaging test between two libp2p hosts, which is the bulk of our proof of concept.
|
||||
|
||||
|
||||
### Release setup
|
||||
|
||||
Releases follow the same basic pattern as releases of some tangentially-related projects,
|
||||
like Trinity. See [Trinity's release instructions](
|
||||
https://trinity-client.readthedocs.io/en/latest/contributing.html#releasing).
|
||||
|
||||
## Requirements
|
||||
|
||||
The protobuf description in this repository was generated by `protoc` at version `3.7.1`.
|
||||
|
||||
## Feature Breakdown
|
||||
py-libp2p aims for conformity with [the standard libp2p modules](https://github.com/libp2p/libp2p/blob/master/REQUIREMENTS.md#libp2p-modules-implementations). Below is a breakdown of the modules we have developed, are developing, and may develop in the future.
|
||||
|
||||
|
@ -93,16 +49,16 @@ py-libp2p aims for conformity with [the standard libp2p modules](https://github.
|
|||
|
||||
| Identify Protocol | Status |
|
||||
| -------------------------------------------- | :-----------: |
|
||||
| **`Identify`** | :lemon: |
|
||||
| **`Identify`** | :tomato: |
|
||||
|
||||
|
||||
| Transport Protocols | Status |
|
||||
| -------------------------------------------- | :-----------: |
|
||||
| **`TCP`** | :green_apple: |
|
||||
| **`TCP`** | :lemon: tests |
|
||||
| **`UDP`** | :tomato: |
|
||||
| **`WebSockets`** | :chestnut: |
|
||||
| **`UTP`** | :chestnut: |
|
||||
| **`WebRTC`** | :chestnut: |
|
||||
| **`WebSockets`** | :tomato: |
|
||||
| **`UTP`** | :tomato: |
|
||||
| **`WebRTC`** | :tomato: |
|
||||
| **`SCTP`** | :chestnut: |
|
||||
| **`Tor`** | :chestnut: |
|
||||
| **`i2p`** | :chestnut: |
|
||||
|
@ -115,7 +71,7 @@ py-libp2p aims for conformity with [the standard libp2p modules](https://github.
|
|||
|
||||
| Stream Muxers | Status |
|
||||
| -------------------------------------------- | :-----------: |
|
||||
| **`multiplex`** | :green_apple: |
|
||||
| **`multiplex`** | :lemon: tests |
|
||||
| **`yamux`** | :tomato: |
|
||||
| **`benchmarks`** | :chestnut: |
|
||||
| **`muxado`** | :chestnut: |
|
||||
|
@ -132,39 +88,39 @@ py-libp2p aims for conformity with [the standard libp2p modules](https://github.
|
|||
|
||||
| Switch (Swarm) | Status |
|
||||
| -------------------------------------------- | :-----------: |
|
||||
| **`Switch`** | :green_apple: |
|
||||
| **`Dialer stack`** | :green_apple: |
|
||||
| **`Switch`** | :lemon: tests |
|
||||
| **`Dialer stack`** | :chestnut: |
|
||||
|
||||
|
||||
| Peer Discovery | Status |
|
||||
| -------------------------------------------- | :-----------: |
|
||||
| **`bootstrap list`** | :tomato: |
|
||||
| **`Kademlia DHT`** | :chestnut: |
|
||||
| **`mDNS`** | :chestnut: |
|
||||
| **`bootstrap list`** | :green_apple: |
|
||||
| **`Kademlia DHT`** | :tomato: |
|
||||
| **`mDNS`** | :tomato: |
|
||||
| **`PEX`** | :chestnut: |
|
||||
| **`DNS`** | :chestnut: |
|
||||
|
||||
|
||||
| Content Routing | Status |
|
||||
| -------------------------------------------- | :-----------: |
|
||||
| **`Kademlia DHT`** | :chestnut: |
|
||||
| **`floodsub`** | :green_apple: |
|
||||
| **`gossipsub`** | :green_apple: |
|
||||
| **`Kademlia DHT`** | :tomato: |
|
||||
| **`floodsub`** | :tomato: |
|
||||
| **`gossipsub`** | :tomato: |
|
||||
| **`PHT`** | :chestnut: |
|
||||
|
||||
|
||||
| Peer Routing | Status |
|
||||
| -------------------------------------------- | :-----------: |
|
||||
| **`Kademlia DHT`** | :chestnut: |
|
||||
| **`floodsub`** | :green_apple: |
|
||||
| **`gossipsub`** | :green_apple: |
|
||||
| **`Kademlia DHT`** | :tomato: |
|
||||
| **`floodsub`** | :tomato: |
|
||||
| **`gossipsub`** | :tomato: |
|
||||
| **`PHT`** | :chestnut: |
|
||||
|
||||
|
||||
| NAT Traversal | Status |
|
||||
| -------------------------------------------- | :-----------: |
|
||||
| **`nat-pmp`** | :chestnut: |
|
||||
| **`upnp`** | :chestnut: |
|
||||
| **`nat-pmp`** | :tomato: |
|
||||
| **`upnp`** | :tomato: |
|
||||
| **`ext addr discovery`** | :chestnut: |
|
||||
| **`STUN-like`** | :chestnut: |
|
||||
| **`line-switch relay`** | :chestnut: |
|
||||
|
|
177
docs/Makefile
177
docs/Makefile
|
@ -1,177 +0,0 @@
|
|||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS = -W
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# User-friendly check for sphinx-build
|
||||
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
|
||||
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
|
||||
endif
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " xml to make Docutils-native XML files"
|
||||
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
|
||||
clean:
|
||||
rm -rf $(BUILDDIR)/*
|
||||
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/web3.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/web3.qhc"
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/web3"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/web3"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
latexpdfja:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
||||
xml:
|
||||
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||
@echo
|
||||
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||
|
||||
pseudoxml:
|
||||
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||
@echo
|
||||
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
304
docs/conf.py
304
docs/conf.py
|
@ -1,304 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# py-libp2p documentation build configuration file, created by
|
||||
# sphinx-quickstart on Thu Oct 16 20:43:24 2014.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
import os
|
||||
|
||||
DIR = os.path.dirname('__file__')
|
||||
with open (os.path.join(DIR, '../setup.py'), 'r') as f:
|
||||
for line in f:
|
||||
if 'version=' in line:
|
||||
setup_version = line.split('"')[1]
|
||||
break
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.doctest',
|
||||
'sphinx.ext.intersphinx',
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = 'py-libp2p'
|
||||
copyright = '2019, The Ethereum Foundation'
|
||||
|
||||
__version__ = setup_version
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '.'.join(__version__.split('.')[:2])
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = __version__
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = [
|
||||
'_build',
|
||||
'modules.rst',
|
||||
]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
#keep_warnings = False
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#html_extra_path = []
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'libp2pdoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
('index', 'libp2p.tex', 'py-libp2p Documentation',
|
||||
'The Ethereum Foundation', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'libp2p', 'py-libp2p Documentation',
|
||||
['The Ethereum Foundation'], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'py-libp2p', 'py-libp2p Documentation',
|
||||
'The Ethereum Foundation', 'py-libp2p', 'The Python implementation of the libp2p networking stack',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#texinfo_no_detailmenu = False
|
||||
|
||||
# -- Intersphinx configuration ------------------------------------------------
|
||||
|
||||
intersphinx_mapping = {
|
||||
'python': ('https://docs.python.org/3.6', None),
|
||||
}
|
||||
|
||||
# -- Doctest configuration ----------------------------------------
|
||||
|
||||
import doctest
|
||||
|
||||
doctest_default_flags = (0
|
||||
| doctest.DONT_ACCEPT_TRUE_FOR_1
|
||||
| doctest.ELLIPSIS
|
||||
| doctest.IGNORE_EXCEPTION_DETAIL
|
||||
| doctest.NORMALIZE_WHITESPACE
|
||||
)
|
||||
|
||||
# -- Mocked dependencies ----------------------------------------
|
||||
|
||||
# Mock out dependencies that are unbuildable on readthedocs, as recommended here:
|
||||
# https://docs.readthedocs.io/en/rel/faq.html#i-get-import-errors-on-libraries-that-depend-on-c-modules
|
||||
|
||||
import sys
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
# Add new modules to mock here (it should be the same list as those excluded in setup.py)
|
||||
MOCK_MODULES = [
|
||||
"fastecdsa",
|
||||
"fastecdsa.encoding",
|
||||
"fastecdsa.encoding.sec1",
|
||||
]
|
||||
sys.modules.update((mod_name, MagicMock()) for mod_name in MOCK_MODULES)
|
|
@ -1,22 +0,0 @@
|
|||
examples.chat package
|
||||
=====================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
examples.chat.chat module
|
||||
-------------------------
|
||||
|
||||
.. automodule:: examples.chat.chat
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: examples.chat
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,17 +0,0 @@
|
|||
examples package
|
||||
================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
examples.chat
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: examples
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,21 +0,0 @@
|
|||
py-libp2p
|
||||
==============================
|
||||
|
||||
The Python implementation of the libp2p networking stack
|
||||
|
||||
Contents
|
||||
--------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 3
|
||||
|
||||
libp2p
|
||||
release_notes
|
||||
examples
|
||||
|
||||
|
||||
Indices and tables
|
||||
------------------
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
|
@ -1,22 +0,0 @@
|
|||
libp2p.crypto.pb package
|
||||
========================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.crypto.pb.crypto\_pb2 module
|
||||
-----------------------------------
|
||||
|
||||
.. automodule:: libp2p.crypto.pb.crypto_pb2
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.crypto.pb
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,93 +0,0 @@
|
|||
libp2p.crypto package
|
||||
=====================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.crypto.pb
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.crypto.authenticated\_encryption module
|
||||
----------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.crypto.authenticated_encryption
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.crypto.ecc module
|
||||
------------------------
|
||||
|
||||
.. automodule:: libp2p.crypto.ecc
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.crypto.ed25519 module
|
||||
----------------------------
|
||||
|
||||
.. automodule:: libp2p.crypto.ed25519
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.crypto.exceptions module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: libp2p.crypto.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.crypto.key\_exchange module
|
||||
----------------------------------
|
||||
|
||||
.. automodule:: libp2p.crypto.key_exchange
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.crypto.keys module
|
||||
-------------------------
|
||||
|
||||
.. automodule:: libp2p.crypto.keys
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.crypto.rsa module
|
||||
------------------------
|
||||
|
||||
.. automodule:: libp2p.crypto.rsa
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.crypto.secp256k1 module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: libp2p.crypto.secp256k1
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.crypto.serialization module
|
||||
----------------------------------
|
||||
|
||||
.. automodule:: libp2p.crypto.serialization
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.crypto
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,62 +0,0 @@
|
|||
libp2p.host package
|
||||
===================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.host.basic\_host module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: libp2p.host.basic_host
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.host.defaults module
|
||||
---------------------------
|
||||
|
||||
.. automodule:: libp2p.host.defaults
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.host.exceptions module
|
||||
-----------------------------
|
||||
|
||||
.. automodule:: libp2p.host.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.host.host\_interface module
|
||||
----------------------------------
|
||||
|
||||
.. automodule:: libp2p.host.host_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.host.ping module
|
||||
-----------------------
|
||||
|
||||
.. automodule:: libp2p.host.ping
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.host.routed\_host module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: libp2p.host.routed_host
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.host
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,22 +0,0 @@
|
|||
libp2p.identity.identify.pb package
|
||||
===================================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.identity.identify.pb.identify\_pb2 module
|
||||
------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.identity.identify.pb.identify_pb2
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.identity.identify.pb
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,29 +0,0 @@
|
|||
libp2p.identity.identify package
|
||||
================================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.identity.identify.pb
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.identity.identify.protocol module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: libp2p.identity.identify.protocol
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.identity.identify
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,17 +0,0 @@
|
|||
libp2p.identity package
|
||||
=======================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.identity.identify
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.identity
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,46 +0,0 @@
|
|||
libp2p.io package
|
||||
=================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.io.abc module
|
||||
--------------------
|
||||
|
||||
.. automodule:: libp2p.io.abc
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.io.exceptions module
|
||||
---------------------------
|
||||
|
||||
.. automodule:: libp2p.io.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.io.msgio module
|
||||
----------------------
|
||||
|
||||
.. automodule:: libp2p.io.msgio
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.io.utils module
|
||||
----------------------
|
||||
|
||||
.. automodule:: libp2p.io.utils
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.io
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,54 +0,0 @@
|
|||
libp2p.network.connection package
|
||||
=================================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.network.connection.exceptions module
|
||||
-------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.connection.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.network.connection.net\_connection\_interface module
|
||||
-----------------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.connection.net_connection_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.network.connection.raw\_connection module
|
||||
------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.connection.raw_connection
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.network.connection.raw\_connection\_interface module
|
||||
-----------------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.connection.raw_connection_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.network.connection.swarm\_connection module
|
||||
--------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.connection.swarm_connection
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.network.connection
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,54 +0,0 @@
|
|||
libp2p.network package
|
||||
======================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.network.connection
|
||||
libp2p.network.stream
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.network.exceptions module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.network.network\_interface module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.network_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.network.notifee\_interface module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.notifee_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.network.swarm module
|
||||
---------------------------
|
||||
|
||||
.. automodule:: libp2p.network.swarm
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.network
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,38 +0,0 @@
|
|||
libp2p.network.stream package
|
||||
=============================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.network.stream.exceptions module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.stream.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.network.stream.net\_stream module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.stream.net_stream
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.network.stream.net\_stream\_interface module
|
||||
---------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.network.stream.net_stream_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.network.stream
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,78 +0,0 @@
|
|||
libp2p.peer package
|
||||
===================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.peer.addrbook\_interface module
|
||||
--------------------------------------
|
||||
|
||||
.. automodule:: libp2p.peer.addrbook_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.peer.id module
|
||||
---------------------
|
||||
|
||||
.. automodule:: libp2p.peer.id
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.peer.peerdata module
|
||||
---------------------------
|
||||
|
||||
.. automodule:: libp2p.peer.peerdata
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.peer.peerdata\_interface module
|
||||
--------------------------------------
|
||||
|
||||
.. automodule:: libp2p.peer.peerdata_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.peer.peerinfo module
|
||||
---------------------------
|
||||
|
||||
.. automodule:: libp2p.peer.peerinfo
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.peer.peermetadata\_interface module
|
||||
------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.peer.peermetadata_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.peer.peerstore module
|
||||
----------------------------
|
||||
|
||||
.. automodule:: libp2p.peer.peerstore
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.peer.peerstore\_interface module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: libp2p.peer.peerstore_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.peer
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,70 +0,0 @@
|
|||
libp2p.protocol\_muxer package
|
||||
==============================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.protocol\_muxer.exceptions module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: libp2p.protocol_muxer.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.protocol\_muxer.multiselect module
|
||||
-----------------------------------------
|
||||
|
||||
.. automodule:: libp2p.protocol_muxer.multiselect
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.protocol\_muxer.multiselect\_client module
|
||||
-------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.protocol_muxer.multiselect_client
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.protocol\_muxer.multiselect\_client\_interface module
|
||||
------------------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.protocol_muxer.multiselect_client_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.protocol\_muxer.multiselect\_communicator module
|
||||
-------------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.protocol_muxer.multiselect_communicator
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.protocol\_muxer.multiselect\_communicator\_interface module
|
||||
------------------------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.protocol_muxer.multiselect_communicator_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.protocol\_muxer.multiselect\_muxer\_interface module
|
||||
-----------------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.protocol_muxer.multiselect_muxer_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.protocol_muxer
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,22 +0,0 @@
|
|||
libp2p.pubsub.pb package
|
||||
========================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.pubsub.pb.rpc\_pb2 module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.pb.rpc_pb2
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.pb
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,93 +0,0 @@
|
|||
libp2p.pubsub package
|
||||
=====================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.pubsub.pb
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.pubsub.abc module
|
||||
------------------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.abc
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.pubsub.exceptions module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.pubsub.floodsub module
|
||||
-----------------------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.floodsub
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.pubsub.gossipsub module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.gossipsub
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.pubsub.mcache module
|
||||
---------------------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.mcache
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.pubsub.pubsub module
|
||||
---------------------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.pubsub
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.pubsub.pubsub\_notifee module
|
||||
------------------------------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.pubsub_notifee
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.pubsub.subscription module
|
||||
---------------------------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.subscription
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.pubsub.validators module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: libp2p.pubsub.validators
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.pubsub
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,23 +0,0 @@
|
|||
libp2p.routing package
|
||||
======================
|
||||
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.routing.interfaces module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: libp2p.routing.interfaces
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.routing
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,57 +0,0 @@
|
|||
libp2p package
|
||||
==============
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.crypto
|
||||
libp2p.host
|
||||
libp2p.identity
|
||||
libp2p.io
|
||||
libp2p.network
|
||||
libp2p.peer
|
||||
libp2p.protocol_muxer
|
||||
libp2p.pubsub
|
||||
libp2p.routing
|
||||
libp2p.security
|
||||
libp2p.stream_muxer
|
||||
libp2p.tools
|
||||
libp2p.transport
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.exceptions module
|
||||
------------------------
|
||||
|
||||
.. automodule:: libp2p.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.typing module
|
||||
--------------------
|
||||
|
||||
.. automodule:: libp2p.typing
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.utils module
|
||||
-------------------
|
||||
|
||||
.. automodule:: libp2p.utils
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,22 +0,0 @@
|
|||
libp2p.security.insecure.pb package
|
||||
===================================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.security.insecure.pb.plaintext\_pb2 module
|
||||
-------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.insecure.pb.plaintext_pb2
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.security.insecure.pb
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,29 +0,0 @@
|
|||
libp2p.security.insecure package
|
||||
================================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.security.insecure.pb
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.security.insecure.transport module
|
||||
-----------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.insecure.transport
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.security.insecure
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,22 +0,0 @@
|
|||
libp2p.security.noise.pb package
|
||||
================================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.security.noise.pb.noise\_pb2 module
|
||||
------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.noise.pb.noise_pb2
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.security.noise.pb
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,61 +0,0 @@
|
|||
libp2p.security.noise package
|
||||
=============================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.security.noise.pb
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.security.noise.exceptions module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.noise.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.security.noise.io module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.noise.io
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.security.noise.messages module
|
||||
-------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.noise.messages
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.security.noise.patterns module
|
||||
-------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.noise.patterns
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.security.noise.transport module
|
||||
--------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.noise.transport
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.security.noise
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,71 +0,0 @@
|
|||
libp2p.security package
|
||||
=======================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.security.insecure
|
||||
libp2p.security.noise
|
||||
libp2p.security.secio
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.security.base\_session module
|
||||
------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.base_session
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.security.base\_transport module
|
||||
--------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.base_transport
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.security.exceptions module
|
||||
---------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.security.secure\_conn\_interface module
|
||||
----------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.secure_conn_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.security.secure\_transport\_interface module
|
||||
---------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.secure_transport_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.security.security\_multistream module
|
||||
--------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.security_multistream
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.security
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,22 +0,0 @@
|
|||
libp2p.security.secio.pb package
|
||||
================================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.security.secio.pb.spipe\_pb2 module
|
||||
------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.secio.pb.spipe_pb2
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.security.secio.pb
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,37 +0,0 @@
|
|||
libp2p.security.secio package
|
||||
=============================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.security.secio.pb
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.security.secio.exceptions module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.secio.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.security.secio.transport module
|
||||
--------------------------------------
|
||||
|
||||
.. automodule:: libp2p.security.secio.transport
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.security.secio
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,54 +0,0 @@
|
|||
libp2p.stream\_muxer.mplex package
|
||||
==================================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.stream\_muxer.mplex.constants module
|
||||
-------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.stream_muxer.mplex.constants
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.stream\_muxer.mplex.datastructures module
|
||||
------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.stream_muxer.mplex.datastructures
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.stream\_muxer.mplex.exceptions module
|
||||
--------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.stream_muxer.mplex.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.stream\_muxer.mplex.mplex module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: libp2p.stream_muxer.mplex.mplex
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.stream\_muxer.mplex.mplex\_stream module
|
||||
-----------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.stream_muxer.mplex.mplex_stream
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.stream_muxer.mplex
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,45 +0,0 @@
|
|||
libp2p.stream\_muxer package
|
||||
============================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.stream_muxer.mplex
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.stream\_muxer.abc module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: libp2p.stream_muxer.abc
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.stream\_muxer.exceptions module
|
||||
--------------------------------------
|
||||
|
||||
.. automodule:: libp2p.stream_muxer.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.stream\_muxer.muxer\_multistream module
|
||||
----------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.stream_muxer.muxer_multistream
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.stream_muxer
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,38 +0,0 @@
|
|||
libp2p.tools.pubsub package
|
||||
===========================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.tools.pubsub.dummy\_account\_node module
|
||||
-----------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.tools.pubsub.dummy_account_node
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.tools.pubsub.floodsub\_integration\_test\_settings module
|
||||
----------------------------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.tools.pubsub.floodsub_integration_test_settings
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.tools.pubsub.utils module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: libp2p.tools.pubsub.utils
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.tools.pubsub
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,47 +0,0 @@
|
|||
libp2p.tools package
|
||||
====================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.tools.pubsub
|
||||
|
||||
The interop module is left out for now, because of the extra dependencies it requires.
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.tools.constants module
|
||||
-----------------------------
|
||||
|
||||
.. automodule:: libp2p.tools.constants
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.tools.factories module
|
||||
-----------------------------
|
||||
|
||||
.. automodule:: libp2p.tools.factories
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.tools.utils module
|
||||
-------------------------
|
||||
|
||||
.. automodule:: libp2p.tools.utils
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.tools
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,61 +0,0 @@
|
|||
libp2p.transport package
|
||||
========================
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
libp2p.transport.tcp
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.transport.exceptions module
|
||||
----------------------------------
|
||||
|
||||
.. automodule:: libp2p.transport.exceptions
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.transport.listener\_interface module
|
||||
-------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.transport.listener_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.transport.transport\_interface module
|
||||
--------------------------------------------
|
||||
|
||||
.. automodule:: libp2p.transport.transport_interface
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.transport.typing module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: libp2p.transport.typing
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
libp2p.transport.upgrader module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: libp2p.transport.upgrader
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.transport
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,22 +0,0 @@
|
|||
libp2p.transport.tcp package
|
||||
============================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
libp2p.transport.tcp.tcp module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: libp2p.transport.tcp.tcp
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: libp2p.transport.tcp
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -1,95 +0,0 @@
|
|||
Release Notes
|
||||
=============
|
||||
|
||||
.. towncrier release notes start
|
||||
|
||||
libp2p v0.1.5 (2020-03-25)
|
||||
--------------------------
|
||||
|
||||
Features
|
||||
~~~~~~~~
|
||||
|
||||
- Dial all multiaddrs stored for a peer when attempting to connect (not just the first one in the peer store). (`#386 <https://github.com/libp2p/py-libp2p/issues/386>`__)
|
||||
- Migrate transport stack to trio-compatible code. Merge in #404. (`#396 <https://github.com/libp2p/py-libp2p/issues/396>`__)
|
||||
- Migrate network stack to trio-compatible code. Merge in #404. (`#397 <https://github.com/libp2p/py-libp2p/issues/397>`__)
|
||||
- Migrate host, peer and protocols stacks to trio-compatible code. Merge in #404. (`#398 <https://github.com/libp2p/py-libp2p/issues/398>`__)
|
||||
- Migrate muxer and security transport stacks to trio-compatible code. Merge in #404. (`#399 <https://github.com/libp2p/py-libp2p/issues/399>`__)
|
||||
- Migrate pubsub stack to trio-compatible code. Merge in #404. (`#400 <https://github.com/libp2p/py-libp2p/issues/400>`__)
|
||||
- Fix interop tests w/ new trio-style code. Merge in #404. (`#401 <https://github.com/libp2p/py-libp2p/issues/401>`__)
|
||||
- Fix remainder of test code w/ new trio-style code. Merge in #404. (`#402 <https://github.com/libp2p/py-libp2p/issues/402>`__)
|
||||
- Add initial infrastructure for `noise` security transport. (`#405 <https://github.com/libp2p/py-libp2p/issues/405>`__)
|
||||
- Add `PatternXX` of `noise` security transport. (`#406 <https://github.com/libp2p/py-libp2p/issues/406>`__)
|
||||
- The `msg_id` in a pubsub message is now configurable by the user of the library. (`#410 <https://github.com/libp2p/py-libp2p/issues/410>`__)
|
||||
|
||||
|
||||
Bugfixes
|
||||
~~~~~~~~
|
||||
|
||||
- Use `sha256` when calculating a peer's ID from their public key in Kademlia DHTs. (`#385 <https://github.com/libp2p/py-libp2p/issues/385>`__)
|
||||
- Store peer ids in ``set`` instead of ``list`` and check if peer id exists in ``dict`` before accessing to prevent ``KeyError``. (`#387 <https://github.com/libp2p/py-libp2p/issues/387>`__)
|
||||
- Do not close a connection if it has been reset. (`#394 <https://github.com/libp2p/py-libp2p/issues/394>`__)
|
||||
|
||||
|
||||
Internal Changes - for py-libp2p Contributors
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Add support for `fastecdsa` on windows (and thereby supporting windows installation via `pip`) (`#380 <https://github.com/libp2p/py-libp2p/issues/380>`__)
|
||||
- Prefer f-string style formatting everywhere except logging statements. (`#389 <https://github.com/libp2p/py-libp2p/issues/389>`__)
|
||||
- Mark `lru` dependency as third-party to fix a windows inconsistency. (`#392 <https://github.com/libp2p/py-libp2p/issues/392>`__)
|
||||
- Bump `multiaddr` dependency to version `0.0.9` so that multiaddr objects are hashable. (`#393 <https://github.com/libp2p/py-libp2p/issues/393>`__)
|
||||
- Remove incremental mode of mypy to disable some warnings. (`#403 <https://github.com/libp2p/py-libp2p/issues/403>`__)
|
||||
|
||||
|
||||
libp2p v0.1.4 (2019-12-12)
|
||||
--------------------------
|
||||
|
||||
Features
|
||||
~~~~~~~~
|
||||
|
||||
- Added support for Python 3.6 (`#372 <https://github.com/libp2p/py-libp2p/issues/372>`__)
|
||||
- Add signing and verification to pubsub (`#362 <https://github.com/libp2p/py-libp2p/issues/362>`__)
|
||||
|
||||
|
||||
Internal Changes - for py-libp2p Contributors
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Refactor and cleanup gossipsub (`#373 <https://github.com/libp2p/py-libp2p/issues/373>`__)
|
||||
|
||||
|
||||
libp2p v0.1.3 (2019-11-27)
|
||||
--------------------------
|
||||
|
||||
Bugfixes
|
||||
~~~~~~~~
|
||||
|
||||
- Handle Stream* errors (like ``StreamClosed``) during calls to ``stream.write()`` and
|
||||
``stream.read()`` (`#350 <https://github.com/libp2p/py-libp2p/issues/350>`__)
|
||||
- Relax the protobuf dependency to play nicely with other libraries. It was pinned to 3.9.0, and now
|
||||
permits v3.10 up to (but not including) v4. (`#354 <https://github.com/libp2p/py-libp2p/issues/354>`__)
|
||||
- Fixes KeyError when peer in a stream accidentally closes and resets the stream, because handlers
|
||||
for both will try to ``del streams[stream_id]`` without checking if the entry still exists. (`#355 <https://github.com/libp2p/py-libp2p/issues/355>`__)
|
||||
|
||||
|
||||
Improved Documentation
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Use Sphinx & autodoc to generate docs, now available on `py-libp2p.readthedocs.io <https://py-libp2p.readthedocs.io>`_ (`#318 <https://github.com/libp2p/py-libp2p/issues/318>`__)
|
||||
|
||||
|
||||
Internal Changes - for py-libp2p Contributors
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Added Makefile target to test a packaged version of libp2p before release. (`#353 <https://github.com/libp2p/py-libp2p/issues/353>`__)
|
||||
- Move helper tools from ``tests/`` to ``libp2p/tools/``, and some mildly-related cleanups. (`#356 <https://github.com/libp2p/py-libp2p/issues/356>`__)
|
||||
|
||||
|
||||
Miscellaneous changes
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- `#357 <https://github.com/libp2p/py-libp2p/issues/357>`__
|
||||
|
||||
|
||||
v0.1.2
|
||||
--------------
|
||||
|
||||
Welcome to the great beyond, where changes were not tracked by release...
|
80
examples/bee_movie/msg_ordering_node.py
Normal file
80
examples/bee_movie/msg_ordering_node.py
Normal file
|
@ -0,0 +1,80 @@
|
|||
import asyncio
|
||||
import multiaddr
|
||||
|
||||
from tests.pubsub.utils import message_id_generator, generate_RPC_packet
|
||||
from libp2p import new_node
|
||||
from libp2p.pubsub.pubsub import Pubsub
|
||||
from libp2p.pubsub.floodsub import FloodSub
|
||||
from .ordered_queue import OrderedQueue
|
||||
|
||||
SUPPORTED_PUBSUB_PROTOCOLS = ["/floodsub/1.0.0"]
|
||||
BEE_MOVIE_TOPIC = "bee_movie"
|
||||
|
||||
|
||||
class MsgOrderingNode():
    """
    Pubsub node that publishes "bee movie" words over floodsub and delivers
    received words to consumers strictly in ascending seqno order by
    buffering them in an OrderedQueue.
    """

    def __init__(self):
        # Leftover from the crypto example this node was derived from;
        # unused by the bee-movie flow.
        self.balances = {}
        # Produces monotonically increasing message ids, starting at 0.
        self.next_msg_id_func = message_id_generator(0)
        # Buffers out-of-order words until their seqno is next in line.
        self.priority_queue = OrderedQueue()
        # Subscription queue for BEE_MOVIE_TOPIC; assigned in
        # setup_crypto_networking(). Declared here so the attribute always
        # exists (previously it was created outside __init__).
        self.queue = None

        self.libp2p_node = None
        self.floodsub = None
        self.pubsub = None

    @classmethod
    async def create(cls):
        """
        Create a new MsgOrderingNode and attach a libp2p node, a floodsub,
        and a pubsub instance to this new node.

        We use create as this serves as a factory function and allows us
        to use async await, unlike the init function.

        :return: fully wired MsgOrderingNode (or subclass) instance
        """
        # Use cls() (not the hard-coded class name) so subclasses get
        # instances of the right type.
        self = cls()

        libp2p_node = await new_node(transport_opt=["/ip4/127.0.0.1/tcp/0"])
        await libp2p_node.get_network().listen(multiaddr.Multiaddr("/ip4/127.0.0.1/tcp/0"))

        self.libp2p_node = libp2p_node

        self.floodsub = FloodSub(SUPPORTED_PUBSUB_PROTOCOLS)
        self.pubsub = Pubsub(self.libp2p_node, self.floodsub, "a")
        return self

    async def handle_incoming_msgs(self):
        """
        Handle all incoming messages on the BEE_MOVIE_TOPIC from peers,
        forwarding each (seqno, word) pair into the ordered queue.
        """
        while True:
            incoming = await self.queue.get()
            seqno = int.from_bytes(incoming.seqno, byteorder='big')
            word = incoming.data.decode('utf-8')

            await self.handle_bee_movie_word(seqno, word)

    async def setup_crypto_networking(self):
        """
        Subscribe to BEE_MOVIE_TOPIC and perform call to function that handles
        all incoming messages on said topic.
        """
        self.queue = await self.pubsub.subscribe(BEE_MOVIE_TOPIC)

        asyncio.ensure_future(self.handle_incoming_msgs())

    async def publish_bee_movie_word(self, word, msg_id=None):
        """
        Publish a bee movie word to all peers.

        :param word: the word to publish
        :param msg_id: explicit message id; when None the next generated
            id is used
        """
        my_id = str(self.libp2p_node.get_id())
        if msg_id is None:
            msg_id = self.next_msg_id_func()
        packet = generate_RPC_packet(my_id, [BEE_MOVIE_TOPIC], word, msg_id)
        await self.floodsub.publish(my_id, packet.SerializeToString())

    async def handle_bee_movie_word(self, seqno, word):
        """Buffer a received bee movie word for in-order delivery."""
        await self.priority_queue.put((seqno, word))

    async def get_next_word_in_bee_movie(self):
        """
        :return: the next in-order word (seqno stripped), blocking until
            the word with the next expected seqno has arrived
        """
        next_word = (await self.priority_queue.get())[1]
        return next_word
|
51
examples/bee_movie/ordered_queue.py
Normal file
51
examples/bee_movie/ordered_queue.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
import asyncio
|
||||
|
||||
class OrderedQueue():
    """
    Wrapper around asyncio.PriorityQueue that hands items out strictly in
    sequence-number order. If messages 1 and 3 have been put and get() is
    called three times, the first call yields message 1, the second blocks
    until message 2 arrives, and only then does the third yield message 3.
    """

    def __init__(self):
        # Highest seqno delivered so far; delivery resumes at this + 1.
        self.last_gotten_seqno = 0
        self.queue = asyncio.PriorityQueue()
        # Event a blocked get() waits on; None when no getter is blocked.
        self.task = None

    async def put(self, item):
        """
        :param item: put item tuple (seqno, data) onto queue
        """
        seqno = item[0]
        await self.queue.put(item)
        # Wake the blocked getter only once the next-expected seqno lands.
        getter_waiting = self.task is not None
        if getter_waiting and seqno == self.last_gotten_seqno + 1:
            self.task.set()

    async def get(self):
        """
        Get the item with last_gotten_seqno + 1 from the queue, blocking
        until it has been put.

        :return: (seqno, data)
        """
        if not self.queue.empty():
            candidate = await self.queue.get()
            if candidate[0] == self.last_gotten_seqno + 1:
                self.last_gotten_seqno += 1
                return candidate
            # Not this item's turn yet — push it back and block below.
            await self.queue.put(candidate)

        # Block until put() signals that the next-expected seqno arrived.
        self.task = asyncio.Event()
        await self.task.wait()
        item = await self.queue.get()

        # Clear the wakeup event now that we have been served.
        self.task = None

        self.last_gotten_seqno += 1

        return item
|
427
examples/bee_movie/test_bee_movie.py
Normal file
427
examples/bee_movie/test_bee_movie.py
Normal file
|
@ -0,0 +1,427 @@
|
|||
import asyncio
|
||||
from threading import Thread
|
||||
import struct
|
||||
import pytest
|
||||
import urllib.request
|
||||
|
||||
from libp2p.peer.peerinfo import info_from_p2p_addr
|
||||
from .msg_ordering_node import MsgOrderingNode
|
||||
from tests.utils import cleanup
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
|
||||
"""
|
||||
Test-cases demonstrating how to create nodes that continuously stream data
|
||||
and ensure that data is delivered to each node with pre-determined ordering.
|
||||
The ordering is such that if a peer A sends a publish 1 and 2 with seqno=1 and with seqno=2,
|
||||
respectively, even if the publish 2 (with seqno=2) reaches the peers first, it will not
|
||||
be processed until seqno=1 is received (and then publish 1 with seqno=1 must be
|
||||
processed before publish 2 with seqno=2 will be).
|
||||
|
||||
This concept is demonstrated by streaming the script to the entire bee movie to several nodes
|
||||
"""
|
||||
|
||||
async def connect(node1, node2):
    """Dial node2 from node1 using node2's first advertised listen address."""
    peer_info = info_from_p2p_addr(node2.get_addrs()[0])
    await node1.connect(peer_info)
|
||||
|
||||
def create_setup_in_new_thread_func(dummy_node):
    """
    Return a zero-argument callable that schedules dummy_node's
    crypto-networking setup on the currently running event loop.
    """
    def _setup():
        asyncio.ensure_future(dummy_node.setup_crypto_networking())
    return _setup
|
||||
|
||||
async def perform_test(num_nodes, adjacency_map, action_func, assertion_func):
    """
    Helper function to allow for easy construction of custom tests for msg
    ordering nodes in various network topologies.

    :param num_nodes: number of nodes in the test
    :param adjacency_map: adjacency map defining each node and its list of neighbors
    :param action_func: function to execute that includes actions by the nodes
    :param assertion_func: assertions for testing the results of the actions are correct
    """

    # Spin up the requested number of ordering nodes.
    dummy_nodes = [await MsgOrderingNode.create() for _ in range(num_nodes)]

    # Wire up the topology described by adjacency_map.
    for source_num, target_nums in adjacency_map.items():
        for target_num in target_nums:
            await connect(
                dummy_nodes[source_num].libp2p_node,
                dummy_nodes[target_num].libp2p_node,
            )

    # Allow time for network creation to take place.
    await asyncio.sleep(0.25)

    # Subscribe every node to BEE_MOVIE_TOPIC via its setup function.
    # NOTE(review): Thread.run() executes the target synchronously on the
    # *current* thread (unlike Thread.start()), so ensure_future attaches to
    # the main event loop — kept as-is to preserve that behavior; confirm
    # whether a real background thread was intended.
    for dummy_node in dummy_nodes:
        Thread(target=create_setup_in_new_thread_func(dummy_node)).run()

    # Allow time for nodes to subscribe to BEE_MOVIE_TOPIC topic.
    await asyncio.sleep(0.25)

    # Perform action function.
    await action_func(dummy_nodes)

    # Allow time for action function to be performed (i.e. messages to propagate).
    await asyncio.sleep(1)

    # Run the assertions against every node.
    for dummy_node in dummy_nodes:
        await assertion_func(dummy_node)

    # Success, terminate pending tasks.
    await cleanup()
|
||||
|
||||
@pytest.mark.asyncio
async def test_simple_two_nodes_one_word():
    """Two connected nodes: two words published in order arrive in order."""
    num_nodes = 2
    adj_map = {0: [1]}

    async def action_func(dummy_nodes):
        # Publish two words back to back from node 0.
        for word in ("aspyn", "hello"):
            await dummy_nodes[0].publish_bee_movie_word(word)

    async def assertion_func(dummy_node):
        # Every node must see the words in publish order.
        assert await dummy_node.get_next_word_in_bee_movie() == "aspyn"
        assert await dummy_node.get_next_word_in_bee_movie() == "hello"

    await perform_test(num_nodes, adj_map, action_func, assertion_func)
|
||||
|
||||
@pytest.mark.asyncio
async def test_simple_two_nodes_ten_words():
    """Two connected nodes: ten words published in order arrive in order."""
    num_nodes = 2
    adj_map = {0: [1]}

    words = ["aspyn", "is", "so", "good", "at", "writing", "code", "XD", ":)", "foobar"]

    async def action_func(dummy_nodes):
        for word in words:
            await dummy_nodes[0].publish_bee_movie_word(word)

    async def assertion_func(dummy_node):
        for expected in words:
            received = await dummy_node.get_next_word_in_bee_movie()
            assert received == expected

    await perform_test(num_nodes, adj_map, action_func, assertion_func)
|
||||
|
||||
@pytest.mark.asyncio
async def test_simple_two_nodes_two_words_out_of_order_ids():
    """Publishing seqno 2 before seqno 1 must still deliver word 1 first."""
    num_nodes = 2
    adj_map = {0: [1]}

    async def action_func(dummy_nodes):
        publisher = dummy_nodes[0]
        # Seqno 2 goes out first; the reader below must still block until
        # seqno 1 arrives and then receive "word 1" first.
        await publisher.publish_bee_movie_word("word 2", struct.pack('>I', 2))
        word, _, _ = await asyncio.gather(
            publisher.get_next_word_in_bee_movie(),
            asyncio.sleep(0.25),
            publisher.publish_bee_movie_word("word 1", struct.pack('>I', 1)),
        )
        assert word == "word 1"
        assert await publisher.get_next_word_in_bee_movie() == "word 2"

    async def assertion_func(dummy_node):
        # All checks happen inside action_func.
        pass

    await perform_test(num_nodes, adj_map, action_func, assertion_func)
|
||||
|
||||
@pytest.mark.asyncio
async def test_simple_two_nodes_two_words_read_then_publish_out_of_order_ids():
    """Start reading before publishing two out-of-order words; order must hold."""
    num_nodes = 2
    adj_map = {0: [1]}
    collected = None

    async def collect_all_words(expected_len, dummy_node):
        gathered = []
        while True:
            gathered.append(await dummy_node.get_next_word_in_bee_movie())
            if len(gathered) == expected_len:
                return gathered

    async def action_func(dummy_nodes):
        nonlocal collected
        publisher = dummy_nodes[0]
        results = await asyncio.gather(
            collect_all_words(2, publisher),
            asyncio.sleep(0.25),
            publisher.publish_bee_movie_word("word 2", struct.pack('>I', 2)),
            publisher.publish_bee_movie_word("word 1", struct.pack('>I', 1)),
        )
        # Store collected words to be checked in assertion_func.
        collected = results[0]

    async def assertion_func(dummy_node):
        assert collected[0] == "word 1"
        assert collected[1] == "word 2"

    await perform_test(num_nodes, adj_map, action_func, assertion_func)
|
||||
|
||||
@pytest.mark.asyncio
async def test_simple_two_nodes_ten_words_out_of_order_ids():
    """Ten words published with shuffled seqnos must be read back sorted."""
    num_nodes = 2
    adj_map = {0: [1]}
    collected = None

    async def collect_all_words(expected_len, dummy_node):
        gathered = []
        while True:
            gathered.append(await dummy_node.get_next_word_in_bee_movie())
            if len(gathered) == expected_len:
                return gathered

    async def action_func(dummy_nodes):
        nonlocal collected
        words = ["e", "b", "d", "i", "a", "h", "c", "f", "g", "j"]
        msg_id_nums = [5, 2, 4, 9, 1, 8, 3, 6, 7, 10]
        msg_ids = [struct.pack('>I', num) for num in msg_id_nums]

        # Reader first, then a short sleep, then the shuffled publishes.
        tasks = [collect_all_words(len(words), dummy_nodes[0]), asyncio.sleep(0.25)]
        tasks.extend(
            dummy_nodes[0].publish_bee_movie_word(word, msg_id)
            for word, msg_id in zip(words, msg_ids)
        )

        res = await asyncio.gather(*tasks)
        collected = res[0]

    async def assertion_func(dummy_node):
        correct_words = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
        assert collected == correct_words

    await perform_test(num_nodes, adj_map, action_func, assertion_func)
|
||||
|
||||
@pytest.mark.asyncio
async def test_simple_five_nodes_rings_words_out_of_order_ids():
    """Ring of five nodes: shuffled seqnos must be read back sorted everywhere."""
    num_nodes = 5
    adj_map = {0: [1], 1: [2], 2: [3], 3: [4], 4: [0]}
    collected = []

    async def collect_all_words(expected_len, dummy_node):
        gathered = []
        while True:
            gathered.append(await dummy_node.get_next_word_in_bee_movie())
            if len(gathered) == expected_len:
                return gathered

    async def action_func(dummy_nodes):
        nonlocal collected
        words = ["e", "b", "d", "i", "a", "h", "c", "f", "g", "j"]
        msg_id_nums = [5, 2, 4, 9, 1, 8, 3, 6, 7, 10]
        msg_ids = [struct.pack('>I', num) for num in msg_id_nums]

        # One reader per node, a short sleep, then the shuffled publishes.
        tasks = [collect_all_words(len(words), node) for node in dummy_nodes[:num_nodes]]
        tasks.append(asyncio.sleep(0.25))
        tasks.extend(
            dummy_nodes[0].publish_bee_movie_word(word, msg_id)
            for word, msg_id in zip(words, msg_ids)
        )

        res = await asyncio.gather(*tasks)

        # The first num_nodes results are the per-node collected word lists.
        collected.extend(res[:num_nodes])

    async def assertion_func(dummy_node):
        correct_words = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
        for node_words in collected[:num_nodes]:
            assert node_words == correct_words
            assert len(node_words) == len(correct_words)

    await perform_test(num_nodes, adj_map, action_func, assertion_func)
|
||||
|
||||
@pytest.mark.asyncio
async def test_simple_seven_nodes_tree_words_out_of_order_ids():
    """Binary tree of seven nodes: shuffled seqnos read back sorted everywhere."""
    num_nodes = 7
    adj_map = {0: [1, 2], 1: [3, 4], 2: [5, 6]}
    collected = []

    async def collect_all_words(expected_len, dummy_node):
        gathered = []
        while True:
            gathered.append(await dummy_node.get_next_word_in_bee_movie())
            if len(gathered) == expected_len:
                return gathered

    async def action_func(dummy_nodes):
        nonlocal collected
        words = ["e", "b", "d", "i", "a", "h", "c", "f", "g", "j"]
        msg_id_nums = [5, 2, 4, 9, 1, 8, 3, 6, 7, 10]
        msg_ids = [struct.pack('>I', num) for num in msg_id_nums]

        # One reader per node, a short sleep, then the shuffled publishes.
        tasks = [collect_all_words(len(words), node) for node in dummy_nodes[:num_nodes]]
        tasks.append(asyncio.sleep(0.25))
        tasks.extend(
            dummy_nodes[0].publish_bee_movie_word(word, msg_id)
            for word, msg_id in zip(words, msg_ids)
        )

        res = await asyncio.gather(*tasks)

        # The first num_nodes results are the per-node collected word lists.
        collected.extend(res[:num_nodes])

    async def assertion_func(dummy_node):
        correct_words = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
        for node_words in collected[:num_nodes]:
            assert node_words == correct_words
            assert len(node_words) == len(correct_words)

    await perform_test(num_nodes, adj_map, action_func, assertion_func)
|
||||
|
||||
def download_bee_movie():
    """
    Download the full bee movie script over HTTP and return it as text.

    :return: the script decoded as a UTF-8 str
    """
    url = "https://gist.githubusercontent.com/stuckinaboot/c531823814af1f6785f75ed7eedf60cb/raw/5107c5e6c2fda2ff54cfcc9803bbb297a53db71b/bee_movie.txt"
    # Close the HTTP response deterministically (the original leaked it).
    with urllib.request.urlopen(url) as response:
        data = response.read()  # a `bytes` object
    # This step can't be used if data is binary.
    return data.decode('utf-8')
|
||||
|
||||
@pytest.mark.asyncio
async def test_simple_two_nodes_bee_movie():
    """Stream the entire bee movie script between two nodes, checking order."""
    print("Downloading Bee Movie")
    bee_movie_script = download_bee_movie()
    print("Downloaded Bee Movie")
    bee_movie_words = bee_movie_script.split(" ")
    print("Bee Movie Script Split on Spaces, # spaces = " + str(len(bee_movie_words)))

    num_nodes = 2
    adj_map = {0: [1]}
    collected = []

    async def collect_all_words(expected_len, dummy_node, log_nodes=None):
        # None default instead of a mutable [] (shared-state bug magnet).
        if log_nodes is None:
            log_nodes = []
        collected_words = []
        while True:
            word = await dummy_node.get_next_word_in_bee_movie()
            collected_words.append(word)

            # Log progress only for nodes we were asked to watch.
            if dummy_node in log_nodes:
                print(word + "| " + str(len(collected_words)) + "/" + str(expected_len))

            if len(collected_words) == expected_len:
                print("Returned collected words")
                return collected_words

    async def action_func(dummy_nodes):
        print("Start action function")
        words = bee_movie_words
        tasks = []

        print("Add collect all words")
        log_nodes = [dummy_nodes[0]]
        for i in range(num_nodes):
            tasks.append(collect_all_words(len(words), dummy_nodes[i], log_nodes))

        print("Add sleep")
        tasks.append(asyncio.sleep(0.25))

        print("Add publish")
        for word in words:
            tasks.append(dummy_nodes[0].publish_bee_movie_word(word))

        print("Perform gather")
        res = await asyncio.gather(*tasks)

        print("Filling collected")
        nonlocal collected
        for i in range(num_nodes):
            collected.append(res[i])
        print("Filled collected")

    async def assertion_func(dummy_node):
        print("Perform assertion")
        correct_words = bee_movie_words
        for i in range(num_nodes):
            assert collected[i] == correct_words
            assert len(collected[i]) == len(correct_words)
        print("Assertion performed")

    await perform_test(num_nodes, adj_map, action_func, assertion_func)
|
||||
|
||||
@pytest.mark.asyncio
async def test_simple_seven_nodes_tree_bee_movie():
    """Stream the entire bee movie script through a seven-node tree, checking order."""
    print("Downloading Bee Movie")
    bee_movie_script = download_bee_movie()
    print("Downloaded Bee Movie")
    bee_movie_words = bee_movie_script.split(" ")
    print("Bee Movie Script Split on Spaces, # spaces = " + str(len(bee_movie_words)))

    num_nodes = 7
    adj_map = {0: [1, 2], 1: [3, 4], 2: [5, 6]}
    collected = []

    async def collect_all_words(expected_len, dummy_node, log_nodes=None):
        # None default instead of a mutable [] (shared-state bug magnet).
        if log_nodes is None:
            log_nodes = []
        collected_words = []
        while True:
            word = await dummy_node.get_next_word_in_bee_movie()
            collected_words.append(word)

            # Log progress only for nodes we were asked to watch.
            if dummy_node in log_nodes:
                print(word + "| " + str(len(collected_words)) + "/" + str(expected_len))

            if len(collected_words) == expected_len:
                print("Returned collected words")
                return collected_words

    async def action_func(dummy_nodes):
        print("Start action function")
        words = bee_movie_words
        tasks = []

        print("Add collect all words")
        log_nodes = [dummy_nodes[0]]
        for i in range(num_nodes):
            tasks.append(collect_all_words(len(words), dummy_nodes[i], log_nodes))

        print("Add sleep")
        tasks.append(asyncio.sleep(0.25))

        print("Add publish")
        for word in words:
            tasks.append(dummy_nodes[0].publish_bee_movie_word(word))

        print("Perform gather")
        # return_exceptions=True: a failed reader surfaces in res rather
        # than cancelling the whole gather.
        res = await asyncio.gather(*tasks, return_exceptions=True)

        print("Filling collected")
        nonlocal collected
        for i in range(num_nodes):
            collected.append(res[i])
        print("Filled collected")

    async def assertion_func(dummy_node):
        print("Perform assertion")
        correct_words = bee_movie_words
        for i in range(num_nodes):
            assert collected[i] == correct_words
            assert len(collected[i]) == len(correct_words)
        print("Assertion performed")

    await perform_test(num_nodes, adj_map, action_func, assertion_func)
|
|
@ -1,103 +1,97 @@
|
|||
import argparse
|
||||
import asyncio
|
||||
import sys
|
||||
import urllib.request
|
||||
|
||||
import click
|
||||
import multiaddr
|
||||
import trio
|
||||
|
||||
from libp2p import new_host
|
||||
from libp2p.network.stream.net_stream_interface import INetStream
|
||||
from libp2p import new_node
|
||||
from libp2p.peer.peerinfo import info_from_p2p_addr
|
||||
from libp2p.typing import TProtocol
|
||||
|
||||
PROTOCOL_ID = TProtocol("/chat/1.0.0")
|
||||
MAX_READ_LEN = 2 ** 32 - 1
|
||||
|
||||
|
||||
async def read_data(stream: INetStream) -> None:
|
||||
PROTOCOL_ID = '/chat/1.0.0'
|
||||
|
||||
|
||||
async def read_data(stream):
|
||||
while True:
|
||||
read_bytes = await stream.read(MAX_READ_LEN)
|
||||
if read_bytes is not None:
|
||||
read_string = read_bytes.decode()
|
||||
read_string = await stream.read()
|
||||
if read_string is not None:
|
||||
read_string = read_string.decode()
|
||||
if read_string != "\n":
|
||||
# Green console colour: \x1b[32m
|
||||
# Reset console colour: \x1b[0m
|
||||
print("\x1b[32m %s\x1b[0m " % read_string, end="")
|
||||
|
||||
|
||||
async def write_data(stream: INetStream) -> None:
|
||||
async_f = trio.wrap_file(sys.stdin)
|
||||
async def write_data(stream):
|
||||
loop = asyncio.get_event_loop()
|
||||
while True:
|
||||
line = await async_f.readline()
|
||||
line = await loop.run_in_executor(None, sys.stdin.readline)
|
||||
await stream.write(line.encode())
|
||||
|
||||
|
||||
async def run(port: int, destination: str) -> None:
|
||||
localhost_ip = "127.0.0.1"
|
||||
listen_addr = multiaddr.Multiaddr(f"/ip4/0.0.0.0/tcp/{port}")
|
||||
host = new_host()
|
||||
async with host.run(listen_addrs=[listen_addr]), trio.open_nursery() as nursery:
|
||||
if not destination: # its the server
|
||||
async def run(port, destination):
|
||||
external_ip = urllib.request.urlopen(
|
||||
'https://v4.ident.me/').read().decode('utf8')
|
||||
host = await new_node(
|
||||
transport_opt=["/ip4/%s/tcp/%s" % (external_ip, port)])
|
||||
if not destination: # its the server
|
||||
async def stream_handler(stream):
|
||||
asyncio.ensure_future(read_data(stream))
|
||||
asyncio.ensure_future(write_data(stream))
|
||||
host.set_stream_handler(PROTOCOL_ID, stream_handler)
|
||||
|
||||
async def stream_handler(stream: INetStream) -> None:
|
||||
nursery.start_soon(read_data, stream)
|
||||
nursery.start_soon(write_data, stream)
|
||||
port = None
|
||||
ip = None
|
||||
for listener in host.network.listeners.values():
|
||||
for addr in listener.get_addrs():
|
||||
ip = addr.value_for_protocol('ip4')
|
||||
port = int(addr.value_for_protocol('tcp'))
|
||||
|
||||
host.set_stream_handler(PROTOCOL_ID, stream_handler)
|
||||
if not port:
|
||||
raise RuntimeError("was not able to find the actual local port")
|
||||
|
||||
print(
|
||||
f"Run 'python ./examples/chat/chat.py "
|
||||
f"-p {int(port) + 1} "
|
||||
f"-d /ip4/{localhost_ip}/tcp/{port}/p2p/{host.get_id().pretty()}' "
|
||||
"on another console."
|
||||
)
|
||||
print("Waiting for incoming connection...")
|
||||
print("Run './examples/chat/chat.py -p %s -d /ip4/%s/tcp/%s/p2p/%s' on another console.\n" %
|
||||
(int(port) + 1, ip, port, host.get_id().pretty()))
|
||||
print("\nWaiting for incoming connection\n\n")
|
||||
|
||||
else: # its the client
|
||||
maddr = multiaddr.Multiaddr(destination)
|
||||
info = info_from_p2p_addr(maddr)
|
||||
# Associate the peer with local ip address
|
||||
await host.connect(info)
|
||||
# Start a stream with the destination.
|
||||
# Multiaddress of the destination peer is fetched from the peerstore using 'peerId'.
|
||||
stream = await host.new_stream(info.peer_id, [PROTOCOL_ID])
|
||||
else: # its the client
|
||||
m = multiaddr.Multiaddr(destination)
|
||||
info = info_from_p2p_addr(m)
|
||||
# Associate the peer with local ip address
|
||||
await host.connect(info)
|
||||
|
||||
nursery.start_soon(read_data, stream)
|
||||
nursery.start_soon(write_data, stream)
|
||||
print(f"Connected to peer {info.addrs[0]}")
|
||||
# Start a stream with the destination.
|
||||
# Multiaddress of the destination peer is fetched from the peerstore using 'peerId'.
|
||||
stream = await host.new_stream(info.peer_id, [PROTOCOL_ID])
|
||||
|
||||
await trio.sleep_forever()
|
||||
asyncio.ensure_future(read_data(stream))
|
||||
asyncio.ensure_future(write_data(stream))
|
||||
print("Connected to peer %s" % info.addrs[0])
|
||||
|
||||
|
||||
def main() -> None:
|
||||
description = """
|
||||
This program demonstrates a simple p2p chat application using libp2p.
|
||||
To use it, first run 'python ./chat -p <PORT>', where <PORT> is the port number.
|
||||
Then, run another host with 'python ./chat -p <ANOTHER_PORT> -d <DESTINATION>',
|
||||
where <DESTINATION> is the multiaddress of the previous listener host.
|
||||
"""
|
||||
example_maddr = (
|
||||
"/ip4/127.0.0.1/tcp/8000/p2p/QmQn4SwGkDZKkUEpBRBvTmheQycxAHJUNmVEnjA2v1qe8Q"
|
||||
)
|
||||
parser = argparse.ArgumentParser(description=description)
|
||||
parser.add_argument(
|
||||
"-p", "--port", default=8000, type=int, help="source port number"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--destination",
|
||||
type=str,
|
||||
help=f"destination multiaddr string, e.g. {example_maddr}",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
@click.command()
|
||||
@click.option('--port', '-p', help='source port number', default=8000)
|
||||
@click.option('--destination', '-d', help="Destination multiaddr string")
|
||||
@click.option('--help', is_flag=True, default=False, help='display help')
|
||||
# @click.option('--debug', is_flag=True, default=False, help='Debug generates the same node ID on every execution')
|
||||
def main(port, destination, help):
|
||||
|
||||
if not args.port:
|
||||
raise RuntimeError("was not able to determine a local port")
|
||||
if help:
|
||||
print("This program demonstrates a simple p2p chat application using libp2p\n\n")
|
||||
print("Usage: Run './chat -p <SOURCE_PORT>' where <SOURCE_PORT> can be any port number.")
|
||||
print("Now run './chat -p <PORT> -d <MULTIADDR>' where <MULTIADDR> is multiaddress of previous listener host.")
|
||||
return
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
try:
|
||||
trio.run(run, *(args.port, args.destination))
|
||||
asyncio.ensure_future(run(port, destination))
|
||||
loop.run_forever()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
|
@ -1,116 +0,0 @@
|
|||
import argparse
|
||||
|
||||
import multiaddr
|
||||
import trio
|
||||
|
||||
from libp2p import new_host
|
||||
from libp2p.crypto.secp256k1 import create_new_key_pair
|
||||
from libp2p.network.stream.net_stream_interface import INetStream
|
||||
from libp2p.peer.peerinfo import info_from_p2p_addr
|
||||
from libp2p.typing import TProtocol
|
||||
|
||||
PROTOCOL_ID = TProtocol("/echo/1.0.0")
|
||||
|
||||
|
||||
async def _echo_stream_handler(stream: INetStream) -> None:
|
||||
# Wait until EOF
|
||||
msg = await stream.read()
|
||||
await stream.write(msg)
|
||||
await stream.close()
|
||||
|
||||
|
||||
async def run(port: int, destination: str, seed: int = None) -> None:
|
||||
localhost_ip = "127.0.0.1"
|
||||
listen_addr = multiaddr.Multiaddr(f"/ip4/0.0.0.0/tcp/{port}")
|
||||
|
||||
if seed:
|
||||
import random
|
||||
|
||||
random.seed(seed)
|
||||
secret_number = random.getrandbits(32 * 8)
|
||||
secret = secret_number.to_bytes(length=32, byteorder="big")
|
||||
else:
|
||||
import secrets
|
||||
|
||||
secret = secrets.token_bytes(32)
|
||||
|
||||
host = new_host(key_pair=create_new_key_pair(secret))
|
||||
async with host.run(listen_addrs=[listen_addr]):
|
||||
|
||||
print(f"I am {host.get_id().to_string()}")
|
||||
|
||||
if not destination: # its the server
|
||||
|
||||
host.set_stream_handler(PROTOCOL_ID, _echo_stream_handler)
|
||||
|
||||
print(
|
||||
f"Run 'python ./examples/echo/echo.py "
|
||||
f"-p {int(port) + 1} "
|
||||
f"-d /ip4/{localhost_ip}/tcp/{port}/p2p/{host.get_id().pretty()}' "
|
||||
"on another console."
|
||||
)
|
||||
print("Waiting for incoming connections...")
|
||||
await trio.sleep_forever()
|
||||
|
||||
else: # its the client
|
||||
maddr = multiaddr.Multiaddr(destination)
|
||||
info = info_from_p2p_addr(maddr)
|
||||
# Associate the peer with local ip address
|
||||
await host.connect(info)
|
||||
|
||||
# Start a stream with the destination.
|
||||
# Multiaddress of the destination peer is fetched from the peerstore using 'peerId'.
|
||||
stream = await host.new_stream(info.peer_id, [PROTOCOL_ID])
|
||||
|
||||
msg = b"hi, there!\n"
|
||||
|
||||
await stream.write(msg)
|
||||
# Notify the other side about EOF
|
||||
await stream.close()
|
||||
response = await stream.read()
|
||||
|
||||
print(f"Sent: {msg}")
|
||||
print(f"Got: {response}")
|
||||
|
||||
|
||||
def main() -> None:
|
||||
description = """
|
||||
This program demonstrates a simple echo protocol where a peer listens for
|
||||
connections and copies back any input received on a stream.
|
||||
|
||||
To use it, first run 'python ./echo -p <PORT>', where <PORT> is the port number.
|
||||
Then, run another host with 'python ./chat -p <ANOTHER_PORT> -d <DESTINATION>',
|
||||
where <DESTINATION> is the multiaddress of the previous listener host.
|
||||
"""
|
||||
example_maddr = (
|
||||
"/ip4/127.0.0.1/tcp/8000/p2p/QmQn4SwGkDZKkUEpBRBvTmheQycxAHJUNmVEnjA2v1qe8Q"
|
||||
)
|
||||
parser = argparse.ArgumentParser(description=description)
|
||||
parser.add_argument(
|
||||
"-p", "--port", default=8000, type=int, help="source port number"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--destination",
|
||||
type=str,
|
||||
help=f"destination multiaddr string, e.g. {example_maddr}",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-s",
|
||||
"--seed",
|
||||
type=int,
|
||||
help="provide a seed to the random number generator (e.g. to fix peer IDs across runs)",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
if not args.port:
|
||||
raise RuntimeError("was not able to determine a local port")
|
||||
|
||||
try:
|
||||
trio.run(run, args.port, args.destination, args.seed)
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,98 +1,83 @@
|
|||
from libp2p.crypto.keys import KeyPair
|
||||
from libp2p.crypto.rsa import create_new_key_pair
|
||||
from libp2p.host.basic_host import BasicHost
|
||||
from libp2p.host.host_interface import IHost
|
||||
from libp2p.host.routed_host import RoutedHost
|
||||
from libp2p.network.network_interface import INetworkService
|
||||
from libp2p.network.swarm import Swarm
|
||||
from libp2p.peer.id import ID
|
||||
from libp2p.peer.peerstore import PeerStore
|
||||
from libp2p.peer.peerstore_interface import IPeerStore
|
||||
from libp2p.routing.interfaces import IPeerRouting
|
||||
from libp2p.security.insecure.transport import PLAINTEXT_PROTOCOL_ID, InsecureTransport
|
||||
import libp2p.security.secio.transport as secio
|
||||
from libp2p.stream_muxer.mplex.mplex import MPLEX_PROTOCOL_ID, Mplex
|
||||
from libp2p.transport.tcp.tcp import TCP
|
||||
from libp2p.transport.typing import TMuxerOptions, TSecurityOptions
|
||||
from libp2p.transport.upgrader import TransportUpgrader
|
||||
from libp2p.typing import TProtocol
|
||||
import asyncio
|
||||
import multiaddr
|
||||
|
||||
from Crypto.PublicKey import RSA
|
||||
from .peer.peerstore import PeerStore
|
||||
from .peer.id import id_from_public_key
|
||||
from .network.swarm import Swarm
|
||||
from .host.basic_host import BasicHost
|
||||
from .transport.upgrader import TransportUpgrader
|
||||
from .transport.tcp.tcp import TCP
|
||||
|
||||
|
||||
def generate_new_rsa_identity() -> KeyPair:
|
||||
return create_new_key_pair()
|
||||
|
||||
|
||||
def generate_peer_id_from(key_pair: KeyPair) -> ID:
|
||||
public_key = key_pair.public_key
|
||||
return ID.from_pubkey(public_key)
|
||||
|
||||
|
||||
def new_swarm(
|
||||
key_pair: KeyPair = None,
|
||||
muxer_opt: TMuxerOptions = None,
|
||||
sec_opt: TSecurityOptions = None,
|
||||
peerstore_opt: IPeerStore = None,
|
||||
) -> INetworkService:
|
||||
async def cleanup_done_tasks():
|
||||
"""
|
||||
Create a swarm instance based on the parameters.
|
||||
clean up asyncio done tasks to free up resources
|
||||
"""
|
||||
while True:
|
||||
for task in asyncio.all_tasks():
|
||||
if task.done():
|
||||
await task
|
||||
|
||||
:param key_pair: optional choice of the ``KeyPair``
|
||||
# Need not run often
|
||||
# Some sleep necessary to context switch
|
||||
await asyncio.sleep(3)
|
||||
|
||||
def initialize_default_swarm(
|
||||
id_opt=None, transport_opt=None,
|
||||
muxer_opt=None, sec_opt=None, peerstore_opt=None):
|
||||
"""
|
||||
initialize swarm when no swarm is passed in
|
||||
:param id_opt: optional id for host
|
||||
:param transport_opt: optional choice of transport upgrade
|
||||
:param muxer_opt: optional choice of stream muxer
|
||||
:param sec_opt: optional choice of security upgrade
|
||||
:param peerstore_opt: optional peerstore
|
||||
:return: return a default swarm instance
|
||||
"""
|
||||
# pylint: disable=too-many-arguments, unused-argument
|
||||
if not id_opt:
|
||||
new_key = RSA.generate(2048, e=65537)
|
||||
id_opt = id_from_public_key(new_key.publickey())
|
||||
# private_key = new_key.exportKey("PEM")
|
||||
|
||||
if key_pair is None:
|
||||
key_pair = generate_new_rsa_identity()
|
||||
|
||||
id_opt = generate_peer_id_from(key_pair)
|
||||
|
||||
# TODO: Parse `listen_addrs` to determine transport
|
||||
transport = TCP()
|
||||
|
||||
muxer_transports_by_protocol = muxer_opt or {MPLEX_PROTOCOL_ID: Mplex}
|
||||
security_transports_by_protocol = sec_opt or {
|
||||
TProtocol(PLAINTEXT_PROTOCOL_ID): InsecureTransport(key_pair),
|
||||
TProtocol(secio.ID): secio.Transport(key_pair),
|
||||
}
|
||||
upgrader = TransportUpgrader(
|
||||
security_transports_by_protocol, muxer_transports_by_protocol
|
||||
)
|
||||
|
||||
transport_opt = transport_opt or ["/ip4/127.0.0.1/tcp/8001"]
|
||||
transport = [multiaddr.Multiaddr(t) for t in transport_opt]
|
||||
# TODO wire muxer up with swarm
|
||||
# muxer = muxer_opt or ["mplex/6.7.0"]
|
||||
sec = sec_opt or ["secio"]
|
||||
peerstore = peerstore_opt or PeerStore()
|
||||
# Store our key pair in peerstore
|
||||
peerstore.add_key_pair(id_opt, key_pair)
|
||||
upgrader = TransportUpgrader(sec, transport)
|
||||
swarm_opt = Swarm(id_opt, peerstore, upgrader)
|
||||
tcp = TCP()
|
||||
swarm_opt.add_transport(tcp)
|
||||
|
||||
return Swarm(id_opt, peerstore, upgrader, transport)
|
||||
return swarm_opt
|
||||
|
||||
|
||||
def new_host(
|
||||
key_pair: KeyPair = None,
|
||||
muxer_opt: TMuxerOptions = None,
|
||||
sec_opt: TSecurityOptions = None,
|
||||
peerstore_opt: IPeerStore = None,
|
||||
disc_opt: IPeerRouting = None,
|
||||
) -> IHost:
|
||||
async def new_node(
|
||||
swarm_opt=None, id_opt=None, transport_opt=None,
|
||||
muxer_opt=None, sec_opt=None, peerstore_opt=None):
|
||||
"""
|
||||
Create a new libp2p host based on the given parameters.
|
||||
|
||||
:param key_pair: optional choice of the ``KeyPair``
|
||||
create new libp2p node
|
||||
:param id_opt: optional id for host
|
||||
:param transport_opt: optional choice of transport upgrade
|
||||
:param muxer_opt: optional choice of stream muxer
|
||||
:param sec_opt: optional choice of security upgrade
|
||||
:param peerstore_opt: optional peerstore
|
||||
:param disc_opt: optional discovery
|
||||
:return: return a host instance
|
||||
:return: return a default swarm instance
|
||||
"""
|
||||
swarm = new_swarm(
|
||||
key_pair=key_pair,
|
||||
muxer_opt=muxer_opt,
|
||||
sec_opt=sec_opt,
|
||||
peerstore_opt=peerstore_opt,
|
||||
)
|
||||
host: IHost
|
||||
if disc_opt:
|
||||
host = RoutedHost(swarm, disc_opt)
|
||||
else:
|
||||
host = BasicHost(swarm)
|
||||
# pylint: disable=too-many-arguments
|
||||
if not swarm_opt:
|
||||
swarm_opt = initialize_default_swarm(
|
||||
id_opt=id_opt, transport_opt=transport_opt,
|
||||
muxer_opt=muxer_opt, sec_opt=sec_opt,
|
||||
peerstore_opt=peerstore_opt)
|
||||
|
||||
# TODO enable support for other host type
|
||||
# TODO routing unimplemented
|
||||
host = BasicHost(swarm_opt)
|
||||
|
||||
# Kick off cleanup job
|
||||
asyncio.ensure_future(cleanup_done_tasks())
|
||||
|
||||
return host
|
||||
|
|
|
@ -1,125 +0,0 @@
|
|||
from dataclasses import dataclass
|
||||
import hmac
|
||||
from typing import Tuple
|
||||
|
||||
from Crypto.Cipher import AES
|
||||
import Crypto.Util.Counter as Counter
|
||||
|
||||
|
||||
class InvalidMACException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class EncryptionParameters:
|
||||
cipher_type: str
|
||||
hash_type: str
|
||||
iv: bytes
|
||||
mac_key: bytes
|
||||
cipher_key: bytes
|
||||
|
||||
|
||||
class MacAndCipher:
|
||||
def __init__(self, parameters: EncryptionParameters) -> None:
|
||||
self.authenticator = hmac.new(
|
||||
parameters.mac_key, digestmod=parameters.hash_type
|
||||
)
|
||||
iv_bit_size = 8 * len(parameters.iv)
|
||||
cipher = AES.new(
|
||||
parameters.cipher_key,
|
||||
AES.MODE_CTR,
|
||||
counter=Counter.new(
|
||||
iv_bit_size,
|
||||
initial_value=int.from_bytes(parameters.iv, byteorder="big"),
|
||||
),
|
||||
)
|
||||
self.cipher = cipher
|
||||
|
||||
def encrypt(self, data: bytes) -> bytes:
|
||||
return self.cipher.encrypt(data)
|
||||
|
||||
def authenticate(self, data: bytes) -> bytes:
|
||||
authenticator = self.authenticator.copy()
|
||||
authenticator.update(data)
|
||||
return authenticator.digest()
|
||||
|
||||
def decrypt_if_valid(self, data_with_tag: bytes) -> bytes:
|
||||
tag_position = len(data_with_tag) - self.authenticator.digest_size
|
||||
data = data_with_tag[:tag_position]
|
||||
tag = data_with_tag[tag_position:]
|
||||
|
||||
authenticator = self.authenticator.copy()
|
||||
authenticator.update(data)
|
||||
expected_tag = authenticator.digest()
|
||||
|
||||
if not hmac.compare_digest(tag, expected_tag):
|
||||
raise InvalidMACException(expected_tag, tag)
|
||||
|
||||
return self.cipher.decrypt(data)
|
||||
|
||||
|
||||
def initialize_pair(
|
||||
cipher_type: str, hash_type: str, secret: bytes
|
||||
) -> Tuple[EncryptionParameters, EncryptionParameters]:
|
||||
"""Return a pair of ``Keys`` for use in securing a communications channel
|
||||
with authenticated encryption derived from the ``secret`` and using the
|
||||
requested ``cipher_type`` and ``hash_type``."""
|
||||
if cipher_type != "AES-128":
|
||||
raise NotImplementedError()
|
||||
if hash_type != "SHA256":
|
||||
raise NotImplementedError()
|
||||
|
||||
iv_size = 16
|
||||
cipher_key_size = 16
|
||||
hmac_key_size = 20
|
||||
seed = "key expansion".encode()
|
||||
|
||||
params_size = iv_size + cipher_key_size + hmac_key_size
|
||||
result = bytearray(2 * params_size)
|
||||
|
||||
authenticator = hmac.new(secret, digestmod=hash_type)
|
||||
authenticator.update(seed)
|
||||
tag = authenticator.digest()
|
||||
|
||||
i = 0
|
||||
len_result = 2 * params_size
|
||||
while i < len_result:
|
||||
authenticator = hmac.new(secret, digestmod=hash_type)
|
||||
|
||||
authenticator.update(tag)
|
||||
authenticator.update(seed)
|
||||
|
||||
another_tag = authenticator.digest()
|
||||
|
||||
remaining_bytes = len(another_tag)
|
||||
|
||||
if i + remaining_bytes > len_result:
|
||||
remaining_bytes = len_result - i
|
||||
|
||||
result[i : i + remaining_bytes] = another_tag[0:remaining_bytes]
|
||||
|
||||
i += remaining_bytes
|
||||
|
||||
authenticator = hmac.new(secret, digestmod=hash_type)
|
||||
authenticator.update(tag)
|
||||
tag = authenticator.digest()
|
||||
|
||||
first_half = result[:params_size]
|
||||
second_half = result[params_size:]
|
||||
|
||||
return (
|
||||
EncryptionParameters(
|
||||
cipher_type,
|
||||
hash_type,
|
||||
first_half[0:iv_size],
|
||||
first_half[iv_size + cipher_key_size :],
|
||||
first_half[iv_size : iv_size + cipher_key_size],
|
||||
),
|
||||
EncryptionParameters(
|
||||
cipher_type,
|
||||
hash_type,
|
||||
second_half[0:iv_size],
|
||||
second_half[iv_size + cipher_key_size :],
|
||||
second_half[iv_size : iv_size + cipher_key_size],
|
||||
),
|
||||
)
|
|
@ -1,68 +0,0 @@
|
|||
from fastecdsa import curve as curve_types
|
||||
from fastecdsa import keys, point
|
||||
from fastecdsa.encoding.sec1 import SEC1Encoder
|
||||
|
||||
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
|
||||
|
||||
|
||||
def infer_local_type(curve: str) -> curve_types.Curve:
|
||||
"""converts a ``str`` representation of some elliptic curve to a
|
||||
representation understood by the backend of this module."""
|
||||
if curve == "P-256":
|
||||
return curve_types.P256
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class ECCPublicKey(PublicKey):
|
||||
def __init__(self, impl: point.Point, curve: curve_types.Curve) -> None:
|
||||
self.impl = impl
|
||||
self.curve = curve
|
||||
|
||||
def to_bytes(self) -> bytes:
|
||||
return SEC1Encoder.encode_public_key(self.impl, compressed=False)
|
||||
|
||||
@classmethod
|
||||
def from_bytes(cls, data: bytes, curve: str) -> "ECCPublicKey":
|
||||
curve_type = infer_local_type(curve)
|
||||
public_key_impl = SEC1Encoder.decode_public_key(data, curve_type)
|
||||
return cls(public_key_impl, curve_type)
|
||||
|
||||
def get_type(self) -> KeyType:
|
||||
return KeyType.ECC_P256
|
||||
|
||||
def verify(self, data: bytes, signature: bytes) -> bool:
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class ECCPrivateKey(PrivateKey):
|
||||
def __init__(self, impl: int, curve: curve_types.Curve) -> None:
|
||||
self.impl = impl
|
||||
self.curve = curve
|
||||
|
||||
@classmethod
|
||||
def new(cls, curve: str) -> "ECCPrivateKey":
|
||||
curve_type = infer_local_type(curve)
|
||||
private_key_impl = keys.gen_private_key(curve_type)
|
||||
return cls(private_key_impl, curve_type)
|
||||
|
||||
def to_bytes(self) -> bytes:
|
||||
return keys.export_key(self.impl, self.curve)
|
||||
|
||||
def get_type(self) -> KeyType:
|
||||
return KeyType.ECC_P256
|
||||
|
||||
def sign(self, data: bytes) -> bytes:
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_public_key(self) -> PublicKey:
|
||||
public_key_impl = keys.get_public_key(self.impl, self.curve)
|
||||
return ECCPublicKey(public_key_impl, self.curve)
|
||||
|
||||
|
||||
def create_new_key_pair(curve: str) -> KeyPair:
|
||||
"""Return a new ECC keypair with the requested ``curve`` type, e.g.
|
||||
"P-256"."""
|
||||
private_key = ECCPrivateKey.new(curve)
|
||||
public_key = private_key.get_public_key()
|
||||
return KeyPair(private_key, public_key)
|
|
@ -1,69 +0,0 @@
|
|||
from Crypto.Hash import SHA256
|
||||
from nacl.exceptions import BadSignatureError
|
||||
from nacl.public import PrivateKey as PrivateKeyImpl
|
||||
from nacl.public import PublicKey as PublicKeyImpl
|
||||
from nacl.signing import SigningKey, VerifyKey
|
||||
import nacl.utils as utils
|
||||
|
||||
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
|
||||
|
||||
|
||||
class Ed25519PublicKey(PublicKey):
|
||||
def __init__(self, impl: PublicKeyImpl) -> None:
|
||||
self.impl = impl
|
||||
|
||||
def to_bytes(self) -> bytes:
|
||||
return bytes(self.impl)
|
||||
|
||||
@classmethod
|
||||
def from_bytes(cls, key_bytes: bytes) -> "Ed25519PublicKey":
|
||||
return cls(PublicKeyImpl(key_bytes))
|
||||
|
||||
def get_type(self) -> KeyType:
|
||||
return KeyType.Ed25519
|
||||
|
||||
def verify(self, data: bytes, signature: bytes) -> bool:
|
||||
verify_key = VerifyKey(self.to_bytes())
|
||||
try:
|
||||
verify_key.verify(data, signature)
|
||||
except BadSignatureError:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class Ed25519PrivateKey(PrivateKey):
|
||||
def __init__(self, impl: PrivateKeyImpl) -> None:
|
||||
self.impl = impl
|
||||
|
||||
@classmethod
|
||||
def new(cls, seed: bytes = None) -> "Ed25519PrivateKey":
|
||||
if not seed:
|
||||
seed = utils.random()
|
||||
|
||||
private_key_impl = PrivateKeyImpl.from_seed(seed)
|
||||
return cls(private_key_impl)
|
||||
|
||||
def to_bytes(self) -> bytes:
|
||||
return bytes(self.impl)
|
||||
|
||||
@classmethod
|
||||
def from_bytes(cls, data: bytes) -> "Ed25519PrivateKey":
|
||||
impl = PrivateKeyImpl(data)
|
||||
return cls(impl)
|
||||
|
||||
def get_type(self) -> KeyType:
|
||||
return KeyType.Ed25519
|
||||
|
||||
def sign(self, data: bytes) -> bytes:
|
||||
h = SHA256.new(data)
|
||||
signing_key = SigningKey(self.to_bytes())
|
||||
return signing_key.sign(h)
|
||||
|
||||
def get_public_key(self) -> PublicKey:
|
||||
return Ed25519PublicKey(self.impl.public_key)
|
||||
|
||||
|
||||
def create_new_key_pair(seed: bytes = None) -> KeyPair:
|
||||
private_key = Ed25519PrivateKey.new(seed)
|
||||
public_key = private_key.get_public_key()
|
||||
return KeyPair(private_key, public_key)
|
|
@ -1,12 +0,0 @@
|
|||
from libp2p.exceptions import BaseLibp2pError
|
||||
|
||||
|
||||
class CryptographyError(BaseLibp2pError):
|
||||
pass
|
||||
|
||||
|
||||
class MissingDeserializerError(CryptographyError):
|
||||
"""Raise if the requested deserialization routine is missing for some type
|
||||
of cryptographic key."""
|
||||
|
||||
pass
|
|
@ -1,29 +0,0 @@
|
|||
from typing import Callable, Tuple, cast
|
||||
|
||||
from fastecdsa.encoding import util
|
||||
|
||||
from libp2p.crypto.ecc import ECCPrivateKey, ECCPublicKey, create_new_key_pair
|
||||
from libp2p.crypto.keys import PublicKey
|
||||
|
||||
SharedKeyGenerator = Callable[[bytes], bytes]
|
||||
|
||||
int_bytelen = util.int_bytelen
|
||||
|
||||
|
||||
def create_ephemeral_key_pair(curve_type: str) -> Tuple[PublicKey, SharedKeyGenerator]:
|
||||
"""Facilitates ECDH key exchange."""
|
||||
if curve_type != "P-256":
|
||||
raise NotImplementedError()
|
||||
|
||||
key_pair = create_new_key_pair(curve_type)
|
||||
|
||||
def _key_exchange(serialized_remote_public_key: bytes) -> bytes:
|
||||
private_key = cast(ECCPrivateKey, key_pair.private_key)
|
||||
|
||||
remote_point = ECCPublicKey.from_bytes(serialized_remote_public_key, curve_type)
|
||||
secret_point = remote_point.impl * private_key.impl
|
||||
secret_x_coordinate = secret_point.x
|
||||
byte_size = int_bytelen(secret_x_coordinate)
|
||||
return secret_x_coordinate.to_bytes(byte_size, byteorder="big")
|
||||
|
||||
return key_pair.public_key, _key_exchange
|
|
@ -1,91 +0,0 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum, unique
|
||||
|
||||
from .pb import crypto_pb2 as protobuf
|
||||
|
||||
|
||||
@unique
|
||||
class KeyType(Enum):
|
||||
RSA = 0
|
||||
Ed25519 = 1
|
||||
Secp256k1 = 2
|
||||
ECDSA = 3
|
||||
ECC_P256 = 4
|
||||
|
||||
|
||||
class Key(ABC):
|
||||
"""A ``Key`` represents a cryptographic key."""
|
||||
|
||||
@abstractmethod
|
||||
def to_bytes(self) -> bytes:
|
||||
"""Returns the byte representation of this key."""
|
||||
...
|
||||
|
||||
@abstractmethod
|
||||
def get_type(self) -> KeyType:
|
||||
"""Returns the ``KeyType`` for ``self``."""
|
||||
...
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, Key):
|
||||
return NotImplemented
|
||||
return self.to_bytes() == other.to_bytes()
|
||||
|
||||
|
||||
class PublicKey(Key):
|
||||
"""A ``PublicKey`` represents a cryptographic public key."""
|
||||
|
||||
@abstractmethod
|
||||
def verify(self, data: bytes, signature: bytes) -> bool:
|
||||
"""Verify that ``signature`` is the cryptographic signature of the hash
|
||||
of ``data``."""
|
||||
...
|
||||
|
||||
def _serialize_to_protobuf(self) -> protobuf.PublicKey:
|
||||
"""Return the protobuf representation of this ``Key``."""
|
||||
key_type = self.get_type().value
|
||||
data = self.to_bytes()
|
||||
protobuf_key = protobuf.PublicKey(key_type=key_type, data=data)
|
||||
return protobuf_key
|
||||
|
||||
def serialize(self) -> bytes:
|
||||
"""Return the canonical serialization of this ``Key``."""
|
||||
return self._serialize_to_protobuf().SerializeToString()
|
||||
|
||||
@classmethod
|
||||
def deserialize_from_protobuf(cls, protobuf_data: bytes) -> protobuf.PublicKey:
|
||||
return protobuf.PublicKey.FromString(protobuf_data)
|
||||
|
||||
|
||||
class PrivateKey(Key):
|
||||
"""A ``PrivateKey`` represents a cryptographic private key."""
|
||||
|
||||
@abstractmethod
|
||||
def sign(self, data: bytes) -> bytes:
|
||||
...
|
||||
|
||||
@abstractmethod
|
||||
def get_public_key(self) -> PublicKey:
|
||||
...
|
||||
|
||||
def _serialize_to_protobuf(self) -> protobuf.PrivateKey:
|
||||
"""Return the protobuf representation of this ``Key``."""
|
||||
key_type = self.get_type().value
|
||||
data = self.to_bytes()
|
||||
protobuf_key = protobuf.PrivateKey(key_type=key_type, data=data)
|
||||
return protobuf_key
|
||||
|
||||
def serialize(self) -> bytes:
|
||||
"""Return the canonical serialization of this ``Key``."""
|
||||
return self._serialize_to_protobuf().SerializeToString()
|
||||
|
||||
@classmethod
|
||||
def deserialize_from_protobuf(cls, protobuf_data: bytes) -> protobuf.PrivateKey:
|
||||
return protobuf.PrivateKey.FromString(protobuf_data)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class KeyPair:
|
||||
private_key: PrivateKey
|
||||
public_key: PublicKey
|
|
@ -1,20 +0,0 @@
|
|||
syntax = "proto2";
|
||||
|
||||
package crypto.pb;
|
||||
|
||||
enum KeyType {
|
||||
RSA = 0;
|
||||
Ed25519 = 1;
|
||||
Secp256k1 = 2;
|
||||
ECDSA = 3;
|
||||
}
|
||||
|
||||
message PublicKey {
|
||||
required KeyType key_type = 1;
|
||||
required bytes data = 2;
|
||||
}
|
||||
|
||||
message PrivateKey {
|
||||
required KeyType key_type = 1;
|
||||
required bytes data = 2;
|
||||
}
|
|
@ -1,162 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: libp2p/crypto/pb/crypto.proto
|
||||
|
||||
import sys
|
||||
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
|
||||
from google.protobuf.internal import enum_type_wrapper
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import message as _message
|
||||
from google.protobuf import reflection as _reflection
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor.FileDescriptor(
|
||||
name='libp2p/crypto/pb/crypto.proto',
|
||||
package='crypto.pb',
|
||||
syntax='proto2',
|
||||
serialized_options=None,
|
||||
serialized_pb=_b('\n\x1dlibp2p/crypto/pb/crypto.proto\x12\tcrypto.pb\"?\n\tPublicKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c\"@\n\nPrivateKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c*9\n\x07KeyType\x12\x07\n\x03RSA\x10\x00\x12\x0b\n\x07\x45\x64\x32\x35\x35\x31\x39\x10\x01\x12\r\n\tSecp256k1\x10\x02\x12\t\n\x05\x45\x43\x44SA\x10\x03')
|
||||
)
|
||||
|
||||
_KEYTYPE = _descriptor.EnumDescriptor(
|
||||
name='KeyType',
|
||||
full_name='crypto.pb.KeyType',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
values=[
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='RSA', index=0, number=0,
|
||||
serialized_options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='Ed25519', index=1, number=1,
|
||||
serialized_options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='Secp256k1', index=2, number=2,
|
||||
serialized_options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='ECDSA', index=3, number=3,
|
||||
serialized_options=None,
|
||||
type=None),
|
||||
],
|
||||
containing_type=None,
|
||||
serialized_options=None,
|
||||
serialized_start=175,
|
||||
serialized_end=232,
|
||||
)
|
||||
_sym_db.RegisterEnumDescriptor(_KEYTYPE)
|
||||
|
||||
KeyType = enum_type_wrapper.EnumTypeWrapper(_KEYTYPE)
|
||||
RSA = 0
|
||||
Ed25519 = 1
|
||||
Secp256k1 = 2
|
||||
ECDSA = 3
|
||||
|
||||
|
||||
|
||||
_PUBLICKEY = _descriptor.Descriptor(
|
||||
name='PublicKey',
|
||||
full_name='crypto.pb.PublicKey',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='key_type', full_name='crypto.pb.PublicKey.key_type', index=0,
|
||||
number=1, type=14, cpp_type=8, label=2,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
serialized_options=None, file=DESCRIPTOR),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='data', full_name='crypto.pb.PublicKey.data', index=1,
|
||||
number=2, type=12, cpp_type=9, label=2,
|
||||
has_default_value=False, default_value=_b(""),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
serialized_options=None, file=DESCRIPTOR),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
serialized_options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto2',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=44,
|
||||
serialized_end=107,
|
||||
)
|
||||
|
||||
|
||||
_PRIVATEKEY = _descriptor.Descriptor(
|
||||
name='PrivateKey',
|
||||
full_name='crypto.pb.PrivateKey',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='key_type', full_name='crypto.pb.PrivateKey.key_type', index=0,
|
||||
number=1, type=14, cpp_type=8, label=2,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
serialized_options=None, file=DESCRIPTOR),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='data', full_name='crypto.pb.PrivateKey.data', index=1,
|
||||
number=2, type=12, cpp_type=9, label=2,
|
||||
has_default_value=False, default_value=_b(""),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
serialized_options=None, file=DESCRIPTOR),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
serialized_options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto2',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=109,
|
||||
serialized_end=173,
|
||||
)
|
||||
|
||||
_PUBLICKEY.fields_by_name['key_type'].enum_type = _KEYTYPE
|
||||
_PRIVATEKEY.fields_by_name['key_type'].enum_type = _KEYTYPE
|
||||
DESCRIPTOR.message_types_by_name['PublicKey'] = _PUBLICKEY
|
||||
DESCRIPTOR.message_types_by_name['PrivateKey'] = _PRIVATEKEY
|
||||
DESCRIPTOR.enum_types_by_name['KeyType'] = _KEYTYPE
|
||||
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
||||
|
||||
PublicKey = _reflection.GeneratedProtocolMessageType('PublicKey', (_message.Message,), {
|
||||
'DESCRIPTOR' : _PUBLICKEY,
|
||||
'__module__' : 'libp2p.crypto.pb.crypto_pb2'
|
||||
# @@protoc_insertion_point(class_scope:crypto.pb.PublicKey)
|
||||
})
|
||||
_sym_db.RegisterMessage(PublicKey)
|
||||
|
||||
PrivateKey = _reflection.GeneratedProtocolMessageType('PrivateKey', (_message.Message,), {
|
||||
'DESCRIPTOR' : _PRIVATEKEY,
|
||||
'__module__' : 'libp2p.crypto.pb.crypto_pb2'
|
||||
# @@protoc_insertion_point(class_scope:crypto.pb.PrivateKey)
|
||||
})
|
||||
_sym_db.RegisterMessage(PrivateKey)
|
||||
|
||||
|
||||
# @@protoc_insertion_point(module_scope)
|
|
@ -1,84 +0,0 @@
|
|||
# @generated by generate_proto_mypy_stubs.py. Do not edit!
|
||||
import sys
|
||||
from google.protobuf.descriptor import (
|
||||
Descriptor as google___protobuf___descriptor___Descriptor,
|
||||
EnumDescriptor as google___protobuf___descriptor___EnumDescriptor,
|
||||
)
|
||||
|
||||
from google.protobuf.message import (
|
||||
Message as google___protobuf___message___Message,
|
||||
)
|
||||
|
||||
from typing import (
|
||||
List as typing___List,
|
||||
Tuple as typing___Tuple,
|
||||
cast as typing___cast,
|
||||
)
|
||||
|
||||
from typing_extensions import (
|
||||
Literal as typing_extensions___Literal,
|
||||
)
|
||||
|
||||
|
||||
class KeyType(int):
|
||||
DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ...
|
||||
@classmethod
|
||||
def Name(cls, number: int) -> str: ...
|
||||
@classmethod
|
||||
def Value(cls, name: str) -> KeyType: ...
|
||||
@classmethod
|
||||
def keys(cls) -> typing___List[str]: ...
|
||||
@classmethod
|
||||
def values(cls) -> typing___List[KeyType]: ...
|
||||
@classmethod
|
||||
def items(cls) -> typing___List[typing___Tuple[str, KeyType]]: ...
|
||||
RSA = typing___cast(KeyType, 0)
|
||||
Ed25519 = typing___cast(KeyType, 1)
|
||||
Secp256k1 = typing___cast(KeyType, 2)
|
||||
ECDSA = typing___cast(KeyType, 3)
|
||||
RSA = typing___cast(KeyType, 0)
|
||||
Ed25519 = typing___cast(KeyType, 1)
|
||||
Secp256k1 = typing___cast(KeyType, 2)
|
||||
ECDSA = typing___cast(KeyType, 3)
|
||||
|
||||
class PublicKey(google___protobuf___message___Message):
|
||||
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
|
||||
key_type = ... # type: KeyType
|
||||
data = ... # type: bytes
|
||||
|
||||
def __init__(self,
|
||||
*,
|
||||
key_type : KeyType,
|
||||
data : bytes,
|
||||
) -> None: ...
|
||||
@classmethod
|
||||
def FromString(cls, s: bytes) -> PublicKey: ...
|
||||
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
|
||||
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
|
||||
if sys.version_info >= (3,):
|
||||
def HasField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> bool: ...
|
||||
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> None: ...
|
||||
else:
|
||||
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> bool: ...
|
||||
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> None: ...
|
||||
|
||||
class PrivateKey(google___protobuf___message___Message):
|
||||
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
|
||||
key_type = ... # type: KeyType
|
||||
data = ... # type: bytes
|
||||
|
||||
def __init__(self,
|
||||
*,
|
||||
key_type : KeyType,
|
||||
data : bytes,
|
||||
) -> None: ...
|
||||
@classmethod
|
||||
def FromString(cls, s: bytes) -> PrivateKey: ...
|
||||
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
|
||||
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
|
||||
if sys.version_info >= (3,):
|
||||
def HasField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> bool: ...
|
||||
def ClearField(self, field_name: typing_extensions___Literal[u"data",u"key_type"]) -> None: ...
|
||||
else:
|
||||
def HasField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> bool: ...
|
||||
def ClearField(self, field_name: typing_extensions___Literal[u"data",b"data",u"key_type",b"key_type"]) -> None: ...
|
|
@ -1,65 +0,0 @@
|
|||
from Crypto.Hash import SHA256
|
||||
import Crypto.PublicKey.RSA as RSA
|
||||
from Crypto.PublicKey.RSA import RsaKey
|
||||
from Crypto.Signature import pkcs1_15
|
||||
|
||||
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
|
||||
|
||||
|
||||
class RSAPublicKey(PublicKey):
    """RSA public key backed by a ``Crypto.PublicKey.RSA.RsaKey``."""

    def __init__(self, impl: RsaKey) -> None:
        self.impl = impl

    def to_bytes(self) -> bytes:
        """Serialize this key in DER format."""
        return self.impl.export_key("DER")

    @classmethod
    def from_bytes(cls, key_bytes: bytes) -> "RSAPublicKey":
        """Build a key from bytes accepted by ``RSA.import_key`` (e.g. DER)."""
        return cls(RSA.import_key(key_bytes))

    def get_type(self) -> KeyType:
        return KeyType.RSA

    def verify(self, data: bytes, signature: bytes) -> bool:
        """Check a PKCS#1 v1.5 signature over the SHA-256 digest of ``data``."""
        digest = SHA256.new(data)
        verifier = pkcs1_15.new(self.impl)
        try:
            verifier.verify(digest, signature)
        except (ValueError, TypeError):
            # pycryptodome signals an invalid signature by raising.
            return False
        return True
|
||||
|
||||
|
||||
class RSAPrivateKey(PrivateKey):
    """RSA private key backed by a ``Crypto.PublicKey.RSA.RsaKey``."""

    def __init__(self, impl: RsaKey) -> None:
        self.impl = impl

    @classmethod
    def new(cls, bits: int = 2048, e: int = 65537) -> "RSAPrivateKey":
        """Generate a fresh key of size ``bits`` with public exponent ``e``."""
        private_key_impl = RSA.generate(bits, e=e)
        return cls(private_key_impl)

    @classmethod
    def from_bytes(cls, key_bytes: bytes) -> "RSAPrivateKey":
        """Build a key from bytes accepted by ``RSA.import_key`` (e.g. DER).

        Added for parity with ``RSAPublicKey.from_bytes`` and the
        ``from_bytes`` constructors on the other key implementations
        (e.g. ``Secp256k1PrivateKey``).
        """
        return cls(RSA.import_key(key_bytes))

    def to_bytes(self) -> bytes:
        """Serialize this key in DER format."""
        return self.impl.export_key("DER")

    def get_type(self) -> KeyType:
        return KeyType.RSA

    def sign(self, data: bytes) -> bytes:
        """Produce a PKCS#1 v1.5 signature over the SHA-256 digest of ``data``."""
        h = SHA256.new(data)
        return pkcs1_15.new(self.impl).sign(h)

    def get_public_key(self) -> PublicKey:
        return RSAPublicKey(self.impl.publickey())
|
||||
|
||||
|
||||
def create_new_key_pair(bits: int = 2048, e: int = 65537) -> KeyPair:
    """
    Returns a new RSA keypair with the requested key size (``bits``) and the
    given public exponent ``e``.

    Sane defaults are provided for both values.
    """
    secret_key = RSAPrivateKey.new(bits, e)
    return KeyPair(secret_key, secret_key.get_public_key())
|
|
@ -1,73 +0,0 @@
|
|||
import coincurve
|
||||
|
||||
from libp2p.crypto.keys import KeyPair, KeyType, PrivateKey, PublicKey
|
||||
|
||||
|
||||
class Secp256k1PublicKey(PublicKey):
    """secp256k1 public key backed by ``coincurve.PublicKey``."""

    def __init__(self, impl: coincurve.PublicKey) -> None:
        self.impl = impl

    def to_bytes(self) -> bytes:
        """Serialize using coincurve's default ``format()`` encoding."""
        return self.impl.format()

    @classmethod
    def from_bytes(cls, data: bytes) -> "Secp256k1PublicKey":
        """Build a key from raw public-key bytes."""
        return cls(coincurve.PublicKey(data))

    @classmethod
    def deserialize(cls, data: bytes) -> "Secp256k1PublicKey":
        """Decode a key from its protobuf envelope."""
        envelope = cls.deserialize_from_protobuf(data)
        return cls.from_bytes(envelope.data)

    def get_type(self) -> KeyType:
        return KeyType.Secp256k1

    def verify(self, data: bytes, signature: bytes) -> bool:
        """Verify ``signature`` over ``data``; hashing is delegated to coincurve."""
        return self.impl.verify(signature, data)
|
||||
|
||||
|
||||
class Secp256k1PrivateKey(PrivateKey):
    """secp256k1 private key backed by ``coincurve.PrivateKey``."""

    def __init__(self, impl: coincurve.PrivateKey) -> None:
        self.impl = impl

    @classmethod
    def new(cls, secret: bytes = None) -> "Secp256k1PrivateKey":
        """Create a key from ``secret``; coincurve picks one when ``None``."""
        return cls(coincurve.PrivateKey(secret))

    @classmethod
    def from_bytes(cls, data: bytes) -> "Secp256k1PrivateKey":
        """Build a key from raw secret bytes."""
        return cls(coincurve.PrivateKey(data))

    @classmethod
    def deserialize(cls, data: bytes) -> "Secp256k1PrivateKey":
        """Decode a key from its protobuf envelope."""
        envelope = cls.deserialize_from_protobuf(data)
        return cls.from_bytes(envelope.data)

    def to_bytes(self) -> bytes:
        """Return the raw secret bytes."""
        return self.impl.secret

    def get_type(self) -> KeyType:
        return KeyType.Secp256k1

    def sign(self, data: bytes) -> bytes:
        """Sign ``data``; hashing is delegated to coincurve."""
        return self.impl.sign(data)

    def get_public_key(self) -> PublicKey:
        derived = coincurve.PublicKey.from_secret(self.impl.secret)
        return Secp256k1PublicKey(derived)
|
||||
|
||||
|
||||
def create_new_key_pair(secret: bytes = None) -> KeyPair:
    """
    Returns a new Secp256k1 keypair derived from the provided ``secret``, a
    sequence of bytes corresponding to some integer between 0 and the group
    order.

    A valid secret is created if ``None`` is passed.
    """
    secret_key = Secp256k1PrivateKey.new(secret)
    return KeyPair(secret_key, secret_key.get_public_key())
|
|
@ -1,38 +0,0 @@
|
|||
from libp2p.crypto.ed25519 import Ed25519PrivateKey, Ed25519PublicKey
|
||||
from libp2p.crypto.exceptions import MissingDeserializerError
|
||||
from libp2p.crypto.keys import KeyType, PrivateKey, PublicKey
|
||||
from libp2p.crypto.rsa import RSAPublicKey
|
||||
from libp2p.crypto.secp256k1 import Secp256k1PrivateKey, Secp256k1PublicKey
|
||||
|
||||
# Maps a protobuf KeyType value to the callable that reconstructs the
# corresponding PublicKey implementation from raw key bytes.
key_type_to_public_key_deserializer = {
    KeyType.Secp256k1.value: Secp256k1PublicKey.from_bytes,
    KeyType.RSA.value: RSAPublicKey.from_bytes,
    KeyType.Ed25519.value: Ed25519PublicKey.from_bytes,
}

# Same mapping for PrivateKey implementations.
# NOTE(review): RSA is absent here — presumably because no RSA private-key
# deserializer exists in this codebase; confirm before relying on
# deserialize_private_key for RSA keys.
key_type_to_private_key_deserializer = {
    KeyType.Secp256k1.value: Secp256k1PrivateKey.from_bytes,
    KeyType.Ed25519.value: Ed25519PrivateKey.from_bytes,
}
|
||||
|
||||
|
||||
def deserialize_public_key(data: bytes) -> PublicKey:
    """Parse a protobuf-serialized public key into the matching implementation.

    :param data: protobuf-encoded key bytes
    :raise MissingDeserializerError: if the key type has no registered decoder
    """
    envelope = PublicKey.deserialize_from_protobuf(data)
    try:
        decode = key_type_to_public_key_deserializer[envelope.key_type]
    except KeyError as exc:
        raise MissingDeserializerError(
            {"key_type": envelope.key_type, "key": "public_key"}
        ) from exc
    return decode(envelope.data)
|
||||
|
||||
|
||||
def deserialize_private_key(data: bytes) -> PrivateKey:
    """Parse a protobuf-serialized private key into the matching implementation.

    :param data: protobuf-encoded key bytes
    :raise MissingDeserializerError: if the key type has no registered decoder
    """
    envelope = PrivateKey.deserialize_from_protobuf(data)
    try:
        decode = key_type_to_private_key_deserializer[envelope.key_type]
    except KeyError as exc:
        raise MissingDeserializerError(
            {"key_type": envelope.key_type, "key": "private_key"}
        ) from exc
    return decode(envelope.data)
|
16
libp2p/discovery/advertiser_interface.py
Normal file
16
libp2p/discovery/advertiser_interface.py
Normal file
|
@ -0,0 +1,16 @@
|
|||
from abc import ABC, abstractmethod
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
class IAdvertiser(ABC):
    """Interface for a component that announces provided services to the
    network.

    The original no-op ``__init__`` was dead code (it only called ``pass``)
    and has been removed; ``object.__init__`` suffices.
    """

    @abstractmethod
    def advertise(self, service):
        """
        Advertise providing a specific service to the network.

        :param service: service that you provide
        :raise Exception: network error
        """
|
||||
|
17
libp2p/discovery/discoverer_interface.py
Normal file
17
libp2p/discovery/discoverer_interface.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
from abc import ABC, abstractmethod
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
|
||||
class IDiscoverer(ABC):
    """Interface for a component that discovers peers providing a service.

    The original no-op ``__init__`` was dead code (it only called ``pass``)
    and has been removed; ``object.__init__`` suffices.
    """

    @abstractmethod
    def find_peers(self, service):
        """
        Find peers on the network providing a particular service.

        :param service: service that peers must provide
        :return: PeerInfo generator that yields PeerInfo objects for discovered peers
        :raise Exception: network error
        """
|
|
@ -1,16 +0,0 @@
|
|||
class BaseLibp2pError(Exception):
    """Root of the libp2p exception hierarchy."""
    pass


class ValidationError(BaseLibp2pError):
    """Raised when something does not pass a validation check."""


class ParseError(BaseLibp2pError):
    """Raised when data cannot be parsed."""
    pass


class MultiError(BaseLibp2pError):
    """Raised with multiple exceptions."""

    # TODO: find some way for this to fancy-print all encapsulated errors
|
|
@ -1,195 +1,90 @@
|
|||
import logging
|
||||
from typing import TYPE_CHECKING, AsyncIterator, List, Sequence
|
||||
|
||||
from async_generator import asynccontextmanager
|
||||
from async_service import background_trio_service
|
||||
import multiaddr
|
||||
|
||||
from libp2p.crypto.keys import PrivateKey, PublicKey
|
||||
from libp2p.host.defaults import get_default_protocols
|
||||
from libp2p.host.exceptions import StreamFailure
|
||||
from libp2p.network.network_interface import INetworkService
|
||||
from libp2p.network.stream.net_stream_interface import INetStream
|
||||
from libp2p.peer.id import ID
|
||||
from libp2p.peer.peerinfo import PeerInfo
|
||||
from libp2p.peer.peerstore_interface import IPeerStore
|
||||
from libp2p.protocol_muxer.exceptions import MultiselectClientError, MultiselectError
|
||||
from libp2p.protocol_muxer.multiselect import Multiselect
|
||||
from libp2p.protocol_muxer.multiselect_client import MultiselectClient
|
||||
from libp2p.protocol_muxer.multiselect_communicator import MultiselectCommunicator
|
||||
from libp2p.typing import StreamHandlerFn, TProtocol
|
||||
|
||||
from .host_interface import IHost
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections import OrderedDict
|
||||
|
||||
# Upon host creation, host takes in options,
|
||||
# including the list of addresses on which to listen.
|
||||
# Host then parses these options and delegates to its Network instance,
|
||||
# telling it to listen on the given listen addresses.
|
||||
|
||||
|
||||
logger = logging.getLogger("libp2p.network.basic_host")
|
||||
|
||||
|
||||
class BasicHost(IHost):
|
||||
"""
|
||||
BasicHost is a wrapper of a `INetwork` implementation.
|
||||
|
||||
It performs protocol negotiation on a stream with multistream-select
|
||||
right after a stream is initialized.
|
||||
"""
|
||||
# default options constructor
|
||||
def __init__(self, _network):
|
||||
self.network = _network
|
||||
self.peerstore = self.network.peerstore
|
||||
|
||||
_network: INetworkService
|
||||
peerstore: IPeerStore
|
||||
|
||||
multiselect: Multiselect
|
||||
multiselect_client: MultiselectClient
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
network: INetworkService,
|
||||
default_protocols: "OrderedDict[TProtocol, StreamHandlerFn]" = None,
|
||||
) -> None:
|
||||
self._network = network
|
||||
self._network.set_stream_handler(self._swarm_stream_handler)
|
||||
self.peerstore = self._network.peerstore
|
||||
# Protocol muxing
|
||||
default_protocols = default_protocols or get_default_protocols(self)
|
||||
self.multiselect = Multiselect(default_protocols)
|
||||
self.multiselect_client = MultiselectClient()
|
||||
|
||||
def get_id(self) -> ID:
|
||||
def get_id(self):
|
||||
"""
|
||||
:return: peer_id of host
|
||||
"""
|
||||
return self._network.get_peer_id()
|
||||
return self.network.get_peer_id()
|
||||
|
||||
def get_public_key(self) -> PublicKey:
|
||||
return self.peerstore.pubkey(self.get_id())
|
||||
|
||||
def get_private_key(self) -> PrivateKey:
|
||||
return self.peerstore.privkey(self.get_id())
|
||||
|
||||
def get_network(self) -> INetworkService:
|
||||
def get_network(self):
|
||||
"""
|
||||
:return: network instance of host
|
||||
"""
|
||||
return self._network
|
||||
return self.network
|
||||
|
||||
def get_peerstore(self) -> IPeerStore:
|
||||
def get_peerstore(self):
|
||||
"""
|
||||
:return: peerstore of the host (same one as in its network instance)
|
||||
"""
|
||||
return self.peerstore
|
||||
|
||||
def get_mux(self) -> Multiselect:
|
||||
def get_mux(self):
|
||||
"""
|
||||
:return: mux instance of host
|
||||
"""
|
||||
return self.multiselect
|
||||
|
||||
def get_addrs(self) -> List[multiaddr.Multiaddr]:
|
||||
def get_addrs(self):
|
||||
"""
|
||||
:return: all the multiaddr addresses this host is listening to
|
||||
:return: all the multiaddr addresses this host is listening to
|
||||
"""
|
||||
# TODO: We don't need "/p2p/{peer_id}" postfix actually.
|
||||
p2p_part = multiaddr.Multiaddr(f"/p2p/{self.get_id()!s}")
|
||||
p2p_part = multiaddr.Multiaddr('/p2p/{}'.format(self.get_id().pretty()))
|
||||
|
||||
addrs: List[multiaddr.Multiaddr] = []
|
||||
for transport in self._network.listeners.values():
|
||||
addrs = []
|
||||
for transport in self.network.listeners.values():
|
||||
for addr in transport.get_addrs():
|
||||
addrs.append(addr.encapsulate(p2p_part))
|
||||
return addrs
|
||||
|
||||
@asynccontextmanager
|
||||
async def run(
|
||||
self, listen_addrs: Sequence[multiaddr.Multiaddr]
|
||||
) -> AsyncIterator[None]:
|
||||
def set_stream_handler(self, protocol_id, stream_handler):
|
||||
"""
|
||||
run the host instance and listen to ``listen_addrs``.
|
||||
|
||||
:param listen_addrs: a sequence of multiaddrs that we want to listen to
|
||||
"""
|
||||
network = self.get_network()
|
||||
async with background_trio_service(network):
|
||||
await network.listen(*listen_addrs)
|
||||
yield
|
||||
|
||||
def set_stream_handler(
|
||||
self, protocol_id: TProtocol, stream_handler: StreamHandlerFn
|
||||
) -> None:
|
||||
"""
|
||||
set stream handler for given `protocol_id`
|
||||
|
||||
set stream handler for host
|
||||
:param protocol_id: protocol id used on stream
|
||||
:param stream_handler: a stream handler function
|
||||
:return: true if successful
|
||||
"""
|
||||
self.multiselect.add_handler(protocol_id, stream_handler)
|
||||
return self.network.set_stream_handler(protocol_id, stream_handler)
|
||||
|
||||
async def new_stream(
|
||||
self, peer_id: ID, protocol_ids: Sequence[TProtocol]
|
||||
) -> INetStream:
|
||||
# protocol_id can be a list of protocol_ids
|
||||
# stream will decide which protocol_id to run on
|
||||
async def new_stream(self, peer_id, protocol_ids):
|
||||
"""
|
||||
:param peer_id: peer_id that host is connecting
|
||||
:param protocol_ids: available protocol ids to use for stream
|
||||
:return: stream: new stream created
|
||||
:param protocol_id: protocol id that stream runs on
|
||||
:return: true if successful
|
||||
"""
|
||||
stream = await self.network.new_stream(peer_id, protocol_ids)
|
||||
return stream
|
||||
|
||||
net_stream = await self._network.new_stream(peer_id)
|
||||
|
||||
# Perform protocol muxing to determine protocol to use
|
||||
try:
|
||||
selected_protocol = await self.multiselect_client.select_one_of(
|
||||
list(protocol_ids), MultiselectCommunicator(net_stream)
|
||||
)
|
||||
except MultiselectClientError as error:
|
||||
logger.debug("fail to open a stream to peer %s, error=%s", peer_id, error)
|
||||
await net_stream.reset()
|
||||
raise StreamFailure(f"failed to open a stream to peer {peer_id}") from error
|
||||
|
||||
net_stream.set_protocol(selected_protocol)
|
||||
return net_stream
|
||||
|
||||
async def connect(self, peer_info: PeerInfo) -> None:
|
||||
async def connect(self, peer_info):
|
||||
"""
|
||||
connect ensures there is a connection between this host and the peer
|
||||
with given `peer_info.peer_id`. connect will absorb the addresses in
|
||||
peer_info into its internal peerstore. If there is not an active
|
||||
connection, connect will issue a dial, and block until a connection is
|
||||
opened, or an error is returned.
|
||||
connect ensures there is a connection between this host and the peer with
|
||||
given peer_info.peer_id. connect will absorb the addresses in peer_info into its internal
|
||||
peerstore. If there is not an active connection, connect will issue a
|
||||
dial, and block until a connection is open, or an error is
|
||||
returned.
|
||||
|
||||
:param peer_info: peer_info of the peer we want to connect to
|
||||
:param peer_info: peer_info of the host we want to connect to
|
||||
:type peer_info: peer.peerinfo.PeerInfo
|
||||
"""
|
||||
self.peerstore.add_addrs(peer_info.peer_id, peer_info.addrs, 10)
|
||||
|
||||
# there is already a connection to this peer
|
||||
if peer_info.peer_id in self._network.connections:
|
||||
if peer_info.peer_id in self.network.connections:
|
||||
return
|
||||
|
||||
await self._network.dial_peer(peer_info.peer_id)
|
||||
|
||||
async def disconnect(self, peer_id: ID) -> None:
|
||||
await self._network.close_peer(peer_id)
|
||||
|
||||
async def close(self) -> None:
|
||||
await self._network.close()
|
||||
|
||||
# Reference: `BasicHost.newStreamHandler` in Go.
|
||||
async def _swarm_stream_handler(self, net_stream: INetStream) -> None:
|
||||
# Perform protocol muxing to determine protocol to use
|
||||
try:
|
||||
protocol, handler = await self.multiselect.negotiate(
|
||||
MultiselectCommunicator(net_stream)
|
||||
)
|
||||
except MultiselectError as error:
|
||||
peer_id = net_stream.muxed_conn.peer_id
|
||||
logger.debug(
|
||||
"failed to accept a stream from peer %s, error=%s", peer_id, error
|
||||
)
|
||||
await net_stream.reset()
|
||||
return
|
||||
net_stream.set_protocol(protocol)
|
||||
await handler(net_stream)
|
||||
await self.network.dial_peer(peer_info.peer_id)
|
||||
|
|
|
@ -1,17 +0,0 @@
|
|||
from collections import OrderedDict
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from libp2p.host.host_interface import IHost
|
||||
from libp2p.host.ping import ID as PingID
|
||||
from libp2p.host.ping import handle_ping
|
||||
from libp2p.identity.identify.protocol import ID as IdentifyID
|
||||
from libp2p.identity.identify.protocol import identify_handler_for
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from libp2p.typing import TProtocol, StreamHandlerFn
|
||||
|
||||
|
||||
def get_default_protocols(host: IHost) -> "OrderedDict[TProtocol, StreamHandlerFn]":
    """Return the default protocol handlers (identify, then ping) for ``host``."""
    handlers: "OrderedDict[TProtocol, StreamHandlerFn]" = OrderedDict()
    handlers[IdentifyID] = identify_handler_for(host)
    handlers[PingID] = handle_ping
    return handlers
|
|
@ -1,13 +0,0 @@
|
|||
from libp2p.exceptions import BaseLibp2pError
|
||||
|
||||
|
||||
class HostException(BaseLibp2pError):
    """A generic exception in `IHost`."""


class ConnectionFailure(HostException):
    """Raised when a connection to a peer cannot be established."""
    pass


class StreamFailure(HostException):
    """Raised when opening or negotiating a stream fails."""
    pass
|
|
@ -1,104 +1,60 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from typing import Any, AsyncContextManager, List, Sequence
|
||||
|
||||
import multiaddr
|
||||
|
||||
from libp2p.crypto.keys import PrivateKey, PublicKey
|
||||
from libp2p.network.network_interface import INetworkService
|
||||
from libp2p.network.stream.net_stream_interface import INetStream
|
||||
from libp2p.peer.id import ID
|
||||
from libp2p.peer.peerinfo import PeerInfo
|
||||
from libp2p.typing import StreamHandlerFn, TProtocol
|
||||
|
||||
|
||||
class IHost(ABC):
|
||||
|
||||
@abstractmethod
|
||||
def get_id(self) -> ID:
|
||||
def get_id(self):
|
||||
"""
|
||||
:return: peer_id of host
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def get_public_key(self) -> PublicKey:
|
||||
"""
|
||||
:return: the public key belonging to the peer
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def get_private_key(self) -> PrivateKey:
|
||||
"""
|
||||
:return: the private key belonging to the peer
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def get_network(self) -> INetworkService:
|
||||
def get_network(self):
|
||||
"""
|
||||
:return: network instance of host
|
||||
"""
|
||||
|
||||
# FIXME: Replace with correct return type
|
||||
@abstractmethod
|
||||
def get_mux(self) -> Any:
|
||||
def get_mux(self):
|
||||
"""
|
||||
:return: mux instance of host
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def get_addrs(self) -> List[multiaddr.Multiaddr]:
|
||||
def get_addrs(self):
|
||||
"""
|
||||
:return: all the multiaddr addresses this host is listening to
|
||||
:return: all the multiaddr addresses this host is listening to
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def run(
|
||||
self, listen_addrs: Sequence[multiaddr.Multiaddr]
|
||||
) -> AsyncContextManager[None]:
|
||||
def set_stream_handler(self, protocol_id, stream_handler):
|
||||
"""
|
||||
run the host instance and listen to ``listen_addrs``.
|
||||
|
||||
:param listen_addrs: a sequence of multiaddrs that we want to listen to
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def set_stream_handler(
|
||||
self, protocol_id: TProtocol, stream_handler: StreamHandlerFn
|
||||
) -> None:
|
||||
"""
|
||||
set stream handler for host.
|
||||
|
||||
set stream handler for host
|
||||
:param protocol_id: protocol id used on stream
|
||||
:param stream_handler: a stream handler function
|
||||
:return: true if successful
|
||||
"""
|
||||
|
||||
# protocol_id can be a list of protocol_ids
|
||||
# stream will decide which protocol_id to run on
|
||||
@abstractmethod
|
||||
async def new_stream(
|
||||
self, peer_id: ID, protocol_ids: Sequence[TProtocol]
|
||||
) -> INetStream:
|
||||
def new_stream(self, peer_id, protocol_ids):
|
||||
"""
|
||||
:param peer_id: peer_id that host is connecting
|
||||
:param protocol_ids: available protocol ids to use for stream
|
||||
:return: stream: new stream created
|
||||
:param protocol_ids: protocol ids that stream can run on
|
||||
:return: true if successful
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
async def connect(self, peer_info: PeerInfo) -> None:
|
||||
def connect(self, peer_info):
|
||||
"""
|
||||
connect ensures there is a connection between this host and the peer
|
||||
with given peer_info.peer_id. connect will absorb the addresses in
|
||||
peer_info into its internal peerstore. If there is not an active
|
||||
connection, connect will issue a dial, and block until a connection is
|
||||
opened, or an error is returned.
|
||||
connect ensures there is a connection between this host and the peer with
|
||||
given peer_info.peer_id. connect will absorb the addresses in peer_info into its internal
|
||||
peerstore. If there is not an active connection, connect will issue a
|
||||
dial, and block until a connection is open, or an error is
|
||||
returned.
|
||||
|
||||
:param peer_info: peer_info of the peer we want to connect to
|
||||
:param peer_info: peer_info of the host we want to connect to
|
||||
:type peer_info: peer.peerinfo.PeerInfo
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
async def disconnect(self, peer_id: ID) -> None:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def close(self) -> None:
|
||||
pass
|
||||
|
|
|
@ -1,60 +0,0 @@
|
|||
import logging
|
||||
|
||||
import trio
|
||||
|
||||
from libp2p.network.stream.exceptions import StreamClosed, StreamEOF, StreamReset
|
||||
from libp2p.network.stream.net_stream_interface import INetStream
|
||||
from libp2p.peer.id import ID as PeerID
|
||||
from libp2p.typing import TProtocol
|
||||
|
||||
# Protocol ID for the libp2p ping protocol.
ID = TProtocol("/ipfs/ping/1.0.0")
# Number of payload bytes read per ping.
PING_LENGTH = 32
# Seconds to wait for a ping payload before timing out (used with trio.fail_after).
RESP_TIMEOUT = 60

logger = logging.getLogger("libp2p.host.ping")
|
||||
|
||||
|
||||
async def _handle_ping(stream: INetStream, peer_id: PeerID) -> bool:
    """Return a boolean indicating if we expect more pings from the peer at
    ``peer_id``."""
    # Read one ping payload; the except-clause ordering below is load-bearing:
    # the specific stream exceptions must be handled before the broad
    # ``Exception`` fallback.
    try:
        with trio.fail_after(RESP_TIMEOUT):
            payload = await stream.read(PING_LENGTH)
    except trio.TooSlowError as error:
        logger.debug("Timed out waiting for ping from %s: %s", peer_id, error)
        raise
    except StreamEOF:
        # A clean close means the peer is done pinging; signal "no more
        # pings" instead of raising.
        logger.debug("Other side closed while waiting for ping from %s", peer_id)
        return False
    except StreamReset as error:
        logger.debug(
            "Other side reset while waiting for ping from %s: %s", peer_id, error
        )
        raise
    except Exception as error:
        logger.debug("Error while waiting to read ping for %s: %s", peer_id, error)
        raise

    logger.debug("Received ping from %s with data: 0x%s", peer_id, payload.hex())

    # Echo the payload back to the peer.
    try:
        await stream.write(payload)
    except StreamClosed:
        logger.debug("Fail to respond to ping from %s: stream closed", peer_id)
        raise
    return True
|
||||
|
||||
|
||||
async def handle_ping(stream: INetStream) -> None:
    """``handle_ping`` responds to incoming ping requests until one side errors
    or closes the ``stream``."""
    remote_peer = stream.muxed_conn.peer_id

    keep_going = True
    while keep_going:
        try:
            keep_going = await _handle_ping(stream, remote_peer)
        except Exception:
            # Any failure in a single ping round tears down the stream.
            await stream.reset()
            return
|
|
@ -1,41 +0,0 @@
|
|||
from libp2p.host.basic_host import BasicHost
|
||||
from libp2p.host.exceptions import ConnectionFailure
|
||||
from libp2p.network.network_interface import INetworkService
|
||||
from libp2p.peer.peerinfo import PeerInfo
|
||||
from libp2p.routing.interfaces import IPeerRouting
|
||||
|
||||
|
||||
# RoutedHost is a p2p Host that includes a routing system.
|
||||
# This allows the Host to find the addresses for peers when it does not have them.
|
||||
class RoutedHost(BasicHost):
    """A p2p Host with a routing system, letting it look up addresses for
    peers it does not already know."""

    _router: IPeerRouting

    def __init__(self, network: INetworkService, router: IPeerRouting):
        super().__init__(network)
        self._router = router

    async def connect(self, peer_info: PeerInfo) -> None:
        """
        connect ensures there is a connection between this host and the peer
        with given `peer_info.peer_id`. See (basic_host).connect for more
        information.

        RoutedHost's Connect differs in that if the host has no addresses for a
        given peer, it will use its routing system to try to find some.

        :param peer_info: peer_info of the peer we want to connect to
        :type peer_info: peer.peerinfo.PeerInfo
        """
        target = peer_info.peer_id

        if not peer_info.addrs:
            # No addresses were supplied; ask the routing system for some.
            routed_info = await self._router.find_peer(target)
            if not routed_info:
                raise ConnectionFailure("Unable to find Peer address")
            self.peerstore.add_addrs(target, routed_info.addrs, 10)
        self.peerstore.add_addrs(target, peer_info.addrs, 10)

        # Nothing to do if a connection to this peer already exists.
        if target in self._network.connections:
            return

        await self._network.dial_peer(target)
|
|
@ -1,12 +0,0 @@
|
|||
syntax = "proto2";
|
||||
|
||||
package identify.pb;
|
||||
|
||||
message Identify {
|
||||
optional string protocol_version = 5;
|
||||
optional string agent_version = 6;
|
||||
optional bytes public_key = 1;
|
||||
repeated bytes listen_addrs = 2;
|
||||
optional bytes observed_addr = 4;
|
||||
repeated string protocols = 3;
|
||||
}
|
|
@ -1,105 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: libp2p/identity/identify/pb/identify.proto
# NOTE(review): machine-generated module; regenerate with protoc instead of
# hand-editing. Comments below are review annotations only.

import sys
# Py2/Py3 shim: encode descriptor literals as latin-1 bytes on Python 2.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# File-level descriptor; serialized_pb is the compiled .proto blob.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='libp2p/identity/identify/pb/identify.proto',
  package='identify.pb',
  syntax='proto2',
  serialized_options=None,
  serialized_pb=_b('\n*libp2p/identity/identify/pb/identify.proto\x12\x0bidentify.pb\"\x8f\x01\n\x08Identify\x12\x18\n\x10protocol_version\x18\x05 \x01(\t\x12\x15\n\ragent_version\x18\x06 \x01(\t\x12\x12\n\npublic_key\x18\x01 \x01(\x0c\x12\x14\n\x0clisten_addrs\x18\x02 \x03(\x0c\x12\x15\n\robserved_addr\x18\x04 \x01(\x0c\x12\x11\n\tprotocols\x18\x03 \x03(\t')
)




# Message descriptor for identify.pb.Identify.
_IDENTIFY = _descriptor.Descriptor(
  name='Identify',
  full_name='identify.pb.Identify',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='protocol_version', full_name='identify.pb.Identify.protocol_version', index=0,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='agent_version', full_name='identify.pb.Identify.agent_version', index=1,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='public_key', full_name='identify.pb.Identify.public_key', index=2,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='listen_addrs', full_name='identify.pb.Identify.listen_addrs', index=3,
      number=2, type=12, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='observed_addr', full_name='identify.pb.Identify.observed_addr', index=4,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='protocols', full_name='identify.pb.Identify.protocols', index=5,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=60,
  serialized_end=203,
)

DESCRIPTOR.message_types_by_name['Identify'] = _IDENTIFY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Concrete message class built from the descriptor at import time.
Identify = _reflection.GeneratedProtocolMessageType('Identify', (_message.Message,), {
  'DESCRIPTOR' : _IDENTIFY,
  '__module__' : 'libp2p.identity.identify.pb.identify_pb2'
  # @@protoc_insertion_point(class_scope:identify.pb.Identify)
  })
_sym_db.RegisterMessage(Identify)


# @@protoc_insertion_point(module_scope)
|
|
@ -1,53 +0,0 @@
|
|||
# @generated by generate_proto_mypy_stubs.py. Do not edit!
|
||||
import sys
|
||||
from google.protobuf.descriptor import (
|
||||
Descriptor as google___protobuf___descriptor___Descriptor,
|
||||
)
|
||||
|
||||
from google.protobuf.internal.containers import (
|
||||
RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer,
|
||||
)
|
||||
|
||||
from google.protobuf.message import (
|
||||
Message as google___protobuf___message___Message,
|
||||
)
|
||||
|
||||
from typing import (
|
||||
Iterable as typing___Iterable,
|
||||
Optional as typing___Optional,
|
||||
Text as typing___Text,
|
||||
)
|
||||
|
||||
from typing_extensions import (
|
||||
Literal as typing_extensions___Literal,
|
||||
)
|
||||
|
||||
|
||||
class Identify(google___protobuf___message___Message):
    # NOTE(review): generated mypy stub for identify_pb2.Identify —
    # regenerate via generate_proto_mypy_stubs.py rather than hand-editing.
    DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
    protocol_version = ... # type: typing___Text
    agent_version = ... # type: typing___Text
    public_key = ... # type: bytes
    listen_addrs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes]
    observed_addr = ... # type: bytes
    protocols = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]

    def __init__(self,
        *,
        protocol_version : typing___Optional[typing___Text] = None,
        agent_version : typing___Optional[typing___Text] = None,
        public_key : typing___Optional[bytes] = None,
        listen_addrs : typing___Optional[typing___Iterable[bytes]] = None,
        observed_addr : typing___Optional[bytes] = None,
        protocols : typing___Optional[typing___Iterable[typing___Text]] = None,
        ) -> None: ...
    @classmethod
    def FromString(cls, s: bytes) -> Identify: ...
    def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
    def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
    # HasField/ClearField literal overloads differ between Py2 and Py3
    # (bytes field names are accepted on Py2 only).
    if sys.version_info >= (3,):
        def HasField(self, field_name: typing_extensions___Literal[u"agent_version",u"observed_addr",u"protocol_version",u"public_key"]) -> bool: ...
        def ClearField(self, field_name: typing_extensions___Literal[u"agent_version",u"listen_addrs",u"observed_addr",u"protocol_version",u"protocols",u"public_key"]) -> None: ...
    else:
        def HasField(self, field_name: typing_extensions___Literal[u"agent_version",b"agent_version",u"observed_addr",b"observed_addr",u"protocol_version",b"protocol_version",u"public_key",b"public_key"]) -> bool: ...
        def ClearField(self, field_name: typing_extensions___Literal[u"agent_version",b"agent_version",u"listen_addrs",b"listen_addrs",u"observed_addr",b"observed_addr",u"protocol_version",b"protocol_version",u"protocols",b"protocols",u"public_key",b"public_key"]) -> None: ...
|
|
@ -1,55 +0,0 @@
|
|||
import logging
|
||||
|
||||
from multiaddr import Multiaddr
|
||||
|
||||
from libp2p.host.host_interface import IHost
|
||||
from libp2p.network.stream.exceptions import StreamClosed
|
||||
from libp2p.network.stream.net_stream_interface import INetStream
|
||||
from libp2p.typing import StreamHandlerFn, TProtocol
|
||||
|
||||
from .pb.identify_pb2 import Identify
|
||||
|
||||
# Protocol ID this identify service is registered under.
ID = TProtocol("/ipfs/id/1.0.0")
# Value sent in Identify.protocol_version.
PROTOCOL_VERSION = "ipfs/0.1.0"
# TODO dynamically generate the agent version
AGENT_VERSION = "py-libp2p/alpha"
logger = logging.getLogger("libp2p.identity.identify")
|
||||
|
||||
|
||||
def _multiaddr_to_bytes(maddr: Multiaddr) -> bytes:
|
||||
return maddr.to_bytes()
|
||||
|
||||
|
||||
def _mk_identify_protobuf(host: IHost) -> Identify:
    """Build the Identify response message describing *host*."""
    return Identify(
        protocol_version=PROTOCOL_VERSION,
        agent_version=AGENT_VERSION,
        public_key=host.get_public_key().serialize(),
        listen_addrs=[_multiaddr_to_bytes(addr) for addr in host.get_addrs()],
        # TODO send observed address from ``stream``
        observed_addr=b"",
        protocols=host.get_mux().get_protocols(),
    )
|
||||
|
||||
|
||||
def identify_handler_for(host: IHost) -> StreamHandlerFn:
    """Return a stream handler answering identify requests on behalf of *host*."""

    async def handle_identify(stream: INetStream) -> None:
        remote_peer = stream.muxed_conn.peer_id
        logger.debug("received a request for %s from %s", ID, remote_peer)

        payload = _mk_identify_protobuf(host).SerializeToString()

        try:
            await stream.write(payload)
        except StreamClosed:
            # Best-effort: the requester went away before we could answer.
            logger.debug("Fail to respond to %s request: stream closed", ID)
            return
        await stream.close()
        logger.debug("successfully handled request for %s from %s", ID, remote_peer)

    return handle_identify
|
|
@ -1,65 +0,0 @@
|
|||
from abc import ABC, abstractmethod
|
||||
|
||||
|
||||
class Closer(ABC):
    """Interface for objects that can release their underlying resource."""

    @abstractmethod
    async def close(self) -> None:
        ...
|
||||
|
||||
|
||||
class Reader(ABC):
    """Interface for asynchronous byte readers."""

    @abstractmethod
    async def read(self, n: int = None) -> bytes:
        # NOTE(review): `n` may be None (read "some" bytes), so the
        # annotation is effectively Optional[int].
        ...
|
||||
|
||||
|
||||
class Writer(ABC):
    """Interface for asynchronous byte writers."""

    @abstractmethod
    async def write(self, data: bytes) -> None:
        ...
|
||||
|
||||
|
||||
class WriteCloser(Writer, Closer):
    """Combined interface: asynchronous writer that can be closed."""

    pass
|
||||
|
||||
|
||||
class ReadCloser(Reader, Closer):
    """Combined interface: asynchronous reader that can be closed."""

    pass
|
||||
|
||||
|
||||
class ReadWriter(Reader, Writer):
    """Combined interface: asynchronous reader and writer."""

    pass
|
||||
|
||||
|
||||
class ReadWriteCloser(Reader, Writer, Closer):
    """Combined interface: asynchronous reader/writer that can be closed."""

    pass
|
||||
|
||||
|
||||
class MsgReader(ABC):
    """Interface for reading whole, framed messages."""

    @abstractmethod
    async def read_msg(self) -> bytes:
        ...
|
||||
|
||||
|
||||
class MsgWriter(ABC):
    """Interface for writing whole, framed messages."""

    @abstractmethod
    async def write_msg(self, msg: bytes) -> None:
        ...
|
||||
|
||||
|
||||
class MsgReadWriteCloser(MsgReader, MsgWriter, Closer):
    """Combined interface: framed message reader/writer that can be closed."""

    pass
|
||||
|
||||
|
||||
class Encrypter(ABC):
    """Interface for symmetric encrypt/decrypt of byte payloads."""

    @abstractmethod
    def encrypt(self, data: bytes) -> bytes:
        ...

    @abstractmethod
    def decrypt(self, data: bytes) -> bytes:
        ...
|
||||
|
||||
|
||||
class EncryptedMsgReadWriter(MsgReadWriteCloser, Encrypter):
    """Read/write framed messages with encryption/decryption applied."""
|
|
@ -1,29 +0,0 @@
|
|||
from libp2p.exceptions import BaseLibp2pError
|
||||
|
||||
|
||||
class IOException(BaseLibp2pError):
    """Base class for all I/O-layer errors in libp2p."""

    pass
|
||||
|
||||
|
||||
class IncompleteReadError(IOException):
    """Fewer bytes were read than requested."""
|
||||
|
||||
|
||||
class MsgioException(IOException):
    """Base class for errors raised by the msgio framing layer."""

    pass
|
||||
|
||||
|
||||
class MissingLengthException(MsgioException):
    """A message's length prefix could not be read."""

    pass
|
||||
|
||||
|
||||
class MissingMessageException(MsgioException):
    """A message body was absent after its length prefix."""

    pass
|
||||
|
||||
|
||||
class DecryptionFailedException(MsgioException):
    """A received message could not be decrypted."""

    pass
|
||||
|
||||
|
||||
class MessageTooLarge(MsgioException):
    """A message exceeded the configured maximum size."""

    pass
|
|
@ -1,89 +0,0 @@
|
|||
"""
|
||||
``msgio`` is an implementation of `https://github.com/libp2p/go-msgio`.
|
||||
|
||||
from that repo: "a simple package to r/w length-delimited slices."
|
||||
|
||||
NOTE: currently missing the capability to indicate lengths by "varint" method.
|
||||
"""
|
||||
from abc import abstractmethod
|
||||
|
||||
from libp2p.io.abc import MsgReadWriteCloser, Reader, ReadWriteCloser
|
||||
from libp2p.io.utils import read_exactly
|
||||
from libp2p.utils import decode_uvarint_from_stream, encode_varint_prefixed
|
||||
|
||||
from .exceptions import MessageTooLarge
|
||||
|
||||
# Length prefixes on the wire are big-endian.
BYTE_ORDER = "big"


async def read_length(reader: Reader, size_len_bytes: int) -> int:
    """Read a ``size_len_bytes``-byte big-endian length prefix from *reader*."""
    raw_prefix = await read_exactly(reader, size_len_bytes)
    return int.from_bytes(raw_prefix, byteorder=BYTE_ORDER)


def encode_msg_with_length(msg_bytes: bytes, size_len_bytes: int) -> bytes:
    """Prefix *msg_bytes* with its length encoded on ``size_len_bytes`` bytes.

    Raises ``ValueError`` when the message is too long for the prefix width.
    """
    try:
        length_prefix = len(msg_bytes).to_bytes(size_len_bytes, byteorder=BYTE_ORDER)
    except OverflowError:
        raise ValueError(
            "msg_bytes is too large for `size_len_bytes` bytes length: "
            f"msg_bytes={msg_bytes!r}, size_len_bytes={size_len_bytes}"
        )
    return length_prefix + msg_bytes
|
||||
|
||||
|
||||
class BaseMsgReadWriter(MsgReadWriteCloser):
    """Length-prefixed message framing over a raw ``ReadWriteCloser``.

    Subclasses choose the wire format of the length by implementing
    ``next_msg_len`` and ``encode_msg``.
    """

    read_write_closer: ReadWriteCloser
    size_len_bytes: int

    def __init__(self, read_write_closer: ReadWriteCloser) -> None:
        self.read_write_closer = read_write_closer

    @abstractmethod
    async def next_msg_len(self) -> int:
        ...

    @abstractmethod
    def encode_msg(self, msg: bytes) -> bytes:
        ...

    async def read_msg(self) -> bytes:
        # First the length prefix, then exactly that many payload bytes.
        incoming_len = await self.next_msg_len()
        return await read_exactly(self.read_write_closer, incoming_len)

    async def write_msg(self, msg: bytes) -> None:
        await self.read_write_closer.write(self.encode_msg(msg))

    async def close(self) -> None:
        await self.read_write_closer.close()
|
||||
|
||||
|
||||
class FixedSizeLenMsgReadWriter(BaseMsgReadWriter):
    """Message framing whose length prefix occupies a fixed number of bytes.

    ``size_len_bytes`` must be provided by a subclass or instance before use.
    """

    # Width in bytes of the big-endian length prefix.
    size_len_bytes: int

    async def next_msg_len(self) -> int:
        # Read the fixed-width length prefix from the wire.
        return await read_length(self.read_write_closer, self.size_len_bytes)

    def encode_msg(self, msg: bytes) -> bytes:
        # Prepend the fixed-width length prefix.
        return encode_msg_with_length(msg, self.size_len_bytes)
|
||||
|
||||
|
||||
class VarIntLengthMsgReadWriter(BaseMsgReadWriter):
    """Message framing with an unsigned-varint length prefix, capped at
    ``max_msg_size`` in both directions."""

    # Upper bound on message size; must be provided by subclass/instance.
    max_msg_size: int

    async def next_msg_len(self) -> int:
        incoming_len = await decode_uvarint_from_stream(self.read_write_closer)
        self._reject_oversized(incoming_len)
        return incoming_len

    def encode_msg(self, msg: bytes) -> bytes:
        self._reject_oversized(len(msg))
        return encode_varint_prefixed(msg)

    def _reject_oversized(self, msg_len: int) -> None:
        # Shared size guard for both inbound and outbound messages.
        if msg_len > self.max_msg_size:
            raise MessageTooLarge(
                f"msg_len={msg_len} > max_msg_size={self.max_msg_size}"
            )
|
|
@ -1,40 +0,0 @@
|
|||
import logging
|
||||
|
||||
import trio
|
||||
|
||||
from libp2p.io.abc import ReadWriteCloser
|
||||
from libp2p.io.exceptions import IOException
|
||||
|
||||
logger = logging.getLogger("libp2p.io.trio")
|
||||
|
||||
|
||||
class TrioTCPStream(ReadWriteCloser):
    """Adapter exposing a ``trio.SocketStream`` through the ReadWriteCloser API."""

    stream: trio.SocketStream
    # NOTE: Add both read and write lock to avoid `trio.BusyResourceError`
    read_lock: trio.Lock
    write_lock: trio.Lock

    def __init__(self, stream: trio.SocketStream) -> None:
        self.stream = stream
        self.read_lock = trio.Lock()
        self.write_lock = trio.Lock()

    async def write(self, data: bytes) -> None:
        """Raise `IOException` if the underlying connection breaks."""
        async with self.write_lock:
            try:
                await self.stream.send_all(data)
            except (trio.ClosedResourceError, trio.BrokenResourceError) as error:
                raise IOException from error

    async def read(self, n: int = None) -> bytes:
        """Read up to ``n`` bytes (or "some" bytes when ``n`` is None).

        Raise `IOException` if the underlying connection breaks.
        """
        async with self.read_lock:
            # trio treats receive_some(0) as "wait for data"; short-circuit it.
            if n is not None and n == 0:
                return b""
            try:
                return await self.stream.receive_some(n)
            except (trio.ClosedResourceError, trio.BrokenResourceError) as error:
                raise IOException from error

    async def close(self) -> None:
        await self.stream.aclose()
|
|
@ -1,21 +0,0 @@
|
|||
from libp2p.io.abc import Reader
|
||||
from libp2p.io.exceptions import IncompleteReadError
|
||||
|
||||
# Maximum number of extra reads attempted after the initial one.
DEFAULT_RETRY_READ_COUNT = 100


async def read_exactly(
    reader: Reader, n: int, retry_count: int = DEFAULT_RETRY_READ_COUNT
) -> bytes:
    """
    Read exactly ``n`` bytes from *reader*, retrying short reads.

    NOTE: relying on exceptions to break out on erroneous conditions, like EOF

    :param reader: source of bytes
    :param n: exact number of bytes to accumulate
    :param retry_count: maximum number of additional reads after the first
    :raises IncompleteReadError: if ``n`` bytes could not be gathered within
        ``retry_count`` additional reads (e.g. at EOF)
    """
    data = await reader.read(n)

    # Bug fix: the original raised IncompleteReadError even when the read on
    # the final retry iteration completed the requested count; check
    # completeness before deciding to fail.
    for _ in range(retry_count):
        if len(data) >= n:
            return data
        remaining = n - len(data)
        data += await reader.read(remaining)
    if len(data) >= n:
        return data
    raise IncompleteReadError({"requested_count": n, "received_count": len(data)})
|
5
libp2p/kademlia/__init__.py
Normal file
5
libp2p/kademlia/__init__.py
Normal file
|
@ -0,0 +1,5 @@
|
|||
"""
Kademlia is a Python implementation of the Kademlia protocol which
utilizes the asyncio library.
"""
# Version of the vendored kademlia package (independent of the libp2p release).
__version__ = "1.1"
|
181
libp2p/kademlia/crawling.py
Normal file
181
libp2p/kademlia/crawling.py
Normal file
|
@ -0,0 +1,181 @@
|
|||
import logging
from collections import Counter

# Bug fix: this module lives inside the ``libp2p.kademlia`` package, so its
# siblings are one level up (``.node``), not ``.kademlia.node`` — the latter
# would resolve to the non-existent ``libp2p.kademlia.kademlia`` package and
# fail at import time.
from .node import Node, NodeHeap
from .utils import gather_dict

log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SpiderCrawl:
    """
    Crawl the network and look for given 160-bit keys.
    """
    def __init__(self, protocol, node, peers, ksize, alpha):
        """
        Create a new C{SpiderCrawl}er.

        Args:
            protocol: A :class:`~kademlia.protocol.KademliaProtocol` instance.
            node: A :class:`~kademlia.node.Node` representing the key we're
                  looking for
            peers: A list of :class:`~kademlia.node.Node` instances that
                   provide the entry point for the network
            ksize: The value for k based on the paper
            alpha: The value for alpha based on the paper
        """
        self.protocol = protocol
        self.ksize = ksize
        self.alpha = alpha
        self.node = node
        # Candidate nodes ordered by XOR distance to the target key.
        self.nearest = NodeHeap(self.node, self.ksize)
        # IDs queried in the previous round, to detect a stalled crawl.
        self.lastIDsCrawled = []
        log.info("creating spider with peers: %s", peers)
        self.nearest.push(peers)

    async def _find(self, rpcmethod):
        """
        Get either a value or list of nodes.

        Args:
            rpcmethod: The protocol's callfindValue or callFindNode.

        The process:
          1. calls find_* to current ALPHA nearest not already queried nodes,
             adding results to current nearest list of k nodes.
          2. current nearest list needs to keep track of who has been queried
             already sort by nearest, keep KSIZE
          3. if list is same as last time, next call should be to everyone not
             yet queried
          4. repeat, unless nearest list has all been queried, then ur done
        """
        log.info("crawling network with nearest: %s", str(tuple(self.nearest)))
        count = self.alpha
        # If the candidate set did not change since last round, widen the
        # query to every remaining uncontacted node.
        if self.nearest.getIDs() == self.lastIDsCrawled:
            count = len(self.nearest)
        self.lastIDsCrawled = self.nearest.getIDs()

        # Fire RPCs concurrently, keyed by peer id.
        ds = {}
        for peer in self.nearest.getUncontacted()[:count]:
            ds[peer.id] = rpcmethod(peer, self.node)
            self.nearest.markContacted(peer)
        found = await gather_dict(ds)
        return await self._nodesFound(found)

    async def _nodesFound(self, responses):
        # Subclasses decide how a round of RPC responses is interpreted.
        raise NotImplementedError
||||
|
||||
|
||||
class ValueSpiderCrawl(SpiderCrawl):
    """Spider that searches the network for the value stored under a key."""

    def __init__(self, protocol, node, peers, ksize, alpha):
        SpiderCrawl.__init__(self, protocol, node, peers, ksize, alpha)
        # keep track of the single nearest node without value - per
        # section 2.3 so we can set the key there if found
        self.nearestWithoutValue = NodeHeap(self.node, 1)

    async def find(self):
        """
        Find either the closest nodes or the value requested.
        """
        return await self._find(self.protocol.callFindValue)

    async def _nodesFound(self, responses):
        """
        Handle the result of an iteration in _find.
        """
        toremove = []
        foundValues = []
        for peerid, response in responses.items():
            response = RPCFindResponse(response)
            if not response.happened():
                # Unresponsive peers are evicted from the candidate heap.
                toremove.append(peerid)
            elif response.hasValue():
                foundValues.append(response.getValue())
            else:
                peer = self.nearest.getNodeById(peerid)
                self.nearestWithoutValue.push(peer)
                self.nearest.push(response.getNodeList())
        self.nearest.remove(toremove)

        if len(foundValues) > 0:
            return await self._handleFoundValues(foundValues)
        if self.nearest.allBeenContacted():
            # not found!
            return None
        return await self.find()

    async def _handleFoundValues(self, values):
        """
        We got some values! Exciting. But let's make sure
        they're all the same or freak out a little bit. Also,
        make sure we tell the nearest node that *didn't* have
        the value to store it.
        """
        valueCounts = Counter(values)
        if len(valueCounts) != 1:
            log.warning("Got multiple values for key %i: %s",
                        self.node.long_id, str(values))
        # Majority wins when peers disagree.
        value = valueCounts.most_common(1)[0][0]

        # Cache the value at the closest observed node lacking it (paper §2.3).
        peerToSaveTo = self.nearestWithoutValue.popleft()
        if peerToSaveTo is not None:
            await self.protocol.callStore(peerToSaveTo, self.node.id, value)
        return value
|
||||
|
||||
|
||||
class NodeSpiderCrawl(SpiderCrawl):
    """Spider that resolves the nodes closest to the target key."""

    async def find(self):
        """Find the closest nodes."""
        return await self._find(self.protocol.callFindNode)

    async def _nodesFound(self, responses):
        """Handle the result of an iteration in ``_find``."""
        unresponsive = []
        for peer_id, raw_response in responses.items():
            result = RPCFindResponse(raw_response)
            if result.happened():
                self.nearest.push(result.getNodeList())
            else:
                # Drop peers that failed to answer.
                unresponsive.append(peer_id)
        self.nearest.remove(unresponsive)

        if self.nearest.allBeenContacted():
            return list(self.nearest)
        return await self.find()
|
||||
|
||||
|
||||
class RPCFindResponse:
    """Wrapper around the raw result of an RPC ``find`` call."""

    def __init__(self, response):
        """
        A wrapper for the result of a RPC find.

        Args:
            response: This will be a tuple of (<response received>, <value>)
                where <value> will be a list of tuples if not found or
                a dictionary of {'value': v} where v is the value desired
        """
        self.response = response

    def happened(self):
        """Did the other host actually respond?"""
        responded = self.response[0]
        return responded

    def hasValue(self):
        """True when the peer answered with a stored value (a dict payload)."""
        payload = self.response[1]
        return isinstance(payload, dict)

    def getValue(self):
        """Extract the stored value from a value-bearing response."""
        return self.response[1]['value']

    def getNodeList(self):
        """
        Get the node list in the response. If there's no value, this should
        be set.
        """
        raw_nodes = self.response[1] or []
        return [Node(*nodeple) for nodeple in raw_nodes]
|
258
libp2p/kademlia/network.py
Normal file
258
libp2p/kademlia/network.py
Normal file
|
@ -0,0 +1,258 @@
|
|||
"""
Package for interacting on the network at a high level.
"""
import asyncio
import logging
import pickle
import random

# Bug fix: siblings of this module live directly in ``libp2p.kademlia``;
# ``from .kademlia.x import ...`` would look for the non-existent nested
# ``libp2p.kademlia.kademlia`` package and fail at import time.
from .crawling import NodeSpiderCrawl, ValueSpiderCrawl
from .node import Node
from .protocol import KademliaProtocol
from .storage import ForgetfulStorage
from .utils import digest

log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Server:
    """
    High level view of a node instance. This is the object that should be
    created to start listening as an active node on the network.
    """

    protocol_class = KademliaProtocol

    def __init__(self, ksize=20, alpha=3, node_id=None, storage=None):
        """
        Create a server instance. This will start listening on the given port.

        Args:
            ksize (int): The k parameter from the paper
            alpha (int): The alpha parameter from the paper
            node_id: The id for this node on the network.
            storage: An instance that implements
                     :interface:`~kademlia.storage.IStorage`
        """
        self.ksize = ksize
        self.alpha = alpha
        self.storage = storage or ForgetfulStorage()
        self.node = Node(node_id or digest(random.getrandbits(255)))
        self.transport = None
        self.protocol = None
        # asyncio timer handles; populated by listen()/saveStateRegularly().
        self.refresh_loop = None
        self.save_state_loop = None

    def stop(self):
        # Tear down the UDP transport and cancel the periodic timers.
        if self.transport is not None:
            self.transport.close()

        if self.refresh_loop:
            self.refresh_loop.cancel()

        if self.save_state_loop:
            self.save_state_loop.cancel()

    def _create_protocol(self):
        # Factory passed to create_datagram_endpoint().
        return self.protocol_class(self.node, self.storage, self.ksize)

    def listen(self, port, interface='0.0.0.0'):
        """
        Start listening on the given port.

        Provide interface="::" to accept ipv6 address
        """
        loop = asyncio.get_event_loop()
        listen = loop.create_datagram_endpoint(self._create_protocol,
                                               local_addr=(interface, port))
        log.info("Node %i listening on %s:%i",
                 self.node.long_id, interface, port)
        self.transport, self.protocol = loop.run_until_complete(listen)
        # finally, schedule refreshing table
        self.refresh_table()

    def refresh_table(self):
        # Kick off one refresh now and reschedule itself hourly.
        log.debug("Refreshing routing table")
        asyncio.ensure_future(self._refresh_table())
        loop = asyncio.get_event_loop()
        self.refresh_loop = loop.call_later(3600, self.refresh_table)

    async def _refresh_table(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for node_id in self.protocol.getRefreshIDs():
            node = Node(node_id)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest,
                                     self.ksize, self.alpha)
            ds.append(spider.find())

        # do our crawling
        await asyncio.gather(*ds)

        # now republish keys older than one hour
        for dkey, value in self.storage.iteritemsOlderThan(3600):
            await self.set_digest(dkey, value)

    def bootstrappableNeighbors(self):
        """
        Get a :class:`list` of (ip, port) :class:`tuple` pairs suitable for
        use as an argument to the bootstrap method.

        The server should have been bootstrapped
        already - this is just a utility for getting some neighbors and then
        storing them if this server is going down for a while. When it comes
        back up, the list of nodes can be used to bootstrap.
        """
        neighbors = self.protocol.router.findNeighbors(self.node)
        # tuple(n) is (id, ip, port); keep only (ip, port).
        return [tuple(n)[-2:] for n in neighbors]

    async def bootstrap(self, addrs):
        """
        Bootstrap the server by connecting to other known nodes in the network.

        Args:
            addrs: A `list` of (ip, port) `tuple` pairs. Note that only IP
                   addresses are acceptable - hostnames will cause an error.
        """
        log.debug("Attempting to bootstrap node with %i initial contacts",
                  len(addrs))
        cos = list(map(self.bootstrap_node, addrs))
        gathered = await asyncio.gather(*cos)
        # Peers that failed to answer the ping come back as None.
        nodes = [node for node in gathered if node is not None]
        spider = NodeSpiderCrawl(self.protocol, self.node, nodes,
                                 self.ksize, self.alpha)
        return await spider.find()

    async def bootstrap_node(self, addr):
        # Ping one address; return a Node on success, None otherwise.
        result = await self.protocol.ping(addr, self.node.id)
        return Node(result[1], addr[0], addr[1]) if result[0] else None

    async def get(self, key):
        """
        Get a key if the network has it.

        Returns:
            :class:`None` if not found, the value otherwise.
        """
        log.info("Looking up key %s", key)
        dkey = digest(key)
        # if this node has it, return it
        if self.storage.get(dkey) is not None:
            return self.storage.get(dkey)
        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            log.warning("There are no known neighbors to get key %s", key)
            return None
        spider = ValueSpiderCrawl(self.protocol, node, nearest,
                                  self.ksize, self.alpha)
        return await spider.find()

    async def set(self, key, value):
        """
        Set the given string key to the given value in the network.
        """
        if not check_dht_value_type(value):
            raise TypeError(
                "Value must be of type int, float, bool, str, or bytes"
            )
        log.info("setting '%s' = '%s' on network", key, value)
        dkey = digest(key)
        return await self.set_digest(dkey, value)

    async def set_digest(self, dkey, value):
        """
        Set the given SHA1 digest key (bytes) to the given value in the
        network.
        """
        node = Node(dkey)

        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            log.warning("There are no known neighbors to set key %s",
                        dkey.hex())
            return False

        spider = NodeSpiderCrawl(self.protocol, node, nearest,
                                 self.ksize, self.alpha)
        nodes = await spider.find()
        log.info("setting '%s' on %s", dkey.hex(), list(map(str, nodes)))

        # if this node is close too, then store here as well
        # NOTE(review): max() raises ValueError when the crawl returns no
        # nodes — confirm spider.find() cannot return an empty list here.
        biggest = max([n.distanceTo(node) for n in nodes])
        if self.node.distanceTo(node) < biggest:
            self.storage[dkey] = value
        ds = [self.protocol.callStore(n, dkey, value) for n in nodes]
        # return true only if at least one store call succeeded
        return any(await asyncio.gather(*ds))

    def saveState(self, fname):
        """
        Save the state of this node (the alpha/ksize/id/immediate neighbors)
        to a cache file with the given fname.
        """
        log.info("Saving state to %s", fname)
        data = {
            'ksize': self.ksize,
            'alpha': self.alpha,
            'id': self.node.id,
            'neighbors': self.bootstrappableNeighbors()
        }
        if len(data['neighbors']) == 0:
            log.warning("No known neighbors, so not writing to cache.")
            return
        with open(fname, 'wb') as f:
            pickle.dump(data, f)

    @classmethod
    def loadState(self, fname):
        """
        Load the state of this node (the alpha/ksize/id/immediate neighbors)
        from a cache file with the given fname.
        """
        # NOTE(review): first parameter of this classmethod is named `self`
        # but receives the class; conventionally it should be `cls`.
        # NOTE(review): pickle.load executes arbitrary code if the cache file
        # is untrusted — only load files this node wrote itself.
        log.info("Loading state from %s", fname)
        with open(fname, 'rb') as f:
            data = pickle.load(f)
        s = Server(data['ksize'], data['alpha'], data['id'])
        if len(data['neighbors']) > 0:
            # NOTE(review): `bootstrap` is a coroutine; calling it without
            # awaiting or scheduling it means bootstrapping never actually
            # runs — confirm callers handle this.
            s.bootstrap(data['neighbors'])
        return s

    def saveStateRegularly(self, fname, frequency=600):
        """
        Save the state of node with a given regularity to the given
        filename.

        Args:
            fname: File name to save regularly to
            frequency: Frequency in seconds that the state should be saved.
                        By default, 10 minutes.
        """
        self.saveState(fname)
        loop = asyncio.get_event_loop()
        self.save_state_loop = loop.call_later(frequency,
                                               self.saveStateRegularly,
                                               fname,
                                               frequency)
||||
|
||||
|
||||
def check_dht_value_type(value):
    """
    Checks to see if the type of the value is a valid type for
    placing in the dht.
    """
    # Exact type match on purpose: subclasses of these types are rejected.
    allowed_types = (int, float, bool, str, bytes)
    return type(value) in allowed_types
|
115
libp2p/kademlia/node.py
Normal file
115
libp2p/kademlia/node.py
Normal file
|
@ -0,0 +1,115 @@
|
|||
from operator import itemgetter
|
||||
import heapq
|
||||
|
||||
|
||||
class Node:
    """A peer in the Kademlia network, identified by a 160-bit id."""

    def __init__(self, node_id, ip=None, port=None):
        self.id = node_id
        self.ip = ip
        self.port = port
        # Integer form of the id, used for XOR distance arithmetic.
        self.long_id = int(node_id.hex(), 16)

    def sameHomeAs(self, node):
        """True if *node* shares this node's (ip, port) endpoint."""
        return (self.ip, self.port) == (node.ip, node.port)

    def distanceTo(self, node):
        """XOR distance between this node's id and another's."""
        return self.long_id ^ node.long_id

    def __iter__(self):
        """Enables use of Node as a tuple - i.e., tuple(node) works."""
        yield self.id
        yield self.ip
        yield self.port

    def __repr__(self):
        return repr([self.long_id, self.ip, self.port])

    def __str__(self):
        return "%s:%s" % (self.ip, str(self.port))
|
||||
|
||||
|
||||
class NodeHeap:
    """
    A heap of nodes ordered by distance to a given node.
    """

    def __init__(self, node, maxsize):
        """
        Constructor.

        @param node: The node to measure all distances from.
        @param maxsize: The maximum size that this heap can grow to.
        """
        self.node = node
        self.heap = []
        self.contacted = set()
        self.maxsize = maxsize

    def remove(self, peerIDs):
        """
        Remove a list of peer ids from this heap.  Note that while this
        heap retains a constant visible size (based on the iterator), its
        actual size may be quite a bit larger than what's exposed.
        Removal of nodes may therefore not change the visible size, as
        previously hidden nodes suddenly become visible.
        """
        doomed = set(peerIDs)
        if not doomed:
            return
        survivors = []
        for entry in self.heap:
            if entry[1].id not in doomed:
                heapq.heappush(survivors, entry)
        self.heap = survivors

    def getNodeById(self, node_id):
        """Return the stored node with the given id, or None."""
        matches = (nd for _, nd in self.heap if nd.id == node_id)
        return next(matches, None)

    def allBeenContacted(self):
        """True once every visible node has been marked contacted."""
        return not self.getUncontacted()

    def getIDs(self):
        """Ids of the visible (closest ``maxsize``) nodes."""
        return [nd.id for nd in self]

    def markContacted(self, node):
        """Record that *node* has been contacted."""
        self.contacted.add(node.id)

    def popleft(self):
        """Pop and return the closest node, or None when empty."""
        if not len(self):
            return None
        return heapq.heappop(self.heap)[1]

    def push(self, nodes):
        """
        Push nodes onto heap.

        @param nodes: This can be a single item or a C{list}.
        """
        if not isinstance(nodes, list):
            nodes = [nodes]
        for candidate in nodes:
            # Skip duplicates (by id) already in the heap.
            if candidate in self:
                continue
            heapq.heappush(self.heap,
                           (self.node.distanceTo(candidate), candidate))

    def __len__(self):
        return min(len(self.heap), self.maxsize)

    def __iter__(self):
        closest = heapq.nsmallest(self.maxsize, self.heap)
        for _, nd in closest:
            yield nd

    def __contains__(self, node):
        return any(node.id == nd.id for _, nd in self.heap)

    def getUncontacted(self):
        """Visible nodes not yet marked contacted."""
        return [nd for nd in self if nd.id not in self.contacted]
|
128
libp2p/kademlia/protocol.py
Normal file
128
libp2p/kademlia/protocol.py
Normal file
|
@ -0,0 +1,128 @@
|
|||
import random
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from rpcudp.protocol import RPCProtocol
|
||||
|
||||
from .kademlia.node import Node
|
||||
from .kademlia.routing import RoutingTable
|
||||
from .kademlia.utils import digest
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class KademliaProtocol(RPCProtocol):
    """
    Kademlia wire protocol on top of rpcudp's RPCProtocol: ``rpc_*``
    methods handle inbound requests, ``call*`` coroutines issue the
    corresponding outbound requests and feed results back into the
    routing table.
    """

    def __init__(self, sourceNode, storage, ksize):
        # sourceNode: the Node representing this server; storage: local
        # key/value store; ksize: bucket size k for the routing table.
        RPCProtocol.__init__(self)
        self.router = RoutingTable(self, ksize, sourceNode)
        self.storage = storage
        self.sourceNode = sourceNode

    def getRefreshIDs(self):
        """
        Get ids to search for to keep old buckets up to date.
        """
        ids = []
        for bucket in self.router.getLonelyBuckets():
            # Pick a random 160-bit (20-byte) id inside the stale
            # bucket's range; searching for it refreshes the bucket.
            rid = random.randint(*bucket.range).to_bytes(20, byteorder='big')
            ids.append(rid)
        return ids

    def rpc_stun(self, sender):
        # STUN-style echo: tell the caller the (ip, port) we saw it from.
        return sender

    def rpc_ping(self, sender, nodeid):
        """Handle an inbound ping: learn the sender, reply with our id."""
        source = Node(nodeid, sender[0], sender[1])
        self.welcomeIfNewNode(source)
        return self.sourceNode.id

    def rpc_store(self, sender, nodeid, key, value):
        """Handle an inbound store; always accepts and stores the value."""
        source = Node(nodeid, sender[0], sender[1])
        self.welcomeIfNewNode(source)
        log.debug("got a store request from %s, storing '%s'='%s'",
                  sender, key.hex(), value)
        self.storage[key] = value
        return True

    def rpc_find_node(self, sender, nodeid, key):
        """Return (id, ip, port) tuples of neighbors closest to *key*."""
        log.info("finding neighbors of %i in local table",
                 int(nodeid.hex(), 16))
        source = Node(nodeid, sender[0], sender[1])
        self.welcomeIfNewNode(source)
        node = Node(key)
        # Exclude the requester from its own neighbor list.
        neighbors = self.router.findNeighbors(node, exclude=source)
        return list(map(tuple, neighbors))

    def rpc_find_value(self, sender, nodeid, key):
        """Return {'value': v} when stored locally, else closest neighbors."""
        source = Node(nodeid, sender[0], sender[1])
        self.welcomeIfNewNode(source)
        value = self.storage.get(key, None)
        if value is None:
            # Not stored here: degrade to find_node semantics.
            return self.rpc_find_node(sender, nodeid, key)
        return {'value': value}

    async def callFindNode(self, nodeToAsk, nodeToFind):
        """Outbound FIND_NODE; result[0] is the responded flag (see
        handleCallResponse)."""
        address = (nodeToAsk.ip, nodeToAsk.port)
        result = await self.find_node(address, self.sourceNode.id,
                                      nodeToFind.id)
        return self.handleCallResponse(result, nodeToAsk)

    async def callFindValue(self, nodeToAsk, nodeToFind):
        """Outbound FIND_VALUE; routed through handleCallResponse."""
        address = (nodeToAsk.ip, nodeToAsk.port)
        result = await self.find_value(address, self.sourceNode.id,
                                       nodeToFind.id)
        return self.handleCallResponse(result, nodeToAsk)

    async def callPing(self, nodeToAsk):
        """Outbound PING; routed through handleCallResponse."""
        address = (nodeToAsk.ip, nodeToAsk.port)
        result = await self.ping(address, self.sourceNode.id)
        return self.handleCallResponse(result, nodeToAsk)

    async def callStore(self, nodeToAsk, key, value):
        """Outbound STORE; routed through handleCallResponse."""
        address = (nodeToAsk.ip, nodeToAsk.port)
        result = await self.store(address, self.sourceNode.id, key, value)
        return self.handleCallResponse(result, nodeToAsk)

    def welcomeIfNewNode(self, node):
        """
        Given a new node, send it all the keys/values it should be storing,
        then add it to the routing table.

        @param node: A new node that just joined (or that we just found out
        about).

        Process:
        For each key in storage, get k closest nodes.  If newnode is closer
        than the furthest in that list, and the node for this server
        is closer than the closest in that list, then store the key/value
        on the new node (per section 2.5 of the paper)
        """
        if not self.router.isNewNode(node):
            return

        log.info("never seen %s before, adding to router", node)
        for key, value in self.storage.items():
            keynode = Node(digest(key))
            neighbors = self.router.findNeighbors(keynode)
            if len(neighbors) > 0:
                # newNodeClose/thisNodeClosest are only read below when
                # neighbors is non-empty (the `or` short-circuits first),
                # so the conditional assignment is safe.
                last = neighbors[-1].distanceTo(keynode)
                newNodeClose = node.distanceTo(keynode) < last
                first = neighbors[0].distanceTo(keynode)
                thisNodeClosest = self.sourceNode.distanceTo(keynode) < first
            if len(neighbors) == 0 or (newNodeClose and thisNodeClosest):
                # Fire-and-forget replication to the newcomer.
                asyncio.ensure_future(self.callStore(node, key, value))
        self.router.addContact(node)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if not result[0]:
            log.warning("no response from %s, removing from router", node)
            self.router.removeContact(node)
            return result

        log.info("got successful response from %s", node)
        self.welcomeIfNewNode(node)
        return result
|
185
libp2p/kademlia/routing.py
Normal file
185
libp2p/kademlia/routing.py
Normal file
|
@ -0,0 +1,185 @@
|
|||
import heapq
|
||||
import time
|
||||
import operator
|
||||
import asyncio
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from .kademlia.utils import OrderedSet, sharedPrefix, bytesToBitString
|
||||
|
||||
|
||||
class KBucket:
    """
    A k-bucket: up to ``ksize`` nodes whose ids fall within ``range``
    (inclusive bounds), ordered oldest-first, plus a replacement cache
    for overflow (section 4.1 of the Kademlia paper).
    """

    def __init__(self, rangeLower, rangeUpper, ksize):
        self.range = (rangeLower, rangeUpper)  # inclusive id bounds
        self.nodes = OrderedDict()             # id -> Node, oldest first
        self.replacementNodes = OrderedSet()   # overflow cache
        self.touchLastUpdated()
        self.ksize = ksize

    def touchLastUpdated(self):
        """Record bucket activity, used for staleness/refresh checks."""
        self.lastUpdated = time.monotonic()

    def getNodes(self):
        return list(self.nodes.values())

    def split(self):
        """
        Split this bucket into two covering the lower and upper halves
        of its range, distributing the current nodes between them.
        """
        # Fixed: use floor division.  True division (`/`) yields a
        # float, and floats cannot represent 160-bit ids exactly, so
        # the bucket boundaries were silently corrupted for the real
        # id space (0 .. 2**160).
        midpoint = (self.range[0] + self.range[1]) // 2
        one = KBucket(self.range[0], midpoint, self.ksize)
        two = KBucket(midpoint + 1, self.range[1], self.ksize)
        for node in self.nodes.values():
            bucket = one if node.long_id <= midpoint else two
            bucket.nodes[node.id] = node
        return (one, two)

    def removeNode(self, node):
        """Remove *node* if present, promoting a replacement-cache entry."""
        if node.id not in self.nodes:
            return

        # delete node, and see if we can add a replacement
        del self.nodes[node.id]
        if len(self.replacementNodes) > 0:
            newnode = self.replacementNodes.pop()
            self.nodes[newnode.id] = newnode

    def hasInRange(self, node):
        """True when the node's id lies within the inclusive range."""
        return self.range[0] <= node.long_id <= self.range[1]

    def isNewNode(self, node):
        """True when *node* is not already in this bucket."""
        return node.id not in self.nodes

    def addNode(self, node):
        """
        Add a C{Node} to the C{KBucket}.  Return True if successful,
        False if the bucket is full.

        If the bucket is full, keep track of node in a replacement list,
        per section 4.1 of the paper.
        """
        if node.id in self.nodes:
            # Re-adding moves the node to the end (most recently seen).
            del self.nodes[node.id]
            self.nodes[node.id] = node
        elif len(self) < self.ksize:
            self.nodes[node.id] = node
        else:
            self.replacementNodes.push(node)
            return False
        return True

    def depth(self):
        """Length of the bit prefix shared by every node id in the bucket."""
        vals = self.nodes.values()
        sp = sharedPrefix([bytesToBitString(n.id) for n in vals])
        return len(sp)

    def head(self):
        """Oldest (least recently seen) node in the bucket."""
        return list(self.nodes.values())[0]

    def __getitem__(self, node_id):
        return self.nodes.get(node_id, None)

    def __len__(self):
        return len(self.nodes)
|
||||
|
||||
|
||||
class TableTraverser:
    """
    Iterator over a routing table's nodes, starting in the bucket that
    would hold ``startNode`` and alternating outward left/right.
    """

    def __init__(self, table, startNode):
        index = table.getBucketFor(startNode)
        table.buckets[index].touchLastUpdated()
        self.currentNodes = table.buckets[index].getNodes()
        self.leftBuckets = table.buckets[:index]
        self.rightBuckets = table.buckets[(index + 1):]
        self.left = True

    def __iter__(self):
        return self

    def __next__(self):
        """
        Pop an item from the left subtree, then right, then left, etc.
        """
        # Iterative form of the original tail recursion: drain the
        # current bucket, then refill from the next bucket on the
        # alternating side until both sides are exhausted.
        while True:
            if self.currentNodes:
                return self.currentNodes.pop()
            if self.left and self.leftBuckets:
                self.currentNodes = self.leftBuckets.pop().getNodes()
                self.left = False
            elif self.rightBuckets:
                self.currentNodes = self.rightBuckets.pop(0).getNodes()
                self.left = True
            else:
                raise StopIteration
|
||||
|
||||
|
||||
class RoutingTable:
    """
    The routing table: an ordered list of KBuckets that together cover
    the id space [0, 2**160].
    """

    def __init__(self, protocol, ksize, node):
        """
        @param node: The node that represents this server.  It won't
        be added to the routing table, but will be needed later to
        determine which buckets to split or not.
        """
        self.node = node
        self.protocol = protocol
        self.ksize = ksize
        self.flush()

    def flush(self):
        """Reset to a single bucket spanning the whole id space."""
        self.buckets = [KBucket(0, 2 ** 160, self.ksize)]

    def splitBucket(self, index):
        """Replace bucket *index* with its two halves, keeping order."""
        one, two = self.buckets[index].split()
        self.buckets[index] = one
        self.buckets.insert(index + 1, two)

    def getLonelyBuckets(self):
        """
        Get all of the buckets that haven't been updated in over
        an hour.
        """
        hrago = time.monotonic() - 3600
        return [b for b in self.buckets if b.lastUpdated < hrago]

    def removeContact(self, node):
        """Remove *node* from whichever bucket holds it (no-op if absent)."""
        index = self.getBucketFor(node)
        self.buckets[index].removeNode(node)

    def isNewNode(self, node):
        """True when *node* is not yet in its bucket."""
        index = self.getBucketFor(node)
        return self.buckets[index].isNewNode(node)

    def addContact(self, node):
        """Add *node*, splitting or pinging per section 4.2 of the paper."""
        index = self.getBucketFor(node)
        bucket = self.buckets[index]

        # this will succeed unless the bucket is full
        if bucket.addNode(node):
            return

        # Per section 4.2 of paper, split if the bucket has the node
        # in its range or if the depth is not congruent to 0 mod 5
        if bucket.hasInRange(self.node) or bucket.depth() % 5 != 0:
            self.splitBucket(index)
            self.addContact(node)
        else:
            # Bucket full and unsplittable: ping the oldest node; the
            # call-response handler evicts it if it no longer answers.
            asyncio.ensure_future(self.protocol.callPing(bucket.head()))

    def getBucketFor(self, node):
        """
        Get the index of the bucket that the given node would fall into.
        """
        for index, bucket in enumerate(self.buckets):
            # Fixed off-by-one: bucket bounds are inclusive (see
            # KBucket.hasInRange, which uses <=), so use <= here too.
            # With the old strict <, a node whose id equals a bucket's
            # upper bound was assigned to the *next* bucket, and an id
            # equal to the last bucket's bound fell off the end
            # (returning None).
            if node.long_id <= bucket.range[1]:
                return index

    def findNeighbors(self, node, k=None, exclude=None):
        """Return up to k closest nodes to *node*, optionally skipping
        the peer at *exclude*'s address."""
        k = k or self.ksize
        nodes = []
        for neighbor in TableTraverser(self, node):
            notexcluded = exclude is None or not neighbor.sameHomeAs(exclude)
            if neighbor.id != node.id and notexcluded:
                heapq.heappush(nodes, (node.distanceTo(neighbor), neighbor))
            if len(nodes) == k:
                break

        return list(map(operator.itemgetter(1), heapq.nsmallest(k, nodes)))
|
97
libp2p/kademlia/storage.py
Normal file
97
libp2p/kademlia/storage.py
Normal file
|
@ -0,0 +1,97 @@
|
|||
import time
|
||||
from itertools import takewhile
|
||||
import operator
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
class IStorage:
    """
    Local storage for this node.
    IStorage implementations of get must return the same type as put in by set
    """

    def __setitem__(self, key, value):
        """
        Set a key to the given value.
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Get the given key.  If item doesn't exist, raises C{KeyError}
        """
        raise NotImplementedError

    def get(self, key, default=None):
        """
        Get given key.  If not found, return default.
        """
        raise NotImplementedError

    def iteritemsOlderThan(self, secondsOld):
        """
        Return an iterator over (key, value) tuples for items older
        than the given secondsOld.
        """
        raise NotImplementedError

    def __iter__(self):
        """
        Get the iterator for this storage, should yield tuple of (key, value)
        """
        raise NotImplementedError


class ForgetfulStorage(IStorage):
    """Dict-backed storage that drops entries older than ``ttl`` seconds."""

    def __init__(self, ttl=604800):
        """
        By default, max age is a week.
        """
        self.data = OrderedDict()  # key -> (birthday, value), oldest first
        self.ttl = ttl

    def __setitem__(self, key, value):
        # Re-inserting moves the key to the end (freshest position).
        if key in self.data:
            del self.data[key]
        self.data[key] = (time.monotonic(), value)
        self.cull()

    def cull(self):
        # iteritemsOlderThan returns a materialized list, so popping
        # while looping over it is safe.
        for _ in self.iteritemsOlderThan(self.ttl):
            self.data.popitem(last=False)

    def get(self, key, default=None):
        self.cull()
        return self[key] if key in self.data else default

    def __getitem__(self, key):
        self.cull()
        return self.data[key][1]

    def __iter__(self):
        self.cull()
        return iter(self.data)

    def __repr__(self):
        self.cull()
        return repr(self.data)

    def iteritemsOlderThan(self, secondsOld):
        """List of (key, value) pairs born at least *secondsOld* ago."""
        minBirthday = time.monotonic() - secondsOld
        # Entries are oldest-first, so takewhile stops at the first
        # entry that is still fresh.
        expired = takewhile(lambda rec: minBirthday >= rec[1],
                            self._tripleIterable())
        return [(key, value) for key, _, value in expired]

    def _tripleIterable(self):
        # Yield (key, birthday, value) triples in insertion order.
        for key, (birthday, value) in self.data.items():
            yield (key, birthday, value)

    def items(self):
        self.cull()
        return ((key, value) for key, (_, value) in self.data.items())
|
57
libp2p/kademlia/utils.py
Normal file
57
libp2p/kademlia/utils.py
Normal file
|
@ -0,0 +1,57 @@
|
|||
"""
|
||||
General catchall for functions that don't make sense as methods.
|
||||
"""
|
||||
import hashlib
|
||||
import operator
|
||||
import asyncio
|
||||
|
||||
|
||||
async def gather_dict(d):
    """Await every coroutine value of *d*; return {key: result}."""
    keys = list(d.keys())
    results = await asyncio.gather(*d.values())
    return dict(zip(keys, results))
|
||||
|
||||
|
||||
def digest(s):
    """SHA-1 digest of *s*; non-bytes input is stringified and utf8-encoded."""
    data = s if isinstance(s, bytes) else str(s).encode('utf8')
    return hashlib.sha1(data).digest()
|
||||
|
||||
|
||||
class OrderedSet(list):
    """
    Acts like a list in all ways, except in the behavior of the
    :meth:`push` method.
    """

    def push(self, thing):
        """
        1. If the item exists in the list, it's removed
        2. The item is pushed to the end of the list
        """
        # EAFP: attempt the removal and ignore a miss.
        try:
            self.remove(thing)
        except ValueError:
            pass
        self.append(thing)
|
||||
|
||||
|
||||
def sharedPrefix(args):
    """
    Find the shared prefix between the strings.

    For instance:

        sharedPrefix(['blahblah', 'blahwhat'])

    returns 'blah'.
    """
    shortest = min(map(len, args))
    prefix_len = shortest
    for pos in range(shortest):
        # Column of characters at this position across all strings;
        # more than one distinct character ends the common prefix.
        column = {s[pos] for s in args}
        if len(column) != 1:
            prefix_len = pos
            break
    return args[0][:prefix_len]
|
||||
|
||||
|
||||
def bytesToBitString(bites):
    """Render a bytes object as a binary string, 8 zero-padded bits per byte."""
    return "".join(format(bite, '08b') for bite in bites)
|
|
@ -1,5 +0,0 @@
|
|||
from libp2p.io.exceptions import IOException
|
||||
|
||||
|
||||
class RawConnError(IOException):
    """Raised when a read/write on a raw (unmultiplexed) connection fails."""
    pass
|
|
@ -1,21 +0,0 @@
|
|||
from abc import abstractmethod
|
||||
from typing import Tuple
|
||||
|
||||
import trio
|
||||
|
||||
from libp2p.io.abc import Closer
|
||||
from libp2p.network.stream.net_stream_interface import INetStream
|
||||
from libp2p.stream_muxer.abc import IMuxedConn
|
||||
|
||||
|
||||
class INetConn(Closer):
    """
    Interface for a network-level connection: exposes the underlying
    muxed connection, a started event, stream creation/listing, and
    (via Closer) close().
    """
    # The multiplexed connection this network connection wraps.
    muxed_conn: IMuxedConn
    # Set once the connection is up and usable.
    event_started: trio.Event

    @abstractmethod
    async def new_stream(self) -> INetStream:
        ...

    @abstractmethod
    def get_streams(self) -> Tuple[INetStream, ...]:
        ...
|
|
@ -1,36 +1,25 @@
|
|||
from libp2p.io.abc import ReadWriteCloser
|
||||
from libp2p.io.exceptions import IOException
|
||||
|
||||
from .exceptions import RawConnError
|
||||
from .raw_connection_interface import IRawConnection
|
||||
|
||||
|
||||
class RawConnection(IRawConnection):
|
||||
stream: ReadWriteCloser
|
||||
is_initiator: bool
|
||||
|
||||
def __init__(self, stream: ReadWriteCloser, initiator: bool) -> None:
|
||||
self.stream = stream
|
||||
self.is_initiator = initiator
|
||||
def __init__(self, ip, port, reader, writer, initiator):
|
||||
# pylint: disable=too-many-arguments
|
||||
self.conn_ip = ip
|
||||
self.conn_port = port
|
||||
self.reader = reader
|
||||
self.writer = writer
|
||||
self._next_id = 0 if initiator else 1
|
||||
self.initiator = initiator
|
||||
|
||||
async def write(self, data: bytes) -> None:
|
||||
"""Raise `RawConnError` if the underlying connection breaks."""
|
||||
try:
|
||||
await self.stream.write(data)
|
||||
except IOException as error:
|
||||
raise RawConnError from error
|
||||
def close(self):
|
||||
self.writer.close()
|
||||
|
||||
async def read(self, n: int = None) -> bytes:
|
||||
def next_stream_id(self):
|
||||
"""
|
||||
Read up to ``n`` bytes from the underlying stream. This call is
|
||||
delegated directly to the underlying ``self.reader``.
|
||||
|
||||
Raise `RawConnError` if the underlying connection breaks
|
||||
Get next available stream id
|
||||
:return: next available stream id for the connection
|
||||
"""
|
||||
try:
|
||||
return await self.stream.read(n)
|
||||
except IOException as error:
|
||||
raise RawConnError from error
|
||||
|
||||
async def close(self) -> None:
|
||||
await self.stream.close()
|
||||
next_id = self._next_id
|
||||
self._next_id += 2
|
||||
return next_id
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user