mirror of
https://github.com/fluencelabs/js-libp2p
synced 2025-07-08 13:21:34 +00:00
Compare commits
89 Commits
chore/use-
...
v0.36.2
Author | SHA1 | Date | |
---|---|---|---|
bad9e8c0ff | |||
902f10d58d | |||
fc12973344 | |||
d44bd9094f | |||
831ed39701 | |||
ff32eba6a0 | |||
9b22c6e2f9 | |||
d8ceb0bc66 | |||
00e49592a3 | |||
63aa480800 | |||
b7e87066a6 | |||
4c3bf01f35 | |||
12f1bb0aee | |||
a4bba35948 | |||
fc43db750d | |||
bf1fc325b6 | |||
79b356a313 | |||
54e77221eb | |||
85e23eb1cb | |||
cbd9bad86d | |||
75b922dc21 | |||
280bb1b1f6 | |||
e0354b4c6b | |||
0264eb62ee | |||
a0bfe8b534 | |||
2963b852db | |||
13d45de376 | |||
978eb3676f | |||
0a4dc54d08 | |||
c3700f55d5 | |||
96d3461393 | |||
5e5d11ec19 | |||
4cadbad102 | |||
5043cd5643 | |||
ef54e0a10e | |||
61bf546c46 | |||
d2b7ec0f6b | |||
79b3cfc6ad | |||
f18fc80b70 | |||
b4b432406e | |||
bbdd559a02 | |||
4070dcdf55 | |||
cb0d7d6c99 | |||
d1c48dcbed | |||
c4a442788b | |||
70a4bb9451 | |||
a0516ebc85 | |||
b425fa1230 | |||
0a485d07b3 | |||
0c3ed0a4ac | |||
09a0f940df | |||
a642ad2a03 | |||
8ce2f08589 | |||
fe0d9828bb | |||
c8e1b08c19 | |||
faf1f89d9e | |||
76f4ea5e8a | |||
2f0b311df7 | |||
d172d0d952 | |||
f8e8023aed | |||
bdc9f16d0c | |||
1b46f47fdb | |||
b539f9b655 | |||
103818733e | |||
1f1bbc0ee6 | |||
3b683e7156 | |||
b25e0fe531 | |||
cbaa5a2ef3 | |||
51dabb1724 | |||
b9339bccaa | |||
9b21893b64 | |||
b70fb43427 | |||
ae21299ade | |||
149120bebc | |||
91c2ec9856 | |||
6d0ac819f1 | |||
15a0b1dbf2 | |||
9cbf36fcb5 | |||
3a9d5f64d9 | |||
eacd7e8f76 | |||
7f2cc4dc44 | |||
5cc5a8749a | |||
ee60e18213 | |||
a4a2fac41e | |||
2f598eba09 | |||
443a102528 | |||
3bed7b4cb2 | |||
3fb424914f | |||
bb0ca28195 |
136
.github/workflows/examples.yml
vendored
136
.github/workflows/examples.yml
vendored
@ -8,95 +8,59 @@ on:
|
||||
- '**'
|
||||
|
||||
jobs:
|
||||
check:
|
||||
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: npx aegir lint
|
||||
- run: npx aegir ts -p check
|
||||
- run: npx aegir build
|
||||
test-auto-relay-example:
|
||||
needs: check
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: lts/*
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
with:
|
||||
directories: |
|
||||
./examples/node_modules
|
||||
~/.cache
|
||||
build: |
|
||||
cd examples
|
||||
npm i
|
||||
npx playwright install
|
||||
cache_name: cache-examples
|
||||
|
||||
test-example:
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
example: [
|
||||
chat,
|
||||
connection-encryption,
|
||||
discovery-mechanisms,
|
||||
echo,
|
||||
libp2p-in-the-browser,
|
||||
peer-and-content-routing,
|
||||
pnet,
|
||||
protocol-and-stream-muxing,
|
||||
pubsub,
|
||||
transports,
|
||||
webrtc-direct
|
||||
]
|
||||
fail-fast: true
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- auto-relay
|
||||
test-chat-example:
|
||||
needs: check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- chat
|
||||
test-connection-encryption-example:
|
||||
needs: check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- connection-encryption
|
||||
test-discovery-mechanisms-example:
|
||||
needs: check
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- discovery-mechanisms
|
||||
test-echo-example:
|
||||
needs: check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- echo
|
||||
test-libp2p-in-the-browser-example:
|
||||
needs: check
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- libp2p-in-the-browser
|
||||
test-peer-and-content-routing-example:
|
||||
needs: check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- peer-and-content-routing
|
||||
test-pnet-example:
|
||||
needs: check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- pnet
|
||||
test-protocol-and-stream-muxing-example:
|
||||
needs: check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- protocol-and-stream-muxing
|
||||
test-pubsub-example:
|
||||
needs: check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- pubsub
|
||||
test-transports-example:
|
||||
needs: check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- transports
|
||||
test-webrtc-direct-example:
|
||||
needs: check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd examples && npm install && npm run test -- webrtc-direct
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: lts/*
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
with:
|
||||
directories: |
|
||||
./examples/node_modules
|
||||
~/.cache
|
||||
build: |
|
||||
cd examples
|
||||
npm i
|
||||
npx playwright install
|
||||
cache_name: cache-examples
|
||||
- run: |
|
||||
cd examples
|
||||
npm run test -- ${{ matrix.example }}
|
||||
|
142
.github/workflows/main.yml
vendored
142
.github/workflows/main.yml
vendored
@ -8,63 +8,163 @@ on:
|
||||
- '**'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
node: [16]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 16
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
|
||||
check:
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 14
|
||||
- run: npm install
|
||||
node-version: lts/*
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
- run: npx aegir lint
|
||||
- uses: gozala/typescript-error-reporter-action@v1.0.8
|
||||
- run: npx aegir build
|
||||
- run: npx aegir dep-check
|
||||
- uses: ipfs/aegir/actions/bundle-size@v32.1.0
|
||||
- uses: ipfs/aegir/actions/bundle-size@master
|
||||
name: size
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
test-node:
|
||||
needs: check
|
||||
needs: build
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [windows-latest, ubuntu-latest, macos-latest]
|
||||
node: [14, 16]
|
||||
node: [16]
|
||||
fail-fast: true
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: ${{ matrix.node }}
|
||||
- run: npm install
|
||||
- run: npx aegir test -t node --cov --bail
|
||||
- uses: codecov/codecov-action@v1
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
- run: npm run test:node -- --cov --bail
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # v2.1.0
|
||||
with:
|
||||
directory: ./.nyc_output
|
||||
flags: node
|
||||
|
||||
test-chrome:
|
||||
needs: check
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: npx aegir test -t browser -t webworker --bail
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: lts/*
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
- run: npm run test:browser -- -t browser --cov --bail
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # v2.1.0
|
||||
with:
|
||||
directory: ./.nyc_output
|
||||
flags: chrome
|
||||
|
||||
test-chrome-webworker:
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: lts/*
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
- run: npm run test:browser -- -t webworker --bail
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # v2.1.0
|
||||
with:
|
||||
directory: ./.nyc_output
|
||||
flags: chrome-webworker
|
||||
|
||||
test-firefox:
|
||||
needs: check
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: npx aegir test -t browser -t webworker --bail -- --browser firefox
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: lts/*
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
- run: npm run test:browser -- -t browser --bail -- --browser firefox
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # v2.1.0
|
||||
with:
|
||||
directory: ./.nyc_output
|
||||
flags: firefox
|
||||
|
||||
test-firefox-webworker:
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: lts/*
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
- run: npm run test:browser -- -t webworker --bail -- --browser firefox
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # v2.1.0
|
||||
with:
|
||||
directory: ./.nyc_output
|
||||
flags: firefox-webworker
|
||||
|
||||
test-ts:
|
||||
needs: check
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: lts/*
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
- run: npm run test:ts
|
||||
|
||||
test-interop:
|
||||
needs: check
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: npm install
|
||||
- run: cd node_modules/interop-libp2p && yarn && LIBP2P_JS=${GITHUB_WORKSPACE}/src/index.js npx aegir test -t node --bail -- --exit
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: lts/*
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
- run: npm run test:interop -- --bail -- --exit
|
||||
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test-node, test-chrome, test-chrome-webworker, test-firefox, test-firefox-webworker, test-ts, test-interop]
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
|
||||
steps:
|
||||
- uses: GoogleCloudPlatform/release-please-action@v2
|
||||
id: release
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
release-type: node
|
||||
bump-minor-pre-major: true
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: lts/*
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
- uses: ipfs/aegir/actions/cache-node-modules@master
|
||||
- if: ${{ steps.release.outputs.release_created }}
|
||||
name: Run release version
|
||||
run: npm publish
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
- if: ${{ !steps.release.outputs.release_created }}
|
||||
name: Run release rc
|
||||
run: |
|
||||
npm version `node -p -e "require('./package.json').version"`-`git rev-parse --short HEAD` --no-git-tag-version
|
||||
npm publish --tag next
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
|
185
CHANGELOG.md
185
CHANGELOG.md
@ -1,3 +1,185 @@
|
||||
## [0.35.8](https://github.com/libp2p/js-libp2p/compare/v0.35.7...v0.35.8) (2021-12-29)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* do not wait for autodial start ([#1089](https://github.com/libp2p/js-libp2p/issues/1089)) ([79b3cfc](https://github.com/libp2p/js-libp2p/commit/79b3cfc6ad02ecc76fe23a3c3ff2d0b32a0ae4a8))
|
||||
* increase listeners on any-signal ([#1084](https://github.com/libp2p/js-libp2p/issues/1084)) ([f18fc80](https://github.com/libp2p/js-libp2p/commit/f18fc80b70bf7b6b26fffa70b0a8d0502a6c4801))
|
||||
* look for final peer event instead of peer response ([#1092](https://github.com/libp2p/js-libp2p/issues/1092)) ([d2b7ec0](https://github.com/libp2p/js-libp2p/commit/d2b7ec0f6be0ee80f2c963279a8ec2385059a889))
|
||||
* record tracked map clears ([#1085](https://github.com/libp2p/js-libp2p/issues/1085)) ([b4b4324](https://github.com/libp2p/js-libp2p/commit/b4b432406ebc08ef2fc3a1922c64cde7c9060cae))
|
||||
|
||||
|
||||
|
||||
### [0.36.2](https://www.github.com/libp2p/js-libp2p/compare/v0.36.1...v0.36.2) (2022-01-26)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* reject connections when not running ([#1146](https://www.github.com/libp2p/js-libp2p/issues/1146)) ([902f10d](https://www.github.com/libp2p/js-libp2p/commit/902f10d58d1062e812eb27aa0e2256e3fde5d3f6))
|
||||
|
||||
### [0.36.1](https://www.github.com/libp2p/js-libp2p/compare/v0.36.0...v0.36.1) (2022-01-25)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* await unhandle of protocols ([#1144](https://www.github.com/libp2p/js-libp2p/issues/1144)) ([d44bd90](https://www.github.com/libp2p/js-libp2p/commit/d44bd9094fe9545054eb8eff68f81bc52ece03e7))
|
||||
|
||||
## [0.36.0](https://www.github.com/libp2p/js-libp2p/compare/v0.35.8...v0.36.0) (2022-01-25)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* abort-controller dep is gone from dependency tree
|
||||
* `libp2p.handle`, `libp2p.registrar.register` and the peerstore methods have become async
|
||||
|
||||
### Features
|
||||
|
||||
* add fetch protocol ([#1036](https://www.github.com/libp2p/js-libp2p/issues/1036)) ([d8ceb0b](https://www.github.com/libp2p/js-libp2p/commit/d8ceb0bc66fe225d1335d3f05b9a3a30983c2a57))
|
||||
* async peerstore backed by datastores ([#1058](https://www.github.com/libp2p/js-libp2p/issues/1058)) ([978eb36](https://www.github.com/libp2p/js-libp2p/commit/978eb3676fad5d5d50ddb28d1a7868f448cbb20b))
|
||||
* connection gater ([#1142](https://www.github.com/libp2p/js-libp2p/issues/1142)) ([ff32eba](https://www.github.com/libp2p/js-libp2p/commit/ff32eba6a0fa222af1a7a46775d5e0346ad6ebdf))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* cache build artefacts ([#1091](https://www.github.com/libp2p/js-libp2p/issues/1091)) ([5043cd5](https://www.github.com/libp2p/js-libp2p/commit/5043cd56435a264e83db4fb8388d33e9a0442fff))
|
||||
* catch errors during identify ([#1138](https://www.github.com/libp2p/js-libp2p/issues/1138)) ([12f1bb0](https://www.github.com/libp2p/js-libp2p/commit/12f1bb0aeec4b639bd2af05807215f3b4284e379))
|
||||
* import uint8arrays package in example ([#1083](https://www.github.com/libp2p/js-libp2p/issues/1083)) ([c3700f5](https://www.github.com/libp2p/js-libp2p/commit/c3700f55d5a0b62182d683ca37258887b24065b9))
|
||||
* make tests more reliable ([#1139](https://www.github.com/libp2p/js-libp2p/issues/1139)) ([b7e8706](https://www.github.com/libp2p/js-libp2p/commit/b7e87066a69970f1adca4ba552c7fdf624916a7e))
|
||||
* prevent auto-dialer from dialing self ([#1104](https://www.github.com/libp2p/js-libp2p/issues/1104)) ([9b22c6e](https://www.github.com/libp2p/js-libp2p/commit/9b22c6e2f987a20c6639cd07f31fe9c824e24923))
|
||||
* remove abort-controller dep ([#1095](https://www.github.com/libp2p/js-libp2p/issues/1095)) ([0a4dc54](https://www.github.com/libp2p/js-libp2p/commit/0a4dc54d084c901df47cce1788bd5922090ee037))
|
||||
* try all peer addresses when dialing a relay ([#1140](https://www.github.com/libp2p/js-libp2p/issues/1140)) ([63aa480](https://www.github.com/libp2p/js-libp2p/commit/63aa480800974515f44d3b7e013da9c8ccaae8ad))
|
||||
* update any-signal and timeout-abort-controller ([#1128](https://www.github.com/libp2p/js-libp2p/issues/1128)) ([e0354b4](https://www.github.com/libp2p/js-libp2p/commit/e0354b4c6b95bb90656b868849182eb3efddf096))
|
||||
* update multistream select ([#1136](https://www.github.com/libp2p/js-libp2p/issues/1136)) ([00e4959](https://www.github.com/libp2p/js-libp2p/commit/00e49592a356e39b20c889d5f40b9bb37d4bf293))
|
||||
* update node-forge ([#1133](https://www.github.com/libp2p/js-libp2p/issues/1133)) ([a4bba35](https://www.github.com/libp2p/js-libp2p/commit/a4bba35948e1cd8dbe5147f2c8d6385b1fbb6fae))
|
||||
|
||||
## [0.35.7](https://github.com/libp2p/js-libp2p/compare/v0.35.2...v0.35.7) (2021-12-24)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add tracked map ([#1069](https://github.com/libp2p/js-libp2p/issues/1069)) ([b425fa1](https://github.com/libp2p/js-libp2p/commit/b425fa12304def2a007d43a0aa445c28b766ed02))
|
||||
* clean up pending dial targets ([#1059](https://github.com/libp2p/js-libp2p/issues/1059)) ([bdc9f16](https://github.com/libp2p/js-libp2p/commit/bdc9f16d0cbe56ccf26822f11068e7795bcef046))
|
||||
* fix uncaught promise rejection when finding peers ([#1044](https://github.com/libp2p/js-libp2p/issues/1044)) ([3b683e7](https://github.com/libp2p/js-libp2p/commit/3b683e715686163e229b7b5c3a892327dfd4fc63))
|
||||
* increase the maxlisteners for timeout controllers ([#1065](https://github.com/libp2p/js-libp2p/issues/1065)) ([09a0f94](https://github.com/libp2p/js-libp2p/commit/09a0f940df7fdb4ece34604e85693709df5c213e))
|
||||
* main ci ([#1079](https://github.com/libp2p/js-libp2p/issues/1079)) ([d1c48dc](https://github.com/libp2p/js-libp2p/commit/d1c48dcbeded828f2dd3044cc9aed3f17f02846d))
|
||||
* make error codes consistent ([#1054](https://github.com/libp2p/js-libp2p/issues/1054)) ([b25e0fe](https://github.com/libp2p/js-libp2p/commit/b25e0fe5312db58a06c39500ae84c50fed3a93bd))
|
||||
* type definitions for big dialrequest and persistent peerstore ([#1078](https://github.com/libp2p/js-libp2p/issues/1078)) ([cb0d7d6](https://github.com/libp2p/js-libp2p/commit/cb0d7d6c99d179498f04e76df76e70e4f7d41c4c))
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* allow per-component metrics to be collected ([#1061](https://github.com/libp2p/js-libp2p/issues/1061)) ([2f0b311](https://github.com/libp2p/js-libp2p/commit/2f0b311df7127aa44512c2008142d4ca30268986)), closes [#1060](https://github.com/libp2p/js-libp2p/issues/1060)
|
||||
|
||||
|
||||
|
||||
## [0.35.6](https://github.com/libp2p/js-libp2p/compare/v0.35.5...v0.35.6) (2021-12-18)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* increase the maxlisteners for timeout controllers ([#1065](https://github.com/libp2p/js-libp2p/issues/1065)) ([09a0f94](https://github.com/libp2p/js-libp2p/commit/09a0f940df7fdb4ece34604e85693709df5c213e))
|
||||
|
||||
|
||||
|
||||
## [0.35.5](https://github.com/libp2p/js-libp2p/compare/v0.35.4...v0.35.5) (2021-12-15)
|
||||
|
||||
|
||||
|
||||
## [0.35.4](https://github.com/libp2p/js-libp2p/compare/v0.35.3...v0.35.4) (2021-12-15)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* allow per-component metrics to be collected ([#1061](https://github.com/libp2p/js-libp2p/issues/1061)) ([2f0b311](https://github.com/libp2p/js-libp2p/commit/2f0b311df7127aa44512c2008142d4ca30268986)), closes [#1060](https://github.com/libp2p/js-libp2p/issues/1060)
|
||||
|
||||
|
||||
|
||||
## [0.35.3](https://github.com/libp2p/js-libp2p/compare/v0.35.2...v0.35.3) (2021-12-13)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* clean up pending dial targets ([#1059](https://github.com/libp2p/js-libp2p/issues/1059)) ([bdc9f16](https://github.com/libp2p/js-libp2p/commit/bdc9f16d0cbe56ccf26822f11068e7795bcef046))
|
||||
* fix uncaught promise rejection when finding peers ([#1044](https://github.com/libp2p/js-libp2p/issues/1044)) ([3b683e7](https://github.com/libp2p/js-libp2p/commit/3b683e715686163e229b7b5c3a892327dfd4fc63))
|
||||
* make error codes consistent ([#1054](https://github.com/libp2p/js-libp2p/issues/1054)) ([b25e0fe](https://github.com/libp2p/js-libp2p/commit/b25e0fe5312db58a06c39500ae84c50fed3a93bd))
|
||||
|
||||
|
||||
|
||||
## [0.35.2](https://github.com/libp2p/js-libp2p/compare/v0.33.0...v0.35.2) (2021-12-06)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* do not let closest peers run forever ([#1047](https://github.com/libp2p/js-libp2p/issues/1047)) ([91c2ec9](https://github.com/libp2p/js-libp2p/commit/91c2ec9856a3e972b7b2c9c4d9a4eda1d431c7ef))
|
||||
* increase maxlisteners on event target ([#1050](https://github.com/libp2p/js-libp2p/issues/1050)) ([b70fb43](https://github.com/libp2p/js-libp2p/commit/b70fb43427b47df079b55929ec8956f69cbda966)), closes [#900](https://github.com/libp2p/js-libp2p/issues/900)
|
||||
* private ip ts compile has no call signatures ([#1020](https://github.com/libp2p/js-libp2p/issues/1020)) ([77d7cb8](https://github.com/libp2p/js-libp2p/commit/77d7cb8f0815f2cdd3bfdfa8b641a7a186fe9520))
|
||||
* stop dht before connection manager ([#1041](https://github.com/libp2p/js-libp2p/issues/1041)) ([3a9d5f6](https://github.com/libp2p/js-libp2p/commit/3a9d5f64d96719ebb4d3b083c4f5832db4fa0816)), closes [#1039](https://github.com/libp2p/js-libp2p/issues/1039)
|
||||
|
||||
|
||||
### chore
|
||||
|
||||
* update peer id and libp2p crypto ([#1042](https://github.com/libp2p/js-libp2p/issues/1042)) ([9cbf36f](https://github.com/libp2p/js-libp2p/commit/9cbf36fcb54099e6fed35ceccc4a2376f0926c1f))
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* update dht ([#1009](https://github.com/libp2p/js-libp2p/issues/1009)) ([2f598eb](https://github.com/libp2p/js-libp2p/commit/2f598eba09cff4301474af08196158065e3602d8))
|
||||
|
||||
|
||||
### BREAKING CHANGES
|
||||
|
||||
* requires node 15+
|
||||
* libp2p-kad-dht has a new event-based API which is exposed as `_dht`
|
||||
|
||||
|
||||
|
||||
## [0.35.1](https://github.com/libp2p/js-libp2p/compare/v0.35.0...v0.35.1) (2021-12-03)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* do not let closest peers run forever ([#1047](https://github.com/libp2p/js-libp2p/issues/1047)) ([91c2ec9](https://github.com/libp2p/js-libp2p/commit/91c2ec9856a3e972b7b2c9c4d9a4eda1d431c7ef))
|
||||
|
||||
|
||||
|
||||
# [0.35.0](https://github.com/libp2p/js-libp2p/compare/v0.34.0...v0.35.0) (2021-12-02)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* stop dht before connection manager ([#1041](https://github.com/libp2p/js-libp2p/issues/1041)) ([3a9d5f6](https://github.com/libp2p/js-libp2p/commit/3a9d5f64d96719ebb4d3b083c4f5832db4fa0816)), closes [#1039](https://github.com/libp2p/js-libp2p/issues/1039)
|
||||
|
||||
|
||||
### chore
|
||||
|
||||
* update peer id and libp2p crypto ([#1042](https://github.com/libp2p/js-libp2p/issues/1042)) ([9cbf36f](https://github.com/libp2p/js-libp2p/commit/9cbf36fcb54099e6fed35ceccc4a2376f0926c1f))
|
||||
|
||||
|
||||
### BREAKING CHANGES
|
||||
|
||||
* requires node 15+
|
||||
|
||||
|
||||
|
||||
# [0.34.0](https://github.com/libp2p/js-libp2p/compare/v0.33.0...v0.34.0) (2021-11-25)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* private ip ts compile has no call signatures ([#1020](https://github.com/libp2p/js-libp2p/issues/1020)) ([77d7cb8](https://github.com/libp2p/js-libp2p/commit/77d7cb8f0815f2cdd3bfdfa8b641a7a186fe9520))
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* update dht ([#1009](https://github.com/libp2p/js-libp2p/issues/1009)) ([2f598eb](https://github.com/libp2p/js-libp2p/commit/2f598eba09cff4301474af08196158065e3602d8))
|
||||
|
||||
|
||||
### BREAKING CHANGES
|
||||
|
||||
* libp2p-kad-dht has a new event-based API which is exposed as `_dht`
|
||||
|
||||
|
||||
|
||||
# [0.33.0](https://github.com/libp2p/js-libp2p/compare/v0.32.5...v0.33.0) (2021-09-24)
|
||||
|
||||
|
||||
@ -1621,6 +1803,3 @@ for subscribe to see how it should be used.
|
||||
|
||||
<a name="0.5.5"></a>
|
||||
## [0.5.5](https://github.com/libp2p/js-libp2p/compare/v0.5.4...v0.5.5) (2017-03-21)
|
||||
|
||||
|
||||
|
||||
|
@ -23,8 +23,8 @@
|
||||
<a href="https://david-dm.org/libp2p/js-libp2p"><img src="https://david-dm.org/libp2p/js-libp2p.svg?style=flat-square" /></a>
|
||||
<a href="https://github.com/feross/standard"><img src="https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square"></a>
|
||||
<a href="https://github.com/RichardLitt/standard-readme"><img src="https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square" /></a>
|
||||
<a href=""><img src="https://img.shields.io/badge/npm-%3E%3D6.0.0-orange.svg?style=flat-square" /></a>
|
||||
<a href=""><img src="https://img.shields.io/badge/Node.js-%3E%3D12.0.0-orange.svg?style=flat-square" /></a>
|
||||
<a href=""><img src="https://img.shields.io/badge/npm-%3E%3D7.0.0-orange.svg?style=flat-square" /></a>
|
||||
<a href=""><img src="https://img.shields.io/badge/Node.js-%3E%3D15.0.0-orange.svg?style=flat-square" /></a>
|
||||
<br>
|
||||
</p>
|
||||
|
||||
|
72
doc/API.md
72
doc/API.md
@ -3,6 +3,7 @@
|
||||
* [Static Functions](#static-functions)
|
||||
* [`create`](#create)
|
||||
* [Instance Methods](#libp2p-instance-methods)
|
||||
* [`loadkeychain`](#loadkeychain)
|
||||
* [`start`](#start)
|
||||
* [`stop`](#stop)
|
||||
* [`dial`](#dial)
|
||||
@ -11,6 +12,9 @@
|
||||
* [`handle`](#handle)
|
||||
* [`unhandle`](#unhandle)
|
||||
* [`ping`](#ping)
|
||||
* [`fetch`](#fetch)
|
||||
* [`fetchService.registerLookupFunction`](#fetchserviceregisterlookupfunction)
|
||||
* [`fetchService.unRegisterLookupFunction`](#fetchserviceunregisterlookupfunction)
|
||||
* [`multiaddrs`](#multiaddrs)
|
||||
* [`addressManager.getListenAddrs`](#addressmanagergetlistenaddrs)
|
||||
* [`addressManager.getAnnounceAddrs`](#addressmanagergetannounceaddrs)
|
||||
@ -454,6 +458,72 @@ Pings a given peer and get the operation's latency.
|
||||
const latency = await libp2p.ping(otherPeerId)
|
||||
```
|
||||
|
||||
## fetch
|
||||
|
||||
Fetch a value from a remote node
|
||||
|
||||
`libp2p.fetch(peer, key)`
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| peer | [`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | peer to ping |
|
||||
| key | `string` | A key that corresponds to a value on the remote node |
|
||||
|
||||
#### Returns
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Promise<Uint8Array | null>` | The value for the key or null if it cannot be found |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
// ...
|
||||
const value = await libp2p.fetch(otherPeerId, '/some/key')
|
||||
```
|
||||
|
||||
## fetchService.registerLookupFunction
|
||||
|
||||
Register a function to look up values requested by remote nodes
|
||||
|
||||
`libp2p.fetchService.registerLookupFunction(prefix, lookup)`
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| prefix | `string` | All queries below this prefix will be passed to the lookup function |
|
||||
| lookup | `(key: string) => Promise<Uint8Array | null>` | A function that takes a key and returns a Uint8Array or null |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
// ...
|
||||
const value = await libp2p.fetchService.registerLookupFunction('/prefix', (key) => { ... })
|
||||
```
|
||||
|
||||
## fetchService.unregisterLookupFunction
|
||||
|
||||
Removes the passed lookup function or any function registered for the passed prefix
|
||||
|
||||
`libp2p.fetchService.unregisterLookupFunction(prefix, lookup)`
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| prefix | `string` | All queries below this prefix will be passed to the lookup function |
|
||||
| lookup | `(key: string) => Promise<Uint8Array | null>` | Optional: A function that takes a key and returns a Uint8Array or null |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
// ...
|
||||
libp2p.fetchService.unregisterLookupFunction('/prefix')
|
||||
```
|
||||
|
||||
## multiaddrs
|
||||
|
||||
Gets the multiaddrs the libp2p node announces to the network. This computes the advertising multiaddrs
|
||||
@ -2086,7 +2156,7 @@ the NatManager performing NAT hole punching.
|
||||
|
||||
[address]: https://github.com/libp2p/js-libp2p/tree/master/src/peer-store/address-book.js
|
||||
[cid]: https://github.com/multiformats/js-cid
|
||||
[connection]: https://github.com/libp2p/js-interfaces/tree/master/src/connection
|
||||
[connection]: https://github.com/libp2p/js-libp2p-interfaces/tree/master/packages/interfaces/src/connection
|
||||
[multiaddr]: https://github.com/multiformats/js-multiaddr
|
||||
[peer-id]: https://github.com/libp2p/js-peer-id
|
||||
[keys]: https://github.com/libp2p/js-libp2p-crypto/tree/master/src/keys
|
||||
|
@ -1,37 +1,40 @@
|
||||
#
|
||||
#
|
||||
|
||||
- [Configuration](#configuration)
|
||||
- [Overview](#overview)
|
||||
- [Modules](#modules)
|
||||
- [Transport](#transport)
|
||||
- [Stream Multiplexing](#stream-multiplexing)
|
||||
- [Connection Encryption](#connection-encryption)
|
||||
- [Peer Discovery](#peer-discovery)
|
||||
- [Content Routing](#content-routing)
|
||||
- [Peer Routing](#peer-routing)
|
||||
- [DHT](#dht)
|
||||
- [Pubsub](#pubsub)
|
||||
- [Customizing libp2p](#customizing-libp2p)
|
||||
- [Examples](#examples)
|
||||
- [Basic setup](#basic-setup)
|
||||
- [Customizing Peer Discovery](#customizing-peer-discovery)
|
||||
- [Setup webrtc transport and discovery](#setup-webrtc-transport-and-discovery)
|
||||
- [Customizing Pubsub](#customizing-pubsub)
|
||||
- [Customizing DHT](#customizing-dht)
|
||||
- [Setup with Content and Peer Routing](#setup-with-content-and-peer-routing)
|
||||
- [Setup with Relay](#setup-with-relay)
|
||||
- [Setup with Auto Relay](#setup-with-auto-relay)
|
||||
- [Setup with Keychain](#setup-with-keychain)
|
||||
- [Configuring Dialing](#configuring-dialing)
|
||||
- [Configuring Connection Manager](#configuring-connection-manager)
|
||||
- [Configuring Transport Manager](#configuring-transport-manager)
|
||||
- [Configuring Metrics](#configuring-metrics)
|
||||
- [Configuring PeerStore](#configuring-peerstore)
|
||||
- [Customizing Transports](#customizing-transports)
|
||||
- [Configuring the NAT Manager](#configuring-the-nat-manager)
|
||||
- [Browser support](#browser-support)
|
||||
- [UPnP and NAT-PMP](#upnp-and-nat-pmp)
|
||||
- [Configuration examples](#configuration-examples)
|
||||
- [Overview](#overview)
|
||||
- [Modules](#modules)
|
||||
- [Transport](#transport)
|
||||
- [Stream Multiplexing](#stream-multiplexing)
|
||||
- [Connection Encryption](#connection-encryption)
|
||||
- [Peer Discovery](#peer-discovery)
|
||||
- [Content Routing](#content-routing)
|
||||
- [Peer Routing](#peer-routing)
|
||||
- [DHT](#dht)
|
||||
- [Pubsub](#pubsub)
|
||||
- [Customizing libp2p](#customizing-libp2p)
|
||||
- [Examples](#examples)
|
||||
- [Basic setup](#basic-setup)
|
||||
- [Customizing Peer Discovery](#customizing-peer-discovery)
|
||||
- [Setup webrtc transport and discovery](#setup-webrtc-transport-and-discovery)
|
||||
- [Customizing Pubsub](#customizing-pubsub)
|
||||
- [Customizing DHT](#customizing-dht)
|
||||
- [Setup with Content and Peer Routing](#setup-with-content-and-peer-routing)
|
||||
- [Setup with Relay](#setup-with-relay)
|
||||
- [Setup with Auto Relay](#setup-with-auto-relay)
|
||||
- [Setup with Keychain](#setup-with-keychain)
|
||||
- [Configuring Dialing](#configuring-dialing)
|
||||
- [Configuring Connection Manager](#configuring-connection-manager)
|
||||
- [Configuring Connection Gater](#configuring-connection-gater)
|
||||
- [Outgoing connections](#outgoing-connections)
|
||||
- [Incoming connections](#incoming-connections)
|
||||
- [Configuring Transport Manager](#configuring-transport-manager)
|
||||
- [Configuring Metrics](#configuring-metrics)
|
||||
- [Configuring PeerStore](#configuring-peerstore)
|
||||
- [Customizing Transports](#customizing-transports)
|
||||
- [Configuring the NAT Manager](#configuring-the-nat-manager)
|
||||
- [Browser support](#browser-support)
|
||||
- [UPnP and NAT-PMP](#upnp-and-nat-pmp)
|
||||
- [Configuring protocol name](#configuring-protocol-name)
|
||||
- [Configuration examples](#configuration-examples)
|
||||
|
||||
## Overview
|
||||
|
||||
@ -374,11 +377,7 @@ const node = await Libp2p.create({
|
||||
dht: { // The DHT options (and defaults) can be found in its documentation
|
||||
kBucketSize: 20,
|
||||
enabled: true, // This flag is required for DHT to run (disabled by default)
|
||||
randomWalk: {
|
||||
enabled: true, // Allows to disable discovery (enabled by default)
|
||||
interval: 300e3,
|
||||
timeout: 10e3
|
||||
}
|
||||
clientMode: false // Whether to run the WAN DHT in client or server mode (default: client mode)
|
||||
}
|
||||
}
|
||||
})
|
||||
@ -501,9 +500,9 @@ const Libp2p = require('libp2p')
|
||||
const TCP = require('libp2p-tcp')
|
||||
const MPLEX = require('libp2p-mplex')
|
||||
const { NOISE } = require('libp2p-noise')
|
||||
const LevelStore = require('datastore-level')
|
||||
const { LevelDatastore } = require('datastore-level')
|
||||
|
||||
const datastore = new LevelStore('path/to/store')
|
||||
const datastore = new LevelDatastore('path/to/store')
|
||||
await datastore.open()
|
||||
|
||||
const node = await Libp2p.create({
|
||||
@ -594,9 +593,130 @@ const node = await Libp2p.create({
|
||||
})
|
||||
```
|
||||
|
||||
#### Configuring Connection Gater
|
||||
|
||||
The Connection Gater allows us to prevent making incoming and outgoing connections to peers and storing
|
||||
multiaddrs in the address book.
|
||||
|
||||
The order in which methods are called is as follows:
|
||||
|
||||
##### Outgoing connections
|
||||
|
||||
1. `connectionGater.denyDialPeer(...)`
|
||||
2. `connectionGater.denyDialMultiaddr(...)`
|
||||
3. `connectionGater.denyOutboundConnection(...)`
|
||||
4. `connectionGater.denyOutboundEncryptedConnection(...)`
|
||||
5. `connectionGater.denyOutboundUpgradedConnection(...)`
|
||||
|
||||
##### Incoming connections
|
||||
|
||||
1. `connectionGater.denyInboundConnection(...)`
|
||||
2. `connectionGater.denyInboundEncryptedConnection(...)`
|
||||
3. `connectionGater.denyInboundUpgradedConnection(...)`
|
||||
|
||||
```js
|
||||
const node = await Libp2p.create({
|
||||
// .. other config
|
||||
connectionGater: {
|
||||
/**
|
||||
* denyDialMultiaddr tests whether we're permitted to Dial the
|
||||
* specified peer.
|
||||
*
|
||||
* This is called by the dialer.connectToPeer implementation before
|
||||
* dialling a peer.
|
||||
*
|
||||
* Return true to prevent dialing the passed peer.
|
||||
*/
|
||||
denyDialPeer: (peerId: PeerId) => Promise<boolean>
|
||||
|
||||
/**
|
||||
* denyDialMultiaddr tests whether we're permitted to dial the specified
|
||||
* multiaddr for the given peer.
|
||||
*
|
||||
* This is called by the dialer.connectToPeer implementation after it has
|
||||
* resolved the peer's addrs, and prior to dialling each.
|
||||
*
|
||||
* Return true to prevent dialing the passed peer on the passed multiaddr.
|
||||
*/
|
||||
denyDialMultiaddr: (peerId: PeerId, multiaddr: Multiaddr) => Promise<boolean>
|
||||
|
||||
/**
|
||||
* denyInboundConnection tests whether an incipient inbound connection is allowed.
|
||||
*
|
||||
* This is called by the upgrader, or by the transport directly (e.g. QUIC,
|
||||
* Bluetooth), straight after it has accepted a connection from its socket.
|
||||
*
|
||||
* Return true to deny the incoming passed connection.
|
||||
*/
|
||||
denyInboundConnection: (maConn: MultiaddrConnection) => Promise<boolean>
|
||||
|
||||
/**
|
||||
* denyOutboundConnection tests whether an incipient outbound connection is allowed.
|
||||
*
|
||||
* This is called by the upgrader, or by the transport directly (e.g. QUIC,
|
||||
* Bluetooth), straight after it has created a connection with its socket.
|
||||
*
|
||||
* Return true to deny the incoming passed connection.
|
||||
*/
|
||||
denyOutboundConnection: (peerId: PeerId, maConn: MultiaddrConnection) => Promise<boolean>
|
||||
|
||||
/**
|
||||
* denyInboundEncryptedConnection tests whether a given connection, now encrypted,
|
||||
* is allowed.
|
||||
*
|
||||
* This is called by the upgrader, after it has performed the security
|
||||
* handshake, and before it negotiates the muxer, or by the directly by the
|
||||
* transport, at the exact same checkpoint.
|
||||
*
|
||||
* Return true to deny the passed secured connection.
|
||||
*/
|
||||
denyInboundEncryptedConnection: (peerId: PeerId, maConn: MultiaddrConnection) => Promise<boolean>
|
||||
|
||||
/**
|
||||
* denyOutboundEncryptedConnection tests whether a given connection, now encrypted,
|
||||
* is allowed.
|
||||
*
|
||||
* This is called by the upgrader, after it has performed the security
|
||||
* handshake, and before it negotiates the muxer, or by the directly by the
|
||||
* transport, at the exact same checkpoint.
|
||||
*
|
||||
* Return true to deny the passed secured connection.
|
||||
*/
|
||||
denyOutboundEncryptedConnection: (peerId: PeerId, maConn: MultiaddrConnection) => Promise<boolean>
|
||||
|
||||
/**
|
||||
* denyInboundUpgradedConnection tests whether a fully capable connection is allowed.
|
||||
*
|
||||
* This is called after encryption has been negotiated and the connection has been
|
||||
* multiplexed, if a multiplexer is configured.
|
||||
*
|
||||
* Return true to deny the passed upgraded connection.
|
||||
*/
|
||||
denyInboundUpgradedConnection: (peerId: PeerId, maConn: MultiaddrConnection) => Promise<boolean>
|
||||
|
||||
/**
|
||||
* denyOutboundUpgradedConnection tests whether a fully capable connection is allowed.
|
||||
*
|
||||
* This is called after encryption has been negotiated and the connection has been
|
||||
* multiplexed, if a multiplexer is configured.
|
||||
*
|
||||
* Return true to deny the passed upgraded connection.
|
||||
*/
|
||||
denyOutboundUpgradedConnection: (peerId: PeerId, maConn: MultiaddrConnection) => Promise<boolean>
|
||||
|
||||
/**
|
||||
* Used by the address book to filter passed addresses.
|
||||
*
|
||||
* Return true to allow storing the passed multiaddr for the passed peer.
|
||||
*/
|
||||
filterMultiaddrForPeer: (peer: PeerId, multiaddr: Multiaddr) => Promise<boolean>
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
#### Configuring Transport Manager
|
||||
|
||||
The Transport Manager is responsible for managing the libp2p transports life cycle. This includes starting listeners for the provided listen addresses, closing these listeners and dialing using the provided transports. By default, if a libp2p node has a list of multiaddrs for listenning on and there are no valid transports for those multiaddrs, libp2p will throw an error on startup and shutdown. However, for some applications it is perfectly acceptable for libp2p nodes to start in dial only mode if all the listen multiaddrs failed. This error tolerance can be enabled as follows:
|
||||
The Transport Manager is responsible for managing the libp2p transports life cycle. This includes starting listeners for the provided listen addresses, closing these listeners and dialing using the provided transports. By default, if a libp2p node has a list of multiaddrs for listening on and there are no valid transports for those multiaddrs, libp2p will throw an error on startup and shutdown. However, for some applications it is perfectly acceptable for libp2p nodes to start in dial only mode if all the listen multiaddrs failed. This error tolerance can be enabled as follows:
|
||||
|
||||
```js
|
||||
const Libp2p = require('libp2p')
|
||||
@ -676,18 +796,18 @@ const Libp2p = require('libp2p')
|
||||
const TCP = require('libp2p-tcp')
|
||||
const MPLEX = require('libp2p-mplex')
|
||||
const { NOISE } = require('libp2p-noise')
|
||||
const LevelStore = require('datastore-level')
|
||||
const LevelDatastore = require('datastore-level')
|
||||
|
||||
const datastore = new LevelStore('path/to/store')
|
||||
const dsInstant = await datastore.open()
|
||||
const datastore = new LevelDatastore('path/to/store')
|
||||
await datastore.open() // level database must be ready before node boot
|
||||
|
||||
const node = await Libp2p.create({
|
||||
datastore, // pass the opened datastore
|
||||
modules: {
|
||||
transport: [TCP],
|
||||
streamMuxer: [MPLEX],
|
||||
connEncryption: [NOISE]
|
||||
},
|
||||
datastore: dsInstant,
|
||||
peerStore: {
|
||||
persistence: true,
|
||||
threshold: 5
|
||||
@ -788,7 +908,7 @@ By default under nodejs libp2p will attempt to use [UPnP](https://en.wikipedia.o
|
||||
|
||||
#### Configuring protocol name
|
||||
|
||||
Changing the protocol name prefix can isolate default public network (IPFS) for custom purposes.
|
||||
Changing the protocol name prefix can isolate default public network (IPFS) for custom purposes.
|
||||
|
||||
```js
|
||||
const node = await Libp2p.create({
|
||||
@ -810,8 +930,8 @@ protocols: [
|
||||
|
||||
As libp2p is designed to be a modular networking library, its usage will vary based on individual project needs. We've included links to some existing project configurations for your reference, in case you wish to replicate their configuration:
|
||||
|
||||
- [libp2p-ipfs-nodejs](https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs/src/core/runtime/libp2p-nodejs.js) - libp2p configuration used by js-ipfs when running in Node.js
|
||||
- [libp2p-ipfs-browser](https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs/src/core/runtime/libp2p-browser.js) - libp2p configuration used by js-ipfs when running in a Browser (that supports WebRTC)
|
||||
- [libp2p-ipfs-nodejs](https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs-core-config/src/libp2p.js) - libp2p configuration used by js-ipfs when running in Node.js
|
||||
- [libp2p-ipfs-browser](https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs-core-config/src/libp2p.browser.js) - libp2p configuration used by js-ipfs when running in a Browser (that supports WebRTC)
|
||||
|
||||
If you have developed a project using `js-libp2p`, please consider submitting your configuration to this list so that it can be found easily by other users.
|
||||
|
||||
|
@ -30,7 +30,7 @@ const createNode = async () => {
|
||||
createNode()
|
||||
])
|
||||
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
|
||||
node2.handle('/a-protocol', ({ stream }) => {
|
||||
pipe(
|
||||
|
@ -1,30 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pDefer = require('p-defer')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
async function test () {
|
||||
const messageReceived = pDefer()
|
||||
process.stdout.write('1.js\n')
|
||||
|
||||
const proc = execa('node', [path.join(__dirname, '1.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('This information is sent out encrypted to the other peer', 'node', [path.join(__dirname, '1.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
|
||||
const s = uint8ArrayToString(data)
|
||||
if (s.includes('This information is sent out encrypted to the other peer')) {
|
||||
messageReceived.resolve()
|
||||
}
|
||||
})
|
||||
|
||||
await messageReceived.promise
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@ -3,16 +3,16 @@
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"ipfs": "~0.34.4",
|
||||
"libp2p": "github:libp2p/js-libp2p#master",
|
||||
"libp2p-delegated-content-routing": "~0.2.2",
|
||||
"libp2p-delegated-peer-routing": "~0.2.2",
|
||||
"libp2p-kad-dht": "~0.14.12",
|
||||
"libp2p-mplex": "~0.8.5",
|
||||
"libp2p-secio": "~0.11.1",
|
||||
"libp2p-webrtc-star": "~0.15.8",
|
||||
"libp2p-websocket-star": "~0.10.2",
|
||||
"libp2p-websockets": "~0.12.2",
|
||||
"@chainsafe/libp2p-noise": "^5.0.2",
|
||||
"ipfs-core": "^0.13.0",
|
||||
"libp2p": "../../",
|
||||
"libp2p-delegated-content-routing": "^0.11.0",
|
||||
"libp2p-delegated-peer-routing": "^0.11.1",
|
||||
"libp2p-kad-dht": "^0.28.6",
|
||||
"libp2p-mplex": "^0.10.4",
|
||||
"libp2p-webrtc-star": "^0.25.0",
|
||||
"libp2p-websocket-star": "^0.10.2",
|
||||
"libp2p-websockets": "^0.16.2",
|
||||
"react": "^16.8.6",
|
||||
"react-dom": "^16.8.6",
|
||||
"react-scripts": "2.1.8"
|
||||
|
@ -2,7 +2,7 @@
|
||||
'use strict'
|
||||
|
||||
import React from 'react'
|
||||
import Ipfs from 'ipfs'
|
||||
import Ipfs from 'ipfs-core'
|
||||
import libp2pBundle from './libp2p-bundle'
|
||||
const Component = React.Component
|
||||
|
||||
@ -70,7 +70,7 @@ class App extends Component {
|
||||
}
|
||||
|
||||
componentDidMount () {
|
||||
window.ipfs = this.ipfs = new Ipfs({
|
||||
window.ipfs = this.ipfs = Ipfs.create({
|
||||
config: {
|
||||
Addresses: {
|
||||
Swarm: []
|
||||
|
@ -6,7 +6,7 @@ const Websockets = require('libp2p-websockets')
|
||||
const WebSocketStar = require('libp2p-websocket-star')
|
||||
const WebRTCStar = require('libp2p-webrtc-star')
|
||||
const MPLEX = require('libp2p-mplex')
|
||||
const SECIO = require('libp2p-secio')
|
||||
const { NOISE } = require('@chainsafe/libp2p-noise')
|
||||
const KadDHT = require('libp2p-kad-dht')
|
||||
const DelegatedPeerRouter = require('libp2p-delegated-peer-routing')
|
||||
const DelegatedContentRouter = require('libp2p-delegated-content-routing')
|
||||
@ -48,7 +48,7 @@ export default function Libp2pBundle ({peerInfo, peerBook}) {
|
||||
MPLEX
|
||||
],
|
||||
connEncryption: [
|
||||
SECIO
|
||||
NOISE
|
||||
],
|
||||
dht: KadDHT
|
||||
},
|
||||
|
@ -5,7 +5,7 @@ const Libp2p = require('../../')
|
||||
const TCP = require('libp2p-tcp')
|
||||
const Mplex = require('libp2p-mplex')
|
||||
const { NOISE } = require('@chainsafe/libp2p-noise')
|
||||
const Gossipsub = require('libp2p-gossipsub')
|
||||
const Gossipsub = require('@achingbrain/libp2p-gossipsub')
|
||||
const Bootstrap = require('libp2p-bootstrap')
|
||||
const PubsubPeerDiscovery = require('libp2p-pubsub-peer-discovery')
|
||||
|
||||
|
@ -55,7 +55,7 @@ const node = await Libp2p.create({
|
||||
peerId,
|
||||
addresses: {
|
||||
listen: ['/ip4/0.0.0.0/tcp/0']
|
||||
}
|
||||
},
|
||||
modules: {
|
||||
transport: [ TCP ],
|
||||
streamMuxer: [ Mplex ],
|
||||
@ -117,7 +117,7 @@ const createNode = () => {
|
||||
return Libp2p.create({
|
||||
addresses: {
|
||||
listen: ['/ip4/0.0.0.0/tcp/0']
|
||||
}
|
||||
},
|
||||
modules: {
|
||||
transport: [ TCP ],
|
||||
streamMuxer: [ Mplex ],
|
||||
@ -144,8 +144,13 @@ const [node1, node2] = await Promise.all([
|
||||
createNode()
|
||||
])
|
||||
|
||||
node1.on('peer:discovery', (peer) => console.log('Discovered:', peer.id.toB58String()))
|
||||
node2.on('peer:discovery', (peer) => console.log('Discovered:', peer.id.toB58String()))
|
||||
node1.on('peer:discovery', (peer) => console.log('Discovered:', peerId.toB58String()))
|
||||
node2.on('peer:discovery', (peer) => console.log('Discovered:', peerId.toB58String()))
|
||||
|
||||
await Promise.all([
|
||||
node1.start(),
|
||||
node2.start()
|
||||
])
|
||||
```
|
||||
|
||||
If you run this example, you will see the other peers being discovered.
|
||||
|
@ -1,13 +1,13 @@
|
||||
'use strict'
|
||||
|
||||
// Find this list at: https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs-core/src/runtime/config-nodejs.js
|
||||
// Find this list at: https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs-core-config/src/config.js
|
||||
const bootstrapers = [
|
||||
'/ip4/104.131.131.82/tcp/4001/p2p/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ',
|
||||
'/dnsaddr/bootstrap.libp2p.io/p2p/QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJN',
|
||||
'/dnsaddr/bootstrap.libp2p.io/p2p/QmbLHAnMoJPWSCR5Zhtx6BHJX9KiKNN6tpvbUcqanj75Nb',
|
||||
'/dnsaddr/bootstrap.libp2p.io/p2p/QmZa1sAxajnQjVM8WjWXoMbmPd7NsWhfKsPkErzpm9wGkp',
|
||||
'/dnsaddr/bootstrap.libp2p.io/p2p/QmQCU2EcMqAqQPR2i9bChDtGNJchTbq5TbXJJ16u19uLTa',
|
||||
'/dnsaddr/bootstrap.libp2p.io/p2p/QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt'
|
||||
'/dnsaddr/bootstrap.libp2p.io/p2p/QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt',
|
||||
]
|
||||
|
||||
module.exports = bootstrapers
|
||||
|
@ -1,42 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pWaitFor = require('p-wait-for')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const bootstrapers = require('./bootstrapers')
|
||||
|
||||
const discoveredCopy = 'Discovered:'
|
||||
const connectedCopy = 'Connection established to:'
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
async function test () {
|
||||
const discoveredNodes = []
|
||||
const connectedNodes = []
|
||||
|
||||
process.stdout.write('1.js\n')
|
||||
|
||||
const proc = execa('node', [path.join(__dirname, '1.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('Connection established to:', 'node', [path.join(__dirname, '1.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
const line = uint8ArrayToString(data)
|
||||
|
||||
// Discovered or Connected
|
||||
if (line.includes(discoveredCopy)) {
|
||||
const id = line.trim().split(discoveredCopy)[1]
|
||||
discoveredNodes.push(id)
|
||||
} else if (line.includes(connectedCopy)) {
|
||||
const id = line.trim().split(connectedCopy)[1]
|
||||
connectedNodes.push(id)
|
||||
}
|
||||
})
|
||||
|
||||
await pWaitFor(() => discoveredNodes.length === bootstrapers.length && connectedNodes.length === bootstrapers.length)
|
||||
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@ -1,4 +1,3 @@
|
||||
{
|
||||
"presets": ["@babel/preset-env"],
|
||||
"plugins": ["syntax-async-functions","transform-regenerator"]
|
||||
}
|
@ -14,12 +14,11 @@
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@babel/preset-env": "^7.13.0",
|
||||
"@chainsafe/libp2p-noise": "^5.0.2",
|
||||
"libp2p": "../../",
|
||||
"libp2p-bootstrap": "^0.13.0",
|
||||
"libp2p-bootstrap": "^0.14.0",
|
||||
"libp2p-mplex": "^0.10.4",
|
||||
"@chainsafe/libp2p-noise": "^4.1.0",
|
||||
"libp2p-webrtc-star": "^0.23.0",
|
||||
"libp2p-webrtc-star": "^0.25.0",
|
||||
"libp2p-websockets": "^0.16.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
@ -28,6 +27,6 @@
|
||||
"babel-plugin-syntax-async-functions": "^6.13.0",
|
||||
"babel-plugin-transform-regenerator": "^6.26.0",
|
||||
"babel-polyfill": "^6.26.0",
|
||||
"parcel": "next"
|
||||
"parcel": "^2.0.1"
|
||||
}
|
||||
}
|
||||
|
@ -38,7 +38,7 @@ async function run() {
|
||||
)
|
||||
await browser.close();
|
||||
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
} finally {
|
||||
|
@ -8,12 +8,12 @@
|
||||
},
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@achingbrain/libp2p-gossipsub": "^0.12.2",
|
||||
"execa": "^2.1.0",
|
||||
"fs-extra": "^8.1.0",
|
||||
"libp2p": "../src",
|
||||
"libp2p-pubsub-peer-discovery": "^4.0.0",
|
||||
"libp2p-relay-server": "^0.3.0",
|
||||
"libp2p-gossipsub": "^0.11.0",
|
||||
"p-defer": "^3.0.0",
|
||||
"uint8arrays": "^3.0.0",
|
||||
"which": "^2.0.1"
|
||||
|
@ -38,8 +38,8 @@ const createNode = async () => {
|
||||
createNode()
|
||||
])
|
||||
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
|
||||
await Promise.all([
|
||||
node1.dial(node2.peerId),
|
||||
|
@ -40,8 +40,8 @@ const createNode = async () => {
|
||||
createNode()
|
||||
])
|
||||
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
|
||||
await Promise.all([
|
||||
node1.dial(node2.peerId),
|
||||
|
@ -43,8 +43,8 @@ const node1 = nodes[0]
|
||||
const node2 = nodes[1]
|
||||
const node3 = nodes[2]
|
||||
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
|
||||
await Promise.all([
|
||||
node1.dial(node2.peerId),
|
||||
|
@ -1,36 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pWaitFor = require('p-wait-for')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
async function test() {
|
||||
async function test () {
|
||||
process.stdout.write('1.js\n')
|
||||
|
||||
const addrs = []
|
||||
let foundIt = false
|
||||
const proc = execa('node', [path.join(__dirname, '1.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('Found it, multiaddrs are:', 'node', [path.join(__dirname, '1.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
|
||||
const line = uint8ArrayToString(data)
|
||||
|
||||
// Discovered peer
|
||||
if (!foundIt && line.includes('Found it, multiaddrs are:')) {
|
||||
foundIt = true
|
||||
}
|
||||
|
||||
addrs.push(line)
|
||||
})
|
||||
|
||||
await pWaitFor(() => addrs.length === 2)
|
||||
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@ -1,40 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pDefer = require('p-defer')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
const providedCopy = 'is providing'
|
||||
const foundCopy = 'Found provider:'
|
||||
|
||||
async function test() {
|
||||
async function test () {
|
||||
process.stdout.write('2.js\n')
|
||||
const providedDefer = pDefer()
|
||||
const foundDefer = pDefer()
|
||||
|
||||
const proc = execa('node', [path.join(__dirname, '2.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('Found provider:', 'node', [path.join(__dirname, '2.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
|
||||
const line = uint8ArrayToString(data)
|
||||
|
||||
if (line.includes(providedCopy)) {
|
||||
providedDefer.resolve()
|
||||
} else if (line.includes(foundCopy)) {
|
||||
foundDefer.resolve()
|
||||
}
|
||||
})
|
||||
|
||||
await Promise.all([
|
||||
providedDefer.promise,
|
||||
foundDefer.promise
|
||||
])
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@ -29,7 +29,7 @@ generate(otherSwarmKey)
|
||||
console.log('nodes started...')
|
||||
|
||||
// Add node 2 data to node1's PeerStore
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.dial(node2.peerId)
|
||||
|
||||
node2.handle('/private', ({ stream }) => {
|
||||
|
@ -1,30 +1,13 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pDefer = require('p-defer')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
async function test () {
|
||||
const messageReceived = pDefer()
|
||||
process.stdout.write('index.js\n')
|
||||
|
||||
const proc = execa('node', [path.join(__dirname, 'index.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('This message is sent on a private network', 'node', [path.join(__dirname, 'index.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
|
||||
const s = uint8ArrayToString(data)
|
||||
if (s.includes('This message is sent on a private network')) {
|
||||
messageReceived.resolve()
|
||||
}
|
||||
})
|
||||
|
||||
await messageReceived.promise
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
||||
|
@ -31,7 +31,7 @@ const createNode = async () => {
|
||||
])
|
||||
|
||||
// Add node's 2 data to the PeerStore
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
|
||||
// exact matching
|
||||
node2.handle('/your-protocol', ({ stream }) => {
|
||||
|
@ -31,7 +31,7 @@ const createNode = async () => {
|
||||
])
|
||||
|
||||
// Add node's 2 data to the PeerStore
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
|
||||
node2.handle(['/a', '/b'], ({ protocol, stream }) => {
|
||||
pipe(
|
||||
|
@ -32,8 +32,8 @@ const createNode = async () => {
|
||||
])
|
||||
|
||||
// Add node's 2 data to the PeerStore
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
|
||||
node1.handle('/node-1', ({ stream }) => {
|
||||
pipe(
|
||||
stream,
|
||||
|
@ -20,7 +20,7 @@ const node1 = nodes[0]
|
||||
const node2 = nodes[1]
|
||||
|
||||
// Add node's 2 data to the PeerStore
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
|
||||
// Here we are telling libp2p that if someone dials this node to talk with the `/your-protocol`
|
||||
// multicodec, the protocol identifier, please call this handler and give it the stream
|
||||
|
@ -1,31 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pDefer = require('p-defer')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
async function test() {
|
||||
const messageDefer = pDefer()
|
||||
async function test () {
|
||||
process.stdout.write('1.js\n')
|
||||
|
||||
const proc = execa('node', [path.join(__dirname, '1.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('my own protocol, wow!', 'node', [path.join(__dirname, '1.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
|
||||
const line = uint8ArrayToString(data)
|
||||
|
||||
if (line.includes('my own protocol, wow!')) {
|
||||
messageDefer.resolve()
|
||||
}
|
||||
})
|
||||
|
||||
await messageDefer.promise
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@ -1,38 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pWaitFor = require('p-wait-for')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
const messages = [
|
||||
'protocol (a)',
|
||||
'protocol (b)',
|
||||
'another stream on protocol (b)'
|
||||
]
|
||||
|
||||
async function test() {
|
||||
async function test () {
|
||||
process.stdout.write('2.js\n')
|
||||
|
||||
let count = 0
|
||||
const proc = execa('node', [path.join(__dirname, '2.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('another stream on protocol (b)', 'node', [path.join(__dirname, '2.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
|
||||
const line = uint8ArrayToString(data)
|
||||
|
||||
if (messages.find((m) => line.includes(m))) {
|
||||
count += 1
|
||||
}
|
||||
})
|
||||
|
||||
await pWaitFor(() => count === messages.length)
|
||||
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@ -1,37 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pWaitFor = require('p-wait-for')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
const messages = [
|
||||
'from 1 to 2',
|
||||
'from 2 to 1'
|
||||
]
|
||||
|
||||
async function test() {
|
||||
async function test () {
|
||||
process.stdout.write('3.js\n')
|
||||
|
||||
let count = 0
|
||||
const proc = execa('node', [path.join(__dirname, '3.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('from 2 to 1', 'node', [path.join(__dirname, '3.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
|
||||
const line = uint8ArrayToString(data)
|
||||
|
||||
if (messages.find((m) => line.includes(m))) {
|
||||
count += 1
|
||||
}
|
||||
})
|
||||
|
||||
await pWaitFor(() => count === messages.length)
|
||||
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@@ -5,7 +5,7 @@ const Libp2p = require('../../')
const TCP = require('libp2p-tcp')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('@chainsafe/libp2p-noise')
const Gossipsub = require('libp2p-gossipsub')
const Gossipsub = require('@achingbrain/libp2p-gossipsub')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
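The hunk above only swaps which Gossipsub implementation the example requires. Where that module actually gets plugged in is the `modules.pubsub` field passed to `Libp2p.create`; a sketch of that fragment follows, assuming the usual libp2p 0.3x config shape (the full `createNode` body is not part of this hunk).

```js
const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('@chainsafe/libp2p-noise')
const Gossipsub = require('@achingbrain/libp2p-gossipsub') // was: require('libp2p-gossipsub')

const createNode = async () => {
  const node = await Libp2p.create({
    addresses: {
      listen: ['/ip4/0.0.0.0/tcp/0']
    },
    modules: {
      transport: [TCP],
      streamMuxer: [Mplex],
      connEncryption: [NOISE],
      pubsub: Gossipsub // the swapped-in implementation goes here
    }
  })

  await node.start()
  return node
}
```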
@ -35,19 +35,19 @@ const createNode = async () => {
|
||||
])
|
||||
|
||||
// Add node's 2 data to the PeerStore
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.dial(node2.peerId)
|
||||
|
||||
node1.pubsub.on(topic, (msg) => {
|
||||
console.log(`node1 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node1.pubsub.subscribe(topic)
|
||||
node1.pubsub.subscribe(topic)
|
||||
|
||||
// Will not receive own published messages by default
|
||||
node2.pubsub.on(topic, (msg) => {
|
||||
console.log(`node2 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node2.pubsub.subscribe(topic)
|
||||
node2.pubsub.subscribe(topic)
|
||||
|
||||
// node2 publishes "news" every second
|
||||
setInterval(() => {
|
||||
|
@@ -41,29 +41,31 @@ const node = await Libp2p.create({
Once that is done, we only need to create a few libp2p nodes, connect them and everything is ready to start using pubsub.

```JavaScript
const { fromString } = require('uint8arrays/from-string')
const { toString } = require('uint8arrays/to-string')
const topic = 'news'

const node1 = nodes[0]
const node2 = nodes[1]

// Add node's 2 data to the PeerStore
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.dial(node2.peerId)

node1.pubsub.on(topic, (msg) => {
  console.log(`node1 received: ${uint8ArrayToString(msg.data)}`)
  console.log(`node1 received: ${toString(msg.data)}`)
})
await node1.pubsub.subscribe(topic)

// Will not receive own published messages by default
node2.pubsub.on(topic, (msg) => {
  console.log(`node2 received: ${uint8ArrayToString(msg.data)}`)
  console.log(`node2 received: ${toString(msg.data)}`)
})
await node2.pubsub.subscribe(topic)

// node2 publishes "news" every second
setInterval(() => {
  node2.pubsub.publish(topic, uint8ArrayFromString('Bird bird bird, bird is the word!'))
  node2.pubsub.publish(topic, fromString('Bird bird bird, bird is the word!'))
}, 1000)
```
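Run against two freshly started nodes, the example above should print roughly one line per second, and only from node1, since a node does not receive its own published messages by default. Expected output, approximately:

```
node1 received: Bird bird bird, bird is the word!
node1 received: Bird bird bird, bird is the word!
...
```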
@ -5,7 +5,7 @@ const Libp2p = require('../../../')
|
||||
const TCP = require('libp2p-tcp')
|
||||
const Mplex = require('libp2p-mplex')
|
||||
const { NOISE } = require('@chainsafe/libp2p-noise')
|
||||
const Gossipsub = require('libp2p-gossipsub')
|
||||
const Gossipsub = require('@achingbrain/libp2p-gossipsub')
|
||||
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
|
||||
@ -36,10 +36,10 @@ const createNode = async () => {
|
||||
])
|
||||
|
||||
// node1 connects to node2 and node2 connects to node3
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.dial(node2.peerId)
|
||||
|
||||
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node2.dial(node3.peerId)
|
||||
|
||||
//subscribe
|
||||
|
@ -32,10 +32,10 @@ const [node1, node2, node3] = await Promise.all([
|
||||
createNode(),
|
||||
])
|
||||
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.dial(node2.peerId)
|
||||
|
||||
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node2.dial(node3.peerId)
|
||||
```
|
||||
|
||||
|
@ -49,7 +49,7 @@ function printAddrs (node, number) {
|
||||
console.log(result.toString())
|
||||
})
|
||||
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
const { stream } = await node1.dialProtocol(node2.peerId, '/print')
|
||||
|
||||
await pipe(
|
||||
|
@ -60,9 +60,9 @@ function print ({ stream }) {
|
||||
node2.handle('/print', print)
|
||||
node3.handle('/print', print)
|
||||
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
node3.peerStore.addressBook.set(node1.peerId, node1.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node3.peerStore.addressBook.set(node1.peerId, node1.multiaddrs)
|
||||
|
||||
// node 1 (TCP) dials to node 2 (TCP+WebSockets)
|
||||
const { stream } = await node1.dialProtocol(node2.peerId, '/print')
|
||||
@@ -81,7 +81,7 @@ function print ({ stream }) {
// node 3 (listening WebSockets) can dial node 1 (TCP)
try {
await node3.dialProtocol(node1.peerId, '/print')
} catch (err) {
} catch (/** @type {any} */ err) {
console.log('node 3 failed to dial to node 1 with:', err.message)
}
})();
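Because node 3 only listens on WebSockets and node 1 only on TCP, the two share no common transport, so the `catch` branch above is the expected path. The run should end with the line the updated test waits for (the tail is whatever `err.message` happens to be):

```
node 3 failed to dial to node 1 with: ...
```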
@@ -91,7 +91,7 @@ const concat = require('it-concat')
const MPLEX = require('libp2p-mplex')
```

We are going to reuse the `createNode` function from step 1, but this time add a stream multiplexer from `libp2p-mplex`.

```js
const createNode = async () => {
  const node = await Libp2p.create({
@ -140,7 +140,7 @@ Then add,
|
||||
console.log(result.toString())
|
||||
})
|
||||
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
const { stream } = await node1.dialProtocol(node2.peerId, '/print')
|
||||
|
||||
await pipe(
|
||||
@ -224,9 +224,9 @@ node1.handle('/print', print)
|
||||
node2.handle('/print', print)
|
||||
node3.handle('/print', print)
|
||||
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
node3.peerStore.addressBook.set(node1.peerId, node1.multiaddrs)
|
||||
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node3.peerStore.addressBook.set(node1.peerId, node1.multiaddrs)
|
||||
|
||||
// node 1 (TCP) dials to node 2 (TCP+WebSockets)
|
||||
const { stream } = await node1.dialProtocol(node2.peerId, '/print')
|
||||
|
@ -1,38 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pDefer = require('p-defer')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
async function test () {
|
||||
const deferStarted = pDefer()
|
||||
const deferListen = pDefer()
|
||||
|
||||
process.stdout.write('1.js\n')
|
||||
|
||||
const proc = execa('node', [path.join(__dirname, '1.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('/p2p/', 'node', [path.join(__dirname, '1.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
const line = uint8ArrayToString(data)
|
||||
|
||||
|
||||
if (line.includes('node has started (true/false): true')) {
|
||||
deferStarted.resolve()
|
||||
} else if (line.includes('p2p')) {
|
||||
deferListen.resolve()
|
||||
}
|
||||
})
|
||||
|
||||
await Promise.all([
|
||||
deferStarted.promise,
|
||||
deferListen.promise
|
||||
])
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@ -1,30 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pDefer = require('p-defer')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
async function test () {
|
||||
const defer = pDefer()
|
||||
process.stdout.write('2.js\n')
|
||||
|
||||
const proc = execa('node', [path.join(__dirname, '2.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('Hello p2p world!', 'node', [path.join(__dirname, '2.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
const line = uint8ArrayToString(data)
|
||||
|
||||
if (line.includes('Hello p2p world!')) {
|
||||
defer.resolve()
|
||||
}
|
||||
})
|
||||
|
||||
await defer.promise
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@ -1,41 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pDefer = require('p-defer')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
async function test () {
|
||||
const deferNode1 = pDefer()
|
||||
const deferNode2 = pDefer()
|
||||
const deferNode3 = pDefer()
|
||||
|
||||
process.stdout.write('3.js\n')
|
||||
|
||||
const proc = execa('node', [path.join(__dirname, '3.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('node 3 failed to dial to node 1 with:', 'node', [path.join(__dirname, '3.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
const line = uint8ArrayToString(data)
|
||||
|
||||
if (line.includes('node 1 dialed to node 2 successfully')) {
|
||||
deferNode1.resolve()
|
||||
} else if (line.includes('node 2 dialed to node 3 successfully')) {
|
||||
deferNode2.resolve()
|
||||
} else if (line.includes('node 3 failed to dial to node 1 with:')) {
|
||||
deferNode3.resolve()
|
||||
}
|
||||
})
|
||||
|
||||
await Promise.all([
|
||||
deferNode1.promise,
|
||||
deferNode2.promise,
|
||||
deferNode3.promise
|
||||
])
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@ -1,33 +1,14 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const pDefer = require('p-defer')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { waitForOutput } = require('../utils')
|
||||
|
||||
async function test () {
|
||||
const deferNode1 = pDefer()
|
||||
|
||||
process.stdout.write('4.js\n')
|
||||
|
||||
const proc = execa('node', [path.join(__dirname, '4.js')], {
|
||||
cwd: path.resolve(__dirname),
|
||||
all: true
|
||||
await waitForOutput('node 2 dialed to node 1 successfully', 'node', [path.join(__dirname, '4.js')], {
|
||||
cwd: __dirname
|
||||
})
|
||||
|
||||
proc.all.on('data', async (data) => {
|
||||
process.stdout.write(data)
|
||||
const line = uint8ArrayToString(data)
|
||||
|
||||
if (line.includes('node 2 dialed to node 1 successfully')) {
|
||||
deferNode1.resolve()
|
||||
}
|
||||
})
|
||||
|
||||
await Promise.all([
|
||||
deferNode1.promise,
|
||||
])
|
||||
proc.kill()
|
||||
}
|
||||
|
||||
module.exports = test
|
||||
|
@ -9,7 +9,7 @@ async function isExecutable (command) {
|
||||
await fs.access(command, fs.constants.X_OK)
|
||||
|
||||
return true
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return isExecutable(await which(command))
|
||||
}
|
||||
@ -30,7 +30,7 @@ async function waitForOutput (expectedOutput, command, args = [], opts = {}) {
|
||||
|
||||
const proc = execa(command, args, opts)
|
||||
let output = ''
|
||||
let time = 120000
|
||||
let time = 600000
|
||||
|
||||
let timeout = setTimeout(() => {
|
||||
throw new Error(`Did not see "${expectedOutput}" in output from "${[command].concat(args).join(' ')}" after ${time/1000}s`)
|
||||
@ -49,7 +49,7 @@ async function waitForOutput (expectedOutput, command, args = [], opts = {}) {
|
||||
|
||||
try {
|
||||
await proc
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (!err.killed) {
|
||||
throw err
|
||||
}
|
||||
|
@ -12,6 +12,6 @@
|
||||
<main>
|
||||
<pre id="output"></pre>
|
||||
</main>
|
||||
<script src="./dialer.js"></script>
|
||||
<script src="./dialer.js" type="module"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
@ -40,5 +40,4 @@ const PeerId = require('peer-id')
|
||||
|
||||
console.log('Listening on:')
|
||||
node.multiaddrs.forEach((ma) => console.log(`${ma.toString()}/p2p/${node.peerId.toB58String()}`))
|
||||
|
||||
})()
|
||||
|
@ -3,7 +3,6 @@
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"description": "",
|
||||
"main": "dist/index.html",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"build": "parcel build index.html",
|
||||
@ -13,19 +12,20 @@
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.13.10",
|
||||
"@babel/core": "^7.13.10",
|
||||
"@mapbox/node-pre-gyp": "^1.0.8",
|
||||
"babel-plugin-syntax-async-functions": "^6.13.0",
|
||||
"babel-plugin-transform-regenerator": "^6.26.0",
|
||||
"babel-polyfill": "^6.26.0",
|
||||
"parcel-bundler": "1.12.3",
|
||||
"parcel": "^2.0.1",
|
||||
"util": "^0.12.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@chainsafe/libp2p-noise": "^5.0.2",
|
||||
"libp2p": "../../",
|
||||
"libp2p-bootstrap": "^0.13.0",
|
||||
"libp2p-bootstrap": "^0.14.0",
|
||||
"libp2p-mplex": "^0.10.4",
|
||||
"@chainsafe/libp2p-noise": "^4.1.0",
|
||||
"libp2p-webrtc-direct": "^0.7.0",
|
||||
"peer-id": "^0.15.0"
|
||||
"peer-id": "^0.16.0"
|
||||
},
|
||||
"browser": {
|
||||
"ipfs": "ipfs/dist/index.min.js"
|
||||
|
@ -72,7 +72,7 @@ async function test () {
|
||||
{ timeout: 10000 }
|
||||
)
|
||||
await browser.close();
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
} finally {
|
||||
|
108
package.json
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "libp2p",
|
||||
"version": "0.33.0",
|
||||
"version": "0.36.2",
|
||||
"description": "JavaScript implementation of libp2p, a modular peer to peer network stack",
|
||||
"leadMaintainer": "Jacob Heun <jacobheun@gmail.com>",
|
||||
"main": "src/index.js",
|
||||
@ -20,20 +20,20 @@
|
||||
"scripts": {
|
||||
"lint": "aegir lint",
|
||||
"build": "aegir build",
|
||||
"build:proto": "npm run build:proto:circuit && npm run build:proto:identify && npm run build:proto:plaintext && npm run build:proto:address-book && npm run build:proto:proto-book && npm run build:proto:peer-record && npm run build:proto:envelope",
|
||||
"build:proto": "npm run build:proto:circuit && npm run build:proto:fetch && npm run build:proto:identify && npm run build:proto:plaintext && npm run build:proto:address-book && npm run build:proto:proto-book && npm run build:proto:peer && npm run build:proto:peer-record && npm run build:proto:envelope",
|
||||
"build:proto:circuit": "pbjs -t static-module -w commonjs -r libp2p-circuit --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/circuit/protocol/index.js ./src/circuit/protocol/index.proto",
|
||||
"build:proto:fetch": "pbjs -t static-module -w commonjs -r libp2p-fetch --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/fetch/proto.js ./src/fetch/proto.proto",
|
||||
"build:proto:identify": "pbjs -t static-module -w commonjs -r libp2p-identify --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/identify/message.js ./src/identify/message.proto",
|
||||
"build:proto:plaintext": "pbjs -t static-module -w commonjs -r libp2p-plaintext --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/insecure/proto.js ./src/insecure/proto.proto",
|
||||
"build:proto:address-book": "pbjs -t static-module -w commonjs -r libp2p-address-book --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/peer-store/persistent/pb/address-book.js ./src/peer-store/persistent/pb/address-book.proto",
|
||||
"build:proto:proto-book": "pbjs -t static-module -w commonjs -r libp2p-proto-book --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/peer-store/persistent/pb/proto-book.js ./src/peer-store/persistent/pb/proto-book.proto",
|
||||
"build:proto:peer": "pbjs -t static-module -w commonjs -r libp2p-peer --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/peer-store/pb/peer.js ./src/peer-store/pb/peer.proto",
|
||||
"build:proto:peer-record": "pbjs -t static-module -w commonjs -r libp2p-peer-record --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/record/peer-record/peer-record.js ./src/record/peer-record/peer-record.proto",
|
||||
"build:proto:envelope": "pbjs -t static-module -w commonjs -r libp2p-envelope --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/record/envelope/envelope.js ./src/record/envelope/envelope.proto",
|
||||
"build:proto-types": "npm run build:proto-types:circuit && npm run build:proto-types:identify && npm run build:proto-types:plaintext && npm run build:proto-types:address-book && npm run build:proto-types:proto-book && npm run build:proto-types:peer-record && npm run build:proto-types:envelope",
|
||||
"build:proto-types": "npm run build:proto-types:circuit && npm run build:proto-types:fetch && npm run build:proto-types:identify && npm run build:proto-types:plaintext && npm run build:proto-types:address-book && npm run build:proto-types:proto-book && npm run build:proto-types:peer && npm run build:proto-types:peer-record && npm run build:proto-types:envelope",
|
||||
"build:proto-types:circuit": "pbts -o src/circuit/protocol/index.d.ts src/circuit/protocol/index.js",
|
||||
"build:proto-types:fetch": "pbts -o src/fetch/proto.d.ts src/fetch/proto.js",
|
||||
"build:proto-types:identify": "pbts -o src/identify/message.d.ts src/identify/message.js",
|
||||
"build:proto-types:plaintext": "pbts -o src/insecure/proto.d.ts src/insecure/proto.js",
|
||||
"build:proto-types:address-book": "pbts -o src/peer-store/persistent/pb/address-book.d.ts src/peer-store/persistent/pb/address-book.js",
|
||||
"build:proto-types:proto-book": "pbts -o src/peer-store/persistent/pb/proto-book.d.ts src/peer-store/persistent/pb/proto-book.js",
|
||||
"build:proto-types:peer": "pbts -o src/peer-store/pb/peer.d.ts src/peer-store/pb/peer.js",
|
||||
"build:proto-types:peer-record": "pbts -o src/record/peer-record/peer-record.d.ts src/record/peer-record/peer-record.js",
|
||||
"build:proto-types:envelope": "pbts -o src/record/envelope/envelope.d.ts src/record/envelope/envelope.js",
|
||||
"test": "aegir test",
|
||||
@ -41,10 +41,8 @@
|
||||
"test:node": "aegir test -t node -f \"./test/**/*.{node,spec}.js\"",
|
||||
"test:browser": "aegir test -t browser",
|
||||
"test:examples": "cd examples && npm run test:all",
|
||||
"prepare": "aegir build --no-bundle",
|
||||
"release": "aegir release -t node -t browser",
|
||||
"release-minor": "aegir release --type minor -t node -t browser",
|
||||
"release-major": "aegir release --type major -t node -t browser",
|
||||
"test:interop": "LIBP2P_JS=$PWD npx aegir test -t node -f ./node_modules/libp2p-interop/test/*",
|
||||
"prepare": "npm run build",
|
||||
"coverage": "nyc --reporter=text --reporter=lcov npm run test:node"
|
||||
},
|
||||
"repository": {
|
||||
@ -65,27 +63,27 @@
|
||||
"homepage": "https://libp2p.io",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
"node": ">=15.0.0"
|
||||
},
|
||||
"browser": {
|
||||
"@motrix/nat-api": false
|
||||
"nat-api": false
|
||||
},
|
||||
"eslintConfig": {
|
||||
"extends": "ipfs",
|
||||
"ignorePatterns": [
|
||||
"!.aegir.js",
|
||||
"test/ts-use"
|
||||
"test/ts-use",
|
||||
"*.d.ts"
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"abortable-iterator": "^3.0.0",
|
||||
"@motrix/nat-api": "^0.3.1",
|
||||
"@vascosantos/moving-average": "^1.1.0",
|
||||
"abort-controller": "^3.0.0",
|
||||
"abortable-iterator": "^3.0.0",
|
||||
"aggregate-error": "^3.1.0",
|
||||
"any-signal": "^2.1.1",
|
||||
"any-signal": "^3.0.0",
|
||||
"bignumber.js": "^9.0.1",
|
||||
"class-is": "^1.1.0",
|
||||
"datastore-core": "^7.0.0",
|
||||
"debug": "^4.3.1",
|
||||
"err-code": "^3.0.0",
|
||||
"es6-promisify": "^7.0.0",
|
||||
@ -97,75 +95,76 @@
|
||||
"it-drain": "^1.0.3",
|
||||
"it-filter": "^1.0.1",
|
||||
"it-first": "^1.0.4",
|
||||
"it-foreach": "^0.1.1",
|
||||
"it-handshake": "^2.0.0",
|
||||
"it-length-prefixed": "^5.0.2",
|
||||
"it-map": "^1.0.4",
|
||||
"it-merge": "^1.0.0",
|
||||
"it-pipe": "^1.1.0",
|
||||
"it-sort": "^1.0.1",
|
||||
"it-take": "^1.0.0",
|
||||
"libp2p-crypto": "^0.19.4",
|
||||
"libp2p-interfaces": "^1.0.0",
|
||||
"libp2p-crypto": "^0.21.2",
|
||||
"libp2p-interfaces": "^4.0.0",
|
||||
"libp2p-utils": "^0.4.0",
|
||||
"mafmt": "^10.0.0",
|
||||
"merge-options": "^3.0.4",
|
||||
"mortice": "^2.0.1",
|
||||
"multiaddr": "^10.0.0",
|
||||
"multiformats": "^9.0.0",
|
||||
"multistream-select": "^2.0.0",
|
||||
"multistream-select": "^3.0.0",
|
||||
"mutable-proxy": "^1.0.0",
|
||||
"node-forge": "^0.10.0",
|
||||
"nat-api": "^0.3.1",
|
||||
"node-forge": "^1.2.1",
|
||||
"p-any": "^3.0.0",
|
||||
"p-fifo": "^1.0.0",
|
||||
"p-retry": "^4.4.0",
|
||||
"p-settle": "^4.1.1",
|
||||
"peer-id": "^0.15.0",
|
||||
"peer-id": "^0.16.0",
|
||||
"private-ip": "^2.1.0",
|
||||
"protobufjs": "^6.10.2",
|
||||
"retimer": "^3.0.0",
|
||||
"sanitize-filename": "^1.6.3",
|
||||
"set-delayed-interval": "^1.0.0",
|
||||
"streaming-iterables": "^6.0.0",
|
||||
"timeout-abort-controller": "^1.1.1",
|
||||
"timeout-abort-controller": "^3.0.0",
|
||||
"uint8arrays": "^3.0.0",
|
||||
"varint": "^6.0.0",
|
||||
"wherearewe": "^1.0.0",
|
||||
"xsalsa20": "^1.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@chainsafe/libp2p-noise": "^4.0.0",
|
||||
"@chainsafe/libp2p-noise": "^5.0.0",
|
||||
"@nodeutils/defaults-deep": "^1.1.0",
|
||||
"@types/es6-promisify": "^6.0.0",
|
||||
"@types/node": "^16.0.1",
|
||||
"@types/node-forge": "^0.10.1",
|
||||
"@types/node-forge": "^1.0.0",
|
||||
"@types/varint": "^6.0.0",
|
||||
"aegir": "^33.1.1",
|
||||
"aegir": "^36.0.0",
|
||||
"buffer": "^6.0.3",
|
||||
"datastore-core": "^6.0.7",
|
||||
"delay": "^5.0.0",
|
||||
"interop-libp2p": "^0.4.0",
|
||||
"into-stream": "^7.0.0",
|
||||
"ipfs-http-client": "^52.0.2",
|
||||
"into-stream": "^6.0.0",
|
||||
"ipfs-http-client": "^54.0.2",
|
||||
"it-concat": "^2.0.0",
|
||||
"it-pair": "^1.0.0",
|
||||
"it-pushable": "^1.4.0",
|
||||
"libp2p": ".",
|
||||
"libp2p-bootstrap": "^0.13.0",
|
||||
"libp2p-bootstrap": "^0.14.0",
|
||||
"libp2p-delegated-content-routing": "^0.11.0",
|
||||
"libp2p-delegated-peer-routing": "^0.10.0",
|
||||
"libp2p-floodsub": "^0.27.0",
|
||||
"libp2p-gossipsub": "^0.11.0",
|
||||
"libp2p-interfaces-compliance-tests": "^1.0.0",
|
||||
"libp2p-kad-dht": "^0.24.2",
|
||||
"libp2p-mdns": "^0.17.0",
|
||||
"libp2p-mplex": "^0.10.1",
|
||||
"libp2p-delegated-peer-routing": "^0.11.1",
|
||||
"libp2p-interfaces-compliance-tests": "^4.0.8",
|
||||
"libp2p-interop": "^0.7.1",
|
||||
"libp2p-kad-dht": "^0.28.6",
|
||||
"libp2p-mdns": "^0.18.0",
|
||||
"libp2p-mplex": "^0.10.4",
|
||||
"libp2p-tcp": "^0.17.0",
|
||||
"libp2p-webrtc-star": "^0.23.0",
|
||||
"libp2p-webrtc-star": "^0.25.0",
|
||||
"libp2p-websockets": "^0.16.0",
|
||||
"nock": "^13.0.3",
|
||||
"p-defer": "^3.0.0",
|
||||
"p-times": "^3.0.0",
|
||||
"p-wait-for": "^3.2.0",
|
||||
"rimraf": "^3.0.2",
|
||||
"sinon": "^11.1.1",
|
||||
"sinon": "^12.0.1",
|
||||
"util": "^0.12.3"
|
||||
},
|
||||
"contributors": [
|
||||
@ -179,42 +178,45 @@
|
||||
"Friedel Ziegelmayer <dignifiedquire@gmail.com>",
|
||||
"Maciej Krüger <mkg20001@gmail.com>",
|
||||
"Hugo Dias <mail@hugodias.me>",
|
||||
"Chris Dostert <chrisdostert@users.noreply.github.com>",
|
||||
"dirkmc <dirkmdev@gmail.com>",
|
||||
"Volker Mische <volker.mische@gmail.com>",
|
||||
"Chris Dostert <chrisdostert@users.noreply.github.com>",
|
||||
"zeim839 <50573884+zeim839@users.noreply.github.com>",
|
||||
"Robert Kiel <robert.kiel@hoprnet.org>",
|
||||
"Richard Littauer <richard.littauer@gmail.com>",
|
||||
"a1300 <matthias-knopp@gmx.net>",
|
||||
"Ryan Bell <ryan@piing.net>",
|
||||
"ᴠɪᴄᴛᴏʀ ʙᴊᴇʟᴋʜᴏʟᴍ <victorbjelkholm@gmail.com>",
|
||||
"Andrew Nesbitt <andrewnez@gmail.com>",
|
||||
"Franck Royer <franck@royer.one>",
|
||||
"Thomas Eizinger <thomas@eizinger.io>",
|
||||
"Vít Habada <vithabada93@gmail.com>",
|
||||
"Giovanni T. Parra <fiatjaf@gmail.com>",
|
||||
"acolytec3 <17355484+acolytec3@users.noreply.github.com>",
|
||||
"Alan Smithee <ggnore.alan.smithee@gmail.com>",
|
||||
"Elven <mon.samuel@qq.com>",
|
||||
"Andrew Nesbitt <andrewnez@gmail.com>",
|
||||
"Samlior <samlior@foxmail.com>",
|
||||
"Didrik Nordström <didrik.nordstrom@gmail.com>",
|
||||
"RasmusErik Voel Jensen <github@solsort.com>",
|
||||
"Robert Kiel <robert.kiel@hoprnet.org>",
|
||||
"Smite Chow <xiaopengyou@live.com>",
|
||||
"Soeren <nikorpoulsen@gmail.com>",
|
||||
"Sönke Hahn <soenkehahn@gmail.com>",
|
||||
"Aditya Bose <13054902+adbose@users.noreply.github.com>",
|
||||
"TJKoury <TJKoury@gmail.com>",
|
||||
"TheStarBoys <41286328+TheStarBoys@users.noreply.github.com>",
|
||||
"Tiago Alves <alvesjtiago@gmail.com>",
|
||||
"Tim Daubenschütz <tim@daubenschuetz.de>",
|
||||
"XiaoZhang <zxinmyth@gmail.com>",
|
||||
"Yusef Napora <yusef@napora.org>",
|
||||
"Zane Starr <zcstarr@gmail.com>",
|
||||
"ebinks <elizabethjbinks@gmail.com>",
|
||||
"Aditya Bose <13054902+adbose@users.noreply.github.com>",
|
||||
"greenSnot <greenSnot@users.noreply.github.com>",
|
||||
"isan_rivkin <isanrivkin@gmail.com>",
|
||||
"mayerwin <mayerwin@users.noreply.github.com>",
|
||||
"mcclure <andi.m.mcclure@gmail.com>",
|
||||
"patrickwoodhead <91056047+patrickwoodhead@users.noreply.github.com>",
|
||||
"phillmac <phillmac@users.noreply.github.com>",
|
||||
"robertkiel <robert.kiel@validitylabs.org>",
|
||||
"shresthagrawal <34920931+shresthagrawal@users.noreply.github.com>",
|
||||
"swedneck <40505480+swedneck@users.noreply.github.com>",
|
||||
"greenSnot <greenSnot@users.noreply.github.com>",
|
||||
"tuyennhv <vutuyen2636@gmail.com>",
|
||||
"Sönke Hahn <soenkehahn@gmail.com>",
|
||||
"Aleksei <vozhdb@gmail.com>",
|
||||
"Bernd Strehl <bernd.strehl@gmail.com>",
|
||||
"Chris Bratlien <chrisbratlien@gmail.com>",
|
||||
@ -239,9 +241,13 @@
|
||||
"Lars Gierth <lgierth@users.noreply.github.com>",
|
||||
"Leask Wong <i@leaskh.com>",
|
||||
"Marcin Tojek <mtojek@users.noreply.github.com>",
|
||||
"Marston Connell <34043723+TheMarstonConnell@users.noreply.github.com>",
|
||||
"Michael Burns <5170+mburns@users.noreply.github.com>",
|
||||
"Miguel Mota <miguelmota2@gmail.com>",
|
||||
"Nuno Nogueira <nunofmn@gmail.com>",
|
||||
"Philipp Muens <raute1337@gmx.de>"
|
||||
"Philipp Muens <raute1337@gmx.de>",
|
||||
"RasmusErik Voel Jensen <github@solsort.com>",
|
||||
"Smite Chow <xiaopengyou@live.com>",
|
||||
"Soeren <nikorpoulsen@gmail.com>"
|
||||
]
|
||||
}
|
||||
|
@ -8,7 +8,7 @@ const log = Object.assign(debug('libp2p:auto-relay'), {
|
||||
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { Multiaddr } = require('multiaddr')
|
||||
const PeerId = require('peer-id')
|
||||
const all = require('it-all')
|
||||
|
||||
const { relay: multicodec } = require('./multicodec')
|
||||
const { canHop } = require('./circuit/hop')
|
||||
@ -22,7 +22,8 @@ const {
|
||||
|
||||
/**
|
||||
* @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
|
||||
* @typedef {import('../peer-store/address-book').Address} Address
|
||||
* @typedef {import('../peer-store/types').Address} Address
|
||||
* @typedef {import('peer-id')} PeerId
|
||||
*/
|
||||
|
||||
/**
|
||||
@ -91,7 +92,7 @@ class AutoRelay {
|
||||
|
||||
// If no protocol, check if we were keeping the peer before as a listenRelay
|
||||
if (!hasProtocol && this._listenRelays.has(id)) {
|
||||
this._removeListenRelay(id)
|
||||
await this._removeListenRelay(id)
|
||||
return
|
||||
} else if (!hasProtocol || this._listenRelays.has(id)) {
|
||||
return
|
||||
@ -113,10 +114,10 @@ class AutoRelay {
|
||||
const supportsHop = await canHop({ connection })
|
||||
|
||||
if (supportsHop) {
|
||||
this._peerStore.metadataBook.set(peerId, HOP_METADATA_KEY, uint8ArrayFromString(HOP_METADATA_VALUE))
|
||||
await this._peerStore.metadataBook.setValue(peerId, HOP_METADATA_KEY, uint8ArrayFromString(HOP_METADATA_VALUE))
|
||||
await this._addListenRelay(connection, id)
|
||||
}
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
this._onError(err)
|
||||
}
|
||||
}
|
||||
@ -125,7 +126,6 @@ class AutoRelay {
|
||||
* Peer disconnects.
|
||||
*
|
||||
* @param {Connection} connection - connection to the peer
|
||||
* @returns {void}
|
||||
*/
|
||||
_onPeerDisconnected (connection) {
|
||||
const peerId = connection.remotePeer
|
||||
@ -136,7 +136,9 @@ class AutoRelay {
|
||||
return
|
||||
}
|
||||
|
||||
this._removeListenRelay(id)
|
||||
this._removeListenRelay(id).catch(err => {
|
||||
log.error(err)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
@ -148,28 +150,36 @@ class AutoRelay {
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async _addListenRelay (connection, id) {
|
||||
// Check if already listening on enough relays
|
||||
if (this._listenRelays.size >= this.maxListeners) {
|
||||
return
|
||||
}
|
||||
|
||||
// Get peer known addresses and sort them per public addresses first
|
||||
const remoteAddrs = this._peerStore.addressBook.getMultiaddrsForPeer(
|
||||
connection.remotePeer, this._addressSorter
|
||||
)
|
||||
|
||||
if (!remoteAddrs || !remoteAddrs.length) {
|
||||
return
|
||||
}
|
||||
|
||||
const listenAddr = `${remoteAddrs[0].toString()}/p2p-circuit`
|
||||
this._listenRelays.add(id)
|
||||
|
||||
// Attempt to listen on relay
|
||||
try {
|
||||
await this._transportManager.listen([new Multiaddr(listenAddr)])
|
||||
// Announce multiaddrs will update on listen success by TransportManager event being triggered
|
||||
} catch (err) {
|
||||
// Check if already listening on enough relays
|
||||
if (this._listenRelays.size >= this.maxListeners) {
|
||||
return
|
||||
}
|
||||
|
||||
// Get peer known addresses and sort them per public addresses first
|
||||
const remoteAddrs = await this._peerStore.addressBook.getMultiaddrsForPeer(
|
||||
connection.remotePeer, this._addressSorter
|
||||
)
|
||||
|
||||
// Attempt to listen on relay
|
||||
const result = await Promise.all(
|
||||
remoteAddrs.map(async addr => {
|
||||
try {
|
||||
// Announce multiaddrs will update on listen success by TransportManager event being triggered
|
||||
await this._transportManager.listen([new Multiaddr(`${addr.toString()}/p2p-circuit`)])
|
||||
return true
|
||||
} catch (/** @type {any} */ err) {
|
||||
this._onError(err)
|
||||
}
|
||||
|
||||
return false
|
||||
})
|
||||
)
|
||||
|
||||
if (result.includes(true)) {
|
||||
this._listenRelays.add(id)
|
||||
}
|
||||
} catch (/** @type {any} */ err) {
|
||||
this._onError(err)
|
||||
this._listenRelays.delete(id)
|
||||
}
|
||||
@ -180,12 +190,11 @@ class AutoRelay {
|
||||
*
|
||||
* @private
|
||||
* @param {string} id - peer identifier string.
|
||||
* @returns {void}
|
||||
*/
|
||||
_removeListenRelay (id) {
|
||||
async _removeListenRelay (id) {
|
||||
if (this._listenRelays.delete(id)) {
|
||||
// TODO: this should be responsibility of the connMgr
|
||||
this._listenOnAvailableHopRelays([id])
|
||||
await this._listenOnAvailableHopRelays([id])
|
||||
}
|
||||
}
|
||||
|
||||
@ -197,7 +206,6 @@ class AutoRelay {
|
||||
* 3. Search the network.
|
||||
*
|
||||
* @param {string[]} [peersToIgnore]
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async _listenOnAvailableHopRelays (peersToIgnore = []) {
|
||||
// TODO: The peer redial issue on disconnect should be handled by connection gating
|
||||
@ -207,31 +215,37 @@ class AutoRelay {
|
||||
}
|
||||
|
||||
const knownHopsToDial = []
|
||||
const peers = await all(this._peerStore.getPeers())
|
||||
|
||||
// Check if we have known hop peers to use and attempt to listen on the already connected
|
||||
for (const [id, metadataMap] of this._peerStore.metadataBook.data.entries()) {
|
||||
for await (const { id, metadata } of peers) {
|
||||
const idStr = id.toB58String()
|
||||
|
||||
// Continue to next if listening on this or peer to ignore
|
||||
if (this._listenRelays.has(id) || peersToIgnore.includes(id)) {
|
||||
if (this._listenRelays.has(idStr)) {
|
||||
continue
|
||||
}
|
||||
|
||||
const supportsHop = metadataMap.get(HOP_METADATA_KEY)
|
||||
if (peersToIgnore.includes(idStr)) {
|
||||
continue
|
||||
}
|
||||
|
||||
const supportsHop = metadata.get(HOP_METADATA_KEY)
|
||||
|
||||
// Continue to next if it does not support Hop
|
||||
if (!supportsHop || uint8ArrayToString(supportsHop) !== HOP_METADATA_VALUE) {
|
||||
continue
|
||||
}
|
||||
|
||||
const peerId = PeerId.createFromB58String(id)
|
||||
const connection = this._connectionManager.get(peerId)
|
||||
const connection = this._connectionManager.get(id)
|
||||
|
||||
// If not connected, store for possible later use.
|
||||
if (!connection) {
|
||||
knownHopsToDial.push(peerId)
|
||||
knownHopsToDial.push(id)
|
||||
continue
|
||||
}
|
||||
|
||||
await this._addListenRelay(connection, id)
|
||||
await this._addListenRelay(connection, idStr)
|
||||
|
||||
// Check if already listening on enough relays
|
||||
if (this._listenRelays.size >= this.maxListeners) {
|
||||
@ -258,7 +272,7 @@ class AutoRelay {
|
||||
}
|
||||
|
||||
const peerId = provider.id
|
||||
this._peerStore.addressBook.add(peerId, provider.multiaddrs)
|
||||
await this._peerStore.addressBook.add(peerId, provider.multiaddrs)
|
||||
|
||||
await this._tryToListenOnRelay(peerId)
|
||||
|
||||
@ -267,7 +281,7 @@ class AutoRelay {
|
||||
return
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
this._onError(err)
|
||||
}
|
||||
}
|
||||
@ -279,7 +293,7 @@ class AutoRelay {
|
||||
try {
|
||||
const connection = await this._libp2p.dial(peerId)
|
||||
await this._addListenRelay(connection, peerId.toB58String())
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
this._onError(err, `could not connect and listen on known hop relay ${peerId.toB58String()}`)
|
||||
}
|
||||
}
|
||||
|
@ -54,7 +54,7 @@ async function handleHop ({
|
||||
// Validate the HOP request has the required input
|
||||
try {
|
||||
validateAddrs(request, streamHandler)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return log.error('invalid hop request via peer %s', connection.remotePeer.toB58String(), err)
|
||||
}
|
||||
|
||||
@ -93,7 +93,7 @@ async function handleHop ({
|
||||
connection: destinationConnection,
|
||||
request: stopRequest
|
||||
})
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return log.error(err)
|
||||
}
|
||||
|
||||
|
@ -34,7 +34,7 @@ module.exports.handleStop = function handleStop ({
|
||||
// Validate the STOP request has the required input
|
||||
try {
|
||||
validateAddrs(request, streamHandler)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return log.error('invalid stop request via peer %s', connection.remotePeer.toB58String(), err)
|
||||
}
|
||||
|
||||
|
@ -34,7 +34,7 @@ function validateAddrs (msg, streamHandler) {
|
||||
return new Multiaddr(addr)
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
writeResponse(streamHandler, msg.type === CircuitRelay.Type.HOP
|
||||
? CircuitRelay.Status.HOP_DST_MULTIADDR_INVALID
|
||||
: CircuitRelay.Status.STOP_DST_MULTIADDR_INVALID)
|
||||
@ -47,7 +47,7 @@ function validateAddrs (msg, streamHandler) {
|
||||
return new Multiaddr(addr)
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
writeResponse(streamHandler, msg.type === CircuitRelay.Type.HOP
|
||||
? CircuitRelay.Status.HOP_SRC_MULTIADDR_INVALID
|
||||
: CircuitRelay.Status.STOP_SRC_MULTIADDR_INVALID)
|
||||
|
@ -4,7 +4,7 @@ const debug = require('debug')
|
||||
const log = Object.assign(debug('libp2p:relay'), {
|
||||
error: debug('libp2p:relay:err')
|
||||
})
|
||||
|
||||
const { codes } = require('./../errors')
|
||||
const {
|
||||
setDelayedInterval,
|
||||
clearDelayedInterval
|
||||
@ -87,8 +87,8 @@ class Relay {
|
||||
try {
|
||||
const cid = await namespaceToCid(RELAY_RENDEZVOUS_NS)
|
||||
await this._libp2p.contentRouting.provide(cid)
|
||||
} catch (err) {
|
||||
if (err.code === 'NO_ROUTERS_AVAILABLE') {
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code === codes.ERR_NO_ROUTERS_AVAILABLE) {
|
||||
log.error('a content router, such as a DHT, must be provided in order to advertise the relay service', err)
|
||||
// Stop the advertise
|
||||
this.stop()
|
||||
|
@ -171,7 +171,7 @@ class Circuit {
|
||||
log('new outbound connection %s', maConn.remoteAddr)
|
||||
|
||||
return this._upgrader.upgradeOutbound(maConn)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error('Circuit relay dial failed', err)
|
||||
disconnectOnFailure && await relayConnection.close()
|
||||
throw err
|
||||
|
@ -13,6 +13,7 @@ const { FaultTolerance } = require('./transport-manager')
|
||||
|
||||
/**
|
||||
* @typedef {import('multiaddr').Multiaddr} Multiaddr
|
||||
* @typedef {import('./types').ConnectionGater} ConnectionGater
|
||||
* @typedef {import('.').Libp2pOptions} Libp2pOptions
|
||||
* @typedef {import('.').constructorOptions} constructorOptions
|
||||
*/
|
||||
@ -27,6 +28,7 @@ const DefaultConfig = {
|
||||
connectionManager: {
|
||||
minConnections: 25
|
||||
},
|
||||
connectionGater: /** @type {ConnectionGater} */ {},
|
||||
transportManager: {
|
||||
faultTolerance: FaultTolerance.FATAL_ALL
|
||||
},
|
||||
@ -60,13 +62,7 @@ const DefaultConfig = {
|
||||
protocolPrefix: 'ipfs',
|
||||
dht: {
|
||||
enabled: false,
|
||||
kBucketSize: 20,
|
||||
randomWalk: {
|
||||
enabled: false, // disabled waiting for https://github.com/libp2p/js-libp2p-kad-dht/issues/86
|
||||
queriesPerPeriod: 1,
|
||||
interval: 300e3,
|
||||
timeout: 10e3
|
||||
}
|
||||
kBucketSize: 20
|
||||
},
|
||||
nat: {
|
||||
enabled: true,
|
||||
|
132
src/connection-manager/auto-dialler.js
Normal file
@ -0,0 +1,132 @@
|
||||
'use strict'
|
||||
|
||||
const debug = require('debug')
|
||||
const mergeOptions = require('merge-options')
|
||||
// @ts-ignore retimer does not have types
|
||||
const retimer = require('retimer')
|
||||
const all = require('it-all')
|
||||
const { pipe } = require('it-pipe')
|
||||
const filter = require('it-filter')
|
||||
const sort = require('it-sort')
|
||||
|
||||
const log = Object.assign(debug('libp2p:connection-manager:auto-dialler'), {
|
||||
error: debug('libp2p:connection-manager:auto-dialler:err')
|
||||
})
|
||||
|
||||
const defaultOptions = {
|
||||
enabled: true,
|
||||
minConnections: 0,
|
||||
autoDialInterval: 10000
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {import('../index')} Libp2p
|
||||
* @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} AutoDiallerOptions
|
||||
* @property {boolean} [enabled = true] - Should preemptively guarantee connections are above the low watermark
|
||||
* @property {number} [minConnections = 0] - The minimum number of connections to avoid pruning
|
||||
* @property {number} [autoDialInterval = 10000] - How often, in milliseconds, it should preemptively guarantee connections are above the low watermark
|
||||
*/
|
||||
|
||||
class AutoDialler {
|
||||
/**
|
||||
* Proactively tries to connect to known peers stored in the PeerStore.
|
||||
* It will keep the number of connections below the upper limit and sort
|
||||
* the peers to connect based on whether we know their keys and protocols.
|
||||
*
|
||||
* @class
|
||||
* @param {Libp2p} libp2p
|
||||
* @param {AutoDiallerOptions} options
|
||||
*/
|
||||
constructor (libp2p, options = {}) {
|
||||
this._options = mergeOptions.call({ ignoreUndefined: true }, defaultOptions, options)
|
||||
this._libp2p = libp2p
|
||||
this._running = false
|
||||
this._autoDialTimeout = null
|
||||
this._autoDial = this._autoDial.bind(this)
|
||||
|
||||
log('options: %j', this._options)
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts the auto dialer
|
||||
*/
|
||||
async start () {
|
||||
if (!this._options.enabled) {
|
||||
log('not enabled')
|
||||
return
|
||||
}
|
||||
|
||||
this._running = true
|
||||
this._autoDial().catch(err => {
|
||||
log.error('could not start autodial', err)
|
||||
})
|
||||
log('started')
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops the auto dialler
|
||||
*/
|
||||
async stop () {
|
||||
if (!this._options.enabled) {
|
||||
log('not enabled')
|
||||
return
|
||||
}
|
||||
|
||||
this._running = false
|
||||
this._autoDialTimeout && this._autoDialTimeout.clear()
|
||||
log('stopped')
|
||||
}
|
||||
|
||||
async _autoDial () {
|
||||
const minConnections = this._options.minConnections
|
||||
|
||||
// Already has enough connections
|
||||
if (this._libp2p.connections.size >= minConnections) {
|
||||
this._autoDialTimeout = retimer(this._autoDial, this._options.autoDialInterval)
|
||||
return
|
||||
}
|
||||
|
||||
// Sort peers on whether we know protocols of public keys for them
|
||||
// TODO: assuming the `peerStore.getPeers()` order is stable this will mean
|
||||
// we keep trying to connect to the same peers?
|
||||
const peers = await pipe(
|
||||
this._libp2p.peerStore.getPeers(),
|
||||
(source) => filter(source, (peer) => !peer.id.equals(this._libp2p.peerId)),
|
||||
(source) => sort(source, (a, b) => {
|
||||
if (b.protocols && b.protocols.length && (!a.protocols || !a.protocols.length)) {
|
||||
return 1
|
||||
} else if (b.id.pubKey && !a.id.pubKey) {
|
||||
return 1
|
||||
}
|
||||
return -1
|
||||
}),
|
||||
(source) => all(source)
|
||||
)
|
||||
|
||||
for (let i = 0; this._running && i < peers.length && this._libp2p.connections.size < minConnections; i++) {
|
||||
const peer = peers[i]
|
||||
|
||||
if (!this._libp2p.connectionManager.get(peer.id)) {
|
||||
log('connecting to a peerStore stored peer %s', peer.id.toB58String())
|
||||
try {
|
||||
await this._libp2p.dialer.connectToPeer(peer.id)
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error('could not connect to peerStore stored peer', err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Connection Manager was stopped
|
||||
if (!this._running) {
|
||||
return
|
||||
}
|
||||
|
||||
this._autoDialTimeout = retimer(this._autoDial, this._options.autoDialInterval)
|
||||
}
}

module.exports = AutoDialler
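The class above is wired up internally by libp2p's connection manager, but it is self-contained enough to exercise on its own. A minimal usage sketch, assuming an already-created `libp2p` node in scope and using the option names from the `AutoDiallerOptions` typedef (the require path is illustrative):

```js
// Hypothetical standalone use of the AutoDialler added above.
// `libp2p` is assumed to be a created node exposing peerStore, dialer,
// connectionManager and connections, as the class relies on.
const AutoDialler = require('libp2p/src/connection-manager/auto-dialler')

const autoDialler = new AutoDialler(libp2p, {
  enabled: true,           // keep proactively dialling known peers
  minConnections: 25,      // stop dialling once this many connections exist
  autoDialInterval: 10000  // how often (ms) to re-check the watermark
})

await autoDialler.start() // kicks off the periodic _autoDial loop
// ... node does its work ...
await autoDialler.stop()  // clears the pending retimer and halts the loop
```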
@ -12,7 +12,7 @@ const LatencyMonitor = require('./latency-monitor')
|
||||
const retimer = require('retimer')
|
||||
|
||||
const { EventEmitter } = require('events')
|
||||
|
||||
const trackedMap = require('../metrics/tracked-map')
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
const {
|
||||
@ -32,6 +32,10 @@ const defaultOptions = {
|
||||
defaultPeerValue: 1
|
||||
}
|
||||
|
||||
const METRICS_COMPONENT = 'connection-manager'
|
||||
const METRICS_PEER_CONNECTIONS = 'peer-connections'
|
||||
const METRICS_PEER_VALUES = 'peer-values'
|
||||
|
||||
/**
|
||||
* @typedef {import('../')} Libp2p
|
||||
* @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
|
||||
@ -83,25 +87,34 @@ class ConnectionManager extends EventEmitter {
|
||||
*
|
||||
* @type {Map<string, number>}
|
||||
*/
|
||||
this._peerValues = new Map()
|
||||
this._peerValues = trackedMap({
|
||||
component: METRICS_COMPONENT,
|
||||
metric: METRICS_PEER_VALUES,
|
||||
metrics: this._libp2p.metrics
|
||||
})
|
||||
|
||||
/**
|
||||
* Map of connections per peer
|
||||
*
|
||||
* @type {Map<string, Connection[]>}
|
||||
*/
|
||||
this.connections = new Map()
|
||||
this.connections = trackedMap({
|
||||
component: METRICS_COMPONENT,
|
||||
metric: METRICS_PEER_CONNECTIONS,
|
||||
metrics: this._libp2p.metrics
|
||||
})
|
||||
|
||||
this._started = false
|
||||
this._timer = null
|
||||
this._autoDialTimeout = null
|
||||
this._checkMetrics = this._checkMetrics.bind(this)
|
||||
this._autoDial = this._autoDial.bind(this)
|
||||
|
||||
this._latencyMonitor = new LatencyMonitor({
|
||||
latencyCheckIntervalMs: this._options.pollInterval,
|
||||
dataEmitIntervalMs: this._options.pollInterval
|
||||
})
|
||||
|
||||
// This emitter gets listened to a lot
|
||||
this.setMaxListeners(Infinity)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -128,8 +141,6 @@ class ConnectionManager extends EventEmitter {
|
||||
|
||||
this._started = true
|
||||
log('started')
|
||||
|
||||
this._options.autoDial && this._autoDial()
|
||||
}
|
||||
|
||||
/**
|
||||
@ -138,7 +149,6 @@ class ConnectionManager extends EventEmitter {
|
||||
* @async
|
||||
*/
|
||||
async stop () {
|
||||
this._autoDialTimeout && this._autoDialTimeout.clear()
|
||||
this._timer && this._timer.clear()
|
||||
|
||||
this._latencyMonitor.removeListener('data', this._onLatencyMeasure)
|
||||
@ -188,19 +198,22 @@ class ConnectionManager extends EventEmitter {
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
_checkMetrics () {
|
||||
async _checkMetrics () {
|
||||
if (this._libp2p.metrics) {
|
||||
const movingAverages = this._libp2p.metrics.global.movingAverages
|
||||
// @ts-ignore moving averages object types
|
||||
const received = movingAverages.dataReceived[this._options.movingAverageInterval].movingAverage()
|
||||
this._checkMaxLimit('maxReceivedData', received)
|
||||
// @ts-ignore moving averages object types
|
||||
const sent = movingAverages.dataSent[this._options.movingAverageInterval].movingAverage()
|
||||
this._checkMaxLimit('maxSentData', sent)
|
||||
const total = received + sent
|
||||
this._checkMaxLimit('maxData', total)
|
||||
log('metrics update', total)
|
||||
this._timer = retimer(this._checkMetrics, this._options.pollInterval)
|
||||
try {
|
||||
const movingAverages = this._libp2p.metrics.global.movingAverages
|
||||
// @ts-ignore moving averages object types
|
||||
const received = movingAverages.dataReceived[this._options.movingAverageInterval].movingAverage()
|
||||
await this._checkMaxLimit('maxReceivedData', received)
|
||||
// @ts-ignore moving averages object types
|
||||
const sent = movingAverages.dataSent[this._options.movingAverageInterval].movingAverage()
|
||||
await this._checkMaxLimit('maxSentData', sent)
|
||||
const total = received + sent
|
||||
await this._checkMaxLimit('maxData', total)
|
||||
log('metrics update', total)
|
||||
} finally {
|
||||
this._timer = retimer(this._checkMetrics, this._options.pollInterval)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -208,27 +221,33 @@ class ConnectionManager extends EventEmitter {
|
||||
* Tracks the incoming connection and check the connection limit
|
||||
*
|
||||
* @param {Connection} connection
|
||||
* @returns {void}
|
||||
*/
|
||||
onConnect (connection) {
|
||||
async onConnect (connection) {
|
||||
if (!this._started) {
|
||||
// This can happen when we are in the process of shutting down the node
|
||||
await connection.close()
|
||||
return
|
||||
}
|
||||
|
||||
const peerId = connection.remotePeer
|
||||
const peerIdStr = peerId.toB58String()
|
||||
const storedConn = this.connections.get(peerIdStr)
|
||||
|
||||
this.emit('peer:connect', connection)
|
||||
|
||||
if (storedConn) {
|
||||
storedConn.push(connection)
|
||||
} else {
|
||||
this.connections.set(peerIdStr, [connection])
|
||||
}
|
||||
|
||||
this._libp2p.peerStore.keyBook.set(peerId, peerId.pubKey)
|
||||
await this._libp2p.peerStore.keyBook.set(peerId, peerId.pubKey)
|
||||
|
||||
if (!this._peerValues.has(peerIdStr)) {
|
||||
this._peerValues.set(peerIdStr, this._options.defaultPeerValue)
|
||||
}
|
||||
|
||||
this._checkMaxLimit('maxConnections', this.size)
|
||||
await this._checkMaxLimit('maxConnections', this.size)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -238,6 +257,11 @@ class ConnectionManager extends EventEmitter {
|
||||
* @returns {void}
|
||||
*/
|
||||
onDisconnect (connection) {
|
||||
if (!this._started) {
|
||||
// This can happen when we are in the process of shutting down the node
|
||||
return
|
||||
}
|
||||
|
||||
const peerId = connection.remotePeer.toB58String()
|
||||
let storedConn = this.connections.get(peerId)
|
||||
|
||||
@ -248,6 +272,8 @@ class ConnectionManager extends EventEmitter {
|
||||
this.connections.delete(peerId)
|
||||
this._peerValues.delete(connection.remotePeer.toB58String())
|
||||
this.emit('peer:disconnect', connection)
|
||||
|
||||
this._libp2p.metrics && this._libp2p.metrics.onPeerDisconnected(connection.remotePeer)
|
||||
}
|
||||
}
|
||||
|
||||
@ -294,6 +320,9 @@ class ConnectionManager extends EventEmitter {
|
||||
*/
|
||||
_onLatencyMeasure (summary) {
|
||||
this._checkMaxLimit('maxEventLoopDelay', summary.avgMs)
|
||||
.catch(err => {
|
||||
log.error(err)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
@ -303,69 +332,22 @@ class ConnectionManager extends EventEmitter {
|
||||
* @param {string} name - The name of the field to check limits for
|
||||
* @param {number} value - The current value of the field
|
||||
*/
|
||||
_checkMaxLimit (name, value) {
|
||||
async _checkMaxLimit (name, value) {
|
||||
const limit = this._options[name]
|
||||
log('checking limit of %s. current value: %d of %d', name, value, limit)
|
||||
if (value > limit) {
|
||||
log('%s: limit exceeded: %s, %d', this._peerId, name, value)
|
||||
this._maybeDisconnectOne()
|
||||
await this._maybeDisconnectOne()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Proactively tries to connect to known peers stored in the PeerStore.
|
||||
* It will keep the number of connections below the upper limit and sort
|
||||
* the peers to connect based on whether we know their keys and protocols.
|
||||
*
|
||||
* @async
|
||||
* @private
|
||||
*/
|
||||
async _autoDial () {
|
||||
const minConnections = this._options.minConnections
|
||||
|
||||
// Already has enough connections
|
||||
if (this.size >= minConnections) {
|
||||
this._autoDialTimeout = retimer(this._autoDial, this._options.autoDialInterval)
|
||||
return
|
||||
}
|
||||
|
||||
// Sort peers on whether we know protocols of public keys for them
|
||||
const peers = Array.from(this._libp2p.peerStore.peers.values())
|
||||
.sort((a, b) => {
|
||||
if (b.protocols && b.protocols.length && (!a.protocols || !a.protocols.length)) {
|
||||
return 1
|
||||
} else if (b.id.pubKey && !a.id.pubKey) {
|
||||
return 1
|
||||
}
|
||||
return -1
|
||||
})
|
||||
|
||||
for (let i = 0; i < peers.length && this.size < minConnections; i++) {
|
||||
if (!this.get(peers[i].id)) {
|
||||
log('connecting to a peerStore stored peer %s', peers[i].id.toB58String())
|
||||
try {
|
||||
await this._libp2p.dialer.connectToPeer(peers[i].id)
|
||||
|
||||
// Connection Manager was stopped
|
||||
if (!this._started) {
|
||||
return
|
||||
}
|
||||
} catch (err) {
|
||||
log.error('could not connect to peerStore stored peer', err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this._autoDialTimeout = retimer(this._autoDial, this._options.autoDialInterval)
|
||||
}
|
||||
|
||||
/**
|
||||
* If we have more connections than our maximum, close a connection
|
||||
* to the lowest valued peer.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
_maybeDisconnectOne () {
|
||||
async _maybeDisconnectOne () {
|
||||
if (this._options.minConnections < this.connections.size) {
|
||||
const peerValues = Array.from(new Map([...this._peerValues.entries()].sort((a, b) => a[1] - b[1])))
|
||||
log('%s: sorted peer values: %j', this._peerId, peerValues)
|
||||
@ -376,7 +358,11 @@ class ConnectionManager extends EventEmitter {
|
||||
log('%s: closing a connection to %j', this._peerId, peerId)
|
||||
for (const connections of this.connections.values()) {
|
||||
if (connections[0].remotePeer.toB58String() === peerId) {
|
||||
connections[0].close()
|
||||
connections[0].close().catch(err => {
|
||||
log.error(err)
|
||||
})
|
||||
// TODO: should not need to invoke this manually
|
||||
this.onDisconnect(connections[0])
|
||||
break
|
||||
}
|
||||
}
|
||||
|
@ -8,9 +8,10 @@ const {
|
||||
requirePeers,
|
||||
maybeLimitSource
|
||||
} = require('./utils')
|
||||
|
||||
const drain = require('it-drain')
|
||||
const merge = require('it-merge')
|
||||
const { pipe } = require('it-pipe')
|
||||
const { DHTContentRouting } = require('../dht/dht-content-routing')
|
||||
|
||||
/**
|
||||
* @typedef {import('peer-id')} PeerId
|
||||
@ -38,7 +39,7 @@ class ContentRouting {
|
||||
|
||||
// If we have the dht, add it to the available content routers
|
||||
if (this.dht && libp2p._config.dht.enabled) {
|
||||
this.routers.push(this.dht)
|
||||
this.routers.push(new DHTContentRouting(this.dht))
|
||||
}
|
||||
}
|
||||
|
||||
@ -53,7 +54,7 @@ class ContentRouting {
|
||||
*/
|
||||
async * findProviders (key, options = {}) {
|
||||
if (!this.routers.length) {
|
||||
throw errCode(new Error('No content this.routers available'), 'NO_ROUTERS_AVAILABLE')
|
||||
throw errCode(new Error('No content this.routers available'), codes.ERR_NO_ROUTERS_AVAILABLE)
|
||||
}
|
||||
|
||||
yield * pipe(
|
||||
@ -76,7 +77,7 @@ class ContentRouting {
|
||||
*/
|
||||
async provide (key) {
|
||||
if (!this.routers.length) {
|
||||
throw errCode(new Error('No content routers available'), 'NO_ROUTERS_AVAILABLE')
|
||||
throw errCode(new Error('No content routers available'), codes.ERR_NO_ROUTERS_AVAILABLE)
|
||||
}
|
||||
|
||||
await Promise.all(this.routers.map((router) => router.provide(key)))
|
||||
@ -91,12 +92,12 @@ class ContentRouting {
|
||||
* @param {number} [options.minPeers] - minimum number of peers required to successfully put
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
put (key, value, options) {
|
||||
async put (key, value, options) {
|
||||
if (!this.libp2p.isStarted() || !this.dht.isStarted) {
|
||||
throw errCode(new Error(messages.NOT_STARTED_YET), codes.DHT_NOT_STARTED)
|
||||
}
|
||||
|
||||
return this.dht.put(key, value, options)
|
||||
await drain(this.dht.put(key, value, options))
|
||||
}
|
||||
|
||||
/**
|
||||
@ -108,12 +109,18 @@ class ContentRouting {
|
||||
* @param {number} [options.timeout] - optional timeout (default: 60000)
|
||||
* @returns {Promise<GetData>}
|
||||
*/
|
||||
get (key, options) {
|
||||
async get (key, options) {
|
||||
if (!this.libp2p.isStarted() || !this.dht.isStarted) {
|
||||
throw errCode(new Error(messages.NOT_STARTED_YET), codes.DHT_NOT_STARTED)
|
||||
}
|
||||
|
||||
return this.dht.get(key, options)
|
||||
for await (const event of this.dht.get(key, options)) {
|
||||
if (event.name === 'VALUE') {
|
||||
return { from: event.peerId, val: event.value }
|
||||
}
|
||||
}
|
||||
|
||||
throw errCode(new Error(messages.NOT_FOUND), codes.ERR_NOT_FOUND)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -123,14 +130,33 @@ class ContentRouting {
|
||||
* @param {number} nVals
|
||||
* @param {Object} [options] - get options
|
||||
* @param {number} [options.timeout] - optional timeout (default: 60000)
|
||||
* @returns {Promise<GetData[]>}
|
||||
*/
|
||||
async getMany (key, nVals, options) { // eslint-disable-line require-await
|
||||
async * getMany (key, nVals, options) { // eslint-disable-line require-await
|
||||
if (!this.libp2p.isStarted() || !this.dht.isStarted) {
|
||||
throw errCode(new Error(messages.NOT_STARTED_YET), codes.DHT_NOT_STARTED)
|
||||
}
|
||||
|
||||
return this.dht.getMany(key, nVals, options)
|
||||
if (!nVals) {
|
||||
return
|
||||
}
|
||||
|
||||
let gotValues = 0
|
||||
|
||||
for await (const event of this.dht.get(key, options)) {
|
||||
if (event.name === 'VALUE') {
|
||||
yield { from: event.peerId, val: event.value }
|
||||
|
||||
gotValues++
|
||||
|
||||
if (gotValues === nVals) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (gotValues === 0) {
|
||||
throw errCode(new Error(messages.NOT_FOUND), codes.ERR_NOT_FOUND)
|
||||
}
|
||||
}
|
||||
}
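With this change `get` resolves a single `{ from, val }` record and `getMany` becomes an async generator, so callers consume it with `for await`. A minimal sketch of the new calling convention, assuming the node exposes this class as `libp2p.contentRouting` and that `key` is a `Uint8Array` already stored in the DHT (both are assumptions made for illustration):

```js
async function readRecords (libp2p, key) {
  // single record: resolves { from: PeerId, val: Uint8Array } or throws ERR_NOT_FOUND
  const { from, val } = await libp2p.contentRouting.get(key)
  console.log('got %d bytes from %s', val.length, from.toB58String())

  // several records: an async generator that stops after nVals values
  for await (const record of libp2p.contentRouting.getMany(key, 2)) {
    console.log('record from', record.from.toB58String())
  }
}
```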
|
||||
|
||||
|
@ -14,12 +14,12 @@ const take = require('it-take')
|
||||
* Store the multiaddrs from every peer in the passed peer store
|
||||
*
|
||||
* @param {AsyncIterable<{ id: PeerId, multiaddrs: Multiaddr[] }>} source
|
||||
* @param {import('../peer-store')} peerStore
|
||||
* @param {import('../peer-store/types').PeerStore} peerStore
|
||||
*/
|
||||
function storeAddresses (source, peerStore) {
|
||||
return map(source, (peer) => {
|
||||
async function * storeAddresses (source, peerStore) {
|
||||
yield * map(source, async (peer) => {
|
||||
// ensure we have the addresses for a given peer
|
||||
peerStore.addressBook.add(peer.id, peer.multiaddrs)
|
||||
await peerStore.addressBook.add(peer.id, peer.multiaddrs)
|
||||
|
||||
return peer
|
||||
})
|
||||
|
44
src/dht/dht-content-routing.js
Normal file
@ -0,0 +1,44 @@
|
||||
'use strict'
|
||||
|
||||
const drain = require('it-drain')
|
||||
|
||||
/**
|
||||
* @typedef {import('peer-id')} PeerId
|
||||
* @typedef {import('libp2p-interfaces/src/content-routing/types').ContentRouting} ContentRoutingModule
|
||||
* @typedef {import('multiformats/cid').CID} CID
|
||||
*/
|
||||
|
||||
/**
|
||||
* Wrapper class to convert events into returned values
|
||||
*
|
||||
* @implements {ContentRoutingModule}
|
||||
*/
|
||||
class DHTContentRouting {
|
||||
/**
|
||||
* @param {import('libp2p-kad-dht').DHT} dht
|
||||
*/
|
||||
constructor (dht) {
|
||||
this._dht = dht
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {CID} cid
|
||||
*/
|
||||
async provide (cid) {
|
||||
await drain(this._dht.provide(cid))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {CID} cid
|
||||
* @param {*} options
|
||||
*/
|
||||
async * findProviders (cid, options) {
|
||||
for await (const event of this._dht.findProviders(cid, options)) {
|
||||
if (event.name === 'PROVIDER') {
|
||||
yield * event.providers
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { DHTContentRouting }
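The wrapper can be exercised on its own to see the event-to-value conversion. A minimal sketch, assuming `dht` is a started `libp2p-kad-dht` instance and `cid` is a CID the network knows about (both are assumptions):

```js
const { DHTContentRouting } = require('./src/dht/dht-content-routing')

async function example (dht, cid) {
  const router = new DHTContentRouting(dht)

  // drains the query events emitted by dht.provide() and resolves when done
  await router.provide(cid)

  // only PROVIDER events surface here, flattened into the providers they carry
  for await (const provider of router.findProviders(cid, {})) {
    console.log('found provider', provider)
  }
}
```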
|
51
src/dht/dht-peer-routing.js
Normal file
@ -0,0 +1,51 @@
|
||||
'use strict'
|
||||
|
||||
const errCode = require('err-code')
|
||||
const { messages, codes } = require('../errors')
|
||||
|
||||
/**
|
||||
* @typedef {import('peer-id')} PeerId
|
||||
* @typedef {import('libp2p-interfaces/src/peer-routing/types').PeerRouting} PeerRoutingModule
|
||||
*/
|
||||
|
||||
/**
|
||||
* Wrapper class to convert events into returned values
|
||||
*
|
||||
* @implements {PeerRoutingModule}
|
||||
*/
|
||||
class DHTPeerRouting {
|
||||
/**
|
||||
* @param {import('libp2p-kad-dht').DHT} dht
|
||||
*/
|
||||
constructor (dht) {
|
||||
this._dht = dht
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PeerId} peerId
|
||||
* @param {any} options
|
||||
*/
|
||||
async findPeer (peerId, options = {}) {
|
||||
for await (const event of this._dht.findPeer(peerId, options)) {
|
||||
if (event.name === 'FINAL_PEER') {
|
||||
return event.peer
|
||||
}
|
||||
}
|
||||
|
||||
throw errCode(new Error(messages.NOT_FOUND), codes.ERR_NOT_FOUND)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} key
|
||||
* @param {any} options
|
||||
*/
|
||||
async * getClosestPeers (key, options = {}) {
|
||||
for await (const event of this._dht.getClosestPeers(key, options)) {
|
||||
if (event.name === 'PEER_RESPONSE') {
|
||||
yield * event.closer
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { DHTPeerRouting }
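The peer-routing wrapper follows the same pattern for `FINAL_PEER` and `PEER_RESPONSE` events. A minimal sketch, assuming `dht` is a started `libp2p-kad-dht` instance and `peerId`/`key` are illustrative values:

```js
const { DHTPeerRouting } = require('./src/dht/dht-peer-routing')

async function example (dht, peerId, key) {
  const routing = new DHTPeerRouting(dht)

  // resolves the FINAL_PEER event, or throws ERR_NOT_FOUND if the query ends without one
  const peer = await routing.findPeer(peerId)
  console.log('found peer', peer)

  // PEER_RESPONSE events are flattened into the closer peers they report
  for await (const closer of routing.getClosestPeers(key)) {
    console.log('closer peer', closer)
  }
}
```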
|
@ -1,11 +1,13 @@
|
||||
'use strict'
|
||||
|
||||
const errCode = require('err-code')
|
||||
const AbortController = require('abort-controller').default
|
||||
const { anySignal } = require('any-signal')
|
||||
// @ts-ignore p-fifo does not export types
|
||||
const FIFO = require('p-fifo')
|
||||
const pAny = require('p-any')
|
||||
// @ts-expect-error setMaxListeners is missing from the types
|
||||
const { setMaxListeners } = require('events')
|
||||
const { codes } = require('../errors')
|
||||
|
||||
/**
|
||||
* @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
|
||||
@ -54,12 +56,20 @@ class DialRequest {
|
||||
const tokens = this.dialer.getTokens(this.addrs.length)
|
||||
// If no tokens are available, throw
|
||||
if (tokens.length < 1) {
|
||||
throw errCode(new Error('No dial tokens available'), 'ERR_NO_DIAL_TOKENS')
|
||||
throw errCode(new Error('No dial tokens available'), codes.ERR_NO_DIAL_TOKENS)
|
||||
}
|
||||
|
||||
const tokenHolder = new FIFO()
|
||||
tokens.forEach(token => tokenHolder.push(token))
|
||||
const dialAbortControllers = this.addrs.map(() => new AbortController())
|
||||
const dialAbortControllers = this.addrs.map(() => {
|
||||
const controller = new AbortController()
|
||||
try {
|
||||
// fails on node < 15.4
|
||||
setMaxListeners && setMaxListeners(Infinity, controller.signal)
|
||||
} catch {}
|
||||
|
||||
return controller
|
||||
})
|
||||
let completedDials = 0
|
||||
|
||||
try {
|
||||
|
@ -1,20 +1,23 @@
|
||||
'use strict'
|
||||
|
||||
const debug = require('debug')
|
||||
const all = require('it-all')
|
||||
const filter = require('it-filter')
|
||||
const { pipe } = require('it-pipe')
|
||||
const log = Object.assign(debug('libp2p:dialer'), {
|
||||
error: debug('libp2p:dialer:err')
|
||||
})
|
||||
const errCode = require('err-code')
|
||||
const { Multiaddr } = require('multiaddr')
|
||||
// @ts-ignore timeout-abourt-controles does not export types
|
||||
const TimeoutController = require('timeout-abort-controller')
|
||||
const { TimeoutController } = require('timeout-abort-controller')
|
||||
const { AbortError } = require('abortable-iterator')
|
||||
const { anySignal } = require('any-signal')
|
||||
|
||||
// @ts-expect-error setMaxListeners is missing from the types
|
||||
const { setMaxListeners } = require('events')
|
||||
const DialRequest = require('./dial-request')
|
||||
const { publicAddressesFirst } = require('libp2p-utils/src/address-sort')
|
||||
const getPeer = require('../get-peer')
|
||||
|
||||
const trackedMap = require('../metrics/tracked-map')
|
||||
const { codes } = require('../errors')
|
||||
const {
|
||||
DIAL_TIMEOUT,
|
||||
@ -23,18 +26,24 @@ const {
|
||||
MAX_ADDRS_TO_DIAL
|
||||
} = require('../constants')
|
||||
|
||||
const METRICS_COMPONENT = 'dialler'
|
||||
const METRICS_PENDING_DIALS = 'pending-dials'
|
||||
const METRICS_PENDING_DIAL_TARGETS = 'pending-dial-targets'
|
||||
|
||||
/**
|
||||
* @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
|
||||
* @typedef {import('peer-id')} PeerId
|
||||
* @typedef {import('../peer-store')} PeerStore
|
||||
* @typedef {import('../peer-store/address-book').Address} Address
|
||||
* @typedef {import('../peer-store/types').PeerStore} PeerStore
|
||||
* @typedef {import('../peer-store/types').Address} Address
|
||||
* @typedef {import('../transport-manager')} TransportManager
|
||||
* @typedef {import('../types').ConnectionGater} ConnectionGater
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} DialerProperties
|
||||
* @property {PeerStore} peerStore
|
||||
* @property {TransportManager} transportManager
|
||||
* @property {ConnectionGater} connectionGater
|
||||
*
|
||||
* @typedef {(addr:Multiaddr) => Promise<string[]>} Resolver
|
||||
*
|
||||
@ -45,14 +54,15 @@ const {
|
||||
* @property {number} [maxDialsPerPeer = MAX_PER_PEER_DIALS] - Number of max concurrent dials per peer.
|
||||
* @property {number} [dialTimeout = DIAL_TIMEOUT] - How long a dial attempt is allowed to take.
|
||||
* @property {Record<string, Resolver>} [resolvers = {}] - multiaddr resolvers to use when dialing
|
||||
* @property {import('../metrics')} [metrics]
|
||||
*
|
||||
* @typedef DialTarget
|
||||
* @property {string} id
|
||||
* @property {Multiaddr[]} addrs
|
||||
*
|
||||
* @typedef PendingDial
|
||||
* @property {DialRequest} dialRequest
|
||||
* @property {TimeoutController} controller
|
||||
* @property {import('./dial-request')} dialRequest
|
||||
* @property {import('timeout-abort-controller').TimeoutController} controller
|
||||
* @property {Promise<Connection>} promise
|
||||
* @property {function():void} destroy
|
||||
*/
|
||||
@ -65,13 +75,16 @@ class Dialer {
|
||||
constructor ({
|
||||
transportManager,
|
||||
peerStore,
|
||||
connectionGater,
|
||||
addressSorter = publicAddressesFirst,
|
||||
maxParallelDials = MAX_PARALLEL_DIALS,
|
||||
maxAddrsToDial = MAX_ADDRS_TO_DIAL,
|
||||
dialTimeout = DIAL_TIMEOUT,
|
||||
maxDialsPerPeer = MAX_PER_PEER_DIALS,
|
||||
resolvers = {}
|
||||
resolvers = {},
|
||||
metrics
|
||||
}) {
|
||||
this.connectionGater = connectionGater
|
||||
this.transportManager = transportManager
|
||||
this.peerStore = peerStore
|
||||
this.addressSorter = addressSorter
|
||||
@ -80,8 +93,20 @@ class Dialer {
|
||||
this.timeout = dialTimeout
|
||||
this.maxDialsPerPeer = maxDialsPerPeer
|
||||
this.tokens = [...new Array(maxParallelDials)].map((_, index) => index)
|
||||
this._pendingDials = new Map()
|
||||
this._pendingDialTargets = new Map()
|
||||
|
||||
/** @type {Map<string, PendingDial>} */
|
||||
this._pendingDials = trackedMap({
|
||||
component: METRICS_COMPONENT,
|
||||
metric: METRICS_PENDING_DIALS,
|
||||
metrics
|
||||
})
|
||||
|
||||
/** @type {Map<string, { resolve: (value: any) => void, reject: (err: Error) => void}>} */
|
||||
this._pendingDialTargets = trackedMap({
|
||||
component: METRICS_COMPONENT,
|
||||
metric: METRICS_PENDING_DIAL_TARGETS,
|
||||
metrics
|
||||
})
|
||||
|
||||
for (const [key, value] of Object.entries(resolvers)) {
|
||||
Multiaddr.resolvers.set(key, value)
|
||||
@ -95,7 +120,7 @@ class Dialer {
|
||||
for (const dial of this._pendingDials.values()) {
|
||||
try {
|
||||
dial.controller.abort()
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error(err)
|
||||
}
|
||||
}
|
||||
@ -118,6 +143,12 @@ class Dialer {
|
||||
* @returns {Promise<Connection>}
|
||||
*/
|
||||
async connectToPeer (peer, options = {}) {
|
||||
const { id } = getPeer(peer)
|
||||
|
||||
if (await this.connectionGater.denyDialPeer(id)) {
|
||||
throw errCode(new Error('The dial request is blocked by gater.allowDialPeer'), codes.ERR_PEER_DIAL_INTERCEPTED)
|
||||
}
|
||||
|
||||
const dialTarget = await this._createCancellableDialTarget(peer)
|
||||
|
||||
if (!dialTarget.addrs.length) {
|
||||
@ -129,7 +160,7 @@ class Dialer {
|
||||
const connection = await pendingDial.promise
|
||||
log('dial succeeded to %s', dialTarget.id)
|
||||
return connection
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
// Error is a timeout
|
||||
if (pendingDial.controller.signal.aborted) {
|
||||
err.code = codes.ERR_TIMEOUT
|
||||
@ -156,14 +187,16 @@ class Dialer {
|
||||
this._pendingDialTargets.set(id, { resolve, reject })
|
||||
})
|
||||
|
||||
const dialTarget = await Promise.race([
|
||||
this._createDialTarget(peer),
|
||||
cancellablePromise
|
||||
])
|
||||
try {
|
||||
const dialTarget = await Promise.race([
|
||||
this._createDialTarget(peer),
|
||||
cancellablePromise
|
||||
])
|
||||
|
||||
this._pendingDialTargets.delete(id)
|
||||
|
||||
return dialTarget
|
||||
return dialTarget
|
||||
} finally {
|
||||
this._pendingDialTargets.delete(id)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@ -180,10 +213,16 @@ class Dialer {
|
||||
const { id, multiaddrs } = getPeer(peer)
|
||||
|
||||
if (multiaddrs) {
|
||||
this.peerStore.addressBook.add(id, multiaddrs)
|
||||
await this.peerStore.addressBook.add(id, multiaddrs)
|
||||
}
|
||||
|
||||
let knownAddrs = this.peerStore.addressBook.getMultiaddrsForPeer(id, this.addressSorter) || []
|
||||
let knownAddrs = await pipe(
|
||||
await this.peerStore.addressBook.getMultiaddrsForPeer(id, this.addressSorter),
|
||||
(source) => filter(source, async (multiaddr) => {
|
||||
return !(await this.connectionGater.denyDialMultiaddr(id, multiaddr))
|
||||
}),
|
||||
(source) => all(source)
|
||||
)
|
||||
|
||||
// If received a multiaddr to dial, it should be the first to use
|
||||
// But, if we know other multiaddrs for the peer, we should try them too.
|
||||
@ -203,7 +242,7 @@ class Dialer {
|
||||
const supportedAddrs = addrs.filter(a => this.transportManager.transportForMultiaddr(a))
|
||||
|
||||
if (supportedAddrs.length > this.maxAddrsToDial) {
|
||||
this.peerStore.delete(id)
|
||||
await this.peerStore.delete(id)
|
||||
throw errCode(new Error('dial with more addresses than allowed'), codes.ERR_TOO_MANY_ADDRESSES)
|
||||
}
|
||||
|
||||
@ -240,10 +279,18 @@ class Dialer {
|
||||
|
||||
// Combine the timeout signal and options.signal, if provided
|
||||
const timeoutController = new TimeoutController(this.timeout)
|
||||
|
||||
const signals = [timeoutController.signal]
|
||||
options.signal && signals.push(options.signal)
|
||||
const signal = anySignal(signals)
|
||||
|
||||
// this signal will potentially be used while dialing lots of
|
||||
// peers so prevent MaxListenersExceededWarning appearing in the console
|
||||
try {
|
||||
// fails on node < 15.4
|
||||
setMaxListeners && setMaxListeners(Infinity, signal)
|
||||
} catch {}
|
||||
|
||||
const pendingDial = {
|
||||
dialRequest,
|
||||
controller: timeoutController,
|
||||
@ -254,6 +301,7 @@ class Dialer {
|
||||
}
|
||||
}
|
||||
this._pendingDials.set(dialTarget.id, pendingDial)
|
||||
|
||||
return pendingDial
|
||||
}
|
||||
|
||||
|
@ -3,7 +3,8 @@
|
||||
exports.messages = {
|
||||
NOT_STARTED_YET: 'The libp2p node is not started yet',
|
||||
DHT_DISABLED: 'DHT is not available',
|
||||
CONN_ENCRYPTION_REQUIRED: 'At least one connection encryption module is required'
|
||||
CONN_ENCRYPTION_REQUIRED: 'At least one connection encryption module is required',
|
||||
NOT_FOUND: 'Not found'
|
||||
}
|
||||
|
||||
exports.codes = {
|
||||
@ -11,6 +12,8 @@ exports.codes = {
|
||||
PUBSUB_NOT_STARTED: 'ERR_PUBSUB_NOT_STARTED',
|
||||
DHT_NOT_STARTED: 'ERR_DHT_NOT_STARTED',
|
||||
CONN_ENCRYPTION_REQUIRED: 'ERR_CONN_ENCRYPTION_REQUIRED',
|
||||
ERR_PEER_DIAL_INTERCEPTED: 'ERR_PEER_DIAL_INTERCEPTED',
|
||||
ERR_CONNECTION_INTERCEPTED: 'ERR_CONNECTION_INTERCEPTED',
|
||||
ERR_INVALID_PROTOCOLS_FOR_STREAM: 'ERR_INVALID_PROTOCOLS_FOR_STREAM',
|
||||
ERR_CONNECTION_ENDED: 'ERR_CONNECTION_ENDED',
|
||||
ERR_CONNECTION_FAILED: 'ERR_CONNECTION_FAILED',
|
||||
@ -29,10 +32,35 @@ exports.codes = {
|
||||
ERR_INVALID_PARAMETERS: 'ERR_INVALID_PARAMETERS',
|
||||
ERR_INVALID_PEER: 'ERR_INVALID_PEER',
|
||||
ERR_MUXER_UNAVAILABLE: 'ERR_MUXER_UNAVAILABLE',
|
||||
ERR_NOT_FOUND: 'ERR_NOT_FOUND',
|
||||
ERR_TIMEOUT: 'ERR_TIMEOUT',
|
||||
ERR_TRANSPORT_UNAVAILABLE: 'ERR_TRANSPORT_UNAVAILABLE',
|
||||
ERR_TRANSPORT_DIAL_FAILED: 'ERR_TRANSPORT_DIAL_FAILED',
|
||||
ERR_UNSUPPORTED_PROTOCOL: 'ERR_UNSUPPORTED_PROTOCOL',
|
||||
ERR_INVALID_MULTIADDR: 'ERR_INVALID_MULTIADDR',
|
||||
ERR_SIGNATURE_NOT_VALID: 'ERR_SIGNATURE_NOT_VALID'
|
||||
ERR_SIGNATURE_NOT_VALID: 'ERR_SIGNATURE_NOT_VALID',
|
||||
ERR_FIND_SELF: 'ERR_FIND_SELF',
|
||||
ERR_NO_ROUTERS_AVAILABLE: 'ERR_NO_ROUTERS_AVAILABLE',
|
||||
ERR_CONNECTION_NOT_MULTIPLEXED: 'ERR_CONNECTION_NOT_MULTIPLEXED',
|
||||
ERR_NO_DIAL_TOKENS: 'ERR_NO_DIAL_TOKENS',
|
||||
ERR_KEYCHAIN_REQUIRED: 'ERR_KEYCHAIN_REQUIRED',
|
||||
ERR_INVALID_CMS: 'ERR_INVALID_CMS',
|
||||
ERR_MISSING_KEYS: 'ERR_MISSING_KEYS',
|
||||
ERR_NO_KEY: 'ERR_NO_KEY',
|
||||
ERR_INVALID_KEY_NAME: 'ERR_INVALID_KEY_NAME',
|
||||
ERR_INVALID_KEY_TYPE: 'ERR_INVALID_KEY_TYPE',
|
||||
ERR_KEY_ALREADY_EXISTS: 'ERR_KEY_ALREADY_EXISTS',
|
||||
ERR_INVALID_KEY_SIZE: 'ERR_INVALID_KEY_SIZE',
|
||||
ERR_KEY_NOT_FOUND: 'ERR_KEY_NOT_FOUND',
|
||||
ERR_OLD_KEY_NAME_INVALID: 'ERR_OLD_KEY_NAME_INVALID',
|
||||
ERR_NEW_KEY_NAME_INVALID: 'ERR_NEW_KEY_NAME_INVALID',
|
||||
ERR_PASSWORD_REQUIRED: 'ERR_PASSWORD_REQUIRED',
|
||||
ERR_PEM_REQUIRED: 'ERR_PEM_REQUIRED',
|
||||
ERR_CANNOT_READ_KEY: 'ERR_CANNOT_READ_KEY',
|
||||
ERR_MISSING_PRIVATE_KEY: 'ERR_MISSING_PRIVATE_KEY',
|
||||
ERR_INVALID_OLD_PASS_TYPE: 'ERR_INVALID_OLD_PASS_TYPE',
|
||||
ERR_INVALID_NEW_PASS_TYPE: 'ERR_INVALID_NEW_PASS_TYPE',
|
||||
ERR_INVALID_PASS_LENGTH: 'ERR_INVALID_PASS_LENGTH',
|
||||
ERR_NOT_IMPLEMENTED: 'ERR_NOT_IMPLEMENTED',
|
||||
ERR_WRONG_PING_ACK: 'ERR_WRONG_PING_ACK'
|
||||
}
|
||||
|
36
src/fetch/README.md
Normal file
@ -0,0 +1,36 @@
|
||||
libp2p-fetch JavaScript Implementation
|
||||
=====================================
|
||||
|
||||
> Libp2p fetch protocol JavaScript implementation
|
||||
|
||||
## Overview
|
||||
|
||||
An implementation of the Fetch protocol as described here: https://github.com/libp2p/specs/tree/master/fetch
|
||||
|
||||
The fetch protocol is a simple protocol for requesting a value corresponding to a key from a peer.
|
||||
|
||||
## Usage
|
||||
|
||||
```javascript
|
||||
const Libp2p = require('libp2p')
|
||||
|
||||
/**
|
||||
* Given a key (as a string) returns a value (as a Uint8Array), or null if the key isn't found.
|
||||
* All keys must be prefixed by the same prefix, which will be used to find the appropriate key
|
||||
* lookup function.
|
||||
* @param key - a string
|
||||
* @returns value - a Uint8Array value that corresponds to the given key, or null if the key doesn't
|
||||
* have a corresponding value.
|
||||
*/
|
||||
async function my_subsystem_key_lookup(key) {
|
||||
// app specific callback to lookup key-value pairs.
|
||||
}
|
||||
|
||||
// Enable this peer to respond to fetch requests for keys that begin with '/my_subsystem_key_prefix/'
|
||||
const libp2p = await Libp2p.create(...)
|
||||
libp2p.fetchService.registerLookupFunction('/my_subsystem_key_prefix/', my_subsystem_key_lookup)
|
||||
|
||||
const key = '/my_subsystem_key_prefix/{...}'
|
||||
const peerDst = PeerId.parse('Qmfoo...') // or Multiaddr instance
|
||||
const value = await libp2p.fetch(peerDst, key)
|
||||
```
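The lookup callback itself can be anything that maps a key string to a `Uint8Array`, returning `null` when the key is unknown. A minimal sketch backed by an in-memory `Map` (the map and its contents are purely illustrative):

```javascript
const values = new Map([
  ['/my_subsystem_key_prefix/hello', new TextEncoder().encode('world')]
])

async function my_subsystem_key_lookup (key) {
  // return null (not undefined) when there is no value for the key
  return values.get(key) || null
}
```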
|
6
src/fetch/constants.js
Normal file
@ -0,0 +1,6 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = {
|
||||
// https://github.com/libp2p/specs/tree/master/fetch#wire-protocol
|
||||
PROTOCOL: '/libp2p/fetch/0.0.1'
|
||||
}
|
159
src/fetch/index.js
Normal file
@ -0,0 +1,159 @@
|
||||
'use strict'
|
||||
|
||||
const debug = require('debug')
|
||||
const log = Object.assign(debug('libp2p:fetch'), {
|
||||
error: debug('libp2p:fetch:err')
|
||||
})
|
||||
const errCode = require('err-code')
|
||||
const { codes } = require('../errors')
|
||||
const lp = require('it-length-prefixed')
|
||||
const { FetchRequest, FetchResponse } = require('./proto')
|
||||
// @ts-ignore it-handshake does not export types
|
||||
const handshake = require('it-handshake')
|
||||
const { PROTOCOL } = require('./constants')
|
||||
|
||||
/**
|
||||
* @typedef {import('../')} Libp2p
|
||||
* @typedef {import('multiaddr').Multiaddr} Multiaddr
|
||||
* @typedef {import('peer-id')} PeerId
|
||||
* @typedef {import('libp2p-interfaces/src/stream-muxer/types').MuxedStream} MuxedStream
|
||||
* @typedef {(key: string) => Promise<Uint8Array | null>} LookupFunction
|
||||
*/
|
||||
|
||||
/**
|
||||
* A simple libp2p protocol for requesting a value corresponding to a key from a peer.
|
||||
* Developers can register one or more lookup functions for retrieving the value corresponding to
|
||||
* a given key. Each lookup function must act on a distinct part of the overall key space, defined
|
||||
* by a fixed prefix that all keys that should be routed to that lookup function will start with.
|
||||
*/
|
||||
class FetchProtocol {
|
||||
/**
|
||||
* @param {Libp2p} libp2p
|
||||
*/
|
||||
constructor (libp2p) {
|
||||
this._lookupFunctions = new Map() // Maps key prefix to value lookup function
|
||||
this._libp2p = libp2p
|
||||
this.handleMessage = this.handleMessage.bind(this)
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends a request to fetch the value associated with the given key from the given peer.
|
||||
*
|
||||
* @param {PeerId|Multiaddr} peer
|
||||
* @param {string} key
|
||||
* @returns {Promise<Uint8Array | null>}
|
||||
*/
|
||||
async fetch (peer, key) {
|
||||
// @ts-ignore multiaddr might not have toB58String
|
||||
log('dialing %s to %s', FetchProtocol.PROTOCOL, peer.toB58String ? peer.toB58String() : peer)
|
||||
|
||||
const connection = await this._libp2p.dial(peer)
|
||||
const { stream } = await connection.newStream(FetchProtocol.PROTOCOL)
|
||||
const shake = handshake(stream)
|
||||
|
||||
// send message
|
||||
const request = new FetchRequest({ identifier: key })
|
||||
shake.write(lp.encode.single(FetchRequest.encode(request).finish()))
|
||||
|
||||
// read response
|
||||
const response = FetchResponse.decode((await lp.decode.fromReader(shake.reader).next()).value.slice())
|
||||
switch (response.status) {
|
||||
case (FetchResponse.StatusCode.OK): {
|
||||
return response.data
|
||||
}
|
||||
case (FetchResponse.StatusCode.NOT_FOUND): {
|
||||
return null
|
||||
}
|
||||
case (FetchResponse.StatusCode.ERROR): {
|
||||
const errmsg = (new TextDecoder()).decode(response.data)
|
||||
throw errCode(new Error('Error in fetch protocol response: ' + errmsg), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
default: {
|
||||
throw errCode(new Error('Unknown response status'), codes.ERR_INVALID_MESSAGE)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Invoked when a fetch request is received. Reads the request message off the given stream and
|
||||
* responds based on looking up the key in the request via the lookup callback that corresponds
|
||||
* to the key's prefix.
|
||||
*
|
||||
* @param {object} options
|
||||
* @param {MuxedStream} options.stream
|
||||
* @param {string} options.protocol
|
||||
*/
|
||||
async handleMessage (options) {
|
||||
const { stream } = options
|
||||
const shake = handshake(stream)
|
||||
const request = FetchRequest.decode((await lp.decode.fromReader(shake.reader).next()).value.slice())
|
||||
|
||||
let response
|
||||
const lookup = this._getLookupFunction(request.identifier)
|
||||
if (lookup) {
|
||||
const data = await lookup(request.identifier)
|
||||
if (data) {
|
||||
response = new FetchResponse({ status: FetchResponse.StatusCode.OK, data })
|
||||
} else {
|
||||
response = new FetchResponse({ status: FetchResponse.StatusCode.NOT_FOUND })
|
||||
}
|
||||
} else {
|
||||
const errmsg = (new TextEncoder()).encode('No lookup function registered for key: ' + request.identifier)
|
||||
response = new FetchResponse({ status: FetchResponse.StatusCode.ERROR, data: errmsg })
|
||||
}
|
||||
|
||||
shake.write(lp.encode.single(FetchResponse.encode(response).finish()))
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a key, finds the appropriate function for looking up its corresponding value, based on
|
||||
* the key's prefix.
|
||||
*
|
||||
* @param {string} key
|
||||
*/
|
||||
_getLookupFunction (key) {
|
||||
for (const prefix of this._lookupFunctions.keys()) {
|
||||
if (key.startsWith(prefix)) {
|
||||
return this._lookupFunctions.get(prefix)
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a new lookup callback that can map keys to values, for a given set of keys that
|
||||
* share the same prefix.
|
||||
*
|
||||
* @param {string} prefix
|
||||
* @param {LookupFunction} lookup
|
||||
*/
|
||||
registerLookupFunction (prefix, lookup) {
|
||||
if (this._lookupFunctions.has(prefix)) {
|
||||
throw errCode(new Error("Fetch protocol handler for key prefix '" + prefix + "' already registered"), codes.ERR_KEY_ALREADY_EXISTS)
|
||||
}
|
||||
this._lookupFunctions.set(prefix, lookup)
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a new lookup callback that can map keys to values, for a given set of keys that
|
||||
* share the same prefix.
|
||||
*
|
||||
* @param {string} prefix
|
||||
* @param {LookupFunction} [lookup]
|
||||
*/
|
||||
unregisterLookupFunction (prefix, lookup) {
|
||||
if (lookup != null) {
|
||||
const existingLookup = this._lookupFunctions.get(prefix)
|
||||
|
||||
if (existingLookup !== lookup) {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
this._lookupFunctions.delete(prefix)
|
||||
}
|
||||
}
|
||||
|
||||
FetchProtocol.PROTOCOL = PROTOCOL
|
||||
|
||||
exports = module.exports = FetchProtocol
|
134
src/fetch/proto.d.ts
vendored
Normal file
@ -0,0 +1,134 @@
|
||||
import * as $protobuf from "protobufjs";
|
||||
/** Properties of a FetchRequest. */
|
||||
export interface IFetchRequest {
|
||||
|
||||
/** FetchRequest identifier */
|
||||
identifier?: (string|null);
|
||||
}
|
||||
|
||||
/** Represents a FetchRequest. */
|
||||
export class FetchRequest implements IFetchRequest {
|
||||
|
||||
/**
|
||||
* Constructs a new FetchRequest.
|
||||
* @param [p] Properties to set
|
||||
*/
|
||||
constructor(p?: IFetchRequest);
|
||||
|
||||
/** FetchRequest identifier. */
|
||||
public identifier: string;
|
||||
|
||||
/**
|
||||
* Encodes the specified FetchRequest message. Does not implicitly {@link FetchRequest.verify|verify} messages.
|
||||
* @param m FetchRequest message or plain object to encode
|
||||
* @param [w] Writer to encode to
|
||||
* @returns Writer
|
||||
*/
|
||||
public static encode(m: IFetchRequest, w?: $protobuf.Writer): $protobuf.Writer;
|
||||
|
||||
/**
|
||||
* Decodes a FetchRequest message from the specified reader or buffer.
|
||||
* @param r Reader or buffer to decode from
|
||||
* @param [l] Message length if known beforehand
|
||||
* @returns FetchRequest
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): FetchRequest;
|
||||
|
||||
/**
|
||||
* Creates a FetchRequest message from a plain object. Also converts values to their respective internal types.
|
||||
* @param d Plain object
|
||||
* @returns FetchRequest
|
||||
*/
|
||||
public static fromObject(d: { [k: string]: any }): FetchRequest;
|
||||
|
||||
/**
|
||||
* Creates a plain object from a FetchRequest message. Also converts values to other types if specified.
|
||||
* @param m FetchRequest
|
||||
* @param [o] Conversion options
|
||||
* @returns Plain object
|
||||
*/
|
||||
public static toObject(m: FetchRequest, o?: $protobuf.IConversionOptions): { [k: string]: any };
|
||||
|
||||
/**
|
||||
* Converts this FetchRequest to JSON.
|
||||
* @returns JSON object
|
||||
*/
|
||||
public toJSON(): { [k: string]: any };
|
||||
}
|
||||
|
||||
/** Properties of a FetchResponse. */
|
||||
export interface IFetchResponse {
|
||||
|
||||
/** FetchResponse status */
|
||||
status?: (FetchResponse.StatusCode|null);
|
||||
|
||||
/** FetchResponse data */
|
||||
data?: (Uint8Array|null);
|
||||
}
|
||||
|
||||
/** Represents a FetchResponse. */
|
||||
export class FetchResponse implements IFetchResponse {
|
||||
|
||||
/**
|
||||
* Constructs a new FetchResponse.
|
||||
* @param [p] Properties to set
|
||||
*/
|
||||
constructor(p?: IFetchResponse);
|
||||
|
||||
/** FetchResponse status. */
|
||||
public status: FetchResponse.StatusCode;
|
||||
|
||||
/** FetchResponse data. */
|
||||
public data: Uint8Array;
|
||||
|
||||
/**
|
||||
* Encodes the specified FetchResponse message. Does not implicitly {@link FetchResponse.verify|verify} messages.
|
||||
* @param m FetchResponse message or plain object to encode
|
||||
* @param [w] Writer to encode to
|
||||
* @returns Writer
|
||||
*/
|
||||
public static encode(m: IFetchResponse, w?: $protobuf.Writer): $protobuf.Writer;
|
||||
|
||||
/**
|
||||
* Decodes a FetchResponse message from the specified reader or buffer.
|
||||
* @param r Reader or buffer to decode from
|
||||
* @param [l] Message length if known beforehand
|
||||
* @returns FetchResponse
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): FetchResponse;
|
||||
|
||||
/**
|
||||
* Creates a FetchResponse message from a plain object. Also converts values to their respective internal types.
|
||||
* @param d Plain object
|
||||
* @returns FetchResponse
|
||||
*/
|
||||
public static fromObject(d: { [k: string]: any }): FetchResponse;
|
||||
|
||||
/**
|
||||
* Creates a plain object from a FetchResponse message. Also converts values to other types if specified.
|
||||
* @param m FetchResponse
|
||||
* @param [o] Conversion options
|
||||
* @returns Plain object
|
||||
*/
|
||||
public static toObject(m: FetchResponse, o?: $protobuf.IConversionOptions): { [k: string]: any };
|
||||
|
||||
/**
|
||||
* Converts this FetchResponse to JSON.
|
||||
* @returns JSON object
|
||||
*/
|
||||
public toJSON(): { [k: string]: any };
|
||||
}
|
||||
|
||||
export namespace FetchResponse {
|
||||
|
||||
/** StatusCode enum. */
|
||||
enum StatusCode {
|
||||
OK = 0,
|
||||
NOT_FOUND = 1,
|
||||
ERROR = 2
|
||||
}
|
||||
}
|
333
src/fetch/proto.js
Normal file
@ -0,0 +1,333 @@
|
||||
/*eslint-disable*/
|
||||
"use strict";
|
||||
|
||||
var $protobuf = require("protobufjs/minimal");
|
||||
|
||||
// Common aliases
|
||||
var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
|
||||
|
||||
// Exported root namespace
|
||||
var $root = $protobuf.roots["libp2p-fetch"] || ($protobuf.roots["libp2p-fetch"] = {});
|
||||
|
||||
$root.FetchRequest = (function() {
|
||||
|
||||
/**
|
||||
* Properties of a FetchRequest.
|
||||
* @exports IFetchRequest
|
||||
* @interface IFetchRequest
|
||||
* @property {string|null} [identifier] FetchRequest identifier
|
||||
*/
|
||||
|
||||
/**
|
||||
* Constructs a new FetchRequest.
|
||||
* @exports FetchRequest
|
||||
* @classdesc Represents a FetchRequest.
|
||||
* @implements IFetchRequest
|
||||
* @constructor
|
||||
* @param {IFetchRequest=} [p] Properties to set
|
||||
*/
|
||||
function FetchRequest(p) {
|
||||
if (p)
|
||||
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
||||
if (p[ks[i]] != null)
|
||||
this[ks[i]] = p[ks[i]];
|
||||
}
|
||||
|
||||
/**
|
||||
* FetchRequest identifier.
|
||||
* @member {string} identifier
|
||||
* @memberof FetchRequest
|
||||
* @instance
|
||||
*/
|
||||
FetchRequest.prototype.identifier = "";
|
||||
|
||||
/**
|
||||
* Encodes the specified FetchRequest message. Does not implicitly {@link FetchRequest.verify|verify} messages.
|
||||
* @function encode
|
||||
* @memberof FetchRequest
|
||||
* @static
|
||||
* @param {IFetchRequest} m FetchRequest message or plain object to encode
|
||||
* @param {$protobuf.Writer} [w] Writer to encode to
|
||||
* @returns {$protobuf.Writer} Writer
|
||||
*/
|
||||
FetchRequest.encode = function encode(m, w) {
|
||||
if (!w)
|
||||
w = $Writer.create();
|
||||
if (m.identifier != null && Object.hasOwnProperty.call(m, "identifier"))
|
||||
w.uint32(10).string(m.identifier);
|
||||
return w;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes a FetchRequest message from the specified reader or buffer.
|
||||
* @function decode
|
||||
* @memberof FetchRequest
|
||||
* @static
|
||||
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
||||
* @param {number} [l] Message length if known beforehand
|
||||
* @returns {FetchRequest} FetchRequest
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
FetchRequest.decode = function decode(r, l) {
|
||||
if (!(r instanceof $Reader))
|
||||
r = $Reader.create(r);
|
||||
var c = l === undefined ? r.len : r.pos + l, m = new $root.FetchRequest();
|
||||
while (r.pos < c) {
|
||||
var t = r.uint32();
|
||||
switch (t >>> 3) {
|
||||
case 1:
|
||||
m.identifier = r.string();
|
||||
break;
|
||||
default:
|
||||
r.skipType(t & 7);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a FetchRequest message from a plain object. Also converts values to their respective internal types.
|
||||
* @function fromObject
|
||||
* @memberof FetchRequest
|
||||
* @static
|
||||
* @param {Object.<string,*>} d Plain object
|
||||
* @returns {FetchRequest} FetchRequest
|
||||
*/
|
||||
FetchRequest.fromObject = function fromObject(d) {
|
||||
if (d instanceof $root.FetchRequest)
|
||||
return d;
|
||||
var m = new $root.FetchRequest();
|
||||
if (d.identifier != null) {
|
||||
m.identifier = String(d.identifier);
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a plain object from a FetchRequest message. Also converts values to other types if specified.
|
||||
* @function toObject
|
||||
* @memberof FetchRequest
|
||||
* @static
|
||||
* @param {FetchRequest} m FetchRequest
|
||||
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
||||
* @returns {Object.<string,*>} Plain object
|
||||
*/
|
||||
FetchRequest.toObject = function toObject(m, o) {
|
||||
if (!o)
|
||||
o = {};
|
||||
var d = {};
|
||||
if (o.defaults) {
|
||||
d.identifier = "";
|
||||
}
|
||||
if (m.identifier != null && m.hasOwnProperty("identifier")) {
|
||||
d.identifier = m.identifier;
|
||||
}
|
||||
return d;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts this FetchRequest to JSON.
|
||||
* @function toJSON
|
||||
* @memberof FetchRequest
|
||||
* @instance
|
||||
* @returns {Object.<string,*>} JSON object
|
||||
*/
|
||||
FetchRequest.prototype.toJSON = function toJSON() {
|
||||
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
||||
};
|
||||
|
||||
return FetchRequest;
|
||||
})();
|
||||
|
||||
$root.FetchResponse = (function() {
|
||||
|
||||
/**
|
||||
* Properties of a FetchResponse.
|
||||
* @exports IFetchResponse
|
||||
* @interface IFetchResponse
|
||||
* @property {FetchResponse.StatusCode|null} [status] FetchResponse status
|
||||
* @property {Uint8Array|null} [data] FetchResponse data
|
||||
*/
|
||||
|
||||
/**
|
||||
* Constructs a new FetchResponse.
|
||||
* @exports FetchResponse
|
||||
* @classdesc Represents a FetchResponse.
|
||||
* @implements IFetchResponse
|
||||
* @constructor
|
||||
* @param {IFetchResponse=} [p] Properties to set
|
||||
*/
|
||||
function FetchResponse(p) {
|
||||
if (p)
|
||||
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
||||
if (p[ks[i]] != null)
|
||||
this[ks[i]] = p[ks[i]];
|
||||
}
|
||||
|
||||
/**
|
||||
* FetchResponse status.
|
||||
* @member {FetchResponse.StatusCode} status
|
||||
* @memberof FetchResponse
|
||||
* @instance
|
||||
*/
|
||||
FetchResponse.prototype.status = 0;
|
||||
|
||||
/**
|
||||
* FetchResponse data.
|
||||
* @member {Uint8Array} data
|
||||
* @memberof FetchResponse
|
||||
* @instance
|
||||
*/
|
||||
FetchResponse.prototype.data = $util.newBuffer([]);
|
||||
|
||||
/**
|
||||
* Encodes the specified FetchResponse message. Does not implicitly {@link FetchResponse.verify|verify} messages.
|
||||
* @function encode
|
||||
* @memberof FetchResponse
|
||||
* @static
|
||||
* @param {IFetchResponse} m FetchResponse message or plain object to encode
|
||||
* @param {$protobuf.Writer} [w] Writer to encode to
|
||||
* @returns {$protobuf.Writer} Writer
|
||||
*/
|
||||
FetchResponse.encode = function encode(m, w) {
|
||||
if (!w)
|
||||
w = $Writer.create();
|
||||
if (m.status != null && Object.hasOwnProperty.call(m, "status"))
|
||||
w.uint32(8).int32(m.status);
|
||||
if (m.data != null && Object.hasOwnProperty.call(m, "data"))
|
||||
w.uint32(18).bytes(m.data);
|
||||
return w;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes a FetchResponse message from the specified reader or buffer.
|
||||
* @function decode
|
||||
* @memberof FetchResponse
|
||||
* @static
|
||||
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
||||
* @param {number} [l] Message length if known beforehand
|
||||
* @returns {FetchResponse} FetchResponse
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
FetchResponse.decode = function decode(r, l) {
|
||||
if (!(r instanceof $Reader))
|
||||
r = $Reader.create(r);
|
||||
var c = l === undefined ? r.len : r.pos + l, m = new $root.FetchResponse();
|
||||
while (r.pos < c) {
|
||||
var t = r.uint32();
|
||||
switch (t >>> 3) {
|
||||
case 1:
|
||||
m.status = r.int32();
|
||||
break;
|
||||
case 2:
|
||||
m.data = r.bytes();
|
||||
break;
|
||||
default:
|
||||
r.skipType(t & 7);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a FetchResponse message from a plain object. Also converts values to their respective internal types.
|
||||
* @function fromObject
|
||||
* @memberof FetchResponse
|
||||
* @static
|
||||
* @param {Object.<string,*>} d Plain object
|
||||
* @returns {FetchResponse} FetchResponse
|
||||
*/
|
||||
FetchResponse.fromObject = function fromObject(d) {
|
||||
if (d instanceof $root.FetchResponse)
|
||||
return d;
|
||||
var m = new $root.FetchResponse();
|
||||
switch (d.status) {
|
||||
case "OK":
|
||||
case 0:
|
||||
m.status = 0;
|
||||
break;
|
||||
case "NOT_FOUND":
|
||||
case 1:
|
||||
m.status = 1;
|
||||
break;
|
||||
case "ERROR":
|
||||
case 2:
|
||||
m.status = 2;
|
||||
break;
|
||||
}
|
||||
if (d.data != null) {
|
||||
if (typeof d.data === "string")
|
||||
$util.base64.decode(d.data, m.data = $util.newBuffer($util.base64.length(d.data)), 0);
|
||||
else if (d.data.length)
|
||||
m.data = d.data;
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a plain object from a FetchResponse message. Also converts values to other types if specified.
|
||||
* @function toObject
|
||||
* @memberof FetchResponse
|
||||
* @static
|
||||
* @param {FetchResponse} m FetchResponse
|
||||
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
||||
* @returns {Object.<string,*>} Plain object
|
||||
*/
|
||||
FetchResponse.toObject = function toObject(m, o) {
|
||||
if (!o)
|
||||
o = {};
|
||||
var d = {};
|
||||
if (o.defaults) {
|
||||
d.status = o.enums === String ? "OK" : 0;
|
||||
if (o.bytes === String)
|
||||
d.data = "";
|
||||
else {
|
||||
d.data = [];
|
||||
if (o.bytes !== Array)
|
||||
d.data = $util.newBuffer(d.data);
|
||||
}
|
||||
}
|
||||
if (m.status != null && m.hasOwnProperty("status")) {
|
||||
d.status = o.enums === String ? $root.FetchResponse.StatusCode[m.status] : m.status;
|
||||
}
|
||||
if (m.data != null && m.hasOwnProperty("data")) {
|
||||
d.data = o.bytes === String ? $util.base64.encode(m.data, 0, m.data.length) : o.bytes === Array ? Array.prototype.slice.call(m.data) : m.data;
|
||||
}
|
||||
return d;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts this FetchResponse to JSON.
|
||||
* @function toJSON
|
||||
* @memberof FetchResponse
|
||||
* @instance
|
||||
* @returns {Object.<string,*>} JSON object
|
||||
*/
|
||||
FetchResponse.prototype.toJSON = function toJSON() {
|
||||
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
||||
};
|
||||
|
||||
/**
|
||||
* StatusCode enum.
|
||||
* @name FetchResponse.StatusCode
|
||||
* @enum {number}
|
||||
* @property {number} OK=0 OK value
|
||||
* @property {number} NOT_FOUND=1 NOT_FOUND value
|
||||
* @property {number} ERROR=2 ERROR value
|
||||
*/
|
||||
FetchResponse.StatusCode = (function() {
|
||||
var valuesById = {}, values = Object.create(valuesById);
|
||||
values[valuesById[0] = "OK"] = 0;
|
||||
values[valuesById[1] = "NOT_FOUND"] = 1;
|
||||
values[valuesById[2] = "ERROR"] = 2;
|
||||
return values;
|
||||
})();
|
||||
|
||||
return FetchResponse;
|
||||
})();
|
||||
|
||||
module.exports = $root;
|
15
src/fetch/proto.proto
Normal file
@ -0,0 +1,15 @@
|
||||
syntax = "proto3";
|
||||
|
||||
message FetchRequest {
|
||||
string identifier = 1;
|
||||
}
|
||||
|
||||
message FetchResponse {
|
||||
StatusCode status = 1;
|
||||
enum StatusCode {
|
||||
OK = 0;
|
||||
NOT_FOUND = 1;
|
||||
ERROR = 2;
|
||||
}
|
||||
bytes data = 2;
|
||||
}
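The generated classes in `proto.js` expose `encode`/`decode` helpers for these messages, which is how `src/fetch/index.js` writes and reads them over the length-prefixed stream. A minimal round-trip sketch (the key string is illustrative):

```js
const { FetchRequest, FetchResponse } = require('./src/fetch/proto')

// encode a request the same way FetchProtocol.fetch() does
const bytes = FetchRequest.encode(new FetchRequest({ identifier: '/my_prefix/foo' })).finish()

// decode it back on the handler side
const request = FetchRequest.decode(bytes)
console.log(request.identifier) // '/my_prefix/foo'

// a NOT_FOUND response, as built by FetchProtocol.handleMessage()
const response = new FetchResponse({ status: FetchResponse.StatusCode.NOT_FOUND })
const responseBytes = FetchResponse.encode(response).finish()
console.log(FetchResponse.decode(responseBytes).status === FetchResponse.StatusCode.NOT_FOUND) // true
```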
|
@ -32,7 +32,7 @@ function getPeer (peer) {
|
||||
|
||||
try {
|
||||
peer = PeerId.createFromB58String(idStr)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
throw errCode(
|
||||
new Error(`${peer} is not a valid peer type`),
|
||||
codes.ERR_INVALID_MULTIADDR
|
||||
|
@ -77,8 +77,6 @@ class IdentifyService {
|
||||
...libp2p._options.host
|
||||
}
|
||||
|
||||
this.peerStore.metadataBook.set(this.peerId, 'AgentVersion', uint8ArrayFromString(this._host.agentVersion))
|
||||
this.peerStore.metadataBook.set(this.peerId, 'ProtocolVersion', uint8ArrayFromString(this._host.protocolVersion))
|
||||
// When a new connection happens, trigger identify
|
||||
this.connectionManager.on('peer:connect', (connection) => {
|
||||
this.identify(connection).catch(log.error)
|
||||
@ -87,18 +85,27 @@ class IdentifyService {
|
||||
// When self multiaddrs change, trigger identify-push
|
||||
this.peerStore.on('change:multiaddrs', ({ peerId }) => {
|
||||
if (peerId.toString() === this.peerId.toString()) {
|
||||
this.pushToPeerStore()
|
||||
this.pushToPeerStore().catch(err => log.error(err))
|
||||
}
|
||||
})
|
||||
|
||||
// When self protocols change, trigger identify-push
|
||||
this.peerStore.on('change:protocols', ({ peerId }) => {
|
||||
if (peerId.toString() === this.peerId.toString()) {
|
||||
this.pushToPeerStore()
|
||||
this.pushToPeerStore().catch(err => log.error(err))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async start () {
|
||||
await this.peerStore.metadataBook.setValue(this.peerId, 'AgentVersion', uint8ArrayFromString(this._host.agentVersion))
|
||||
await this.peerStore.metadataBook.setValue(this.peerId, 'ProtocolVersion', uint8ArrayFromString(this._host.protocolVersion))
|
||||
}
|
||||
|
||||
async stop () {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Send an Identify Push update to the list of connections
|
||||
*
|
||||
@ -108,7 +115,7 @@ class IdentifyService {
|
||||
async push (connections) {
|
||||
const signedPeerRecord = await this.peerStore.addressBook.getRawEnvelope(this.peerId)
|
||||
const listenAddrs = this._libp2p.multiaddrs.map((ma) => ma.bytes)
|
||||
const protocols = this.peerStore.protoBook.get(this.peerId) || []
|
||||
const protocols = await this.peerStore.protoBook.get(this.peerId)
|
||||
|
||||
const pushes = connections.map(async connection => {
|
||||
try {
|
||||
@ -124,7 +131,7 @@ class IdentifyService {
|
||||
stream,
|
||||
consume
|
||||
)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
// Just log errors
|
||||
log.error('could not push identify update to peer', err)
|
||||
}
|
||||
@ -135,10 +142,8 @@ class IdentifyService {
|
||||
|
||||
/**
|
||||
* Calls `push` for all peers in the `peerStore` that are connected
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
pushToPeerStore () {
|
||||
async pushToPeerStore () {
|
||||
// Do not try to push if libp2p node is not running
|
||||
if (!this._libp2p.isStarted()) {
|
||||
return
|
||||
@ -146,13 +151,13 @@ class IdentifyService {
|
||||
|
||||
const connections = []
|
||||
let connection
|
||||
for (const peer of this.peerStore.peers.values()) {
|
||||
for await (const peer of this.peerStore.getPeers()) {
|
||||
if (peer.protocols.includes(this.identifyPushProtocolStr) && (connection = this.connectionManager.get(peer.id))) {
|
||||
connections.push(connection)
|
||||
}
|
||||
}
|
||||
|
||||
this.push(connections)
|
||||
await this.push(connections)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -182,7 +187,7 @@ class IdentifyService {
|
||||
let message
|
||||
try {
|
||||
message = Message.Identify.decode(data)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
throw errCode(err, codes.ERR_INVALID_MESSAGE)
|
||||
}
|
||||
|
||||
@ -205,26 +210,26 @@ class IdentifyService {
|
||||
|
||||
try {
|
||||
const envelope = await Envelope.openAndCertify(signedPeerRecord, PeerRecord.DOMAIN)
|
||||
if (this.peerStore.addressBook.consumePeerRecord(envelope)) {
|
||||
this.peerStore.protoBook.set(id, protocols)
|
||||
this.peerStore.metadataBook.set(id, 'AgentVersion', uint8ArrayFromString(message.agentVersion))
|
||||
this.peerStore.metadataBook.set(id, 'ProtocolVersion', uint8ArrayFromString(message.protocolVersion))
|
||||
if (await this.peerStore.addressBook.consumePeerRecord(envelope)) {
|
||||
await this.peerStore.protoBook.set(id, protocols)
|
||||
await this.peerStore.metadataBook.setValue(id, 'AgentVersion', uint8ArrayFromString(message.agentVersion))
|
||||
await this.peerStore.metadataBook.setValue(id, 'ProtocolVersion', uint8ArrayFromString(message.protocolVersion))
|
||||
return
|
||||
}
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
log('received invalid envelope, discarding it and falling back to listenAddrs if available', err)
|
||||
}
|
||||
|
||||
// LEGACY: Update peers data in PeerStore
|
||||
try {
|
||||
this.peerStore.addressBook.set(id, listenAddrs.map((addr) => new Multiaddr(addr)))
|
||||
} catch (err) {
|
||||
await this.peerStore.addressBook.set(id, listenAddrs.map((addr) => new Multiaddr(addr)))
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error('received invalid addrs', err)
|
||||
}
|
||||
|
||||
this.peerStore.protoBook.set(id, protocols)
|
||||
this.peerStore.metadataBook.set(id, 'AgentVersion', uint8ArrayFromString(message.agentVersion))
|
||||
this.peerStore.metadataBook.set(id, 'ProtocolVersion', uint8ArrayFromString(message.protocolVersion))
|
||||
await this.peerStore.protoBook.set(id, protocols)
|
||||
await this.peerStore.metadataBook.setValue(id, 'AgentVersion', uint8ArrayFromString(message.agentVersion))
|
||||
await this.peerStore.metadataBook.setValue(id, 'ProtocolVersion', uint8ArrayFromString(message.protocolVersion))
|
||||
|
||||
// TODO: Add and score our observed addr
|
||||
log('received observed address of %s', cleanObservedAddr)
|
||||
@ -262,32 +267,32 @@ class IdentifyService {
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async _handleIdentify ({ connection, stream }) {
|
||||
let publicKey = new Uint8Array(0)
|
||||
if (this.peerId.pubKey) {
|
||||
publicKey = this.peerId.pubKey.bytes
|
||||
}
|
||||
|
||||
const signedPeerRecord = await this.peerStore.addressBook.getRawEnvelope(this.peerId)
|
||||
const protocols = this.peerStore.protoBook.get(this.peerId) || []
|
||||
|
||||
const message = Message.Identify.encode({
|
||||
protocolVersion: this._host.protocolVersion,
|
||||
agentVersion: this._host.agentVersion,
|
||||
publicKey,
|
||||
listenAddrs: this._libp2p.multiaddrs.map((ma) => ma.bytes),
|
||||
signedPeerRecord,
|
||||
observedAddr: connection.remoteAddr.bytes,
|
||||
protocols
|
||||
}).finish()
|
||||
|
||||
try {
|
||||
let publicKey = new Uint8Array(0)
|
||||
if (this.peerId.pubKey) {
|
||||
publicKey = this.peerId.pubKey.bytes
|
||||
}
|
||||
|
||||
const signedPeerRecord = await this.peerStore.addressBook.getRawEnvelope(this.peerId)
|
||||
const protocols = await this.peerStore.protoBook.get(this.peerId)
|
||||
|
||||
const message = Message.Identify.encode({
|
||||
protocolVersion: this._host.protocolVersion,
|
||||
agentVersion: this._host.agentVersion,
|
||||
publicKey,
|
||||
listenAddrs: this._libp2p.multiaddrs.map((ma) => ma.bytes),
|
||||
signedPeerRecord,
|
||||
observedAddr: connection.remoteAddr.bytes,
|
||||
protocols
|
||||
}).finish()
|
||||
|
||||
await pipe(
|
||||
[message],
|
||||
lp.encode(),
|
||||
stream,
|
||||
consume
|
||||
)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error('could not respond to identify request', err)
|
||||
}
|
||||
}
|
||||
@ -313,7 +318,7 @@ class IdentifyService {
|
||||
collect
|
||||
)
|
||||
message = Message.Identify.decode(data)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return log.error('received invalid message', err)
|
||||
}
|
||||
|
||||
@ -321,24 +326,28 @@ class IdentifyService {
|
||||
|
||||
try {
|
||||
const envelope = await Envelope.openAndCertify(message.signedPeerRecord, PeerRecord.DOMAIN)
|
||||
if (this.peerStore.addressBook.consumePeerRecord(envelope)) {
|
||||
this.peerStore.protoBook.set(id, message.protocols)
|
||||
if (await this.peerStore.addressBook.consumePeerRecord(envelope)) {
|
||||
await this.peerStore.protoBook.set(id, message.protocols)
|
||||
return
|
||||
}
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
log('received invalid envelope, discarding it and falling back to listenAddrs if available', err)
|
||||
}
|
||||
|
||||
// LEGACY: Update peers data in PeerStore
|
||||
try {
|
||||
this.peerStore.addressBook.set(id,
|
||||
await this.peerStore.addressBook.set(id,
|
||||
message.listenAddrs.map((addr) => new Multiaddr(addr)))
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error('received invalid addrs', err)
|
||||
}
|
||||
|
||||
// Update the protocols
|
||||
this.peerStore.protoBook.set(id, message.protocols)
|
||||
try {
|
||||
await this.peerStore.protoBook.set(id, message.protocols)
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error('received invalid protocols', err)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
166
src/index.js
@ -9,7 +9,7 @@ const { EventEmitter } = require('events')
|
||||
const errCode = require('err-code')
|
||||
const PeerId = require('peer-id')
|
||||
const { Multiaddr } = require('multiaddr')
|
||||
|
||||
const { MemoryDatastore } = require('datastore-core/memory')
|
||||
const PeerRouting = require('./peer-routing')
|
||||
const ContentRouting = require('./content-routing')
|
||||
const getPeer = require('./get-peer')
|
||||
@ -18,6 +18,7 @@ const { codes, messages } = require('./errors')
|
||||
|
||||
const AddressManager = require('./address-manager')
|
||||
const ConnectionManager = require('./connection-manager')
|
||||
const AutoDialler = require('./connection-manager/auto-dialler')
|
||||
const Circuit = require('./circuit/transport')
|
||||
const Relay = require('./circuit')
|
||||
const Dialer = require('./dialer')
|
||||
@ -27,10 +28,10 @@ const TransportManager = require('./transport-manager')
|
||||
const Upgrader = require('./upgrader')
|
||||
const PeerStore = require('./peer-store')
|
||||
const PubsubAdapter = require('./pubsub-adapter')
|
||||
const PersistentPeerStore = require('./peer-store/persistent')
|
||||
const Registrar = require('./registrar')
|
||||
const ping = require('./ping')
|
||||
const IdentifyService = require('./identify')
|
||||
const FetchService = require('./fetch')
|
||||
const PingService = require('./ping')
|
||||
const NatManager = require('./nat-manager')
|
||||
const { updateSelfPeerRecord } = require('./record/utils')
|
||||
|
||||
@ -47,6 +48,9 @@ const { updateSelfPeerRecord } = require('./record/utils')
|
||||
* @typedef {import('libp2p-interfaces/src/pubsub').PubsubOptions} PubsubOptions
|
||||
* @typedef {import('interface-datastore').Datastore} Datastore
|
||||
* @typedef {import('./pnet')} Protector
|
||||
* @typedef {import('./types').ConnectionGater} ConnectionGater
|
||||
* @typedef {Object} PersistentPeerStoreOptions
|
||||
* @property {number} [threshold]
|
||||
*/
|
||||
|
||||
/**
|
||||
@ -55,16 +59,9 @@ const { updateSelfPeerRecord } = require('./record/utils')
|
||||
* @property {MuxedStream} stream
|
||||
* @property {string} protocol
|
||||
*
|
||||
* @typedef {Object} RandomWalkOptions
|
||||
* @property {boolean} [enabled = false]
|
||||
* @property {number} [queriesPerPeriod = 1]
|
||||
* @property {number} [interval = 300e3]
|
||||
* @property {number} [timeout = 10e3]
|
||||
*
|
||||
* @typedef {Object} DhtOptions
|
||||
* @property {boolean} [enabled = false]
|
||||
* @property {number} [kBucketSize = 20]
|
||||
* @property {RandomWalkOptions} [randomWalk]
|
||||
* @property {boolean} [clientMode]
|
||||
* @property {import('libp2p-interfaces/src/types').DhtSelectors} [selectors]
|
||||
* @property {import('libp2p-interfaces/src/types').DhtValidators} [validators]
|
||||
@ -110,13 +107,14 @@ const { updateSelfPeerRecord } = require('./record/utils')
|
||||
* @property {Libp2pModules} modules libp2p modules to use
|
||||
* @property {import('./address-manager').AddressManagerOptions} [addresses]
|
||||
* @property {import('./connection-manager').ConnectionManagerOptions} [connectionManager]
|
||||
* @property {Partial<import('./types').ConnectionGater>} [connectionGater]
|
||||
* @property {Datastore} [datastore]
|
||||
* @property {import('./dialer').DialerOptions} [dialer]
|
||||
* @property {import('./identify/index').HostProperties} [host] libp2p host
|
||||
* @property {KeychainOptions & import('./keychain/index').KeychainOptions} [keychain]
|
||||
* @property {MetricsOptions & import('./metrics').MetricsOptions} [metrics]
|
||||
* @property {import('./peer-routing').PeerRoutingOptions} [peerRouting]
|
||||
* @property {PeerStoreOptions & import('./peer-store/persistent').PersistentPeerStoreOptions} [peerStore]
|
||||
* @property {PeerStoreOptions} [peerStore]
|
||||
* @property {import('./transport-manager').TransportManagerOptions} [transportManager]
|
||||
* @property {Libp2pConfig} [config]
|
||||
*
|
||||
@ -167,13 +165,35 @@ class Libp2p extends EventEmitter {
|
||||
this.peerId = this._options.peerId
|
||||
this.datastore = this._options.datastore
|
||||
|
||||
this.peerStore = (this.datastore && this._options.peerStore.persistence)
? new PersistentPeerStore({
peerId: this.peerId,
datastore: this.datastore,
...this._options.peerStore
})
: new PeerStore({ peerId: this.peerId })
|
||||
// Create Metrics
if (this._options.metrics.enabled) {
const metrics = new Metrics({
...this._options.metrics
})

this.metrics = metrics
}
|
||||
|
||||
/** @type {ConnectionGater} */
|
||||
this.connectionGater = {
|
||||
denyDialPeer: async () => Promise.resolve(false),
|
||||
denyDialMultiaddr: async () => Promise.resolve(false),
|
||||
denyInboundConnection: async () => Promise.resolve(false),
|
||||
denyOutboundConnection: async () => Promise.resolve(false),
|
||||
denyInboundEncryptedConnection: async () => Promise.resolve(false),
|
||||
denyOutboundEncryptedConnection: async () => Promise.resolve(false),
|
||||
denyInboundUpgradedConnection: async () => Promise.resolve(false),
|
||||
denyOutboundUpgradedConnection: async () => Promise.resolve(false),
|
||||
filterMultiaddrForPeer: async () => Promise.resolve(true),
|
||||
...this._options.connectionGater
|
||||
}
|
||||
|
||||
/** @type {import('./peer-store/types').PeerStore} */
|
||||
this.peerStore = new PeerStore({
|
||||
peerId: this.peerId,
|
||||
datastore: (this.datastore && this._options.peerStore.persistence) ? this.datastore : new MemoryDatastore(),
|
||||
addressFilter: this.connectionGater.filterMultiaddrForPeer
|
||||
})
|
||||
|
||||
// Addresses {listen, announce, noAnnounce}
|
||||
this.addresses = this._options.addresses
|
||||
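The constructor now merges any user-supplied `connectionGater` hooks over permissive defaults. A hedged sketch of passing a gater when creating a node; the option shape follows the defaults above, while the blocked peer id and the remaining options are placeholders:

const Libp2p = require('libp2p')

async function createGatedNode (options) {
  // options is expected to carry the usual modules (transports, muxers, crypto)
  return Libp2p.create({
    ...options,
    connectionGater: {
      // deny outgoing dials to one specific peer, allow everything else
      denyDialPeer: async (peerId) => peerId.toB58String() === 'QmSomeBlockedPeer'
    }
  })
}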
@ -193,17 +213,13 @@ class Libp2p extends EventEmitter {
|
||||
|
||||
// Create the Connection Manager
|
||||
this.connectionManager = new ConnectionManager(this, {
|
||||
autoDial: this._config.peerDiscovery.autoDial,
|
||||
...this._options.connectionManager
|
||||
})
|
||||
|
||||
// Create Metrics
|
||||
if (this._options.metrics.enabled) {
|
||||
this.metrics = new Metrics({
|
||||
...this._options.metrics,
|
||||
connectionManager: this.connectionManager
|
||||
})
|
||||
}
|
||||
this._autodialler = new AutoDialler(this, {
|
||||
enabled: this._config.peerDiscovery.autoDial,
|
||||
minConnections: this._options.connectionManager.minConnections,
|
||||
autoDialInterval: this._options.connectionManager.autoDialInterval
|
||||
})
|
||||
|
||||
// Create keychain
|
||||
if (this._options.keychain && this._options.keychain.datastore) {
|
||||
@ -221,6 +237,7 @@ class Libp2p extends EventEmitter {
|
||||
|
||||
// Setup the Upgrader
|
||||
this.upgrader = new Upgrader({
|
||||
connectionGater: this.connectionGater,
|
||||
localPeer: this.peerId,
|
||||
metrics: this.metrics,
|
||||
onConnection: (connection) => this.connectionManager.onConnect(connection),
|
||||
@ -263,7 +280,9 @@ class Libp2p extends EventEmitter {
|
||||
|
||||
this.dialer = new Dialer({
|
||||
transportManager: this.transportManager,
|
||||
connectionGater: this.connectionGater,
|
||||
peerStore: this.peerStore,
|
||||
metrics: this.metrics,
|
||||
...this._options.dialer
|
||||
})
|
||||
|
||||
@ -288,7 +307,6 @@ class Libp2p extends EventEmitter {
|
||||
|
||||
// Add the identify service since we can multiplex
|
||||
this.identifyService = new IdentifyService({ libp2p: this })
|
||||
this.handle(Object.values(IdentifyService.getProtocolStr(this)), this.identifyService.handleMessage)
|
||||
}
|
||||
|
||||
// Attach private network protector
|
||||
@ -301,14 +319,9 @@ class Libp2p extends EventEmitter {
|
||||
// dht provided components (peerRouting, contentRouting, dht)
|
||||
if (this._modules.dht) {
|
||||
const DHT = this._modules.dht
|
||||
// @ts-ignore Object is not constructable
|
||||
this._dht = new DHT({
|
||||
// @ts-ignore TODO: types need fixing - DHT is an `object` which has no `create` method
|
||||
this._dht = DHT.create({
|
||||
libp2p: this,
|
||||
dialer: this.dialer,
|
||||
peerId: this.peerId,
|
||||
peerStore: this.peerStore,
|
||||
registrar: this.registrar,
|
||||
datastore: this.datastore,
|
||||
...this._config.dht
|
||||
})
|
||||
}
|
||||
@ -326,10 +339,10 @@ class Libp2p extends EventEmitter {
|
||||
this.peerRouting = new PeerRouting(this)
|
||||
this.contentRouting = new ContentRouting(this)
|
||||
|
||||
// Mount default protocols
|
||||
ping.mount(this)
|
||||
|
||||
this._onDiscoveryPeer = this._onDiscoveryPeer.bind(this)
|
||||
|
||||
this.fetchService = new FetchService(this)
|
||||
this.pingService = new PingService(this)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -359,11 +372,23 @@ class Libp2p extends EventEmitter {
|
||||
async start () {
|
||||
log('libp2p is starting')
|
||||
|
||||
if (this.identifyService) {
|
||||
await this.handle(Object.values(IdentifyService.getProtocolStr(this)), this.identifyService.handleMessage)
|
||||
}
|
||||
|
||||
if (this.fetchService) {
|
||||
await this.handle(FetchService.PROTOCOL, this.fetchService.handleMessage)
|
||||
}
|
||||
|
||||
if (this.pingService) {
|
||||
await this.handle(PingService.getProtocolStr(this), this.pingService.handleMessage)
|
||||
}
|
||||
|
||||
try {
|
||||
await this._onStarting()
|
||||
await this._onDidStart()
|
||||
log('libp2p has started')
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
this.emit('error', err)
|
||||
log.error('An error occurred starting libp2p', err)
|
||||
await this.stop()
|
||||
@ -383,8 +408,14 @@ class Libp2p extends EventEmitter {
|
||||
try {
|
||||
this._isStarted = false
|
||||
|
||||
if (this.identifyService) {
|
||||
await this.identifyService.stop()
|
||||
}
|
||||
|
||||
this.relay && this.relay.stop()
|
||||
this.peerRouting.stop()
|
||||
await this._autodialler.stop()
|
||||
await (this._dht && this._dht.stop())
|
||||
|
||||
for (const service of this._discovery.values()) {
|
||||
service.removeListener('peer', this._onDiscoveryPeer)
|
||||
@ -394,21 +425,21 @@ class Libp2p extends EventEmitter {
|
||||
|
||||
this._discovery = new Map()
|
||||
|
||||
await this.peerStore.stop()
|
||||
await this.connectionManager.stop()
|
||||
|
||||
await Promise.all([
|
||||
this.pubsub && this.pubsub.stop(),
|
||||
this._dht && this._dht.stop(),
|
||||
this.metrics && this.metrics.stop()
|
||||
])
|
||||
|
||||
await this.natManager.stop()
|
||||
await this.transportManager.close()
|
||||
|
||||
ping.unmount(this)
|
||||
await this.unhandle(FetchService.PROTOCOL)
|
||||
await this.unhandle(PingService.getProtocolStr(this))
|
||||
|
||||
this.dialer.destroy()
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err) {
|
||||
log.error(err)
|
||||
this.emit('error', err)
|
||||
@ -431,7 +462,7 @@ class Libp2p extends EventEmitter {
|
||||
|
||||
try {
|
||||
await this.keychain.findKeyByName('self')
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
await this.keychain.importPeer('self', this.peerId)
|
||||
}
|
||||
}
|
||||
@ -501,7 +532,7 @@ class Libp2p extends EventEmitter {
|
||||
if (!connection) {
|
||||
connection = await this.dialer.connectToPeer(peer, options)
|
||||
} else if (multiaddrs) {
|
||||
this.peerStore.addressBook.add(id, multiaddrs)
|
||||
await this.peerStore.addressBook.add(id, multiaddrs)
|
||||
}
|
||||
|
||||
return connection
|
||||
@ -558,6 +589,17 @@ class Libp2p extends EventEmitter {
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends a request to fetch the value associated with the given key from the given peer.
|
||||
*
|
||||
* @param {PeerId|Multiaddr} peer
|
||||
* @param {string} key
|
||||
* @returns {Promise<Uint8Array | null>}
|
||||
*/
|
||||
fetch (peer, key) {
|
||||
return this.fetchService.fetch(peer, key)
|
||||
}
|
||||
|
||||
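A hedged usage sketch of the new `fetch` helper; the key namespace is made up and the remote peer is assumed to answer the fetch protocol:

const { toString: uint8ArrayToString } = require('uint8arrays/to-string')

async function readRemoteValue (libp2p, peerId) {
  const value = await libp2p.fetch(peerId, '/my-app/some-key') // resolves to a Uint8Array or null
  return value == null ? undefined : uint8ArrayToString(value)
}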
/**
|
||||
* Pings the given peer in order to obtain the operation latency.
|
||||
*
|
||||
@ -569,10 +611,10 @@ class Libp2p extends EventEmitter {
|
||||
|
||||
// If received multiaddr, ping it
|
||||
if (multiaddrs) {
|
||||
return ping(this, multiaddrs[0])
|
||||
return this.pingService.ping(multiaddrs[0])
|
||||
}
|
||||
|
||||
return ping(this, id)
|
||||
return this.pingService.ping(id)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -581,14 +623,14 @@ class Libp2p extends EventEmitter {
|
||||
* @param {string[]|string} protocols
|
||||
* @param {(props: HandlerProps) => void} handler
|
||||
*/
|
||||
handle (protocols, handler) {
|
||||
async handle (protocols, handler) {
|
||||
protocols = Array.isArray(protocols) ? protocols : [protocols]
|
||||
protocols.forEach(protocol => {
|
||||
this.upgrader.protocols.set(protocol, handler)
|
||||
})
|
||||
|
||||
// Add new protocols to self protocols in the Protobook
|
||||
this.peerStore.protoBook.add(this.peerId, protocols)
|
||||
await this.peerStore.protoBook.add(this.peerId, protocols)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -597,14 +639,14 @@ class Libp2p extends EventEmitter {
|
||||
*
|
||||
* @param {string[]|string} protocols
|
||||
*/
|
||||
unhandle (protocols) {
|
||||
async unhandle (protocols) {
|
||||
protocols = Array.isArray(protocols) ? protocols : [protocols]
|
||||
protocols.forEach(protocol => {
|
||||
this.upgrader.protocols.delete(protocol)
|
||||
})
|
||||
|
||||
// Remove protocols from self protocols in the Protobook
|
||||
this.peerStore.protoBook.remove(this.peerId, protocols)
|
||||
await this.peerStore.protoBook.remove(this.peerId, protocols)
|
||||
}
|
||||
|
||||
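Because `handle` and `unhandle` now update the self entry in the protobook, they return promises. A minimal sketch of registering and later removing a handler; the protocol name is arbitrary:

const { pipe } = require('it-pipe')

async function registerEcho (libp2p) {
  await libp2p.handle('/echo/1.0.0', ({ stream }) => {
    // echo every received chunk straight back to the sender
    pipe(stream, stream)
  })
}

async function unregisterEcho (libp2p) {
  await libp2p.unhandle('/echo/1.0.0')
}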
async _onStarting () {
|
||||
@ -615,16 +657,13 @@ class Libp2p extends EventEmitter {
|
||||
// Manage your NATs
|
||||
this.natManager.start()
|
||||
|
||||
// Start PeerStore
|
||||
await this.peerStore.start()
|
||||
|
||||
if (this._config.pubsub.enabled) {
|
||||
this.pubsub && this.pubsub.start()
|
||||
this.pubsub && await this.pubsub.start()
|
||||
}
|
||||
|
||||
// DHT subsystem
|
||||
if (this._config.dht.enabled) {
|
||||
this._dht && this._dht.start()
|
||||
this._dht && await this._dht.start()
|
||||
|
||||
// TODO: this should be modified once random-walk is used as
|
||||
// the other discovery modules
|
||||
@ -633,6 +672,10 @@ class Libp2p extends EventEmitter {
|
||||
|
||||
// Start metrics if present
|
||||
this.metrics && this.metrics.start()
|
||||
|
||||
if (this.identifyService) {
|
||||
await this.identifyService.start()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@ -645,16 +688,19 @@ class Libp2p extends EventEmitter {
|
||||
|
||||
this.peerStore.on('peer', peerId => {
|
||||
this.emit('peer:discovery', peerId)
|
||||
this._maybeConnect(peerId)
|
||||
this._maybeConnect(peerId).catch(err => {
|
||||
log.error(err)
|
||||
})
|
||||
})
|
||||
|
||||
// Once we start, emit any peers we may have already discovered
|
||||
// TODO: this should be removed, as we already discovered these peers in the past
|
||||
for (const peer of this.peerStore.peers.values()) {
|
||||
for await (const peer of this.peerStore.getPeers()) {
|
||||
this.emit('peer:discovery', peer.id)
|
||||
}
|
||||
|
||||
this.connectionManager.start()
|
||||
await this._autodialler.start()
|
||||
|
||||
// Peer discovery
|
||||
await this._setupPeerDiscovery()
|
||||
@ -678,8 +724,8 @@ class Libp2p extends EventEmitter {
|
||||
return
|
||||
}
|
||||
|
||||
peer.multiaddrs && this.peerStore.addressBook.add(peer.id, peer.multiaddrs)
|
||||
peer.protocols && this.peerStore.protoBook.set(peer.id, peer.protocols)
|
||||
peer.multiaddrs && this.peerStore.addressBook.add(peer.id, peer.multiaddrs).catch(err => log.error(err))
|
||||
peer.protocols && this.peerStore.protoBook.set(peer.id, peer.protocols).catch(err => log.error(err))
|
||||
}
|
||||
|
||||
/**
|
||||
@ -698,7 +744,7 @@ class Libp2p extends EventEmitter {
|
||||
log('connecting to discovered peer %s', peerId.toB58String())
|
||||
try {
|
||||
await this.dialer.connectToPeer(peerId)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error(`could not connect to discovered peer ${peerId.toB58String()} with ${err}`)
|
||||
}
|
||||
}
|
||||
|
@ -55,7 +55,7 @@ async function encrypt (localId, conn, remoteId) {
|
||||
let peerId
|
||||
try {
|
||||
peerId = await PeerId.createFromPubKey(id.pubkey.Data)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error(err)
|
||||
throw new InvalidCryptoExchangeError('Remote did not provide its public key')
|
||||
}
|
||||
|
@ -10,6 +10,7 @@ const { certificateForKey, findAsync } = require('./util')
|
||||
const errcode = require('err-code')
|
||||
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { codes } = require('../errors')
|
||||
|
||||
const privates = new WeakMap()
|
||||
|
||||
@ -31,7 +32,7 @@ class CMS {
|
||||
*/
|
||||
constructor (keychain, dek) {
|
||||
if (!keychain) {
|
||||
throw errcode(new Error('keychain is required'), 'ERR_KEYCHAIN_REQUIRED')
|
||||
throw errcode(new Error('keychain is required'), codes.ERR_KEYCHAIN_REQUIRED)
|
||||
}
|
||||
|
||||
this.keychain = keychain
|
||||
@ -49,7 +50,7 @@ class CMS {
|
||||
*/
|
||||
async encrypt (name, plain) {
|
||||
if (!(plain instanceof Uint8Array)) {
|
||||
throw errcode(new Error('Plain data must be a Uint8Array'), 'ERR_INVALID_PARAMS')
|
||||
throw errcode(new Error('Plain data must be a Uint8Array'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const key = await this.keychain.findKeyByName(name)
|
||||
@ -81,7 +82,7 @@ class CMS {
|
||||
*/
|
||||
async decrypt (cmsData) {
|
||||
if (!(cmsData instanceof Uint8Array)) {
|
||||
throw errcode(new Error('CMS data is required'), 'ERR_INVALID_PARAMS')
|
||||
throw errcode(new Error('CMS data is required'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
let cms
|
||||
@ -90,8 +91,8 @@ class CMS {
|
||||
const obj = forge.asn1.fromDer(buf)
|
||||
// @ts-ignore not defined
|
||||
cms = forge.pkcs7.messageFromAsn1(obj)
|
||||
} catch (err) {
|
||||
throw errcode(new Error('Invalid CMS: ' + err.message), 'ERR_INVALID_CMS')
|
||||
} catch (/** @type {any} */ err) {
|
||||
throw errcode(new Error('Invalid CMS: ' + err.message), codes.ERR_INVALID_CMS)
|
||||
}
|
||||
|
||||
// Find a recipient whose key we hold. We only deal with recipient certs
|
||||
@ -114,7 +115,7 @@ class CMS {
|
||||
try {
|
||||
const key = await this.keychain.findKeyById(recipient.keyId)
|
||||
if (key) return true
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return false
|
||||
}
|
||||
return false
|
||||
@ -123,7 +124,7 @@ class CMS {
|
||||
if (!r) {
|
||||
// @ts-ignore cms types not defined
|
||||
const missingKeys = recipients.map(r => r.keyId)
|
||||
throw errcode(new Error('Decryption needs one of the key(s): ' + missingKeys.join(', ')), 'ERR_MISSING_KEYS', {
|
||||
throw errcode(new Error('Decryption needs one of the key(s): ' + missingKeys.join(', ')), codes.ERR_MISSING_KEYS, {
|
||||
missingKeys
|
||||
})
|
||||
}
|
||||
@ -131,7 +132,7 @@ class CMS {
|
||||
const key = await this.keychain.findKeyById(r.keyId)
|
||||
|
||||
if (!key) {
|
||||
throw errcode(new Error('No key available to decrypt'), 'ERR_NO_KEY')
throw errcode(new Error('No key available to decrypt'), codes.ERR_NO_KEY)
|
||||
}
|
||||
|
||||
const pem = await this.keychain._getPrivateKey(key.name)
|
||||
|
@ -10,6 +10,7 @@ const crypto = require('libp2p-crypto')
|
||||
const { Key } = require('interface-datastore/key')
|
||||
const CMS = require('./cms')
|
||||
const errcode = require('err-code')
|
||||
const { codes } = require('../errors')
|
||||
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
|
||||
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
|
||||
|
||||
@ -210,21 +211,21 @@ class Keychain {
|
||||
const self = this
|
||||
|
||||
if (!validateKeyName(name) || name === 'self') {
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
|
||||
}
|
||||
|
||||
if (typeof type !== 'string') {
|
||||
return throwDelayed(errcode(new Error(`Invalid key type '${type}'`), 'ERR_INVALID_KEY_TYPE'))
|
||||
return throwDelayed(errcode(new Error(`Invalid key type '${type}'`), codes.ERR_INVALID_KEY_TYPE))
|
||||
}
|
||||
|
||||
const dsname = DsName(name)
|
||||
const exists = await self.store.has(dsname)
|
||||
if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS'))
|
||||
if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), codes.ERR_KEY_ALREADY_EXISTS))
|
||||
|
||||
switch (type.toLowerCase()) {
|
||||
case 'rsa':
|
||||
if (!Number.isSafeInteger(size) || size < 2048) {
|
||||
return throwDelayed(errcode(new Error(`Invalid RSA key size ${size}`), 'ERR_INVALID_KEY_SIZE'))
|
||||
return throwDelayed(errcode(new Error(`Invalid RSA key size ${size}`), codes.ERR_INVALID_KEY_SIZE))
|
||||
}
|
||||
break
|
||||
default:
|
||||
@ -248,7 +249,7 @@ class Keychain {
|
||||
batch.put(DsInfoName(name), uint8ArrayFromString(JSON.stringify(keyInfo)))
|
||||
|
||||
await batch.commit()
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return throwDelayed(err)
|
||||
}
|
||||
|
||||
@ -284,7 +285,7 @@ class Keychain {
|
||||
try {
|
||||
const keys = await this.listKeys()
|
||||
return keys.find((k) => k.id === id)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return throwDelayed(err)
|
||||
}
|
||||
}
|
||||
@ -297,15 +298,15 @@ class Keychain {
|
||||
*/
|
||||
async findKeyByName (name) {
|
||||
if (!validateKeyName(name)) {
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
|
||||
}
|
||||
|
||||
const dsname = DsInfoName(name)
|
||||
try {
|
||||
const res = await this.store.get(dsname)
|
||||
return JSON.parse(uint8ArrayToString(res))
|
||||
} catch (err) {
|
||||
return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND'))
|
||||
} catch (/** @type {any} */ err) {
|
||||
return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), codes.ERR_KEY_NOT_FOUND))
|
||||
}
|
||||
}
|
||||
|
||||
@ -318,7 +319,7 @@ class Keychain {
|
||||
async removeKey (name) {
|
||||
const self = this
|
||||
if (!validateKeyName(name) || name === 'self') {
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
|
||||
}
|
||||
const dsname = DsName(name)
|
||||
const keyInfo = await self.findKeyByName(name)
|
||||
@ -339,10 +340,10 @@ class Keychain {
|
||||
async renameKey (oldName, newName) {
|
||||
const self = this
|
||||
if (!validateKeyName(oldName) || oldName === 'self') {
|
||||
return throwDelayed(errcode(new Error(`Invalid old key name '${oldName}'`), 'ERR_OLD_KEY_NAME_INVALID'))
|
||||
return throwDelayed(errcode(new Error(`Invalid old key name '${oldName}'`), codes.ERR_OLD_KEY_NAME_INVALID))
|
||||
}
|
||||
if (!validateKeyName(newName) || newName === 'self') {
|
||||
return throwDelayed(errcode(new Error(`Invalid new key name '${newName}'`), 'ERR_NEW_KEY_NAME_INVALID'))
|
||||
return throwDelayed(errcode(new Error(`Invalid new key name '${newName}'`), codes.ERR_NEW_KEY_NAME_INVALID))
|
||||
}
|
||||
const oldDsname = DsName(oldName)
|
||||
const newDsname = DsName(newName)
|
||||
@ -350,7 +351,7 @@ class Keychain {
|
||||
const newInfoName = DsInfoName(newName)
|
||||
|
||||
const exists = await self.store.has(newDsname)
|
||||
if (exists) return throwDelayed(errcode(new Error(`Key '${newName}' already exists`), 'ERR_KEY_ALREADY_EXISTS'))
|
||||
if (exists) return throwDelayed(errcode(new Error(`Key '${newName}' already exists`), codes.ERR_KEY_ALREADY_EXISTS))
|
||||
|
||||
try {
|
||||
const pem = await self.store.get(oldDsname)
|
||||
@ -365,7 +366,7 @@ class Keychain {
|
||||
batch.delete(oldInfoName)
|
||||
await batch.commit()
|
||||
return keyInfo
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return throwDelayed(err)
|
||||
}
|
||||
}
|
||||
@ -379,10 +380,10 @@ class Keychain {
|
||||
*/
|
||||
async exportKey (name, password) {
|
||||
if (!validateKeyName(name)) {
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
|
||||
}
|
||||
if (!password) {
|
||||
return throwDelayed(errcode(new Error('Password is required'), 'ERR_PASSWORD_REQUIRED'))
|
||||
return throwDelayed(errcode(new Error('Password is required'), codes.ERR_PASSWORD_REQUIRED))
|
||||
}
|
||||
|
||||
const dsname = DsName(name)
|
||||
@ -393,7 +394,7 @@ class Keychain {
|
||||
const dek = privates.get(this).dek
|
||||
const privateKey = await crypto.keys.import(pem, dek)
|
||||
return privateKey.export(password)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return throwDelayed(err)
|
||||
}
|
||||
}
|
||||
@ -409,20 +410,20 @@ class Keychain {
|
||||
async importKey (name, pem, password) {
|
||||
const self = this
|
||||
if (!validateKeyName(name) || name === 'self') {
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
|
||||
}
|
||||
if (!pem) {
|
||||
return throwDelayed(errcode(new Error('PEM encoded key is required'), 'ERR_PEM_REQUIRED'))
|
||||
return throwDelayed(errcode(new Error('PEM encoded key is required'), codes.ERR_PEM_REQUIRED))
|
||||
}
|
||||
const dsname = DsName(name)
|
||||
const exists = await self.store.has(dsname)
|
||||
if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS'))
|
||||
if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), codes.ERR_KEY_ALREADY_EXISTS))
|
||||
|
||||
let privateKey
|
||||
try {
|
||||
privateKey = await crypto.keys.import(pem, password)
|
||||
} catch (err) {
|
||||
return throwDelayed(errcode(new Error('Cannot read the key, most likely the password is wrong'), 'ERR_CANNOT_READ_KEY'))
|
||||
} catch (/** @type {any} */ err) {
|
||||
return throwDelayed(errcode(new Error('Cannot read the key, most likely the password is wrong'), codes.ERR_CANNOT_READ_KEY))
|
||||
}
|
||||
|
||||
let kid
|
||||
@ -431,7 +432,7 @@ class Keychain {
|
||||
/** @type {string} */
|
||||
const dek = privates.get(this).dek
|
||||
pem = await privateKey.export(dek)
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return throwDelayed(err)
|
||||
}
|
||||
|
||||
@ -457,16 +458,16 @@ class Keychain {
|
||||
async importPeer (name, peer) {
|
||||
const self = this
|
||||
if (!validateKeyName(name)) {
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
|
||||
}
|
||||
if (!peer || !peer.privKey) {
|
||||
return throwDelayed(errcode(new Error('Peer.privKey is required'), 'ERR_MISSING_PRIVATE_KEY'))
|
||||
return throwDelayed(errcode(new Error('Peer.privKey is required'), codes.ERR_MISSING_PRIVATE_KEY))
|
||||
}
|
||||
|
||||
const privateKey = peer.privKey
|
||||
const dsname = DsName(name)
|
||||
const exists = await self.store.has(dsname)
|
||||
if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS'))
|
||||
if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), codes.ERR_KEY_ALREADY_EXISTS))
|
||||
|
||||
try {
|
||||
const kid = await privateKey.id()
|
||||
@ -482,7 +483,7 @@ class Keychain {
|
||||
batch.put(DsInfoName(name), uint8ArrayFromString(JSON.stringify(keyInfo)))
|
||||
await batch.commit()
|
||||
return keyInfo
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
return throwDelayed(err)
|
||||
}
|
||||
}
|
||||
@ -495,15 +496,15 @@ class Keychain {
|
||||
*/
|
||||
async _getPrivateKey (name) {
|
||||
if (!validateKeyName(name)) {
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
|
||||
return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
|
||||
}
|
||||
|
||||
try {
|
||||
const dsname = DsName(name)
|
||||
const res = await this.store.get(dsname)
|
||||
return uint8ArrayToString(res)
|
||||
} catch (err) {
|
||||
return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND'))
|
||||
} catch (/** @type {any} */ err) {
|
||||
return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), codes.ERR_KEY_NOT_FOUND))
|
||||
}
|
||||
}
|
||||
|
||||
@ -515,13 +516,13 @@ class Keychain {
|
||||
*/
|
||||
async rotateKeychainPass (oldPass, newPass) {
|
||||
if (typeof oldPass !== 'string') {
|
||||
return throwDelayed(errcode(new Error(`Invalid old pass type '${typeof oldPass}'`), 'ERR_INVALID_OLD_PASS_TYPE'))
|
||||
return throwDelayed(errcode(new Error(`Invalid old pass type '${typeof oldPass}'`), codes.ERR_INVALID_OLD_PASS_TYPE))
|
||||
}
|
||||
if (typeof newPass !== 'string') {
|
||||
return throwDelayed(errcode(new Error(`Invalid new pass type '${typeof newPass}'`), 'ERR_INVALID_NEW_PASS_TYPE'))
|
||||
return throwDelayed(errcode(new Error(`Invalid new pass type '${typeof newPass}'`), codes.ERR_INVALID_NEW_PASS_TYPE))
|
||||
}
|
||||
if (newPass.length < 20) {
|
||||
return throwDelayed(errcode(new Error(`Invalid pass length ${newPass.length}`), 'ERR_INVALID_PASS_LENGTH'))
|
||||
return throwDelayed(errcode(new Error(`Invalid pass length ${newPass.length}`), codes.ERR_INVALID_PASS_LENGTH))
|
||||
}
|
||||
log('recreating keychain')
|
||||
const oldDek = privates.get(this).dek
|
||||
|
@ -24,9 +24,6 @@ const directionToEvent = {
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef MetricsProperties
|
||||
* @property {import('../connection-manager')} connectionManager
|
||||
*
|
||||
* @typedef MetricsOptions
|
||||
* @property {number} [computeThrottleMaxQueueSize = defaultOptions.computeThrottleMaxQueueSize]
|
||||
* @property {number} [computeThrottleTimeout = defaultOptions.computeThrottleTimeout]
|
||||
@ -37,7 +34,7 @@ const directionToEvent = {
|
||||
class Metrics {
|
||||
/**
|
||||
* @class
|
||||
* @param {MetricsProperties & MetricsOptions} options
|
||||
* @param {MetricsOptions} options
|
||||
*/
|
||||
constructor (options) {
|
||||
this._options = mergeOptions(defaultOptions, options)
|
||||
@ -47,10 +44,7 @@ class Metrics {
|
||||
this._oldPeers = oldPeerLRU(this._options.maxOldPeersRetention)
|
||||
this._running = false
|
||||
this._onMessage = this._onMessage.bind(this)
|
||||
this._connectionManager = options.connectionManager
|
||||
this._connectionManager.on('peer:disconnect', (connection) => {
|
||||
this.onPeerDisconnected(connection.remotePeer)
|
||||
})
|
||||
this._systems = new Map()
|
||||
}
|
||||
|
||||
/**
|
||||
@ -94,6 +88,29 @@ class Metrics {
|
||||
return Array.from(this._peerStats.keys())
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {Map<string, Map<string, Map<string, any>>>}
|
||||
*/
|
||||
getComponentMetrics () {
|
||||
return this._systems
|
||||
}
|
||||
|
||||
updateComponentMetric ({ system = 'libp2p', component, metric, value }) {
|
||||
if (!this._systems.has(system)) {
|
||||
this._systems.set(system, new Map())
|
||||
}
|
||||
|
||||
const systemMetrics = this._systems.get(system)
|
||||
|
||||
if (!systemMetrics.has(component)) {
|
||||
systemMetrics.set(component, new Map())
|
||||
}
|
||||
|
||||
const componentMetrics = systemMetrics.get(component)
|
||||
|
||||
componentMetrics.set(metric, value)
|
||||
}
|
||||
|
||||
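A hedged sketch of how a subsystem could report a gauge through the new component-metrics API and read it back; the system/component/metric names are purely illustrative:

function reportQueueSize (metrics, queue) {
  // metrics may be undefined when metrics collection is disabled
  metrics && metrics.updateComponentMetric({
    system: 'libp2p',
    component: 'my-subsystem',
    metric: 'queue-size',
    value: queue.length
  })
}

// reading everything back:
// for (const [system, components] of metrics.getComponentMetrics().entries()) { ... }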
/**
|
||||
* Returns the `Stats` object for the given `PeerId` whether it
|
||||
* is a live peer, or in the disconnected peer LRU cache.
|
||||
|
@ -8,6 +8,7 @@ const retimer = require('retimer')
|
||||
|
||||
/**
|
||||
* @typedef {import('@vascosantos/moving-average').IMovingAverage} IMovingAverage
|
||||
* @typedef {import('bignumber.js').BigNumber} Big
|
||||
*/
|
||||
|
||||
class Stats extends EventEmitter {
|
||||
|
94
src/metrics/tracked-map.js
Normal file
@ -0,0 +1,94 @@
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* @template K
|
||||
* @template V
|
||||
*/
|
||||
class TrackedMap extends Map {
|
||||
/**
|
||||
* @param {object} options
|
||||
* @param {string} options.system
|
||||
* @param {string} options.component
|
||||
* @param {string} options.metric
|
||||
* @param {import('.')} options.metrics
|
||||
*/
|
||||
constructor (options) {
|
||||
super()
|
||||
|
||||
const { system, component, metric, metrics } = options
|
||||
this._system = system
|
||||
this._component = component
|
||||
this._metric = metric
|
||||
this._metrics = metrics
|
||||
|
||||
this._metrics.updateComponentMetric({
|
||||
system: this._system,
|
||||
component: this._component,
|
||||
metric: this._metric,
|
||||
value: this.size
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {K} key
|
||||
* @param {V} value
|
||||
*/
|
||||
set (key, value) {
|
||||
super.set(key, value)
|
||||
this._metrics.updateComponentMetric({
|
||||
system: this._system,
|
||||
component: this._component,
|
||||
metric: this._metric,
|
||||
value: this.size
|
||||
})
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {K} key
|
||||
*/
|
||||
delete (key) {
|
||||
const deleted = super.delete(key)
|
||||
this._metrics.updateComponentMetric({
|
||||
system: this._system,
|
||||
component: this._component,
|
||||
metric: this._metric,
|
||||
value: this.size
|
||||
})
|
||||
return deleted
|
||||
}
|
||||
|
||||
clear () {
|
||||
super.clear()
|
||||
|
||||
this._metrics.updateComponentMetric({
|
||||
system: this._system,
|
||||
component: this._component,
|
||||
metric: this._metric,
|
||||
value: this.size
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @template K
|
||||
* @template V
|
||||
* @param {object} options
|
||||
* @param {string} [options.system]
|
||||
* @param {string} options.component
|
||||
* @param {string} options.metric
|
||||
* @param {import('.')} [options.metrics]
|
||||
* @returns {Map<K, V>}
|
||||
*/
|
||||
module.exports = ({ system = 'libp2p', component, metric, metrics }) => {
|
||||
/** @type {Map<K, V>} */
|
||||
let map
|
||||
|
||||
if (metrics) {
|
||||
map = new TrackedMap({ system, component, metric, metrics })
|
||||
} else {
|
||||
map = new Map()
|
||||
}
|
||||
|
||||
return map
|
||||
}
|
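The new module returns a metrics-reporting Map when a Metrics instance is supplied and a plain Map otherwise. A hedged usage sketch; the require path and component name are assumptions:

const trackedMap = require('./src/metrics/tracked-map')

function createConnectionMap (metrics) {
  // falls back to a plain Map when metrics are not enabled
  return trackedMap({ component: 'connection-manager', metric: 'connections', metrics })
}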
@ -1,7 +1,7 @@
|
||||
'use strict'
|
||||
|
||||
// @ts-ignore nat-api does not export types
|
||||
const NatAPI = require('@motrix/nat-api')
|
||||
const NatAPI = require('nat-api')
|
||||
const debug = require('debug')
|
||||
const { promisify } = require('es6-promisify')
|
||||
const { Multiaddr } = require('multiaddr')
|
||||
@ -114,7 +114,7 @@ class NatManager {
|
||||
const client = this._getClient()
|
||||
const publicIp = this._externalIp || await client.externalIp()
|
||||
|
||||
// @ts-ignore isPrivate has no call signatures
|
||||
// @ts-expect-error types are wrong
|
||||
if (isPrivateIp(publicIp)) {
|
||||
throw new Error(`${publicIp} is private - please set config.nat.externalIp to an externally routable IP or ensure you are not behind a double NAT`)
|
||||
}
|
||||
@ -188,7 +188,7 @@ class NatManager {
|
||||
try {
|
||||
await this._client.destroy()
|
||||
this._client = null
|
||||
} catch (err) {
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error(err)
|
||||
}
|
||||
}
|
||||
|
@ -5,11 +5,13 @@ const log = Object.assign(debug('libp2p:peer-routing'), {
|
||||
error: debug('libp2p:peer-routing:err')
|
||||
})
|
||||
const errCode = require('err-code')
|
||||
const errors = require('./errors')
|
||||
const {
|
||||
storeAddresses,
|
||||
uniquePeers,
|
||||
requirePeers
|
||||
} = require('./content-routing/utils')
|
||||
const { TimeoutController } = require('timeout-abort-controller')
|
||||
|
||||
const merge = require('it-merge')
|
||||
const { pipe } = require('it-pipe')
|
||||
@ -21,6 +23,9 @@ const {
|
||||
clearDelayedInterval
|
||||
// @ts-ignore module with no types
|
||||
} = require('set-delayed-interval')
|
||||
const { DHTPeerRouting } = require('./dht/dht-peer-routing')
|
||||
// @ts-expect-error setMaxListeners is missing from the types
|
||||
const { setMaxListeners } = require('events')
|
||||
|
||||
/**
|
||||
* @typedef {import('peer-id')} PeerId
|
||||
@ -33,6 +38,7 @@ const {
|
||||
* @property {boolean} [enabled = true] - Whether to enable the Refresh manager
|
||||
* @property {number} [bootDelay = 6e5] - Boot delay to start the Refresh Manager (in ms)
|
||||
* @property {number} [interval = 10e3] - Interval between each Refresh Manager run (in ms)
|
||||
* @property {number} [timeout = 10e3] - How long to let each refresh run (in ms)
|
||||
*
|
||||
* @typedef {Object} PeerRoutingOptions
|
||||
* @property {RefreshManagerOptions} [refreshManager]
|
||||
@ -51,7 +57,7 @@ class PeerRouting {
|
||||
|
||||
// If we have the dht, add it to the available peer routers
|
||||
if (libp2p._dht && libp2p._config.dht.enabled) {
|
||||
this._routers.push(libp2p._dht)
|
||||
this._routers.push(new DHTPeerRouting(libp2p._dht))
|
||||
}
|
||||
|
||||
this._refreshManagerOptions = libp2p._options.peerRouting.refreshManager
|
||||
@ -78,8 +84,8 @@ class PeerRouting {
|
||||
async _findClosestPeersTask () {
|
||||
try {
|
||||
// nb getClosestPeers adds the addresses to the address book
|
||||
await drain(this.getClosestPeers(this._peerId.id))
|
||||
} catch (err) {
|
||||
await drain(this.getClosestPeers(this._peerId.id, { timeout: this._refreshManagerOptions.timeout || 10e3 }))
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error(err)
|
||||
}
|
||||
}
|
||||
@ -101,19 +107,24 @@ class PeerRouting {
|
||||
*/
|
||||
async findPeer (id, options) { // eslint-disable-line require-await
|
||||
if (!this._routers.length) {
|
||||
throw errCode(new Error('No peer routers available'), 'NO_ROUTERS_AVAILABLE')
|
||||
throw errCode(new Error('No peer routers available'), errors.codes.ERR_NO_ROUTERS_AVAILABLE)
|
||||
}
|
||||
|
||||
if (id.toB58String() === this._peerId.toB58String()) {
|
||||
throw errCode(new Error('Should not try to find self'), 'ERR_FIND_SELF')
|
||||
throw errCode(new Error('Should not try to find self'), errors.codes.ERR_FIND_SELF)
|
||||
}
|
||||
|
||||
const output = await pipe(
|
||||
merge(
|
||||
...this._routers.map(router => [router.findPeer(id, options)])
|
||||
...this._routers.map(router => (async function * () {
|
||||
try {
|
||||
yield await router.findPeer(id, options)
|
||||
} catch (err) {
|
||||
log.error(err)
|
||||
}
|
||||
})())
|
||||
),
|
||||
(source) => filter(source, Boolean),
|
||||
// @ts-ignore findPeer resolves a Promise
|
||||
(source) => storeAddresses(source, this._peerStore),
|
||||
(source) => first(source)
|
||||
)
|
||||
@ -122,7 +133,7 @@ class PeerRouting {
|
||||
return output
|
||||
}
|
||||
|
||||
throw errCode(new Error('not found'), 'NOT_FOUND')
|
||||
throw errCode(new Error(errors.messages.NOT_FOUND), errors.codes.ERR_NOT_FOUND)
|
||||
}
|
||||
|
||||
/**
|
||||
@ -130,12 +141,25 @@ class PeerRouting {
|
||||
*
|
||||
* @param {Uint8Array} key - A CID like key
|
||||
* @param {Object} [options]
|
||||
* @param {number} [options.timeout=30e3] - How long the query can take.
|
||||
* @param {number} [options.timeout=30e3] - How long the query can take
|
||||
* @param {AbortSignal} [options.signal] - An AbortSignal to abort the request
|
||||
* @returns {AsyncIterable<{ id: PeerId, multiaddrs: Multiaddr[] }>}
|
||||
*/
|
||||
async * getClosestPeers (key, options = { timeout: 30e3 }) {
|
||||
if (!this._routers.length) {
|
||||
throw errCode(new Error('No peer routers available'), 'NO_ROUTERS_AVAILABLE')
|
||||
throw errCode(new Error('No peer routers available'), errors.codes.ERR_NO_ROUTERS_AVAILABLE)
|
||||
}
|
||||
|
||||
if (options.timeout) {
|
||||
const controller = new TimeoutController(options.timeout)
|
||||
// this controller will potentially be used while dialing lots of
|
||||
// peers so prevent MaxListenersExceededWarning appearing in the console
|
||||
try {
|
||||
// fails on node < 15.4
|
||||
setMaxListeners && setMaxListeners(Infinity, controller.signal)
|
||||
} catch {}
|
||||
|
||||
options.signal = controller.signal
|
||||
}
|
||||
|
||||
yield * pipe(
|
||||
|
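`getClosestPeers` now derives an AbortSignal from a TimeoutController whenever a timeout is given. A hedged sketch of the same pattern applied to an arbitrary abortable operation; `doSomething` is a placeholder for any API that accepts a signal:

const { TimeoutController } = require('timeout-abort-controller')

async function withTimeout (doSomething, ms = 30e3) {
  const controller = new TimeoutController(ms)
  try {
    return await doSomething({ signal: controller.signal })
  } finally {
    controller.clear() // stop the timer once the operation settles
  }
}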
@ -1,74 +1,43 @@
|
||||
'use strict'
|
||||
|
||||
const debug = require('debug')
|
||||
const errcode = require('err-code')
|
||||
const { Multiaddr } = require('multiaddr')
|
||||
const PeerId = require('peer-id')
|
||||
const { codes } = require('../errors')
|
||||
const PeerRecord = require('../record/peer-record')
|
||||
const Envelope = require('../record/envelope')
|
||||
const { pipe } = require('it-pipe')
|
||||
const all = require('it-all')
|
||||
const filter = require('it-filter')
|
||||
const map = require('it-map')
|
||||
const each = require('it-foreach')
|
||||
|
||||
/**
|
||||
* @typedef {import('./types').PeerStore} PeerStore
|
||||
* @typedef {import('./types').Address} Address
|
||||
* @typedef {import('./types').AddressBook} AddressBook
|
||||
*/
|
||||
|
||||
const log = Object.assign(debug('libp2p:peer-store:address-book'), {
|
||||
error: debug('libp2p:peer-store:address-book:err')
|
||||
})
|
||||
const errcode = require('err-code')
|
||||
|
||||
const { Multiaddr } = require('multiaddr')
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
const Book = require('./book')
|
||||
const PeerRecord = require('../record/peer-record')
|
||||
|
||||
const {
|
||||
codes: { ERR_INVALID_PARAMETERS }
|
||||
} = require('../errors')
|
||||
const Envelope = require('../record/envelope')
|
||||
const EVENT_NAME = 'change:multiaddrs'
|
||||
|
||||
/**
|
||||
* @typedef {import('./')} PeerStore
|
||||
* @implements {AddressBook}
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} Address
|
||||
* @property {Multiaddr} multiaddr peer multiaddr.
|
||||
* @property {boolean} isCertified obtained from a signed peer record.
|
||||
*
|
||||
* @typedef {Object} CertifiedRecord
|
||||
* @property {Uint8Array} raw raw envelope.
|
||||
* @property {number} seqNumber seq counter.
|
||||
*
|
||||
* @typedef {Object} Entry
|
||||
* @property {Address[]} addresses peer Addresses.
|
||||
* @property {CertifiedRecord} record certified peer record.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @extends {Book}
|
||||
*/
|
||||
class AddressBook extends Book {
|
||||
class PeerStoreAddressBook {
|
||||
/**
|
||||
* The AddressBook is responsible for keeping the known multiaddrs of a peer.
|
||||
*
|
||||
* @class
|
||||
* @param {PeerStore} peerStore
|
||||
* @param {PeerStore["emit"]} emit
|
||||
* @param {import('./types').Store} store
|
||||
* @param {(peerId: PeerId, multiaddr: Multiaddr) => Promise<boolean>} addressFilter
|
||||
*/
|
||||
constructor (peerStore) {
|
||||
/**
|
||||
* PeerStore Event emitter, used by the AddressBook to emit:
|
||||
* "peer" - emitted when a peer is discovered by the node.
|
||||
* "change:multiaddrs" - emitted when the known multiaddrs of a peer change.
|
||||
*/
|
||||
super({
|
||||
peerStore,
|
||||
eventName: 'change:multiaddrs',
|
||||
eventProperty: 'multiaddrs',
|
||||
eventTransformer: (data) => {
|
||||
if (!data.addresses) {
|
||||
return []
|
||||
}
|
||||
return data.addresses.map((/** @type {Address} */ address) => address.multiaddr)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* Map known peers to their known Address Entries.
|
||||
*
|
||||
* @type {Map<string, Entry>}
|
||||
*/
|
||||
this.data = new Map()
|
||||
constructor (emit, store, addressFilter) {
|
||||
this._emit = emit
|
||||
this._store = store
|
||||
this._addressFilter = addressFilter
|
||||
}
|
||||
|
||||
/**
|
||||
@ -77,69 +46,90 @@ class AddressBook extends Book {
|
||||
* into the AddressBook.
|
||||
*
|
||||
* @param {Envelope} envelope
|
||||
* @returns {boolean}
|
||||
*/
|
||||
consumePeerRecord (envelope) {
|
||||
let peerRecord
|
||||
async consumePeerRecord (envelope) {
|
||||
log('consumePeerRecord await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('consumePeerRecord got write lock')
|
||||
|
||||
let peerId
|
||||
let updatedPeer
|
||||
|
||||
try {
|
||||
peerRecord = PeerRecord.createFromProtobuf(envelope.payload)
|
||||
} catch (err) {
|
||||
log.error('invalid peer record received')
|
||||
return false
|
||||
}
|
||||
|
||||
// Verify peerId
|
||||
if (!peerRecord.peerId.equals(envelope.peerId)) {
|
||||
log('signing key does not match PeerId in the PeerRecord')
|
||||
return false
|
||||
}
|
||||
|
||||
// ensure the record has multiaddrs
|
||||
if (!peerRecord.multiaddrs || !peerRecord.multiaddrs.length) {
|
||||
return false
|
||||
}
|
||||
|
||||
const peerId = peerRecord.peerId
|
||||
const id = peerId.toB58String()
|
||||
const entry = this.data.get(id) || { record: undefined }
|
||||
const storedRecord = entry.record
|
||||
|
||||
// ensure seq is greater than, or equal to, the last received
|
||||
if (storedRecord && storedRecord.seqNumber >= peerRecord.seqNumber) {
|
||||
return false
|
||||
}
|
||||
|
||||
const addresses = this._toAddresses(peerRecord.multiaddrs, true)
|
||||
|
||||
// Replace unsigned addresses by the new ones from the record
|
||||
// TODO: Once we have ttls for the addresses, we should merge these in.
|
||||
this._setData(peerId, {
|
||||
addresses,
|
||||
record: {
|
||||
raw: envelope.marshal(),
|
||||
seqNumber: peerRecord.seqNumber
|
||||
let peerRecord
|
||||
try {
|
||||
peerRecord = PeerRecord.createFromProtobuf(envelope.payload)
|
||||
} catch (/** @type {any} */ err) {
|
||||
log.error('invalid peer record received')
|
||||
return false
|
||||
}
|
||||
})
|
||||
log(`stored provided peer record for ${id}`)
|
||||
|
||||
peerId = peerRecord.peerId
|
||||
const multiaddrs = peerRecord.multiaddrs
|
||||
|
||||
// Verify peerId
|
||||
if (!peerId.equals(envelope.peerId)) {
|
||||
log('signing key does not match PeerId in the PeerRecord')
|
||||
return false
|
||||
}
|
||||
|
||||
// ensure the record has multiaddrs
|
||||
if (!multiaddrs || !multiaddrs.length) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (await this._store.has(peerId)) {
|
||||
const peer = await this._store.load(peerId)
|
||||
|
||||
if (peer.peerRecordEnvelope) {
|
||||
const storedEnvelope = await Envelope.createFromProtobuf(peer.peerRecordEnvelope)
|
||||
const storedRecord = PeerRecord.createFromProtobuf(storedEnvelope.payload)
|
||||
|
||||
// ensure seq is greater than, or equal to, the last received
|
||||
if (storedRecord.seqNumber >= peerRecord.seqNumber) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Replace unsigned addresses by the new ones from the record
|
||||
// TODO: Once we have ttls for the addresses, we should merge these in
|
||||
updatedPeer = await this._store.patchOrCreate(peerId, {
|
||||
addresses: await filterMultiaddrs(peerId, multiaddrs, this._addressFilter, true),
|
||||
peerRecordEnvelope: envelope.marshal()
|
||||
})
|
||||
|
||||
log(`stored provided peer record for ${peerRecord.peerId.toB58String()}`)
|
||||
} finally {
|
||||
log('consumePeerRecord release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._emit(EVENT_NAME, { peerId, multiaddrs: updatedPeer.addresses.map(({ multiaddr }) => multiaddr) })
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the raw Envelope for a peer. Returns
|
||||
* undefined if no Envelope is found.
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @returns {Uint8Array|undefined}
|
||||
*/
|
||||
getRawEnvelope (peerId) {
|
||||
const entry = this.data.get(peerId.toB58String())
|
||||
async getRawEnvelope (peerId) {
|
||||
log('getRawEnvelope await read lock')
|
||||
const release = await this._store.lock.readLock()
|
||||
log('getRawEnvelope got read lock')
|
||||
|
||||
if (!entry || !entry.record || !entry.record.raw) {
|
||||
return undefined
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
|
||||
return peer.peerRecordEnvelope
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
} finally {
|
||||
log('getRawEnvelope release read lock')
|
||||
release()
|
||||
}
|
||||
|
||||
return entry.record.raw
|
||||
}
|
||||
|
||||
/**
|
||||
@ -147,10 +137,9 @@ class AddressBook extends Book {
|
||||
* Returns undefined if no record exists.
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @returns {Promise<Envelope|void>|undefined}
|
||||
*/
|
||||
getPeerRecord (peerId) {
|
||||
const raw = this.getRawEnvelope(peerId)
|
||||
async getPeerRecord (peerId) {
|
||||
const raw = await this.getRawEnvelope(peerId)
|
||||
|
||||
if (!raw) {
|
||||
return undefined
|
||||
@ -160,186 +149,199 @@ class AddressBook extends Book {
|
||||
}
|
||||
|
||||
/**
|
||||
* Set known multiaddrs of a provided peer.
|
||||
* This will replace previously stored multiaddrs, if available.
|
||||
* Replacing stored multiaddrs might result in losing obtained certified addresses.
|
||||
* If you are not sure, it's recommended to use `add` instead.
|
||||
*
|
||||
* @override
|
||||
* @param {PeerId} peerId
|
||||
* @param {Multiaddr[]} multiaddrs
|
||||
* @returns {AddressBook}
|
||||
*/
|
||||
set (peerId, multiaddrs) {
|
||||
async get (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const addresses = this._toAddresses(multiaddrs)
|
||||
log('get wait for read lock')
|
||||
const release = await this._store.lock.readLock()
|
||||
log('get got read lock')
|
||||
|
||||
// Not replace multiaddrs
|
||||
if (!addresses.length) {
|
||||
return this
|
||||
}
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
|
||||
const id = peerId.toB58String()
|
||||
const entry = this.data.get(id)
|
||||
|
||||
// Already knows the peer
|
||||
if (entry && entry.addresses && entry.addresses.length === addresses.length) {
|
||||
const intersection = entry.addresses.filter((addr) => addresses.some((newAddr) => addr.multiaddr.equals(newAddr.multiaddr)))
|
||||
|
||||
// Are new addresses equal to the old ones?
|
||||
// If yes, no changes needed!
|
||||
if (intersection.length === entry.addresses.length) {
|
||||
log(`the addresses provided to store are equal to the already stored for ${id}`)
|
||||
return this
|
||||
return peer.addresses
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
} finally {
|
||||
log('get release read lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._setData(peerId, {
|
||||
addresses,
|
||||
record: entry && entry.record
|
||||
})
|
||||
log(`stored provided multiaddrs for ${id}`)
|
||||
|
||||
// Notify the existance of a new peer
|
||||
if (!entry) {
|
||||
this._ps.emit('peer', peerId)
|
||||
}
|
||||
|
||||
return this
|
||||
return []
|
||||
}
|
||||
|
||||
/**
|
||||
* Add known addresses of a provided peer.
|
||||
* If the peer is not known, it is set with the given addresses.
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @param {Multiaddr[]} multiaddrs
|
||||
* @returns {AddressBook}
|
||||
*/
|
||||
add (peerId, multiaddrs) {
|
||||
async set (peerId, multiaddrs) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const addresses = this._toAddresses(multiaddrs)
|
||||
const id = peerId.toB58String()
|
||||
|
||||
// No addresses to be added
|
||||
if (!addresses.length) {
|
||||
return this
|
||||
if (!Array.isArray(multiaddrs)) {
|
||||
log.error('multiaddrs must be an array of Multiaddrs')
|
||||
throw errcode(new Error('multiaddrs must be an array of Multiaddrs'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const entry = this.data.get(id)
|
||||
log('set await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('set got write lock')
|
||||
|
||||
if (entry && entry.addresses) {
|
||||
// Add recorded uniquely to the new array (Union)
|
||||
entry.addresses.forEach((addr) => {
|
||||
if (!addresses.find(r => r.multiaddr.equals(addr.multiaddr))) {
|
||||
addresses.push(addr)
|
||||
let hasPeer = false
|
||||
let updatedPeer
|
||||
|
||||
try {
|
||||
const addresses = await filterMultiaddrs(peerId, multiaddrs, this._addressFilter)
|
||||
|
||||
// No valid addresses found
|
||||
if (!addresses.length) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
hasPeer = true
|
||||
|
||||
if (new Set([
|
||||
...addresses.map(({ multiaddr }) => multiaddr.toString()),
|
||||
...peer.addresses.map(({ multiaddr }) => multiaddr.toString())
|
||||
]).size === peer.addresses.length && addresses.length === peer.addresses.length) {
|
||||
// not changing anything, no need to update
|
||||
return
|
||||
}
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
})
|
||||
|
||||
// If the recorded length is equal to the new after the unique union
|
||||
// The content is the same, no need to update.
|
||||
if (entry.addresses.length === addresses.length) {
|
||||
log(`the addresses provided to store are already stored for ${id}`)
|
||||
return this
|
||||
}
|
||||
|
||||
updatedPeer = await this._store.patchOrCreate(peerId, { addresses })
|
||||
|
||||
log(`set multiaddrs for ${peerId.toB58String()}`)
|
||||
} finally {
|
||||
log('set release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._setData(peerId, {
|
||||
addresses,
|
||||
record: entry && entry.record
|
||||
})
|
||||
this._emit(EVENT_NAME, { peerId, multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr) })
|
||||
|
||||
log(`added provided multiaddrs for ${id}`)
|
||||
|
||||
// Notify the existance of a new peer
|
||||
if (!(entry && entry.addresses)) {
|
||||
this._ps.emit('peer', peerId)
|
||||
// Notify the existence of a new peer
|
||||
if (!hasPeer) {
|
||||
this._emit('peer', peerId)
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the known data of a provided peer.
|
||||
*
|
||||
* @override
|
||||
* @param {PeerId} peerId
|
||||
* @returns {Address[]|undefined}
|
||||
*/
|
||||
get (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const entry = this.data.get(peerId.toB58String())
|
||||
|
||||
return entry && entry.addresses ? [...entry.addresses] : undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms received multiaddrs into Address.
|
||||
*
|
||||
* @private
|
||||
* @param {Multiaddr[]} multiaddrs
|
||||
* @param {boolean} [isCertified]
|
||||
* @returns {Address[]}
|
||||
*/
|
||||
_toAddresses (multiaddrs, isCertified = false) {
|
||||
if (!multiaddrs) {
|
||||
log.error('multiaddrs must be provided to store data')
|
||||
throw errcode(new Error('multiaddrs must be provided'), ERR_INVALID_PARAMETERS)
|
||||
async add (peerId, multiaddrs) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
// create Address for each address
|
||||
/** @type {Address[]} */
|
||||
const addresses = []
|
||||
multiaddrs.forEach((addr) => {
|
||||
if (!Multiaddr.isMultiaddr(addr)) {
|
||||
log.error(`multiaddr ${addr} must be an instance of multiaddr`)
|
||||
throw errcode(new Error(`multiaddr ${addr} must be an instance of multiaddr`), ERR_INVALID_PARAMETERS)
|
||||
if (!Array.isArray(multiaddrs)) {
|
||||
log.error('multiaddrs must be an array of Multiaddrs')
|
||||
throw errcode(new Error('multiaddrs must be an array of Multiaddrs'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
log('add await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('add got write lock')
|
||||
|
||||
let hasPeer
|
||||
let updatedPeer
|
||||
|
||||
try {
|
||||
const addresses = await filterMultiaddrs(peerId, multiaddrs, this._addressFilter)
|
||||
|
||||
// No valid addresses found
|
||||
if (!addresses.length) {
|
||||
return
|
||||
}
|
||||
|
||||
// Guarantee no replicates
|
||||
if (!addresses.find((a) => a.multiaddr.equals(addr))) {
|
||||
addresses.push({
|
||||
multiaddr: addr,
|
||||
isCertified
|
||||
})
|
||||
}
|
||||
})
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
hasPeer = true
|
||||
|
||||
return addresses
|
||||
if (new Set([
|
||||
...addresses.map(({ multiaddr }) => multiaddr.toString()),
|
||||
...peer.addresses.map(({ multiaddr }) => multiaddr.toString())
|
||||
]).size === peer.addresses.length) {
|
||||
return
|
||||
}
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
updatedPeer = await this._store.mergeOrCreate(peerId, { addresses })
|
||||
|
||||
log(`added multiaddrs for ${peerId.toB58String()}`)
|
||||
} finally {
|
||||
log('add release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._emit(EVENT_NAME, { peerId, multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr) })
|
||||
|
||||
// Notify the existence of a new peer
|
||||
if (!hasPeer) {
|
||||
this._emit('peer', peerId)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PeerId} peerId
|
||||
*/
|
||||
async delete (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
log('delete await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('delete got write lock')
|
||||
|
||||
let has
|
||||
|
||||
try {
|
||||
has = await this._store.has(peerId)
|
||||
|
||||
await this._store.patchOrCreate(peerId, {
|
||||
addresses: []
|
||||
})
|
||||
} finally {
|
||||
log('delete release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
if (has) {
|
||||
this._emit(EVENT_NAME, { peerId, multiaddrs: [] })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the known multiaddrs for a given peer. All returned multiaddrs
|
||||
* will include the encapsulated `PeerId` of the peer.
|
||||
* Returns `undefined` if there are no known multiaddrs for the given peer.
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @param {(addresses: Address[]) => Address[]} [addressSorter]
|
||||
* @returns {Multiaddr[]|undefined}
|
||||
*/
|
||||
getMultiaddrsForPeer (peerId, addressSorter = (ms) => ms) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const entry = this.data.get(peerId.toB58String())
|
||||
if (!entry || !entry.addresses) {
|
||||
return undefined
|
||||
}
|
||||
async getMultiaddrsForPeer (peerId, addressSorter = (ms) => ms) {
|
||||
const addresses = await this.get(peerId)
|
||||
|
||||
return addressSorter(
|
||||
entry.addresses || []
|
||||
addresses
|
||||
).map((address) => {
|
||||
const multiaddr = address.multiaddr
|
||||
|
||||
@@ -351,4 +353,30 @@ class AddressBook extends Book {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = AddressBook
|
||||
/**
|
||||
* @param {PeerId} peerId
|
||||
* @param {Multiaddr[]} multiaddrs
|
||||
* @param {(peerId: PeerId, multiaddr: Multiaddr) => Promise<boolean>} addressFilter
|
||||
* @param {boolean} isCertified
|
||||
*/
|
||||
function filterMultiaddrs (peerId, multiaddrs, addressFilter, isCertified = false) {
|
||||
return pipe(
|
||||
multiaddrs,
|
||||
(source) => each(source, (multiaddr) => {
|
||||
if (!Multiaddr.isMultiaddr(multiaddr)) {
|
||||
log.error('multiaddr must be an instance of Multiaddr')
|
||||
throw errcode(new Error('multiaddr must be an instance of Multiaddr'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
}),
|
||||
(source) => filter(source, (multiaddr) => addressFilter(peerId, multiaddr)),
|
||||
(source) => map(source, (multiaddr) => {
|
||||
return {
|
||||
multiaddr: new Multiaddr(multiaddr.toString()),
|
||||
isCertified
|
||||
}
|
||||
}),
|
||||
(source) => all(source)
|
||||
)
|
||||
}
|
||||
|
||||
module.exports = PeerStoreAddressBook
|
||||
|
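The AddressBook above now exposes a fully asynchronous, datastore-backed API. What follows is a rough usage sketch, assuming a peerStore wired up as in the DefaultPeerStore constructor later in this diff; the remote peer id and address are placeholders, not values from this changeset.

const PeerId = require('peer-id')
const { Multiaddr } = require('multiaddr')

async function recordAddresses (peerStore) {
  // hypothetical remote peer and address, purely for illustration
  const remotePeer = await PeerId.create({ keyType: 'Ed25519' })
  const addr = new Multiaddr('/ip4/127.0.0.1/tcp/4001')

  // add() takes the write lock, filters and deduplicates the multiaddrs,
  // persists them and emits 'change:multiaddrs' (plus 'peer' for unknown peers)
  await peerStore.addressBook.add(remotePeer, [addr])

  // getMultiaddrsForPeer() resolves to multiaddrs with the peer id encapsulated
  const dialable = await peerStore.addressBook.getMultiaddrsForPeer(remotePeer)
  console.log((dialable || []).map(ma => ma.toString()))
}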
@@ -1,127 +0,0 @@
'use strict'

const errcode = require('err-code')
const PeerId = require('peer-id')

const {
  codes: { ERR_INVALID_PARAMETERS }
} = require('../errors')

/**
 * @param {any} data
 */
const passthrough = data => data

/**
 * @typedef {import('./')} PeerStore
 */

class Book {
  /**
   * The Book is the skeleton for the PeerStore books.
   *
   * @class
   * @param {Object} properties
   * @param {PeerStore} properties.peerStore - PeerStore instance.
   * @param {string} properties.eventName - Name of the event to emit by the PeerStore.
   * @param {string} properties.eventProperty - Name of the property to emit by the PeerStore.
   * @param {(data: any) => any[]} [properties.eventTransformer] - Transformer function of the provided data for being emitted.
   */
  constructor ({ peerStore, eventName, eventProperty, eventTransformer = passthrough }) {
    this._ps = peerStore
    this.eventName = eventName
    this.eventProperty = eventProperty
    this.eventTransformer = eventTransformer

    /**
     * Map known peers to their data.
     *
     * @type {Map<string, any[]|any>}
     */
    this.data = new Map()
  }

  /**
   * Set known data of a provided peer.
   *
   * @param {PeerId} peerId
   * @param {any[]|any} data
   */
  set (peerId, data) {
    throw errcode(new Error('set must be implemented by the subclass'), 'ERR_NOT_IMPLEMENTED')
  }

  /**
   * Set data into the datastructure, persistence and emit it using the provided transformers.
   *
   * @protected
   * @param {PeerId} peerId - peerId of the data to store
   * @param {any} data - data to store.
   * @param {Object} [options] - storing options.
   * @param {boolean} [options.emit = true] - emit the provided data.
   * @returns {void}
   */
  _setData (peerId, data, { emit = true } = {}) {
    const b58key = peerId.toB58String()

    // Store data in memory
    this.data.set(b58key, data)

    // Emit event
    emit && this._emit(peerId, data)
  }

  /**
   * Emit data.
   *
   * @protected
   * @param {PeerId} peerId
   * @param {any} [data]
   */
  _emit (peerId, data) {
    this._ps.emit(this.eventName, {
      peerId,
      [this.eventProperty]: this.eventTransformer(data)
    })
  }

  /**
   * Get the known data of a provided peer.
   * Returns `undefined` if there is no available data for the given peer.
   *
   * @param {PeerId} peerId
   * @returns {any[]|any|undefined}
   */
  get (peerId) {
    if (!PeerId.isPeerId(peerId)) {
      throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
    }

    const rec = this.data.get(peerId.toB58String())

    // @ts-ignore
    return rec ? [...rec] : undefined
  }

  /**
   * Deletes the provided peer from the book.
   *
   * @param {PeerId} peerId
   * @returns {boolean}
   */
  delete (peerId) {
    if (!PeerId.isPeerId(peerId)) {
      throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
    }

    if (!this.data.delete(peerId.toB58String())) {
      return false
    }

    this._emit(peerId, [])

    return true
  }
}

module.exports = Book
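For contrast with the new datastore-backed classes, this is roughly how the removed in-memory Book skeleton above was consumed by its subclasses; the ThingBook name and its event are invented for illustration only.

const Book = require('./book')

class ThingBook extends Book {
  constructor (peerStore) {
    // eventName/eventProperty drive what _emit() sends through the PeerStore
    super({ peerStore, eventName: 'change:things', eventProperty: 'things' })
  }

  set (peerId, things) {
    // _setData() stores the value in the in-memory Map and emits the event
    this._setData(peerId, things)
    return this
  }
}

module.exports = ThingBook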
@@ -1,152 +1,121 @@
|
||||
'use strict'
|
||||
|
||||
const errcode = require('err-code')
|
||||
|
||||
const debug = require('debug')
|
||||
const { EventEmitter } = require('events')
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
const AddressBook = require('./address-book')
|
||||
const KeyBook = require('./key-book')
|
||||
const MetadataBook = require('./metadata-book')
|
||||
const ProtoBook = require('./proto-book')
|
||||
|
||||
const {
|
||||
codes: { ERR_INVALID_PARAMETERS }
|
||||
} = require('../errors')
|
||||
const Store = require('./store')
|
||||
|
||||
/**
|
||||
* @typedef {import('./address-book').Address} Address
|
||||
* @typedef {import('./types').PeerStore} PeerStore
|
||||
* @typedef {import('./types').Peer} Peer
|
||||
* @typedef {import('peer-id')} PeerId
|
||||
* @typedef {import('multiaddr').Multiaddr} Multiaddr
|
||||
*/
|
||||
|
||||
const log = Object.assign(debug('libp2p:peer-store'), {
|
||||
error: debug('libp2p:peer-store:err')
|
||||
})
|
||||
|
||||
/**
|
||||
* @extends {EventEmitter}
|
||||
* An implementation of PeerStore that stores data in a Datastore
|
||||
*
|
||||
* @fires PeerStore#peer Emitted when a new peer is added.
|
||||
* @fires PeerStore#change:protocols Emitted when a known peer supports a different set of protocols.
|
||||
* @fires PeerStore#change:multiaddrs Emitted when a known peer has a different set of multiaddrs.
|
||||
* @fires PeerStore#change:pubkey Emitted emitted when a peer's public key is known.
|
||||
* @fires PeerStore#change:metadata Emitted when the known metadata of a peer change.
|
||||
* @implements {PeerStore}
|
||||
*/
|
||||
class PeerStore extends EventEmitter {
|
||||
class DefaultPeerStore extends EventEmitter {
|
||||
/**
|
||||
* Peer object
|
||||
*
|
||||
* @typedef {Object} Peer
|
||||
* @property {PeerId} id peer's peer-id instance.
|
||||
* @property {Address[]} addresses peer's addresses containing its multiaddrs and metadata.
|
||||
* @property {string[]} protocols peer's supported protocols.
|
||||
* @property {Map<string, Uint8Array>|undefined} metadata peer's metadata map.
|
||||
* @param {object} properties
|
||||
* @param {PeerId} properties.peerId
|
||||
* @param {import('interface-datastore').Datastore} properties.datastore
|
||||
* @param {(peerId: PeerId, multiaddr: Multiaddr) => Promise<boolean>} properties.addressFilter
|
||||
*/
|
||||
|
||||
/**
|
||||
* Responsible for managing known peers, as well as their addresses, protocols and metadata.
|
||||
*
|
||||
* @param {object} options
|
||||
* @param {PeerId} options.peerId
|
||||
* @class
|
||||
*/
|
||||
constructor ({ peerId }) {
|
||||
constructor ({ peerId, datastore, addressFilter }) {
|
||||
super()
|
||||
|
||||
this._peerId = peerId
|
||||
this._store = new Store(datastore)
|
||||
|
||||
/**
|
||||
* AddressBook containing a map of peerIdStr to Address.
|
||||
*/
|
||||
this.addressBook = new AddressBook(this)
|
||||
this.addressBook = new AddressBook(this.emit.bind(this), this._store, addressFilter)
|
||||
this.keyBook = new KeyBook(this.emit.bind(this), this._store)
|
||||
this.metadataBook = new MetadataBook(this.emit.bind(this), this._store)
|
||||
this.protoBook = new ProtoBook(this.emit.bind(this), this._store)
|
||||
}
|
||||
|
||||
/**
|
||||
* KeyBook containing a map of peerIdStr to their PeerId with public keys.
|
||||
*/
|
||||
this.keyBook = new KeyBook(this)
|
||||
async * getPeers () {
|
||||
log('getPeers await read lock')
|
||||
const release = await this._store.lock.readLock()
|
||||
log('getPeers got read lock')
|
||||
|
||||
/**
|
||||
* MetadataBook containing a map of peerIdStr to their metadata Map.
|
||||
*/
|
||||
this.metadataBook = new MetadataBook(this)
|
||||
try {
|
||||
for await (const peer of this._store.all()) {
|
||||
if (peer.id.toB58String() === this._peerId.toB58String()) {
|
||||
// Remove self peer if present
|
||||
continue
|
||||
}
|
||||
|
||||
/**
|
||||
* ProtoBook containing a map of peerIdStr to supported protocols.
|
||||
*/
|
||||
this.protoBook = new ProtoBook(this)
|
||||
yield peer
|
||||
}
|
||||
} finally {
|
||||
log('getPeers release read lock')
|
||||
release()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the PeerStore.
|
||||
*/
|
||||
start () {}
|
||||
|
||||
/**
|
||||
* Stop the PeerStore.
|
||||
*/
|
||||
stop () {}
|
||||
|
||||
/**
|
||||
* Get all the stored information of every peer known.
|
||||
*
|
||||
* @returns {Map<string, Peer>}
|
||||
*/
|
||||
get peers () {
|
||||
const storedPeers = new Set([
|
||||
...this.addressBook.data.keys(),
|
||||
...this.keyBook.data.keys(),
|
||||
...this.protoBook.data.keys(),
|
||||
...this.metadataBook.data.keys()
|
||||
])
|
||||
|
||||
// Remove self peer if present
|
||||
this._peerId && storedPeers.delete(this._peerId.toB58String())
|
||||
|
||||
const peersData = new Map()
|
||||
storedPeers.forEach((idStr) => {
|
||||
peersData.set(idStr, this.get(PeerId.createFromB58String(idStr)))
|
||||
})
|
||||
|
||||
return peersData
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete the information of the given peer in every book.
|
||||
* Delete the information of the given peer in every book
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @returns {boolean} true if found and removed
|
||||
*/
|
||||
delete (peerId) {
|
||||
const addressesDeleted = this.addressBook.delete(peerId)
|
||||
const keyDeleted = this.keyBook.delete(peerId)
|
||||
const protocolsDeleted = this.protoBook.delete(peerId)
|
||||
const metadataDeleted = this.metadataBook.delete(peerId)
|
||||
async delete (peerId) {
|
||||
log('delete await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('delete got write lock')
|
||||
|
||||
return addressesDeleted || keyDeleted || protocolsDeleted || metadataDeleted
|
||||
try {
|
||||
await this._store.delete(peerId)
|
||||
} finally {
|
||||
log('delete release write lock')
|
||||
release()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the stored information of a given peer.
|
||||
* Get the stored information of a given peer
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @returns {Peer|undefined}
|
||||
*/
|
||||
get (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
async get (peerId) {
|
||||
log('get await read lock')
|
||||
const release = await this._store.lock.readLock()
|
||||
log('get got read lock')
|
||||
|
||||
try {
|
||||
return this._store.load(peerId)
|
||||
} finally {
|
||||
log('get release read lock')
|
||||
release()
|
||||
}
|
||||
}
|
||||
|
||||
const id = this.keyBook.data.get(peerId.toB58String())
|
||||
const addresses = this.addressBook.get(peerId)
|
||||
const metadata = this.metadataBook.get(peerId)
|
||||
const protocols = this.protoBook.get(peerId)
|
||||
/**
|
||||
* Returns true if we have a record of the peer
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
*/
|
||||
async has (peerId) {
|
||||
log('has await read lock')
|
||||
const release = await this._store.lock.readLock()
|
||||
log('has got read lock')
|
||||
|
||||
if (!id && !addresses && !metadata && !protocols) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
return {
|
||||
id: id || peerId,
|
||||
addresses: addresses || [],
|
||||
protocols: protocols || [],
|
||||
metadata: metadata
|
||||
try {
|
||||
return this._store.has(peerId)
|
||||
} finally {
|
||||
log('has release read lock')
|
||||
release()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = PeerStore
|
||||
module.exports = DefaultPeerStore
|
||||
|
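The DefaultPeerStore constructor above now takes a datastore and an address filter instead of keeping everything in memory. Below is a rough sketch of driving it directly, assuming any interface-datastore implementation is passed in (libp2p itself normally supplies one); the require path and the accept-all filter are illustrative assumptions.

const PeerId = require('peer-id')
const PeerStore = require('./src/peer-store')

async function dumpPeers (datastore) {
  const selfId = await PeerId.create({ keyType: 'Ed25519' })

  const peerStore = new PeerStore({
    peerId: selfId,
    datastore,
    // accept every address; a real node might reject private ranges here
    addressFilter: async () => true
  })

  // getPeers() is an async generator that reads under the read lock
  // and skips the node's own peer id
  for await (const peer of peerStore.getPeers()) {
    console.log(peer.id.toB58String(), peer.protocols)
  }
}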
@@ -1,96 +1,141 @@
|
||||
'use strict'
|
||||
|
||||
const debug = require('debug')
|
||||
const errcode = require('err-code')
|
||||
const { codes } = require('../errors')
|
||||
const PeerId = require('peer-id')
|
||||
const { equals: uint8arrayEquals } = require('uint8arrays/equals')
|
||||
|
||||
/**
|
||||
* @typedef {import('./types').PeerStore} PeerStore
|
||||
* @typedef {import('./types').KeyBook} KeyBook
|
||||
* @typedef {import('libp2p-interfaces/src/keys/types').PublicKey} PublicKey
|
||||
*/
|
||||
|
||||
const log = Object.assign(debug('libp2p:peer-store:key-book'), {
|
||||
error: debug('libp2p:peer-store:key-book:err')
|
||||
})
|
||||
const errcode = require('err-code')
|
||||
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
const Book = require('./book')
|
||||
|
||||
const {
|
||||
codes: { ERR_INVALID_PARAMETERS }
|
||||
} = require('../errors')
|
||||
const EVENT_NAME = 'change:pubkey'
|
||||
|
||||
/**
|
||||
* @typedef {import('./')} PeerStore
|
||||
* @typedef {import('libp2p-crypto').PublicKey} PublicKey
|
||||
* @implements {KeyBook}
|
||||
*/
|
||||
|
||||
/**
|
||||
* @extends {Book}
|
||||
*/
|
||||
class KeyBook extends Book {
|
||||
class PeerStoreKeyBook {
|
||||
/**
|
||||
* The KeyBook is responsible for keeping the known public keys of a peer.
|
||||
*
|
||||
* @class
|
||||
* @param {PeerStore} peerStore
|
||||
* @param {PeerStore["emit"]} emit
|
||||
* @param {import('./types').Store} store
|
||||
*/
|
||||
constructor (peerStore) {
|
||||
super({
|
||||
peerStore,
|
||||
eventName: 'change:pubkey',
|
||||
eventProperty: 'pubkey',
|
||||
eventTransformer: (data) => data.pubKey
|
||||
})
|
||||
|
||||
/**
|
||||
* Map known peers to their known Public Key.
|
||||
*
|
||||
* @type {Map<string, PeerId>}
|
||||
*/
|
||||
this.data = new Map()
|
||||
constructor (emit, store) {
|
||||
this._emit = emit
|
||||
this._store = store
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the Peer public key.
|
||||
* Set the Peer public key
|
||||
*
|
||||
* @override
|
||||
* @param {PeerId} peerId
|
||||
* @param {PublicKey} publicKey
|
||||
* @returns {KeyBook}
|
||||
*/
|
||||
set (peerId, publicKey) {
|
||||
async set (peerId, publicKey) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const id = peerId.toB58String()
|
||||
const recPeerId = this.data.get(id)
|
||||
|
||||
// If no record available, and this is valid
|
||||
if (!recPeerId && publicKey) {
|
||||
// This might be unecessary, but we want to store the PeerId
|
||||
// to avoid an async operation when reconstructing the PeerId
|
||||
peerId.pubKey = publicKey
|
||||
|
||||
this._setData(peerId, peerId)
|
||||
log(`stored provided public key for ${id}`)
|
||||
if (!publicKey) {
|
||||
log.error('publicKey must be an instance of PublicKey to store data')
|
||||
throw errcode(new Error('publicKey must be an instance of PublicKey'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
return this
|
||||
log('set await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('set got write lock')
|
||||
|
||||
let updatedKey = false
|
||||
|
||||
try {
|
||||
try {
|
||||
const existing = await this._store.load(peerId)
|
||||
|
||||
if (existing.pubKey && uint8arrayEquals(existing.pubKey.bytes, publicKey.bytes)) {
|
||||
return
|
||||
}
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
await this._store.patchOrCreate(peerId, {
|
||||
pubKey: publicKey
|
||||
})
|
||||
updatedKey = true
|
||||
} finally {
|
||||
log('set release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
if (updatedKey) {
|
||||
this._emit(EVENT_NAME, { peerId, pubKey: publicKey })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Public key of the given PeerId, if stored.
|
||||
* Get Public key of the given PeerId, if stored
|
||||
*
|
||||
* @override
|
||||
* @param {PeerId} peerId
|
||||
* @returns {PublicKey | undefined}
|
||||
*/
|
||||
get (peerId) {
|
||||
async get (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const rec = this.data.get(peerId.toB58String())
|
||||
log('get await write lock')
|
||||
const release = await this._store.lock.readLock()
|
||||
log('get got write lock')
|
||||
|
||||
return rec ? rec.pubKey : undefined
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
|
||||
return peer.pubKey
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
} finally {
|
||||
log('get release write lock')
|
||||
release()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PeerId} peerId
|
||||
*/
|
||||
async delete (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
log('delete await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('delete got write lock')
|
||||
|
||||
try {
|
||||
await this._store.patchOrCreate(peerId, {
|
||||
pubKey: undefined
|
||||
})
|
||||
} finally {
|
||||
log('delete release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._emit(EVENT_NAME, { peerId, pubKey: undefined })
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = KeyBook
|
||||
module.exports = PeerStoreKeyBook
|
||||
|
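The KeyBook now reads and writes through the shared datastore as well. A short sketch, reusing the peerStore from the previous example; the Ed25519 peer is a placeholder.

const PeerId = require('peer-id')

async function rememberKey (peerStore) {
  const remotePeer = await PeerId.create({ keyType: 'Ed25519' })

  // set() is a no-op when identical key bytes are already stored,
  // otherwise it patches the record and emits 'change:pubkey'
  await peerStore.keyBook.set(remotePeer, remotePeer.pubKey)

  const pubKey = await peerStore.keyBook.get(remotePeer)
  console.log(pubKey ? 'public key stored' : 'no key for peer')
}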
@@ -1,119 +1,67 @@
|
||||
'use strict'
|
||||
|
||||
const debug = require('debug')
|
||||
const log = Object.assign(debug('libp2p:peer-store:proto-book'), {
|
||||
error: debug('libp2p:peer-store:proto-book:err')
|
||||
})
|
||||
const errcode = require('err-code')
|
||||
const { codes } = require('../errors')
|
||||
const PeerId = require('peer-id')
|
||||
const { equals: uint8ArrayEquals } = require('uint8arrays/equals')
|
||||
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
const Book = require('./book')
|
||||
|
||||
const {
|
||||
codes: { ERR_INVALID_PARAMETERS }
|
||||
} = require('../errors')
|
||||
const log = Object.assign(debug('libp2p:peer-store:metadata-book'), {
|
||||
error: debug('libp2p:peer-store:metadata-book:err')
|
||||
})
|
||||
|
||||
/**
|
||||
* @typedef {import('./')} PeerStore
|
||||
* @typedef {import('./types').PeerStore} PeerStore
|
||||
* @typedef {import('./types').MetadataBook} MetadataBook
|
||||
*/
|
||||
|
||||
const EVENT_NAME = 'change:metadata'
|
||||
|
||||
/**
|
||||
* @extends {Book}
|
||||
*
|
||||
* @fires MetadataBook#change:metadata
|
||||
* @implements {MetadataBook}
|
||||
*/
|
||||
class MetadataBook extends Book {
|
||||
class PeerStoreMetadataBook {
|
||||
/**
|
||||
* The MetadataBook is responsible for keeping the known supported
|
||||
* protocols of a peer.
|
||||
* protocols of a peer
|
||||
*
|
||||
* @class
|
||||
* @param {PeerStore} peerStore
|
||||
* @param {PeerStore["emit"]} emit
|
||||
* @param {import('./types').Store} store
|
||||
*/
|
||||
constructor (peerStore) {
|
||||
/**
|
||||
* PeerStore Event emitter, used by the MetadataBook to emit:
|
||||
* "change:metadata" - emitted when the known metadata of a peer change.
|
||||
*/
|
||||
super({
|
||||
peerStore,
|
||||
eventName: 'change:metadata',
|
||||
eventProperty: 'metadata'
|
||||
})
|
||||
|
||||
/**
|
||||
* Map known peers to their known protocols.
|
||||
*
|
||||
* @type {Map<string, Map<string, Uint8Array>>}
|
||||
*/
|
||||
this.data = new Map()
|
||||
constructor (emit, store) {
|
||||
this._emit = emit
|
||||
this._store = store
|
||||
}
|
||||
|
||||
/**
|
||||
* Set metadata key and value of a provided peer.
|
||||
* Get the known data of a provided peer
|
||||
*
|
||||
* @override
|
||||
* @param {PeerId} peerId
|
||||
* @param {string} key - metadata key
|
||||
* @param {Uint8Array} value - metadata value
|
||||
* @returns {MetadataBook}
|
||||
*/
|
||||
// @ts-ignore override with more then the parameters expected in Book
|
||||
set (peerId, key, value) {
|
||||
async get (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
if (typeof key !== 'string' || !(value instanceof Uint8Array)) {
|
||||
log.error('valid key and value must be provided to store data')
|
||||
throw errcode(new Error('valid key and value must be provided'), ERR_INVALID_PARAMETERS)
|
||||
log('get await read lock')
|
||||
const release = await this._store.lock.readLock()
|
||||
log('get got read lock')
|
||||
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
|
||||
return peer.metadata
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
} finally {
|
||||
log('get release read lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._setValue(peerId, key, value)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Set data into the datastructure
|
||||
*
|
||||
* @override
|
||||
* @param {PeerId} peerId
|
||||
* @param {string} key
|
||||
* @param {Uint8Array} value
|
||||
*/
|
||||
_setValue (peerId, key, value, { emit = true } = {}) {
|
||||
const id = peerId.toB58String()
|
||||
const rec = this.data.get(id) || new Map()
|
||||
const recMap = rec.get(key)
|
||||
|
||||
// Already exists and is equal
|
||||
if (recMap && uint8ArrayEquals(value, recMap)) {
|
||||
log(`the metadata provided to store is equal to the already stored for ${id} on ${key}`)
|
||||
return
|
||||
}
|
||||
|
||||
rec.set(key, value)
|
||||
this.data.set(id, rec)
|
||||
|
||||
emit && this._emit(peerId, key)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the known data of a provided peer.
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @returns {Map<string, Uint8Array>|undefined}
|
||||
*/
|
||||
get (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
return this.data.get(peerId.toB58String())
|
||||
return new Map()
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -121,59 +69,182 @@ class MetadataBook extends Book {
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @param {string} key
|
||||
* @returns {Uint8Array | undefined}
|
||||
*/
|
||||
getValue (peerId, key) {
|
||||
async getValue (peerId, key) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const rec = this.data.get(peerId.toB58String())
|
||||
return rec && rec.get(key)
|
||||
log('getValue await read lock')
|
||||
const release = await this._store.lock.readLock()
|
||||
log('getValue got read lock')
|
||||
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
|
||||
return peer.metadata.get(key)
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
} finally {
|
||||
log('getValue release write lock')
|
||||
release()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes the provided peer from the book.
|
||||
* @param {PeerId} peerId
|
||||
* @param {Map<string, Uint8Array>} metadata
|
||||
*/
|
||||
async set (peerId, metadata) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
if (!metadata || !(metadata instanceof Map)) {
|
||||
log.error('valid metadata must be provided to store data')
|
||||
throw errcode(new Error('valid metadata must be provided'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
log('set await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('set got write lock')
|
||||
|
||||
try {
|
||||
await this._store.mergeOrCreate(peerId, {
|
||||
metadata
|
||||
})
|
||||
} finally {
|
||||
log('set release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._emit(EVENT_NAME, { peerId, metadata })
|
||||
}
|
||||
|
||||
/**
|
||||
* Set metadata key and value of a provided peer
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @returns {boolean}
|
||||
* @param {string} key - metadata key
|
||||
* @param {Uint8Array} value - metadata value
|
||||
*/
|
||||
delete (peerId) {
|
||||
async setValue (peerId, key, value) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
if (!this.data.delete(peerId.toB58String())) {
|
||||
return false
|
||||
if (typeof key !== 'string' || !(value instanceof Uint8Array)) {
|
||||
log.error('valid key and value must be provided to store data')
|
||||
throw errcode(new Error('valid key and value must be provided'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
this._emit(peerId)
|
||||
log('setValue await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('setValue got write lock')
|
||||
|
||||
return true
|
||||
let updatedPeer
|
||||
|
||||
try {
|
||||
try {
|
||||
const existingPeer = await this._store.load(peerId)
|
||||
const existingValue = existingPeer.metadata.get(key)
|
||||
|
||||
if (existingValue != null && uint8ArrayEquals(value, existingValue)) {
|
||||
return
|
||||
}
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
updatedPeer = await this._store.mergeOrCreate(peerId, {
|
||||
metadata: new Map([[key, value]])
|
||||
})
|
||||
} finally {
|
||||
log('setValue release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._emit(EVENT_NAME, { peerId, metadata: updatedPeer.metadata })
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PeerId} peerId
|
||||
*/
|
||||
async delete (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
log('delete await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('delete got write lock')
|
||||
|
||||
let has
|
||||
|
||||
try {
|
||||
has = await this._store.has(peerId)
|
||||
|
||||
if (has) {
|
||||
await this._store.patch(peerId, {
|
||||
metadata: new Map()
|
||||
})
|
||||
}
|
||||
} finally {
|
||||
log('delete release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
if (has) {
|
||||
this._emit(EVENT_NAME, { peerId, metadata: new Map() })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes the provided peer metadata key from the book.
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @param {string} key
|
||||
* @returns {boolean}
|
||||
*/
|
||||
deleteValue (peerId, key) {
|
||||
async deleteValue (peerId, key) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const rec = this.data.get(peerId.toB58String())
|
||||
log('deleteValue await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('deleteValue got write lock')
|
||||
|
||||
if (!rec || !rec.delete(key)) {
|
||||
return false
|
||||
let metadata
|
||||
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
metadata = peer.metadata
|
||||
|
||||
metadata.delete(key)
|
||||
|
||||
await this._store.patch(peerId, {
|
||||
metadata
|
||||
})
|
||||
} catch (/** @type {any} **/ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
} finally {
|
||||
log('deleteValue release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._emit(peerId, key)
|
||||
|
||||
return true
|
||||
if (metadata) {
|
||||
this._emit(EVENT_NAME, { peerId, metadata })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = MetadataBook
|
||||
module.exports = PeerStoreMetadataBook
|
||||
|
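MetadataBook values are persisted per key as raw bytes. A sketch of tagging a peer follows, assuming an existing peerStore and peerId; the 'agent-version' key and value are arbitrary examples.

const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')

async function tagPeer (peerStore, peerId) {
  // values must be Uint8Array; identical values are skipped before emitting
  await peerStore.metadataBook.setValue(peerId, 'agent-version', uint8ArrayFromString('my-agent/1.0.0'))

  const value = await peerStore.metadataBook.getValue(peerId, 'agent-version')
  console.log(value && new TextDecoder().decode(value))
}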
src/peer-store/pb/peer.d.ts (new vendored file, 222 lines)
@@ -0,0 +1,222 @@
|
||||
import * as $protobuf from "protobufjs";
|
||||
/** Properties of a Peer. */
|
||||
export interface IPeer {
|
||||
|
||||
/** Peer addresses */
|
||||
addresses?: (IAddress[]|null);
|
||||
|
||||
/** Peer protocols */
|
||||
protocols?: (string[]|null);
|
||||
|
||||
/** Peer metadata */
|
||||
metadata?: (IMetadata[]|null);
|
||||
|
||||
/** Peer pubKey */
|
||||
pubKey?: (Uint8Array|null);
|
||||
|
||||
/** Peer peerRecordEnvelope */
|
||||
peerRecordEnvelope?: (Uint8Array|null);
|
||||
}
|
||||
|
||||
/** Represents a Peer. */
|
||||
export class Peer implements IPeer {
|
||||
|
||||
/**
|
||||
* Constructs a new Peer.
|
||||
* @param [p] Properties to set
|
||||
*/
|
||||
constructor(p?: IPeer);
|
||||
|
||||
/** Peer addresses. */
|
||||
public addresses: IAddress[];
|
||||
|
||||
/** Peer protocols. */
|
||||
public protocols: string[];
|
||||
|
||||
/** Peer metadata. */
|
||||
public metadata: IMetadata[];
|
||||
|
||||
/** Peer pubKey. */
|
||||
public pubKey?: (Uint8Array|null);
|
||||
|
||||
/** Peer peerRecordEnvelope. */
|
||||
public peerRecordEnvelope?: (Uint8Array|null);
|
||||
|
||||
/** Peer _pubKey. */
|
||||
public _pubKey?: "pubKey";
|
||||
|
||||
/** Peer _peerRecordEnvelope. */
|
||||
public _peerRecordEnvelope?: "peerRecordEnvelope";
|
||||
|
||||
/**
|
||||
* Encodes the specified Peer message. Does not implicitly {@link Peer.verify|verify} messages.
|
||||
* @param m Peer message or plain object to encode
|
||||
* @param [w] Writer to encode to
|
||||
* @returns Writer
|
||||
*/
|
||||
public static encode(m: IPeer, w?: $protobuf.Writer): $protobuf.Writer;
|
||||
|
||||
/**
|
||||
* Decodes a Peer message from the specified reader or buffer.
|
||||
* @param r Reader or buffer to decode from
|
||||
* @param [l] Message length if known beforehand
|
||||
* @returns Peer
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Peer;
|
||||
|
||||
/**
|
||||
* Creates a Peer message from a plain object. Also converts values to their respective internal types.
|
||||
* @param d Plain object
|
||||
* @returns Peer
|
||||
*/
|
||||
public static fromObject(d: { [k: string]: any }): Peer;
|
||||
|
||||
/**
|
||||
* Creates a plain object from a Peer message. Also converts values to other types if specified.
|
||||
* @param m Peer
|
||||
* @param [o] Conversion options
|
||||
* @returns Plain object
|
||||
*/
|
||||
public static toObject(m: Peer, o?: $protobuf.IConversionOptions): { [k: string]: any };
|
||||
|
||||
/**
|
||||
* Converts this Peer to JSON.
|
||||
* @returns JSON object
|
||||
*/
|
||||
public toJSON(): { [k: string]: any };
|
||||
}
|
||||
|
||||
/** Properties of an Address. */
|
||||
export interface IAddress {
|
||||
|
||||
/** Address multiaddr */
|
||||
multiaddr?: (Uint8Array|null);
|
||||
|
||||
/** Address isCertified */
|
||||
isCertified?: (boolean|null);
|
||||
}
|
||||
|
||||
/** Represents an Address. */
|
||||
export class Address implements IAddress {
|
||||
|
||||
/**
|
||||
* Constructs a new Address.
|
||||
* @param [p] Properties to set
|
||||
*/
|
||||
constructor(p?: IAddress);
|
||||
|
||||
/** Address multiaddr. */
|
||||
public multiaddr: Uint8Array;
|
||||
|
||||
/** Address isCertified. */
|
||||
public isCertified?: (boolean|null);
|
||||
|
||||
/** Address _isCertified. */
|
||||
public _isCertified?: "isCertified";
|
||||
|
||||
/**
|
||||
* Encodes the specified Address message. Does not implicitly {@link Address.verify|verify} messages.
|
||||
* @param m Address message or plain object to encode
|
||||
* @param [w] Writer to encode to
|
||||
* @returns Writer
|
||||
*/
|
||||
public static encode(m: IAddress, w?: $protobuf.Writer): $protobuf.Writer;
|
||||
|
||||
/**
|
||||
* Decodes an Address message from the specified reader or buffer.
|
||||
* @param r Reader or buffer to decode from
|
||||
* @param [l] Message length if known beforehand
|
||||
* @returns Address
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Address;
|
||||
|
||||
/**
|
||||
* Creates an Address message from a plain object. Also converts values to their respective internal types.
|
||||
* @param d Plain object
|
||||
* @returns Address
|
||||
*/
|
||||
public static fromObject(d: { [k: string]: any }): Address;
|
||||
|
||||
/**
|
||||
* Creates a plain object from an Address message. Also converts values to other types if specified.
|
||||
* @param m Address
|
||||
* @param [o] Conversion options
|
||||
* @returns Plain object
|
||||
*/
|
||||
public static toObject(m: Address, o?: $protobuf.IConversionOptions): { [k: string]: any };
|
||||
|
||||
/**
|
||||
* Converts this Address to JSON.
|
||||
* @returns JSON object
|
||||
*/
|
||||
public toJSON(): { [k: string]: any };
|
||||
}
|
||||
|
||||
/** Properties of a Metadata. */
|
||||
export interface IMetadata {
|
||||
|
||||
/** Metadata key */
|
||||
key?: (string|null);
|
||||
|
||||
/** Metadata value */
|
||||
value?: (Uint8Array|null);
|
||||
}
|
||||
|
||||
/** Represents a Metadata. */
|
||||
export class Metadata implements IMetadata {
|
||||
|
||||
/**
|
||||
* Constructs a new Metadata.
|
||||
* @param [p] Properties to set
|
||||
*/
|
||||
constructor(p?: IMetadata);
|
||||
|
||||
/** Metadata key. */
|
||||
public key: string;
|
||||
|
||||
/** Metadata value. */
|
||||
public value: Uint8Array;
|
||||
|
||||
/**
|
||||
* Encodes the specified Metadata message. Does not implicitly {@link Metadata.verify|verify} messages.
|
||||
* @param m Metadata message or plain object to encode
|
||||
* @param [w] Writer to encode to
|
||||
* @returns Writer
|
||||
*/
|
||||
public static encode(m: IMetadata, w?: $protobuf.Writer): $protobuf.Writer;
|
||||
|
||||
/**
|
||||
* Decodes a Metadata message from the specified reader or buffer.
|
||||
* @param r Reader or buffer to decode from
|
||||
* @param [l] Message length if known beforehand
|
||||
* @returns Metadata
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Metadata;
|
||||
|
||||
/**
|
||||
* Creates a Metadata message from a plain object. Also converts values to their respective internal types.
|
||||
* @param d Plain object
|
||||
* @returns Metadata
|
||||
*/
|
||||
public static fromObject(d: { [k: string]: any }): Metadata;
|
||||
|
||||
/**
|
||||
* Creates a plain object from a Metadata message. Also converts values to other types if specified.
|
||||
* @param m Metadata
|
||||
* @param [o] Conversion options
|
||||
* @returns Plain object
|
||||
*/
|
||||
public static toObject(m: Metadata, o?: $protobuf.IConversionOptions): { [k: string]: any };
|
||||
|
||||
/**
|
||||
* Converts this Metadata to JSON.
|
||||
* @returns JSON object
|
||||
*/
|
||||
public toJSON(): { [k: string]: any };
|
||||
}
|
src/peer-store/pb/peer.js (new file, 643 lines)
@@ -0,0 +1,643 @@
|
||||
/*eslint-disable*/
|
||||
"use strict";
|
||||
|
||||
var $protobuf = require("protobufjs/minimal");
|
||||
|
||||
// Common aliases
|
||||
var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
|
||||
|
||||
// Exported root namespace
|
||||
var $root = $protobuf.roots["libp2p-peer"] || ($protobuf.roots["libp2p-peer"] = {});
|
||||
|
||||
$root.Peer = (function() {
|
||||
|
||||
/**
|
||||
* Properties of a Peer.
|
||||
* @exports IPeer
|
||||
* @interface IPeer
|
||||
* @property {Array.<IAddress>|null} [addresses] Peer addresses
|
||||
* @property {Array.<string>|null} [protocols] Peer protocols
|
||||
* @property {Array.<IMetadata>|null} [metadata] Peer metadata
|
||||
* @property {Uint8Array|null} [pubKey] Peer pubKey
|
||||
* @property {Uint8Array|null} [peerRecordEnvelope] Peer peerRecordEnvelope
|
||||
*/
|
||||
|
||||
/**
|
||||
* Constructs a new Peer.
|
||||
* @exports Peer
|
||||
* @classdesc Represents a Peer.
|
||||
* @implements IPeer
|
||||
* @constructor
|
||||
* @param {IPeer=} [p] Properties to set
|
||||
*/
|
||||
function Peer(p) {
|
||||
this.addresses = [];
|
||||
this.protocols = [];
|
||||
this.metadata = [];
|
||||
if (p)
|
||||
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
||||
if (p[ks[i]] != null)
|
||||
this[ks[i]] = p[ks[i]];
|
||||
}
|
||||
|
||||
/**
|
||||
* Peer addresses.
|
||||
* @member {Array.<IAddress>} addresses
|
||||
* @memberof Peer
|
||||
* @instance
|
||||
*/
|
||||
Peer.prototype.addresses = $util.emptyArray;
|
||||
|
||||
/**
|
||||
* Peer protocols.
|
||||
* @member {Array.<string>} protocols
|
||||
* @memberof Peer
|
||||
* @instance
|
||||
*/
|
||||
Peer.prototype.protocols = $util.emptyArray;
|
||||
|
||||
/**
|
||||
* Peer metadata.
|
||||
* @member {Array.<IMetadata>} metadata
|
||||
* @memberof Peer
|
||||
* @instance
|
||||
*/
|
||||
Peer.prototype.metadata = $util.emptyArray;
|
||||
|
||||
/**
|
||||
* Peer pubKey.
|
||||
* @member {Uint8Array|null|undefined} pubKey
|
||||
* @memberof Peer
|
||||
* @instance
|
||||
*/
|
||||
Peer.prototype.pubKey = null;
|
||||
|
||||
/**
|
||||
* Peer peerRecordEnvelope.
|
||||
* @member {Uint8Array|null|undefined} peerRecordEnvelope
|
||||
* @memberof Peer
|
||||
* @instance
|
||||
*/
|
||||
Peer.prototype.peerRecordEnvelope = null;
|
||||
|
||||
// OneOf field names bound to virtual getters and setters
|
||||
var $oneOfFields;
|
||||
|
||||
/**
|
||||
* Peer _pubKey.
|
||||
* @member {"pubKey"|undefined} _pubKey
|
||||
* @memberof Peer
|
||||
* @instance
|
||||
*/
|
||||
Object.defineProperty(Peer.prototype, "_pubKey", {
|
||||
get: $util.oneOfGetter($oneOfFields = ["pubKey"]),
|
||||
set: $util.oneOfSetter($oneOfFields)
|
||||
});
|
||||
|
||||
/**
|
||||
* Peer _peerRecordEnvelope.
|
||||
* @member {"peerRecordEnvelope"|undefined} _peerRecordEnvelope
|
||||
* @memberof Peer
|
||||
* @instance
|
||||
*/
|
||||
Object.defineProperty(Peer.prototype, "_peerRecordEnvelope", {
|
||||
get: $util.oneOfGetter($oneOfFields = ["peerRecordEnvelope"]),
|
||||
set: $util.oneOfSetter($oneOfFields)
|
||||
});
|
||||
|
||||
/**
|
||||
* Encodes the specified Peer message. Does not implicitly {@link Peer.verify|verify} messages.
|
||||
* @function encode
|
||||
* @memberof Peer
|
||||
* @static
|
||||
* @param {IPeer} m Peer message or plain object to encode
|
||||
* @param {$protobuf.Writer} [w] Writer to encode to
|
||||
* @returns {$protobuf.Writer} Writer
|
||||
*/
|
||||
Peer.encode = function encode(m, w) {
|
||||
if (!w)
|
||||
w = $Writer.create();
|
||||
if (m.addresses != null && m.addresses.length) {
|
||||
for (var i = 0; i < m.addresses.length; ++i)
|
||||
$root.Address.encode(m.addresses[i], w.uint32(10).fork()).ldelim();
|
||||
}
|
||||
if (m.protocols != null && m.protocols.length) {
|
||||
for (var i = 0; i < m.protocols.length; ++i)
|
||||
w.uint32(18).string(m.protocols[i]);
|
||||
}
|
||||
if (m.metadata != null && m.metadata.length) {
|
||||
for (var i = 0; i < m.metadata.length; ++i)
|
||||
$root.Metadata.encode(m.metadata[i], w.uint32(26).fork()).ldelim();
|
||||
}
|
||||
if (m.pubKey != null && Object.hasOwnProperty.call(m, "pubKey"))
|
||||
w.uint32(34).bytes(m.pubKey);
|
||||
if (m.peerRecordEnvelope != null && Object.hasOwnProperty.call(m, "peerRecordEnvelope"))
|
||||
w.uint32(42).bytes(m.peerRecordEnvelope);
|
||||
return w;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes a Peer message from the specified reader or buffer.
|
||||
* @function decode
|
||||
* @memberof Peer
|
||||
* @static
|
||||
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
||||
* @param {number} [l] Message length if known beforehand
|
||||
* @returns {Peer} Peer
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
Peer.decode = function decode(r, l) {
|
||||
if (!(r instanceof $Reader))
|
||||
r = $Reader.create(r);
|
||||
var c = l === undefined ? r.len : r.pos + l, m = new $root.Peer();
|
||||
while (r.pos < c) {
|
||||
var t = r.uint32();
|
||||
switch (t >>> 3) {
|
||||
case 1:
|
||||
if (!(m.addresses && m.addresses.length))
|
||||
m.addresses = [];
|
||||
m.addresses.push($root.Address.decode(r, r.uint32()));
|
||||
break;
|
||||
case 2:
|
||||
if (!(m.protocols && m.protocols.length))
|
||||
m.protocols = [];
|
||||
m.protocols.push(r.string());
|
||||
break;
|
||||
case 3:
|
||||
if (!(m.metadata && m.metadata.length))
|
||||
m.metadata = [];
|
||||
m.metadata.push($root.Metadata.decode(r, r.uint32()));
|
||||
break;
|
||||
case 4:
|
||||
m.pubKey = r.bytes();
|
||||
break;
|
||||
case 5:
|
||||
m.peerRecordEnvelope = r.bytes();
|
||||
break;
|
||||
default:
|
||||
r.skipType(t & 7);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a Peer message from a plain object. Also converts values to their respective internal types.
|
||||
* @function fromObject
|
||||
* @memberof Peer
|
||||
* @static
|
||||
* @param {Object.<string,*>} d Plain object
|
||||
* @returns {Peer} Peer
|
||||
*/
|
||||
Peer.fromObject = function fromObject(d) {
|
||||
if (d instanceof $root.Peer)
|
||||
return d;
|
||||
var m = new $root.Peer();
|
||||
if (d.addresses) {
|
||||
if (!Array.isArray(d.addresses))
|
||||
throw TypeError(".Peer.addresses: array expected");
|
||||
m.addresses = [];
|
||||
for (var i = 0; i < d.addresses.length; ++i) {
|
||||
if (typeof d.addresses[i] !== "object")
|
||||
throw TypeError(".Peer.addresses: object expected");
|
||||
m.addresses[i] = $root.Address.fromObject(d.addresses[i]);
|
||||
}
|
||||
}
|
||||
if (d.protocols) {
|
||||
if (!Array.isArray(d.protocols))
|
||||
throw TypeError(".Peer.protocols: array expected");
|
||||
m.protocols = [];
|
||||
for (var i = 0; i < d.protocols.length; ++i) {
|
||||
m.protocols[i] = String(d.protocols[i]);
|
||||
}
|
||||
}
|
||||
if (d.metadata) {
|
||||
if (!Array.isArray(d.metadata))
|
||||
throw TypeError(".Peer.metadata: array expected");
|
||||
m.metadata = [];
|
||||
for (var i = 0; i < d.metadata.length; ++i) {
|
||||
if (typeof d.metadata[i] !== "object")
|
||||
throw TypeError(".Peer.metadata: object expected");
|
||||
m.metadata[i] = $root.Metadata.fromObject(d.metadata[i]);
|
||||
}
|
||||
}
|
||||
if (d.pubKey != null) {
|
||||
if (typeof d.pubKey === "string")
|
||||
$util.base64.decode(d.pubKey, m.pubKey = $util.newBuffer($util.base64.length(d.pubKey)), 0);
|
||||
else if (d.pubKey.length)
|
||||
m.pubKey = d.pubKey;
|
||||
}
|
||||
if (d.peerRecordEnvelope != null) {
|
||||
if (typeof d.peerRecordEnvelope === "string")
|
||||
$util.base64.decode(d.peerRecordEnvelope, m.peerRecordEnvelope = $util.newBuffer($util.base64.length(d.peerRecordEnvelope)), 0);
|
||||
else if (d.peerRecordEnvelope.length)
|
||||
m.peerRecordEnvelope = d.peerRecordEnvelope;
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a plain object from a Peer message. Also converts values to other types if specified.
|
||||
* @function toObject
|
||||
* @memberof Peer
|
||||
* @static
|
||||
* @param {Peer} m Peer
|
||||
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
||||
* @returns {Object.<string,*>} Plain object
|
||||
*/
|
||||
Peer.toObject = function toObject(m, o) {
|
||||
if (!o)
|
||||
o = {};
|
||||
var d = {};
|
||||
if (o.arrays || o.defaults) {
|
||||
d.addresses = [];
|
||||
d.protocols = [];
|
||||
d.metadata = [];
|
||||
}
|
||||
if (m.addresses && m.addresses.length) {
|
||||
d.addresses = [];
|
||||
for (var j = 0; j < m.addresses.length; ++j) {
|
||||
d.addresses[j] = $root.Address.toObject(m.addresses[j], o);
|
||||
}
|
||||
}
|
||||
if (m.protocols && m.protocols.length) {
|
||||
d.protocols = [];
|
||||
for (var j = 0; j < m.protocols.length; ++j) {
|
||||
d.protocols[j] = m.protocols[j];
|
||||
}
|
||||
}
|
||||
if (m.metadata && m.metadata.length) {
|
||||
d.metadata = [];
|
||||
for (var j = 0; j < m.metadata.length; ++j) {
|
||||
d.metadata[j] = $root.Metadata.toObject(m.metadata[j], o);
|
||||
}
|
||||
}
|
||||
if (m.pubKey != null && m.hasOwnProperty("pubKey")) {
|
||||
d.pubKey = o.bytes === String ? $util.base64.encode(m.pubKey, 0, m.pubKey.length) : o.bytes === Array ? Array.prototype.slice.call(m.pubKey) : m.pubKey;
|
||||
if (o.oneofs)
|
||||
d._pubKey = "pubKey";
|
||||
}
|
||||
if (m.peerRecordEnvelope != null && m.hasOwnProperty("peerRecordEnvelope")) {
|
||||
d.peerRecordEnvelope = o.bytes === String ? $util.base64.encode(m.peerRecordEnvelope, 0, m.peerRecordEnvelope.length) : o.bytes === Array ? Array.prototype.slice.call(m.peerRecordEnvelope) : m.peerRecordEnvelope;
|
||||
if (o.oneofs)
|
||||
d._peerRecordEnvelope = "peerRecordEnvelope";
|
||||
}
|
||||
return d;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts this Peer to JSON.
|
||||
* @function toJSON
|
||||
* @memberof Peer
|
||||
* @instance
|
||||
* @returns {Object.<string,*>} JSON object
|
||||
*/
|
||||
Peer.prototype.toJSON = function toJSON() {
|
||||
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
||||
};
|
||||
|
||||
return Peer;
|
||||
})();
|
||||
|
||||
$root.Address = (function() {
|
||||
|
||||
/**
|
||||
* Properties of an Address.
|
||||
* @exports IAddress
|
||||
* @interface IAddress
|
||||
* @property {Uint8Array|null} [multiaddr] Address multiaddr
|
||||
* @property {boolean|null} [isCertified] Address isCertified
|
||||
*/
|
||||
|
||||
/**
|
||||
* Constructs a new Address.
|
||||
* @exports Address
|
||||
* @classdesc Represents an Address.
|
||||
* @implements IAddress
|
||||
* @constructor
|
||||
* @param {IAddress=} [p] Properties to set
|
||||
*/
|
||||
function Address(p) {
|
||||
if (p)
|
||||
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
||||
if (p[ks[i]] != null)
|
||||
this[ks[i]] = p[ks[i]];
|
||||
}
|
||||
|
||||
/**
|
||||
* Address multiaddr.
|
||||
* @member {Uint8Array} multiaddr
|
||||
* @memberof Address
|
||||
* @instance
|
||||
*/
|
||||
Address.prototype.multiaddr = $util.newBuffer([]);
|
||||
|
||||
/**
|
||||
* Address isCertified.
|
||||
* @member {boolean|null|undefined} isCertified
|
||||
* @memberof Address
|
||||
* @instance
|
||||
*/
|
||||
Address.prototype.isCertified = null;
|
||||
|
||||
// OneOf field names bound to virtual getters and setters
|
||||
var $oneOfFields;
|
||||
|
||||
/**
|
||||
* Address _isCertified.
|
||||
* @member {"isCertified"|undefined} _isCertified
|
||||
* @memberof Address
|
||||
* @instance
|
||||
*/
|
||||
Object.defineProperty(Address.prototype, "_isCertified", {
|
||||
get: $util.oneOfGetter($oneOfFields = ["isCertified"]),
|
||||
set: $util.oneOfSetter($oneOfFields)
|
||||
});
|
||||
|
||||
/**
|
||||
* Encodes the specified Address message. Does not implicitly {@link Address.verify|verify} messages.
|
||||
* @function encode
|
||||
* @memberof Address
|
||||
* @static
|
||||
* @param {IAddress} m Address message or plain object to encode
|
||||
* @param {$protobuf.Writer} [w] Writer to encode to
|
||||
* @returns {$protobuf.Writer} Writer
|
||||
*/
|
||||
Address.encode = function encode(m, w) {
|
||||
if (!w)
|
||||
w = $Writer.create();
|
||||
if (m.multiaddr != null && Object.hasOwnProperty.call(m, "multiaddr"))
|
||||
w.uint32(10).bytes(m.multiaddr);
|
||||
if (m.isCertified != null && Object.hasOwnProperty.call(m, "isCertified"))
|
||||
w.uint32(16).bool(m.isCertified);
|
||||
return w;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes an Address message from the specified reader or buffer.
|
||||
* @function decode
|
||||
* @memberof Address
|
||||
* @static
|
||||
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
||||
* @param {number} [l] Message length if known beforehand
|
||||
* @returns {Address} Address
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
Address.decode = function decode(r, l) {
|
||||
if (!(r instanceof $Reader))
|
||||
r = $Reader.create(r);
|
||||
var c = l === undefined ? r.len : r.pos + l, m = new $root.Address();
|
||||
while (r.pos < c) {
|
||||
var t = r.uint32();
|
||||
switch (t >>> 3) {
|
||||
case 1:
|
||||
m.multiaddr = r.bytes();
|
||||
break;
|
||||
case 2:
|
||||
m.isCertified = r.bool();
|
||||
break;
|
||||
default:
|
||||
r.skipType(t & 7);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates an Address message from a plain object. Also converts values to their respective internal types.
|
||||
* @function fromObject
|
||||
* @memberof Address
|
||||
* @static
|
||||
* @param {Object.<string,*>} d Plain object
|
||||
* @returns {Address} Address
|
||||
*/
|
||||
Address.fromObject = function fromObject(d) {
|
||||
if (d instanceof $root.Address)
|
||||
return d;
|
||||
var m = new $root.Address();
|
||||
if (d.multiaddr != null) {
|
||||
if (typeof d.multiaddr === "string")
|
||||
$util.base64.decode(d.multiaddr, m.multiaddr = $util.newBuffer($util.base64.length(d.multiaddr)), 0);
|
||||
else if (d.multiaddr.length)
|
||||
m.multiaddr = d.multiaddr;
|
||||
}
|
||||
if (d.isCertified != null) {
|
||||
m.isCertified = Boolean(d.isCertified);
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a plain object from an Address message. Also converts values to other types if specified.
|
||||
* @function toObject
|
||||
* @memberof Address
|
||||
* @static
|
||||
* @param {Address} m Address
|
||||
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
||||
* @returns {Object.<string,*>} Plain object
|
||||
*/
|
||||
Address.toObject = function toObject(m, o) {
|
||||
if (!o)
|
||||
o = {};
|
||||
var d = {};
|
||||
if (o.defaults) {
|
||||
if (o.bytes === String)
|
||||
d.multiaddr = "";
|
||||
else {
|
||||
d.multiaddr = [];
|
||||
if (o.bytes !== Array)
|
||||
d.multiaddr = $util.newBuffer(d.multiaddr);
|
||||
}
|
||||
}
|
||||
if (m.multiaddr != null && m.hasOwnProperty("multiaddr")) {
|
||||
d.multiaddr = o.bytes === String ? $util.base64.encode(m.multiaddr, 0, m.multiaddr.length) : o.bytes === Array ? Array.prototype.slice.call(m.multiaddr) : m.multiaddr;
|
||||
}
|
||||
if (m.isCertified != null && m.hasOwnProperty("isCertified")) {
|
||||
d.isCertified = m.isCertified;
|
||||
if (o.oneofs)
|
||||
d._isCertified = "isCertified";
|
||||
}
|
||||
return d;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts this Address to JSON.
|
||||
* @function toJSON
|
||||
* @memberof Address
|
||||
* @instance
|
||||
* @returns {Object.<string,*>} JSON object
|
||||
*/
|
||||
Address.prototype.toJSON = function toJSON() {
|
||||
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
||||
};
|
||||
|
||||
return Address;
|
||||
})();
|
||||
|
||||
$root.Metadata = (function() {
|
||||
|
||||
/**
|
||||
* Properties of a Metadata.
|
||||
* @exports IMetadata
|
||||
* @interface IMetadata
|
||||
* @property {string|null} [key] Metadata key
|
||||
* @property {Uint8Array|null} [value] Metadata value
|
||||
*/
|
||||
|
||||
/**
|
||||
* Constructs a new Metadata.
|
||||
* @exports Metadata
|
||||
* @classdesc Represents a Metadata.
|
||||
* @implements IMetadata
|
||||
* @constructor
|
||||
* @param {IMetadata=} [p] Properties to set
|
||||
*/
|
||||
function Metadata(p) {
|
||||
if (p)
|
||||
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
||||
if (p[ks[i]] != null)
|
||||
this[ks[i]] = p[ks[i]];
|
||||
}
|
||||
|
||||
/**
|
||||
* Metadata key.
|
||||
* @member {string} key
|
||||
* @memberof Metadata
|
||||
* @instance
|
||||
*/
|
||||
Metadata.prototype.key = "";
|
||||
|
||||
/**
|
||||
* Metadata value.
|
||||
* @member {Uint8Array} value
|
||||
* @memberof Metadata
|
||||
* @instance
|
||||
*/
|
||||
Metadata.prototype.value = $util.newBuffer([]);
|
||||
|
||||
/**
|
||||
* Encodes the specified Metadata message. Does not implicitly {@link Metadata.verify|verify} messages.
|
||||
* @function encode
|
||||
* @memberof Metadata
|
||||
* @static
|
||||
* @param {IMetadata} m Metadata message or plain object to encode
|
||||
* @param {$protobuf.Writer} [w] Writer to encode to
|
||||
* @returns {$protobuf.Writer} Writer
|
||||
*/
|
||||
Metadata.encode = function encode(m, w) {
|
||||
if (!w)
|
||||
w = $Writer.create();
|
||||
if (m.key != null && Object.hasOwnProperty.call(m, "key"))
|
||||
w.uint32(10).string(m.key);
|
||||
if (m.value != null && Object.hasOwnProperty.call(m, "value"))
|
||||
w.uint32(18).bytes(m.value);
|
||||
return w;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes a Metadata message from the specified reader or buffer.
|
||||
* @function decode
|
||||
* @memberof Metadata
|
||||
* @static
|
||||
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
||||
* @param {number} [l] Message length if known beforehand
|
||||
* @returns {Metadata} Metadata
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
Metadata.decode = function decode(r, l) {
|
||||
if (!(r instanceof $Reader))
|
||||
r = $Reader.create(r);
|
||||
var c = l === undefined ? r.len : r.pos + l, m = new $root.Metadata();
|
||||
while (r.pos < c) {
|
||||
var t = r.uint32();
|
||||
switch (t >>> 3) {
|
||||
case 1:
|
||||
m.key = r.string();
|
||||
break;
|
||||
case 2:
|
||||
m.value = r.bytes();
|
||||
break;
|
||||
default:
|
||||
r.skipType(t & 7);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a Metadata message from a plain object. Also converts values to their respective internal types.
|
||||
* @function fromObject
|
||||
* @memberof Metadata
|
||||
* @static
|
||||
* @param {Object.<string,*>} d Plain object
|
||||
* @returns {Metadata} Metadata
|
||||
*/
|
||||
Metadata.fromObject = function fromObject(d) {
|
||||
if (d instanceof $root.Metadata)
|
||||
return d;
|
||||
var m = new $root.Metadata();
|
||||
if (d.key != null) {
|
||||
m.key = String(d.key);
|
||||
}
|
||||
if (d.value != null) {
|
||||
if (typeof d.value === "string")
|
||||
$util.base64.decode(d.value, m.value = $util.newBuffer($util.base64.length(d.value)), 0);
|
||||
else if (d.value.length)
|
||||
m.value = d.value;
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a plain object from a Metadata message. Also converts values to other types if specified.
|
||||
* @function toObject
|
||||
* @memberof Metadata
|
||||
* @static
|
||||
* @param {Metadata} m Metadata
|
||||
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
||||
* @returns {Object.<string,*>} Plain object
|
||||
*/
|
||||
Metadata.toObject = function toObject(m, o) {
|
||||
if (!o)
|
||||
o = {};
|
||||
var d = {};
|
||||
if (o.defaults) {
|
||||
d.key = "";
|
||||
if (o.bytes === String)
|
||||
d.value = "";
|
||||
else {
|
||||
d.value = [];
|
||||
if (o.bytes !== Array)
|
||||
d.value = $util.newBuffer(d.value);
|
||||
}
|
||||
}
|
||||
if (m.key != null && m.hasOwnProperty("key")) {
|
||||
d.key = m.key;
|
||||
}
|
||||
if (m.value != null && m.hasOwnProperty("value")) {
|
||||
d.value = o.bytes === String ? $util.base64.encode(m.value, 0, m.value.length) : o.bytes === Array ? Array.prototype.slice.call(m.value) : m.value;
|
||||
}
|
||||
return d;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts this Metadata to JSON.
|
||||
* @function toJSON
|
||||
* @memberof Metadata
|
||||
* @instance
|
||||
* @returns {Object.<string,*>} JSON object
|
||||
*/
|
||||
Metadata.prototype.toJSON = function toJSON() {
|
||||
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
||||
};
|
||||
|
||||
return Metadata;
|
||||
})();
|
||||
|
||||
module.exports = $root;
|
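As a quick sanity check, here is a minimal round-trip sketch using the generated Address and Metadata classes above; the require path './peer' and the sample values are illustrative assumptions, not part of the original file.

// Round-trip sketch for the generated classes above (illustrative paths/values)
const { Address, Metadata } = require('./peer')

// Metadata: key is a string, value is raw bytes
const metadataBuf = Metadata.encode({
  key: 'AgentVersion',
  value: new TextEncoder().encode('js-libp2p/0.36.x') // illustrative value
}).finish()
const metadata = Metadata.decode(metadataBuf)

// Address: multiaddr is stored as raw multiaddr bytes
const addressBuf = Address.encode({
  multiaddr: Uint8Array.from([0x04, 0x7f, 0x00, 0x00, 0x01]), // /ip4/127.0.0.1 (illustrative)
  isCertified: true
}).finish()
const address = Address.decode(addressBuf)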
31
src/peer-store/pb/peer.proto
Normal file
31
src/peer-store/pb/peer.proto
Normal file
@ -0,0 +1,31 @@
|
||||
syntax = "proto3";
|
||||
|
||||
message Peer {
|
||||
// Multiaddrs we know about
|
||||
repeated Address addresses = 1;
|
||||
|
||||
// The protocols the peer supports
|
||||
repeated string protocols = 2;
|
||||
|
||||
// Any peer metadata
|
||||
repeated Metadata metadata = 3;
|
||||
|
||||
// The public key of the peer
|
||||
optional bytes pub_key = 4;
|
||||
|
||||
// The most recently received signed PeerRecord
|
||||
optional bytes peer_record_envelope = 5;
|
||||
}
|
||||
|
||||
// Address represents a single multiaddr
|
||||
message Address {
|
||||
bytes multiaddr = 1;
|
||||
|
||||
// Flag to indicate if the address comes from a certified source
|
||||
optional bool isCertified = 2;
|
||||
}
|
||||
|
||||
message Metadata {
|
||||
string key = 1;
|
||||
bytes value = 2;
|
||||
}
|
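The field numbers in this schema map directly to the tag values used by the generated encoders (w.uint32(10), w.uint32(16), w.uint32(18), ...). A small sketch of that relationship, stating only the standard protobuf wire format:

// tag = (fieldNumber << 3) | wireType; wire type 2 = length-delimited
// (bytes/string/embedded message), wire type 0 = varint (bool, uint64, ...)
const tag = (fieldNumber, wireType) => (fieldNumber << 3) | wireType

tag(1, 2) // 10 -> w.uint32(10): field 1 as bytes/string/message (multiaddr, key, addresses)
tag(2, 2) // 18 -> w.uint32(18): field 2 as bytes (value, raw)
tag(2, 0) // 16 -> w.uint32(16): field 2 as varint (isCertified)
tag(1, 0) //  8 -> w.uint32(8):  field 1 as varint (seq)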
@ -1,15 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
module.exports.NAMESPACE_COMMON = '/peers/'
|
||||
|
||||
// /peers/addrs/<b32 peer id no padding>
|
||||
module.exports.NAMESPACE_ADDRESS = '/peers/addrs/'
|
||||
|
||||
// /peers/keys/<b32 peer id no padding>
|
||||
module.exports.NAMESPACE_KEYS = '/peers/keys/'
|
||||
|
||||
// /peers/metadata/<b32 peer id no padding>/<key>
|
||||
module.exports.NAMESPACE_METADATA = '/peers/metadata/'
|
||||
|
||||
// /peers/protos/<b32 peer id no padding>
|
||||
module.exports.NAMESPACE_PROTOCOL = '/peers/protos/'
|
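These namespaces are combined with a base32-encoded peer id (and, for metadata, a metadata key) to form the datastore keys used by the persistent store below. A small sketch, assuming the constants are required from './consts' as in the code that follows:

// Composing datastore keys from the namespaces above (sketch)
const { Key } = require('interface-datastore/key')
const { NAMESPACE_METADATA, NAMESPACE_PROTOCOL } = require('./consts')

function peerKeys (b32key) {
  // b32key is the peer id rendered as base32 without padding, e.g. peerId.toString()
  return {
    protocols: new Key(`${NAMESPACE_PROTOCOL}${b32key}`), // /peers/protos/<b32 peer id>
    metadata: (metadataKey) => new Key(`${NAMESPACE_METADATA}${b32key}/${metadataKey}`) // /peers/metadata/<b32 peer id>/<key>
  }
}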
@ -1,408 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
const debug = require('debug')
|
||||
const log = Object.assign(debug('libp2p:persistent-peer-store'), {
|
||||
error: debug('libp2p:persistent-peer-store:err')
|
||||
})
|
||||
const { Key } = require('interface-datastore/key')
|
||||
const { Multiaddr } = require('multiaddr')
|
||||
const PeerId = require('peer-id')
|
||||
const { base32 } = require('multiformats/bases/base32')
|
||||
|
||||
const PeerStore = require('..')
|
||||
|
||||
const {
|
||||
NAMESPACE_ADDRESS,
|
||||
NAMESPACE_COMMON,
|
||||
NAMESPACE_KEYS,
|
||||
NAMESPACE_METADATA,
|
||||
NAMESPACE_PROTOCOL
|
||||
} = require('./consts')
|
||||
|
||||
const { Addresses } = require('./pb/address-book')
|
||||
const { Protocols } = require('./pb/proto-book')
|
||||
|
||||
/**
|
||||
* @typedef {import('interface-datastore').Batch} Batch
|
||||
* @typedef {import('../address-book.js').Address} Address
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} PersistentPeerStoreProperties
|
||||
* @property {PeerId} peerId
|
||||
* @property {import('interface-datastore').Datastore} datastore
|
||||
*
|
||||
* @typedef {Object} PersistentPeerStoreOptions
|
||||
* @property {number} [threshold = 5] - Number of dirty peers allowed before committing data.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Responsible for managing the persistence of data in the PeerStore.
|
||||
*/
|
||||
class PersistentPeerStore extends PeerStore {
|
||||
/**
|
||||
* @class
|
||||
* @param {PersistentPeerStoreProperties & PersistentPeerStoreOptions} properties
|
||||
*/
|
||||
constructor ({ peerId, datastore, threshold = 5 }) {
|
||||
super({ peerId })
|
||||
|
||||
/**
|
||||
* Backend datastore used to persist data.
|
||||
*/
|
||||
this._datastore = datastore
|
||||
|
||||
/**
|
||||
* Peers modified since data was last persisted.
|
||||
*/
|
||||
this._dirtyPeers = new Set()
|
||||
|
||||
/**
|
||||
* Map of peer identifiers to the metadata keys that have changed.
|
||||
*
|
||||
* @type {Map<string, Set<string>>}
|
||||
*/
|
||||
this._dirtyMetadata = new Map()
|
||||
|
||||
this.threshold = threshold
|
||||
this._addDirtyPeer = this._addDirtyPeer.bind(this)
|
||||
}
|
||||
|
||||
/**
|
||||
* Start Persistent PeerStore.
|
||||
*
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async start () {
|
||||
log('PeerStore is starting')
|
||||
|
||||
// Handlers for dirty peers
|
||||
this.on('change:protocols', this._addDirtyPeer)
|
||||
this.on('change:multiaddrs', this._addDirtyPeer)
|
||||
this.on('change:pubkey', this._addDirtyPeerKey)
|
||||
this.on('change:metadata', this._addDirtyPeerMetadata)
|
||||
|
||||
// Load data
|
||||
for await (const entry of this._datastore.query({ prefix: NAMESPACE_COMMON })) {
|
||||
await this._processDatastoreEntry(entry)
|
||||
}
|
||||
|
||||
log('PeerStore started')
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop Persistent PeerStore.
|
||||
*
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async stop () {
|
||||
log('PeerStore is stopping')
|
||||
this.removeAllListeners()
|
||||
await this._commitData()
|
||||
log('PeerStore stopped')
|
||||
}
|
||||
|
||||
/**
|
||||
* Add modified peer to the dirty set
|
||||
*
|
||||
* @private
|
||||
* @param {Object} params
|
||||
* @param {PeerId} params.peerId
|
||||
*/
|
||||
_addDirtyPeer ({ peerId }) {
|
||||
const peerIdstr = peerId.toB58String()
|
||||
|
||||
log('add dirty peer', peerIdstr)
|
||||
this._dirtyPeers.add(peerIdstr)
|
||||
|
||||
if (this._dirtyPeers.size >= this.threshold) {
|
||||
// Commit current data
|
||||
this._commitData().catch(err => {
|
||||
log.error('error committing data', err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add modified peer key to the dirty set
|
||||
*
|
||||
* @private
|
||||
* @param {Object} params
|
||||
* @param {PeerId} params.peerId
|
||||
*/
|
||||
_addDirtyPeerKey ({ peerId }) {
|
||||
// Do not add if the peer id has an inline public key
|
||||
if (peerId.hasInlinePublicKey()) {
|
||||
return
|
||||
}
|
||||
|
||||
const peerIdstr = peerId.toB58String()
|
||||
|
||||
log('add dirty peer key', peerIdstr)
|
||||
this._dirtyPeers.add(peerIdstr)
|
||||
|
||||
if (this._dirtyPeers.size >= this.threshold) {
|
||||
// Commit current data
|
||||
this._commitData().catch(err => {
|
||||
log.error('error committing data', err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a peer with modified metadata to the dirty set.
|
||||
*
|
||||
* @private
|
||||
* @param {Object} params
|
||||
* @param {PeerId} params.peerId
|
||||
* @param {string} params.metadata
|
||||
*/
|
||||
_addDirtyPeerMetadata ({ peerId, metadata }) {
|
||||
const peerIdstr = peerId.toB58String()
|
||||
|
||||
log('add dirty metadata peer', peerIdstr)
|
||||
this._dirtyPeers.add(peerIdstr)
|
||||
|
||||
// Add dirty metadata key
|
||||
const mData = this._dirtyMetadata.get(peerIdstr) || new Set()
|
||||
mData.add(metadata)
|
||||
this._dirtyMetadata.set(peerIdstr, mData)
|
||||
|
||||
if (this._dirtyPeers.size >= this.threshold) {
|
||||
// Commit current data
|
||||
this._commitData().catch(err => {
|
||||
log.error('error committing data', err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add all dirty peers' current data to a datastore batch and commit it.
|
||||
*
|
||||
* @private
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async _commitData () {
|
||||
const commitPeers = Array.from(this._dirtyPeers)
|
||||
|
||||
if (!commitPeers.length) {
|
||||
return
|
||||
}
|
||||
|
||||
// Clear Dirty Peers set
|
||||
this._dirtyPeers.clear()
|
||||
|
||||
log('create batch commit')
|
||||
const batch = this._datastore.batch()
|
||||
for (const peerIdStr of commitPeers) {
|
||||
// PeerId
|
||||
const peerId = this.keyBook.data.get(peerIdStr) || PeerId.createFromB58String(peerIdStr)
|
||||
|
||||
// Address Book
|
||||
this._batchAddressBook(peerId, batch)
|
||||
|
||||
// Key Book
|
||||
!peerId.hasInlinePublicKey() && this._batchKeyBook(peerId, batch)
|
||||
|
||||
// Metadata Book
|
||||
this._batchMetadataBook(peerId, batch)
|
||||
|
||||
// Proto Book
|
||||
this._batchProtoBook(peerId, batch)
|
||||
}
|
||||
|
||||
await batch.commit()
|
||||
log('batch committed')
|
||||
}
|
||||
|
||||
/**
|
||||
* Add address book data of the peer to the batch.
|
||||
*
|
||||
* @private
|
||||
* @param {PeerId} peerId
|
||||
* @param {Batch} batch
|
||||
*/
|
||||
_batchAddressBook (peerId, batch) {
|
||||
const b32key = peerId.toString()
|
||||
const key = new Key(`${NAMESPACE_ADDRESS}${b32key}`)
|
||||
|
||||
const entry = this.addressBook.data.get(peerId.toB58String())
|
||||
|
||||
try {
|
||||
// Deleted from the book
|
||||
if (!entry) {
|
||||
batch.delete(key)
|
||||
return
|
||||
}
|
||||
|
||||
const encodedData = Addresses.encode({
|
||||
addrs: entry.addresses.map((address) => ({
|
||||
multiaddr: address.multiaddr.bytes,
|
||||
isCertified: address.isCertified
|
||||
})),
|
||||
certifiedRecord: entry.record
|
||||
? {
|
||||
seq: entry.record.seqNumber,
|
||||
raw: entry.record.raw
|
||||
}
|
||||
: undefined
|
||||
}).finish()
|
||||
|
||||
batch.put(key, encodedData)
|
||||
} catch (err) {
|
||||
log.error(err)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add Key book data of the peer to the batch.
|
||||
*
|
||||
* @private
|
||||
* @param {PeerId} peerId
|
||||
* @param {Batch} batch
|
||||
*/
|
||||
_batchKeyBook (peerId, batch) {
|
||||
const b32key = peerId.toString()
|
||||
const key = new Key(`${NAMESPACE_KEYS}${b32key}`)
|
||||
|
||||
try {
|
||||
// Deleted from the book
|
||||
if (!peerId.pubKey) {
|
||||
batch.delete(key)
|
||||
return
|
||||
}
|
||||
|
||||
const encodedData = peerId.marshalPubKey()
|
||||
|
||||
batch.put(key, encodedData)
|
||||
} catch (err) {
|
||||
log.error(err)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add metadata book data of the peer to the batch.
|
||||
*
|
||||
* @private
|
||||
* @param {PeerId} peerId
|
||||
* @param {Batch} batch
|
||||
*/
|
||||
_batchMetadataBook (peerId, batch) {
|
||||
const b32key = peerId.toString()
|
||||
const dirtyMetadata = this._dirtyMetadata.get(peerId.toB58String()) || []
|
||||
|
||||
try {
|
||||
dirtyMetadata.forEach((/** @type {string} */ dirtyKey) => {
|
||||
const key = new Key(`${NAMESPACE_METADATA}${b32key}/${dirtyKey}`)
|
||||
const dirtyValue = this.metadataBook.getValue(peerId, dirtyKey)
|
||||
|
||||
if (dirtyValue) {
|
||||
batch.put(key, dirtyValue)
|
||||
} else {
|
||||
batch.delete(key)
|
||||
}
|
||||
})
|
||||
} catch (err) {
|
||||
log.error(err)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add proto book data of the peer to the batch.
|
||||
*
|
||||
* @private
|
||||
* @param {PeerId} peerId
|
||||
* @param {Batch} batch
|
||||
*/
|
||||
_batchProtoBook (peerId, batch) {
|
||||
const b32key = peerId.toString()
|
||||
const key = new Key(`${NAMESPACE_PROTOCOL}${b32key}`)
|
||||
|
||||
const protocols = this.protoBook.get(peerId)
|
||||
|
||||
try {
|
||||
// Deleted from the book
|
||||
if (!protocols) {
|
||||
batch.delete(key)
|
||||
return
|
||||
}
|
||||
|
||||
const encodedData = Protocols.encode({ protocols }).finish()
|
||||
|
||||
batch.put(key, encodedData)
|
||||
} catch (err) {
|
||||
log.error(err)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process datastore entry and add its data to the correct book.
|
||||
*
|
||||
* @private
|
||||
* @param {Object} params
|
||||
* @param {Key} params.key - datastore key
|
||||
* @param {Uint8Array} params.value - datastore value stored
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async _processDatastoreEntry ({ key, value }) {
|
||||
try {
|
||||
const keyParts = key.toString().split('/')
|
||||
const peerId = PeerId.createFromBytes(base32.decode(keyParts[3]))
|
||||
|
||||
let decoded
|
||||
switch (keyParts[2]) {
|
||||
case 'addrs':
|
||||
decoded = Addresses.decode(value)
|
||||
|
||||
// @ts-ignore protected function
|
||||
this.addressBook._setData(
|
||||
peerId,
|
||||
{
|
||||
addresses: decoded.addrs.map((address) => ({
|
||||
multiaddr: new Multiaddr(address.multiaddr),
|
||||
isCertified: Boolean(address.isCertified)
|
||||
})),
|
||||
record: decoded.certifiedRecord
|
||||
? {
|
||||
raw: decoded.certifiedRecord.raw,
|
||||
seqNumber: decoded.certifiedRecord.seq
|
||||
}
|
||||
: undefined
|
||||
},
|
||||
{ emit: false })
|
||||
break
|
||||
case 'keys':
|
||||
decoded = await PeerId.createFromPubKey(value)
|
||||
|
||||
// @ts-ignore protected function
|
||||
this.keyBook._setData(
|
||||
decoded,
|
||||
decoded,
|
||||
{ emit: false })
|
||||
break
|
||||
case 'metadata':
|
||||
this.metadataBook._setValue(
|
||||
peerId,
|
||||
keyParts[4],
|
||||
value,
|
||||
{ emit: false })
|
||||
break
|
||||
case 'protos':
|
||||
decoded = Protocols.decode(value)
|
||||
|
||||
// @ts-ignore protected function
|
||||
this.protoBook._setData(
|
||||
peerId,
|
||||
new Set(decoded.protocols),
|
||||
{ emit: false })
|
||||
break
|
||||
default:
|
||||
log('invalid data persisted for: ', key.toString())
|
||||
}
|
||||
} catch (err) {
|
||||
log.error(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = PersistentPeerStore
|
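A usage sketch for the PersistentPeerStore above, using the constructor and start/stop methods shown in the class. The require paths ('./persistent', 'datastore-core/memory') are assumptions for illustration; any interface-datastore implementation should work as the backend.

// Usage sketch (illustrative paths)
const { MemoryDatastore } = require('datastore-core/memory')
const PeerId = require('peer-id')
const PersistentPeerStore = require('./persistent')

async function main () {
  const peerId = await PeerId.create()
  const peerStore = new PersistentPeerStore({
    peerId,
    datastore: new MemoryDatastore(),
    threshold: 1 // commit after every dirty peer instead of the default 5
  })

  await peerStore.start() // replays previously persisted entries into the books

  // Book mutations emit change events; the handlers registered in start() mark
  // peers dirty and, once `threshold` dirty peers accumulate, batch-commit them.

  await peerStore.stop() // commits any remaining dirty peers
}

main().catch(console.error)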
198
src/peer-store/persistent/pb/address-book.d.ts
vendored
198
src/peer-store/persistent/pb/address-book.d.ts
vendored
@ -1,198 +0,0 @@
|
||||
import * as $protobuf from "protobufjs";
|
||||
/** Properties of an Addresses. */
|
||||
export interface IAddresses {
|
||||
|
||||
/** Addresses addrs */
|
||||
addrs?: (Addresses.IAddress[]|null);
|
||||
|
||||
/** Addresses certifiedRecord */
|
||||
certifiedRecord?: (Addresses.ICertifiedRecord|null);
|
||||
}
|
||||
|
||||
/** Represents an Addresses. */
|
||||
export class Addresses implements IAddresses {
|
||||
|
||||
/**
|
||||
* Constructs a new Addresses.
|
||||
* @param [p] Properties to set
|
||||
*/
|
||||
constructor(p?: IAddresses);
|
||||
|
||||
/** Addresses addrs. */
|
||||
public addrs: Addresses.IAddress[];
|
||||
|
||||
/** Addresses certifiedRecord. */
|
||||
public certifiedRecord?: (Addresses.ICertifiedRecord|null);
|
||||
|
||||
/**
|
||||
* Encodes the specified Addresses message. Does not implicitly {@link Addresses.verify|verify} messages.
|
||||
* @param m Addresses message or plain object to encode
|
||||
* @param [w] Writer to encode to
|
||||
* @returns Writer
|
||||
*/
|
||||
public static encode(m: IAddresses, w?: $protobuf.Writer): $protobuf.Writer;
|
||||
|
||||
/**
|
||||
* Decodes an Addresses message from the specified reader or buffer.
|
||||
* @param r Reader or buffer to decode from
|
||||
* @param [l] Message length if known beforehand
|
||||
* @returns Addresses
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Addresses;
|
||||
|
||||
/**
|
||||
* Creates an Addresses message from a plain object. Also converts values to their respective internal types.
|
||||
* @param d Plain object
|
||||
* @returns Addresses
|
||||
*/
|
||||
public static fromObject(d: { [k: string]: any }): Addresses;
|
||||
|
||||
/**
|
||||
* Creates a plain object from an Addresses message. Also converts values to other types if specified.
|
||||
* @param m Addresses
|
||||
* @param [o] Conversion options
|
||||
* @returns Plain object
|
||||
*/
|
||||
public static toObject(m: Addresses, o?: $protobuf.IConversionOptions): { [k: string]: any };
|
||||
|
||||
/**
|
||||
* Converts this Addresses to JSON.
|
||||
* @returns JSON object
|
||||
*/
|
||||
public toJSON(): { [k: string]: any };
|
||||
}
|
||||
|
||||
export namespace Addresses {
|
||||
|
||||
/** Properties of an Address. */
|
||||
interface IAddress {
|
||||
|
||||
/** Address multiaddr */
|
||||
multiaddr?: (Uint8Array|null);
|
||||
|
||||
/** Address isCertified */
|
||||
isCertified?: (boolean|null);
|
||||
}
|
||||
|
||||
/** Represents an Address. */
|
||||
class Address implements IAddress {
|
||||
|
||||
/**
|
||||
* Constructs a new Address.
|
||||
* @param [p] Properties to set
|
||||
*/
|
||||
constructor(p?: Addresses.IAddress);
|
||||
|
||||
/** Address multiaddr. */
|
||||
public multiaddr: Uint8Array;
|
||||
|
||||
/** Address isCertified. */
|
||||
public isCertified: boolean;
|
||||
|
||||
/**
|
||||
* Encodes the specified Address message. Does not implicitly {@link Addresses.Address.verify|verify} messages.
|
||||
* @param m Address message or plain object to encode
|
||||
* @param [w] Writer to encode to
|
||||
* @returns Writer
|
||||
*/
|
||||
public static encode(m: Addresses.IAddress, w?: $protobuf.Writer): $protobuf.Writer;
|
||||
|
||||
/**
|
||||
* Decodes an Address message from the specified reader or buffer.
|
||||
* @param r Reader or buffer to decode from
|
||||
* @param [l] Message length if known beforehand
|
||||
* @returns Address
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Addresses.Address;
|
||||
|
||||
/**
|
||||
* Creates an Address message from a plain object. Also converts values to their respective internal types.
|
||||
* @param d Plain object
|
||||
* @returns Address
|
||||
*/
|
||||
public static fromObject(d: { [k: string]: any }): Addresses.Address;
|
||||
|
||||
/**
|
||||
* Creates a plain object from an Address message. Also converts values to other types if specified.
|
||||
* @param m Address
|
||||
* @param [o] Conversion options
|
||||
* @returns Plain object
|
||||
*/
|
||||
public static toObject(m: Addresses.Address, o?: $protobuf.IConversionOptions): { [k: string]: any };
|
||||
|
||||
/**
|
||||
* Converts this Address to JSON.
|
||||
* @returns JSON object
|
||||
*/
|
||||
public toJSON(): { [k: string]: any };
|
||||
}
|
||||
|
||||
/** Properties of a CertifiedRecord. */
|
||||
interface ICertifiedRecord {
|
||||
|
||||
/** CertifiedRecord seq */
|
||||
seq?: (number|null);
|
||||
|
||||
/** CertifiedRecord raw */
|
||||
raw?: (Uint8Array|null);
|
||||
}
|
||||
|
||||
/** Represents a CertifiedRecord. */
|
||||
class CertifiedRecord implements ICertifiedRecord {
|
||||
|
||||
/**
|
||||
* Constructs a new CertifiedRecord.
|
||||
* @param [p] Properties to set
|
||||
*/
|
||||
constructor(p?: Addresses.ICertifiedRecord);
|
||||
|
||||
/** CertifiedRecord seq. */
|
||||
public seq: number;
|
||||
|
||||
/** CertifiedRecord raw. */
|
||||
public raw: Uint8Array;
|
||||
|
||||
/**
|
||||
* Encodes the specified CertifiedRecord message. Does not implicitly {@link Addresses.CertifiedRecord.verify|verify} messages.
|
||||
* @param m CertifiedRecord message or plain object to encode
|
||||
* @param [w] Writer to encode to
|
||||
* @returns Writer
|
||||
*/
|
||||
public static encode(m: Addresses.ICertifiedRecord, w?: $protobuf.Writer): $protobuf.Writer;
|
||||
|
||||
/**
|
||||
* Decodes a CertifiedRecord message from the specified reader or buffer.
|
||||
* @param r Reader or buffer to decode from
|
||||
* @param [l] Message length if known beforehand
|
||||
* @returns CertifiedRecord
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Addresses.CertifiedRecord;
|
||||
|
||||
/**
|
||||
* Creates a CertifiedRecord message from a plain object. Also converts values to their respective internal types.
|
||||
* @param d Plain object
|
||||
* @returns CertifiedRecord
|
||||
*/
|
||||
public static fromObject(d: { [k: string]: any }): Addresses.CertifiedRecord;
|
||||
|
||||
/**
|
||||
* Creates a plain object from a CertifiedRecord message. Also converts values to other types if specified.
|
||||
* @param m CertifiedRecord
|
||||
* @param [o] Conversion options
|
||||
* @returns Plain object
|
||||
*/
|
||||
public static toObject(m: Addresses.CertifiedRecord, o?: $protobuf.IConversionOptions): { [k: string]: any };
|
||||
|
||||
/**
|
||||
* Converts this CertifiedRecord to JSON.
|
||||
* @returns JSON object
|
||||
*/
|
||||
public toJSON(): { [k: string]: any };
|
||||
}
|
||||
}
|
@ -1,522 +0,0 @@
|
||||
/*eslint-disable*/
|
||||
"use strict";
|
||||
|
||||
var $protobuf = require("protobufjs/minimal");
|
||||
|
||||
// Common aliases
|
||||
var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
|
||||
|
||||
// Exported root namespace
|
||||
var $root = $protobuf.roots["libp2p-address-book"] || ($protobuf.roots["libp2p-address-book"] = {});
|
||||
|
||||
$root.Addresses = (function() {
|
||||
|
||||
/**
|
||||
* Properties of an Addresses.
|
||||
* @exports IAddresses
|
||||
* @interface IAddresses
|
||||
* @property {Array.<Addresses.IAddress>|null} [addrs] Addresses addrs
|
||||
* @property {Addresses.ICertifiedRecord|null} [certifiedRecord] Addresses certifiedRecord
|
||||
*/
|
||||
|
||||
/**
|
||||
* Constructs a new Addresses.
|
||||
* @exports Addresses
|
||||
* @classdesc Represents an Addresses.
|
||||
* @implements IAddresses
|
||||
* @constructor
|
||||
* @param {IAddresses=} [p] Properties to set
|
||||
*/
|
||||
function Addresses(p) {
|
||||
this.addrs = [];
|
||||
if (p)
|
||||
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
||||
if (p[ks[i]] != null)
|
||||
this[ks[i]] = p[ks[i]];
|
||||
}
|
||||
|
||||
/**
|
||||
* Addresses addrs.
|
||||
* @member {Array.<Addresses.IAddress>} addrs
|
||||
* @memberof Addresses
|
||||
* @instance
|
||||
*/
|
||||
Addresses.prototype.addrs = $util.emptyArray;
|
||||
|
||||
/**
|
||||
* Addresses certifiedRecord.
|
||||
* @member {Addresses.ICertifiedRecord|null|undefined} certifiedRecord
|
||||
* @memberof Addresses
|
||||
* @instance
|
||||
*/
|
||||
Addresses.prototype.certifiedRecord = null;
|
||||
|
||||
/**
|
||||
* Encodes the specified Addresses message. Does not implicitly {@link Addresses.verify|verify} messages.
|
||||
* @function encode
|
||||
* @memberof Addresses
|
||||
* @static
|
||||
* @param {IAddresses} m Addresses message or plain object to encode
|
||||
* @param {$protobuf.Writer} [w] Writer to encode to
|
||||
* @returns {$protobuf.Writer} Writer
|
||||
*/
|
||||
Addresses.encode = function encode(m, w) {
|
||||
if (!w)
|
||||
w = $Writer.create();
|
||||
if (m.addrs != null && m.addrs.length) {
|
||||
for (var i = 0; i < m.addrs.length; ++i)
|
||||
$root.Addresses.Address.encode(m.addrs[i], w.uint32(10).fork()).ldelim();
|
||||
}
|
||||
if (m.certifiedRecord != null && Object.hasOwnProperty.call(m, "certifiedRecord"))
|
||||
$root.Addresses.CertifiedRecord.encode(m.certifiedRecord, w.uint32(18).fork()).ldelim();
|
||||
return w;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes an Addresses message from the specified reader or buffer.
|
||||
* @function decode
|
||||
* @memberof Addresses
|
||||
* @static
|
||||
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
||||
* @param {number} [l] Message length if known beforehand
|
||||
* @returns {Addresses} Addresses
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
Addresses.decode = function decode(r, l) {
|
||||
if (!(r instanceof $Reader))
|
||||
r = $Reader.create(r);
|
||||
var c = l === undefined ? r.len : r.pos + l, m = new $root.Addresses();
|
||||
while (r.pos < c) {
|
||||
var t = r.uint32();
|
||||
switch (t >>> 3) {
|
||||
case 1:
|
||||
if (!(m.addrs && m.addrs.length))
|
||||
m.addrs = [];
|
||||
m.addrs.push($root.Addresses.Address.decode(r, r.uint32()));
|
||||
break;
|
||||
case 2:
|
||||
m.certifiedRecord = $root.Addresses.CertifiedRecord.decode(r, r.uint32());
|
||||
break;
|
||||
default:
|
||||
r.skipType(t & 7);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates an Addresses message from a plain object. Also converts values to their respective internal types.
|
||||
* @function fromObject
|
||||
* @memberof Addresses
|
||||
* @static
|
||||
* @param {Object.<string,*>} d Plain object
|
||||
* @returns {Addresses} Addresses
|
||||
*/
|
||||
Addresses.fromObject = function fromObject(d) {
|
||||
if (d instanceof $root.Addresses)
|
||||
return d;
|
||||
var m = new $root.Addresses();
|
||||
if (d.addrs) {
|
||||
if (!Array.isArray(d.addrs))
|
||||
throw TypeError(".Addresses.addrs: array expected");
|
||||
m.addrs = [];
|
||||
for (var i = 0; i < d.addrs.length; ++i) {
|
||||
if (typeof d.addrs[i] !== "object")
|
||||
throw TypeError(".Addresses.addrs: object expected");
|
||||
m.addrs[i] = $root.Addresses.Address.fromObject(d.addrs[i]);
|
||||
}
|
||||
}
|
||||
if (d.certifiedRecord != null) {
|
||||
if (typeof d.certifiedRecord !== "object")
|
||||
throw TypeError(".Addresses.certifiedRecord: object expected");
|
||||
m.certifiedRecord = $root.Addresses.CertifiedRecord.fromObject(d.certifiedRecord);
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a plain object from an Addresses message. Also converts values to other types if specified.
|
||||
* @function toObject
|
||||
* @memberof Addresses
|
||||
* @static
|
||||
* @param {Addresses} m Addresses
|
||||
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
||||
* @returns {Object.<string,*>} Plain object
|
||||
*/
|
||||
Addresses.toObject = function toObject(m, o) {
|
||||
if (!o)
|
||||
o = {};
|
||||
var d = {};
|
||||
if (o.arrays || o.defaults) {
|
||||
d.addrs = [];
|
||||
}
|
||||
if (o.defaults) {
|
||||
d.certifiedRecord = null;
|
||||
}
|
||||
if (m.addrs && m.addrs.length) {
|
||||
d.addrs = [];
|
||||
for (var j = 0; j < m.addrs.length; ++j) {
|
||||
d.addrs[j] = $root.Addresses.Address.toObject(m.addrs[j], o);
|
||||
}
|
||||
}
|
||||
if (m.certifiedRecord != null && m.hasOwnProperty("certifiedRecord")) {
|
||||
d.certifiedRecord = $root.Addresses.CertifiedRecord.toObject(m.certifiedRecord, o);
|
||||
}
|
||||
return d;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts this Addresses to JSON.
|
||||
* @function toJSON
|
||||
* @memberof Addresses
|
||||
* @instance
|
||||
* @returns {Object.<string,*>} JSON object
|
||||
*/
|
||||
Addresses.prototype.toJSON = function toJSON() {
|
||||
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
||||
};
|
||||
|
||||
Addresses.Address = (function() {
|
||||
|
||||
/**
|
||||
* Properties of an Address.
|
||||
* @memberof Addresses
|
||||
* @interface IAddress
|
||||
* @property {Uint8Array|null} [multiaddr] Address multiaddr
|
||||
* @property {boolean|null} [isCertified] Address isCertified
|
||||
*/
|
||||
|
||||
/**
|
||||
* Constructs a new Address.
|
||||
* @memberof Addresses
|
||||
* @classdesc Represents an Address.
|
||||
* @implements IAddress
|
||||
* @constructor
|
||||
* @param {Addresses.IAddress=} [p] Properties to set
|
||||
*/
|
||||
function Address(p) {
|
||||
if (p)
|
||||
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
||||
if (p[ks[i]] != null)
|
||||
this[ks[i]] = p[ks[i]];
|
||||
}
|
||||
|
||||
/**
|
||||
* Address multiaddr.
|
||||
* @member {Uint8Array} multiaddr
|
||||
* @memberof Addresses.Address
|
||||
* @instance
|
||||
*/
|
||||
Address.prototype.multiaddr = $util.newBuffer([]);
|
||||
|
||||
/**
|
||||
* Address isCertified.
|
||||
* @member {boolean} isCertified
|
||||
* @memberof Addresses.Address
|
||||
* @instance
|
||||
*/
|
||||
Address.prototype.isCertified = false;
|
||||
|
||||
/**
|
||||
* Encodes the specified Address message. Does not implicitly {@link Addresses.Address.verify|verify} messages.
|
||||
* @function encode
|
||||
* @memberof Addresses.Address
|
||||
* @static
|
||||
* @param {Addresses.IAddress} m Address message or plain object to encode
|
||||
* @param {$protobuf.Writer} [w] Writer to encode to
|
||||
* @returns {$protobuf.Writer} Writer
|
||||
*/
|
||||
Address.encode = function encode(m, w) {
|
||||
if (!w)
|
||||
w = $Writer.create();
|
||||
if (m.multiaddr != null && Object.hasOwnProperty.call(m, "multiaddr"))
|
||||
w.uint32(10).bytes(m.multiaddr);
|
||||
if (m.isCertified != null && Object.hasOwnProperty.call(m, "isCertified"))
|
||||
w.uint32(16).bool(m.isCertified);
|
||||
return w;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes an Address message from the specified reader or buffer.
|
||||
* @function decode
|
||||
* @memberof Addresses.Address
|
||||
* @static
|
||||
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
||||
* @param {number} [l] Message length if known beforehand
|
||||
* @returns {Addresses.Address} Address
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
Address.decode = function decode(r, l) {
|
||||
if (!(r instanceof $Reader))
|
||||
r = $Reader.create(r);
|
||||
var c = l === undefined ? r.len : r.pos + l, m = new $root.Addresses.Address();
|
||||
while (r.pos < c) {
|
||||
var t = r.uint32();
|
||||
switch (t >>> 3) {
|
||||
case 1:
|
||||
m.multiaddr = r.bytes();
|
||||
break;
|
||||
case 2:
|
||||
m.isCertified = r.bool();
|
||||
break;
|
||||
default:
|
||||
r.skipType(t & 7);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates an Address message from a plain object. Also converts values to their respective internal types.
|
||||
* @function fromObject
|
||||
* @memberof Addresses.Address
|
||||
* @static
|
||||
* @param {Object.<string,*>} d Plain object
|
||||
* @returns {Addresses.Address} Address
|
||||
*/
|
||||
Address.fromObject = function fromObject(d) {
|
||||
if (d instanceof $root.Addresses.Address)
|
||||
return d;
|
||||
var m = new $root.Addresses.Address();
|
||||
if (d.multiaddr != null) {
|
||||
if (typeof d.multiaddr === "string")
|
||||
$util.base64.decode(d.multiaddr, m.multiaddr = $util.newBuffer($util.base64.length(d.multiaddr)), 0);
|
||||
else if (d.multiaddr.length)
|
||||
m.multiaddr = d.multiaddr;
|
||||
}
|
||||
if (d.isCertified != null) {
|
||||
m.isCertified = Boolean(d.isCertified);
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a plain object from an Address message. Also converts values to other types if specified.
|
||||
* @function toObject
|
||||
* @memberof Addresses.Address
|
||||
* @static
|
||||
* @param {Addresses.Address} m Address
|
||||
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
||||
* @returns {Object.<string,*>} Plain object
|
||||
*/
|
||||
Address.toObject = function toObject(m, o) {
|
||||
if (!o)
|
||||
o = {};
|
||||
var d = {};
|
||||
if (o.defaults) {
|
||||
if (o.bytes === String)
|
||||
d.multiaddr = "";
|
||||
else {
|
||||
d.multiaddr = [];
|
||||
if (o.bytes !== Array)
|
||||
d.multiaddr = $util.newBuffer(d.multiaddr);
|
||||
}
|
||||
d.isCertified = false;
|
||||
}
|
||||
if (m.multiaddr != null && m.hasOwnProperty("multiaddr")) {
|
||||
d.multiaddr = o.bytes === String ? $util.base64.encode(m.multiaddr, 0, m.multiaddr.length) : o.bytes === Array ? Array.prototype.slice.call(m.multiaddr) : m.multiaddr;
|
||||
}
|
||||
if (m.isCertified != null && m.hasOwnProperty("isCertified")) {
|
||||
d.isCertified = m.isCertified;
|
||||
}
|
||||
return d;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts this Address to JSON.
|
||||
* @function toJSON
|
||||
* @memberof Addresses.Address
|
||||
* @instance
|
||||
* @returns {Object.<string,*>} JSON object
|
||||
*/
|
||||
Address.prototype.toJSON = function toJSON() {
|
||||
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
||||
};
|
||||
|
||||
return Address;
|
||||
})();
|
||||
|
||||
Addresses.CertifiedRecord = (function() {
|
||||
|
||||
/**
|
||||
* Properties of a CertifiedRecord.
|
||||
* @memberof Addresses
|
||||
* @interface ICertifiedRecord
|
||||
* @property {number|null} [seq] CertifiedRecord seq
|
||||
* @property {Uint8Array|null} [raw] CertifiedRecord raw
|
||||
*/
|
||||
|
||||
/**
|
||||
* Constructs a new CertifiedRecord.
|
||||
* @memberof Addresses
|
||||
* @classdesc Represents a CertifiedRecord.
|
||||
* @implements ICertifiedRecord
|
||||
* @constructor
|
||||
* @param {Addresses.ICertifiedRecord=} [p] Properties to set
|
||||
*/
|
||||
function CertifiedRecord(p) {
|
||||
if (p)
|
||||
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
||||
if (p[ks[i]] != null)
|
||||
this[ks[i]] = p[ks[i]];
|
||||
}
|
||||
|
||||
/**
|
||||
* CertifiedRecord seq.
|
||||
* @member {number} seq
|
||||
* @memberof Addresses.CertifiedRecord
|
||||
* @instance
|
||||
*/
|
||||
CertifiedRecord.prototype.seq = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
|
||||
|
||||
/**
|
||||
* CertifiedRecord raw.
|
||||
* @member {Uint8Array} raw
|
||||
* @memberof Addresses.CertifiedRecord
|
||||
* @instance
|
||||
*/
|
||||
CertifiedRecord.prototype.raw = $util.newBuffer([]);
|
||||
|
||||
/**
|
||||
* Encodes the specified CertifiedRecord message. Does not implicitly {@link Addresses.CertifiedRecord.verify|verify} messages.
|
||||
* @function encode
|
||||
* @memberof Addresses.CertifiedRecord
|
||||
* @static
|
||||
* @param {Addresses.ICertifiedRecord} m CertifiedRecord message or plain object to encode
|
||||
* @param {$protobuf.Writer} [w] Writer to encode to
|
||||
* @returns {$protobuf.Writer} Writer
|
||||
*/
|
||||
CertifiedRecord.encode = function encode(m, w) {
|
||||
if (!w)
|
||||
w = $Writer.create();
|
||||
if (m.seq != null && Object.hasOwnProperty.call(m, "seq"))
|
||||
w.uint32(8).uint64(m.seq);
|
||||
if (m.raw != null && Object.hasOwnProperty.call(m, "raw"))
|
||||
w.uint32(18).bytes(m.raw);
|
||||
return w;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes a CertifiedRecord message from the specified reader or buffer.
|
||||
* @function decode
|
||||
* @memberof Addresses.CertifiedRecord
|
||||
* @static
|
||||
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
||||
* @param {number} [l] Message length if known beforehand
|
||||
* @returns {Addresses.CertifiedRecord} CertifiedRecord
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
CertifiedRecord.decode = function decode(r, l) {
|
||||
if (!(r instanceof $Reader))
|
||||
r = $Reader.create(r);
|
||||
var c = l === undefined ? r.len : r.pos + l, m = new $root.Addresses.CertifiedRecord();
|
||||
while (r.pos < c) {
|
||||
var t = r.uint32();
|
||||
switch (t >>> 3) {
|
||||
case 1:
|
||||
m.seq = r.uint64();
|
||||
break;
|
||||
case 2:
|
||||
m.raw = r.bytes();
|
||||
break;
|
||||
default:
|
||||
r.skipType(t & 7);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a CertifiedRecord message from a plain object. Also converts values to their respective internal types.
|
||||
* @function fromObject
|
||||
* @memberof Addresses.CertifiedRecord
|
||||
* @static
|
||||
* @param {Object.<string,*>} d Plain object
|
||||
* @returns {Addresses.CertifiedRecord} CertifiedRecord
|
||||
*/
|
||||
CertifiedRecord.fromObject = function fromObject(d) {
|
||||
if (d instanceof $root.Addresses.CertifiedRecord)
|
||||
return d;
|
||||
var m = new $root.Addresses.CertifiedRecord();
|
||||
if (d.seq != null) {
|
||||
if ($util.Long)
|
||||
(m.seq = $util.Long.fromValue(d.seq)).unsigned = true;
|
||||
else if (typeof d.seq === "string")
|
||||
m.seq = parseInt(d.seq, 10);
|
||||
else if (typeof d.seq === "number")
|
||||
m.seq = d.seq;
|
||||
else if (typeof d.seq === "object")
|
||||
m.seq = new $util.LongBits(d.seq.low >>> 0, d.seq.high >>> 0).toNumber(true);
|
||||
}
|
||||
if (d.raw != null) {
|
||||
if (typeof d.raw === "string")
|
||||
$util.base64.decode(d.raw, m.raw = $util.newBuffer($util.base64.length(d.raw)), 0);
|
||||
else if (d.raw.length)
|
||||
m.raw = d.raw;
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a plain object from a CertifiedRecord message. Also converts values to other types if specified.
|
||||
* @function toObject
|
||||
* @memberof Addresses.CertifiedRecord
|
||||
* @static
|
||||
* @param {Addresses.CertifiedRecord} m CertifiedRecord
|
||||
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
||||
* @returns {Object.<string,*>} Plain object
|
||||
*/
|
||||
CertifiedRecord.toObject = function toObject(m, o) {
|
||||
if (!o)
|
||||
o = {};
|
||||
var d = {};
|
||||
if (o.defaults) {
|
||||
if ($util.Long) {
|
||||
var n = new $util.Long(0, 0, true);
|
||||
d.seq = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
|
||||
} else
|
||||
d.seq = o.longs === String ? "0" : 0;
|
||||
if (o.bytes === String)
|
||||
d.raw = "";
|
||||
else {
|
||||
d.raw = [];
|
||||
if (o.bytes !== Array)
|
||||
d.raw = $util.newBuffer(d.raw);
|
||||
}
|
||||
}
|
||||
if (m.seq != null && m.hasOwnProperty("seq")) {
|
||||
if (typeof m.seq === "number")
|
||||
d.seq = o.longs === String ? String(m.seq) : m.seq;
|
||||
else
|
||||
d.seq = o.longs === String ? $util.Long.prototype.toString.call(m.seq) : o.longs === Number ? new $util.LongBits(m.seq.low >>> 0, m.seq.high >>> 0).toNumber(true) : m.seq;
|
||||
}
|
||||
if (m.raw != null && m.hasOwnProperty("raw")) {
|
||||
d.raw = o.bytes === String ? $util.base64.encode(m.raw, 0, m.raw.length) : o.bytes === Array ? Array.prototype.slice.call(m.raw) : m.raw;
|
||||
}
|
||||
return d;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts this CertifiedRecord to JSON.
|
||||
* @function toJSON
|
||||
* @memberof Addresses.CertifiedRecord
|
||||
* @instance
|
||||
* @returns {Object.<string,*>} JSON object
|
||||
*/
|
||||
CertifiedRecord.prototype.toJSON = function toJSON() {
|
||||
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
||||
};
|
||||
|
||||
return CertifiedRecord;
|
||||
})();
|
||||
|
||||
return Addresses;
|
||||
})();
|
||||
|
||||
module.exports = $root;
|
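A minimal sketch of encoding and decoding an Addresses record with the generated classes above; the require path, multiaddr bytes and record contents are illustrative.

// Addresses round-trip sketch (illustrative values)
const { Addresses } = require('./address-book')

const buf = Addresses.encode({
  addrs: [{
    multiaddr: Uint8Array.from([0x04, 0x7f, 0x00, 0x00, 0x01]), // raw multiaddr bytes (illustrative)
    isCertified: false
  }],
  certifiedRecord: {
    seq: 1,                  // uint64 sequence number from the signed PeerRecord
    raw: Uint8Array.from([]) // serialized SignedEnvelope bytes (illustrative, empty here)
  }
}).finish()

const rec = Addresses.decode(buf)
const plain = Addresses.toObject(rec, { longs: String }) // seq comes back as a string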
@ -1,27 +0,0 @@
|
||||
syntax = "proto3";
|
||||
|
||||
message Addresses {
|
||||
// Address represents a single multiaddr.
|
||||
message Address {
|
||||
bytes multiaddr = 1;
|
||||
|
||||
// Flag to indicate if the address comes from a certified source.
|
||||
optional bool isCertified = 2;
|
||||
}
|
||||
|
||||
// CertifiedRecord contains a serialized signed PeerRecord used to
|
||||
// populate the signedAddrs list.
|
||||
message CertifiedRecord {
|
||||
// The Seq counter from the signed PeerRecord envelope
|
||||
uint64 seq = 1;
|
||||
|
||||
// The serialized bytes of the SignedEnvelope containing the PeerRecord.
|
||||
bytes raw = 2;
|
||||
}
|
||||
|
||||
// The known multiaddrs.
|
||||
repeated Address addrs = 1;
|
||||
|
||||
// The most recently received signed PeerRecord.
|
||||
CertifiedRecord certified_record = 2;
|
||||
}
|
59
src/peer-store/persistent/pb/proto-book.d.ts
vendored
59
src/peer-store/persistent/pb/proto-book.d.ts
vendored
@ -1,59 +0,0 @@
|
||||
import * as $protobuf from "protobufjs";
|
||||
/** Properties of a Protocols. */
|
||||
export interface IProtocols {
|
||||
|
||||
/** Protocols protocols */
|
||||
protocols?: (string[]|null);
|
||||
}
|
||||
|
||||
/** Represents a Protocols. */
|
||||
export class Protocols implements IProtocols {
|
||||
|
||||
/**
|
||||
* Constructs a new Protocols.
|
||||
* @param [p] Properties to set
|
||||
*/
|
||||
constructor(p?: IProtocols);
|
||||
|
||||
/** Protocols protocols. */
|
||||
public protocols: string[];
|
||||
|
||||
/**
|
||||
* Encodes the specified Protocols message. Does not implicitly {@link Protocols.verify|verify} messages.
|
||||
* @param m Protocols message or plain object to encode
|
||||
* @param [w] Writer to encode to
|
||||
* @returns Writer
|
||||
*/
|
||||
public static encode(m: IProtocols, w?: $protobuf.Writer): $protobuf.Writer;
|
||||
|
||||
/**
|
||||
* Decodes a Protocols message from the specified reader or buffer.
|
||||
* @param r Reader or buffer to decode from
|
||||
* @param [l] Message length if known beforehand
|
||||
* @returns Protocols
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Protocols;
|
||||
|
||||
/**
|
||||
* Creates a Protocols message from a plain object. Also converts values to their respective internal types.
|
||||
* @param d Plain object
|
||||
* @returns Protocols
|
||||
*/
|
||||
public static fromObject(d: { [k: string]: any }): Protocols;
|
||||
|
||||
/**
|
||||
* Creates a plain object from a Protocols message. Also converts values to other types if specified.
|
||||
* @param m Protocols
|
||||
* @param [o] Conversion options
|
||||
* @returns Plain object
|
||||
*/
|
||||
public static toObject(m: Protocols, o?: $protobuf.IConversionOptions): { [k: string]: any };
|
||||
|
||||
/**
|
||||
* Converts this Protocols to JSON.
|
||||
* @returns JSON object
|
||||
*/
|
||||
public toJSON(): { [k: string]: any };
|
||||
}
|
@ -1,157 +0,0 @@
|
||||
/*eslint-disable*/
|
||||
"use strict";
|
||||
|
||||
var $protobuf = require("protobufjs/minimal");
|
||||
|
||||
// Common aliases
|
||||
var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
|
||||
|
||||
// Exported root namespace
|
||||
var $root = $protobuf.roots["libp2p-proto-book"] || ($protobuf.roots["libp2p-proto-book"] = {});
|
||||
|
||||
$root.Protocols = (function() {
|
||||
|
||||
/**
|
||||
* Properties of a Protocols.
|
||||
* @exports IProtocols
|
||||
* @interface IProtocols
|
||||
* @property {Array.<string>|null} [protocols] Protocols protocols
|
||||
*/
|
||||
|
||||
/**
|
||||
* Constructs a new Protocols.
|
||||
* @exports Protocols
|
||||
* @classdesc Represents a Protocols.
|
||||
* @implements IProtocols
|
||||
* @constructor
|
||||
* @param {IProtocols=} [p] Properties to set
|
||||
*/
|
||||
function Protocols(p) {
|
||||
this.protocols = [];
|
||||
if (p)
|
||||
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
||||
if (p[ks[i]] != null)
|
||||
this[ks[i]] = p[ks[i]];
|
||||
}
|
||||
|
||||
/**
|
||||
* Protocols protocols.
|
||||
* @member {Array.<string>} protocols
|
||||
* @memberof Protocols
|
||||
* @instance
|
||||
*/
|
||||
Protocols.prototype.protocols = $util.emptyArray;
|
||||
|
||||
/**
|
||||
* Encodes the specified Protocols message. Does not implicitly {@link Protocols.verify|verify} messages.
|
||||
* @function encode
|
||||
* @memberof Protocols
|
||||
* @static
|
||||
* @param {IProtocols} m Protocols message or plain object to encode
|
||||
* @param {$protobuf.Writer} [w] Writer to encode to
|
||||
* @returns {$protobuf.Writer} Writer
|
||||
*/
|
||||
Protocols.encode = function encode(m, w) {
|
||||
if (!w)
|
||||
w = $Writer.create();
|
||||
if (m.protocols != null && m.protocols.length) {
|
||||
for (var i = 0; i < m.protocols.length; ++i)
|
||||
w.uint32(10).string(m.protocols[i]);
|
||||
}
|
||||
return w;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes a Protocols message from the specified reader or buffer.
|
||||
* @function decode
|
||||
* @memberof Protocols
|
||||
* @static
|
||||
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
||||
* @param {number} [l] Message length if known beforehand
|
||||
* @returns {Protocols} Protocols
|
||||
* @throws {Error} If the payload is not a reader or valid buffer
|
||||
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
||||
*/
|
||||
Protocols.decode = function decode(r, l) {
|
||||
if (!(r instanceof $Reader))
|
||||
r = $Reader.create(r);
|
||||
var c = l === undefined ? r.len : r.pos + l, m = new $root.Protocols();
|
||||
while (r.pos < c) {
|
||||
var t = r.uint32();
|
||||
switch (t >>> 3) {
|
||||
case 1:
|
||||
if (!(m.protocols && m.protocols.length))
|
||||
m.protocols = [];
|
||||
m.protocols.push(r.string());
|
||||
break;
|
||||
default:
|
||||
r.skipType(t & 7);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a Protocols message from a plain object. Also converts values to their respective internal types.
|
||||
* @function fromObject
|
||||
* @memberof Protocols
|
||||
* @static
|
||||
* @param {Object.<string,*>} d Plain object
|
||||
* @returns {Protocols} Protocols
|
||||
*/
|
||||
Protocols.fromObject = function fromObject(d) {
|
||||
if (d instanceof $root.Protocols)
|
||||
return d;
|
||||
var m = new $root.Protocols();
|
||||
if (d.protocols) {
|
||||
if (!Array.isArray(d.protocols))
|
||||
throw TypeError(".Protocols.protocols: array expected");
|
||||
m.protocols = [];
|
||||
for (var i = 0; i < d.protocols.length; ++i) {
|
||||
m.protocols[i] = String(d.protocols[i]);
|
||||
}
|
||||
}
|
||||
return m;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a plain object from a Protocols message. Also converts values to other types if specified.
|
||||
* @function toObject
|
||||
* @memberof Protocols
|
||||
* @static
|
||||
* @param {Protocols} m Protocols
|
||||
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
||||
* @returns {Object.<string,*>} Plain object
|
||||
*/
|
||||
Protocols.toObject = function toObject(m, o) {
|
||||
if (!o)
|
||||
o = {};
|
||||
var d = {};
|
||||
if (o.arrays || o.defaults) {
|
||||
d.protocols = [];
|
||||
}
|
||||
if (m.protocols && m.protocols.length) {
|
||||
d.protocols = [];
|
||||
for (var j = 0; j < m.protocols.length; ++j) {
|
||||
d.protocols[j] = m.protocols[j];
|
||||
}
|
||||
}
|
||||
return d;
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts this Protocols to JSON.
|
||||
* @function toJSON
|
||||
* @memberof Protocols
|
||||
* @instance
|
||||
* @returns {Object.<string,*>} JSON object
|
||||
*/
|
||||
Protocols.prototype.toJSON = function toJSON() {
|
||||
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
||||
};
|
||||
|
||||
return Protocols;
|
||||
})();
|
||||
|
||||
module.exports = $root;
|
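The Protocols message is just a repeated string field; a minimal round-trip sketch, with an illustrative require path:

// Protocols round-trip sketch
const { Protocols } = require('./proto-book')

const buf = Protocols.encode({ protocols: ['/ipfs/id/1.0.0', '/ipfs/ping/1.0.0'] }).finish()
const { protocols } = Protocols.decode(buf)
// protocols -> ['/ipfs/id/1.0.0', '/ipfs/ping/1.0.0']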
@ -1,5 +0,0 @@
|
||||
syntax = "proto3";
|
||||
|
||||
message Protocols {
|
||||
repeated string protocols = 1;
|
||||
}
|
@ -1,171 +1,237 @@
|
||||
'use strict'
|
||||
|
||||
const debug = require('debug')
|
||||
const errcode = require('err-code')
|
||||
const { codes } = require('../errors')
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
/**
|
||||
* @typedef {import('./types').PeerStore} PeerStore
|
||||
* @typedef {import('./types').ProtoBook} ProtoBook
|
||||
*/
|
||||
|
||||
const log = Object.assign(debug('libp2p:peer-store:proto-book'), {
|
||||
error: debug('libp2p:peer-store:proto-book:err')
|
||||
})
|
||||
const errcode = require('err-code')
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
const Book = require('./book')
|
||||
|
||||
const {
|
||||
codes: { ERR_INVALID_PARAMETERS }
|
||||
} = require('../errors')
|
||||
const EVENT_NAME = 'change:protocols'
|
||||
|
||||
/**
|
||||
* @typedef {import('./')} PeerStore
|
||||
* @implements {ProtoBook}
|
||||
*/
|
||||
|
||||
/**
|
||||
* @extends {Book}
|
||||
*
|
||||
* @fires ProtoBook#change:protocols
|
||||
*/
|
||||
class ProtoBook extends Book {
|
||||
class PersistentProtoBook {
|
||||
/**
|
||||
* The ProtoBook is responsible for keeping the known supported
|
||||
* protocols of a peer.
|
||||
*
|
||||
* @class
|
||||
* @param {PeerStore} peerStore
|
||||
* @param {PeerStore["emit"]} emit
|
||||
* @param {import('./types').Store} store
|
||||
*/
|
||||
constructor (peerStore) {
|
||||
/**
|
||||
* PeerStore Event emitter, used by the ProtoBook to emit:
|
||||
* "change:protocols" - emitted when the known protocols of a peer change.
|
||||
*/
|
||||
super({
|
||||
peerStore,
|
||||
eventName: 'change:protocols',
|
||||
eventProperty: 'protocols',
|
||||
eventTransformer: (data) => Array.from(data)
|
||||
})
|
||||
|
||||
/**
|
||||
* Map known peers to their known protocols.
|
||||
*
|
||||
* @type {Map<string, Set<string>>}
|
||||
*/
|
||||
this.data = new Map()
|
||||
constructor (emit, store) {
|
||||
this._emit = emit
|
||||
this._store = store
|
||||
}
|
||||
|
||||
/**
|
||||
* Set known protocols of a provided peer.
|
||||
* If the peer was not known before, it will be added.
|
||||
*
|
||||
* @override
|
||||
* @param {PeerId} peerId
|
||||
* @param {string[]} protocols
|
||||
* @returns {ProtoBook}
|
||||
*/
|
||||
set (peerId, protocols) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
async get (peerId) {
|
||||
log('get wait for read lock')
|
||||
const release = await this._store.lock.readLock()
|
||||
log('get got read lock')
|
||||
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
|
||||
return peer.protocols
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
} finally {
|
||||
log('get release read lock')
|
||||
release()
|
||||
}
|
||||
|
||||
if (!protocols) {
|
||||
log.error('protocols must be provided to store data')
|
||||
throw errcode(new Error('protocols must be provided'), ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const id = peerId.toB58String()
|
||||
const recSet = this.data.get(id)
|
||||
const newSet = new Set(protocols)
|
||||
|
||||
/**
|
||||
* @param {Set<string>} a
|
||||
* @param {Set<string>} b
|
||||
*/
|
||||
const isSetEqual = (a, b) => a.size === b.size && [...a].every(value => b.has(value))
|
||||
|
||||
// Already knows the peer and the recorded protocols are the same?
|
||||
// If yes, no changes needed!
|
||||
if (recSet && isSetEqual(recSet, newSet)) {
|
||||
log(`the protocols provided to store are equal to the already stored for ${id}`)
|
||||
return this
|
||||
}
|
||||
|
||||
this._setData(peerId, newSet)
|
||||
log(`stored provided protocols for ${id}`)
|
||||
|
||||
return this
|
||||
return []
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds known protocols of a provided peer.
|
||||
* If the peer was not known before, it will be added.
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @param {string[]} protocols
|
||||
* @returns {ProtoBook}
|
||||
*/
|
||||
add (peerId, protocols) {
|
||||
async set (peerId, protocols) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
if (!protocols) {
|
||||
if (!Array.isArray(protocols)) {
|
||||
log.error('protocols must be provided to store data')
|
||||
throw errcode(new Error('protocols must be provided'), ERR_INVALID_PARAMETERS)
|
||||
throw errcode(new Error('protocols must be provided'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const id = peerId.toB58String()
|
||||
const recSet = this.data.get(id) || new Set()
|
||||
const newSet = new Set([...recSet, ...protocols]) // Set Union
|
||||
log('set await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('set got write lock')
|
||||
|
||||
// Any new protocol added?
|
||||
if (recSet.size === newSet.size) {
|
||||
log(`the protocols provided to store are already stored for ${id}`)
|
||||
return this
|
||||
}
|
||||
let updatedPeer
|
||||
|
||||
this._setData(peerId, newSet)
|
||||
log(`added provided protocols for ${id}`)
|
||||
try {
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes known protocols of a provided peer.
|
||||
* If the protocols did not exist before, nothing will be done.
|
||||
*
|
||||
* @param {PeerId} peerId
|
||||
* @param {string[]} protocols
|
||||
* @returns {ProtoBook}
|
||||
*/
|
||||
remove (peerId, protocols) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
if (!protocols) {
|
||||
log.error('protocols must be provided to store data')
|
||||
throw errcode(new Error('protocols must be provided'), ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const id = peerId.toB58String()
|
||||
const recSet = this.data.get(id)
|
||||
|
||||
if (recSet) {
|
||||
const newSet = new Set([
|
||||
...recSet
|
||||
].filter((p) => !protocols.includes(p)))
|
||||
|
||||
// Any protocol removed?
|
||||
if (recSet.size === newSet.size) {
|
||||
return this
|
||||
if (new Set([
|
||||
...protocols
|
||||
]).size === peer.protocols.length) {
|
||||
return
|
||||
}
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
this._setData(peerId, newSet)
|
||||
log(`removed provided protocols for ${id}`)
|
||||
updatedPeer = await this._store.patchOrCreate(peerId, {
|
||||
protocols
|
||||
})
|
||||
|
||||
log(`stored provided protocols for ${peerId.toB58String()}`)
|
||||
} finally {
|
||||
log('set release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
return this
|
||||
this._emit(EVENT_NAME, { peerId, protocols: updatedPeer.protocols })
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PeerId} peerId
|
||||
* @param {string[]} protocols
|
||||
*/
|
||||
async add (peerId, protocols) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
if (!Array.isArray(protocols)) {
|
||||
log.error('protocols must be provided to store data')
|
||||
throw errcode(new Error('protocols must be provided'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
log('add await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('add got write lock')
|
||||
|
||||
let updatedPeer
|
||||
|
||||
try {
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
|
||||
if (new Set([
|
||||
...peer.protocols,
|
||||
...protocols
|
||||
]).size === peer.protocols.length) {
|
||||
return
|
||||
}
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
updatedPeer = await this._store.mergeOrCreate(peerId, {
|
||||
protocols
|
||||
})
|
||||
|
||||
log(`added provided protocols for ${peerId.toB58String()}`)
|
||||
} finally {
|
||||
log('add release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._emit(EVENT_NAME, { peerId, protocols: updatedPeer.protocols })
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PeerId} peerId
|
||||
* @param {string[]} protocols
|
||||
*/
|
||||
async remove (peerId, protocols) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
log.error('peerId must be an instance of peer-id to store data')
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
if (!Array.isArray(protocols)) {
|
||||
log.error('protocols must be provided to store data')
|
||||
throw errcode(new Error('protocols must be provided'), codes.ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
log('remove await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('remove got write lock')
|
||||
|
||||
let updatedPeer
|
||||
|
||||
try {
|
||||
try {
|
||||
const peer = await this._store.load(peerId)
|
||||
const protocolSet = new Set(peer.protocols)
|
||||
|
||||
for (const protocol of protocols) {
|
||||
protocolSet.delete(protocol)
|
||||
}
|
||||
|
||||
if (peer.protocols.length === protocolSet.size) {
|
||||
return
|
||||
}
|
||||
|
||||
protocols = Array.from(protocolSet)
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
updatedPeer = await this._store.patchOrCreate(peerId, {
|
||||
protocols
|
||||
})
|
||||
} finally {
|
||||
log('remove release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
this._emit(EVENT_NAME, { peerId, protocols: updatedPeer.protocols })
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PeerId} peerId
|
||||
*/
|
||||
async delete (peerId) {
|
||||
log('delete await write lock')
|
||||
const release = await this._store.lock.writeLock()
|
||||
log('delete got write lock')
|
||||
let has
|
||||
|
||||
try {
|
||||
has = await this._store.has(peerId)
|
||||
|
||||
await this._store.patchOrCreate(peerId, {
|
||||
protocols: []
|
||||
})
|
||||
} catch (/** @type {any} */ err) {
|
||||
if (err.code !== codes.ERR_NOT_FOUND) {
|
||||
throw err
|
||||
}
|
||||
} finally {
|
||||
log('delete release write lock')
|
||||
release()
|
||||
}
|
||||
|
||||
if (has) {
|
||||
this._emit(EVENT_NAME, { peerId, protocols: [] })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ProtoBook
|
||||
module.exports = PersistentProtoBook
|
||||
|
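To close the loop, a minimal sketch of how the PersistentProtoBook above is driven. It is normally constructed by the peer store, which supplies its own emit function and a datastore-backed Store; here they are taken as parameters because their construction is outside this file, and the protocol names are illustrative.

// Sketch: exercising the async ProtoBook API shown above
async function exampleProtoBookUsage (PersistentProtoBook, emit, store, peerId) {
  const protoBook = new PersistentProtoBook(emit, store)

  await protoBook.set(peerId, ['/ipfs/id/1.0.0'])   // replace the known protocols
  await protoBook.add(peerId, ['/ipfs/ping/1.0.0']) // union with what is stored

  const protocols = await protoBook.get(peerId)     // -> both protocols
  await protoBook.remove(peerId, ['/ipfs/ping/1.0.0'])

  return protocols
}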
Some files were not shown because too many files have changed in this diff.