Mirror of https://github.com/fluencelabs/js-libp2p, synced 2025-07-10 22:31:34 +00:00
Compare commits
14 Commits
Author | SHA1 | Date
---|---|---
 | faf1f89d9e |
 | 76f4ea5e8a |
 | 2f0b311df7 |
 | d172d0d952 |
 | f8e8023aed |
 | bdc9f16d0c |
 | 1b46f47fdb |
 | b539f9b655 |
 | 103818733e |
 | 1f1bbc0ee6 |
 | 3b683e7156 |
 | b25e0fe531 |
 | cbaa5a2ef3 |
 | 51dabb1724 |
.github/workflows/main.yml (vendored, 8 lines changed)

```diff
@@ -19,7 +19,7 @@ jobs:
       - run: npx aegir lint
       - run: npx aegir build
       - run: npx aegir dep-check
-      - uses: ipfs/aegir/actions/bundle-size@v32.1.0
+      - uses: ipfs/aegir/actions/bundle-size
        name: size
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
@@ -37,7 +37,7 @@ jobs:
        with:
          node-version: ${{ matrix.node }}
      - run: npm install
-      - run: npx aegir test -t node --cov --bail
+      - run: npm run test:node -- --cov --bail
      - uses: codecov/codecov-action@v1
  test-chrome:
    needs: check
@@ -48,7 +48,7 @@ jobs:
        with:
          node-version: lts/*
      - run: npm install
-      - run: npx aegir test -t browser -t webworker --bail
+      - run: npm run test:browser -- -t browser -t webworker --bail
  test-firefox:
    needs: check
    runs-on: ubuntu-latest
@@ -58,7 +58,7 @@ jobs:
        with:
          node-version: lts/*
      - run: npm install
-      - run: npx aegir test -t browser -t webworker --bail -- --browser firefox
+      - run: npm run test:browser -- -t browser -t webworker --bail -- --browser firefox
  test-ts:
    needs: check
    runs-on: ubuntu-latest
```
CHANGELOG.md (20 lines changed)

```diff
@@ -1,3 +1,23 @@
+## [0.35.4](https://github.com/libp2p/js-libp2p/compare/v0.35.3...v0.35.4) (2021-12-15)
+
+
+### Features
+
+* allow per-component metrics to be collected ([#1061](https://github.com/libp2p/js-libp2p/issues/1061)) ([2f0b311](https://github.com/libp2p/js-libp2p/commit/2f0b311df7127aa44512c2008142d4ca30268986)), closes [#1060](https://github.com/libp2p/js-libp2p/issues/1060)
+
+
+
+## [0.35.3](https://github.com/libp2p/js-libp2p/compare/v0.35.2...v0.35.3) (2021-12-13)
+
+
+### Bug Fixes
+
+* clean up pending dial targets ([#1059](https://github.com/libp2p/js-libp2p/issues/1059)) ([bdc9f16](https://github.com/libp2p/js-libp2p/commit/bdc9f16d0cbe56ccf26822f11068e7795bcef046))
+* fix uncaught promise rejection when finding peers ([#1044](https://github.com/libp2p/js-libp2p/issues/1044)) ([3b683e7](https://github.com/libp2p/js-libp2p/commit/3b683e715686163e229b7b5c3a892327dfd4fc63))
+* make error codes consistent ([#1054](https://github.com/libp2p/js-libp2p/issues/1054)) ([b25e0fe](https://github.com/libp2p/js-libp2p/commit/b25e0fe5312db58a06c39500ae84c50fed3a93bd))
+
+
+
 ## [0.35.2](https://github.com/libp2p/js-libp2p/compare/v0.33.0...v0.35.2) (2021-12-06)
 
 
```
```diff
@@ -497,9 +497,9 @@ const Libp2p = require('libp2p')
 const TCP = require('libp2p-tcp')
 const MPLEX = require('libp2p-mplex')
 const { NOISE } = require('libp2p-noise')
-const LevelStore = require('datastore-level')
+const LevelDatastore = require('datastore-level')
 
-const datastore = new LevelStore('path/to/store')
+const datastore = new LevelDatastore('path/to/store')
 await datastore.open()
 
 const node = await Libp2p.create({
@@ -672,9 +672,9 @@ const Libp2p = require('libp2p')
 const TCP = require('libp2p-tcp')
 const MPLEX = require('libp2p-mplex')
 const { NOISE } = require('libp2p-noise')
-const LevelStore = require('datastore-level')
+const LevelDatastore = require('datastore-level')
 
-const datastore = new LevelStore('path/to/store')
+const datastore = new LevelDatastore('path/to/store')
 const dsInstant = await datastore.open()
 
 const node = await Libp2p.create({
```
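The two hunks above only rename the datastore import (`LevelStore` → `LevelDatastore`). Below is a minimal sketch of how the renamed import would be wired into `Libp2p.create`; the module names come from the snippet itself, while the listen address and the surrounding wrapper are illustrative assumptions.

```js
const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const MPLEX = require('libp2p-mplex')
const { NOISE } = require('libp2p-noise')
const LevelDatastore = require('datastore-level')

async function main () {
  // Open the on-disk datastore before handing it to libp2p
  const datastore = new LevelDatastore('path/to/store')
  await datastore.open()

  const node = await Libp2p.create({
    datastore,
    addresses: { listen: ['/ip4/127.0.0.1/tcp/0'] }, // illustrative listen address
    modules: {
      transport: [TCP],
      streamMuxer: [MPLEX],
      connEncryption: [NOISE]
    }
  })

  await node.start()
}

main()
```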
```diff
@@ -41,13 +41,13 @@ const createNode = async () => {
   node1.pubsub.on(topic, (msg) => {
     console.log(`node1 received: ${uint8ArrayToString(msg.data)}`)
   })
-  await node1.pubsub.subscribe(topic)
+  node1.pubsub.subscribe(topic)
 
   // Will not receive own published messages by default
   node2.pubsub.on(topic, (msg) => {
     console.log(`node2 received: ${uint8ArrayToString(msg.data)}`)
   })
-  await node2.pubsub.subscribe(topic)
+  node2.pubsub.subscribe(topic)
 
   // node2 publishes "news" every second
   setInterval(() => {
```
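This hunk drops the `await` from `subscribe()`, which is treated as synchronous in the pubsub interface used here. A hedged, self-contained sketch of the updated example flow follows; the gossipsub router, topic name, published payload and dialing steps are illustrative assumptions, while the handler wiring mirrors the hunk above.

```js
const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const MPLEX = require('libp2p-mplex')
const { NOISE } = require('libp2p-noise')
const Gossipsub = require('libp2p-gossipsub')
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')

const createNode = async () => {
  const node = await Libp2p.create({
    addresses: { listen: ['/ip4/127.0.0.1/tcp/0'] },
    modules: {
      transport: [TCP],
      streamMuxer: [MPLEX],
      connEncryption: [NOISE],
      pubsub: Gossipsub
    }
  })
  await node.start()
  return node
}

;(async () => {
  const topic = 'news'
  const [node1, node2] = await Promise.all([createNode(), createNode()])

  // Connect the two nodes so gossip can propagate
  await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
  await node1.dial(node2.peerId)

  node1.pubsub.on(topic, (msg) => {
    console.log(`node1 received: ${uint8ArrayToString(msg.data)}`)
  })
  node1.pubsub.subscribe(topic)

  // node2 publishes to the topic once per second
  setInterval(() => {
    node2.pubsub.publish(topic, uint8ArrayFromString('fresh news'))
  }, 1000)
})()
```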
package.json (29 lines changed)

```diff
@@ -1,6 +1,6 @@
 {
   "name": "libp2p",
-  "version": "0.35.2",
+  "version": "0.35.4",
   "description": "JavaScript implementation of libp2p, a modular peer to peer network stack",
   "leadMaintainer": "Jacob Heun <jacobheun@gmail.com>",
   "main": "src/index.js",
@@ -69,7 +69,7 @@
     "node": ">=15.0.0"
   },
   "browser": {
-    "@motrix/nat-api": false
+    "nat-api": false
   },
   "eslintConfig": {
     "extends": "ipfs",
@@ -80,7 +80,6 @@
     ]
   },
   "dependencies": {
-    "@motrix/nat-api": "^0.3.1",
     "@vascosantos/moving-average": "^1.1.0",
     "abort-controller": "^3.0.0",
     "abortable-iterator": "^3.0.0",
@@ -114,6 +113,7 @@
     "multiformats": "^9.0.0",
     "multistream-select": "^2.0.0",
     "mutable-proxy": "^1.0.0",
+    "nat-api": "^0.3.1",
     "node-forge": "^0.10.0",
     "p-any": "^3.0.0",
     "p-fifo": "^1.0.0",
@@ -133,7 +133,7 @@
     "xsalsa20": "^1.1.0"
   },
   "devDependencies": {
-    "@chainsafe/libp2p-noise": "^4.0.0",
+    "@chainsafe/libp2p-noise": "^5.0.0",
     "@nodeutils/defaults-deep": "^1.1.0",
     "@types/es6-promisify": "^6.0.0",
     "@types/node": "^16.0.1",
@@ -151,9 +151,9 @@
     "libp2p": ".",
     "libp2p-bootstrap": "^0.14.0",
     "libp2p-delegated-content-routing": "^0.11.0",
-    "libp2p-delegated-peer-routing": "^0.11.0",
+    "libp2p-delegated-peer-routing": "^0.11.1",
     "libp2p-floodsub": "^0.27.0",
-    "libp2p-gossipsub": "^0.11.0",
+    "libp2p-gossipsub": "^0.12.1",
     "libp2p-interfaces-compliance-tests": "^2.0.1",
     "libp2p-interop": "^0.5.0",
     "libp2p-kad-dht": "^0.27.1",
@@ -184,21 +184,21 @@
     "dirkmc <dirkmdev@gmail.com>",
     "Chris Dostert <chrisdostert@users.noreply.github.com>",
     "Volker Mische <volker.mische@gmail.com>",
-    "zeim839 <50573884+zeim839@users.noreply.github.com>",
+    "Robert Kiel <robert.kiel@hoprnet.org>",
     "Richard Littauer <richard.littauer@gmail.com>",
+    "zeim839 <50573884+zeim839@users.noreply.github.com>",
     "a1300 <matthias-knopp@gmx.net>",
     "Ryan Bell <ryan@piing.net>",
     "ᴠɪᴄᴛᴏʀ ʙᴊᴇʟᴋʜᴏʟᴍ <victorbjelkholm@gmail.com>",
+    "Andrew Nesbitt <andrewnez@gmail.com>",
+    "Franck Royer <franck@royer.one>",
+    "Thomas Eizinger <thomas@eizinger.io>",
     "Giovanni T. Parra <fiatjaf@gmail.com>",
     "acolytec3 <17355484+acolytec3@users.noreply.github.com>",
-    "Franck Royer <franck@royer.one>",
+    "Alan Smithee <ggnore.alan.smithee@gmail.com>",
     "Elven <mon.samuel@qq.com>",
-    "Robert Kiel <robert.kiel@hoprnet.org>",
-    "Andrew Nesbitt <andrewnez@gmail.com>",
     "Samlior <samlior@foxmail.com>",
-    "Thomas Eizinger <thomas@eizinger.io>",
     "Didrik Nordström <didrik.nordstrom@gmail.com>",
-    "Smite Chow <xiaopengyou@live.com>",
     "Soeren <nikorpoulsen@gmail.com>",
     "Sönke Hahn <soenkehahn@gmail.com>",
     "TJKoury <TJKoury@gmail.com>",
@@ -218,7 +218,6 @@
     "shresthagrawal <34920931+shresthagrawal@users.noreply.github.com>",
     "swedneck <40505480+swedneck@users.noreply.github.com>",
     "greenSnot <greenSnot@users.noreply.github.com>",
-    "Alan Smithee <ggnore.alan.smithee@gmail.com>",
     "Aleksei <vozhdb@gmail.com>",
     "Bernd Strehl <bernd.strehl@gmail.com>",
     "Chris Bratlien <chrisbratlien@gmail.com>",
@@ -243,10 +242,12 @@
     "Lars Gierth <lgierth@users.noreply.github.com>",
     "Leask Wong <i@leaskh.com>",
     "Marcin Tojek <mtojek@users.noreply.github.com>",
+    "Marston Connell <34043723+TheMarstonConnell@users.noreply.github.com>",
     "Michael Burns <5170+mburns@users.noreply.github.com>",
     "Miguel Mota <miguelmota2@gmail.com>",
     "Nuno Nogueira <nunofmn@gmail.com>",
     "Philipp Muens <raute1337@gmx.de>",
-    "RasmusErik Voel Jensen <github@solsort.com>"
+    "RasmusErik Voel Jensen <github@solsort.com>",
+    "Smite Chow <xiaopengyou@live.com>"
   ]
 }
```
```diff
@@ -4,7 +4,7 @@ const debug = require('debug')
 const log = Object.assign(debug('libp2p:relay'), {
   error: debug('libp2p:relay:err')
 })
+const { codes } = require('./../errors')
 const {
   setDelayedInterval,
   clearDelayedInterval
@@ -88,7 +88,7 @@ class Relay {
       const cid = await namespaceToCid(RELAY_RENDEZVOUS_NS)
       await this._libp2p.contentRouting.provide(cid)
     } catch (/** @type {any} */ err) {
-      if (err.code === 'NO_ROUTERS_AVAILABLE') {
+      if (err.code === codes.ERR_NO_ROUTERS_AVAILABLE) {
         log.error('a content router, such as a DHT, must be provided in order to advertise the relay service', err)
         // Stop the advertise
         this.stop()
```
```diff
@@ -60,13 +60,7 @@ const DefaultConfig = {
     protocolPrefix: 'ipfs',
     dht: {
       enabled: false,
-      kBucketSize: 20,
-      randomWalk: {
-        enabled: false, // disabled waiting for https://github.com/libp2p/js-libp2p-kad-dht/issues/86
-        queriesPerPeriod: 1,
-        interval: 300e3,
-        timeout: 10e3
-      }
+      kBucketSize: 20
     },
     nat: {
       enabled: true,
```
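After this hunk the default DHT block carries only `enabled` and `kBucketSize`. The small sketch below shows how a user option would be layered over those defaults; the values mirror the hunk, and using `merge-options` directly here is an assumption for illustration (libp2p performs an equivalent merge internally).

```js
const mergeOptions = require('merge-options')

// Default DHT config after this change: no randomWalk block
const dhtDefaults = {
  dht: {
    enabled: false,
    kBucketSize: 20
  }
}

// A user config that wants the DHT only needs to flip `enabled`
const merged = mergeOptions(dhtDefaults, { dht: { enabled: true } })

console.log(merged.dht) // { enabled: true, kBucketSize: 20 }
```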
```diff
@@ -32,6 +32,10 @@ const defaultOptions = {
   defaultPeerValue: 1
 }
 
+const METRICS_COMPONENT = 'connection-manager'
+const METRICS_PEER_CONNECTIONS = 'peer-connections'
+const METRICS_ALL_CONNECTIONS = 'all-connections'
+
 /**
  * @typedef {import('../')} Libp2p
  * @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
@@ -160,6 +164,8 @@ class ConnectionManager extends EventEmitter {
 
     await Promise.all(tasks)
     this.connections.clear()
+    this._libp2p.metrics && this._libp2p.metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_PEER_CONNECTIONS, 0)
+    this._libp2p.metrics && this._libp2p.metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_ALL_CONNECTIONS, 0)
   }
 
   /**
@@ -211,10 +217,13 @@ class ConnectionManager extends EventEmitter {
     const storedConn = this.connections.get(peerIdStr)
 
     this.emit('peer:connect', connection)
 
     if (storedConn) {
       storedConn.push(connection)
     } else {
       this.connections.set(peerIdStr, [connection])
+      this._libp2p.metrics && this._libp2p.metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_PEER_CONNECTIONS, this.connections.size)
+      this._libp2p.metrics && this._libp2p.metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_ALL_CONNECTIONS, this.size)
     }
 
     this._libp2p.peerStore.keyBook.set(peerId, peerId.pubKey)
@@ -243,7 +252,12 @@ class ConnectionManager extends EventEmitter {
       this.connections.delete(peerId)
       this._peerValues.delete(connection.remotePeer.toB58String())
       this.emit('peer:disconnect', connection)
+
+      this._libp2p.metrics && this._libp2p.metrics.onPeerDisconnected(connection.remotePeer)
     }
 
+    this._libp2p.metrics && this._libp2p.metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_PEER_CONNECTIONS, this.connections.size)
+    this._libp2p.metrics && this._libp2p.metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_ALL_CONNECTIONS, this.size)
   }
 
   /**
```
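A hedged restatement of the guard pattern these hunks add: metrics are optional, so every update is short-circuited on the metrics object, and values are namespaced by a component name plus a metric name. The wrapper function and the `manager` parameter below are hypothetical; the string constants are the ones introduced above.

```js
const METRICS_COMPONENT = 'connection-manager'
const METRICS_PEER_CONNECTIONS = 'peer-connections'
const METRICS_ALL_CONNECTIONS = 'all-connections'

// `manager` stands in for a ConnectionManager instance with `_libp2p`,
// a `connections` Map (peer id -> Connection[]) and a `size` getter.
function updateConnectionMetrics (manager) {
  const metrics = manager._libp2p.metrics

  // No-ops when metrics collection is disabled
  metrics && metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_PEER_CONNECTIONS, manager.connections.size)
  metrics && metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_ALL_CONNECTIONS, manager.size)
}
```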
```diff
@@ -54,7 +54,7 @@ class ContentRouting {
    */
   async * findProviders (key, options = {}) {
     if (!this.routers.length) {
-      throw errCode(new Error('No content this.routers available'), 'NO_ROUTERS_AVAILABLE')
+      throw errCode(new Error('No content this.routers available'), codes.ERR_NO_ROUTERS_AVAILABLE)
     }
 
     yield * pipe(
@@ -77,7 +77,7 @@ class ContentRouting {
    */
   async provide (key) {
     if (!this.routers.length) {
-      throw errCode(new Error('No content routers available'), 'NO_ROUTERS_AVAILABLE')
+      throw errCode(new Error('No content routers available'), codes.ERR_NO_ROUTERS_AVAILABLE)
     }
 
     await Promise.all(this.routers.map((router) => router.provide(key)))
```
```diff
@@ -27,8 +27,12 @@ class DHTPeerRouting {
    */
   async findPeer (peerId, options = {}) {
     for await (const event of this._dht.findPeer(peerId, options)) {
-      if (event.name === 'FINAL_PEER') {
-        return event.peer
+      if (event.name === 'PEER_RESPONSE') {
+        const peer = event.closer.find(peerData => peerData.id.equals(peerId))
+
+        if (peer) {
+          return peer
+        }
       }
     }
 
```
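A standalone sketch of the lookup the new code performs: `findPeer` on this version of libp2p-kad-dht yields query events rather than a single result, so the caller watches for `PEER_RESPONSE` events and picks the target out of the `closer` array. The helper name is hypothetical and `dht`/`peerId` are assumed inputs; the event handling mirrors the hunk above.

```js
async function findPeerViaQueryEvents (dht, peerId, options = {}) {
  for await (const event of dht.findPeer(peerId, options)) {
    if (event.name === 'PEER_RESPONSE') {
      // `closer` holds peer data ({ id, multiaddrs }) returned by the queried peer
      const match = event.closer.find(peerData => peerData.id.equals(peerId))

      if (match) {
        return match
      }
    }
  }
}
```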
```diff
@@ -7,6 +7,7 @@ const FIFO = require('p-fifo')
 const pAny = require('p-any')
 // @ts-expect-error setMaxListeners is missing from the types
 const { setMaxListeners } = require('events')
+const { codes } = require('../errors')
 
 /**
  * @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
@@ -55,7 +56,7 @@ class DialRequest {
     const tokens = this.dialer.getTokens(this.addrs.length)
     // If no tokens are available, throw
     if (tokens.length < 1) {
-      throw errCode(new Error('No dial tokens available'), 'ERR_NO_DIAL_TOKENS')
+      throw errCode(new Error('No dial tokens available'), codes.ERR_NO_DIAL_TOKENS)
     }
 
     const tokenHolder = new FIFO()
```
```diff
@@ -22,6 +22,10 @@ const {
   MAX_ADDRS_TO_DIAL
 } = require('../constants')
 
+const METRICS_COMPONENT = 'dialler'
+const METRICS_PENDING_DIALS = 'pending-dials'
+const METRICS_PENDING_DIAL_TARGETS = 'pending-dials-targers'
+
 /**
  * @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
  * @typedef {import('peer-id')} PeerId
@@ -44,6 +48,7 @@ const {
 * @property {number} [maxDialsPerPeer = MAX_PER_PEER_DIALS] - Number of max concurrent dials per peer.
 * @property {number} [dialTimeout = DIAL_TIMEOUT] - How long a dial attempt is allowed to take.
 * @property {Record<string, Resolver>} [resolvers = {}] - multiaddr resolvers to use when dialing
+ * @property {import('../metrics')} [metrics]
 *
 * @typedef DialTarget
 * @property {string} id
@@ -69,7 +74,8 @@ class Dialer {
     maxAddrsToDial = MAX_ADDRS_TO_DIAL,
     dialTimeout = DIAL_TIMEOUT,
     maxDialsPerPeer = MAX_PER_PEER_DIALS,
-    resolvers = {}
+    resolvers = {},
+    metrics
   }) {
     this.transportManager = transportManager
     this.peerStore = peerStore
@@ -81,6 +87,7 @@ class Dialer {
     this.tokens = [...new Array(maxParallelDials)].map((_, index) => index)
     this._pendingDials = new Map()
     this._pendingDialTargets = new Map()
+    this._metrics = metrics
 
     for (const [key, value] of Object.entries(resolvers)) {
       Multiaddr.resolvers.set(key, value)
@@ -104,6 +111,9 @@ class Dialer {
       pendingTarget.reject(new AbortError('Dialer was destroyed'))
     }
     this._pendingDialTargets.clear()
+
+    this._metrics && this._metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_PENDING_DIALS, 0)
+    this._metrics && this._metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_PENDING_DIAL_TARGETS, 0)
   }
 
   /**
@@ -153,16 +163,20 @@ class Dialer {
     const id = `${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}`
     const cancellablePromise = new Promise((resolve, reject) => {
       this._pendingDialTargets.set(id, { resolve, reject })
+      this._metrics && this._metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_PENDING_DIAL_TARGETS, this._pendingDialTargets.size)
     })
 
-    const dialTarget = await Promise.race([
-      this._createDialTarget(peer),
-      cancellablePromise
-    ])
-
-    this._pendingDialTargets.delete(id)
-    return dialTarget
+    try {
+      const dialTarget = await Promise.race([
+        this._createDialTarget(peer),
+        cancellablePromise
+      ])
+
+      return dialTarget
+    } finally {
+      this._pendingDialTargets.delete(id)
+      this._metrics && this._metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_PENDING_DIAL_TARGETS, this._pendingDialTargets.size)
+    }
   }
 
   /**
@@ -250,9 +264,13 @@ class Dialer {
       destroy: () => {
         timeoutController.clear()
         this._pendingDials.delete(dialTarget.id)
+        this._metrics && this._metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_PENDING_DIALS, this._pendingDials.size)
       }
     }
     this._pendingDials.set(dialTarget.id, pendingDial)
 
+    this._metrics && this._metrics.updateComponentMetric(METRICS_COMPONENT, METRICS_PENDING_DIALS, this._pendingDials.size)
+
    return pendingDial
   }
 
```
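The restructured dial-target lookup above boils down to a `Promise.race` wrapped in `try`/`finally`, so the pending entry and its metric are cleaned up however the race settles. A generic sketch of that cleanup pattern, with hypothetical names; the component and metric strings are the ones defined in this file.

```js
// `pending` is a Map of id -> pending entry, `metrics` may be undefined.
async function raceWithCleanup (pending, metrics, id, work, cancellablePromise) {
  try {
    // Whichever promise settles first wins; a rejection propagates to the caller
    return await Promise.race([work, cancellablePromise])
  } finally {
    // Always drop the pending entry and refresh the gauge, even on failure
    pending.delete(id)
    metrics && metrics.updateComponentMetric('dialler', 'pending-dials-targers', pending.size)
  }
}
```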
```diff
@@ -36,5 +36,29 @@ exports.codes = {
   ERR_TRANSPORT_DIAL_FAILED: 'ERR_TRANSPORT_DIAL_FAILED',
   ERR_UNSUPPORTED_PROTOCOL: 'ERR_UNSUPPORTED_PROTOCOL',
   ERR_INVALID_MULTIADDR: 'ERR_INVALID_MULTIADDR',
-  ERR_SIGNATURE_NOT_VALID: 'ERR_SIGNATURE_NOT_VALID'
+  ERR_SIGNATURE_NOT_VALID: 'ERR_SIGNATURE_NOT_VALID',
+  ERR_FIND_SELF: 'ERR_FIND_SELF',
+  ERR_NO_ROUTERS_AVAILABLE: 'ERR_NO_ROUTERS_AVAILABLE',
+  ERR_CONNECTION_NOT_MULTIPLEXED: 'ERR_CONNECTION_NOT_MULTIPLEXED',
+  ERR_NO_DIAL_TOKENS: 'ERR_NO_DIAL_TOKENS',
+  ERR_KEYCHAIN_REQUIRED: 'ERR_KEYCHAIN_REQUIRED',
+  ERR_INVALID_CMS: 'ERR_INVALID_CMS',
+  ERR_MISSING_KEYS: 'ERR_MISSING_KEYS',
+  ERR_NO_KEY: 'ERR_NO_KEY',
+  ERR_INVALID_KEY_NAME: 'ERR_INVALID_KEY_NAME',
+  ERR_INVALID_KEY_TYPE: 'ERR_INVALID_KEY_TYPE',
+  ERR_KEY_ALREADY_EXISTS: 'ERR_KEY_ALREADY_EXISTS',
+  ERR_INVALID_KEY_SIZE: 'ERR_INVALID_KEY_SIZE',
+  ERR_KEY_NOT_FOUND: 'ERR_KEY_NOT_FOUND',
+  ERR_OLD_KEY_NAME_INVALID: 'ERR_OLD_KEY_NAME_INVALID',
+  ERR_NEW_KEY_NAME_INVALID: 'ERR_NEW_KEY_NAME_INVALID',
+  ERR_PASSWORD_REQUIRED: 'ERR_PASSWORD_REQUIRED',
+  ERR_PEM_REQUIRED: 'ERR_PEM_REQUIRED',
+  ERR_CANNOT_READ_KEY: 'ERR_CANNOT_READ_KEY',
+  ERR_MISSING_PRIVATE_KEY: 'ERR_MISSING_PRIVATE_KEY',
+  ERR_INVALID_OLD_PASS_TYPE: 'ERR_INVALID_OLD_PASS_TYPE',
+  ERR_INVALID_NEW_PASS_TYPE: 'ERR_INVALID_NEW_PASS_TYPE',
+  ERR_INVALID_PASS_LENGTH: 'ERR_INVALID_PASS_LENGTH',
+  ERR_NOT_IMPLEMENTED: 'ERR_NOT_IMPLEMENTED',
+  ERR_WRONG_PING_ACK: 'ERR_WRONG_PING_ACK'
 }
```
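All of the call sites touched in this compare follow the same pattern, sketched below: errors are created with `err-code` and matched against the shared `codes` table rather than ad-hoc string literals. The `requireRouters` helper is hypothetical, and the inline `codes` object simply stands in for a couple of entries from the table above.

```js
const errCode = require('err-code')

// Stand-in for require('./src/errors').codes
const codes = {
  ERR_NO_ROUTERS_AVAILABLE: 'ERR_NO_ROUTERS_AVAILABLE',
  ERR_WRONG_PING_ACK: 'ERR_WRONG_PING_ACK'
}

function requireRouters (routers) {
  if (!routers.length) {
    // err-code attaches the code to the Error as `err.code`
    throw errCode(new Error('No content routers available'), codes.ERR_NO_ROUTERS_AVAILABLE)
  }
}

try {
  requireRouters([])
} catch (err) {
  if (err.code === codes.ERR_NO_ROUTERS_AVAILABLE) {
    console.error('a content router, such as a DHT, must be provided')
  }
}
```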
src/index.js (15 lines changed)

```diff
@@ -56,16 +56,9 @@ const { updateSelfPeerRecord } = require('./record/utils')
 * @property {MuxedStream} stream
 * @property {string} protocol
 *
- * @typedef {Object} RandomWalkOptions
- * @property {boolean} [enabled = false]
- * @property {number} [queriesPerPeriod = 1]
- * @property {number} [interval = 300e3]
- * @property {number} [timeout = 10e3]
- *
 * @typedef {Object} DhtOptions
 * @property {boolean} [enabled = false]
 * @property {number} [kBucketSize = 20]
- * @property {RandomWalkOptions} [randomWalk]
 * @property {boolean} [clientMode]
 * @property {import('libp2p-interfaces/src/types').DhtSelectors} [selectors]
 * @property {import('libp2p-interfaces/src/types').DhtValidators} [validators]
@@ -204,10 +197,11 @@ class Libp2p extends EventEmitter {
 
     // Create Metrics
     if (this._options.metrics.enabled) {
-      this.metrics = new Metrics({
-        ...this._options.metrics,
-        connectionManager: this.connectionManager
+      const metrics = new Metrics({
+        ...this._options.metrics
       })
+
+      this.metrics = metrics
     }
 
     // Create keychain
@@ -269,6 +263,7 @@ class Libp2p extends EventEmitter {
     this.dialer = new Dialer({
       transportManager: this.transportManager,
       peerStore: this.peerStore,
+      metrics: this.metrics,
       ...this._options.dialer
     })
 
```
```diff
@@ -10,6 +10,7 @@ const { certificateForKey, findAsync } = require('./util')
 const errcode = require('err-code')
 const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
 const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
+const { codes } = require('../errors')
 
 const privates = new WeakMap()
 
@@ -31,7 +32,7 @@ class CMS {
    */
   constructor (keychain, dek) {
     if (!keychain) {
-      throw errcode(new Error('keychain is required'), 'ERR_KEYCHAIN_REQUIRED')
+      throw errcode(new Error('keychain is required'), codes.ERR_KEYCHAIN_REQUIRED)
     }
 
     this.keychain = keychain
@@ -49,7 +50,7 @@ class CMS {
    */
   async encrypt (name, plain) {
     if (!(plain instanceof Uint8Array)) {
-      throw errcode(new Error('Plain data must be a Uint8Array'), 'ERR_INVALID_PARAMS')
+      throw errcode(new Error('Plain data must be a Uint8Array'), codes.ERR_INVALID_PARAMETERS)
     }
 
     const key = await this.keychain.findKeyByName(name)
@@ -81,7 +82,7 @@ class CMS {
    */
   async decrypt (cmsData) {
     if (!(cmsData instanceof Uint8Array)) {
-      throw errcode(new Error('CMS data is required'), 'ERR_INVALID_PARAMS')
+      throw errcode(new Error('CMS data is required'), codes.ERR_INVALID_PARAMETERS)
     }
 
     let cms
@@ -91,7 +92,7 @@ class CMS {
       // @ts-ignore not defined
       cms = forge.pkcs7.messageFromAsn1(obj)
     } catch (/** @type {any} */ err) {
-      throw errcode(new Error('Invalid CMS: ' + err.message), 'ERR_INVALID_CMS')
+      throw errcode(new Error('Invalid CMS: ' + err.message), codes.ERR_INVALID_CMS)
     }
 
     // Find a recipient whose key we hold. We only deal with recipient certs
@@ -123,7 +124,7 @@ class CMS {
     if (!r) {
       // @ts-ignore cms types not defined
       const missingKeys = recipients.map(r => r.keyId)
-      throw errcode(new Error('Decryption needs one of the key(s): ' + missingKeys.join(', ')), 'ERR_MISSING_KEYS', {
+      throw errcode(new Error('Decryption needs one of the key(s): ' + missingKeys.join(', ')), codes.ERR_MISSING_KEYS, {
         missingKeys
       })
     }
@@ -131,7 +132,7 @@ class CMS {
     const key = await this.keychain.findKeyById(r.keyId)
 
     if (!key) {
-      throw errcode(new Error('No key available to decrypto'), 'ERR_NO_KEY')
+      throw errcode(new Error('No key available to decrypto'), codes.ERR_NO_KEY)
     }
 
     const pem = await this.keychain._getPrivateKey(key.name)
```
```diff
@@ -10,6 +10,7 @@ const crypto = require('libp2p-crypto')
 const { Key } = require('interface-datastore/key')
 const CMS = require('./cms')
 const errcode = require('err-code')
+const { codes } = require('../errors')
 const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
 const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
 
@@ -210,21 +211,21 @@ class Keychain {
     const self = this
 
     if (!validateKeyName(name) || name === 'self') {
-      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
+      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
     }
 
     if (typeof type !== 'string') {
-      return throwDelayed(errcode(new Error(`Invalid key type '${type}'`), 'ERR_INVALID_KEY_TYPE'))
+      return throwDelayed(errcode(new Error(`Invalid key type '${type}'`), codes.ERR_INVALID_KEY_TYPE))
     }
 
     const dsname = DsName(name)
     const exists = await self.store.has(dsname)
-    if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS'))
+    if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), codes.ERR_KEY_ALREADY_EXISTS))
 
     switch (type.toLowerCase()) {
       case 'rsa':
         if (!Number.isSafeInteger(size) || size < 2048) {
-          return throwDelayed(errcode(new Error(`Invalid RSA key size ${size}`), 'ERR_INVALID_KEY_SIZE'))
+          return throwDelayed(errcode(new Error(`Invalid RSA key size ${size}`), codes.ERR_INVALID_KEY_SIZE))
         }
         break
       default:
@@ -297,7 +298,7 @@ class Keychain {
    */
   async findKeyByName (name) {
     if (!validateKeyName(name)) {
-      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
+      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
     }
 
     const dsname = DsInfoName(name)
@@ -305,7 +306,7 @@ class Keychain {
       const res = await this.store.get(dsname)
       return JSON.parse(uint8ArrayToString(res))
     } catch (/** @type {any} */ err) {
-      return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND'))
+      return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), codes.ERR_KEY_NOT_FOUND))
     }
   }
 
@@ -318,7 +319,7 @@ class Keychain {
   async removeKey (name) {
     const self = this
     if (!validateKeyName(name) || name === 'self') {
-      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
+      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
     }
     const dsname = DsName(name)
     const keyInfo = await self.findKeyByName(name)
@@ -339,10 +340,10 @@ class Keychain {
   async renameKey (oldName, newName) {
     const self = this
     if (!validateKeyName(oldName) || oldName === 'self') {
-      return throwDelayed(errcode(new Error(`Invalid old key name '${oldName}'`), 'ERR_OLD_KEY_NAME_INVALID'))
+      return throwDelayed(errcode(new Error(`Invalid old key name '${oldName}'`), codes.ERR_OLD_KEY_NAME_INVALID))
     }
     if (!validateKeyName(newName) || newName === 'self') {
-      return throwDelayed(errcode(new Error(`Invalid new key name '${newName}'`), 'ERR_NEW_KEY_NAME_INVALID'))
+      return throwDelayed(errcode(new Error(`Invalid new key name '${newName}'`), codes.ERR_NEW_KEY_NAME_INVALID))
     }
     const oldDsname = DsName(oldName)
     const newDsname = DsName(newName)
@@ -350,7 +351,7 @@ class Keychain {
     const newInfoName = DsInfoName(newName)
 
     const exists = await self.store.has(newDsname)
-    if (exists) return throwDelayed(errcode(new Error(`Key '${newName}' already exists`), 'ERR_KEY_ALREADY_EXISTS'))
+    if (exists) return throwDelayed(errcode(new Error(`Key '${newName}' already exists`), codes.ERR_KEY_ALREADY_EXISTS))
 
     try {
       const pem = await self.store.get(oldDsname)
@@ -379,10 +380,10 @@ class Keychain {
    */
   async exportKey (name, password) {
     if (!validateKeyName(name)) {
-      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
+      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
     }
     if (!password) {
-      return throwDelayed(errcode(new Error('Password is required'), 'ERR_PASSWORD_REQUIRED'))
+      return throwDelayed(errcode(new Error('Password is required'), codes.ERR_PASSWORD_REQUIRED))
     }
 
     const dsname = DsName(name)
@@ -409,20 +410,20 @@ class Keychain {
   async importKey (name, pem, password) {
     const self = this
     if (!validateKeyName(name) || name === 'self') {
-      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
+      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
     }
     if (!pem) {
-      return throwDelayed(errcode(new Error('PEM encoded key is required'), 'ERR_PEM_REQUIRED'))
+      return throwDelayed(errcode(new Error('PEM encoded key is required'), codes.ERR_PEM_REQUIRED))
     }
     const dsname = DsName(name)
     const exists = await self.store.has(dsname)
-    if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS'))
+    if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), codes.ERR_KEY_ALREADY_EXISTS))
 
     let privateKey
     try {
       privateKey = await crypto.keys.import(pem, password)
     } catch (/** @type {any} */ err) {
-      return throwDelayed(errcode(new Error('Cannot read the key, most likely the password is wrong'), 'ERR_CANNOT_READ_KEY'))
+      return throwDelayed(errcode(new Error('Cannot read the key, most likely the password is wrong'), codes.ERR_CANNOT_READ_KEY))
     }
 
     let kid
@@ -457,16 +458,16 @@ class Keychain {
   async importPeer (name, peer) {
     const self = this
     if (!validateKeyName(name)) {
-      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
+      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
     }
     if (!peer || !peer.privKey) {
-      return throwDelayed(errcode(new Error('Peer.privKey is required'), 'ERR_MISSING_PRIVATE_KEY'))
+      return throwDelayed(errcode(new Error('Peer.privKey is required'), codes.ERR_MISSING_PRIVATE_KEY))
    }
 
     const privateKey = peer.privKey
     const dsname = DsName(name)
     const exists = await self.store.has(dsname)
-    if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS'))
+    if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), codes.ERR_KEY_ALREADY_EXISTS))
 
     try {
       const kid = await privateKey.id()
@@ -495,7 +496,7 @@ class Keychain {
    */
   async _getPrivateKey (name) {
     if (!validateKeyName(name)) {
-      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME'))
+      return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), codes.ERR_INVALID_KEY_NAME))
     }
 
     try {
@@ -503,7 +504,7 @@ class Keychain {
       const res = await this.store.get(dsname)
       return uint8ArrayToString(res)
     } catch (/** @type {any} */ err) {
-      return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND'))
+      return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), codes.ERR_KEY_NOT_FOUND))
     }
   }
 
@@ -515,13 +516,13 @@ class Keychain {
    */
   async rotateKeychainPass (oldPass, newPass) {
     if (typeof oldPass !== 'string') {
-      return throwDelayed(errcode(new Error(`Invalid old pass type '${typeof oldPass}'`), 'ERR_INVALID_OLD_PASS_TYPE'))
+      return throwDelayed(errcode(new Error(`Invalid old pass type '${typeof oldPass}'`), codes.ERR_INVALID_OLD_PASS_TYPE))
     }
     if (typeof newPass !== 'string') {
-      return throwDelayed(errcode(new Error(`Invalid new pass type '${typeof newPass}'`), 'ERR_INVALID_NEW_PASS_TYPE'))
+      return throwDelayed(errcode(new Error(`Invalid new pass type '${typeof newPass}'`), codes.ERR_INVALID_NEW_PASS_TYPE))
     }
     if (newPass.length < 20) {
-      return throwDelayed(errcode(new Error(`Invalid pass length ${newPass.length}`), 'ERR_INVALID_PASS_LENGTH'))
+      return throwDelayed(errcode(new Error(`Invalid pass length ${newPass.length}`), codes.ERR_INVALID_PASS_LENGTH))
     }
     log('recreating keychain')
     const oldDek = privates.get(this).dek
```
```diff
@@ -24,9 +24,6 @@ const directionToEvent = {
  */
 
 /**
- * @typedef MetricsProperties
- * @property {import('../connection-manager')} connectionManager
- *
  * @typedef MetricsOptions
  * @property {number} [computeThrottleMaxQueueSize = defaultOptions.computeThrottleMaxQueueSize]
  * @property {number} [computeThrottleTimeout = defaultOptions.computeThrottleTimeout]
@@ -37,7 +34,7 @@ const directionToEvent = {
 class Metrics {
   /**
    * @class
-   * @param {MetricsProperties & MetricsOptions} options
+   * @param {MetricsOptions} options
    */
   constructor (options) {
     this._options = mergeOptions(defaultOptions, options)
@@ -47,10 +44,7 @@ class Metrics {
     this._oldPeers = oldPeerLRU(this._options.maxOldPeersRetention)
     this._running = false
     this._onMessage = this._onMessage.bind(this)
-    this._connectionManager = options.connectionManager
-    this._connectionManager.on('peer:disconnect', (connection) => {
-      this.onPeerDisconnected(connection.remotePeer)
-    })
+    this._componentMetrics = new Map()
   }
 
   /**
@@ -94,6 +88,22 @@ class Metrics {
     return Array.from(this._peerStats.keys())
   }
 
+  /**
+   * @returns {Map}
+   */
+  getComponentMetrics () {
+    return this._componentMetrics
+  }
+
+  updateComponentMetric (component, metric, value) {
+    if (!this._componentMetrics.has(component)) {
+      this._componentMetrics.set(component, new Map())
+    }
+
+    const map = this._componentMetrics.get(component)
+    map.set(metric, value)
+  }
+
   /**
    * Returns the `Stats` object for the given `PeerId` whether it
    * is a live peer, or in the disconnected peer LRU cache.
```
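Taken together with the connection-manager and dialer hunks, the new accessor exposes a Map-of-Maps view of per-component values. A hedged end-to-end sketch follows; the accessor and the `metrics.enabled` option are the ones touched in this compare, while the node setup, listen address and example output are illustrative assumptions.

```js
const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const MPLEX = require('libp2p-mplex')
const { NOISE } = require('libp2p-noise')

async function dumpComponentMetrics () {
  const node = await Libp2p.create({
    addresses: { listen: ['/ip4/127.0.0.1/tcp/0'] },
    modules: { transport: [TCP], streamMuxer: [MPLEX], connEncryption: [NOISE] },
    metrics: { enabled: true }
  })
  await node.start()

  // e.g. Map { 'connection-manager' => Map { 'peer-connections' => 0, ... } }
  for (const [component, metrics] of node.metrics.getComponentMetrics()) {
    for (const [name, value] of metrics) {
      console.log(`${component}/${name} = ${value}`)
    }
  }

  await node.stop()
}

dumpComponentMetrics()
```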
```diff
@@ -1,7 +1,7 @@
 'use strict'
 
 // @ts-ignore nat-api does not export types
-const NatAPI = require('@motrix/nat-api')
+const NatAPI = require('nat-api')
 const debug = require('debug')
 const { promisify } = require('es6-promisify')
 const { Multiaddr } = require('multiaddr')
```
```diff
@@ -5,6 +5,7 @@ const log = Object.assign(debug('libp2p:peer-routing'), {
   error: debug('libp2p:peer-routing:err')
 })
 const errCode = require('err-code')
+const errors = require('./errors')
 const {
   storeAddresses,
   uniquePeers,
@@ -104,19 +105,24 @@ class PeerRouting {
    */
   async findPeer (id, options) { // eslint-disable-line require-await
     if (!this._routers.length) {
-      throw errCode(new Error('No peer routers available'), 'NO_ROUTERS_AVAILABLE')
+      throw errCode(new Error('No peer routers available'), errors.codes.ERR_NO_ROUTERS_AVAILABLE)
     }
 
     if (id.toB58String() === this._peerId.toB58String()) {
-      throw errCode(new Error('Should not try to find self'), 'ERR_FIND_SELF')
+      throw errCode(new Error('Should not try to find self'), errors.codes.ERR_FIND_SELF)
     }
 
     const output = await pipe(
       merge(
-        ...this._routers.map(router => [router.findPeer(id, options)])
+        ...this._routers.map(router => (async function * () {
+          try {
+            yield await router.findPeer(id, options)
+          } catch (err) {
+            log.error(err)
+          }
+        })())
       ),
       (source) => filter(source, Boolean),
-      // @ts-ignore findPeer resolves a Promise
       (source) => storeAddresses(source, this._peerStore),
       (source) => first(source)
     )
@@ -125,7 +131,7 @@ class PeerRouting {
       return output
     }
 
-    throw errCode(new Error('not found'), 'NOT_FOUND')
+    throw errCode(new Error(errors.messages.NOT_FOUND), errors.codes.ERR_NOT_FOUND)
   }
 
   /**
@@ -139,7 +145,7 @@ class PeerRouting {
    */
   async * getClosestPeers (key, options = { timeout: 30e3 }) {
     if (!this._routers.length) {
-      throw errCode(new Error('No peer routers available'), 'NO_ROUTERS_AVAILABLE')
+      throw errCode(new Error('No peer routers available'), errors.codes.ERR_NO_ROUTERS_AVAILABLE)
     }
 
     if (options.timeout) {
```
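The reworked `findPeer` wraps each router's promise in a one-shot async generator so a rejection from one router is logged instead of surfacing as an unhandled rejection inside the merged stream. A standalone sketch of that wrapping trick is below; `merge` is assumed to be `it-merge` as used elsewhere in this module, and the function name is hypothetical.

```js
const merge = require('it-merge')

function mergeFindPeerResults (routers, id, options) {
  return merge(
    ...routers.map(router => (async function * () {
      try {
        // Turn the promise into a single-item async iterable
        yield await router.findPeer(id, options)
      } catch (err) {
        // A failing router must not break the other routers' results
        console.error(err)
      }
    })())
  )
}
```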
```diff
@@ -2,10 +2,7 @@
 
 const errcode = require('err-code')
 const PeerId = require('peer-id')
-const {
-  codes: { ERR_INVALID_PARAMETERS }
-} = require('../errors')
+const { codes } = require('../errors')
 
 /**
  * @param {any} data
@@ -48,7 +45,7 @@ class Book {
    * @param {any[]|any} data
    */
   set (peerId, data) {
-    throw errcode(new Error('set must be implemented by the subclass'), 'ERR_NOT_IMPLEMENTED')
+    throw errcode(new Error('set must be implemented by the subclass'), codes.ERR_NOT_IMPLEMENTED)
   }
 
   /**
@@ -94,7 +91,7 @@ class Book {
    */
   get (peerId) {
     if (!PeerId.isPeerId(peerId)) {
-      throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
+      throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
     }
 
     const rec = this.data.get(peerId.toB58String())
@@ -111,7 +108,7 @@ class Book {
    */
   delete (peerId) {
     if (!PeerId.isPeerId(peerId)) {
-      throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
+      throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
     }
 
     if (!this.data.delete(peerId.toB58String())) {
```
```diff
@@ -5,7 +5,7 @@ const log = Object.assign(debug('libp2p:ping'), {
   error: debug('libp2p:ping:err')
 })
 const errCode = require('err-code')
+const { codes } = require('../errors')
 const crypto = require('libp2p-crypto')
 const { pipe } = require('it-pipe')
 // @ts-ignore it-buffer has no types exported
@@ -50,7 +50,7 @@ async function ping (node, peer) {
   const end = Date.now()
 
   if (!equals(data, result)) {
-    throw errCode(new Error('Received wrong ping ack'), 'ERR_WRONG_PING_ACK')
+    throw errCode(new Error('Received wrong ping ack'), codes.ERR_WRONG_PING_ACK)
   }
 
   return end - start
```
```diff
@@ -297,7 +297,7 @@ class Upgrader {
     maConn.timeline.upgraded = Date.now()
 
     const errConnectionNotMultiplexed = () => {
-      throw errCode(new Error('connection is not multiplexed'), 'ERR_CONNECTION_NOT_MULTIPLEXED')
+      throw errCode(new Error('connection is not multiplexed'), codes.ERR_CONNECTION_NOT_MULTIPLEXED)
     }
 
     // Create the connection
```
@@ -36,14 +36,14 @@ describe('content-routing', () => {
         throw new Error('.findProviders should return an error')
       } catch (/** @type {any} */ err) {
         expect(err).to.exist()
-        expect(err.code).to.equal('NO_ROUTERS_AVAILABLE')
+        expect(err.code).to.equal('ERR_NO_ROUTERS_AVAILABLE')
       }
     })

     it('.provide should return an error', async () => {
       await expect(node.contentRouting.provide('a cid'))
         .to.eventually.be.rejected()
-        .and.to.have.property('code', 'NO_ROUTERS_AVAILABLE')
+        .and.to.have.property('code', 'ERR_NO_ROUTERS_AVAILABLE')
     })
   })

@@ -87,8 +87,11 @@ describe('content-routing', () => {
       sinon.stub(nodes[0]._dht, 'findProviders').callsFake(function * () {
         deferred.resolve()
         yield {
-          id: providerPeerId,
-          multiaddrs: []
+          name: 'PROVIDER',
+          providers: [{
+            id: providerPeerId,
+            multiaddrs: []
+          }]
         }
       })

@@ -361,7 +364,12 @@ describe('content-routing', () => {
       }

       sinon.stub(node._dht, 'findProviders').callsFake(async function * () {
-        yield result1
+        yield {
+          name: 'PROVIDER',
+          providers: [
+            result1
+          ]
+        }
       })
       sinon.stub(delegate, 'findProviders').callsFake(async function * () {
         yield result2

@@ -382,7 +390,8 @@ describe('content-routing', () => {
       const dhtDeferred = pDefer()
       const delegatedDeferred = pDefer()

-      sinon.stub(node._dht, 'provide').callsFake(() => {
+      sinon.stub(node._dht, 'provide').callsFake(async function * () {
+        yield
        dhtDeferred.resolve()
       })

@@ -406,7 +415,12 @@ describe('content-routing', () => {
       }]

       sinon.stub(node._dht, 'findProviders').callsFake(function * () {
-        yield results[0]
+        yield {
+          name: 'PROVIDER',
+          providers: [
+            results[0]
+          ]
+        }
       })

       sinon.stub(delegate, 'findProviders').callsFake(function * () { // eslint-disable-line require-yield
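The stub changes above follow the newer libp2p-kad-dht query API: findProviders is now an async generator of query events, and provider records arrive inside events named 'PROVIDER' rather than being yielded directly. A hedged sketch of consuming that event stream (the event shape is taken from the stubs above; dht stands for any started DHT instance):

// Collect provider infos out of the DHT's query-event stream.
async function collectProviders (dht, cid) {
  const found = []

  for await (const event of dht.findProviders(cid)) {
    // only PROVIDER events carry { id, multiaddrs } records
    if (event.name === 'PROVIDER') {
      found.push(...event.providers)
    }
  }

  return found
}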
@@ -38,13 +38,13 @@ describe('DHT subsystem is configurable', () => {
     })

     libp2p = await create(customOptions)
-    expect(libp2p._dht.isStarted).to.equal(false)
+    expect(libp2p._dht.isStarted()).to.equal(false)

     await libp2p.start()
-    expect(libp2p._dht.isStarted).to.equal(true)
+    expect(libp2p._dht.isStarted()).to.equal(true)

     await libp2p.stop()
-    expect(libp2p._dht.isStarted).to.equal(false)
+    expect(libp2p._dht.isStarted()).to.equal(false)
   })

   it('should not start if disabled once libp2p starts', async () => {

@@ -63,10 +63,10 @@ describe('DHT subsystem is configurable', () => {
     })

     libp2p = await create(customOptions)
-    expect(libp2p._dht.isStarted).to.equal(false)
+    expect(libp2p._dht.isStarted()).to.equal(false)

     await libp2p.start()
-    expect(libp2p._dht.isStarted).to.equal(false)
+    expect(libp2p._dht.isStarted()).to.equal(false)
   })

   it('should allow a manual start', async () => {

@@ -86,9 +86,9 @@ describe('DHT subsystem is configurable', () => {

     libp2p = await create(customOptions)
     await libp2p.start()
-    expect(libp2p._dht.isStarted).to.equal(false)
+    expect(libp2p._dht.isStarted()).to.equal(false)

     await libp2p._dht.start()
-    expect(libp2p._dht.isStarted).to.equal(true)
+    expect(libp2p._dht.isStarted()).to.equal(true)
   })
 })
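The only behavioural change in these assertions is that the DHT's isStarted is now a method rather than a boolean property. A small illustrative lifecycle check (create and customOptions stand for the helpers used in these tests):

async function checkDhtLifecycle () {
  const libp2p = await create(customOptions)

  // isStarted() is now invoked as a method, not read as a property
  console.log(libp2p._dht.isStarted()) // false before start

  await libp2p.start()
  console.log(libp2p._dht.isStarted()) // true while running

  await libp2p.stop()
  console.log(libp2p._dht.isStarted()) // false again
}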
@@ -60,8 +60,8 @@ describe('DHT subsystem operates correctly', () => {
       expect(connection).to.exist()

       return Promise.all([
-        pWaitFor(() => libp2p._dht.routingTable.size === 1),
-        pWaitFor(() => remoteLibp2p._dht.routingTable.size === 1)
+        pWaitFor(() => libp2p._dht._lan._routingTable.size === 1),
+        pWaitFor(() => remoteLibp2p._dht._lan._routingTable.size === 1)
       ])
     })

@@ -71,14 +71,14 @@ describe('DHT subsystem operates correctly', () => {

       await libp2p.dialProtocol(remAddr, subsystemMulticodecs)
       await Promise.all([
-        pWaitFor(() => libp2p._dht.routingTable.size === 1),
-        pWaitFor(() => remoteLibp2p._dht.routingTable.size === 1)
+        pWaitFor(() => libp2p._dht._lan._routingTable.size === 1),
+        pWaitFor(() => remoteLibp2p._dht._lan._routingTable.size === 1)
       ])

       await libp2p.contentRouting.put(key, value)
-      const fetchedValue = await remoteLibp2p.contentRouting.get(key)

-      expect(fetchedValue).to.eql(value)
+      const fetchedValue = await remoteLibp2p.contentRouting.get(key)
+      expect(fetchedValue).to.have.property('val').that.equalBytes(value)
     })
   })

@@ -119,11 +119,13 @@ describe('DHT subsystem operates correctly', () => {
       const connection = await libp2p.dial(remAddr)

       expect(connection).to.exist()
-      expect(libp2p._dht.routingTable.size).to.be.eql(0)
-      expect(remoteLibp2p._dht.routingTable.size).to.be.eql(0)
+      expect(libp2p._dht._lan._routingTable.size).to.be.eql(0)

       await remoteLibp2p._dht.start()
-      return pWaitFor(() => libp2p._dht.routingTable.size === 1)
+      // should be 0 directly after start - TODO this may be susceptible to timing bugs, we should have
+      // the ability to report stats on the DHT routing table instead of reaching into its heart like this
+      expect(remoteLibp2p._dht._lan._routingTable.size).to.be.eql(0)
+      return pWaitFor(() => libp2p._dht._lan._routingTable.size === 1)
     })

     it('should put on a peer and get from the other', async () => {

@@ -133,12 +135,12 @@ describe('DHT subsystem operates correctly', () => {
       const value = uint8ArrayFromString('world')

       await remoteLibp2p._dht.start()
-      await pWaitFor(() => libp2p._dht.routingTable.size === 1)
+      await pWaitFor(() => libp2p._dht._lan._routingTable.size === 1)

       await libp2p.contentRouting.put(key, value)

       const fetchedValue = await remoteLibp2p.contentRouting.get(key)
-      expect(fetchedValue).to.eql(value)
+      expect(fetchedValue).to.have.property('val').that.equalBytes(value)
     })
   })
 })
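Besides the routing-table path moving under the LAN DHT (_dht._lan._routingTable), the updated assertions show that contentRouting.get now resolves to a record-like object whose val property carries the stored bytes, rather than to the bytes themselves. A hedged sketch of the new calling convention (key and value are Uint8Arrays prepared as in the tests):

async function putAndGet (libp2p, remoteLibp2p, key, value) {
  await libp2p.contentRouting.put(key, value)

  // get() resolves to an object; the bytes live on `val`
  const record = await remoteLibp2p.contentRouting.get(key)

  return record.val // Uint8Array expected to equal `value`
}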
@@ -1,7 +1,6 @@
 'use strict'

 const KadDht = require('libp2p-kad-dht')
-const { multicodec } = require('libp2p-kad-dht')
 const Crypto = require('../../../src/insecure/plaintext')
 const Muxer = require('libp2p-mplex')
 const Transport = require('libp2p-tcp')

@@ -25,13 +24,12 @@ const subsystemOptions = mergeOptions(baseOptions, {
   config: {
     dht: {
       kBucketSize: 20,
-      randomWalk: {
-        enabled: true
-      },
       enabled: true
     }
   }
 })

 module.exports.subsystemOptions = subsystemOptions
-module.exports.subsystemMulticodecs = [multicodec]
+module.exports.subsystemMulticodecs = [
+  '/ipfs/lan/kad/1.0.0'
+]

@@ -13,9 +13,6 @@ const routingOptions = mergeOptions(baseOptions, {
   config: {
     dht: {
       kBucketSize: 20,
-      randomWalk: {
-        enabled: true
-      },
       enabled: true
     }
   }
@@ -296,7 +296,7 @@ describe('keychain', () => {
     it('requires plain data as a Uint8Array', async () => {
       const err = await ks.cms.encrypt(rsaKeyName, 'plain data').then(fail, err => err)
       expect(err).to.exist()
-      expect(err).to.have.property('code', 'ERR_INVALID_PARAMS')
+      expect(err).to.have.property('code', 'ERR_INVALID_PARAMETERS')
     })

     it('encrypts', async () => {

@@ -308,7 +308,7 @@ describe('keychain', () => {
     it('is a PKCS #7 message', async () => {
       const err = await ks.cms.decrypt('not CMS').then(fail, err => err)
       expect(err).to.exist()
-      expect(err).to.have.property('code', 'ERR_INVALID_PARAMS')
+      expect(err).to.have.property('code', 'ERR_INVALID_PARAMETERS')
     })

     it('is a PKCS #7 binary message', async () => {
@@ -3,9 +3,6 @@

 const { expect } = require('aegir/utils/chai')
 const sinon = require('sinon')

-const { EventEmitter } = require('events')
-
 const { randomBytes } = require('libp2p-crypto')
 const duplexPair = require('it-pair/duplex')
 const pipe = require('it-pipe')

@@ -34,8 +31,7 @@ describe('Metrics', () => {
     const [local, remote] = duplexPair()
     const metrics = new Metrics({
       computeThrottleMaxQueueSize: 1, // compute after every message
-      movingAverageIntervals: [10, 100, 1000],
-      connectionManager: new EventEmitter()
+      movingAverageIntervals: [10, 100, 1000]
     })

     metrics.trackStream({

@@ -70,8 +66,7 @@ describe('Metrics', () => {
     const [local, remote] = duplexPair()
     const metrics = new Metrics({
       computeThrottleMaxQueueSize: 1, // compute after every message
-      movingAverageIntervals: [10, 100, 1000],
-      connectionManager: new EventEmitter()
+      movingAverageIntervals: [10, 100, 1000]
     })

     metrics.trackStream({

@@ -119,8 +114,7 @@ describe('Metrics', () => {
     const [local2, remote2] = duplexPair()
     const metrics = new Metrics({
       computeThrottleMaxQueueSize: 1, // compute after every message
-      movingAverageIntervals: [10, 100, 1000],
-      connectionManager: new EventEmitter()
+      movingAverageIntervals: [10, 100, 1000]
     })
     const protocol = '/echo/1.0.0'
     metrics.start()

@@ -175,8 +169,7 @@ describe('Metrics', () => {
     const [local, remote] = duplexPair()
     const metrics = new Metrics({
       computeThrottleMaxQueueSize: 1, // compute after every message
-      movingAverageIntervals: [10, 100, 1000],
-      connectionManager: new EventEmitter()
+      movingAverageIntervals: [10, 100, 1000]
     })
     metrics.start()

@@ -231,8 +224,7 @@ describe('Metrics', () => {
     }))

     const metrics = new Metrics({
-      maxOldPeersRetention: 5, // Only keep track of 5
-      connectionManager: new EventEmitter()
+      maxOldPeersRetention: 5 // Only keep track of 5
     })

     // Clone so trackedPeers isn't modified

@@ -262,4 +254,22 @@ describe('Metrics', () => {
       expect(spy).to.have.property('callCount', 1)
     }
   })
+
+  it('should allow components to track metrics', () => {
+    const metrics = new Metrics({
+      maxOldPeersRetention: 5 // Only keep track of 5
+    })
+
+    expect(metrics.getComponentMetrics()).to.be.empty()
+
+    const component = 'my-component'
+    const metric = 'some-metric'
+    const value = 1
+
+    metrics.updateComponentMetric(component, metric, value)
+
+    expect(metrics.getComponentMetrics()).to.have.lengthOf(1)
+    expect(metrics.getComponentMetrics().get(component)).to.have.lengthOf(1)
+    expect(metrics.getComponentMetrics().get(component).get(metric)).to.equal(value)
+  })
 })
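The new test covers the per-component metrics support: a component reports a value with updateComponentMetric and the collected values come back from getComponentMetrics as a Map keyed by component name. A hedged usage sketch based only on the calls exercised above (metrics stands for the node's Metrics instance, e.g. libp2p.metrics when metrics are enabled):

function reportComponentMetric (metrics) {
  // record the latest value of 'some-metric' for 'my-component'
  metrics.updateComponentMetric('my-component', 'some-metric', 1)

  // getComponentMetrics() returns Map<component, Map<metric, value>>
  const perComponent = metrics.getComponentMetrics()

  return perComponent.get('my-component').get('some-metric') // 1
}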
@@ -244,6 +244,10 @@ describe('Nat Manager (TCP)', () => {
   })

   it('shuts the nat api down when stopping', async function () {
+    if (process.env.CI) {
+      return this.skip('CI environments will not let us map external ports')
+    }
+
     function findRoutableAddress () {
       const interfaces = networkInterfaces()

@@ -261,7 +265,7 @@ describe('Nat Manager (TCP)', () => {

     if (!addr) {
       // skip test if no non-loopback address is found
-      this.skip()
+      return this.skip()
     }

     const {
@@ -161,20 +161,13 @@ describe('peer discovery scenarios', () => {
         autoDial: false
       },
       dht: {
-        randomWalk: {
-          enabled: false,
-          delay: 1000, // start the first query quickly
-          interval: 10000,
-          timeout: 5000
-        },
         enabled: true
       }
     }
   })

   const localConfig = getConfig(peerId)
-  // Only run random walk on our local node
-  localConfig.config.dht.randomWalk.enabled = true
   libp2p = new Libp2p(localConfig)

   const remoteLibp2p1 = new Libp2p(getConfig(remotePeerId1))
@@ -36,7 +36,7 @@ describe('peer-routing', () => {
     it('.findPeer should return an error', async () => {
       await expect(node.peerRouting.findPeer('a cid'))
         .to.eventually.be.rejected()
-        .and.to.have.property('code', 'NO_ROUTERS_AVAILABLE')
+        .and.to.have.property('code', 'ERR_NO_ROUTERS_AVAILABLE')
     })

     it('.getClosestPeers should return an error', async () => {

@@ -45,7 +45,7 @@ describe('peer-routing', () => {
         throw new Error('.getClosestPeers should return an error')
       } catch (/** @type {any} */ err) {
         expect(err).to.exist()
-        expect(err.code).to.equal('NO_ROUTERS_AVAILABLE')
+        expect(err.code).to.equal('ERR_NO_ROUTERS_AVAILABLE')
       }
     })
   })

@@ -72,33 +72,38 @@ describe('peer-routing', () => {

     after(() => Promise.all(nodes.map((n) => n.stop())))

-    it('should use the nodes dht', () => {
-      const deferred = pDefer()
-
-      sinon.stub(nodes[0]._dht, 'findPeer').callsFake(() => {
-        deferred.resolve()
-        return nodes[1].peerId
-      })
-
-      nodes[0].peerRouting.findPeer()
-      return deferred.promise
-    })
-
-    it('should use the nodes dht to get the closest peers', async () => {
-      const deferred = pDefer()
-      const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })
-
-      sinon.stub(nodes[0]._dht, 'getClosestPeers').callsFake(function * () {
-        deferred.resolve()
-        yield {
-          id: remotePeerId,
-          multiaddrs: []
-        }
-      })
-
-      await nodes[0].peerRouting.getClosestPeers().next()
-
-      return deferred.promise
+    it('should use the nodes dht', async () => {
+      sinon.stub(nodes[0]._dht, 'findPeer').callsFake(async function * () {
+        yield {
+          name: 'PEER_RESPONSE',
+          closer: [{
+            id: nodes[1].peerId,
+            multiaddrs: []
+          }]
+        }
+      })
+
+      expect(nodes[0]._dht.findPeer.called).to.be.false()
+      await nodes[0].peerRouting.findPeer(nodes[1].peerId)
+      expect(nodes[0]._dht.findPeer.called).to.be.true()
+      nodes[0]._dht.findPeer.restore()
+    })
+
+    it('should use the nodes dht to get the closest peers', async () => {
+      sinon.stub(nodes[0]._dht, 'getClosestPeers').callsFake(async function * () {
+        yield {
+          name: 'PEER_RESPONSE',
+          closer: [{
+            id: nodes[1].peerId,
+            multiaddrs: []
+          }]
+        }
+      })
+
+      expect(nodes[0]._dht.getClosestPeers.called).to.be.false()
+      await drain(nodes[0].peerRouting.getClosestPeers(nodes[1].peerId))
+      expect(nodes[0]._dht.getClosestPeers.called).to.be.true()
+      nodes[0]._dht.getClosestPeers.restore()
     })

     it('should error when peer tries to find itself', async () => {
@@ -106,6 +111,95 @@ describe('peer-routing', () => {
         .to.eventually.be.rejected()
         .and.to.have.property('code', 'ERR_FIND_SELF')
     })
+
+    it('should handle error thrown synchronously during find peer', async () => {
+      const unknownPeers = await peerUtils.createPeerId({ number: 1, fixture: false })
+
+      nodes[0].peerRouting._routers = [{
+        findPeer () {
+          throw new Error('Thrown sync')
+        }
+      }]
+
+      await expect(nodes[0].peerRouting.findPeer(unknownPeers[0]))
+        .to.eventually.be.rejected()
+        .and.to.have.property('code', 'ERR_NOT_FOUND')
+    })
+
+    it('should handle error thrown asynchronously during find peer', async () => {
+      const unknownPeers = await peerUtils.createPeerId({ number: 1, fixture: false })
+
+      nodes[0].peerRouting._routers = [{
+        async findPeer () {
+          throw new Error('Thrown async')
+        }
+      }]
+
+      await expect(nodes[0].peerRouting.findPeer(unknownPeers[0]))
+        .to.eventually.be.rejected()
+        .and.to.have.property('code', 'ERR_NOT_FOUND')
+    })
+
+    it('should handle error thrown asynchronously after delay during find peer', async () => {
+      const unknownPeers = await peerUtils.createPeerId({ number: 1, fixture: false })
+
+      nodes[0].peerRouting._routers = [{
+        async findPeer () {
+          await delay(100)
+          throw new Error('Thrown async after delay')
+        }
+      }]
+
+      await expect(nodes[0].peerRouting.findPeer(unknownPeers[0]))
+        .to.eventually.be.rejected()
+        .and.to.have.property('code', 'ERR_NOT_FOUND')
+    })
+
+    it('should return value when one router errors synchronously and another returns a value', async () => {
+      const [peer] = await peerUtils.createPeerId({ number: 1, fixture: false })
+
+      nodes[0].peerRouting._routers = [{
+        findPeer () {
+          throw new Error('Thrown sync')
+        }
+      }, {
+        async findPeer () {
+          return Promise.resolve({
+            id: peer,
+            multiaddrs: []
+          })
+        }
+      }]
+
+      await expect(nodes[0].peerRouting.findPeer(peer))
+        .to.eventually.deep.equal({
+          id: peer,
+          multiaddrs: []
+        })
+    })
+
+    it('should return value when one router errors asynchronously and another returns a value', async () => {
+      const [peer] = await peerUtils.createPeerId({ number: 1, fixture: false })
+
+      nodes[0].peerRouting._routers = [{
+        async findPeer () {
+          throw new Error('Thrown sync')
+        }
+      }, {
+        async findPeer () {
+          return Promise.resolve({
+            id: peer,
+            multiaddrs: []
+          })
+        }
+      }]
+
+      await expect(nodes[0].peerRouting.findPeer(peer))
+        .to.eventually.deep.equal({
+          id: peer,
+          multiaddrs: []
+        })
+    })
   })

   describe('via delegate router', () => {
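Taken together, the added cases document the contract for multiple configured peer routers: a router that throws, synchronously or asynchronously, is tolerated as long as another router resolves a result, and only when every router fails or returns nothing does findPeer reject with ERR_NOT_FOUND. A hedged sketch of that contract from a caller's point of view (error-code string as asserted above):

async function findWithFallback (node, peerId) {
  try {
    // queries every configured router; the first usable answer wins
    const { id, multiaddrs } = await node.peerRouting.findPeer(peerId)
    return { id, multiaddrs }
  } catch (err) {
    if (err.code === 'ERR_NOT_FOUND') {
      // every router errored or came back empty
      return null
    }
    throw err
  }
}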
@@ -145,36 +239,35 @@ describe('peer-routing', () => {
     })

     it('should use the delegate router to find peers', async () => {
-      const deferred = pDefer()
       const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })

       sinon.stub(delegate, 'findPeer').callsFake(() => {
-        deferred.resolve()
         return {
           id: remotePeerId,
           multiaddrs: []
         }
       })

-      await node.peerRouting.findPeer()
-      return deferred.promise
+      expect(delegate.findPeer.called).to.be.false()
+      await node.peerRouting.findPeer(remotePeerId)
+      expect(delegate.findPeer.called).to.be.true()
+      delegate.findPeer.restore()
     })

     it('should use the delegate router to get the closest peers', async () => {
-      const deferred = pDefer()
       const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })

       sinon.stub(delegate, 'getClosestPeers').callsFake(function * () {
-        deferred.resolve()
         yield {
           id: remotePeerId,
           multiaddrs: []
         }
       })

-      await node.peerRouting.getClosestPeers().next()
-      return deferred.promise
+      expect(delegate.getClosestPeers.called).to.be.false()
+      await drain(node.peerRouting.getClosestPeers(remotePeerId))
+      expect(delegate.getClosestPeers.called).to.be.true()
+      delegate.getClosestPeers.restore()
     })

     it('should be able to find a peer', async () => {

@@ -200,7 +293,7 @@ describe('peer-routing', () => {
     })

     it('should error when a peer cannot be found', async () => {
-      const peerKey = 'key of a peer not on the network'
+      const peerId = await PeerId.create({ keyType: 'ed25519' })
       const mockApi = nock('http://0.0.0.0:60197')
         .post('/api/v0/dht/findpeer')
         .query(true)

@@ -209,20 +302,20 @@ describe('peer-routing', () => {
           'X-Chunked-Output', '1'
         ])

-      await expect(node.peerRouting.findPeer(peerKey))
+      await expect(node.peerRouting.findPeer(peerId))
         .to.eventually.be.rejected()

       expect(mockApi.isDone()).to.equal(true)
     })

     it('should handle errors from the api', async () => {
-      const peerKey = 'key of a peer not on the network'
+      const peerId = await PeerId.create({ keyType: 'ed25519' })
       const mockApi = nock('http://0.0.0.0:60197')
         .post('/api/v0/dht/findpeer')
         .query(true)
         .reply(502)

-      await expect(node.peerRouting.findPeer(peerKey))
+      await expect(node.peerRouting.findPeer(peerId))
         .to.eventually.be.rejected()

       expect(mockApi.isDone()).to.equal(true)

@@ -230,7 +323,6 @@ describe('peer-routing', () => {

     it('should be able to get the closest peers', async () => {
       const peerId = await PeerId.create({ keyType: 'ed25519' })
-
       const closest1 = '12D3KooWLewYMMdGWAtuX852n4rgCWkK7EBn4CWbwwBzhsVoKxk3'
       const closest2 = '12D3KooWDtoQbpKhtnWddfj72QmpFvvLDTsBLTFkjvgQm6cde2AK'

@@ -249,15 +341,12 @@ describe('peer-routing', () => {
           'X-Chunked-Output', '1'
         ])

-      const closestPeers = []
-      for await (const peer of node.peerRouting.getClosestPeers(peerId.id, { timeout: 1000 })) {
-        closestPeers.push(peer)
-      }
+      const closestPeers = await all(node.peerRouting.getClosestPeers(peerId.id, { timeout: 1000 }))

       expect(closestPeers).to.have.length(2)
-      expect(closestPeers[0].id.toB58String()).to.equal(closest2)
+      expect(closestPeers[0].id.toB58String()).to.equal(closest1)
       expect(closestPeers[0].multiaddrs).to.have.lengthOf(2)
-      expect(closestPeers[1].id.toB58String()).to.equal(closest1)
+      expect(closestPeers[1].id.toB58String()).to.equal(closest2)
       expect(closestPeers[1].multiaddrs).to.have.lengthOf(2)
       expect(mockApi.isDone()).to.equal(true)
     })
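getClosestPeers is still an async generator, so the test now collects its output with it-all instead of a hand-rolled for await loop, and the expected order of the two fixture peers changes with it. A small equivalent sketch of the collection pattern (it-all required in the CommonJS style used by these tests):

const all = require('it-all')

async function closestPeerIds (node, key) {
  // getClosestPeers yields { id, multiaddrs } entries
  const peers = await all(node.peerRouting.getClosestPeers(key, { timeout: 1000 }))

  return peers.map(peer => peer.id.toB58String())
}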
@@ -316,7 +405,7 @@ describe('peer-routing', () => {
         multiaddrs: []
       }

-      sinon.stub(node._dht, 'findPeer').callsFake(() => {})
+      sinon.stub(node._dht, 'findPeer').callsFake(async function * () {})
       sinon.stub(delegate, 'findPeer').callsFake(() => {
         return results
       })

@@ -334,7 +423,8 @@ describe('peer-routing', () => {

       const defer = pDefer()

-      sinon.stub(node._dht, 'findPeer').callsFake(async () => {
+      sinon.stub(node._dht, 'findPeer').callsFake(async function * () {
+        yield
         await defer.promise
       })
       sinon.stub(delegate, 'findPeer').callsFake(() => {

@@ -349,29 +439,34 @@ describe('peer-routing', () => {

     it('should not wait for the delegate to return if the dht does first', async () => {
       const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })
-      const results = {
+      const result = {
         id: remotePeerId,
         multiaddrs: []
       }

       const defer = pDefer()

-      sinon.stub(node._dht, 'findPeer').callsFake(() => {
-        return results
+      sinon.stub(node._dht, 'findPeer').callsFake(async function * () {
+        yield {
+          name: 'PEER_RESPONSE',
+          closer: [
+            result
+          ]
+        }
       })
       sinon.stub(delegate, 'findPeer').callsFake(async () => {
         await defer.promise
       })

       const peer = await node.peerRouting.findPeer(remotePeerId)
-      expect(peer).to.eql(results)
+      expect(peer).to.eql(result)

       defer.resolve()
     })

     it('should store the addresses of the found peer', async () => {
       const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })
-      const results = {
+      const result = {
         id: remotePeerId,
         multiaddrs: [
           new Multiaddr('/ip4/123.123.123.123/tcp/38982')

@@ -380,14 +475,19 @@ describe('peer-routing', () => {

       const spy = sinon.spy(node.peerStore.addressBook, 'add')

-      sinon.stub(node._dht, 'findPeer').callsFake(() => {
-        return results
+      sinon.stub(node._dht, 'findPeer').callsFake(async function * () {
+        yield {
+          name: 'PEER_RESPONSE',
+          closer: [
+            result
+          ]
+        }
       })
       sinon.stub(delegate, 'findPeer').callsFake(() => {})

       await node.peerRouting.findPeer(remotePeerId)

-      expect(spy.calledWith(results.id, results.multiaddrs)).to.be.true()
+      expect(spy.calledWith(result.id, result.multiaddrs)).to.be.true()
     })

     it('should use the delegate if the dht fails to get the closest peer', async () => {

@@ -487,8 +587,18 @@ describe('peer-routing', () => {

       sinon.spy(node.peerStore.addressBook, 'add')
       sinon.stub(node._dht, 'getClosestPeers').callsFake(function * () {
-        yield results[0]
-        yield results[1]
+        yield {
+          name: 'PEER_RESPONSE',
+          closer: [
+            results[0]
+          ]
+        }
+        yield {
+          name: 'PEER_RESPONSE',
+          closer: [
+            results[1]
+          ]
+        }
       })

       await node.start()

@@ -522,7 +632,7 @@ describe('peer-routing', () => {
         started: false
       })

-      sinon.stub(node._dht, 'getClosestPeers').callsFake(function * () {
+      sinon.stub(node._dht, 'getClosestPeers').callsFake(async function * () {
         yield
         throw new Error('should not be called')
       })
@@ -13,9 +13,6 @@ const routingOptions = mergeOptions(baseOptions, {
   config: {
     dht: {
       kBucketSize: 20,
-      randomWalk: {
-        enabled: true
-      },
       enabled: true
     }
   }
@@ -13,6 +13,7 @@ const { Multiaddr } = require('multiaddr')
 const mockUpgrader = require('../utils/mockUpgrader')
 const sinon = require('sinon')
 const Peers = require('../fixtures/peers')
+const pWaitFor = require('p-wait-for')
 const addrs = [
   new Multiaddr('/ip4/127.0.0.1/tcp/0'),
   new Multiaddr('/ip4/127.0.0.1/tcp/0')

@@ -72,9 +73,13 @@ describe('Transport Manager (TCP)', () => {
     tm.add(Transport.prototype[Symbol.toStringTag], Transport)
     await tm.listen(addrs)

-    // Should create Self Peer record on new listen address
-    signedPeerRecord = await tm.libp2p.peerStore.addressBook.getPeerRecord(localPeer)
-    expect(signedPeerRecord).to.exist()
+    // Should create Self Peer record on new listen address, but it is done async
+    // with no event so we have to wait a bit
+    await pWaitFor(async () => {
+      signedPeerRecord = await tm.libp2p.peerStore.addressBook.getPeerRecord(localPeer)
+
+      return signedPeerRecord != null
+    }, { interval: 100, timeout: 2000 })

     const record = PeerRecord.createFromProtobuf(signedPeerRecord.payload)
     expect(record).to.exist()
@@ -7,7 +7,7 @@
     "libp2p": "file:../..",
     "libp2p-bootstrap": "^0.13.0",
     "libp2p-delegated-content-routing": "^0.11.0",
-    "libp2p-delegated-peer-routing": "^0.10.0",
+    "libp2p-delegated-peer-routing": "^0.11.1",
     "libp2p-gossipsub": "^0.9.0",
    "libp2p-interfaces": "^1.0.1",
    "libp2p-kad-dht": "^0.26.5",

@@ -123,11 +123,6 @@ async function main() {
       dht: {
         enabled: true,
         kBucketSize: 20,
-        randomWalk: {
-          enabled: true, // Allows to disable discovery (enabled by default)
-          interval: 300e3,
-          timeout: 10e3
-        },
         clientMode: true,
         validators: {
           pk: Libp2pRecord.validator.validators.pk
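The example keeps working because the randomWalk options are no longer part of the DHT configuration, so the whole block is simply dropped. A trimmed sketch of the example's dht section after the change (only this section is shown; the values are copied from the hunk above and the rest of the example's config continues unchanged):

// dht options after the change - the randomWalk block is gone,
// everything else from the example is untouched
const dhtConfig = {
  enabled: true,
  kBucketSize: 20,
  clientMode: true
  // validators / selectors from the original example follow here unchanged
}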