Mirror of https://github.com/fluencelabs/js-libp2p — synced 2025-07-16 00:51:57 +00:00

Compare commits (15 commits)

28f52bbf75, ed5f8f853f, 0a6bc0d101, b5c9e48b68, 9942cbd50c, 037c965a67, 748b552876, 961b48bb8d, 000826db21, 45c33675a7, a28c878f4a, 67067c97d5, f45cd1c4b5, 0a02207116, 0b854a949f
@@ -31,6 +31,9 @@ const before = async () => {
        enabled: true,
        active: false
      }
    },
+   nat: {
+     enabled: false
+   }
  }
})
@@ -45,7 +48,7 @@ const after = async () => {
}

module.exports = {
- bundlesize: { maxSize: '225kB' },
+ bundlesize: { maxSize: '215kB' },
  hooks: {
    pre: before,
    post: after
37  .github/workflows/main.yml (vendored)

@@ -71,4 +71,39 @@ jobs:
    steps:
      - uses: actions/checkout@v2
      - run: yarn
      - run: cd examples && yarn && npm run test -- chat
  test-connection-encryption-example:
    needs: check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - run: yarn
      - run: cd examples && yarn && npm run test -- connection-encryption
  test-echo-example:
    needs: check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - run: yarn
      - run: cd examples && yarn && npm run test -- echo
  test-libp2p-in-the-browser-example:
    needs: check
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v2
      - run: yarn
      - run: cd examples && yarn && npm run test -- libp2p-in-the-browser
  test-discovery-mechanisms-example:
    needs: check
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v2
      - run: yarn
      - run: cd examples && yarn && npm run test -- discovery-mechanisms
  test-pnet-example:
    needs: check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - run: yarn
      - run: cd examples && yarn && npm run test -- pnet
22  CHANGELOG.md

@@ -1,3 +1,25 @@
## [0.30.4](https://github.com/libp2p/js-libp2p/compare/v0.30.3...v0.30.4) (2021-01-27)

### Features

* add UPnP NAT manager ([#810](https://github.com/libp2p/js-libp2p/issues/810)) ([0a6bc0d](https://github.com/libp2p/js-libp2p/commit/0a6bc0d1013dfd80ab600e8f74c1544b433ece29))

## [0.30.3](https://github.com/libp2p/js-libp2p/compare/v0.30.2...v0.30.3) (2021-01-27)

## [0.30.2](https://github.com/libp2p/js-libp2p/compare/v0.30.1...v0.30.2) (2021-01-21)

### Bug Fixes

* store multiaddrs during content and peer routing queries ([#865](https://github.com/libp2p/js-libp2p/issues/865)) ([45c3367](https://github.com/libp2p/js-libp2p/commit/45c33675a7412c66d0fd4e113ef8506077b6f492))

## [0.30.1](https://github.com/libp2p/js-libp2p/compare/v0.30.0...v0.30.1) (2021-01-18)
@@ -2055,6 +2055,15 @@ This event will be triggered anytime we are disconnected from another peer, rega
- `peerId`: instance of [`PeerId`][peer-id]
- `protocols`: array of known, supported protocols for the peer (string identifiers)

### libp2p.addressManager

#### Our addresses have changed

This could be in response to a peer telling us about addresses they have observed, or
the NatManager performing NAT hole punching.

`libp2p.addressManager.on('change:addresses', () => {})`
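A minimal sketch of consuming this event (assuming a started `libp2p` node; the handler body is illustrative, not part of the API):

```js
// Log the node's currently advertised addresses whenever they change,
// e.g. after the NatManager maps a port or a peer reports an observed address.
libp2p.addressManager.on('change:addresses', () => {
  console.log('advertised addresses:', libp2p.multiaddrs.map((ma) => ma.toString()))
})
```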
## Types

### Stats
@@ -28,6 +28,9 @@
- [Configuring Metrics](#configuring-metrics)
- [Configuring PeerStore](#configuring-peerstore)
- [Customizing Transports](#customizing-transports)
- [Configuring the NAT Manager](#configuring-the-nat-manager)
  - [Browser support](#browser-support)
  - [UPnP and NAT-PMP](#upnp-and-nat-pmp)
- [Configuration examples](#configuration-examples)

## Overview
@@ -733,6 +736,40 @@ const node = await Libp2p.create({
})
```

#### Configuring the NAT Manager

Network Address Translation (NAT) is a function performed by your router to enable multiple devices on your local network to share a single IPv4 address. It's done transparently for outgoing connections, ensuring the correct response traffic is routed to your computer, but if you wish to accept incoming connections some configuration is necessary.

The NAT manager can be configured as follows:

```js
const node = await Libp2p.create({
  config: {
    nat: {
      description: 'my-node', // set as the port mapping description on the router, defaults to the current libp2p version and your peer id
      enabled: true, // defaults to true
      gateway: '192.168.1.1', // leave unset to auto-discover
      externalIp: '80.1.1.1', // leave unset to auto-discover
      ttl: 7200, // TTL for port mappings (min 20 minutes)
      keepAlive: true, // refresh the port mapping when the TTL expires
      pmp: {
        enabled: false // defaults to false
      }
    }
  }
})
```

##### Browser support

Browsers cannot open TCP ports or send the UDP datagrams necessary to configure external port mapping. To accept incoming connections in the browser, please use a WebRTC transport.
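As an illustrative sketch only (the module names and the public signalling server address are assumptions, not part of this changeset), a browser node would typically swap TCP for a WebRTC transport so that peers can dial it without any port mapping:

```js
const Libp2p = require('libp2p')
const WebRTCStar = require('libp2p-webrtc-star')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('libp2p-noise')

// Hypothetical browser configuration: listen via a webrtc-star signalling server,
// since UPnP/NAT-PMP port mapping is not available in browsers.
const node = await Libp2p.create({
  addresses: {
    listen: ['/dns4/wrtc-star1.par.dwebops.pub/tcp/443/wss/p2p-webrtc-star']
  },
  modules: {
    transport: [WebRTCStar],
    streamMuxer: [Mplex],
    connEncryption: [NOISE]
  }
})
```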
##### UPnP and NAT-PMP

By default under nodejs libp2p will attempt to use [UPnP](https://en.wikipedia.org/wiki/Universal_Plug_and_Play) to configure your router to allow incoming connections to any TCP transports that have been configured.

[NAT-PMP](http://miniupnp.free.fr/nat-pmp.html) is a feature of some modern routers which performs a similar job to UPnP. NAT-PMP is disabled by default; if enabled, libp2p will try to use NAT-PMP and will fall back to UPnP if it fails.
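For example, a minimal sketch (based only on the `config.nat` options documented above) that opts in to NAT-PMP while keeping UPnP available as the fallback:

```js
const node = await Libp2p.create({
  config: {
    nat: {
      enabled: true, // UPnP remains the fallback
      pmp: {
        enabled: true // try NAT-PMP first
      }
    }
  }
})
```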
## Configuration examples

As libp2p is designed to be a modular networking library, its usage will vary based on individual project needs. We've included links to some existing project configurations for your reference, in case you wish to replicate their configuration:
@@ -1,6 +1,6 @@
'use strict'

-const Libp2p = require('../../')
+const Libp2p = require('../..')
const TCP = require('libp2p-tcp')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('libp2p-noise')
@@ -1,4 +1,4 @@
-# Encrypted Communications
+# Connection Encryption

libp2p can leverage the encrypted communications from the transports it uses (i.e. WebRTC). To ensure that every connection is encrypted, independently of how it was set up, libp2p also supports a set of modules that encrypt every communication established.
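A minimal sketch of wiring a connection-encryption module into a node (assuming the `libp2p-tcp`, `libp2p-mplex` and `libp2p-noise` modules already used by this example):

```js
'use strict'

const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('libp2p-noise')

// Every connection this node establishes or accepts is encrypted with NOISE.
async function createNode () {
  return Libp2p.create({
    addresses: { listen: ['/ip4/0.0.0.0/tcp/0'] },
    modules: {
      transport: [TCP],
      streamMuxer: [Mplex],
      connEncryption: [NOISE]
    }
  })
}

module.exports = createNode
```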
30  examples/connection-encryption/test.js (new file)

@@ -0,0 +1,30 @@
'use strict'

const path = require('path')
const execa = require('execa')
const pDefer = require('p-defer')
const uint8ArrayToString = require('uint8arrays/to-string')

async function test () {
  const messageReceived = pDefer()
  process.stdout.write('1.js\n')

  const proc = execa('node', [path.join(__dirname, '1.js')], {
    cwd: path.resolve(__dirname),
    all: true
  })

  proc.all.on('data', async (data) => {
    process.stdout.write(data)

    const s = uint8ArrayToString(data)
    if (s.includes('This information is sent out encrypted to the other peer')) {
      messageReceived.resolve()
    }
  })

  await messageReceived.promise
  proc.kill()
}

module.exports = test
@@ -7,15 +7,7 @@ const Mplex = require('libp2p-mplex')
const { NOISE } = require('libp2p-noise')
const Bootstrap = require('libp2p-bootstrap')

-// Find this list at: https://github.com/ipfs/js-ipfs/blob/master/src/core/runtime/config-nodejs.json
-const bootstrapers = [
-  '/ip4/104.131.131.82/tcp/4001/p2p/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ',
-  '/dnsaddr/bootstrap.libp2p.io/p2p/QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJN',
-  '/dnsaddr/bootstrap.libp2p.io/p2p/QmbLHAnMoJPWSCR5Zhtx6BHJX9KiKNN6tpvbUcqanj75Nb',
-  '/dnsaddr/bootstrap.libp2p.io/p2p/QmZa1sAxajnQjVM8WjWXoMbmPd7NsWhfKsPkErzpm9wGkp',
-  '/dnsaddr/bootstrap.libp2p.io/p2p/QmQCU2EcMqAqQPR2i9bChDtGNJchTbq5TbXJJ16u19uLTa',
-  '/dnsaddr/bootstrap.libp2p.io/p2p/QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt'
-]
+const bootstrapers = require('./bootstrapers')

;(async () => {
  const node = await Libp2p.create({
13  examples/discovery-mechanisms/bootstrapers.js (vendored, new file)

@@ -0,0 +1,13 @@
'use strict'

// Find this list at: https://github.com/ipfs/js-ipfs/blob/master/src/core/runtime/config-nodejs.json
const bootstrapers = [
  '/ip4/104.131.131.82/tcp/4001/p2p/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ',
  '/dnsaddr/bootstrap.libp2p.io/p2p/QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJN',
  '/dnsaddr/bootstrap.libp2p.io/p2p/QmbLHAnMoJPWSCR5Zhtx6BHJX9KiKNN6tpvbUcqanj75Nb',
  '/dnsaddr/bootstrap.libp2p.io/p2p/QmZa1sAxajnQjVM8WjWXoMbmPd7NsWhfKsPkErzpm9wGkp',
  '/dnsaddr/bootstrap.libp2p.io/p2p/QmQCU2EcMqAqQPR2i9bChDtGNJchTbq5TbXJJ16u19uLTa',
  '/dnsaddr/bootstrap.libp2p.io/p2p/QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt'
]

module.exports = bootstrapers

42  examples/discovery-mechanisms/test-1.js (new file)

@@ -0,0 +1,42 @@
'use strict'

const path = require('path')
const execa = require('execa')
const pWaitFor = require('p-wait-for')
const uint8ArrayToString = require('uint8arrays/to-string')
const bootstrapers = require('./bootstrapers')

const discoveredCopy = 'Discovered:'
const connectedCopy = 'Connection established to:'

async function test () {
  const discoveredNodes = []
  const connectedNodes = []

  process.stdout.write('1.js\n')

  const proc = execa('node', [path.join(__dirname, '1.js')], {
    cwd: path.resolve(__dirname),
    all: true
  })

  proc.all.on('data', async (data) => {
    process.stdout.write(data)
    const line = uint8ArrayToString(data)

    // Discovered or Connected
    if (line.includes(discoveredCopy)) {
      const id = line.trim().split(discoveredCopy)[1]
      discoveredNodes.push(id)
    } else if (line.includes(connectedCopy)) {
      const id = line.trim().split(connectedCopy)[1]
      connectedNodes.push(id)
    }
  })

  await pWaitFor(() => discoveredNodes.length === bootstrapers.length && connectedNodes.length === bootstrapers.length)

  proc.kill()
}

module.exports = test

35  examples/discovery-mechanisms/test-2.js (new file)

@@ -0,0 +1,35 @@
'use strict'

const path = require('path')
const execa = require('execa')
const pWaitFor = require('p-wait-for')
const uint8ArrayToString = require('uint8arrays/to-string')

const discoveredCopy = 'Discovered:'

async function test () {
  const discoveredNodes = []

  process.stdout.write('2.js\n')

  const proc = execa('node', [path.join(__dirname, '2.js')], {
    cwd: path.resolve(__dirname),
    all: true
  })

  proc.all.on('data', async (data) => {
    process.stdout.write(data)
    const line = uint8ArrayToString(data)

    if (line.includes(discoveredCopy)) {
      const id = line.trim().split(discoveredCopy)[1]
      discoveredNodes.push(id)
    }
  })

  await pWaitFor(() => discoveredNodes.length === 2)

  proc.kill()
}

module.exports = test

11  examples/discovery-mechanisms/test.js (new file)

@@ -0,0 +1,11 @@
'use strict'

const test1 = require('./test-1')
const test2 = require('./test-2')

async function test () {
  await test1()
  await test2()
}

module.exports = test
@@ -6,7 +6,7 @@
 */

const PeerId = require('peer-id')
-const createLibp2p = require('./libp2p-bundle')
+const createLibp2p = require('./libp2p')
const pipe = require('it-pipe')

async function run() {
@@ -6,7 +6,7 @@
 */

const PeerId = require('peer-id')
-const createLibp2p = require('./libp2p-bundle')
+const createLibp2p = require('./libp2p')
const pipe = require('it-pipe')

async function run() {
61  examples/echo/test.js (new file)

@@ -0,0 +1,61 @@
'use strict'

const path = require('path')
const execa = require('execa')
const pDefer = require('p-defer')
const uint8ArrayToString = require('uint8arrays/to-string')

function startProcess(name) {
  return execa('node', [path.join(__dirname, name)], {
    cwd: path.resolve(__dirname),
    all: true
  })
}

async function test () {
  const listenerReady = pDefer()
  const messageReceived = pDefer()

  // Step 1 process
  process.stdout.write('node listener.js\n')
  const listenerProc = startProcess('src/listener.js')
  listenerProc.all.on('data', async (data) => {
    process.stdout.write(data)
    const s = uint8ArrayToString(data)

    if (s.includes('Listener ready, listening on:')) {
      listenerReady.resolve()
    }
  })

  await listenerReady.promise
  process.stdout.write('==================================================================\n')

  // Step 2 process
  process.stdout.write('node dialer.js\n')
  const dialerProc = startProcess('src/dialer.js')
  dialerProc.all.on('data', async (data) => {
    process.stdout.write(data)
    const s = uint8ArrayToString(data)

    if (s.includes('received echo:')) {
      messageReceived.resolve()
    }
  })

  await messageReceived.promise
  process.stdout.write('echo message received\n')

  listenerProc.kill()
  dialerProc.kill()
  await Promise.all([
    listenerProc,
    dialerProc
  ]).catch((err) => {
    if (err.signal !== 'SIGTERM') {
      throw err
    }
  })
}

module.exports = test
@@ -8,6 +8,7 @@
  ],
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "build": "parcel build index.html",
    "start": "parcel index.html"
  },
  "keywords": [],
52  examples/libp2p-in-the-browser/test.js (new file)

@@ -0,0 +1,52 @@
'use strict'

const execa = require('execa')
const { chromium } = require('playwright');

async function run() {
  let url = ''
  const proc = execa('parcel', ['./index.html'], {
    preferLocal: true,
    localDir: __dirname,
    cwd: __dirname,
    all: true
  })

  proc.all.on('data', async (chunk) => {
    /**@type {string} */
    const out = chunk.toString()

    if (out.includes('Server running at')) {
      url = out.replace('Server running at ', '')
    }

    if (out.includes('✨ Built in ')) {
      try {
        const browser = await chromium.launch();
        const page = await browser.newPage();
        await page.goto(url);
        await page.waitForFunction(selector => document.querySelector(selector).innerText === 'libp2p started!', '#status')
        await page.waitForFunction(
          selector => {
            const text = document.querySelector(selector).innerText
            return text.includes('libp2p id is') &&
              text.includes('Found peer') &&
              text.includes('Connected to')
          },
          '#output',
          { timeout: 5000 }
        )
        await browser.close();

      } catch (err) {
        console.error(err)
        process.exit(1)
      } finally {
        proc.cancel()
      }
    }
  })

}

module.exports = run
@@ -12,5 +12,8 @@
    "fs-extra": "^8.1.0",
    "p-defer": "^3.0.0",
    "which": "^2.0.1"
  },
  "devDependencies": {
    "playwright": "^1.7.1"
  }
}
30  examples/pnet/test.js (new file)

@@ -0,0 +1,30 @@
'use strict'

const path = require('path')
const execa = require('execa')
const pDefer = require('p-defer')
const uint8ArrayToString = require('uint8arrays/to-string')

async function test () {
  const messageReceived = pDefer()
  process.stdout.write('index.js\n')

  const proc = execa('node', [path.join(__dirname, 'index.js')], {
    cwd: path.resolve(__dirname),
    all: true
  })

  proc.all.on('data', async (data) => {
    process.stdout.write(data)

    const s = uint8ArrayToString(data)
    if (s.includes('This message is sent on a private network')) {
      messageReceived.resolve()
    }
  })

  await messageReceived.promise
  proc.kill()
}

module.exports = test
@@ -22,7 +22,6 @@ async function testExample (dir) {
  await installDeps(dir)
  await build(dir)
  await runTest(dir)
-  // TODO: add browser test setup
}

async function installDeps (dir) {
@@ -89,7 +88,7 @@ async function runTest (dir) {
    return
  }

-  const runTest = require(testFile)
+  const test = require(testFile)

-  await runTest()
+  await test()
}
78  package.json

@@ -1,6 +1,6 @@
{
  "name": "libp2p",
-  "version": "0.30.1",
+  "version": "0.30.4",
  "description": "JavaScript implementation of libp2p, a modular peer to peer network stack",
  "leadMaintainer": "Jacob Heun <jacobheun@gmail.com>",
  "main": "src/index.js",
@@ -50,48 +50,61 @@
    "node": ">=12.0.0",
    "npm": ">=6.0.0"
  },
  "browser": {
    "@motrix/nat-api": false
  },
  "dependencies": {
    "@motrix/nat-api": "^0.3.1",
    "abort-controller": "^3.0.0",
-    "aggregate-error": "^3.0.1",
-    "any-signal": "^1.1.0",
-    "bignumber.js": "^9.0.0",
-    "cids": "^1.0.0",
+    "aggregate-error": "^3.1.0",
+    "any-signal": "^2.1.1",
+    "bignumber.js": "^9.0.1",
+    "cids": "^1.1.5",
    "class-is": "^1.1.0",
-    "debug": "^4.1.1",
+    "debug": "^4.3.1",
    "err-code": "^2.0.0",
-    "events": "^3.1.0",
+    "events": "^3.2.0",
    "hashlru": "^2.3.0",
-    "interface-datastore": "^2.0.0",
-    "ipfs-utils": "^5.0.1",
-    "it-all": "^1.0.1",
+    "interface-datastore": "^3.0.3",
+    "ipfs-utils": "^6.0.0",
+    "it-all": "^1.0.4",
    "it-buffer": "^0.1.2",
-    "it-handshake": "^1.0.1",
-    "it-length-prefixed": "^3.0.1",
    "it-drain": "^1.0.3",
    "it-filter": "^1.0.1",
    "it-first": "^1.0.4",
+    "it-handshake": "^1.0.2",
+    "it-length-prefixed": "^3.1.0",
    "it-map": "^1.0.4",
    "it-merge": "1.0.0",
    "it-pipe": "^1.1.0",
    "it-protocol-buffers": "^0.2.0",
-    "libp2p-crypto": "^0.18.0",
    "it-take": "1.0.0",
+    "libp2p-crypto": "^0.19.0",
    "libp2p-interfaces": "^0.8.1",
    "libp2p-utils": "^0.2.2",
    "mafmt": "^8.0.0",
-    "merge-options": "^2.0.0",
+    "merge-options": "^3.0.4",
    "moving-average": "^1.0.0",
    "multiaddr": "^8.1.0",
-    "multicodec": "^2.0.0",
+    "multicodec": "^2.1.0",
    "multihashing-async": "^2.0.1",
    "multistream-select": "^1.0.0",
    "mutable-proxy": "^1.0.0",
-    "node-forge": "^0.9.1",
+    "node-forge": "^0.10.0",
    "p-any": "^3.0.0",
    "p-fifo": "^1.0.0",
    "p-retry": "^4.2.0",
    "p-settle": "^4.0.1",
    "peer-id": "^0.14.2",
    "private-ip": "^2.0.0",
    "promisify-es6": "^1.0.3",
    "protons": "^2.0.0",
    "retimer": "^2.0.0",
    "sanitize-filename": "^1.6.3",
    "set-delayed-interval": "^1.0.0",
    "streaming-iterables": "^5.0.2",
    "timeout-abort-controller": "^1.1.1",
-    "varint": "^5.0.0",
+    "varint": "^6.0.0",
    "xsalsa20": "^1.0.2"
  },
  "devDependencies": {
@@ -100,20 +113,20 @@
    "aegir": "^29.2.0",
    "chai-bytes": "^0.1.2",
    "chai-string": "^1.5.0",
-    "delay": "^4.3.0",
+    "delay": "^4.4.0",
    "interop-libp2p": "^0.3.0",
    "into-stream": "^6.0.0",
-    "ipfs-http-client": "^47.0.1",
+    "ipfs-http-client": "^48.2.2",
    "it-concat": "^1.0.0",
    "it-pair": "^1.0.0",
    "it-pushable": "^1.4.0",
    "libp2p": ".",
    "libp2p-bootstrap": "^0.12.0",
-    "libp2p-delegated-content-routing": "^0.8.0",
+    "libp2p-delegated-content-routing": "^0.9.0",
    "libp2p-delegated-peer-routing": "^0.8.0",
    "libp2p-floodsub": "^0.24.0",
-    "libp2p-gossipsub": "^0.7.0",
-    "libp2p-kad-dht": "^0.20.0",
+    "libp2p-gossipsub": "^0.8.0",
+    "libp2p-kad-dht": "^0.20.5",
    "libp2p-mdns": "^0.15.0",
    "libp2p-mplex": "^0.10.1",
    "libp2p-noise": "^2.0.0",
@@ -125,11 +138,10 @@
    "nock": "^13.0.3",
    "p-defer": "^3.0.0",
    "p-times": "^3.0.0",
-    "p-wait-for": "^3.1.0",
-    "promisify-es6": "^1.0.3",
+    "p-wait-for": "^3.2.0",
    "rimraf": "^3.0.2",
-    "sinon": "^9.0.2",
-    "uint8arrays": "^1.1.0"
+    "sinon": "^9.2.4",
+    "uint8arrays": "^2.0.5"
  },
  "contributors": [
    "David Dias <daviddias.p@gmail.com>",
@@ -150,11 +162,11 @@
    "Andrew Nesbitt <andrewnez@gmail.com>",
    "Giovanni T. Parra <fiatjaf@gmail.com>",
    "Ryan Bell <ryan@piing.net>",
    "Samlior <samlior@foxmail.com>",
    "Thomas Eizinger <thomas@eizinger.io>",
    "ᴠɪᴄᴛᴏʀ ʙᴊᴇʟᴋʜᴏʟᴍ <victorbjelkholm@gmail.com>",
    "Didrik Nordström <didrik@betamos.se>",
    "Fei Liu <liu.feiwood@gmail.com>",
    "Ethan Lam <elmemphis2000@gmail.com>",
    "Irakli Gozalishvili <rfobic@gmail.com>",
    "Joel Gustafson <joelg@mit.edu>",
    "Julien Bouquillon <contact@revolunet.com>",
    "Kevin Kwok <antimatter15@gmail.com>",
@@ -162,7 +174,7 @@
    "Dmitriy Ryajov <dryajov@gmail.com>",
    "RasmusErik Voel Jensen <github@solsort.com>",
    "Diogo Silva <fsdiogo@gmail.com>",
    "Samlior <samlior@foxmail.com>",
    "isan_rivkin <isanrivkin@gmail.com>",
    "Smite Chow <xiaopengyou@live.com>",
    "Soeren <nikorpoulsen@gmail.com>",
    "Sönke Hahn <soenkehahn@gmail.com>",
@@ -173,13 +185,13 @@
    "robertkiel <robert.kiel@validitylabs.org>",
    "Cindy Wu <ciindy.wu@gmail.com>",
    "Chris Bratlien <chrisbratlien@gmail.com>",
    "ebinks <elizabethjbinks@gmail.com>",
    "Florian-Merle <florian.david.merle@gmail.com>",
    "Francis Gulotta <wizard@roborooter.com>",
    "Felipe Martins <felipebrasil93@gmail.com>",
    "Bernd Strehl <bernd.strehl@gmail.com>",
    "ebinks <elizabethjbinks@gmail.com>",
    "Henrique Dias <hacdias@gmail.com>",
    "isan_rivkin <isanrivkin@gmail.com>",
    "Irakli Gozalishvili <rfobic@gmail.com>"
    "Bernd Strehl <bernd.strehl@gmail.com>",
    "Fei Liu <liu.feiwood@gmail.com>",
    "Ethan Lam <elmemphis2000@gmail.com>"
  ]
}
@@ -1,6 +1,10 @@
'use strict'

+/** @typedef {import('../types').EventEmitterFactory} Events */
+/** @type Events */
+const EventEmitter = require('events')
const multiaddr = require('multiaddr')
+const PeerId = require('peer-id')

/**
 * @typedef {import('multiaddr')} Multiaddr
@@ -11,7 +15,11 @@ const multiaddr = require('multiaddr')
 * @property {string[]} [listen = []] - list of multiaddrs string representation to listen.
 * @property {string[]} [announce = []] - list of multiaddrs string representation to announce.
 */
-class AddressManager {
-
+/**
+ * @fires AddressManager#change:addresses Emitted when a addresses change.
+ */
+class AddressManager extends EventEmitter {
  /**
   * Responsible for managing the peer addresses.
   * Peers can specify their listen and announce addresses.
@@ -19,11 +27,18 @@ class AddressManager {
   * while the announce addresses will be used for the peer addresses' to other peers in the network.
   *
   * @class
-   * @param {AddressManagerOptions} [options]
+   * @param {PeerId} peerId - The Peer ID of the node
+   * @param {object} [options]
+   * @param {Array<string>} [options.listen = []] - list of multiaddrs string representation to listen.
+   * @param {Array<string>} [options.announce = []] - list of multiaddrs string representation to announce.
   */
-  constructor ({ listen = [], announce = [] } = {}) {
-    this.listen = new Set(listen)
-    this.announce = new Set(announce)
+  constructor (peerId, { listen = [], announce = [] } = {}) {
+    super()
+
+    this.peerId = peerId
+    this.listen = new Set(listen.map(ma => ma.toString()))
+    this.announce = new Set(announce.map(ma => ma.toString()))
+    this.observed = new Set()
  }

  /**
@@ -43,6 +58,45 @@ class AddressManager {
  getAnnounceAddrs () {
    return Array.from(this.announce).map((a) => multiaddr(a))
  }

+  /**
+   * Get observed multiaddrs.
+   *
+   * @returns {Array<Multiaddr>}
+   */
+  getObservedAddrs () {
+    return Array.from(this.observed).map((a) => multiaddr(a))
+  }
+
+  /**
+   * Add peer observed addresses
+   *
+   * @param {string | Multiaddr} addr
+   */
+  addObservedAddr (addr) {
+    let ma = multiaddr(addr)
+    const remotePeer = ma.getPeerId()
+
+    // strip our peer id if it has been passed
+    if (remotePeer) {
+      const remotePeerId = PeerId.createFromB58String(remotePeer)
+
+      // use same encoding for comparison
+      if (remotePeerId.equals(this.peerId)) {
+        ma = ma.decapsulate(multiaddr(`/p2p/${this.peerId}`))
+      }
+    }
+
+    const addrString = ma.toString()
+
+    // do not trigger the change:addresses event if we already know about this address
+    if (this.observed.has(addrString)) {
+      return
+    }
+
+    this.observed.add(addrString)
+    this.emit('change:addresses')
+  }
}

module.exports = AddressManager
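An illustrative usage sketch of the new observed-address behaviour (the require path and throwaway peer id are assumptions for demonstration, not part of this changeset):

```js
const PeerId = require('peer-id')
const AddressManager = require('libp2p/src/address-manager') // hypothetical direct import

async function demo () {
  const peerId = await PeerId.create({ bits: 512 }) // hypothetical throwaway id
  const am = new AddressManager(peerId, { listen: ['/ip4/0.0.0.0/tcp/0'] })

  am.on('change:addresses', () => console.log('addresses changed'))

  // Our own /p2p/<id> suffix is stripped and duplicates are ignored,
  // so these three calls record one observed address and emit one event.
  am.addObservedAddr('/ip4/1.2.3.4/tcp/4001')
  am.addObservedAddr('/ip4/1.2.3.4/tcp/4001')
  am.addObservedAddr(`/ip4/1.2.3.4/tcp/4001/p2p/${peerId.toB58String()}`)

  console.log(am.getObservedAddrs().map(ma => ma.toString())) // [ '/ip4/1.2.3.4/tcp/4001' ]
}

demo()
```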
@@ -12,7 +12,7 @@ const TextEncoder = require('ipfs-utils/src/text-encoder')
 * @returns {Promise<CID>}
 */
module.exports.namespaceToCid = async (namespace) => {
-  const bytes = new TextEncoder('utf8').encode(namespace)
+  const bytes = new TextEncoder().encode(namespace)
  const hash = await multihashing(bytes, 'sha2-256')

  return new CID(hash)
@@ -59,6 +59,16 @@ const DefaultConfig = {
      timeout: 10e3
    }
  },
+  nat: {
+    enabled: true,
+    ttl: 7200,
+    keepAlive: true,
+    gateway: null,
+    externalIp: null,
+    pmp: {
+      enabled: false
+    }
+  },
  peerDiscovery: {
    autoDial: true
  },
@@ -160,7 +160,7 @@ class ConnectionManager extends EventEmitter {
      }
    }

-    await tasks
+    await Promise.all(tasks)
    this.connections.clear()
  }
@@ -5,8 +5,6 @@
 * This code is based on `latency-monitor` (https://github.com/mlucool/latency-monitor) by `mlucool` (https://github.com/mlucool), available under Apache License 2.0 (https://github.com/mlucool/latency-monitor/blob/master/LICENSE)
 */

-/* global window */
-const globalThis = require('ipfs-utils/src/globalthis')
+/** @typedef {import('../types').EventEmitterFactory} Events */
+/** @type Events */
const EventEmitter = require('events')
@@ -74,9 +72,9 @@ class LatencyMonitor extends EventEmitter {
    that.asyncTestFn = asyncTestFn // If there is no asyncFn, we measure latency

    // If process: use high resolution timer
-    if (globalThis.process && globalThis.process.hrtime) {
+    if (globalThis.process && globalThis.process.hrtime) { // eslint-disable-line no-undef
      debug('Using process.hrtime for timing')
-      that.now = globalThis.process.hrtime
+      that.now = globalThis.process.hrtime // eslint-disable-line no-undef
      that.getDeltaMS = (startTime) => {
        const hrtime = that.now(startTime)
        return (hrtime[0] * 1000) + (hrtime[1] / 1000000)
@@ -1,10 +1,16 @@
'use strict'

const errCode = require('err-code')
-const { messages, codes } = require('./errors')
+const { messages, codes } = require('../errors')
+const {
+  storeAddresses,
+  uniquePeers,
+  requirePeers,
+  maybeLimitSource
+} = require('./utils')

-const all = require('it-all')
-const pAny = require('p-any')
+const merge = require('it-merge')
+const { pipe } = require('it-pipe')

/**
 * @typedef {import('peer-id')} PeerId
@@ -21,22 +27,21 @@ const pAny = require('p-any')
class ContentRouting {
  /**
   * @class
-   * @param {import('./')} libp2p
+   * @param {import('..')} libp2p
   */
  constructor (libp2p) {
    this.libp2p = libp2p
    this.routers = libp2p._modules.contentRouting || []
    this.dht = libp2p._dht

-    // If we have the dht, make it first
+    // If we have the dht, add it to the available content routers
    if (this.dht) {
-      this.routers.unshift(this.dht)
+      this.routers.push(this.dht)
    }
  }

  /**
-   * Iterates over all content routers in series to find providers of the given key.
-   * Once a content router succeeds, iteration will stop.
+   * Iterates over all content routers in parallel to find providers of the given key.
   *
   * @param {CID} key - The CID key of the content to find
   * @param {object} [options]
@@ -44,25 +49,20 @@ class ContentRouting {
   * @param {number} [options.maxNumProviders] - maximum number of providers to find
   * @returns {AsyncIterable<{ id: PeerId, multiaddrs: Multiaddr[] }>}
   */
-  async * findProviders (key, options) {
+  async * findProviders (key, options = {}) {
    if (!this.routers.length) {
      throw errCode(new Error('No content this.routers available'), 'NO_ROUTERS_AVAILABLE')
    }

-    const result = await pAny(
-      this.routers.map(async (router) => {
-        const provs = await all(router.findProviders(key, options))
-
-        if (!provs || !provs.length) {
-          throw errCode(new Error('not found'), 'NOT_FOUND')
-        }
-        return provs
-      })
+    yield * pipe(
+      merge(
+        ...this.routers.map(router => router.findProviders(key, options))
+      ),
+      (source) => storeAddresses(source, this.libp2p.peerStore),
+      (source) => uniquePeers(source),
+      (source) => maybeLimitSource(source, options.maxNumProviders),
+      (source) => requirePeers(source)
    )
-
-    for (const peer of result) {
-      yield peer
-    }
  }

  /**
89  src/content-routing/utils.js (new file)

@@ -0,0 +1,89 @@
'use strict'

const errCode = require('err-code')
const filter = require('it-filter')
const map = require('it-map')
const take = require('it-take')

/**
 * @typedef {import('peer-id')} PeerId
 * @typedef {import('multiaddr')} Multiaddr
 */

/**
 * Store the multiaddrs from every peer in the passed peer store
 *
 * @param {AsyncIterable<{ id: PeerId, multiaddrs: Multiaddr[] }>} source
 * @param {import('../peer-store')} peerStore
 */
function storeAddresses (source, peerStore) {
  return map(source, (peer) => {
    // ensure we have the addresses for a given peer
    peerStore.addressBook.add(peer.id, peer.multiaddrs)

    return peer
  })
}

/**
 * Filter peers by unique peer id
 *
 * @param {AsyncIterable<{ id: PeerId, multiaddrs: Multiaddr[] }>} source
 */
function uniquePeers (source) {
  /** @type Set<string> */
  const seen = new Set()

  return filter(source, (peer) => {
    // dedupe by peer id
    if (seen.has(peer.id.toString())) {
      return false
    }

    seen.add(peer.id.toString())

    return true
  })
}

/**
 * Require at least `min` peers to be yielded from `source`
 *
 * @param {AsyncIterable<{ id: PeerId, multiaddrs: Multiaddr[] }>} source
 * @param {number} min
 */
async function * requirePeers (source, min = 1) {
  let seen = 0

  for await (const peer of source) {
    seen++

    yield peer
  }

  if (seen < min) {
    throw errCode(new Error('not found'), 'NOT_FOUND')
  }
}

/**
 * If `max` is passed, only take that number of peers from the source
 * otherwise take all the peers
 *
 * @param {AsyncIterable<{ id: PeerId, multiaddrs: Multiaddr[] }>} source
 * @param {number} [max]
 */
function maybeLimitSource (source, max) {
  if (max) {
    return take(source, max)
  }

  return source
}

module.exports = {
  storeAddresses,
  uniquePeers,
  requirePeers,
  maybeLimitSource
}
@@ -2,7 +2,7 @@

const errCode = require('err-code')
const AbortController = require('abort-controller').default
-const anySignal = require('any-signal')
+const { anySignal } = require('any-signal')
const FIFO = require('p-fifo')
const pAny = require('p-any')

@@ -67,7 +67,7 @@ class DialRequest {
      let conn
      try {
        const signal = dialAbortControllers[i].signal
-        conn = await this.dialAction(addr, { ...options, signal: anySignal([signal, options.signal]) })
+        conn = await this.dialAction(addr, { ...options, signal: options.signal ? anySignal([signal, options.signal]) : signal })
        // Remove the successful AbortController so it is not aborted
        dialAbortControllers.splice(i, 1)
      } finally {
@@ -7,7 +7,7 @@ const log = Object.assign(debug('libp2p:dialer'), {
const errCode = require('err-code')
const multiaddr = require('multiaddr')
const TimeoutController = require('timeout-abort-controller')
-const anySignal = require('any-signal')
+const { anySignal } = require('any-signal')

const DialRequest = require('./dial-request')
const { publicAddressesFirst } = require('libp2p-utils/src/address-sort')
@@ -43,6 +43,7 @@ class IdentifyService {
  constructor ({ libp2p }) {
    this._libp2p = libp2p
    this.peerStore = libp2p.peerStore
+    this.addressManager = libp2p.addressManager
    this.connectionManager = libp2p.connectionManager
    this.peerId = libp2p.peerId

@@ -201,8 +202,9 @@ class IdentifyService {
    this.peerStore.protoBook.set(id, protocols)
    this.peerStore.metadataBook.set(id, 'AgentVersion', uint8ArrayFromString(message.agentVersion))

-    // TODO: Track our observed address so that we can score it
+    // TODO: Score our observed addr
    log('received observed address of %s', observedAddr)
+    this.addressManager.addObservedAddr(observedAddr)
  }

  /**
51  src/index.js

@@ -7,10 +7,10 @@ const log = Object.assign(debug('libp2p'), {
/** @typedef {import('./types').EventEmitterFactory} Events */
/** @type Events */
const EventEmitter = require('events')
-const globalThis = require('ipfs-utils/src/globalthis')

const errCode = require('err-code')
const PeerId = require('peer-id')
+const multiaddr = require('multiaddr')

const PeerRouting = require('./peer-routing')
const ContentRouting = require('./content-routing')
@@ -34,6 +34,8 @@ const Registrar = require('./registrar')
const ping = require('./ping')
const IdentifyService = require('./identify')
const IDENTIFY_PROTOCOLS = IdentifyService.multicodecs
+const NatManager = require('./nat-manager')
+const { updateSelfPeerRecord } = require('./record/utils')

/**
 * @typedef {import('multiaddr')} Multiaddr
@@ -134,7 +136,14 @@ class Libp2p extends EventEmitter {

    // Addresses {listen, announce, noAnnounce}
    this.addresses = this._options.addresses
-    this.addressManager = new AddressManager(this._options.addresses)
+    this.addressManager = new AddressManager(this.peerId, this._options.addresses)
+
+    // when addresses change, update our peer record
+    this.addressManager.on('change:addresses', () => {
+      updateSelfPeerRecord(this).catch(err => {
+        log.error('Error updating self peer record', err)
+      })
+    })

    this._modules = this._options.modules
    this._config = this._options.config
@@ -188,6 +197,14 @@ class Libp2p extends EventEmitter {
      faultTolerance: this._options.transportManager.faultTolerance
    })

+    // Create the Nat Manager
+    this.natManager = new NatManager({
+      peerId: this.peerId,
+      addressManager: this.addressManager,
+      transportManager: this.transportManager,
+      ...this._options.config.nat
+    })
+
    // Create the Registrar
    this.registrar = new Registrar({
      peerStore: this.peerStore,
@@ -243,7 +260,7 @@ class Libp2p extends EventEmitter {
    // Attach private network protector
    if (this._modules.connProtector) {
      this.upgrader.protector = this._modules.connProtector
-    } else if (globalThis.process !== undefined && globalThis.process.env && globalThis.process.env.LIBP2P_FORCE_PNET) {
+    } else if (globalThis.process !== undefined && globalThis.process.env && globalThis.process.env.LIBP2P_FORCE_PNET) { // eslint-disable-line no-undef
      throw new Error('Private network is enforced, but no protector was provided')
    }

@@ -351,6 +368,7 @@ class Libp2p extends EventEmitter {
      this.metrics && this.metrics.stop()
    ])

+    await this.natManager.stop()
    await this.transportManager.close()

    ping.unmount(this)
@@ -446,22 +464,32 @@ class Libp2p extends EventEmitter {
  }

  /**
-   * Get peer advertising multiaddrs by concating the addresses used
-   * by transports to listen with the announce addresses.
-   * Duplicated addresses and noAnnounce addresses are filtered out.
+   * Get a deduplicated list of peer advertising multiaddrs by concatenating
+   * the listen addresses used by transports with any configured
+   * announce addresses as well as observed addresses reported by peers.
+   *
+   * If Announce addrs are specified, configured listen addresses will be
+   * ignored though observed addresses will still be included.
   *
   * @returns {Multiaddr[]}
   */
  get multiaddrs () {
-    const announceAddrs = this.addressManager.getAnnounceAddrs()
-    if (announceAddrs.length) {
-      return announceAddrs
+    let addrs = this.addressManager.getAnnounceAddrs().map(ma => ma.toString())
+
+    if (!addrs.length) {
+      // no configured announce addrs, add configured listen addresses
+      addrs = this.transportManager.getAddrs().map(ma => ma.toString())
    }

+    addrs = addrs.concat(this.addressManager.getObservedAddrs().map(ma => ma.toString()))
+
    const announceFilter = this._options.addresses.announceFilter || ((multiaddrs) => multiaddrs)

+    // dedupe multiaddrs
+    const addrSet = new Set(addrs)
+
    // Create advertising list
-    return announceFilter(this.transportManager.getAddrs())
+    return announceFilter(Array.from(addrSet).map(str => multiaddr(str)))
  }

  /**
@@ -540,6 +568,9 @@ class Libp2p extends EventEmitter {
    const addrs = this.addressManager.getListenAddrs()
    await this.transportManager.listen(addrs)

+    // Manage your NATs
+    this.natManager.start()
+
    // Start PeerStore
    await this.peerStore.start()
@@ -4,10 +4,9 @@
const sanitize = require('sanitize-filename')
const mergeOptions = require('merge-options')
const crypto = require('libp2p-crypto')
-const Datastore = require('interface-datastore')
+const { Key } = require('interface-datastore')
const CMS = require('./cms')
const errcode = require('err-code')
-const { Number } = require('ipfs-utils/src/globalthis')
const uint8ArrayToString = require('uint8arrays/to-string')
const uint8ArrayFromString = require('uint8arrays/from-string')

@@ -15,7 +14,7 @@ require('node-forge/lib/sha512')

/**
 * @typedef {import('peer-id')} PeerId
-id * @typedef {import('interface-datastore/src/key')} Key
+ * @typedef {import('interface-datastore/src/types').Datastore} Datastore
 */

const keyPrefix = '/pkcs8/'
@@ -72,7 +71,7 @@ async function throwDelayed (err) {
 * @private
 */
function DsName (name) {
-  return new Datastore.Key(keyPrefix + name)
+  return new Key(keyPrefix + name)
}

/**
@@ -83,7 +82,7 @@ function DsName (name) {
 * @private
 */
function DsInfoName (name) {
-  return new Datastore.Key(infoPrefix + name)
+  return new Key(infoPrefix + name)
}

/**
168  src/nat-manager.js (new file)

@@ -0,0 +1,168 @@
'use strict'

const NatAPI = require('@motrix/nat-api')
const debug = require('debug')
const promisify = require('promisify-es6')
const Multiaddr = require('multiaddr')
const log = Object.assign(debug('libp2p:nat'), {
  error: debug('libp2p:nat:err')
})
const { isBrowser } = require('ipfs-utils/src/env')
const retry = require('p-retry')
const isPrivateIp = require('private-ip')
const pkg = require('../package.json')
const errcode = require('err-code')
const {
  codes: { ERR_INVALID_PARAMETERS }
} = require('./errors')
const isLoopback = require('libp2p-utils/src/multiaddr/is-loopback')

/**
 * @typedef {import('peer-id')} PeerId
 * @typedef {import('./transport-manager')} TransportManager
 * @typedef {import('./address-manager')} AddressManager
 */

function highPort (min = 1024, max = 65535) {
  return Math.floor(Math.random() * (max - min + 1) + min)
}

const DEFAULT_TTL = 7200

class NatManager {
  /**
   * @class
   * @param {object} options
   * @param {PeerId} options.peerId - The peer ID of the current node
   * @param {TransportManager} options.transportManager - A transport manager
   * @param {AddressManager} options.addressManager - An address manager
   * @param {boolean} options.enabled - Whether to enable the NAT manager
   * @param {string} [options.externalIp] - Pass a value to use instead of auto-detection
   * @param {string} [options.description] - A string value to use for the port mapping description on the gateway
   * @param {number} [options.ttl] - How long UPnP port mappings should last for in seconds (minimum 1200)
   * @param {boolean} [options.keepAlive] - Whether to automatically refresh UPnP port mappings when their TTL is reached
   * @param {string} [options.gateway] - Pass a value to use instead of auto-detection
   * @param {object} [options.pmp] - PMP options
   * @param {boolean} [options.pmp.enabled] - Whether to enable PMP as well as UPnP
   */
  constructor ({ peerId, addressManager, transportManager, ...options }) {
    this._peerId = peerId
    this._addressManager = addressManager
    this._transportManager = transportManager

    this._enabled = options.enabled
    this._externalIp = options.externalIp
    this._options = {
      description: options.description || `${pkg.name}@${pkg.version} ${this._peerId}`,
      ttl: options.ttl || DEFAULT_TTL,
      autoUpdate: options.keepAlive || true,
      gateway: options.gateway,
      enablePMP: Boolean(options.pmp && options.pmp.enabled)
    }

    if (this._options.ttl < DEFAULT_TTL) {
      throw errcode(new Error(`NatManager ttl should be at least ${DEFAULT_TTL} seconds`), ERR_INVALID_PARAMETERS)
    }
  }

  /**
   * Starts the NAT manager
   */
  start () {
    if (isBrowser || !this._enabled) {
      return
    }

    // done async to not slow down startup
    this._start().catch((err) => {
      // hole punching errors are non-fatal
      log.error(err)
    })
  }

  async _start () {
    const addrs = this._transportManager.getAddrs()

    for (const addr of addrs) {
      // try to open uPnP ports for each thin waist address
      const { family, host, port, transport } = addr.toOptions()

      if (!addr.isThinWaistAddress() || transport !== 'tcp') {
        // only bare tcp addresses
        continue
      }

      if (isLoopback(addr)) {
        continue
      }

      if (family !== 'ipv4') {
        // ignore ipv6
        continue
      }

      const client = this._getClient()
      const publicIp = this._externalIp || await client.externalIp()

      if (isPrivateIp(publicIp)) {
        throw new Error(`${publicIp} is private - please set config.nat.externalIp to an externally routable IP or ensure you are not behind a double NAT`)
      }

      const publicPort = highPort()

      log(`opening uPnP connection from ${publicIp}:${publicPort} to ${host}:${port}`)

      await client.map({
        publicPort,
        privatePort: port,
        protocol: transport.toUpperCase()
      })

      this._addressManager.addObservedAddr(Multiaddr.fromNodeAddress({
        family: 'IPv4',
        address: publicIp,
        port: `${publicPort}`
      }, transport))
    }
  }

  _getClient () {
    if (this._client) {
      return this._client
    }

    const client = new NatAPI(this._options)
    const map = promisify(client.map, { context: client })
    const destroy = promisify(client.destroy, { context: client })
    const externalIp = promisify(client.externalIp, { context: client })

    this._client = {
      // these are all network operations so add a retry
      map: (...args) => retry(() => map(...args), { onFailedAttempt: log.error }),
      destroy: (...args) => retry(() => destroy(...args), { onFailedAttempt: log.error }),
      externalIp: (...args) => retry(() => externalIp(...args), { onFailedAttempt: log.error })
    }

    return this._client
  }

  /**
   * Stops the NAT manager
   *
   * @async
   */
  async stop () {
    if (isBrowser || !this._client) {
      return
    }

    try {
      await this._client.destroy()
      this._client = null
    } catch (err) {
      log.error(err)
    }
  }
}

module.exports = NatManager
@@ -5,16 +5,24 @@ const log = Object.assign(debug('libp2p:peer-routing'), {
  error: debug('libp2p:peer-routing:err')
})
const errCode = require('err-code')
+const {
+  storeAddresses,
+  uniquePeers,
+  requirePeers
+} = require('./content-routing/utils')

-const all = require('it-all')
-const pAny = require('p-any')
+const merge = require('it-merge')
+const { pipe } = require('it-pipe')
+const first = require('it-first')
+const drain = require('it-drain')
+const filter = require('it-filter')
const {
  setDelayedInterval,
  clearDelayedInterval
} = require('set-delayed-interval')
const PeerId = require('peer-id')

/**
 * @typedef {import('peer-id')} PeerId
 * @typedef {import('multiaddr')} Multiaddr
 */
class PeerRouting {
@@ -27,9 +35,9 @@ class PeerRouting {
    this._peerStore = libp2p.peerStore
    this._routers = libp2p._modules.peerRouting || []

-    // If we have the dht, make it first
+    // If we have the dht, add it to the available peer routers
    if (libp2p._dht) {
-      this._routers.unshift(libp2p._dht)
+      this._routers.push(libp2p._dht)
    }

    this._refreshManagerOptions = libp2p._options.peerRouting.refreshManager
@@ -55,9 +63,8 @@ class PeerRouting {
   */
  async _findClosestPeersTask () {
    try {
-      for await (const { id, multiaddrs } of this.getClosestPeers(this._peerId.id)) {
-        this._peerStore.addressBook.add(id, multiaddrs)
-      }
+      // nb getClosestPeers adds the addresses to the address book
+      await drain(this.getClosestPeers(this._peerId.id))
    } catch (err) {
      log.error(err)
    }
@@ -71,7 +78,7 @@ class PeerRouting {
  }

  /**
-   * Iterates over all peer routers in series to find the given peer.
+   * Iterates over all peer routers in parallel to find the given peer.
   *
   * @param {PeerId} id - The id of the peer to find
   * @param {object} [options]
@@ -83,16 +90,20 @@ class PeerRouting {
      throw errCode(new Error('No peer routers available'), 'NO_ROUTERS_AVAILABLE')
    }

-    return pAny(this._routers.map(async (router) => {
-      const result = await router.findPeer(id, options)
+    const output = await pipe(
+      merge(
+        ...this._routers.map(router => [router.findPeer(id, options)])
+      ),
+      (source) => filter(source, Boolean),
+      (source) => storeAddresses(source, this._peerStore),
+      (source) => first(source)
+    )

-      // If we don't have a result, we need to provide an error to keep trying
-      if (!result || Object.keys(result).length === 0) {
-        throw errCode(new Error('not found'), 'NOT_FOUND')
-      }
+    if (output) {
+      return output
+    }

-      return result
-    }))
+    throw errCode(new Error('not found'), 'NOT_FOUND')
  }

  /**
@@ -108,20 +119,14 @@ class PeerRouting {
      throw errCode(new Error('No peer routers available'), 'NO_ROUTERS_AVAILABLE')
    }

-    const result = await pAny(
-      this._routers.map(async (router) => {
-        const peers = await all(router.getClosestPeers(key, options))
-
-        if (!peers || !peers.length) {
-          throw errCode(new Error('not found'), 'NOT_FOUND')
-        }
-        return peers
-      })
-    )
-
-    for (const peer of result) {
-      yield peer
-    }
+    yield * pipe(
+      merge(
+        ...this._routers.map(router => router.getClosestPeers(key, options))
+      ),
+      (source) => storeAddresses(source, this._peerStore),
+      (source) => uniquePeers(source),
+      (source) => requirePeers(source)
+    )
  }
}
@@ -63,9 +63,10 @@ module.exports.decodeV1PSK = (pskBuffer) => {
    const metadata = uint8ArrayToString(pskBuffer).split(/(?:\r\n|\r|\n)/g)
    const pskTag = metadata.shift()
    const codec = metadata.shift()
-    const psk = uint8ArrayFromString(metadata.shift(), 'base16')
+    const pskString = metadata.shift()
+    const psk = pskString && uint8ArrayFromString(pskString, 'base16')

-    if (psk.byteLength !== KEY_LENGTH) {
+    if (!psk || psk.byteLength !== KEY_LENGTH) {
      throw new Error(Errors.INVALID_PSK)
    }
@@ -3,23 +3,32 @@

const { expect } = require('aegir/utils/chai')
const multiaddr = require('multiaddr')
const PeerId = require('peer-id')

const AddressManager = require('../../src/address-manager')
const peerUtils = require('../utils/creators/peer')

const Peers = require('../fixtures/peers')

const listenAddresses = ['/ip4/127.0.0.1/tcp/15006/ws', '/ip4/127.0.0.1/tcp/15008/ws']
const announceAddreses = ['/dns4/peer.io']

describe('Address Manager', () => {
  let peerId

  before(async () => {
    peerId = await PeerId.createFromJSON(Peers[0])
  })

  it('should not need any addresses', () => {
    const am = new AddressManager()
    const am = new AddressManager(peerId)

    expect(am.listen.size).to.equal(0)
    expect(am.announce.size).to.equal(0)
  })

  it('should return listen multiaddrs on get', () => {
    const am = new AddressManager({
    const am = new AddressManager(peerId, {
      listen: listenAddresses
    })

@@ -33,7 +42,7 @@ describe('Address Manager', () => {
  })

  it('should return announce multiaddrs on get', () => {
    const am = new AddressManager({
    const am = new AddressManager(peerId, {
      listen: listenAddresses,
      announce: announceAddreses
    })
@@ -45,6 +54,75 @@ describe('Address Manager', () => {
    expect(announceMultiaddrs.length).to.equal(1)
    expect(announceMultiaddrs[0].equals(multiaddr(announceAddreses[0]))).to.equal(true)
  })

  it('should add observed addresses', () => {
    const am = new AddressManager(peerId)

    expect(am.observed).to.be.empty()

    am.addObservedAddr('/ip4/123.123.123.123/tcp/39201')

    expect(am.observed).to.have.property('size', 1)
  })

  it('should dedupe added observed addresses', () => {
    const ma = '/ip4/123.123.123.123/tcp/39201'
    const am = new AddressManager(peerId)

    expect(am.observed).to.be.empty()

    am.addObservedAddr(ma)
    am.addObservedAddr(ma)
    am.addObservedAddr(ma)

    expect(am.observed).to.have.property('size', 1)
    expect(am.observed).to.include(ma)
  })

  it('should only emit one change:addresses event', () => {
    const ma = '/ip4/123.123.123.123/tcp/39201'
    const am = new AddressManager(peerId)
    let eventCount = 0

    am.on('change:addresses', () => {
      eventCount++
    })

    am.addObservedAddr(ma)
    am.addObservedAddr(ma)
    am.addObservedAddr(ma)
    am.addObservedAddr(`${ma}/p2p/${peerId}`)
    am.addObservedAddr(`${ma}/p2p/${peerId.toB58String()}`)

    expect(eventCount).to.equal(1)
  })

  it('should strip our peer address from added observed addresses', () => {
    const ma = '/ip4/123.123.123.123/tcp/39201'
    const am = new AddressManager(peerId)

    expect(am.observed).to.be.empty()

    am.addObservedAddr(ma)
    am.addObservedAddr(`${ma}/p2p/${peerId}`)

    expect(am.observed).to.have.property('size', 1)
    expect(am.observed).to.include(ma)
  })

  it('should strip our peer address from added observed addresses in difference formats', () => {
    const ma = '/ip4/123.123.123.123/tcp/39201'
    const am = new AddressManager(peerId)

    expect(am.observed).to.be.empty()

    am.addObservedAddr(ma)
    am.addObservedAddr(`${ma}/p2p/${peerId}`) // base32 CID
    am.addObservedAddr(`${ma}/p2p/${peerId.toB58String()}`) // base58btc

    expect(am.observed).to.have.property('size', 1)
    expect(am.observed).to.include(ma)
  })
})

describe('libp2p.addressManager', () => {
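The recurring change in the hunks above is that AddressManager now receives the node's PeerId as its first constructor argument, which is what lets it strip our own /p2p/ peer id suffix from observed addresses. Below is a minimal usage sketch only; the require path and the Ed25519 key type are illustrative assumptions, not taken from this diff.

'use strict'

const PeerId = require('peer-id')
const AddressManager = require('libp2p/src/address-manager') // assumed install path

async function main () {
  const peerId = await PeerId.create({ keyType: 'Ed25519' })
  const am = new AddressManager(peerId, {
    listen: ['/ip4/127.0.0.1/tcp/15006/ws'],
    announce: ['/dns4/peer.io']
  })

  // listen and announce are exposed as sets, as the assertions above rely on
  console.log(am.listen.size, am.announce.size) // 1 1
  console.log(am.getListenAddrs().map(ma => ma.toString()))
}

main()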
@@ -147,4 +147,26 @@ describe('libp2p.multiaddrs', () => {
    expect(multiaddrs.includes(listenAddresses[0])).to.equal(false)
    expect(multiaddrs.includes(listenAddresses[1])).to.equal(false)
  })

  it('should include observed addresses in returned multiaddrs', async () => {
    [libp2p] = await peerUtils.createPeer({
      started: false,
      config: {
        ...AddressesOptions,
        addresses: {
          listen: listenAddresses
        }
      }
    })
    const ma = '/ip4/83.32.123.53/tcp/43928'

    await libp2p.start()

    expect(libp2p.multiaddrs).to.have.lengthOf(listenAddresses.length)

    libp2p.addressManager.addObservedAddr(ma)

    expect(libp2p.multiaddrs).to.have.lengthOf(listenAddresses.length + 1)
    expect(libp2p.multiaddrs.map(ma => ma.toString())).to.include(ma)
  })
})
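For context, the behaviour tested above can be reproduced on a full node: once started, an address passed to libp2p.addressManager.addObservedAddr() shows up in libp2p.multiaddrs. This is a rough sketch under assumed module choices (websockets transport, mplex, noise); the observed address value is illustrative.

'use strict'

const Libp2p = require('libp2p')
const Websockets = require('libp2p-websockets')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('libp2p-noise')

async function main () {
  const libp2p = await Libp2p.create({
    addresses: { listen: ['/ip4/127.0.0.1/tcp/0/ws'] },
    modules: { transport: [Websockets], streamMuxer: [Mplex], connEncryption: [NOISE] }
  })
  await libp2p.start()

  // an address another peer reported seeing us on (illustrative value)
  libp2p.addressManager.addObservedAddr('/ip4/83.32.123.53/tcp/43928')
  console.log(libp2p.multiaddrs.map(ma => ma.toString()))

  await libp2p.stop()
}

main()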
@@ -3,6 +3,7 @@

const { expect } = require('aegir/utils/chai')
const sinon = require('sinon')
const { CLOSED } = require('libp2p-interfaces/src/connection/status')

const delay = require('delay')
const pWaitFor = require('p-wait-for')
@@ -268,5 +269,40 @@ describe('libp2p.connections', () => {

    await libp2p.stop()
  })

  it('should be closed status once immediately stopping', async () => {
    const [libp2p] = await peerUtils.createPeer({
      config: {
        peerId: peerIds[0],
        addresses: {
          listen: ['/ip4/127.0.0.1/tcp/15003/ws']
        },
        modules: baseOptions.modules
      }
    })
    const [remoteLibp2p] = await peerUtils.createPeer({
      config: {
        peerId: peerIds[1],
        addresses: {
          listen: ['/ip4/127.0.0.1/tcp/15004/ws']
        },
        modules: baseOptions.modules
      }
    })

    libp2p.peerStore.addressBook.set(remoteLibp2p.peerId, remoteLibp2p.multiaddrs)
    await libp2p.dial(remoteLibp2p.peerId)

    const totalConns = Array.from(libp2p.connections.values())
    expect(totalConns.length).to.eql(1)
    const conns = totalConns[0]
    expect(conns.length).to.eql(1)
    const conn = conns[0]

    await libp2p.stop()
    expect(conn.stat.status).to.eql(CLOSED)

    await remoteLibp2p.stop()
  })
  })
})
@@ -12,6 +12,8 @@ const CID = require('cids')
const ipfsHttpClient = require('ipfs-http-client')
const DelegatedContentRouter = require('libp2p-delegated-content-routing')
const multiaddr = require('multiaddr')
const drain = require('it-drain')
const all = require('it-all')

const peerUtils = require('../utils/creators/peer')
const { baseOptions, routingOptions } = require('./utils')
@@ -78,10 +80,14 @@ describe('content-routing', () => {

  it('should use the nodes dht to find providers', async () => {
    const deferred = pDefer()
    const [providerPeerId] = await peerUtils.createPeerId({ fixture: false })

    sinon.stub(nodes[0]._dht, 'findProviders').callsFake(function * () {
      deferred.resolve()
      yield
      yield {
        id: providerPeerId,
        multiaddrs: []
      }
    })

    await nodes[0].contentRouting.findProviders().next()
@@ -138,10 +144,14 @@ describe('content-routing', () => {

  it('should use the delegate router to find providers', async () => {
    const deferred = pDefer()
    const [providerPeerId] = await peerUtils.createPeerId({ fixture: false })

    sinon.stub(delegate, 'findProviders').callsFake(function * () {
      deferred.resolve()
      yield
      yield {
        id: providerPeerId,
        multiaddrs: []
      }
    })

    await node.contentRouting.findProviders().next()
@@ -151,25 +161,36 @@ describe('content-routing', () => {

  it('should be able to register as a provider', async () => {
    const cid = new CID('QmU621oD8AhHw6t25vVyfYKmL9VV3PTgc52FngEhTGACFB')
    const mockApi = nock('http://0.0.0.0:60197')
      // mock the refs call
      .post('/api/v0/refs')
    const provider = 'QmZNgCqZCvTsi3B4Vt7gsSqpkqDpE7M2Y9TDmEhbDb4ceF'

    const mockBlockApi = nock('http://0.0.0.0:60197')
      // mock the block/stat call
      .post('/api/v0/block/stat')
      .query(true)
      .reply(200, null, [
      .reply(200, '{"Key":"QmU621oD8AhHw6t25vVyfYKmL9VV3PTgc52FngEhTGACFB","Size":"2169"}', [
        'Content-Type', 'application/json',
        'X-Chunked-Output', '1'
      ])
    const mockDhtApi = nock('http://0.0.0.0:60197')
      // mock the dht/provide call
      .post('/api/v0/dht/provide')
      .query(true)
      .reply(200, `{"Extra":"","ID":"QmWKqWXCtRXEeCQTo3FoZ7g4AfnGiauYYiczvNxFCHicbB","Responses":[{"Addrs":["/ip4/0.0.0.0/tcp/0"],"ID":"${provider}"}],"Type":4}\n`, [
        'Content-Type', 'application/json',
        'X-Chunked-Output', '1'
      ])

    await node.contentRouting.provide(cid)

    expect(mockApi.isDone()).to.equal(true)
    expect(mockBlockApi.isDone()).to.equal(true)
    expect(mockDhtApi.isDone()).to.equal(true)
  })

  it('should handle errors when registering as a provider', async () => {
    const cid = new CID('QmU621oD8AhHw6t25vVyfYKmL9VV3PTgc52FngEhTGACFB')
    const mockApi = nock('http://0.0.0.0:60197')
      // mock the refs call
      .post('/api/v0/refs')
      // mock the block/stat call
      .post('/api/v0/block/stat')
      .query(true)
      .reply(502, 'Bad Gateway', ['Content-Type', 'application/json'])

@@ -251,6 +272,110 @@ describe('content-routing', () => {

  afterEach(() => node.stop())

  it('should store the multiaddrs of a peer', async () => {
    const [providerPeerId] = await peerUtils.createPeerId({ fixture: false })
    const result = {
      id: providerPeerId,
      multiaddrs: [
        multiaddr('/ip4/123.123.123.123/tcp/49320')
      ]
    }

    sinon.stub(node._dht, 'findProviders').callsFake(function * () {})
    sinon.stub(delegate, 'findProviders').callsFake(function * () {
      yield result
    })

    expect(node.peerStore.addressBook.get(providerPeerId)).to.not.be.ok()

    await drain(node.contentRouting.findProviders('a cid'))

    expect(node.peerStore.addressBook.get(providerPeerId)).to.deep.include({
      isCertified: false,
      multiaddr: result.multiaddrs[0]
    })
  })

  it('should not wait for routing findProviders to finish before returning results', async () => {
    const [providerPeerId] = await peerUtils.createPeerId({ fixture: false })
    const result = {
      id: providerPeerId,
      multiaddrs: [
        multiaddr('/ip4/123.123.123.123/tcp/49320')
      ]
    }

    const defer = pDefer()

    sinon.stub(node._dht, 'findProviders').callsFake(async function * () { // eslint-disable-line require-yield
      await defer.promise
    })
    sinon.stub(delegate, 'findProviders').callsFake(async function * () {
      yield result

      await defer.promise
    })

    for await (const provider of node.contentRouting.findProviders('a cid')) {
      expect(provider.id).to.deep.equal(providerPeerId)
      defer.resolve()
    }
  })

  it('should dedupe results', async () => {
    const [providerPeerId] = await peerUtils.createPeerId({ fixture: false })
    const result = {
      id: providerPeerId,
      multiaddrs: [
        multiaddr('/ip4/123.123.123.123/tcp/49320')
      ]
    }

    sinon.stub(node._dht, 'findProviders').callsFake(async function * () {
      yield result
    })
    sinon.stub(delegate, 'findProviders').callsFake(async function * () {
      yield result
    })

    const results = await all(node.contentRouting.findProviders('a cid'))

    expect(results).to.be.an('array').with.lengthOf(1).that.deep.equals([result])
  })

  it('should combine multiaddrs when different addresses are returned by different content routers', async () => {
    const [providerPeerId] = await peerUtils.createPeerId({ fixture: false })
    const result1 = {
      id: providerPeerId,
      multiaddrs: [
        multiaddr('/ip4/123.123.123.123/tcp/49320')
      ]
    }
    const result2 = {
      id: providerPeerId,
      multiaddrs: [
        multiaddr('/ip4/213.213.213.213/tcp/2344')
      ]
    }

    sinon.stub(node._dht, 'findProviders').callsFake(async function * () {
      yield result1
    })
    sinon.stub(delegate, 'findProviders').callsFake(async function * () {
      yield result2
    })

    await drain(node.contentRouting.findProviders('a cid'))

    expect(node.peerStore.addressBook.get(providerPeerId)).to.deep.include({
      isCertified: false,
      multiaddr: result1.multiaddrs[0]
    }).and.to.deep.include({
      isCertified: false,
      multiaddr: result2.multiaddrs[0]
    })
  })

  it('should use both the dht and delegate router to provide', async () => {
    const dhtDeferred = pDefer()
    const delegatedDeferred = pDefer()
@@ -271,15 +396,18 @@ describe('content-routing', () => {
    ])
  })

  it('should only use the dht if it finds providers', async () => {
    const results = [true]
  it('should use the dht if the delegate fails to find providers', async () => {
    const [providerPeerId] = await peerUtils.createPeerId({ fixture: false })
    const results = [{
      id: providerPeerId,
      multiaddrs: []
    }]

    sinon.stub(node._dht, 'findProviders').callsFake(function * () {
      yield results[0]
    })

    sinon.stub(delegate, 'findProviders').callsFake(function * () { // eslint-disable-line require-yield
      throw new Error('the delegate should not have been called')
    })

    const providers = []
@@ -292,7 +420,11 @@ describe('content-routing', () => {
  })

  it('should use the delegate if the dht fails to find providers', async () => {
    const results = [true]
    const [providerPeerId] = await peerUtils.createPeerId({ fixture: false })
    const results = [{
      id: providerPeerId,
      multiaddrs: []
    }]

    sinon.stub(node._dht, 'findProviders').callsFake(function * () {})
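The new assertions above exercise the public content routing API: findProviders() merges results from the DHT and any delegate router as they arrive, dedupes them per peer, and records the returned multiaddrs in the peer store. A hedged consumer-side sketch follows; the CID value is illustrative.

'use strict'

const CID = require('cids')
const all = require('it-all')

async function listProviders (libp2p) {
  const cid = new CID('QmU621oD8AhHw6t25vVyfYKmL9VV3PTgc52FngEhTGACFB') // illustrative CID

  const providers = await all(libp2p.contentRouting.findProviders(cid))

  for (const { id, multiaddrs } of providers) {
    // addresses found this way are also stored in libp2p.peerStore.addressBook
    console.log(id.toB58String(), multiaddrs.map(ma => ma.toString()))
  }
}

module.exports = listProviders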
test/core/consume-peer-record.spec.js (new file, 46 lines)
@@ -0,0 +1,46 @@
'use strict'
/* eslint-env mocha */

const Transport = require('libp2p-websockets')
const { NOISE: Crypto } = require('libp2p-noise')

const Libp2p = require('../../src')
const { createPeerId } = require('../utils/creators/peer')

describe('Consume peer record', () => {
  let libp2p

  beforeEach(async () => {
    const [peerId] = await createPeerId()
    const config = {
      peerId,
      modules: {
        transport: [Transport],
        connEncryption: [Crypto]
      }
    }
    libp2p = await Libp2p.create(config)
  })

  afterEach(async () => {
    await libp2p.stop()
  })

  it('should consume peer record when observed addrs are added', async () => {
    let done

    libp2p.peerStore.addressBook.consumePeerRecord = () => {
      done()
    }

    const p = new Promise(resolve => {
      done = resolve
    })

    libp2p.addressManager.addObservedAddr('/ip4/123.123.123.123/tcp/3983')

    await p

    libp2p.stop()
  })
})
@@ -51,7 +51,7 @@ describe('Dialing (direct, TCP)', () => {
    peerStore = new PeerStore({ peerId: remotePeerId })
    remoteTM = new TransportManager({
      libp2p: {
        addressManager: new AddressManager({ listen: [listenAddr] }),
        addressManager: new AddressManager(remotePeerId, { listen: [listenAddr] }),
        peerId: remotePeerId,
        peerStore
      },
@@ -21,14 +21,15 @@ const PeerStore = require('../../src/peer-store')
const baseOptions = require('../utils/base-options.browser')
const { updateSelfPeerRecord } = require('../../src/record/utils')
const pkg = require('../../package.json')
const AddressManager = require('../../src/address-manager')

const { MULTIADDRS_WEBSOCKETS } = require('../fixtures/browser')
const remoteAddr = MULTIADDRS_WEBSOCKETS[0]
const listenMaddrs = [multiaddr('/ip4/127.0.0.1/tcp/15002/ws')]

describe('Identify', () => {
  let localPeer, localPeerStore
  let remotePeer, remotePeerStore
  let localPeer, localPeerStore, localAddressManager
  let remotePeer, remotePeerStore, remoteAddressManager
  const protocols = [multicodecs.IDENTIFY, multicodecs.IDENTIFY_PUSH]

  before(async () => {
@@ -42,6 +43,9 @@ describe('Identify', () => {

    remotePeerStore = new PeerStore({ peerId: remotePeer })
    remotePeerStore.protoBook.set(remotePeer, protocols)

    localAddressManager = new AddressManager(localPeer)
    remoteAddressManager = new AddressManager(remotePeer)
  })

  afterEach(() => {
@@ -110,6 +114,7 @@ describe('Identify', () => {
      libp2p: {
        peerId: localPeer,
        connectionManager: new EventEmitter(),
        addressManager: localAddressManager,
        peerStore: localPeerStore,
        multiaddrs: listenMaddrs,
        isStarted: () => true,
@@ -121,6 +126,7 @@ describe('Identify', () => {
      libp2p: {
        peerId: remotePeer,
        connectionManager: new EventEmitter(),
        addressManager: remoteAddressManager,
        peerStore: remotePeerStore,
        multiaddrs: listenMaddrs,
        isStarted: () => true,
test/nat-manager/nat-manager.node.js (new file, 244 lines)
@@ -0,0 +1,244 @@
'use strict'
/* eslint-env mocha */

const { expect } = require('aegir/utils/chai')
const sinon = require('sinon')
const AddressManager = require('../../src/address-manager')
const TransportManager = require('../../src/transport-manager')
const Transport = require('libp2p-tcp')
const mockUpgrader = require('../utils/mockUpgrader')
const NatManager = require('../../src/nat-manager')
const delay = require('delay')
const peers = require('../fixtures/peers')
const PeerId = require('peer-id')
const {
  codes: { ERR_INVALID_PARAMETERS }
} = require('../../src/errors')

const DEFAULT_ADDRESSES = [
  '/ip4/127.0.0.1/tcp/0',
  '/ip4/0.0.0.0/tcp/0'
]

describe('Nat Manager (TCP)', () => {
  const teardown = []

  async function createNatManager (addrs = DEFAULT_ADDRESSES, natManagerOptions = {}) {
    const peerId = await PeerId.createFromJSON(peers[0])
    const addressManager = new AddressManager(peerId, { listen: addrs })
    const transportManager = new TransportManager({
      libp2p: {
        peerId,
        addressManager,
        peerStore: {
          addressBook: {
            consumePeerRecord: sinon.stub()
          }
        }
      },
      upgrader: mockUpgrader,
      onConnection: () => {},
      faultTolerance: TransportManager.FaultTolerance.NO_FATAL
    })
    const natManager = new NatManager({
      peerId,
      addressManager,
      transportManager,
      enabled: true,
      ...natManagerOptions
    })

    natManager._client = {
      externalIp: sinon.stub().resolves('82.3.1.5'),
      map: sinon.stub(),
      destroy: sinon.stub()
    }

    transportManager.add(Transport.prototype[Symbol.toStringTag], Transport)
    await transportManager.listen(addressManager.getListenAddrs())

    teardown.push(async () => {
      await natManager.stop()
      await transportManager.removeAll()
      expect(transportManager._transports.size).to.equal(0)
    })

    return {
      natManager,
      addressManager,
      transportManager
    }
  }

  afterEach(() => Promise.all(teardown))

  it('should map TCP connections to external ports', async () => {
    const {
      natManager,
      addressManager,
      transportManager
    } = await createNatManager()

    let addressChangedEventFired = false

    addressManager.on('change:addresses', () => {
      addressChangedEventFired = true
    })

    natManager._client = {
      externalIp: sinon.stub().resolves('82.3.1.5'),
      map: sinon.stub(),
      destroy: sinon.stub()
    }

    let observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()

    await natManager._start()

    observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.not.be.empty()

    const internalPorts = transportManager.getAddrs()
      .filter(ma => ma.isThinWaistAddress())
      .map(ma => ma.toOptions())
      .filter(({ host, transport }) => host !== '127.0.0.1' && transport === 'tcp')
      .map(({ port }) => port)

    expect(natManager._client.map.called).to.be.true()

    internalPorts.forEach(port => {
      expect(natManager._client.map.getCall(0).args[0]).to.include({
        privatePort: port,
        protocol: 'TCP'
      })
    })

    expect(addressChangedEventFired).to.be.true()
  })

  it('should not map TCP connections when double-natted', async () => {
    const {
      natManager,
      addressManager
    } = await createNatManager()

    natManager._client.externalIp = sinon.stub().resolves('192.168.1.1')

    let observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()

    await expect(natManager._start()).to.eventually.be.rejectedWith(/double NAT/)

    observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()

    expect(natManager._client.map.called).to.be.false()
  })

  it('should do nothing when disabled', async () => {
    const {
      natManager
    } = await createNatManager(DEFAULT_ADDRESSES, {
      enabled: false
    })

    natManager.start()

    await delay(100)

    expect(natManager._client.externalIp.called).to.be.false()
    expect(natManager._client.map.called).to.be.false()
  })

  it('should not map non-ipv4 connections to external ports', async () => {
    const {
      natManager,
      addressManager
    } = await createNatManager([
      '/ip6/::/tcp/5001'
    ])

    let observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()

    await natManager._start()

    observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()
  })

  it('should not map non-ipv6 loopback connections to external ports', async () => {
    const {
      natManager,
      addressManager
    } = await createNatManager([
      '/ip6/::1/tcp/5001'
    ])

    let observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()

    await natManager._start()

    observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()
  })

  it('should not map non-TCP connections to external ports', async () => {
    const {
      natManager,
      addressManager
    } = await createNatManager([
      '/ip4/0.0.0.0/utp'
    ])

    let observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()

    await natManager._start()

    observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()
  })

  it('should not map loopback connections to external ports', async () => {
    const {
      natManager,
      addressManager
    } = await createNatManager([
      '/ip4/127.0.0.1/tcp/5900'
    ])

    let observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()

    await natManager._start()

    observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()
  })

  it('should not map non-thin-waist connections to external ports', async () => {
    const {
      natManager,
      addressManager
    } = await createNatManager([
      '/ip4/0.0.0.0/tcp/5900/sctp/49832'
    ])

    let observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()

    await natManager._start()

    observed = addressManager.getObservedAddrs().map(ma => ma.toString())
    expect(observed).to.be.empty()
  })

  it('should specify large enough TTL', () => {
    expect(() => {
      new NatManager({ ttl: 5 }) // eslint-disable-line no-new
    }).to.throw().with.property('code', ERR_INVALID_PARAMETERS)
  })
})
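The NAT manager exercised above is wired into node configuration through the new config.nat section; the example and test configs in this change set it to enabled: false to keep runs deterministic. A hedged sketch of a node that opts out of UPnP port mapping; the module choices are assumptions, not taken from this diff.

'use strict'

const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('libp2p-noise')

async function createNode () {
  return Libp2p.create({
    addresses: { listen: ['/ip4/0.0.0.0/tcp/0'] },
    modules: { transport: [TCP], streamMuxer: [Mplex], connEncryption: [NOISE] },
    config: {
      nat: {
        enabled: false // skip UPnP port mapping, as the configs in this change do
      }
    }
  })
}

module.exports = createNode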
@@ -10,6 +10,8 @@ const delay = require('delay')
const pDefer = require('p-defer')
const pWaitFor = require('p-wait-for')
const mergeOptions = require('merge-options')
const drain = require('it-drain')
const all = require('it-all')

const ipfsHttpClient = require('ipfs-http-client')
const DelegatedPeerRouter = require('libp2p-delegated-peer-routing')
@@ -82,10 +84,14 @@ describe('peer-routing', () => {

  it('should use the nodes dht to get the closest peers', async () => {
    const deferred = pDefer()
    const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })

    sinon.stub(nodes[0]._dht, 'getClosestPeers').callsFake(function * () {
      deferred.resolve()
      yield
      yield {
        id: remotePeerId,
        multiaddrs: []
      }
    })

    await nodes[0].peerRouting.getClosestPeers().next()
@@ -128,10 +134,14 @@ describe('peer-routing', () => {

  it('should use the delegate router to find peers', async () => {
    const deferred = pDefer()
    const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })

    sinon.stub(delegate, 'findPeer').callsFake(() => {
      deferred.resolve()
      return 'fake peer-id'
      return {
        id: remotePeerId,
        multiaddrs: []
      }
    })

    await node.peerRouting.findPeer()
@@ -140,10 +150,14 @@ describe('peer-routing', () => {

  it('should use the delegate router to get the closest peers', async () => {
    const deferred = pDefer()
    const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })

    sinon.stub(delegate, 'getClosestPeers').callsFake(function * () {
      deferred.resolve()
      yield
      yield {
        id: remotePeerId,
        multiaddrs: []
      }
    })

    await node.peerRouting.getClosestPeers().next()
@@ -152,7 +166,7 @@ describe('peer-routing', () => {
  })

  it('should be able to find a peer', async () => {
    const peerKey = 'QmTp9VkYvnHyrqKQuFPiuZkiX9gPcqj6x5LJ1rmWuSySnL'
    const peerKey = PeerId.createFromB58String('QmTp9VkYvnHyrqKQuFPiuZkiX9gPcqj6x5LJ1rmWuSySnL')
    const mockApi = nock('http://0.0.0.0:60197')
      .post('/api/v0/dht/findpeer')
      .query(true)
@@ -277,55 +291,93 @@ describe('peer-routing', () => {

  afterEach(() => node.stop())

  it('should only use the dht if it finds the peer', async () => {
    const dhtDeferred = pDefer()

    sinon.stub(node._dht, 'findPeer').callsFake(() => {
      dhtDeferred.resolve()
      return { id: node.peerId }
    })
    sinon.stub(delegate, 'findPeer').callsFake(() => {
      throw new Error('the delegate should not have been called')
    })

    await node.peerRouting.findPeer('a peer id')
    await dhtDeferred.promise
  })

  it('should use the delegate if the dht fails to find the peer', async () => {
    const results = [true]
    const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })
    const results = {
      id: remotePeerId,
      multiaddrs: []
    }

    sinon.stub(node._dht, 'findPeer').callsFake(() => {})
    sinon.stub(delegate, 'findPeer').callsFake(() => {
      return results
    })

    const peer = await node.peerRouting.findPeer('a peer id')
    const peer = await node.peerRouting.findPeer(remotePeerId)
    expect(peer).to.eql(results)
  })

  it('should only use the dht if it gets the closest peers', async () => {
    const results = [true]

    sinon.stub(node._dht, 'getClosestPeers').callsFake(function * () {
      yield results[0]
    })

    sinon.stub(delegate, 'getClosestPeers').callsFake(function * () { // eslint-disable-line require-yield
      throw new Error('the delegate should not have been called')
    })

    const closest = []
    for await (const peer of node.peerRouting.getClosestPeers('a cid')) {
      closest.push(peer)
  it('should not wait for the dht to return if the delegate does first', async () => {
    const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })
    const results = {
      id: remotePeerId,
      multiaddrs: []
    }

    expect(closest).to.have.length.above(0)
    expect(closest).to.eql(results)
    const defer = pDefer()

    sinon.stub(node._dht, 'findPeer').callsFake(async () => {
      await defer.promise
    })
    sinon.stub(delegate, 'findPeer').callsFake(() => {
      return results
    })

    const peer = await node.peerRouting.findPeer(remotePeerId)
    expect(peer).to.eql(results)

    defer.resolve()
  })

  it('should not wait for the delegate to return if the dht does first', async () => {
    const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })
    const results = {
      id: remotePeerId,
      multiaddrs: []
    }

    const defer = pDefer()

    sinon.stub(node._dht, 'findPeer').callsFake(() => {
      return results
    })
    sinon.stub(delegate, 'findPeer').callsFake(async () => {
      await defer.promise
    })

    const peer = await node.peerRouting.findPeer(remotePeerId)
    expect(peer).to.eql(results)

    defer.resolve()
  })

  it('should store the addresses of the found peer', async () => {
    const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })
    const results = {
      id: remotePeerId,
      multiaddrs: [
        multiaddr('/ip4/123.123.123.123/tcp/38982')
      ]
    }

    const spy = sinon.spy(node.peerStore.addressBook, 'add')

    sinon.stub(node._dht, 'findPeer').callsFake(() => {
      return results
    })
    sinon.stub(delegate, 'findPeer').callsFake(() => {})

    await node.peerRouting.findPeer(remotePeerId)

    expect(spy.calledWith(results.id, results.multiaddrs)).to.be.true()
  })

  it('should use the delegate if the dht fails to get the closest peer', async () => {
    const results = [true]
    const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })
    const results = [{
      id: remotePeerId,
      multiaddrs: []
    }]

    sinon.stub(node._dht, 'getClosestPeers').callsFake(function * () { })

@@ -333,14 +385,55 @@ describe('peer-routing', () => {
      yield results[0]
    })

    const closest = []
    for await (const peer of node.peerRouting.getClosestPeers('a cid')) {
      closest.push(peer)
    }
    const closest = await all(node.peerRouting.getClosestPeers('a cid'))

    expect(closest).to.have.length.above(0)
    expect(closest).to.eql(results)
  })

  it('should store the addresses of the closest peer', async () => {
    const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })
    const result = {
      id: remotePeerId,
      multiaddrs: [
        multiaddr('/ip4/123.123.123.123/tcp/38982')
      ]
    }

    const spy = sinon.spy(node.peerStore.addressBook, 'add')

    sinon.stub(node._dht, 'getClosestPeers').callsFake(function * () { })

    sinon.stub(delegate, 'getClosestPeers').callsFake(function * () {
      yield result
    })

    await drain(node.peerRouting.getClosestPeers('a cid'))

    expect(spy.calledWith(result.id, result.multiaddrs)).to.be.true()
  })

  it('should dedupe closest peers', async () => {
    const [remotePeerId] = await peerUtils.createPeerId({ fixture: false })
    const results = [{
      id: remotePeerId,
      multiaddrs: [
        multiaddr('/ip4/123.123.123.123/tcp/38982')
      ]
    }]

    sinon.stub(node._dht, 'getClosestPeers').callsFake(function * () {
      yield * results
    })

    sinon.stub(delegate, 'getClosestPeers').callsFake(function * () {
      yield * results
    })

    const peers = await all(node.peerRouting.getClosestPeers('a cid'))

    expect(peers).to.be.an('array').with.a.lengthOf(1).that.deep.equals(results)
  })
})

describe('peer routing refresh manager service', () => {
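The reworked assertions above target the public peer routing API: findPeer() resolves with an { id, multiaddrs } record from whichever configured router answers first, and getClosestPeers() is an async iterable whose results are deduped and written to the peer store. A hedged usage sketch; the helper name is an assumption.

'use strict'

const drain = require('it-drain')

async function locate (libp2p, remotePeerId) {
  const { id, multiaddrs } = await libp2p.peerRouting.findPeer(remotePeerId)
  console.log('found', id.toB58String(), multiaddrs.map(ma => ma.toString()))

  // walking the closest peers also populates libp2p.peerStore.addressBook
  await drain(libp2p.peerRouting.getClosestPeers(id.toBytes()))
}

module.exports = locate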
@@ -16,6 +16,9 @@ module.exports = {
      hop: {
        enabled: false
      }
    },
    nat: {
      enabled: false
    }
  }
}