feat: async peerstore backed by datastores (#1058)

We have a peerstore that keeps all data for all observed peers in memory with no eviction.

This is fine when you don't discover many peers, but when using the DHT you encounter a significant number of them, so our peer storage grows and grows over time.

We have a persistent peer store, but it only periodically writes peers into the datastore to be read back at startup; the data is still kept in memory in the meantime.

It also means a restart doesn't give you any temporary reprieve from the memory leak, as all previously observed peer data is read back into memory at startup.

This change refactors the peerstore to use a datastore by default, reading and writing peer info as it arrives. It can be configured with a MemoryDatastore if desired.
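
For illustration (not part of the diff below), here is a minimal sketch of how the datastore-backed peerstore is wired up. The `MemoryDatastore` import and the `peerStore.persistence` option appear in this change; the rest of the config shape (`Libp2p.create`, `modules`, `addresses`) follows the conventions of the examples in this repo and should be treated as an assumption rather than a definitive reference.

```js
// Sketch only: datastore-backed peerstore configuration.
// `MemoryDatastore` comes from datastore-core (a dependency added by this
// change); any interface-datastore implementation could be used instead
// if on-disk persistence is wanted.
const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('@chainsafe/libp2p-noise')
const { MemoryDatastore } = require('datastore-core/memory')

async function main () {
  const node = await Libp2p.create({
    addresses: { listen: ['/ip4/0.0.0.0/tcp/0'] },
    modules: {
      transport: [TCP],
      streamMuxer: [Mplex],
      connEncryption: [NOISE]
    },
    // peer data is read and written here as it arrives instead of being
    // held in an ever-growing in-memory map
    datastore: new MemoryDatastore(),
    peerStore: { persistence: true }
  })

  await node.start()
}

main().catch(console.error)
```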

It was necessary to change the peerstore and *book interfaces to be asynchronous since the datastore API is asynchronous.
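
As a hedged sketch of what the breaking change below means for callers (the `node1`/`node2` variables and the protocol name are assumed, mirroring the examples updated in this diff):

```js
// Registering a protocol handler now writes to the protoBook, so it must
// be awaited
await node1.handle('/a-protocol', ({ stream }) => {
  // handle incoming streams as before
})

// The peerstore *book methods now return promises backed by the datastore
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
const addresses = await node1.peerStore.addressBook.get(node2.peerId)
const protocols = await node1.peerStore.protoBook.get(node2.peerId)
```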

BREAKING CHANGE: `libp2p.handle`, `libp2p.registrar.register` and the peerstore methods have become async
Alex Potsides 2022-01-20 12:03:35 +00:00 committed by GitHub
parent 0a4dc54d08
commit 978eb3676f
94 changed files with 3263 additions and 4039 deletions


@ -8,380 +8,58 @@ on:
- '**'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
node-version: lts/*
- uses: ipfs/aegir/actions/cache-node-modules@master
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
directories: |
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
check:
~/.cache
build: |
cd examples
npm i
npx playwright install
cache_name: cache-examples
test-example:
needs: build
runs-on: ubuntu-latest
strategy:
matrix:
example: [
chat,
connection-encryption,
discovery-mechanisms,
echo,
libp2p-in-the-browser,
peer-and-content-routing,
pnet,
protocol-and-stream-muxing,
pubsub,
transports,
webrtc-direct
]
fail-fast: true
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
node-version: lts/*
- uses: ipfs/aegir/actions/cache-node-modules@master
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
directories: |
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: npx aegir lint
- run: npx aegir ts -p check
test-auto-relay-example:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- auto-relay
test-chat-example:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- chat
test-connection-encryption-example:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- connection-encryption
test-discovery-mechanisms-example:
needs: check
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- discovery-mechanisms
test-echo-example:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- echo
test-libp2p-in-the-browser-example:
needs: check
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- libp2p-in-the-browser
test-peer-and-content-routing-example:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- peer-and-content-routing
test-pnet-example:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- pnet
test-protocol-and-stream-muxing-example:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- protocol-and-stream-muxing
test-pubsub-example:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- pubsub
test-transports-example:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- transports
test-webrtc-direct-example:
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
./examples/node_modules
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
cd examples && npm i
- run: cd examples && npm run test -- webrtc-direct
build: |
cd examples
npm i
npx playwright install
cache_name: cache-examples
- run: |
cd examples
npm run test -- ${{ matrix.example }}


@ -12,29 +12,14 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
os: [ubuntu-latest]
node: [16]
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
- uses: ipfs/aegir/actions/cache-node-modules@master
check:
needs: build
@ -43,23 +28,8 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 16
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
node-version: lts/*
- uses: ipfs/aegir/actions/cache-node-modules@master
- run: npx aegir lint
- run: npx aegir dep-check
- uses: ipfs/aegir/actions/bundle-size@master
@ -80,23 +50,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node }}
- uses: actions/cache@v2
id: cache
if: matrix.os != 'windows-latest'
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
- uses: ipfs/aegir/actions/cache-node-modules@master
- run: npm run test:node -- --cov --bail
- uses: codecov/codecov-action@v1
test-chrome:
@ -107,22 +61,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: lts/*
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
- uses: ipfs/aegir/actions/cache-node-modules@master
- run: npm run test:browser -- -t browser -t webworker --bail
test-firefox:
needs: check
@ -132,22 +71,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: lts/*
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
- uses: ipfs/aegir/actions/cache-node-modules@master
- run: npm run test:browser -- -t browser -t webworker --bail -- --browser firefox
test-ts:
needs: check
@ -157,22 +81,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: lts/*
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
- uses: ipfs/aegir/actions/cache-node-modules@master
- run: npm run test:ts
test-interop:
needs: check
@ -182,20 +91,5 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: lts/*
- uses: actions/cache@v2
id: cache
env:
CACHE_NAME: cache-node-modules
with:
path: |
~/.cache
~/.npm
./node_modules
./dist
key: ${{ runner.os }}-build-${{ env.CACHE_NAME }}-${{ github.event.pull_request.head.sha }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
npm install
npm run build
- uses: ipfs/aegir/actions/cache-node-modules@master
- run: npm run test:interop -- --bail -- --exit


@ -30,7 +30,7 @@ const createNode = async () => {
createNode()
])
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
node2.handle('/a-protocol', ({ stream }) => {
pipe(


@ -3,16 +3,16 @@
"version": "0.1.0",
"private": true,
"dependencies": {
"ipfs": "~0.34.4",
"libp2p": "github:libp2p/js-libp2p#master",
"libp2p-delegated-content-routing": "~0.2.2",
"libp2p-delegated-peer-routing": "~0.2.2",
"libp2p-kad-dht": "^0.26.5",
"libp2p-mplex": "~0.8.5",
"libp2p-secio": "~0.11.1",
"libp2p-webrtc-star": "~0.15.8",
"libp2p-websocket-star": "~0.10.2",
"libp2p-websockets": "~0.12.2",
"@chainsafe/libp2p-noise": "^5.0.2",
"ipfs-core": "^0.13.0",
"libp2p": "../../",
"libp2p-delegated-content-routing": "^0.11.0",
"libp2p-delegated-peer-routing": "^0.11.1",
"libp2p-kad-dht": "^0.28.6",
"libp2p-mplex": "^0.10.4",
"libp2p-webrtc-star": "^0.25.0",
"libp2p-websocket-star": "^0.10.2",
"libp2p-websockets": "^0.16.2",
"react": "^16.8.6",
"react-dom": "^16.8.6",
"react-scripts": "2.1.8"


@ -2,7 +2,7 @@
'use strict'
import React from 'react'
import Ipfs from 'ipfs'
import Ipfs from 'ipfs-core'
import libp2pBundle from './libp2p-bundle'
const Component = React.Component
@ -70,7 +70,7 @@ class App extends Component {
}
componentDidMount () {
window.ipfs = this.ipfs = new Ipfs({
window.ipfs = this.ipfs = Ipfs.create({
config: {
Addresses: {
Swarm: []


@ -6,7 +6,7 @@ const Websockets = require('libp2p-websockets')
const WebSocketStar = require('libp2p-websocket-star')
const WebRTCStar = require('libp2p-webrtc-star')
const MPLEX = require('libp2p-mplex')
const SECIO = require('libp2p-secio')
const { NOISE } = require('@chainsafe/libp2p-noise')
const KadDHT = require('libp2p-kad-dht')
const DelegatedPeerRouter = require('libp2p-delegated-peer-routing')
const DelegatedContentRouter = require('libp2p-delegated-content-routing')
@ -48,7 +48,7 @@ export default function Libp2pBundle ({peerInfo, peerBook}) {
MPLEX
],
connEncryption: [
SECIO
NOISE
],
dht: KadDHT
},


@ -5,7 +5,7 @@ const Libp2p = require('../../')
const TCP = require('libp2p-tcp')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('@chainsafe/libp2p-noise')
const Gossipsub = require('libp2p-gossipsub')
const Gossipsub = require('@achingbrain/libp2p-gossipsub')
const Bootstrap = require('libp2p-bootstrap')
const PubsubPeerDiscovery = require('libp2p-pubsub-peer-discovery')


@ -1,13 +1,13 @@
'use strict'
// Find this list at: https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs-core/src/runtime/config-nodejs.js
// Find this list at: https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs-core-config/src/config.js
const bootstrapers = [
'/ip4/104.131.131.82/tcp/4001/p2p/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ',
'/dnsaddr/bootstrap.libp2p.io/p2p/QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJN',
'/dnsaddr/bootstrap.libp2p.io/p2p/QmbLHAnMoJPWSCR5Zhtx6BHJX9KiKNN6tpvbUcqanj75Nb',
'/dnsaddr/bootstrap.libp2p.io/p2p/QmZa1sAxajnQjVM8WjWXoMbmPd7NsWhfKsPkErzpm9wGkp',
'/dnsaddr/bootstrap.libp2p.io/p2p/QmQCU2EcMqAqQPR2i9bChDtGNJchTbq5TbXJJ16u19uLTa',
'/dnsaddr/bootstrap.libp2p.io/p2p/QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt'
'/dnsaddr/bootstrap.libp2p.io/p2p/QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt',
]
module.exports = bootstrapers


@ -2,17 +2,9 @@
const path = require('path')
const execa = require('execa')
const pWaitFor = require('p-wait-for')
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
const bootstrapers = require('./bootstrapers')
const discoveredCopy = 'Discovered:'
const connectedCopy = 'Connection established to:'
async function test () {
const discoveredNodes = []
const connectedNodes = []
process.stdout.write('1.js\n')
const proc = execa('node', [path.join(__dirname, '1.js')], {
@ -20,23 +12,17 @@ async function test () {
all: true
})
let output = ''
proc.all.on('data', async (data) => {
process.stdout.write(data)
const line = uint8ArrayToString(data)
output += uint8ArrayToString(data)
// Discovered or Connected
if (line.includes(discoveredCopy)) {
const id = line.trim().split(discoveredCopy)[1]
discoveredNodes.push(id)
} else if (line.includes(connectedCopy)) {
const id = line.trim().split(connectedCopy)[1]
connectedNodes.push(id)
// Discovered and connected
if (output.includes('Connection established to:')) {
proc.kill()
}
})
await pWaitFor(() => discoveredNodes.length === bootstrapers.length && connectedNodes.length === bootstrapers.length)
proc.kill()
}
module.exports = test


@ -1,4 +1,3 @@
{
"presets": ["@babel/preset-env"],
"plugins": ["syntax-async-functions","transform-regenerator"]
}


@ -14,12 +14,11 @@
"author": "",
"license": "ISC",
"dependencies": {
"@babel/preset-env": "^7.13.0",
"@chainsafe/libp2p-noise": "^5.0.2",
"libp2p": "../../",
"libp2p-bootstrap": "^0.13.0",
"libp2p-bootstrap": "^0.14.0",
"libp2p-mplex": "^0.10.4",
"@chainsafe/libp2p-noise": "^4.1.0",
"libp2p-webrtc-star": "^0.23.0",
"libp2p-webrtc-star": "^0.25.0",
"libp2p-websockets": "^0.16.1"
},
"devDependencies": {


@ -8,12 +8,12 @@
},
"license": "MIT",
"dependencies": {
"@achingbrain/libp2p-gossipsub": "^0.12.2",
"execa": "^2.1.0",
"fs-extra": "^8.1.0",
"libp2p": "../src",
"libp2p-pubsub-peer-discovery": "^4.0.0",
"libp2p-relay-server": "^0.3.0",
"libp2p-gossipsub": "^0.11.0",
"p-defer": "^3.0.0",
"uint8arrays": "^3.0.0",
"which": "^2.0.1"


@ -38,8 +38,8 @@ const createNode = async () => {
createNode()
])
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await Promise.all([
node1.dial(node2.peerId),


@ -40,8 +40,8 @@ const createNode = async () => {
createNode()
])
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await Promise.all([
node1.dial(node2.peerId),


@ -43,8 +43,8 @@ const node1 = nodes[0]
const node2 = nodes[1]
const node3 = nodes[2]
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await Promise.all([
node1.dial(node2.peerId),


@ -2,35 +2,28 @@
const path = require('path')
const execa = require('execa')
const pWaitFor = require('p-wait-for')
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
async function test() {
process.stdout.write('1.js\n')
const addrs = []
let foundIt = false
const proc = execa('node', [path.join(__dirname, '1.js')], {
cwd: path.resolve(__dirname),
all: true
})
let output = ''
proc.all.on('data', async (data) => {
process.stdout.write(data)
const line = uint8ArrayToString(data)
// Discovered peer
if (!foundIt && line.includes('Found it, multiaddrs are:')) {
foundIt = true
}
addrs.push(line)
})
await pWaitFor(() => addrs.length === 2)
output += uint8ArrayToString(data)
// Discovered peers
if (output.includes('Found it, multiaddrs are:')) {
proc.kill()
}
})
}
module.exports = test


@ -2,39 +2,27 @@
const path = require('path')
const execa = require('execa')
const pDefer = require('p-defer')
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
const providedCopy = 'is providing'
const foundCopy = 'Found provider:'
async function test() {
process.stdout.write('2.js\n')
const providedDefer = pDefer()
const foundDefer = pDefer()
const proc = execa('node', [path.join(__dirname, '2.js')], {
cwd: path.resolve(__dirname),
all: true
})
let output = ''
proc.all.on('data', async (data) => {
process.stdout.write(data)
const line = uint8ArrayToString(data)
output += uint8ArrayToString(data)
if (line.includes(providedCopy)) {
providedDefer.resolve()
} else if (line.includes(foundCopy)) {
foundDefer.resolve()
if (output.includes('Found provider:')) {
proc.kill()
}
})
await Promise.all([
providedDefer.promise,
foundDefer.promise
])
proc.kill()
}
module.exports = test


@ -29,7 +29,7 @@ generate(otherSwarmKey)
console.log('nodes started...')
// Add node 2 data to node1's PeerStore
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.dial(node2.peerId)
node2.handle('/private', ({ stream }) => {


@ -31,7 +31,7 @@ const createNode = async () => {
])
// Add node's 2 data to the PeerStore
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
// exact matching
node2.handle('/your-protocol', ({ stream }) => {


@ -31,7 +31,7 @@ const createNode = async () => {
])
// Add node's 2 data to the PeerStore
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
node2.handle(['/a', '/b'], ({ protocol, stream }) => {
pipe(


@ -32,7 +32,7 @@ const createNode = async () => {
])
// Add node's 2 data to the PeerStore
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
node1.handle('/node-1', ({ stream }) => {
pipe(


@ -20,7 +20,7 @@ const node1 = nodes[0]
const node2 = nodes[1]
// Add node's 2 data to the PeerStore
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
// Here we are telling libp2p that if someone dials this node to talk with the `/your-protocol`
// multicodec, the protocol identifier, please call this handler and give it the stream


@ -5,7 +5,7 @@ const Libp2p = require('../../')
const TCP = require('libp2p-tcp')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('@chainsafe/libp2p-noise')
const Gossipsub = require('libp2p-gossipsub')
const Gossipsub = require('@achingbrain/libp2p-gossipsub')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
@ -35,7 +35,7 @@ const createNode = async () => {
])
// Add node's 2 data to the PeerStore
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.dial(node2.peerId)
node1.pubsub.on(topic, (msg) => {


@ -49,7 +49,7 @@ const node1 = nodes[0]
const node2 = nodes[1]
// Add node's 2 data to the PeerStore
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.dial(node2.peerId)
node1.pubsub.on(topic, (msg) => {


@ -5,7 +5,7 @@ const Libp2p = require('../../../')
const TCP = require('libp2p-tcp')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('@chainsafe/libp2p-noise')
const Gossipsub = require('libp2p-gossipsub')
const Gossipsub = require('@achingbrain/libp2p-gossipsub')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
@ -36,10 +36,10 @@ const createNode = async () => {
])
// node1 conect to node2 and node2 conect to node3
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.dial(node2.peerId)
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await node2.dial(node3.peerId)
//subscribe


@ -32,10 +32,10 @@ const [node1, node2, node3] = await Promise.all([
createNode(),
])
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.dial(node2.peerId)
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await node2.dial(node3.peerId)
```


@ -49,7 +49,7 @@ function printAddrs (node, number) {
console.log(result.toString())
})
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
const { stream } = await node1.dialProtocol(node2.peerId, '/print')
await pipe(


@ -60,9 +60,9 @@ function print ({ stream }) {
node2.handle('/print', print)
node3.handle('/print', print)
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
node3.peerStore.addressBook.set(node1.peerId, node1.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await node3.peerStore.addressBook.set(node1.peerId, node1.multiaddrs)
// node 1 (TCP) dials to node 2 (TCP+WebSockets)
const { stream } = await node1.dialProtocol(node2.peerId, '/print')


@ -140,7 +140,7 @@ Then add,
console.log(result.toString())
})
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
const { stream } = await node1.dialProtocol(node2.peerId, '/print')
await pipe(
@ -224,9 +224,9 @@ node1.handle('/print', print)
node2.handle('/print', print)
node3.handle('/print', print)
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
node3.peerStore.addressBook.set(node1.peerId, node1.multiaddrs)
await node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
await node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
await node3.peerStore.addressBook.set(node1.peerId, node1.multiaddrs)
// node 1 (TCP) dials to node 2 (TCP+WebSockets)
const { stream } = await node1.dialProtocol(node2.peerId, '/print')


@ -30,7 +30,7 @@ async function waitForOutput (expectedOutput, command, args = [], opts = {}) {
const proc = execa(command, args, opts)
let output = ''
let time = 120000
let time = 600000
let timeout = setTimeout(() => {
throw new Error(`Did not see "${expectedOutput}" in output from "${[command].concat(args).join(' ')}" after ${time/1000}s`)


@ -40,5 +40,4 @@ const PeerId = require('peer-id')
console.log('Listening on:')
node.multiaddrs.forEach((ma) => console.log(`${ma.toString()}/p2p/${node.peerId.toB58String()}`))
})()


@ -20,10 +20,10 @@
"util": "^0.12.3"
},
"dependencies": {
"@chainsafe/libp2p-noise": "^5.0.2",
"libp2p": "../../",
"libp2p-bootstrap": "^0.13.0",
"libp2p-bootstrap": "^0.14.0",
"libp2p-mplex": "^0.10.4",
"@chainsafe/libp2p-noise": "^4.1.0",
"libp2p-webrtc-direct": "^0.7.0",
"peer-id": "^0.16.0"
},


@ -20,20 +20,18 @@
"scripts": {
"lint": "aegir lint",
"build": "aegir build",
"build:proto": "npm run build:proto:circuit && npm run build:proto:identify && npm run build:proto:plaintext && npm run build:proto:address-book && npm run build:proto:proto-book && npm run build:proto:peer-record && npm run build:proto:envelope",
"build:proto": "npm run build:proto:circuit && npm run build:proto:identify && npm run build:proto:plaintext && npm run build:proto:address-book && npm run build:proto:proto-book && npm run build:proto:peer && npm run build:proto:peer-record && npm run build:proto:envelope",
"build:proto:circuit": "pbjs -t static-module -w commonjs -r libp2p-circuit --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/circuit/protocol/index.js ./src/circuit/protocol/index.proto",
"build:proto:identify": "pbjs -t static-module -w commonjs -r libp2p-identify --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/identify/message.js ./src/identify/message.proto",
"build:proto:plaintext": "pbjs -t static-module -w commonjs -r libp2p-plaintext --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/insecure/proto.js ./src/insecure/proto.proto",
"build:proto:address-book": "pbjs -t static-module -w commonjs -r libp2p-address-book --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/peer-store/persistent/pb/address-book.js ./src/peer-store/persistent/pb/address-book.proto",
"build:proto:proto-book": "pbjs -t static-module -w commonjs -r libp2p-proto-book --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/peer-store/persistent/pb/proto-book.js ./src/peer-store/persistent/pb/proto-book.proto",
"build:proto:peer": "pbjs -t static-module -w commonjs -r libp2p-peer --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/peer-store/pb/peer.js ./src/peer-store/pb/peer.proto",
"build:proto:peer-record": "pbjs -t static-module -w commonjs -r libp2p-peer-record --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/record/peer-record/peer-record.js ./src/record/peer-record/peer-record.proto",
"build:proto:envelope": "pbjs -t static-module -w commonjs -r libp2p-envelope --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/record/envelope/envelope.js ./src/record/envelope/envelope.proto",
"build:proto-types": "npm run build:proto-types:circuit && npm run build:proto-types:identify && npm run build:proto-types:plaintext && npm run build:proto-types:address-book && npm run build:proto-types:proto-book && npm run build:proto-types:peer-record && npm run build:proto-types:envelope",
"build:proto-types": "npm run build:proto-types:circuit && npm run build:proto-types:identify && npm run build:proto-types:plaintext && npm run build:proto-types:address-book && npm run build:proto-types:proto-book && npm run build:proto-types:peer && npm run build:proto-types:peer-record && npm run build:proto-types:envelope",
"build:proto-types:circuit": "pbts -o src/circuit/protocol/index.d.ts src/circuit/protocol/index.js",
"build:proto-types:identify": "pbts -o src/identify/message.d.ts src/identify/message.js",
"build:proto-types:plaintext": "pbts -o src/insecure/proto.d.ts src/insecure/proto.js",
"build:proto-types:address-book": "pbts -o src/peer-store/persistent/pb/address-book.d.ts src/peer-store/persistent/pb/address-book.js",
"build:proto-types:proto-book": "pbts -o src/peer-store/persistent/pb/proto-book.d.ts src/peer-store/persistent/pb/proto-book.js",
"build:proto-types:peer": "pbts -o src/peer-store/pb/peer.d.ts src/peer-store/pb/peer.js",
"build:proto-types:peer-record": "pbts -o src/record/peer-record/peer-record.d.ts src/record/peer-record/peer-record.js",
"build:proto-types:envelope": "pbts -o src/record/envelope/envelope.d.ts src/record/envelope/envelope.js",
"test": "aegir test",
@ -86,6 +84,7 @@
"any-signal": "^2.1.1",
"bignumber.js": "^9.0.1",
"class-is": "^1.1.0",
"datastore-core": "^7.0.0",
"debug": "^4.3.1",
"err-code": "^3.0.0",
"es6-promisify": "^7.0.0",
@ -103,11 +102,12 @@
"it-merge": "^1.0.0",
"it-pipe": "^1.1.0",
"it-take": "^1.0.0",
"libp2p-crypto": "^0.21.0",
"libp2p-interfaces": "^2.0.1",
"libp2p-crypto": "^0.21.1",
"libp2p-interfaces": "^4.0.0",
"libp2p-utils": "^0.4.0",
"mafmt": "^10.0.0",
"merge-options": "^3.0.4",
"mortice": "^2.0.1",
"multiaddr": "^10.0.0",
"multiformats": "^9.0.0",
"multistream-select": "^2.0.0",
@ -140,7 +140,6 @@
"@types/varint": "^6.0.0",
"aegir": "^36.0.0",
"buffer": "^6.0.3",
"datastore-core": "^6.0.7",
"delay": "^5.0.0",
"into-stream": "^6.0.0",
"ipfs-http-client": "^54.0.2",
@ -151,11 +150,11 @@
"libp2p-bootstrap": "^0.14.0",
"libp2p-delegated-content-routing": "^0.11.0",
"libp2p-delegated-peer-routing": "^0.11.1",
"libp2p-interfaces-compliance-tests": "^2.0.1",
"libp2p-interop": "^0.6.0",
"libp2p-kad-dht": "^0.27.1",
"libp2p-interfaces-compliance-tests": "^4.0.8",
"libp2p-interop": "^0.7.1",
"libp2p-kad-dht": "^0.28.6",
"libp2p-mdns": "^0.18.0",
"libp2p-mplex": "^0.10.1",
"libp2p-mplex": "^0.10.4",
"libp2p-tcp": "^0.17.0",
"libp2p-webrtc-star": "^0.25.0",
"libp2p-websockets": "^0.16.0",


@ -8,7 +8,6 @@ const log = Object.assign(debug('libp2p:auto-relay'), {
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
const { Multiaddr } = require('multiaddr')
const PeerId = require('peer-id')
const { relay: multicodec } = require('./multicodec')
const { canHop } = require('./circuit/hop')
@ -22,7 +21,8 @@ const {
/**
* @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
* @typedef {import('../peer-store/address-book').Address} Address
* @typedef {import('../peer-store/types').Address} Address
* @typedef {import('peer-id')} PeerId
*/
/**
@ -91,7 +91,7 @@ class AutoRelay {
// If no protocol, check if we were keeping the peer before as a listenRelay
if (!hasProtocol && this._listenRelays.has(id)) {
this._removeListenRelay(id)
await this._removeListenRelay(id)
return
} else if (!hasProtocol || this._listenRelays.has(id)) {
return
@ -113,7 +113,7 @@ class AutoRelay {
const supportsHop = await canHop({ connection })
if (supportsHop) {
this._peerStore.metadataBook.set(peerId, HOP_METADATA_KEY, uint8ArrayFromString(HOP_METADATA_VALUE))
await this._peerStore.metadataBook.setValue(peerId, HOP_METADATA_KEY, uint8ArrayFromString(HOP_METADATA_VALUE))
await this._addListenRelay(connection, id)
}
} catch (/** @type {any} */ err) {
@ -125,7 +125,6 @@ class AutoRelay {
* Peer disconnects.
*
* @param {Connection} connection - connection to the peer
* @returns {void}
*/
_onPeerDisconnected (connection) {
const peerId = connection.remotePeer
@ -136,7 +135,9 @@ class AutoRelay {
return
}
this._removeListenRelay(id)
this._removeListenRelay(id).catch(err => {
log.error(err)
})
}
/**
@ -154,7 +155,7 @@ class AutoRelay {
}
// Get peer known addresses and sort them per public addresses first
const remoteAddrs = this._peerStore.addressBook.getMultiaddrsForPeer(
const remoteAddrs = await this._peerStore.addressBook.getMultiaddrsForPeer(
connection.remotePeer, this._addressSorter
)
@ -180,12 +181,11 @@ class AutoRelay {
*
* @private
* @param {string} id - peer identifier string.
* @returns {void}
*/
_removeListenRelay (id) {
async _removeListenRelay (id) {
if (this._listenRelays.delete(id)) {
// TODO: this should be responsibility of the connMgr
this._listenOnAvailableHopRelays([id])
await this._listenOnAvailableHopRelays([id])
}
}
@ -197,7 +197,6 @@ class AutoRelay {
* 3. Search the network.
*
* @param {string[]} [peersToIgnore]
* @returns {Promise<void>}
*/
async _listenOnAvailableHopRelays (peersToIgnore = []) {
// TODO: The peer redial issue on disconnect should be handled by connection gating
@ -209,29 +208,30 @@ class AutoRelay {
const knownHopsToDial = []
// Check if we have known hop peers to use and attempt to listen on the already connected
for (const [id, metadataMap] of this._peerStore.metadataBook.data.entries()) {
for await (const { id, metadata } of this._peerStore.getPeers()) {
const idStr = id.toB58String()
// Continue to next if listening on this or peer to ignore
if (this._listenRelays.has(id) || peersToIgnore.includes(id)) {
if (this._listenRelays.has(idStr) || peersToIgnore.includes(idStr)) {
continue
}
const supportsHop = metadataMap.get(HOP_METADATA_KEY)
const supportsHop = metadata.get(HOP_METADATA_KEY)
// Continue to next if it does not support Hop
if (!supportsHop || uint8ArrayToString(supportsHop) !== HOP_METADATA_VALUE) {
continue
}
const peerId = PeerId.createFromB58String(id)
const connection = this._connectionManager.get(peerId)
const connection = this._connectionManager.get(id)
// If not connected, store for possible later use.
if (!connection) {
knownHopsToDial.push(peerId)
knownHopsToDial.push(id)
continue
}
await this._addListenRelay(connection, id)
await this._addListenRelay(connection, idStr)
// Check if already listening on enough relays
if (this._listenRelays.size >= this.maxListeners) {
@ -258,7 +258,7 @@ class AutoRelay {
}
const peerId = provider.id
this._peerStore.addressBook.add(peerId, provider.multiaddrs)
await this._peerStore.addressBook.add(peerId, provider.multiaddrs)
await this._tryToListenOnRelay(peerId)


@ -4,6 +4,7 @@ const debug = require('debug')
const mergeOptions = require('merge-options')
// @ts-ignore retimer does not have types
const retimer = require('retimer')
const all = require('it-all')
const log = Object.assign(debug('libp2p:connection-manager:auto-dialler'), {
error: debug('libp2p:connection-manager:auto-dialler:err')
@ -50,7 +51,7 @@ class AutoDialler {
/**
* Starts the auto dialer
*/
start () {
async start () {
if (!this._options.enabled) {
log('not enabled')
return
@ -86,8 +87,10 @@ class AutoDialler {
return
}
// Sort peers on wether we know protocols of public keys for them
const peers = Array.from(this._libp2p.peerStore.peers.values())
// Sort peers on whether we know protocols of public keys for them
// TODO: assuming the `peerStore.getPeers()` order is stable this will mean
// we keep trying to connect to the same peers?
const peers = (await all(this._libp2p.peerStore.getPeers()))
.sort((a, b) => {
if (b.protocols && b.protocols.length && (!a.protocols || !a.protocols.length)) {
return 1


@ -87,14 +87,22 @@ class ConnectionManager extends EventEmitter {
*
* @type {Map<string, number>}
*/
this._peerValues = trackedMap(METRICS_COMPONENT, METRICS_PEER_VALUES, this._libp2p.metrics)
this._peerValues = trackedMap({
component: METRICS_COMPONENT,
metric: METRICS_PEER_VALUES,
metrics: this._libp2p.metrics
})
/**
* Map of connections per peer
*
* @type {Map<string, Connection[]>}
*/
this.connections = trackedMap(METRICS_COMPONENT, METRICS_PEER_CONNECTIONS, this._libp2p.metrics)
this.connections = trackedMap({
component: METRICS_COMPONENT,
metric: METRICS_PEER_CONNECTIONS,
metrics: this._libp2p.metrics
})
this._started = false
this._timer = null
@ -187,29 +195,31 @@ class ConnectionManager extends EventEmitter {
*
* @private
*/
_checkMetrics () {
async _checkMetrics () {
if (this._libp2p.metrics) {
try {
const movingAverages = this._libp2p.metrics.global.movingAverages
// @ts-ignore moving averages object types
const received = movingAverages.dataReceived[this._options.movingAverageInterval].movingAverage()
this._checkMaxLimit('maxReceivedData', received)
await this._checkMaxLimit('maxReceivedData', received)
// @ts-ignore moving averages object types
const sent = movingAverages.dataSent[this._options.movingAverageInterval].movingAverage()
this._checkMaxLimit('maxSentData', sent)
await this._checkMaxLimit('maxSentData', sent)
const total = received + sent
this._checkMaxLimit('maxData', total)
await this._checkMaxLimit('maxData', total)
log('metrics update', total)
} finally {
this._timer = retimer(this._checkMetrics, this._options.pollInterval)
}
}
}
/**
* Tracks the incoming connection and check the connection limit
*
* @param {Connection} connection
* @returns {void}
*/
onConnect (connection) {
async onConnect (connection) {
const peerId = connection.remotePeer
const peerIdStr = peerId.toB58String()
const storedConn = this.connections.get(peerIdStr)
@ -222,13 +232,13 @@ class ConnectionManager extends EventEmitter {
this.connections.set(peerIdStr, [connection])
}
this._libp2p.peerStore.keyBook.set(peerId, peerId.pubKey)
await this._libp2p.peerStore.keyBook.set(peerId, peerId.pubKey)
if (!this._peerValues.has(peerIdStr)) {
this._peerValues.set(peerIdStr, this._options.defaultPeerValue)
}
this._checkMaxLimit('maxConnections', this.size)
await this._checkMaxLimit('maxConnections', this.size)
}
/**
@ -296,6 +306,9 @@ class ConnectionManager extends EventEmitter {
*/
_onLatencyMeasure (summary) {
this._checkMaxLimit('maxEventLoopDelay', summary.avgMs)
.catch(err => {
log.error(err)
})
}
/**
@ -305,12 +318,12 @@ class ConnectionManager extends EventEmitter {
* @param {string} name - The name of the field to check limits for
* @param {number} value - The current value of the field
*/
_checkMaxLimit (name, value) {
async _checkMaxLimit (name, value) {
const limit = this._options[name]
log('checking limit of %s. current value: %d of %d', name, value, limit)
if (value > limit) {
log('%s: limit exceeded: %s, %d', this._peerId, name, value)
this._maybeDisconnectOne()
await this._maybeDisconnectOne()
}
}
@ -320,7 +333,7 @@ class ConnectionManager extends EventEmitter {
*
* @private
*/
_maybeDisconnectOne () {
async _maybeDisconnectOne () {
if (this._options.minConnections < this.connections.size) {
const peerValues = Array.from(new Map([...this._peerValues.entries()].sort((a, b) => a[1] - b[1])))
log('%s: sorted peer values: %j', this._peerId, peerValues)
@ -331,7 +344,11 @@ class ConnectionManager extends EventEmitter {
log('%s: closing a connection to %j', this._peerId, peerId)
for (const connections of this.connections.values()) {
if (connections[0].remotePeer.toB58String() === peerId) {
connections[0].close()
connections[0].close().catch(err => {
log.error(err)
})
// TODO: should not need to invoke this manually
this.onDisconnect(connections[0])
break
}
}


@ -14,12 +14,12 @@ const take = require('it-take')
* Store the multiaddrs from every peer in the passed peer store
*
* @param {AsyncIterable<{ id: PeerId, multiaddrs: Multiaddr[] }>} source
* @param {import('../peer-store')} peerStore
* @param {import('../peer-store/types').PeerStore} peerStore
*/
function storeAddresses (source, peerStore) {
return map(source, (peer) => {
async function * storeAddresses (source, peerStore) {
yield * map(source, async (peer) => {
// ensure we have the addresses for a given peer
peerStore.addressBook.add(peer.id, peer.multiaddrs)
await peerStore.addressBook.add(peer.id, peer.multiaddrs)
return peer
})


@ -63,7 +63,10 @@ class DialRequest {
tokens.forEach(token => tokenHolder.push(token))
const dialAbortControllers = this.addrs.map(() => {
const controller = new AbortController()
try {
// fails on node < 15.4
setMaxListeners && setMaxListeners(Infinity, controller.signal)
} catch {}
return controller
})


@ -30,8 +30,8 @@ const METRICS_PENDING_DIAL_TARGETS = 'pending-dial-targets'
/**
* @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
* @typedef {import('peer-id')} PeerId
* @typedef {import('../peer-store')} PeerStore
* @typedef {import('../peer-store/address-book').Address} Address
* @typedef {import('../peer-store/types').PeerStore} PeerStore
* @typedef {import('../peer-store/types').Address} Address
* @typedef {import('../transport-manager')} TransportManager
*/
@ -88,10 +88,18 @@ class Dialer {
this.tokens = [...new Array(maxParallelDials)].map((_, index) => index)
/** @type {Map<string, PendingDial>} */
this._pendingDials = trackedMap(METRICS_COMPONENT, METRICS_PENDING_DIALS, metrics)
this._pendingDials = trackedMap({
component: METRICS_COMPONENT,
metric: METRICS_PENDING_DIALS,
metrics
})
/** @type {Map<string, { resolve: (value: any) => void, reject: (err: Error) => void}>} */
this._pendingDialTargets = trackedMap(METRICS_COMPONENT, METRICS_PENDING_DIAL_TARGETS, metrics)
this._pendingDialTargets = trackedMap({
component: METRICS_COMPONENT,
metric: METRICS_PENDING_DIAL_TARGETS,
metrics
})
for (const [key, value] of Object.entries(resolvers)) {
Multiaddr.resolvers.set(key, value)
@ -192,10 +200,10 @@ class Dialer {
const { id, multiaddrs } = getPeer(peer)
if (multiaddrs) {
this.peerStore.addressBook.add(id, multiaddrs)
await this.peerStore.addressBook.add(id, multiaddrs)
}
let knownAddrs = this.peerStore.addressBook.getMultiaddrsForPeer(id, this.addressSorter) || []
let knownAddrs = await this.peerStore.addressBook.getMultiaddrsForPeer(id, this.addressSorter) || []
// If received a multiaddr to dial, it should be the first to use
// But, if we know other multiaddrs for the peer, we should try them too.
@ -215,7 +223,7 @@ class Dialer {
const supportedAddrs = addrs.filter(a => this.transportManager.transportForMultiaddr(a))
if (supportedAddrs.length > this.maxAddrsToDial) {
this.peerStore.delete(id)
await this.peerStore.delete(id)
throw errCode(new Error('dial with more addresses than allowed'), codes.ERR_TOO_MANY_ADDRESSES)
}
@ -259,7 +267,10 @@ class Dialer {
// this signal will potentially be used while dialing lots of
// peers so prevent MaxListenersExceededWarning appearing in the console
try {
// fails on node < 15.4
setMaxListeners && setMaxListeners(Infinity, signal)
} catch {}
const pendingDial = {
dialRequest,


@ -77,8 +77,6 @@ class IdentifyService {
...libp2p._options.host
}
this.peerStore.metadataBook.set(this.peerId, 'AgentVersion', uint8ArrayFromString(this._host.agentVersion))
this.peerStore.metadataBook.set(this.peerId, 'ProtocolVersion', uint8ArrayFromString(this._host.protocolVersion))
// When a new connection happens, trigger identify
this.connectionManager.on('peer:connect', (connection) => {
this.identify(connection).catch(log.error)
@ -87,18 +85,27 @@ class IdentifyService {
// When self multiaddrs change, trigger identify-push
this.peerStore.on('change:multiaddrs', ({ peerId }) => {
if (peerId.toString() === this.peerId.toString()) {
this.pushToPeerStore()
this.pushToPeerStore().catch(err => log.error(err))
}
})
// When self protocols change, trigger identify-push
this.peerStore.on('change:protocols', ({ peerId }) => {
if (peerId.toString() === this.peerId.toString()) {
this.pushToPeerStore()
this.pushToPeerStore().catch(err => log.error(err))
}
})
}
async start () {
await this.peerStore.metadataBook.setValue(this.peerId, 'AgentVersion', uint8ArrayFromString(this._host.agentVersion))
await this.peerStore.metadataBook.setValue(this.peerId, 'ProtocolVersion', uint8ArrayFromString(this._host.protocolVersion))
}
async stop () {
}
/**
* Send an Identify Push update to the list of connections
*
@ -108,7 +115,7 @@ class IdentifyService {
async push (connections) {
const signedPeerRecord = await this.peerStore.addressBook.getRawEnvelope(this.peerId)
const listenAddrs = this._libp2p.multiaddrs.map((ma) => ma.bytes)
const protocols = this.peerStore.protoBook.get(this.peerId) || []
const protocols = await this.peerStore.protoBook.get(this.peerId)
const pushes = connections.map(async connection => {
try {
@ -135,10 +142,8 @@ class IdentifyService {
/**
* Calls `push` for all peers in the `peerStore` that are connected
*
* @returns {void}
*/
pushToPeerStore () {
async pushToPeerStore () {
// Do not try to push if libp2p node is not running
if (!this._libp2p.isStarted()) {
return
@ -146,13 +151,13 @@ class IdentifyService {
const connections = []
let connection
for (const peer of this.peerStore.peers.values()) {
for await (const peer of this.peerStore.getPeers()) {
if (peer.protocols.includes(this.identifyPushProtocolStr) && (connection = this.connectionManager.get(peer.id))) {
connections.push(connection)
}
}
this.push(connections)
await this.push(connections)
}
/**
@ -205,10 +210,10 @@ class IdentifyService {
try {
const envelope = await Envelope.openAndCertify(signedPeerRecord, PeerRecord.DOMAIN)
if (this.peerStore.addressBook.consumePeerRecord(envelope)) {
this.peerStore.protoBook.set(id, protocols)
this.peerStore.metadataBook.set(id, 'AgentVersion', uint8ArrayFromString(message.agentVersion))
this.peerStore.metadataBook.set(id, 'ProtocolVersion', uint8ArrayFromString(message.protocolVersion))
if (await this.peerStore.addressBook.consumePeerRecord(envelope)) {
await this.peerStore.protoBook.set(id, protocols)
await this.peerStore.metadataBook.setValue(id, 'AgentVersion', uint8ArrayFromString(message.agentVersion))
await this.peerStore.metadataBook.setValue(id, 'ProtocolVersion', uint8ArrayFromString(message.protocolVersion))
return
}
} catch (/** @type {any} */ err) {
@ -217,14 +222,14 @@ class IdentifyService {
// LEGACY: Update peers data in PeerStore
try {
this.peerStore.addressBook.set(id, listenAddrs.map((addr) => new Multiaddr(addr)))
await this.peerStore.addressBook.set(id, listenAddrs.map((addr) => new Multiaddr(addr)))
} catch (/** @type {any} */ err) {
log.error('received invalid addrs', err)
}
this.peerStore.protoBook.set(id, protocols)
this.peerStore.metadataBook.set(id, 'AgentVersion', uint8ArrayFromString(message.agentVersion))
this.peerStore.metadataBook.set(id, 'ProtocolVersion', uint8ArrayFromString(message.protocolVersion))
await this.peerStore.protoBook.set(id, protocols)
await this.peerStore.metadataBook.setValue(id, 'AgentVersion', uint8ArrayFromString(message.agentVersion))
await this.peerStore.metadataBook.setValue(id, 'ProtocolVersion', uint8ArrayFromString(message.protocolVersion))
// TODO: Add and score our observed addr
log('received observed address of %s', cleanObservedAddr)
@ -268,7 +273,7 @@ class IdentifyService {
}
const signedPeerRecord = await this.peerStore.addressBook.getRawEnvelope(this.peerId)
const protocols = this.peerStore.protoBook.get(this.peerId) || []
const protocols = await this.peerStore.protoBook.get(this.peerId)
const message = Message.Identify.encode({
protocolVersion: this._host.protocolVersion,
@ -321,8 +326,8 @@ class IdentifyService {
try {
const envelope = await Envelope.openAndCertify(message.signedPeerRecord, PeerRecord.DOMAIN)
if (this.peerStore.addressBook.consumePeerRecord(envelope)) {
this.peerStore.protoBook.set(id, message.protocols)
if (await this.peerStore.addressBook.consumePeerRecord(envelope)) {
await this.peerStore.protoBook.set(id, message.protocols)
return
}
} catch (/** @type {any} */ err) {
@ -331,14 +336,14 @@ class IdentifyService {
// LEGACY: Update peers data in PeerStore
try {
this.peerStore.addressBook.set(id,
await this.peerStore.addressBook.set(id,
message.listenAddrs.map((addr) => new Multiaddr(addr)))
} catch (/** @type {any} */ err) {
log.error('received invalid addrs', err)
}
// Update the protocols
this.peerStore.protoBook.set(id, message.protocols)
await this.peerStore.protoBook.set(id, message.protocols)
}
/**


@ -9,7 +9,7 @@ const { EventEmitter } = require('events')
const errCode = require('err-code')
const PeerId = require('peer-id')
const { Multiaddr } = require('multiaddr')
const { MemoryDatastore } = require('datastore-core/memory')
const PeerRouting = require('./peer-routing')
const ContentRouting = require('./content-routing')
const getPeer = require('./get-peer')
@ -28,7 +28,6 @@ const TransportManager = require('./transport-manager')
const Upgrader = require('./upgrader')
const PeerStore = require('./peer-store')
const PubsubAdapter = require('./pubsub-adapter')
const PersistentPeerStore = require('./peer-store/persistent')
const Registrar = require('./registrar')
const ping = require('./ping')
const IdentifyService = require('./identify')
@ -112,7 +111,7 @@ const { updateSelfPeerRecord } = require('./record/utils')
* @property {KeychainOptions & import('./keychain/index').KeychainOptions} [keychain]
* @property {MetricsOptions & import('./metrics').MetricsOptions} [metrics]
* @property {import('./peer-routing').PeerRoutingOptions} [peerRouting]
* @property {PeerStoreOptions & PersistentPeerStoreOptions} [peerStore]
* @property {PeerStoreOptions} [peerStore]
* @property {import('./transport-manager').TransportManagerOptions} [transportManager]
* @property {Libp2pConfig} [config]
*
@ -172,13 +171,11 @@ class Libp2p extends EventEmitter {
this.metrics = metrics
}
this.peerStore = (this.datastore && this._options.peerStore.persistence)
? new PersistentPeerStore({
/** @type {import('./peer-store/types').PeerStore} */
this.peerStore = new PeerStore({
peerId: this.peerId,
datastore: this.datastore,
...this._options.peerStore
datastore: (this.datastore && this._options.peerStore.persistence) ? this.datastore : new MemoryDatastore()
})
: new PeerStore({ peerId: this.peerId })
// Addresses {listen, announce, noAnnounce}
this.addresses = this._options.addresses
@ -290,7 +287,6 @@ class Libp2p extends EventEmitter {
// Add the identify service since we can multiplex
this.identifyService = new IdentifyService({ libp2p: this })
this.handle(Object.values(IdentifyService.getProtocolStr(this)), this.identifyService.handleMessage)
}
// Attach private network protector
@ -356,6 +352,10 @@ class Libp2p extends EventEmitter {
async start () {
log('libp2p is starting')
if (this.identifyService) {
await this.handle(Object.values(IdentifyService.getProtocolStr(this)), this.identifyService.handleMessage)
}
try {
await this._onStarting()
await this._onDidStart()
@ -380,9 +380,13 @@ class Libp2p extends EventEmitter {
try {
this._isStarted = false
if (this.identifyService) {
await this.identifyService.stop()
}
this.relay && this.relay.stop()
this.peerRouting.stop()
this._autodialler.stop()
await this._autodialler.stop()
await (this._dht && this._dht.stop())
for (const service of this._discovery.values()) {
@ -393,7 +397,6 @@ class Libp2p extends EventEmitter {
this._discovery = new Map()
await this.peerStore.stop()
await this.connectionManager.stop()
await Promise.all([
@ -499,7 +502,7 @@ class Libp2p extends EventEmitter {
if (!connection) {
connection = await this.dialer.connectToPeer(peer, options)
} else if (multiaddrs) {
this.peerStore.addressBook.add(id, multiaddrs)
await this.peerStore.addressBook.add(id, multiaddrs)
}
return connection
@ -579,14 +582,14 @@ class Libp2p extends EventEmitter {
* @param {string[]|string} protocols
* @param {(props: HandlerProps) => void} handler
*/
handle (protocols, handler) {
async handle (protocols, handler) {
protocols = Array.isArray(protocols) ? protocols : [protocols]
protocols.forEach(protocol => {
this.upgrader.protocols.set(protocol, handler)
})
// Add new protocols to self protocols in the Protobook
this.peerStore.protoBook.add(this.peerId, protocols)
await this.peerStore.protoBook.add(this.peerId, protocols)
}
/**
@ -595,14 +598,14 @@ class Libp2p extends EventEmitter {
*
* @param {string[]|string} protocols
*/
unhandle (protocols) {
async unhandle (protocols) {
protocols = Array.isArray(protocols) ? protocols : [protocols]
protocols.forEach(protocol => {
this.upgrader.protocols.delete(protocol)
})
// Remove protocols from self protocols in the Protobook
this.peerStore.protoBook.remove(this.peerId, protocols)
await this.peerStore.protoBook.remove(this.peerId, protocols)
}
async _onStarting () {
@ -613,11 +616,8 @@ class Libp2p extends EventEmitter {
// Manage your NATs
this.natManager.start()
// Start PeerStore
await this.peerStore.start()
if (this._config.pubsub.enabled) {
this.pubsub && this.pubsub.start()
this.pubsub && await this.pubsub.start()
}
// DHT subsystem
@ -631,6 +631,10 @@ class Libp2p extends EventEmitter {
// Start metrics if present
this.metrics && this.metrics.start()
if (this.identifyService) {
await this.identifyService.start()
}
}
/**
@ -643,17 +647,19 @@ class Libp2p extends EventEmitter {
this.peerStore.on('peer', peerId => {
this.emit('peer:discovery', peerId)
this._maybeConnect(peerId)
this._maybeConnect(peerId).catch(err => {
log.error(err)
})
})
// Once we start, emit any peers we may have already discovered
// TODO: this should be removed, as we already discovered these peers in the past
for (const peer of this.peerStore.peers.values()) {
for await (const peer of this.peerStore.getPeers()) {
this.emit('peer:discovery', peer.id)
}
this.connectionManager.start()
this._autodialler.start()
await this._autodialler.start()
// Peer discovery
await this._setupPeerDiscovery()
@ -677,8 +683,8 @@ class Libp2p extends EventEmitter {
return
}
peer.multiaddrs && this.peerStore.addressBook.add(peer.id, peer.multiaddrs)
peer.protocols && this.peerStore.protoBook.set(peer.id, peer.protocols)
peer.multiaddrs && this.peerStore.addressBook.add(peer.id, peer.multiaddrs).catch(err => log.error(err))
peer.protocols && this.peerStore.protoBook.set(peer.id, peer.protocols).catch(err => log.error(err))
}
/**

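Putting the index.js changes together: protocol registration and peer store writes now return promises, and known peers are read back through an async iterator rather than an in-memory map. A rough usage sketch follows; the protocol name, handler body and peer variables are illustrative only and not part of this change.

// Sketch only: `node` is assumed to be a started libp2p instance,
// `remotePeerId` a PeerId and `remoteAddrs` an array of Multiaddrs.
async function example (node, remotePeerId, remoteAddrs) {
  // handle() now writes the protocol into the ProtoBook, so it must be awaited
  await node.handle('/echo/1.0.0', ({ stream }) => {
    // handler body is illustrative
  })

  // address book writes hit the datastore and are async as well
  await node.peerStore.addressBook.add(remotePeerId, remoteAddrs)

  // known peers are streamed from the datastore instead of held in memory
  for await (const peer of node.peerStore.getPeers()) {
    console.log('known peer', peer.id.toB58String())
  }

  await node.unhandle('/echo/1.0.0')
}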

@ -44,7 +44,7 @@ class Metrics {
this._oldPeers = oldPeerLRU(this._options.maxOldPeersRetention)
this._running = false
this._onMessage = this._onMessage.bind(this)
this._componentMetrics = new Map()
this._systems = new Map()
}
/**
@ -89,19 +89,26 @@ class Metrics {
}
/**
* @returns {Map}
* @returns {Map<string, Map<string, Map<string, any>>>}
*/
getComponentMetrics () {
return this._componentMetrics
return this._systems
}
updateComponentMetric (component, metric, value) {
if (!this._componentMetrics.has(component)) {
this._componentMetrics.set(component, new Map())
updateComponentMetric ({ system = 'libp2p', component, metric, value }) {
if (!this._systems.has(system)) {
this._systems.set(system, new Map())
}
const map = this._componentMetrics.get(component)
map.set(metric, value)
const systemMetrics = this._systems.get(system)
if (!systemMetrics.has(component)) {
systemMetrics.set(component, new Map())
}
const componentMetrics = systemMetrics.get(component)
componentMetrics.set(metric, value)
}
/**

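With the change above, component metrics are grouped per system and updated through a single options object. A small sketch, assuming `metrics` is an instance of this Metrics class; the component and metric names are made up for illustration.

// Sketch only: `metrics` is an instance of the Metrics class above.
function metricsExample (metrics) {
  metrics.updateComponentMetric({
    system: 'libp2p',                 // optional, defaults to 'libp2p'
    component: 'connection-manager',  // illustrative component name
    metric: 'open-connections',       // illustrative metric name
    value: 42
  })

  // getComponentMetrics() now returns Map<system, Map<component, Map<metric, value>>>
  return metrics.getComponentMetrics()
    .get('libp2p')
    .get('connection-manager')
    .get('open-connections')
}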

@ -6,18 +6,27 @@
*/
class TrackedMap extends Map {
/**
* @param {string} component
* @param {string} name
* @param {import('.')} metrics
* @param {object} options
* @param {string} options.system
* @param {string} options.component
* @param {string} options.metric
* @param {import('.')} options.metrics
*/
constructor (component, name, metrics) {
constructor (options) {
super()
const { system, component, metric, metrics } = options
this._system = system
this._component = component
this._name = name
this._metric = metric
this._metrics = metrics
this._metrics.updateComponentMetric(this._component, this._name, this.size)
this._metrics.updateComponentMetric({
system: this._system,
component: this._component,
metric: this._metric,
value: this.size
})
}
/**
@ -26,7 +35,12 @@ class TrackedMap extends Map {
*/
set (key, value) {
super.set(key, value)
this._metrics.updateComponentMetric(this._component, this._name, this.size)
this._metrics.updateComponentMetric({
system: this._system,
component: this._component,
metric: this._metric,
value: this.size
})
return this
}
@ -35,31 +49,43 @@ class TrackedMap extends Map {
*/
delete (key) {
const deleted = super.delete(key)
this._metrics.updateComponentMetric(this._component, this._name, this.size)
this._metrics.updateComponentMetric({
system: this._system,
component: this._component,
metric: this._metric,
value: this.size
})
return deleted
}
clear () {
super.clear()
this._metrics.updateComponentMetric(this._component, this._name, this.size)
this._metrics.updateComponentMetric({
system: this._system,
component: this._component,
metric: this._metric,
value: this.size
})
}
}
/**
* @template K
* @template V
* @param {string} component
* @param {string} name
* @param {import('.')} [metrics]
* @param {object} options
* @param {string} [options.system]
* @param {string} options.component
* @param {string} options.metric
* @param {import('.')} [options.metrics]
* @returns {Map<K, V>}
*/
module.exports = (component, name, metrics) => {
module.exports = ({ system = 'libp2p', component, metric, metrics }) => {
/** @type {Map<K, V>} */
let map
if (metrics) {
map = new TrackedMap(component, name, metrics)
map = new TrackedMap({ system, component, metric, metrics })
} else {
map = new Map()
}

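The tracked-map factory takes the same options object; when a Metrics instance is passed it returns a TrackedMap that reports its size on every mutation, otherwise a plain Map. A brief sketch with an assumed require path and illustrative names:

const trackedMap = require('./metrics/tracked-map') // require path assumed

// Sketch only: `metrics` may be a Metrics instance or undefined.
function trackedMapExample (metrics) {
  const conns = trackedMap({
    component: 'connection-manager', // illustrative names
    metric: 'connections',
    metrics                          // omit this and a plain Map is returned
  })

  conns.set('peer-a', {})  // size 1 is reported to metrics
  conns.delete('peer-a')   // size 0 is reported to metrics
  return conns
}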

@ -154,7 +154,10 @@ class PeerRouting {
const controller = new TimeoutController(options.timeout)
// this controller will potentially be used while dialing lots of
// peers so prevent MaxListenersExceededWarning appearing in the console
try {
// fails on node < 15.4
setMaxListeners && setMaxListeners(Infinity, controller.signal)
} catch {}
options.signal = controller.signal
}


@ -1,74 +1,36 @@
'use strict'
const debug = require('debug')
const log = Object.assign(debug('libp2p:peer-store:address-book'), {
error: debug('libp2p:peer-store:address-book:err')
})
const errcode = require('err-code')
const { Multiaddr } = require('multiaddr')
const PeerId = require('peer-id')
const Book = require('./book')
const { codes } = require('../errors')
const PeerRecord = require('../record/peer-record')
const {
codes: { ERR_INVALID_PARAMETERS }
} = require('../errors')
const Envelope = require('../record/envelope')
/**
* @typedef {import('./')} PeerStore
* @typedef {import('./types').PeerStore} PeerStore
* @typedef {import('./types').Address} Address
* @typedef {import('./types').AddressBook} AddressBook
*/
/**
* @typedef {Object} Address
* @property {Multiaddr} multiaddr peer multiaddr.
* @property {boolean} isCertified obtained from a signed peer record.
*
* @typedef {Object} CertifiedRecord
* @property {Uint8Array} raw raw envelope.
* @property {number} seqNumber seq counter.
*
* @typedef {Object} Entry
* @property {Address[]} addresses peer Addresses.
* @property {CertifiedRecord} record certified peer record.
*/
/**
* @extends {Book}
*/
class AddressBook extends Book {
/**
* The AddressBook is responsible for keeping the known multiaddrs of a peer.
*
* @class
* @param {PeerStore} peerStore
*/
constructor (peerStore) {
/**
* PeerStore Event emitter, used by the AddressBook to emit:
* "peer" - emitted when a peer is discovered by the node.
* "change:multiaddrs" - emitted when the known multiaddrs of a peer change.
*/
super({
peerStore,
eventName: 'change:multiaddrs',
eventProperty: 'multiaddrs',
eventTransformer: (data) => {
if (!data.addresses) {
return []
}
return data.addresses.map((/** @type {Address} */ address) => address.multiaddr)
}
const log = Object.assign(debug('libp2p:peer-store:address-book'), {
error: debug('libp2p:peer-store:address-book:err')
})
const EVENT_NAME = 'change:multiaddrs'
/**
* Map known peers to their known Address Entries.
*
* @type {Map<string, Entry>}
* @implements {AddressBook}
*/
this.data = new Map()
class PeerStoreAddressBook {
/**
* @param {PeerStore["emit"]} emit
* @param {import('./types').Store} store
*/
constructor (emit, store) {
this._emit = emit
this._store = store
}
/**
@ -77,9 +39,16 @@ class AddressBook extends Book {
* into the AddressBook.
*
* @param {Envelope} envelope
* @returns {boolean}
*/
consumePeerRecord (envelope) {
async consumePeerRecord (envelope) {
log('consumePeerRecord await write lock')
const release = await this._store.lock.writeLock()
log('consumePeerRecord got write lock')
let peerId
let updatedPeer
try {
let peerRecord
try {
peerRecord = PeerRecord.createFromProtobuf(envelope.payload)
@ -88,58 +57,72 @@ class AddressBook extends Book {
return false
}
peerId = peerRecord.peerId
const multiaddrs = peerRecord.multiaddrs
// Verify peerId
if (!peerRecord.peerId.equals(envelope.peerId)) {
if (!peerId.equals(envelope.peerId)) {
log('signing key does not match PeerId in the PeerRecord')
return false
}
// ensure the record has multiaddrs
if (!peerRecord.multiaddrs || !peerRecord.multiaddrs.length) {
if (!multiaddrs || !multiaddrs.length) {
return false
}
const peerId = peerRecord.peerId
const id = peerId.toB58String()
const entry = this.data.get(id) || { record: undefined }
const storedRecord = entry.record
if (await this._store.has(peerId)) {
const peer = await this._store.load(peerId)
if (peer.peerRecordEnvelope) {
const storedEnvelope = await Envelope.createFromProtobuf(peer.peerRecordEnvelope)
const storedRecord = PeerRecord.createFromProtobuf(storedEnvelope.payload)
// ensure seq is greater than, or equal to, the last received
if (storedRecord && storedRecord.seqNumber >= peerRecord.seqNumber) {
if (storedRecord.seqNumber >= peerRecord.seqNumber) {
return false
}
const addresses = this._toAddresses(peerRecord.multiaddrs, true)
}
}
// Replace unsigned addresses by the new ones from the record
// TODO: Once we have ttls for the addresses, we should merge these in.
this._setData(peerId, {
addresses,
record: {
raw: envelope.marshal(),
seqNumber: peerRecord.seqNumber
}
// TODO: Once we have ttls for the addresses, we should merge these in
updatedPeer = await this._store.patchOrCreate(peerId, {
addresses: convertMultiaddrsToAddresses(multiaddrs, true),
peerRecordEnvelope: envelope.marshal()
})
log(`stored provided peer record for ${id}`)
log(`stored provided peer record for ${peerRecord.peerId.toB58String()}`)
} finally {
log('consumePeerRecord release write lock')
release()
}
this._emit(EVENT_NAME, { peerId, multiaddrs: updatedPeer.addresses.map(({ multiaddr }) => multiaddr) })
return true
}
/**
* Get the raw Envelope for a peer. Returns
* undefined if no Envelope is found.
*
* @param {PeerId} peerId
* @returns {Uint8Array|undefined}
*/
getRawEnvelope (peerId) {
const entry = this.data.get(peerId.toB58String())
async getRawEnvelope (peerId) {
log('getRawEnvelope await read lock')
const release = await this._store.lock.readLock()
log('getRawEnvelope got read lock')
if (!entry || !entry.record || !entry.record.raw) {
return undefined
try {
const peer = await this._store.load(peerId)
return peer.peerRecordEnvelope
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
} finally {
log('getRawEnvelope release read lock')
release()
}
return entry.record.raw
}
/**
@ -147,10 +130,9 @@ class AddressBook extends Book {
* Returns undefined if no record exists.
*
* @param {PeerId} peerId
* @returns {Promise<Envelope|void>|undefined}
*/
getPeerRecord (peerId) {
const raw = this.getRawEnvelope(peerId)
async getPeerRecord (peerId) {
const raw = await this.getRawEnvelope(peerId)
if (!raw) {
return undefined
@ -160,186 +142,189 @@ class AddressBook extends Book {
}
/**
* Set known multiaddrs of a provided peer.
* This will replace previously stored multiaddrs, if available.
* Replacing stored multiaddrs might result in losing obtained certified addresses.
* If you are not sure, it's recommended to use `add` instead.
*
* @override
* @param {PeerId} peerId
* @param {Multiaddr[]} multiaddrs
* @returns {AddressBook}
*/
set (peerId, multiaddrs) {
async get (peerId) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
const addresses = this._toAddresses(multiaddrs)
log('get wait for read lock')
const release = await this._store.lock.readLock()
log('get got read lock')
// Not replace multiaddrs
if (!addresses.length) {
return this
try {
const peer = await this._store.load(peerId)
return peer.addresses
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
} finally {
log('get release read lock')
release()
}
const id = peerId.toB58String()
const entry = this.data.get(id)
// Already knows the peer
if (entry && entry.addresses && entry.addresses.length === addresses.length) {
const intersection = entry.addresses.filter((addr) => addresses.some((newAddr) => addr.multiaddr.equals(newAddr.multiaddr)))
// Are new addresses equal to the old ones?
// If yes, no changes needed!
if (intersection.length === entry.addresses.length) {
log(`the addresses provided to store are equal to the already stored for ${id}`)
return this
}
}
this._setData(peerId, {
addresses,
record: entry && entry.record
})
log(`stored provided multiaddrs for ${id}`)
// Notify the existence of a new peer
if (!entry) {
this._ps.emit('peer', peerId)
}
return this
return []
}
/**
* Add known addresses of a provided peer.
* If the peer is not known, it is set with the given addresses.
*
* @param {PeerId} peerId
* @param {Multiaddr[]} multiaddrs
* @returns {AddressBook}
*/
add (peerId, multiaddrs) {
async set (peerId, multiaddrs) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
const addresses = this._toAddresses(multiaddrs)
const id = peerId.toB58String()
log('set await write lock')
const release = await this._store.lock.writeLock()
log('set got write lock')
// No addresses to be added
let hasPeer = false
let updatedPeer
try {
const addresses = convertMultiaddrsToAddresses(multiaddrs)
// No valid addresses found
if (!addresses.length) {
return this
return
}
const entry = this.data.get(id)
try {
const peer = await this._store.load(peerId)
hasPeer = true
if (entry && entry.addresses) {
// Add recorded uniquely to the new array (Union)
entry.addresses.forEach((addr) => {
if (!addresses.find(r => r.multiaddr.equals(addr.multiaddr))) {
addresses.push(addr)
if (new Set([
...addresses.map(({ multiaddr }) => multiaddr.toString()),
...peer.addresses.map(({ multiaddr }) => multiaddr.toString())
]).size === peer.addresses.length && addresses.length === peer.addresses.length) {
// not changing anything, no need to update
return
}
})
// If the recorded length is equal to the new after the unique union
// The content is the same, no need to update.
if (entry.addresses.length === addresses.length) {
log(`the addresses provided to store are already stored for ${id}`)
return this
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
}
this._setData(peerId, {
addresses,
record: entry && entry.record
})
updatedPeer = await this._store.patchOrCreate(peerId, { addresses })
log(`added provided multiaddrs for ${id}`)
// Notify the existence of a new peer
if (!(entry && entry.addresses)) {
this._ps.emit('peer', peerId)
log(`set multiaddrs for ${peerId.toB58String()}`)
} finally {
log('set release write lock')
release()
}
return this
this._emit(EVENT_NAME, { peerId, multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr) })
// Notify the existence of a new peer
if (!hasPeer) {
this._emit('peer', peerId)
}
}
/**
* Get the known data of a provided peer.
*
* @override
* @param {PeerId} peerId
* @returns {Address[]|undefined}
*/
get (peerId) {
if (!PeerId.isPeerId(peerId)) {
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
}
const entry = this.data.get(peerId.toB58String())
return entry && entry.addresses ? [...entry.addresses] : undefined
}
/**
* Transforms received multiaddrs into Address.
*
* @private
* @param {Multiaddr[]} multiaddrs
* @param {boolean} [isCertified]
* @returns {Address[]}
*/
_toAddresses (multiaddrs, isCertified = false) {
if (!multiaddrs) {
log.error('multiaddrs must be provided to store data')
throw errcode(new Error('multiaddrs must be provided'), ERR_INVALID_PARAMETERS)
async add (peerId, multiaddrs) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
// create Address for each address
/** @type {Address[]} */
const addresses = []
multiaddrs.forEach((addr) => {
if (!Multiaddr.isMultiaddr(addr)) {
log.error(`multiaddr ${addr} must be an instance of multiaddr`)
throw errcode(new Error(`multiaddr ${addr} must be an instance of multiaddr`), ERR_INVALID_PARAMETERS)
log('add await write lock')
const release = await this._store.lock.writeLock()
log('add got write lock')
let hasPeer
let updatedPeer
try {
const addresses = convertMultiaddrsToAddresses(multiaddrs)
// No valid addresses found
if (!addresses.length) {
return
}
// Guarantee no duplicates
if (!addresses.find((a) => a.multiaddr.equals(addr))) {
addresses.push({
multiaddr: addr,
isCertified
})
}
})
try {
const peer = await this._store.load(peerId)
hasPeer = true
return addresses
if (new Set([
...addresses.map(({ multiaddr }) => multiaddr.toString()),
...peer.addresses.map(({ multiaddr }) => multiaddr.toString())
]).size === peer.addresses.length) {
return
}
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
}
updatedPeer = await this._store.mergeOrCreate(peerId, { addresses })
log(`added multiaddrs for ${peerId}`)
} finally {
log('add release write lock')
release()
}
this._emit(EVENT_NAME, { peerId, multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr) })
// Notify the existence of a new peer
if (!hasPeer) {
this._emit('peer', peerId)
}
}
/**
* @param {PeerId} peerId
*/
async delete (peerId) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
log('delete await write lock')
const release = await this._store.lock.writeLock()
log('delete got write lock')
let has
try {
has = await this._store.has(peerId)
await this._store.patchOrCreate(peerId, {
addresses: []
})
} finally {
log('delete release write lock')
release()
}
if (has) {
this._emit(EVENT_NAME, { peerId, multiaddrs: [] })
}
}
/**
* Get the known multiaddrs for a given peer. All returned multiaddrs
* will include the encapsulated `PeerId` of the peer.
* Returns `undefined` if there are no known multiaddrs for the given peer.
*
* @param {PeerId} peerId
* @param {(addresses: Address[]) => Address[]} [addressSorter]
* @returns {Multiaddr[]|undefined}
*/
getMultiaddrsForPeer (peerId, addressSorter = (ms) => ms) {
if (!PeerId.isPeerId(peerId)) {
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
}
const entry = this.data.get(peerId.toB58String())
if (!entry || !entry.addresses) {
return undefined
}
async getMultiaddrsForPeer (peerId, addressSorter = (ms) => ms) {
const addresses = await this.get(peerId)
return addressSorter(
entry.addresses || []
addresses
).map((address) => {
const multiaddr = address.multiaddr
@ -351,4 +336,34 @@ class AddressBook extends Book {
}
}
module.exports = AddressBook
/**
* Transforms received multiaddrs into Address.
*
* @private
* @param {Multiaddr[]} multiaddrs
* @param {boolean} [isCertified]
* @returns {Address[]}
*/
function convertMultiaddrsToAddresses (multiaddrs, isCertified = false) {
if (!multiaddrs) {
log.error('multiaddrs must be provided to store data')
throw errcode(new Error('multiaddrs must be provided'), codes.ERR_INVALID_PARAMETERS)
}
// create Address for each address with no duplicates
return Array.from(
new Set(multiaddrs.map(ma => ma.toString()))
)
.map(addr => {
try {
return {
multiaddr: new Multiaddr(addr),
isCertified
}
} catch (err) {
throw errcode(err, codes.ERR_INVALID_PARAMETERS)
}
})
}
module.exports = PeerStoreAddressBook
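All AddressBook operations now go through the shared store and its read/write locks, so every call returns a promise. A usage sketch, assuming `peerStore` is the datastore-backed peer store and that `peerId`, the multiaddr arrays and the signed record `envelope` are produced elsewhere:

// Sketch only: all inputs are assumed to be created elsewhere.
async function addressBookExample (peerStore, peerId, addrs, moreAddrs, envelope) {
  await peerStore.addressBook.set(peerId, addrs)      // replace stored multiaddrs
  await peerStore.addressBook.add(peerId, moreAddrs)  // union with stored multiaddrs

  const addresses = await peerStore.addressBook.get(peerId)  // [] when nothing is stored
  const multiaddrs = await peerStore.addressBook.getMultiaddrsForPeer(peerId)

  // signed peer records are validated and persisted asynchronously too
  const accepted = await peerStore.addressBook.consumePeerRecord(envelope)
  return { addresses, multiaddrs, accepted }
}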


@ -1,124 +0,0 @@
'use strict'
const errcode = require('err-code')
const PeerId = require('peer-id')
const { codes } = require('../errors')
/**
* @param {any} data
*/
const passthrough = data => data
/**
* @typedef {import('./')} PeerStore
*/
class Book {
/**
* The Book is the skeleton for the PeerStore books.
*
* @class
* @param {Object} properties
* @param {PeerStore} properties.peerStore - PeerStore instance.
* @param {string} properties.eventName - Name of the event to emit by the PeerStore.
* @param {string} properties.eventProperty - Name of the property to emit by the PeerStore.
* @param {(data: any) => any[]} [properties.eventTransformer] - Transformer function of the provided data for being emitted.
*/
constructor ({ peerStore, eventName, eventProperty, eventTransformer = passthrough }) {
this._ps = peerStore
this.eventName = eventName
this.eventProperty = eventProperty
this.eventTransformer = eventTransformer
/**
* Map known peers to their data.
*
* @type {Map<string, any[]|any>}
*/
this.data = new Map()
}
/**
* Set known data of a provided peer.
*
* @param {PeerId} peerId
* @param {any[]|any} data
*/
set (peerId, data) {
throw errcode(new Error('set must be implemented by the subclass'), codes.ERR_NOT_IMPLEMENTED)
}
/**
* Set data into the datastructure, persistence and emit it using the provided transformers.
*
* @protected
* @param {PeerId} peerId - peerId of the data to store
* @param {any} data - data to store.
* @param {Object} [options] - storing options.
* @param {boolean} [options.emit = true] - emit the provided data.
* @returns {void}
*/
_setData (peerId, data, { emit = true } = {}) {
const b58key = peerId.toB58String()
// Store data in memory
this.data.set(b58key, data)
// Emit event
emit && this._emit(peerId, data)
}
/**
* Emit data.
*
* @protected
* @param {PeerId} peerId
* @param {any} [data]
*/
_emit (peerId, data) {
this._ps.emit(this.eventName, {
peerId,
[this.eventProperty]: this.eventTransformer(data)
})
}
/**
* Get the known data of a provided peer.
* Returns `undefined` if there is no available data for the given peer.
*
* @param {PeerId} peerId
* @returns {any[]|any|undefined}
*/
get (peerId) {
if (!PeerId.isPeerId(peerId)) {
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
const rec = this.data.get(peerId.toB58String())
// @ts-ignore
return rec ? [...rec] : undefined
}
/**
* Deletes the provided peer from the book.
*
* @param {PeerId} peerId
* @returns {boolean}
*/
delete (peerId) {
if (!PeerId.isPeerId(peerId)) {
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
if (!this.data.delete(peerId.toB58String())) {
return false
}
this._emit(peerId, [])
return true
}
}
module.exports = Book


@ -1,152 +1,119 @@
'use strict'
const errcode = require('err-code')
const debug = require('debug')
const { EventEmitter } = require('events')
const PeerId = require('peer-id')
const AddressBook = require('./address-book')
const KeyBook = require('./key-book')
const MetadataBook = require('./metadata-book')
const ProtoBook = require('./proto-book')
const {
codes: { ERR_INVALID_PARAMETERS }
} = require('../errors')
const Store = require('./store')
/**
* @typedef {import('./address-book').Address} Address
* @typedef {import('./types').PeerStore} PeerStore
* @typedef {import('./types').Peer} Peer
* @typedef {import('peer-id')} PeerId
*/
const log = Object.assign(debug('libp2p:peer-store'), {
error: debug('libp2p:peer-store:err')
})
/**
* @extends {EventEmitter}
* An implementation of PeerStore that stores data in a Datastore
*
* @fires PeerStore#peer Emitted when a new peer is added.
* @fires PeerStore#change:protocols Emitted when a known peer supports a different set of protocols.
* @fires PeerStore#change:multiaddrs Emitted when a known peer has a different set of multiaddrs.
* @fires PeerStore#change:pubkey Emitted emitted when a peer's public key is known.
* @fires PeerStore#change:metadata Emitted when the known metadata of a peer change.
* @implements {PeerStore}
*/
class PeerStore extends EventEmitter {
class DefaultPeerStore extends EventEmitter {
/**
* Peer object
*
* @typedef {Object} Peer
* @property {PeerId} id peer's peer-id instance.
* @property {Address[]} addresses peer's addresses containing its multiaddrs and metadata.
* @property {string[]} protocols peer's supported protocols.
* @property {Map<string, Uint8Array>|undefined} metadata peer's metadata map.
* @param {object} properties
* @param {PeerId} properties.peerId
* @param {import('interface-datastore').Datastore} properties.datastore
*/
/**
* Responsible for managing known peers, as well as their addresses, protocols and metadata.
*
* @param {object} options
* @param {PeerId} options.peerId
* @class
*/
constructor ({ peerId }) {
constructor ({ peerId, datastore }) {
super()
this._peerId = peerId
this._store = new Store(datastore)
/**
* AddressBook containing a map of peerIdStr to Address.
*/
this.addressBook = new AddressBook(this)
/**
* KeyBook containing a map of peerIdStr to their PeerId with public keys.
*/
this.keyBook = new KeyBook(this)
/**
* MetadataBook containing a map of peerIdStr to their metadata Map.
*/
this.metadataBook = new MetadataBook(this)
/**
* ProtoBook containing a map of peerIdStr to supported protocols.
*/
this.protoBook = new ProtoBook(this)
this.addressBook = new AddressBook(this.emit.bind(this), this._store)
this.keyBook = new KeyBook(this.emit.bind(this), this._store)
this.metadataBook = new MetadataBook(this.emit.bind(this), this._store)
this.protoBook = new ProtoBook(this.emit.bind(this), this._store)
}
/**
* Start the PeerStore.
*/
start () {}
/**
* Stop the PeerStore.
*/
stop () {}
/**
* Get all the stored information of every peer known.
*
* @returns {Map<string, Peer>}
*/
get peers () {
const storedPeers = new Set([
...this.addressBook.data.keys(),
...this.keyBook.data.keys(),
...this.protoBook.data.keys(),
...this.metadataBook.data.keys()
])
async * getPeers () {
log('getPeers await read lock')
const release = await this._store.lock.readLock()
log('getPeers got read lock')
try {
for await (const peer of this._store.all()) {
if (peer.id.toB58String() === this._peerId.toB58String()) {
// Remove self peer if present
this._peerId && storedPeers.delete(this._peerId.toB58String())
continue
}
const peersData = new Map()
storedPeers.forEach((idStr) => {
peersData.set(idStr, this.get(PeerId.createFromB58String(idStr)))
})
return peersData
yield peer
}
} finally {
log('getPeers release read lock')
release()
}
}
/**
* Delete the information of the given peer in every book.
* Delete the information of the given peer in every book
*
* @param {PeerId} peerId
* @returns {boolean} true if found and removed
*/
delete (peerId) {
const addressesDeleted = this.addressBook.delete(peerId)
const keyDeleted = this.keyBook.delete(peerId)
const protocolsDeleted = this.protoBook.delete(peerId)
const metadataDeleted = this.metadataBook.delete(peerId)
async delete (peerId) {
log('delete await write lock')
const release = await this._store.lock.writeLock()
log('delete got write lock')
return addressesDeleted || keyDeleted || protocolsDeleted || metadataDeleted
try {
await this._store.delete(peerId)
} finally {
log('delete release write lock')
release()
}
}
/**
* Get the stored information of a given peer.
* Get the stored information of a given peer
*
* @param {PeerId} peerId
* @returns {Peer|undefined}
*/
get (peerId) {
if (!PeerId.isPeerId(peerId)) {
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
async get (peerId) {
log('get await read lock')
const release = await this._store.lock.readLock()
log('get got read lock')
try {
return this._store.load(peerId)
} finally {
log('get release read lock')
release()
}
}
const id = this.keyBook.data.get(peerId.toB58String())
const addresses = this.addressBook.get(peerId)
const metadata = this.metadataBook.get(peerId)
const protocols = this.protoBook.get(peerId)
/**
* Returns true if we have a record of the peer
*
* @param {PeerId} peerId
*/
async has (peerId) {
log('has await read lock')
const release = await this._store.lock.readLock()
log('has got read lock')
if (!id && !addresses && !metadata && !protocols) {
return undefined
}
return {
id: id || peerId,
addresses: addresses || [],
protocols: protocols || [],
metadata: metadata
try {
return this._store.has(peerId)
} finally {
log('has release read lock')
release()
}
}
}
module.exports = PeerStore
module.exports = DefaultPeerStore
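The peer store itself is constructed with a peer id and a datastore; every read and write goes through that datastore instead of an in-memory map. A sketch, with the MemoryDatastore import and require paths assumed:

const { MemoryDatastore } = require('datastore-core/memory') // import path assumed
const PeerStore = require('./peer-store')                    // exports DefaultPeerStore, path assumed

// Sketch only: `myPeerId` and `somePeerId` are PeerId instances created elsewhere.
async function peerStoreExample (myPeerId, somePeerId) {
  const peerStore = new PeerStore({
    peerId: myPeerId,
    datastore: new MemoryDatastore() // or a persistent datastore
  })

  // stream every known peer out of the datastore
  for await (const peer of peerStore.getPeers()) {
    console.log(peer.id.toB58String(), peer.protocols)
  }

  if (await peerStore.has(somePeerId)) {
    const peer = await peerStore.get(somePeerId)
    console.log(peer.addresses)
  }

  await peerStore.delete(somePeerId)
}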


@ -1,96 +1,141 @@
'use strict'
const debug = require('debug')
const errcode = require('err-code')
const { codes } = require('../errors')
const PeerId = require('peer-id')
const { equals: uint8arrayEquals } = require('uint8arrays/equals')
/**
* @typedef {import('./types').PeerStore} PeerStore
* @typedef {import('./types').KeyBook} KeyBook
* @typedef {import('libp2p-interfaces/src/keys/types').PublicKey} PublicKey
*/
const log = Object.assign(debug('libp2p:peer-store:key-book'), {
error: debug('libp2p:peer-store:key-book:err')
})
const errcode = require('err-code')
const PeerId = require('peer-id')
const Book = require('./book')
const {
codes: { ERR_INVALID_PARAMETERS }
} = require('../errors')
const EVENT_NAME = 'change:pubkey'
/**
* @typedef {import('./')} PeerStore
* @typedef {import('libp2p-crypto').PublicKey} PublicKey
* @implements {KeyBook}
*/
/**
* @extends {Book}
*/
class KeyBook extends Book {
class PeerStoreKeyBook {
/**
* The KeyBook is responsible for keeping the known public keys of a peer.
*
* @class
* @param {PeerStore} peerStore
* @param {PeerStore["emit"]} emit
* @param {import('./types').Store} store
*/
constructor (peerStore) {
super({
peerStore,
eventName: 'change:pubkey',
eventProperty: 'pubkey',
eventTransformer: (data) => data.pubKey
})
/**
* Map known peers to their known Public Key.
*
* @type {Map<string, PeerId>}
*/
this.data = new Map()
constructor (emit, store) {
this._emit = emit
this._store = store
}
/**
* Set the Peer public key.
* Set the Peer public key
*
* @override
* @param {PeerId} peerId
* @param {PublicKey} publicKey
* @returns {KeyBook}
*/
set (peerId, publicKey) {
async set (peerId, publicKey) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
const id = peerId.toB58String()
const recPeerId = this.data.get(id)
// If no record available, and this is valid
if (!recPeerId && publicKey) {
// This might be unnecessary, but we want to store the PeerId
// to avoid an async operation when reconstructing the PeerId
peerId.pubKey = publicKey
this._setData(peerId, peerId)
log(`stored provided public key for ${id}`)
if (!publicKey) {
log.error('publicKey must be an instance of PublicKey to store data')
throw errcode(new Error('publicKey must be an instance of PublicKey'), codes.ERR_INVALID_PARAMETERS)
}
return this
log('set await write lock')
const release = await this._store.lock.writeLock()
log('set got write lock')
let updatedKey = false
try {
try {
const existing = await this._store.load(peerId)
if (existing.pubKey && uint8arrayEquals(existing.pubKey.bytes, publicKey.bytes)) {
return
}
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
}
await this._store.patchOrCreate(peerId, {
pubKey: publicKey
})
updatedKey = true
} finally {
log('set release write lock')
release()
}
if (updatedKey) {
this._emit(EVENT_NAME, { peerId, pubKey: publicKey })
}
}
/**
* Get Public key of the given PeerId, if stored.
* Get Public key of the given PeerId, if stored
*
* @override
* @param {PeerId} peerId
* @returns {PublicKey | undefined}
*/
get (peerId) {
async get (peerId) {
if (!PeerId.isPeerId(peerId)) {
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
const rec = this.data.get(peerId.toB58String())
log('get await read lock')
const release = await this._store.lock.readLock()
log('get got read lock')
return rec ? rec.pubKey : undefined
try {
const peer = await this._store.load(peerId)
return peer.pubKey
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
} finally {
log('get release read lock')
release()
}
}
module.exports = KeyBook
/**
* @param {PeerId} peerId
*/
async delete (peerId) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
log('delete await write lock')
const release = await this._store.lock.writeLock()
log('delete got write lock')
try {
await this._store.patchOrCreate(peerId, {
pubKey: undefined
})
} finally {
log('delete release write lock')
release()
}
this._emit(EVENT_NAME, { peerId, pubKey: undefined })
}
}
module.exports = PeerStoreKeyBook
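KeyBook usage follows the same pattern: setting an already-stored key is a no-op, and a `change:pubkey` event is emitted only when something actually changes. A short sketch, with `publicKey` assumed to be a libp2p-crypto PublicKey:

// Sketch only: `peerStore`, `peerId` and `publicKey` are created elsewhere.
async function keyBookExample (peerStore, peerId, publicKey) {
  await peerStore.keyBook.set(peerId, publicKey)      // no-op if the same key is already stored
  const pubKey = await peerStore.keyBook.get(peerId)  // undefined when nothing is stored
  await peerStore.keyBook.delete(peerId)              // emits change:pubkey with pubKey: undefined
  return pubKey
}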


@ -1,120 +1,67 @@
'use strict'
const debug = require('debug')
const log = Object.assign(debug('libp2p:peer-store:proto-book'), {
error: debug('libp2p:peer-store:proto-book:err')
})
const errcode = require('err-code')
const { codes } = require('../errors')
const PeerId = require('peer-id')
const { equals: uint8ArrayEquals } = require('uint8arrays/equals')
const PeerId = require('peer-id')
const Book = require('./book')
const {
codes: { ERR_INVALID_PARAMETERS }
} = require('../errors')
/**
* @typedef {import('./')} PeerStore
*/
/**
* @extends {Book}
*
* @fires MetadataBook#change:metadata
*/
class MetadataBook extends Book {
/**
* The MetadataBook is responsible for keeping the known supported
* protocols of a peer.
*
* @class
* @param {PeerStore} peerStore
*/
constructor (peerStore) {
/**
* PeerStore Event emitter, used by the MetadataBook to emit:
* "change:metadata" - emitted when the known metadata of a peer change.
*/
super({
peerStore,
eventName: 'change:metadata',
eventProperty: 'metadata'
const log = Object.assign(debug('libp2p:peer-store:metadata-book'), {
error: debug('libp2p:peer-store:metadata-book:err')
})
/**
* Map known peers to their known protocols.
*
* @type {Map<string, Map<string, Uint8Array>>}
* @typedef {import('./types').PeerStore} PeerStore
* @typedef {import('./types').MetadataBook} MetadataBook
*/
this.data = new Map()
const EVENT_NAME = 'change:metadata'
/**
* @implements {MetadataBook}
*/
class PeerStoreMetadataBook {
/**
* The MetadataBook is responsible for keeping the known metadata
* of a peer
*
* @param {PeerStore["emit"]} emit
* @param {import('./types').Store} store
*/
constructor (emit, store) {
this._emit = emit
this._store = store
}
/**
* Set metadata key and value of a provided peer.
* Get the known data of a provided peer
*
* @override
* @param {PeerId} peerId
* @param {string} key - metadata key
* @param {Uint8Array} value - metadata value
* @returns {MetadataBook}
*/
// @ts-ignore override with more than the parameters expected in Book
set (peerId, key, value) {
async get (peerId) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
if (typeof key !== 'string' || !(value instanceof Uint8Array)) {
log.error('valid key and value must be provided to store data')
throw errcode(new Error('valid key and value must be provided'), ERR_INVALID_PARAMETERS)
log('get await read lock')
const release = await this._store.lock.readLock()
log('get got read lock')
try {
const peer = await this._store.load(peerId)
return peer.metadata
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
} finally {
log('get release read lock')
release()
}
this._setValue(peerId, key, value)
return this
}
/**
* Set data into the datastructure
*
* @param {PeerId} peerId
* @param {string} key
* @param {Uint8Array} value
* @param {object} [opts]
* @param {boolean} [opts.emit]
*/
_setValue (peerId, key, value, { emit = true } = {}) {
const id = peerId.toB58String()
const rec = this.data.get(id) || new Map()
const recMap = rec.get(key)
// Already exists and is equal
if (recMap && uint8ArrayEquals(value, recMap)) {
log(`the metadata provided to store is equal to the already stored for ${id} on ${key}`)
return
}
rec.set(key, value)
this.data.set(id, rec)
emit && this._emit(peerId, key)
}
/**
* Get the known data of a provided peer.
*
* @param {PeerId} peerId
* @returns {Map<string, Uint8Array>|undefined}
*/
get (peerId) {
if (!PeerId.isPeerId(peerId)) {
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
}
return this.data.get(peerId.toB58String())
return new Map()
}
/**
@ -122,59 +69,182 @@ class MetadataBook extends Book {
*
* @param {PeerId} peerId
* @param {string} key
* @returns {Uint8Array | undefined}
*/
getValue (peerId, key) {
async getValue (peerId, key) {
if (!PeerId.isPeerId(peerId)) {
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
const rec = this.data.get(peerId.toB58String())
return rec && rec.get(key)
log('getValue await read lock')
const release = await this._store.lock.readLock()
log('getValue got read lock')
try {
const peer = await this._store.load(peerId)
return peer.metadata.get(key)
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
} finally {
log('getValue release read lock')
release()
}
}
/**
* Deletes the provided peer from the book.
* @param {PeerId} peerId
* @param {Map<string, Uint8Array>} metadata
*/
async set (peerId, metadata) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
if (!metadata || !(metadata instanceof Map)) {
log.error('valid metadata must be provided to store data')
throw errcode(new Error('valid metadata must be provided'), codes.ERR_INVALID_PARAMETERS)
}
log('set await write lock')
const release = await this._store.lock.writeLock()
log('set got write lock')
try {
await this._store.mergeOrCreate(peerId, {
metadata
})
} finally {
log('set release write lock')
release()
}
this._emit(EVENT_NAME, { peerId, metadata })
}
/**
* Set metadata key and value of a provided peer
*
* @param {PeerId} peerId
* @returns {boolean}
* @param {string} key - metadata key
* @param {Uint8Array} value - metadata value
*/
delete (peerId) {
async setValue (peerId, key, value) {
if (!PeerId.isPeerId(peerId)) {
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
if (!this.data.delete(peerId.toB58String())) {
return false
if (typeof key !== 'string' || !(value instanceof Uint8Array)) {
log.error('valid key and value must be provided to store data')
throw errcode(new Error('valid key and value must be provided'), codes.ERR_INVALID_PARAMETERS)
}
this._emit(peerId)
log('setValue await write lock')
const release = await this._store.lock.writeLock()
log('setValue got write lock')
return true
let updatedPeer
try {
try {
const existingPeer = await this._store.load(peerId)
const existingValue = existingPeer.metadata.get(key)
if (existingValue != null && uint8ArrayEquals(value, existingValue)) {
return
}
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
}
updatedPeer = await this._store.mergeOrCreate(peerId, {
metadata: new Map([[key, value]])
})
} finally {
log('setValue release write lock')
release()
}
this._emit(EVENT_NAME, { peerId, metadata: updatedPeer.metadata })
}
/**
* @param {PeerId} peerId
*/
async delete (peerId) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
log('delete await write lock')
const release = await this._store.lock.writeLock()
log('delete got write lock')
let has
try {
has = await this._store.has(peerId)
if (has) {
await this._store.patch(peerId, {
metadata: new Map()
})
}
} finally {
log('delete release write lock')
release()
}
if (has) {
this._emit(EVENT_NAME, { peerId, metadata: new Map() })
}
}
/**
* Deletes the provided peer metadata key from the book.
*
* @param {PeerId} peerId
* @param {string} key
* @returns {boolean}
*/
deleteValue (peerId, key) {
async deleteValue (peerId, key) {
if (!PeerId.isPeerId(peerId)) {
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
const rec = this.data.get(peerId.toB58String())
log('deleteValue await write lock')
const release = await this._store.lock.writeLock()
log('deleteValue got write lock')
if (!rec || !rec.delete(key)) {
return false
let metadata
try {
const peer = await this._store.load(peerId)
metadata = peer.metadata
metadata.delete(key)
await this._store.patch(peerId, {
metadata
})
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
} finally {
log('deleteValue release write lock')
release()
}
this._emit(peerId, key)
return true
if (metadata) {
this._emit(EVENT_NAME, { peerId, metadata })
}
}
}
module.exports = MetadataBook
module.exports = PeerStoreMetadataBook
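MetadataBook values are Uint8Arrays keyed by string; `set` merges a whole Map while `setValue` touches a single key. A sketch using the `uint8arrays` helpers already depended on above; the keys and values are illustrative:

const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')

// Sketch only: `peerStore` and `peerId` are created elsewhere.
async function metadataBookExample (peerStore, peerId) {
  await peerStore.metadataBook.setValue(peerId, 'nickname', uint8ArrayFromString('alice'))
  const nickname = await peerStore.metadataBook.getValue(peerId, 'nickname') // Uint8Array | undefined

  await peerStore.metadataBook.set(peerId, new Map([
    ['location', uint8ArrayFromString('somewhere')] // illustrative key/value
  ]))
  const all = await peerStore.metadataBook.get(peerId) // Map<string, Uint8Array>

  await peerStore.metadataBook.deleteValue(peerId, 'nickname')
  await peerStore.metadataBook.delete(peerId)
  return { nickname, all }
}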

src/peer-store/pb/peer.d.ts

@ -0,0 +1,222 @@
import * as $protobuf from "protobufjs";
/** Properties of a Peer. */
export interface IPeer {
/** Peer addresses */
addresses?: (IAddress[]|null);
/** Peer protocols */
protocols?: (string[]|null);
/** Peer metadata */
metadata?: (IMetadata[]|null);
/** Peer pubKey */
pubKey?: (Uint8Array|null);
/** Peer peerRecordEnvelope */
peerRecordEnvelope?: (Uint8Array|null);
}
/** Represents a Peer. */
export class Peer implements IPeer {
/**
* Constructs a new Peer.
* @param [p] Properties to set
*/
constructor(p?: IPeer);
/** Peer addresses. */
public addresses: IAddress[];
/** Peer protocols. */
public protocols: string[];
/** Peer metadata. */
public metadata: IMetadata[];
/** Peer pubKey. */
public pubKey?: (Uint8Array|null);
/** Peer peerRecordEnvelope. */
public peerRecordEnvelope?: (Uint8Array|null);
/** Peer _pubKey. */
public _pubKey?: "pubKey";
/** Peer _peerRecordEnvelope. */
public _peerRecordEnvelope?: "peerRecordEnvelope";
/**
* Encodes the specified Peer message. Does not implicitly {@link Peer.verify|verify} messages.
* @param m Peer message or plain object to encode
* @param [w] Writer to encode to
* @returns Writer
*/
public static encode(m: IPeer, w?: $protobuf.Writer): $protobuf.Writer;
/**
* Decodes a Peer message from the specified reader or buffer.
* @param r Reader or buffer to decode from
* @param [l] Message length if known beforehand
* @returns Peer
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Peer;
/**
* Creates a Peer message from a plain object. Also converts values to their respective internal types.
* @param d Plain object
* @returns Peer
*/
public static fromObject(d: { [k: string]: any }): Peer;
/**
* Creates a plain object from a Peer message. Also converts values to other types if specified.
* @param m Peer
* @param [o] Conversion options
* @returns Plain object
*/
public static toObject(m: Peer, o?: $protobuf.IConversionOptions): { [k: string]: any };
/**
* Converts this Peer to JSON.
* @returns JSON object
*/
public toJSON(): { [k: string]: any };
}
/** Properties of an Address. */
export interface IAddress {
/** Address multiaddr */
multiaddr?: (Uint8Array|null);
/** Address isCertified */
isCertified?: (boolean|null);
}
/** Represents an Address. */
export class Address implements IAddress {
/**
* Constructs a new Address.
* @param [p] Properties to set
*/
constructor(p?: IAddress);
/** Address multiaddr. */
public multiaddr: Uint8Array;
/** Address isCertified. */
public isCertified?: (boolean|null);
/** Address _isCertified. */
public _isCertified?: "isCertified";
/**
* Encodes the specified Address message. Does not implicitly {@link Address.verify|verify} messages.
* @param m Address message or plain object to encode
* @param [w] Writer to encode to
* @returns Writer
*/
public static encode(m: IAddress, w?: $protobuf.Writer): $protobuf.Writer;
/**
* Decodes an Address message from the specified reader or buffer.
* @param r Reader or buffer to decode from
* @param [l] Message length if known beforehand
* @returns Address
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Address;
/**
* Creates an Address message from a plain object. Also converts values to their respective internal types.
* @param d Plain object
* @returns Address
*/
public static fromObject(d: { [k: string]: any }): Address;
/**
* Creates a plain object from an Address message. Also converts values to other types if specified.
* @param m Address
* @param [o] Conversion options
* @returns Plain object
*/
public static toObject(m: Address, o?: $protobuf.IConversionOptions): { [k: string]: any };
/**
* Converts this Address to JSON.
* @returns JSON object
*/
public toJSON(): { [k: string]: any };
}
/** Properties of a Metadata. */
export interface IMetadata {
/** Metadata key */
key?: (string|null);
/** Metadata value */
value?: (Uint8Array|null);
}
/** Represents a Metadata. */
export class Metadata implements IMetadata {
/**
* Constructs a new Metadata.
* @param [p] Properties to set
*/
constructor(p?: IMetadata);
/** Metadata key. */
public key: string;
/** Metadata value. */
public value: Uint8Array;
/**
* Encodes the specified Metadata message. Does not implicitly {@link Metadata.verify|verify} messages.
* @param m Metadata message or plain object to encode
* @param [w] Writer to encode to
* @returns Writer
*/
public static encode(m: IMetadata, w?: $protobuf.Writer): $protobuf.Writer;
/**
* Decodes a Metadata message from the specified reader or buffer.
* @param r Reader or buffer to decode from
* @param [l] Message length if known beforehand
* @returns Metadata
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Metadata;
/**
* Creates a Metadata message from a plain object. Also converts values to their respective internal types.
* @param d Plain object
* @returns Metadata
*/
public static fromObject(d: { [k: string]: any }): Metadata;
/**
* Creates a plain object from a Metadata message. Also converts values to other types if specified.
* @param m Metadata
* @param [o] Conversion options
* @returns Plain object
*/
public static toObject(m: Metadata, o?: $protobuf.IConversionOptions): { [k: string]: any };
/**
* Converts this Metadata to JSON.
* @returns JSON object
*/
public toJSON(): { [k: string]: any };
}
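The generated typings above (and the peer.js implementation that follows) describe the protobuf shape peers are serialised to before being written to the datastore. A sketch of round-tripping such a message with protobufjs; the require path and export shape are assumed, and the addresses, protocol and metadata are illustrative:

const { Multiaddr } = require('multiaddr')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const { Peer } = require('./pb/peer') // require path and export shape assumed

const buf = Peer.encode({
  addresses: [{ multiaddr: new Multiaddr('/ip4/127.0.0.1/tcp/4001').bytes, isCertified: false }],
  protocols: ['/echo/1.0.0'],                                        // illustrative protocol
  metadata: [{ key: 'nickname', value: uint8ArrayFromString('alice') }]
}).finish()

const peer = Peer.decode(buf)
console.log(peer.protocols) // [ '/echo/1.0.0' ]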

src/peer-store/pb/peer.js

@ -0,0 +1,643 @@
/*eslint-disable*/
"use strict";
var $protobuf = require("protobufjs/minimal");
// Common aliases
var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
// Exported root namespace
var $root = $protobuf.roots["libp2p-peer"] || ($protobuf.roots["libp2p-peer"] = {});
$root.Peer = (function() {
/**
* Properties of a Peer.
* @exports IPeer
* @interface IPeer
* @property {Array.<IAddress>|null} [addresses] Peer addresses
* @property {Array.<string>|null} [protocols] Peer protocols
* @property {Array.<IMetadata>|null} [metadata] Peer metadata
* @property {Uint8Array|null} [pubKey] Peer pubKey
* @property {Uint8Array|null} [peerRecordEnvelope] Peer peerRecordEnvelope
*/
/**
* Constructs a new Peer.
* @exports Peer
* @classdesc Represents a Peer.
* @implements IPeer
* @constructor
* @param {IPeer=} [p] Properties to set
*/
function Peer(p) {
this.addresses = [];
this.protocols = [];
this.metadata = [];
if (p)
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
if (p[ks[i]] != null)
this[ks[i]] = p[ks[i]];
}
/**
* Peer addresses.
* @member {Array.<IAddress>} addresses
* @memberof Peer
* @instance
*/
Peer.prototype.addresses = $util.emptyArray;
/**
* Peer protocols.
* @member {Array.<string>} protocols
* @memberof Peer
* @instance
*/
Peer.prototype.protocols = $util.emptyArray;
/**
* Peer metadata.
* @member {Array.<IMetadata>} metadata
* @memberof Peer
* @instance
*/
Peer.prototype.metadata = $util.emptyArray;
/**
* Peer pubKey.
* @member {Uint8Array|null|undefined} pubKey
* @memberof Peer
* @instance
*/
Peer.prototype.pubKey = null;
/**
* Peer peerRecordEnvelope.
* @member {Uint8Array|null|undefined} peerRecordEnvelope
* @memberof Peer
* @instance
*/
Peer.prototype.peerRecordEnvelope = null;
// OneOf field names bound to virtual getters and setters
var $oneOfFields;
/**
* Peer _pubKey.
* @member {"pubKey"|undefined} _pubKey
* @memberof Peer
* @instance
*/
Object.defineProperty(Peer.prototype, "_pubKey", {
get: $util.oneOfGetter($oneOfFields = ["pubKey"]),
set: $util.oneOfSetter($oneOfFields)
});
/**
* Peer _peerRecordEnvelope.
* @member {"peerRecordEnvelope"|undefined} _peerRecordEnvelope
* @memberof Peer
* @instance
*/
Object.defineProperty(Peer.prototype, "_peerRecordEnvelope", {
get: $util.oneOfGetter($oneOfFields = ["peerRecordEnvelope"]),
set: $util.oneOfSetter($oneOfFields)
});
/**
* Encodes the specified Peer message. Does not implicitly {@link Peer.verify|verify} messages.
* @function encode
* @memberof Peer
* @static
* @param {IPeer} m Peer message or plain object to encode
* @param {$protobuf.Writer} [w] Writer to encode to
* @returns {$protobuf.Writer} Writer
*/
Peer.encode = function encode(m, w) {
if (!w)
w = $Writer.create();
if (m.addresses != null && m.addresses.length) {
for (var i = 0; i < m.addresses.length; ++i)
$root.Address.encode(m.addresses[i], w.uint32(10).fork()).ldelim();
}
if (m.protocols != null && m.protocols.length) {
for (var i = 0; i < m.protocols.length; ++i)
w.uint32(18).string(m.protocols[i]);
}
if (m.metadata != null && m.metadata.length) {
for (var i = 0; i < m.metadata.length; ++i)
$root.Metadata.encode(m.metadata[i], w.uint32(26).fork()).ldelim();
}
if (m.pubKey != null && Object.hasOwnProperty.call(m, "pubKey"))
w.uint32(34).bytes(m.pubKey);
if (m.peerRecordEnvelope != null && Object.hasOwnProperty.call(m, "peerRecordEnvelope"))
w.uint32(42).bytes(m.peerRecordEnvelope);
return w;
};
/**
* Decodes a Peer message from the specified reader or buffer.
* @function decode
* @memberof Peer
* @static
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
* @param {number} [l] Message length if known beforehand
* @returns {Peer} Peer
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
Peer.decode = function decode(r, l) {
if (!(r instanceof $Reader))
r = $Reader.create(r);
var c = l === undefined ? r.len : r.pos + l, m = new $root.Peer();
while (r.pos < c) {
var t = r.uint32();
switch (t >>> 3) {
case 1:
if (!(m.addresses && m.addresses.length))
m.addresses = [];
m.addresses.push($root.Address.decode(r, r.uint32()));
break;
case 2:
if (!(m.protocols && m.protocols.length))
m.protocols = [];
m.protocols.push(r.string());
break;
case 3:
if (!(m.metadata && m.metadata.length))
m.metadata = [];
m.metadata.push($root.Metadata.decode(r, r.uint32()));
break;
case 4:
m.pubKey = r.bytes();
break;
case 5:
m.peerRecordEnvelope = r.bytes();
break;
default:
r.skipType(t & 7);
break;
}
}
return m;
};
/**
* Creates a Peer message from a plain object. Also converts values to their respective internal types.
* @function fromObject
* @memberof Peer
* @static
* @param {Object.<string,*>} d Plain object
* @returns {Peer} Peer
*/
Peer.fromObject = function fromObject(d) {
if (d instanceof $root.Peer)
return d;
var m = new $root.Peer();
if (d.addresses) {
if (!Array.isArray(d.addresses))
throw TypeError(".Peer.addresses: array expected");
m.addresses = [];
for (var i = 0; i < d.addresses.length; ++i) {
if (typeof d.addresses[i] !== "object")
throw TypeError(".Peer.addresses: object expected");
m.addresses[i] = $root.Address.fromObject(d.addresses[i]);
}
}
if (d.protocols) {
if (!Array.isArray(d.protocols))
throw TypeError(".Peer.protocols: array expected");
m.protocols = [];
for (var i = 0; i < d.protocols.length; ++i) {
m.protocols[i] = String(d.protocols[i]);
}
}
if (d.metadata) {
if (!Array.isArray(d.metadata))
throw TypeError(".Peer.metadata: array expected");
m.metadata = [];
for (var i = 0; i < d.metadata.length; ++i) {
if (typeof d.metadata[i] !== "object")
throw TypeError(".Peer.metadata: object expected");
m.metadata[i] = $root.Metadata.fromObject(d.metadata[i]);
}
}
if (d.pubKey != null) {
if (typeof d.pubKey === "string")
$util.base64.decode(d.pubKey, m.pubKey = $util.newBuffer($util.base64.length(d.pubKey)), 0);
else if (d.pubKey.length)
m.pubKey = d.pubKey;
}
if (d.peerRecordEnvelope != null) {
if (typeof d.peerRecordEnvelope === "string")
$util.base64.decode(d.peerRecordEnvelope, m.peerRecordEnvelope = $util.newBuffer($util.base64.length(d.peerRecordEnvelope)), 0);
else if (d.peerRecordEnvelope.length)
m.peerRecordEnvelope = d.peerRecordEnvelope;
}
return m;
};
/**
* Creates a plain object from a Peer message. Also converts values to other types if specified.
* @function toObject
* @memberof Peer
* @static
* @param {Peer} m Peer
* @param {$protobuf.IConversionOptions} [o] Conversion options
* @returns {Object.<string,*>} Plain object
*/
Peer.toObject = function toObject(m, o) {
if (!o)
o = {};
var d = {};
if (o.arrays || o.defaults) {
d.addresses = [];
d.protocols = [];
d.metadata = [];
}
if (m.addresses && m.addresses.length) {
d.addresses = [];
for (var j = 0; j < m.addresses.length; ++j) {
d.addresses[j] = $root.Address.toObject(m.addresses[j], o);
}
}
if (m.protocols && m.protocols.length) {
d.protocols = [];
for (var j = 0; j < m.protocols.length; ++j) {
d.protocols[j] = m.protocols[j];
}
}
if (m.metadata && m.metadata.length) {
d.metadata = [];
for (var j = 0; j < m.metadata.length; ++j) {
d.metadata[j] = $root.Metadata.toObject(m.metadata[j], o);
}
}
if (m.pubKey != null && m.hasOwnProperty("pubKey")) {
d.pubKey = o.bytes === String ? $util.base64.encode(m.pubKey, 0, m.pubKey.length) : o.bytes === Array ? Array.prototype.slice.call(m.pubKey) : m.pubKey;
if (o.oneofs)
d._pubKey = "pubKey";
}
if (m.peerRecordEnvelope != null && m.hasOwnProperty("peerRecordEnvelope")) {
d.peerRecordEnvelope = o.bytes === String ? $util.base64.encode(m.peerRecordEnvelope, 0, m.peerRecordEnvelope.length) : o.bytes === Array ? Array.prototype.slice.call(m.peerRecordEnvelope) : m.peerRecordEnvelope;
if (o.oneofs)
d._peerRecordEnvelope = "peerRecordEnvelope";
}
return d;
};
/**
* Converts this Peer to JSON.
* @function toJSON
* @memberof Peer
* @instance
* @returns {Object.<string,*>} JSON object
*/
Peer.prototype.toJSON = function toJSON() {
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
};
return Peer;
})();
$root.Address = (function() {
/**
* Properties of an Address.
* @exports IAddress
* @interface IAddress
* @property {Uint8Array|null} [multiaddr] Address multiaddr
* @property {boolean|null} [isCertified] Address isCertified
*/
/**
* Constructs a new Address.
* @exports Address
* @classdesc Represents an Address.
* @implements IAddress
* @constructor
* @param {IAddress=} [p] Properties to set
*/
function Address(p) {
if (p)
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
if (p[ks[i]] != null)
this[ks[i]] = p[ks[i]];
}
/**
* Address multiaddr.
* @member {Uint8Array} multiaddr
* @memberof Address
* @instance
*/
Address.prototype.multiaddr = $util.newBuffer([]);
/**
* Address isCertified.
* @member {boolean|null|undefined} isCertified
* @memberof Address
* @instance
*/
Address.prototype.isCertified = null;
// OneOf field names bound to virtual getters and setters
var $oneOfFields;
/**
* Address _isCertified.
* @member {"isCertified"|undefined} _isCertified
* @memberof Address
* @instance
*/
Object.defineProperty(Address.prototype, "_isCertified", {
get: $util.oneOfGetter($oneOfFields = ["isCertified"]),
set: $util.oneOfSetter($oneOfFields)
});
/**
* Encodes the specified Address message. Does not implicitly {@link Address.verify|verify} messages.
* @function encode
* @memberof Address
* @static
* @param {IAddress} m Address message or plain object to encode
* @param {$protobuf.Writer} [w] Writer to encode to
* @returns {$protobuf.Writer} Writer
*/
Address.encode = function encode(m, w) {
if (!w)
w = $Writer.create();
if (m.multiaddr != null && Object.hasOwnProperty.call(m, "multiaddr"))
w.uint32(10).bytes(m.multiaddr);
if (m.isCertified != null && Object.hasOwnProperty.call(m, "isCertified"))
w.uint32(16).bool(m.isCertified);
return w;
};
/**
* Decodes an Address message from the specified reader or buffer.
* @function decode
* @memberof Address
* @static
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
* @param {number} [l] Message length if known beforehand
* @returns {Address} Address
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
Address.decode = function decode(r, l) {
if (!(r instanceof $Reader))
r = $Reader.create(r);
var c = l === undefined ? r.len : r.pos + l, m = new $root.Address();
while (r.pos < c) {
var t = r.uint32();
switch (t >>> 3) {
case 1:
m.multiaddr = r.bytes();
break;
case 2:
m.isCertified = r.bool();
break;
default:
r.skipType(t & 7);
break;
}
}
return m;
};
/**
* Creates an Address message from a plain object. Also converts values to their respective internal types.
* @function fromObject
* @memberof Address
* @static
* @param {Object.<string,*>} d Plain object
* @returns {Address} Address
*/
Address.fromObject = function fromObject(d) {
if (d instanceof $root.Address)
return d;
var m = new $root.Address();
if (d.multiaddr != null) {
if (typeof d.multiaddr === "string")
$util.base64.decode(d.multiaddr, m.multiaddr = $util.newBuffer($util.base64.length(d.multiaddr)), 0);
else if (d.multiaddr.length)
m.multiaddr = d.multiaddr;
}
if (d.isCertified != null) {
m.isCertified = Boolean(d.isCertified);
}
return m;
};
/**
* Creates a plain object from an Address message. Also converts values to other types if specified.
* @function toObject
* @memberof Address
* @static
* @param {Address} m Address
* @param {$protobuf.IConversionOptions} [o] Conversion options
* @returns {Object.<string,*>} Plain object
*/
Address.toObject = function toObject(m, o) {
if (!o)
o = {};
var d = {};
if (o.defaults) {
if (o.bytes === String)
d.multiaddr = "";
else {
d.multiaddr = [];
if (o.bytes !== Array)
d.multiaddr = $util.newBuffer(d.multiaddr);
}
}
if (m.multiaddr != null && m.hasOwnProperty("multiaddr")) {
d.multiaddr = o.bytes === String ? $util.base64.encode(m.multiaddr, 0, m.multiaddr.length) : o.bytes === Array ? Array.prototype.slice.call(m.multiaddr) : m.multiaddr;
}
if (m.isCertified != null && m.hasOwnProperty("isCertified")) {
d.isCertified = m.isCertified;
if (o.oneofs)
d._isCertified = "isCertified";
}
return d;
};
/**
* Converts this Address to JSON.
* @function toJSON
* @memberof Address
* @instance
* @returns {Object.<string,*>} JSON object
*/
Address.prototype.toJSON = function toJSON() {
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
};
return Address;
})();
$root.Metadata = (function() {
/**
* Properties of a Metadata.
* @exports IMetadata
* @interface IMetadata
* @property {string|null} [key] Metadata key
* @property {Uint8Array|null} [value] Metadata value
*/
/**
* Constructs a new Metadata.
* @exports Metadata
* @classdesc Represents a Metadata.
* @implements IMetadata
* @constructor
* @param {IMetadata=} [p] Properties to set
*/
function Metadata(p) {
if (p)
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
if (p[ks[i]] != null)
this[ks[i]] = p[ks[i]];
}
/**
* Metadata key.
* @member {string} key
* @memberof Metadata
* @instance
*/
Metadata.prototype.key = "";
/**
* Metadata value.
* @member {Uint8Array} value
* @memberof Metadata
* @instance
*/
Metadata.prototype.value = $util.newBuffer([]);
/**
* Encodes the specified Metadata message. Does not implicitly {@link Metadata.verify|verify} messages.
* @function encode
* @memberof Metadata
* @static
* @param {IMetadata} m Metadata message or plain object to encode
* @param {$protobuf.Writer} [w] Writer to encode to
* @returns {$protobuf.Writer} Writer
*/
Metadata.encode = function encode(m, w) {
if (!w)
w = $Writer.create();
if (m.key != null && Object.hasOwnProperty.call(m, "key"))
w.uint32(10).string(m.key);
if (m.value != null && Object.hasOwnProperty.call(m, "value"))
w.uint32(18).bytes(m.value);
return w;
};
/**
* Decodes a Metadata message from the specified reader or buffer.
* @function decode
* @memberof Metadata
* @static
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
* @param {number} [l] Message length if known beforehand
* @returns {Metadata} Metadata
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
Metadata.decode = function decode(r, l) {
if (!(r instanceof $Reader))
r = $Reader.create(r);
var c = l === undefined ? r.len : r.pos + l, m = new $root.Metadata();
while (r.pos < c) {
var t = r.uint32();
switch (t >>> 3) {
case 1:
m.key = r.string();
break;
case 2:
m.value = r.bytes();
break;
default:
r.skipType(t & 7);
break;
}
}
return m;
};
/**
* Creates a Metadata message from a plain object. Also converts values to their respective internal types.
* @function fromObject
* @memberof Metadata
* @static
* @param {Object.<string,*>} d Plain object
* @returns {Metadata} Metadata
*/
Metadata.fromObject = function fromObject(d) {
if (d instanceof $root.Metadata)
return d;
var m = new $root.Metadata();
if (d.key != null) {
m.key = String(d.key);
}
if (d.value != null) {
if (typeof d.value === "string")
$util.base64.decode(d.value, m.value = $util.newBuffer($util.base64.length(d.value)), 0);
else if (d.value.length)
m.value = d.value;
}
return m;
};
/**
* Creates a plain object from a Metadata message. Also converts values to other types if specified.
* @function toObject
* @memberof Metadata
* @static
* @param {Metadata} m Metadata
* @param {$protobuf.IConversionOptions} [o] Conversion options
* @returns {Object.<string,*>} Plain object
*/
Metadata.toObject = function toObject(m, o) {
if (!o)
o = {};
var d = {};
if (o.defaults) {
d.key = "";
if (o.bytes === String)
d.value = "";
else {
d.value = [];
if (o.bytes !== Array)
d.value = $util.newBuffer(d.value);
}
}
if (m.key != null && m.hasOwnProperty("key")) {
d.key = m.key;
}
if (m.value != null && m.hasOwnProperty("value")) {
d.value = o.bytes === String ? $util.base64.encode(m.value, 0, m.value.length) : o.bytes === Array ? Array.prototype.slice.call(m.value) : m.value;
}
return d;
};
/**
* Converts this Metadata to JSON.
* @function toJSON
* @memberof Metadata
* @instance
* @returns {Object.<string,*>} JSON object
*/
Metadata.prototype.toJSON = function toJSON() {
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
};
return Metadata;
})();
module.exports = $root;

View File

@ -0,0 +1,31 @@
syntax = "proto3";
message Peer {
// Multiaddrs we know about
repeated Address addresses = 1;
// The protocols the peer supports
repeated string protocols = 2;
// Any peer metadata
repeated Metadata metadata = 3;
// The public key of the peer
optional bytes pub_key = 4;
// The most recently received signed PeerRecord
optional bytes peer_record_envelope = 5;
}
// Address represents a single multiaddr
message Address {
bytes multiaddr = 1;
// Flag to indicate if the address comes from a certified source
optional bool isCertified = 2;
}
message Metadata {
string key = 1;
bytes value = 2;
}
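A minimal, hypothetical sketch of how the generated module above could be used to round-trip a Peer entry; the require path and field values are illustrative only:
'use strict'
// Path to the generated module above is illustrative
const { Peer } = require('./peer')

// Encode a peer entry to bytes before writing it to the datastore
const buf = Peer.encode({
  addresses: [{ multiaddr: Uint8Array.from([4, 127, 0, 0, 1, 6, 31, 64]), isCertified: false }],
  protocols: ['/echo/1.0.0'],
  metadata: [{ key: 'nickname', value: Uint8Array.from([104, 105]) }]
}).finish()

// Decode it again when reading it back
const peer = Peer.decode(buf)
console.log(peer.protocols) // ['/echo/1.0.0']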

View File

@ -1,15 +0,0 @@
'use strict'
module.exports.NAMESPACE_COMMON = '/peers/'
// /peers/addrs/<b32 peer id no padding>
module.exports.NAMESPACE_ADDRESS = '/peers/addrs/'
// /peers/keys/<b32 peer id no padding>
module.exports.NAMESPACE_KEYS = '/peers/keys/'
// /peers/metadata/<b32 peer id no padding>/<key>
module.exports.NAMESPACE_METADATA = '/peers/metadata/'
// /peers/protos/<b32 peer id no padding>
module.exports.NAMESPACE_PROTOCOL = '/peers/protos/'

View File

@ -1,408 +0,0 @@
'use strict'
const debug = require('debug')
const log = Object.assign(debug('libp2p:persistent-peer-store'), {
error: debug('libp2p:persistent-peer-store:err')
})
const { Key } = require('interface-datastore/key')
const { Multiaddr } = require('multiaddr')
const PeerId = require('peer-id')
const { base32 } = require('multiformats/bases/base32')
const PeerStore = require('..')
const {
NAMESPACE_ADDRESS,
NAMESPACE_COMMON,
NAMESPACE_KEYS,
NAMESPACE_METADATA,
NAMESPACE_PROTOCOL
} = require('./consts')
const { Addresses } = require('./pb/address-book')
const { Protocols } = require('./pb/proto-book')
/**
* @typedef {import('interface-datastore').Batch} Batch
* @typedef {import('../address-book.js').Address} Address
*/
/**
* @typedef {Object} PersistentPeerStoreProperties
* @property {PeerId} peerId
* @property {import('interface-datastore').Datastore} datastore
*
* @typedef {Object} PersistentPeerStoreOptions
 * @property {number} [threshold = 5] - Number of dirty peers allowed before committing data.
*/
/**
* Responsible for managing the persistence of data in the PeerStore.
*/
class PersistentPeerStore extends PeerStore {
/**
* @class
* @param {PersistentPeerStoreProperties & PersistentPeerStoreOptions} properties
*/
constructor ({ peerId, datastore, threshold = 5 }) {
super({ peerId })
/**
* Backend datastore used to persist data.
*/
this._datastore = datastore
/**
* Peers modified after the latest data persisted.
*/
this._dirtyPeers = new Set()
/**
 * Map of peer identifiers to the set of metadata keys that have changed since the last commit.
*
* @type {Map<string, Set<string>>}
*/
this._dirtyMetadata = new Map()
this.threshold = threshold
this._addDirtyPeer = this._addDirtyPeer.bind(this)
}
/**
* Start Persistent PeerStore.
*
* @returns {Promise<void>}
*/
async start () {
log('PeerStore is starting')
// Handlers for dirty peers
this.on('change:protocols', this._addDirtyPeer)
this.on('change:multiaddrs', this._addDirtyPeer)
this.on('change:pubkey', this._addDirtyPeerKey)
this.on('change:metadata', this._addDirtyPeerMetadata)
// Load data
for await (const entry of this._datastore.query({ prefix: NAMESPACE_COMMON })) {
await this._processDatastoreEntry(entry)
}
log('PeerStore started')
}
/**
* Stop Persistent PeerStore.
*
* @returns {Promise<void>}
*/
async stop () {
log('PeerStore is stopping')
this.removeAllListeners()
await this._commitData()
log('PeerStore stopped')
}
/**
* Add modified peer to the dirty set
*
* @private
* @param {Object} params
* @param {PeerId} params.peerId
*/
_addDirtyPeer ({ peerId }) {
const peerIdstr = peerId.toB58String()
log('add dirty peer', peerIdstr)
this._dirtyPeers.add(peerIdstr)
if (this._dirtyPeers.size >= this.threshold) {
// Commit current data
this._commitData().catch(err => {
log.error('error committing data', err)
})
}
}
/**
* Add modified peer key to the dirty set
*
* @private
* @param {Object} params
* @param {PeerId} params.peerId
*/
_addDirtyPeerKey ({ peerId }) {
// Do not add if an inline public key is available
if (peerId.hasInlinePublicKey()) {
return
}
const peerIdstr = peerId.toB58String()
log('add dirty peer key', peerIdstr)
this._dirtyPeers.add(peerIdstr)
if (this._dirtyPeers.size >= this.threshold) {
// Commit current data
this._commitData().catch(err => {
log.error('error committing data', err)
})
}
}
/**
 * Add a peer with modified metadata to the dirty set.
*
* @private
* @param {Object} params
* @param {PeerId} params.peerId
* @param {string} params.metadata
*/
_addDirtyPeerMetadata ({ peerId, metadata }) {
const peerIdstr = peerId.toB58String()
log('add dirty metadata peer', peerIdstr)
this._dirtyPeers.add(peerIdstr)
// Add dirty metadata key
const mData = this._dirtyMetadata.get(peerIdstr) || new Set()
mData.add(metadata)
this._dirtyMetadata.set(peerIdstr, mData)
if (this._dirtyPeers.size >= this.threshold) {
// Commit current data
this._commitData().catch(err => {
log.error('error committing data', err)
})
}
}
/**
 * Add the current data of all dirty peers to a datastore batch and commit it.
*
* @private
* @returns {Promise<void>}
*/
async _commitData () {
const commitPeers = Array.from(this._dirtyPeers)
if (!commitPeers.length) {
return
}
// Clear Dirty Peers set
this._dirtyPeers.clear()
log('create batch commit')
const batch = this._datastore.batch()
for (const peerIdStr of commitPeers) {
// PeerId
const peerId = this.keyBook.data.get(peerIdStr) || PeerId.createFromB58String(peerIdStr)
// Address Book
this._batchAddressBook(peerId, batch)
// Key Book
!peerId.hasInlinePublicKey() && this._batchKeyBook(peerId, batch)
// Metadata Book
this._batchMetadataBook(peerId, batch)
// Proto Book
this._batchProtoBook(peerId, batch)
}
await batch.commit()
log('batch committed')
}
/**
* Add address book data of the peer to the batch.
*
* @private
* @param {PeerId} peerId
* @param {Batch} batch
*/
_batchAddressBook (peerId, batch) {
const b32key = peerId.toString()
const key = new Key(`${NAMESPACE_ADDRESS}${b32key}`)
const entry = this.addressBook.data.get(peerId.toB58String())
try {
// Deleted from the book
if (!entry) {
batch.delete(key)
return
}
const encodedData = Addresses.encode({
addrs: entry.addresses.map((address) => ({
multiaddr: address.multiaddr.bytes,
isCertified: address.isCertified
})),
certifiedRecord: entry.record
? {
seq: entry.record.seqNumber,
raw: entry.record.raw
}
: undefined
}).finish()
batch.put(key, encodedData)
} catch (/** @type {any} */ err) {
log.error(err)
}
}
/**
 * Add key book data of the peer to the batch.
*
* @private
* @param {PeerId} peerId
* @param {Batch} batch
*/
_batchKeyBook (peerId, batch) {
const b32key = peerId.toString()
const key = new Key(`${NAMESPACE_KEYS}${b32key}`)
try {
// Deleted from the book
if (!peerId.pubKey) {
batch.delete(key)
return
}
const encodedData = peerId.marshalPubKey()
batch.put(key, encodedData)
} catch (/** @type {any} */ err) {
log.error(err)
}
}
/**
* Add metadata book data of the peer to the batch.
*
* @private
* @param {PeerId} peerId
* @param {Batch} batch
*/
_batchMetadataBook (peerId, batch) {
const b32key = peerId.toString()
const dirtyMetadata = this._dirtyMetadata.get(peerId.toB58String()) || []
try {
dirtyMetadata.forEach((/** @type {string} */ dirtyKey) => {
const key = new Key(`${NAMESPACE_METADATA}${b32key}/${dirtyKey}`)
const dirtyValue = this.metadataBook.getValue(peerId, dirtyKey)
if (dirtyValue) {
batch.put(key, dirtyValue)
} else {
batch.delete(key)
}
})
} catch (/** @type {any} */ err) {
log.error(err)
}
}
/**
* Add proto book data of the peer to the batch.
*
* @private
* @param {PeerId} peerId
* @param {Batch} batch
*/
_batchProtoBook (peerId, batch) {
const b32key = peerId.toString()
const key = new Key(`${NAMESPACE_PROTOCOL}${b32key}`)
const protocols = this.protoBook.get(peerId)
try {
// Deleted from the book
if (!protocols) {
batch.delete(key)
return
}
const encodedData = Protocols.encode({ protocols }).finish()
batch.put(key, encodedData)
} catch (/** @type {any} */ err) {
log.error(err)
}
}
/**
* Process datastore entry and add its data to the correct book.
*
* @private
* @param {Object} params
* @param {Key} params.key - datastore key
* @param {Uint8Array} params.value - datastore value stored
* @returns {Promise<void>}
*/
async _processDatastoreEntry ({ key, value }) {
try {
const keyParts = key.toString().split('/')
const peerId = PeerId.createFromBytes(base32.decode(keyParts[3]))
let decoded
switch (keyParts[2]) {
case 'addrs':
decoded = Addresses.decode(value)
// @ts-ignore protected function
this.addressBook._setData(
peerId,
{
addresses: decoded.addrs.map((address) => ({
multiaddr: new Multiaddr(address.multiaddr),
isCertified: Boolean(address.isCertified)
})),
record: decoded.certifiedRecord
? {
raw: decoded.certifiedRecord.raw,
seqNumber: decoded.certifiedRecord.seq
}
: undefined
},
{ emit: false })
break
case 'keys':
decoded = await PeerId.createFromPubKey(value)
// @ts-ignore protected function
this.keyBook._setData(
decoded,
decoded,
{ emit: false })
break
case 'metadata':
this.metadataBook._setValue(
peerId,
keyParts[4],
value,
{ emit: false })
break
case 'protos':
decoded = Protocols.decode(value)
// @ts-ignore protected function
this.protoBook._setData(
peerId,
new Set(decoded.protocols),
{ emit: false })
break
default:
log('invalid data persisted for: ', key.toString())
}
} catch (/** @type {any} */ err) {
log.error(err)
}
}
}
module.exports = PersistentPeerStore
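For reference, a hedged sketch of how this now-removed store was wired up, using only the constructor options and lifecycle methods shown above; the datastore instance and peer id are illustrative:
// Previous API, removed by this commit
const peerStore = new PersistentPeerStore({
  peerId,       // our own PeerId
  datastore,    // any interface-datastore implementation
  threshold: 5  // commit once this many peers are dirty
})

await peerStore.start() // loads previously persisted peers into memory
// ...peers observed here are batched and committed once the threshold is hit
await peerStore.stop()  // commits any remaining dirty peers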

View File

@ -1,198 +0,0 @@
import * as $protobuf from "protobufjs";
/** Properties of an Addresses. */
export interface IAddresses {
/** Addresses addrs */
addrs?: (Addresses.IAddress[]|null);
/** Addresses certifiedRecord */
certifiedRecord?: (Addresses.ICertifiedRecord|null);
}
/** Represents an Addresses. */
export class Addresses implements IAddresses {
/**
* Constructs a new Addresses.
* @param [p] Properties to set
*/
constructor(p?: IAddresses);
/** Addresses addrs. */
public addrs: Addresses.IAddress[];
/** Addresses certifiedRecord. */
public certifiedRecord?: (Addresses.ICertifiedRecord|null);
/**
* Encodes the specified Addresses message. Does not implicitly {@link Addresses.verify|verify} messages.
* @param m Addresses message or plain object to encode
* @param [w] Writer to encode to
* @returns Writer
*/
public static encode(m: IAddresses, w?: $protobuf.Writer): $protobuf.Writer;
/**
* Decodes an Addresses message from the specified reader or buffer.
* @param r Reader or buffer to decode from
* @param [l] Message length if known beforehand
* @returns Addresses
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Addresses;
/**
* Creates an Addresses message from a plain object. Also converts values to their respective internal types.
* @param d Plain object
* @returns Addresses
*/
public static fromObject(d: { [k: string]: any }): Addresses;
/**
* Creates a plain object from an Addresses message. Also converts values to other types if specified.
* @param m Addresses
* @param [o] Conversion options
* @returns Plain object
*/
public static toObject(m: Addresses, o?: $protobuf.IConversionOptions): { [k: string]: any };
/**
* Converts this Addresses to JSON.
* @returns JSON object
*/
public toJSON(): { [k: string]: any };
}
export namespace Addresses {
/** Properties of an Address. */
interface IAddress {
/** Address multiaddr */
multiaddr?: (Uint8Array|null);
/** Address isCertified */
isCertified?: (boolean|null);
}
/** Represents an Address. */
class Address implements IAddress {
/**
* Constructs a new Address.
* @param [p] Properties to set
*/
constructor(p?: Addresses.IAddress);
/** Address multiaddr. */
public multiaddr: Uint8Array;
/** Address isCertified. */
public isCertified: boolean;
/**
* Encodes the specified Address message. Does not implicitly {@link Addresses.Address.verify|verify} messages.
* @param m Address message or plain object to encode
* @param [w] Writer to encode to
* @returns Writer
*/
public static encode(m: Addresses.IAddress, w?: $protobuf.Writer): $protobuf.Writer;
/**
* Decodes an Address message from the specified reader or buffer.
* @param r Reader or buffer to decode from
* @param [l] Message length if known beforehand
* @returns Address
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Addresses.Address;
/**
* Creates an Address message from a plain object. Also converts values to their respective internal types.
* @param d Plain object
* @returns Address
*/
public static fromObject(d: { [k: string]: any }): Addresses.Address;
/**
* Creates a plain object from an Address message. Also converts values to other types if specified.
* @param m Address
* @param [o] Conversion options
* @returns Plain object
*/
public static toObject(m: Addresses.Address, o?: $protobuf.IConversionOptions): { [k: string]: any };
/**
* Converts this Address to JSON.
* @returns JSON object
*/
public toJSON(): { [k: string]: any };
}
/** Properties of a CertifiedRecord. */
interface ICertifiedRecord {
/** CertifiedRecord seq */
seq?: (number|null);
/** CertifiedRecord raw */
raw?: (Uint8Array|null);
}
/** Represents a CertifiedRecord. */
class CertifiedRecord implements ICertifiedRecord {
/**
* Constructs a new CertifiedRecord.
* @param [p] Properties to set
*/
constructor(p?: Addresses.ICertifiedRecord);
/** CertifiedRecord seq. */
public seq: number;
/** CertifiedRecord raw. */
public raw: Uint8Array;
/**
* Encodes the specified CertifiedRecord message. Does not implicitly {@link Addresses.CertifiedRecord.verify|verify} messages.
* @param m CertifiedRecord message or plain object to encode
* @param [w] Writer to encode to
* @returns Writer
*/
public static encode(m: Addresses.ICertifiedRecord, w?: $protobuf.Writer): $protobuf.Writer;
/**
* Decodes a CertifiedRecord message from the specified reader or buffer.
* @param r Reader or buffer to decode from
* @param [l] Message length if known beforehand
* @returns CertifiedRecord
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Addresses.CertifiedRecord;
/**
* Creates a CertifiedRecord message from a plain object. Also converts values to their respective internal types.
* @param d Plain object
* @returns CertifiedRecord
*/
public static fromObject(d: { [k: string]: any }): Addresses.CertifiedRecord;
/**
* Creates a plain object from a CertifiedRecord message. Also converts values to other types if specified.
* @param m CertifiedRecord
* @param [o] Conversion options
* @returns Plain object
*/
public static toObject(m: Addresses.CertifiedRecord, o?: $protobuf.IConversionOptions): { [k: string]: any };
/**
* Converts this CertifiedRecord to JSON.
* @returns JSON object
*/
public toJSON(): { [k: string]: any };
}
}

View File

@ -1,522 +0,0 @@
/*eslint-disable*/
"use strict";
var $protobuf = require("protobufjs/minimal");
// Common aliases
var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
// Exported root namespace
var $root = $protobuf.roots["libp2p-address-book"] || ($protobuf.roots["libp2p-address-book"] = {});
$root.Addresses = (function() {
/**
* Properties of an Addresses.
* @exports IAddresses
* @interface IAddresses
* @property {Array.<Addresses.IAddress>|null} [addrs] Addresses addrs
* @property {Addresses.ICertifiedRecord|null} [certifiedRecord] Addresses certifiedRecord
*/
/**
* Constructs a new Addresses.
* @exports Addresses
* @classdesc Represents an Addresses.
* @implements IAddresses
* @constructor
* @param {IAddresses=} [p] Properties to set
*/
function Addresses(p) {
this.addrs = [];
if (p)
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
if (p[ks[i]] != null)
this[ks[i]] = p[ks[i]];
}
/**
* Addresses addrs.
* @member {Array.<Addresses.IAddress>} addrs
* @memberof Addresses
* @instance
*/
Addresses.prototype.addrs = $util.emptyArray;
/**
* Addresses certifiedRecord.
* @member {Addresses.ICertifiedRecord|null|undefined} certifiedRecord
* @memberof Addresses
* @instance
*/
Addresses.prototype.certifiedRecord = null;
/**
* Encodes the specified Addresses message. Does not implicitly {@link Addresses.verify|verify} messages.
* @function encode
* @memberof Addresses
* @static
* @param {IAddresses} m Addresses message or plain object to encode
* @param {$protobuf.Writer} [w] Writer to encode to
* @returns {$protobuf.Writer} Writer
*/
Addresses.encode = function encode(m, w) {
if (!w)
w = $Writer.create();
if (m.addrs != null && m.addrs.length) {
for (var i = 0; i < m.addrs.length; ++i)
$root.Addresses.Address.encode(m.addrs[i], w.uint32(10).fork()).ldelim();
}
if (m.certifiedRecord != null && Object.hasOwnProperty.call(m, "certifiedRecord"))
$root.Addresses.CertifiedRecord.encode(m.certifiedRecord, w.uint32(18).fork()).ldelim();
return w;
};
/**
* Decodes an Addresses message from the specified reader or buffer.
* @function decode
* @memberof Addresses
* @static
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
* @param {number} [l] Message length if known beforehand
* @returns {Addresses} Addresses
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
Addresses.decode = function decode(r, l) {
if (!(r instanceof $Reader))
r = $Reader.create(r);
var c = l === undefined ? r.len : r.pos + l, m = new $root.Addresses();
while (r.pos < c) {
var t = r.uint32();
switch (t >>> 3) {
case 1:
if (!(m.addrs && m.addrs.length))
m.addrs = [];
m.addrs.push($root.Addresses.Address.decode(r, r.uint32()));
break;
case 2:
m.certifiedRecord = $root.Addresses.CertifiedRecord.decode(r, r.uint32());
break;
default:
r.skipType(t & 7);
break;
}
}
return m;
};
/**
* Creates an Addresses message from a plain object. Also converts values to their respective internal types.
* @function fromObject
* @memberof Addresses
* @static
* @param {Object.<string,*>} d Plain object
* @returns {Addresses} Addresses
*/
Addresses.fromObject = function fromObject(d) {
if (d instanceof $root.Addresses)
return d;
var m = new $root.Addresses();
if (d.addrs) {
if (!Array.isArray(d.addrs))
throw TypeError(".Addresses.addrs: array expected");
m.addrs = [];
for (var i = 0; i < d.addrs.length; ++i) {
if (typeof d.addrs[i] !== "object")
throw TypeError(".Addresses.addrs: object expected");
m.addrs[i] = $root.Addresses.Address.fromObject(d.addrs[i]);
}
}
if (d.certifiedRecord != null) {
if (typeof d.certifiedRecord !== "object")
throw TypeError(".Addresses.certifiedRecord: object expected");
m.certifiedRecord = $root.Addresses.CertifiedRecord.fromObject(d.certifiedRecord);
}
return m;
};
/**
* Creates a plain object from an Addresses message. Also converts values to other types if specified.
* @function toObject
* @memberof Addresses
* @static
* @param {Addresses} m Addresses
* @param {$protobuf.IConversionOptions} [o] Conversion options
* @returns {Object.<string,*>} Plain object
*/
Addresses.toObject = function toObject(m, o) {
if (!o)
o = {};
var d = {};
if (o.arrays || o.defaults) {
d.addrs = [];
}
if (o.defaults) {
d.certifiedRecord = null;
}
if (m.addrs && m.addrs.length) {
d.addrs = [];
for (var j = 0; j < m.addrs.length; ++j) {
d.addrs[j] = $root.Addresses.Address.toObject(m.addrs[j], o);
}
}
if (m.certifiedRecord != null && m.hasOwnProperty("certifiedRecord")) {
d.certifiedRecord = $root.Addresses.CertifiedRecord.toObject(m.certifiedRecord, o);
}
return d;
};
/**
* Converts this Addresses to JSON.
* @function toJSON
* @memberof Addresses
* @instance
* @returns {Object.<string,*>} JSON object
*/
Addresses.prototype.toJSON = function toJSON() {
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
};
Addresses.Address = (function() {
/**
* Properties of an Address.
* @memberof Addresses
* @interface IAddress
* @property {Uint8Array|null} [multiaddr] Address multiaddr
* @property {boolean|null} [isCertified] Address isCertified
*/
/**
* Constructs a new Address.
* @memberof Addresses
* @classdesc Represents an Address.
* @implements IAddress
* @constructor
* @param {Addresses.IAddress=} [p] Properties to set
*/
function Address(p) {
if (p)
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
if (p[ks[i]] != null)
this[ks[i]] = p[ks[i]];
}
/**
* Address multiaddr.
* @member {Uint8Array} multiaddr
* @memberof Addresses.Address
* @instance
*/
Address.prototype.multiaddr = $util.newBuffer([]);
/**
* Address isCertified.
* @member {boolean} isCertified
* @memberof Addresses.Address
* @instance
*/
Address.prototype.isCertified = false;
/**
* Encodes the specified Address message. Does not implicitly {@link Addresses.Address.verify|verify} messages.
* @function encode
* @memberof Addresses.Address
* @static
* @param {Addresses.IAddress} m Address message or plain object to encode
* @param {$protobuf.Writer} [w] Writer to encode to
* @returns {$protobuf.Writer} Writer
*/
Address.encode = function encode(m, w) {
if (!w)
w = $Writer.create();
if (m.multiaddr != null && Object.hasOwnProperty.call(m, "multiaddr"))
w.uint32(10).bytes(m.multiaddr);
if (m.isCertified != null && Object.hasOwnProperty.call(m, "isCertified"))
w.uint32(16).bool(m.isCertified);
return w;
};
/**
* Decodes an Address message from the specified reader or buffer.
* @function decode
* @memberof Addresses.Address
* @static
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
* @param {number} [l] Message length if known beforehand
* @returns {Addresses.Address} Address
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
Address.decode = function decode(r, l) {
if (!(r instanceof $Reader))
r = $Reader.create(r);
var c = l === undefined ? r.len : r.pos + l, m = new $root.Addresses.Address();
while (r.pos < c) {
var t = r.uint32();
switch (t >>> 3) {
case 1:
m.multiaddr = r.bytes();
break;
case 2:
m.isCertified = r.bool();
break;
default:
r.skipType(t & 7);
break;
}
}
return m;
};
/**
* Creates an Address message from a plain object. Also converts values to their respective internal types.
* @function fromObject
* @memberof Addresses.Address
* @static
* @param {Object.<string,*>} d Plain object
* @returns {Addresses.Address} Address
*/
Address.fromObject = function fromObject(d) {
if (d instanceof $root.Addresses.Address)
return d;
var m = new $root.Addresses.Address();
if (d.multiaddr != null) {
if (typeof d.multiaddr === "string")
$util.base64.decode(d.multiaddr, m.multiaddr = $util.newBuffer($util.base64.length(d.multiaddr)), 0);
else if (d.multiaddr.length)
m.multiaddr = d.multiaddr;
}
if (d.isCertified != null) {
m.isCertified = Boolean(d.isCertified);
}
return m;
};
/**
* Creates a plain object from an Address message. Also converts values to other types if specified.
* @function toObject
* @memberof Addresses.Address
* @static
* @param {Addresses.Address} m Address
* @param {$protobuf.IConversionOptions} [o] Conversion options
* @returns {Object.<string,*>} Plain object
*/
Address.toObject = function toObject(m, o) {
if (!o)
o = {};
var d = {};
if (o.defaults) {
if (o.bytes === String)
d.multiaddr = "";
else {
d.multiaddr = [];
if (o.bytes !== Array)
d.multiaddr = $util.newBuffer(d.multiaddr);
}
d.isCertified = false;
}
if (m.multiaddr != null && m.hasOwnProperty("multiaddr")) {
d.multiaddr = o.bytes === String ? $util.base64.encode(m.multiaddr, 0, m.multiaddr.length) : o.bytes === Array ? Array.prototype.slice.call(m.multiaddr) : m.multiaddr;
}
if (m.isCertified != null && m.hasOwnProperty("isCertified")) {
d.isCertified = m.isCertified;
}
return d;
};
/**
* Converts this Address to JSON.
* @function toJSON
* @memberof Addresses.Address
* @instance
* @returns {Object.<string,*>} JSON object
*/
Address.prototype.toJSON = function toJSON() {
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
};
return Address;
})();
Addresses.CertifiedRecord = (function() {
/**
* Properties of a CertifiedRecord.
* @memberof Addresses
* @interface ICertifiedRecord
* @property {number|null} [seq] CertifiedRecord seq
* @property {Uint8Array|null} [raw] CertifiedRecord raw
*/
/**
* Constructs a new CertifiedRecord.
* @memberof Addresses
* @classdesc Represents a CertifiedRecord.
* @implements ICertifiedRecord
* @constructor
* @param {Addresses.ICertifiedRecord=} [p] Properties to set
*/
function CertifiedRecord(p) {
if (p)
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
if (p[ks[i]] != null)
this[ks[i]] = p[ks[i]];
}
/**
* CertifiedRecord seq.
* @member {number} seq
* @memberof Addresses.CertifiedRecord
* @instance
*/
CertifiedRecord.prototype.seq = $util.Long ? $util.Long.fromBits(0,0,true) : 0;
/**
* CertifiedRecord raw.
* @member {Uint8Array} raw
* @memberof Addresses.CertifiedRecord
* @instance
*/
CertifiedRecord.prototype.raw = $util.newBuffer([]);
/**
* Encodes the specified CertifiedRecord message. Does not implicitly {@link Addresses.CertifiedRecord.verify|verify} messages.
* @function encode
* @memberof Addresses.CertifiedRecord
* @static
* @param {Addresses.ICertifiedRecord} m CertifiedRecord message or plain object to encode
* @param {$protobuf.Writer} [w] Writer to encode to
* @returns {$protobuf.Writer} Writer
*/
CertifiedRecord.encode = function encode(m, w) {
if (!w)
w = $Writer.create();
if (m.seq != null && Object.hasOwnProperty.call(m, "seq"))
w.uint32(8).uint64(m.seq);
if (m.raw != null && Object.hasOwnProperty.call(m, "raw"))
w.uint32(18).bytes(m.raw);
return w;
};
/**
* Decodes a CertifiedRecord message from the specified reader or buffer.
* @function decode
* @memberof Addresses.CertifiedRecord
* @static
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
* @param {number} [l] Message length if known beforehand
* @returns {Addresses.CertifiedRecord} CertifiedRecord
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
CertifiedRecord.decode = function decode(r, l) {
if (!(r instanceof $Reader))
r = $Reader.create(r);
var c = l === undefined ? r.len : r.pos + l, m = new $root.Addresses.CertifiedRecord();
while (r.pos < c) {
var t = r.uint32();
switch (t >>> 3) {
case 1:
m.seq = r.uint64();
break;
case 2:
m.raw = r.bytes();
break;
default:
r.skipType(t & 7);
break;
}
}
return m;
};
/**
* Creates a CertifiedRecord message from a plain object. Also converts values to their respective internal types.
* @function fromObject
* @memberof Addresses.CertifiedRecord
* @static
* @param {Object.<string,*>} d Plain object
* @returns {Addresses.CertifiedRecord} CertifiedRecord
*/
CertifiedRecord.fromObject = function fromObject(d) {
if (d instanceof $root.Addresses.CertifiedRecord)
return d;
var m = new $root.Addresses.CertifiedRecord();
if (d.seq != null) {
if ($util.Long)
(m.seq = $util.Long.fromValue(d.seq)).unsigned = true;
else if (typeof d.seq === "string")
m.seq = parseInt(d.seq, 10);
else if (typeof d.seq === "number")
m.seq = d.seq;
else if (typeof d.seq === "object")
m.seq = new $util.LongBits(d.seq.low >>> 0, d.seq.high >>> 0).toNumber(true);
}
if (d.raw != null) {
if (typeof d.raw === "string")
$util.base64.decode(d.raw, m.raw = $util.newBuffer($util.base64.length(d.raw)), 0);
else if (d.raw.length)
m.raw = d.raw;
}
return m;
};
/**
* Creates a plain object from a CertifiedRecord message. Also converts values to other types if specified.
* @function toObject
* @memberof Addresses.CertifiedRecord
* @static
* @param {Addresses.CertifiedRecord} m CertifiedRecord
* @param {$protobuf.IConversionOptions} [o] Conversion options
* @returns {Object.<string,*>} Plain object
*/
CertifiedRecord.toObject = function toObject(m, o) {
if (!o)
o = {};
var d = {};
if (o.defaults) {
if ($util.Long) {
var n = new $util.Long(0, 0, true);
d.seq = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n;
} else
d.seq = o.longs === String ? "0" : 0;
if (o.bytes === String)
d.raw = "";
else {
d.raw = [];
if (o.bytes !== Array)
d.raw = $util.newBuffer(d.raw);
}
}
if (m.seq != null && m.hasOwnProperty("seq")) {
if (typeof m.seq === "number")
d.seq = o.longs === String ? String(m.seq) : m.seq;
else
d.seq = o.longs === String ? $util.Long.prototype.toString.call(m.seq) : o.longs === Number ? new $util.LongBits(m.seq.low >>> 0, m.seq.high >>> 0).toNumber(true) : m.seq;
}
if (m.raw != null && m.hasOwnProperty("raw")) {
d.raw = o.bytes === String ? $util.base64.encode(m.raw, 0, m.raw.length) : o.bytes === Array ? Array.prototype.slice.call(m.raw) : m.raw;
}
return d;
};
/**
* Converts this CertifiedRecord to JSON.
* @function toJSON
* @memberof Addresses.CertifiedRecord
* @instance
* @returns {Object.<string,*>} JSON object
*/
CertifiedRecord.prototype.toJSON = function toJSON() {
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
};
return CertifiedRecord;
})();
return Addresses;
})();
module.exports = $root;

View File

@ -1,27 +0,0 @@
syntax = "proto3";
message Addresses {
// Address represents a single multiaddr.
message Address {
bytes multiaddr = 1;
// Flag to indicate if the address comes from a certified source.
optional bool isCertified = 2;
}
// CertifiedRecord contains a serialized signed PeerRecord used to
// populate the signedAddrs list.
message CertifiedRecord {
// The Seq counter from the signed PeerRecord envelope
uint64 seq = 1;
// The serialized bytes of the SignedEnvelope containing the PeerRecord.
bytes raw = 2;
}
// The known multiaddrs.
repeated Address addrs = 1;
// The most recently received signed PeerRecord.
CertifiedRecord certified_record = 2;
}

View File

@ -1,59 +0,0 @@
import * as $protobuf from "protobufjs";
/** Properties of a Protocols. */
export interface IProtocols {
/** Protocols protocols */
protocols?: (string[]|null);
}
/** Represents a Protocols. */
export class Protocols implements IProtocols {
/**
* Constructs a new Protocols.
* @param [p] Properties to set
*/
constructor(p?: IProtocols);
/** Protocols protocols. */
public protocols: string[];
/**
* Encodes the specified Protocols message. Does not implicitly {@link Protocols.verify|verify} messages.
* @param m Protocols message or plain object to encode
* @param [w] Writer to encode to
* @returns Writer
*/
public static encode(m: IProtocols, w?: $protobuf.Writer): $protobuf.Writer;
/**
* Decodes a Protocols message from the specified reader or buffer.
* @param r Reader or buffer to decode from
* @param [l] Message length if known beforehand
* @returns Protocols
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Protocols;
/**
* Creates a Protocols message from a plain object. Also converts values to their respective internal types.
* @param d Plain object
* @returns Protocols
*/
public static fromObject(d: { [k: string]: any }): Protocols;
/**
* Creates a plain object from a Protocols message. Also converts values to other types if specified.
* @param m Protocols
* @param [o] Conversion options
* @returns Plain object
*/
public static toObject(m: Protocols, o?: $protobuf.IConversionOptions): { [k: string]: any };
/**
* Converts this Protocols to JSON.
* @returns JSON object
*/
public toJSON(): { [k: string]: any };
}

View File

@ -1,157 +0,0 @@
/*eslint-disable*/
"use strict";
var $protobuf = require("protobufjs/minimal");
// Common aliases
var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
// Exported root namespace
var $root = $protobuf.roots["libp2p-proto-book"] || ($protobuf.roots["libp2p-proto-book"] = {});
$root.Protocols = (function() {
/**
* Properties of a Protocols.
* @exports IProtocols
* @interface IProtocols
* @property {Array.<string>|null} [protocols] Protocols protocols
*/
/**
* Constructs a new Protocols.
* @exports Protocols
* @classdesc Represents a Protocols.
* @implements IProtocols
* @constructor
* @param {IProtocols=} [p] Properties to set
*/
function Protocols(p) {
this.protocols = [];
if (p)
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
if (p[ks[i]] != null)
this[ks[i]] = p[ks[i]];
}
/**
* Protocols protocols.
* @member {Array.<string>} protocols
* @memberof Protocols
* @instance
*/
Protocols.prototype.protocols = $util.emptyArray;
/**
* Encodes the specified Protocols message. Does not implicitly {@link Protocols.verify|verify} messages.
* @function encode
* @memberof Protocols
* @static
* @param {IProtocols} m Protocols message or plain object to encode
* @param {$protobuf.Writer} [w] Writer to encode to
* @returns {$protobuf.Writer} Writer
*/
Protocols.encode = function encode(m, w) {
if (!w)
w = $Writer.create();
if (m.protocols != null && m.protocols.length) {
for (var i = 0; i < m.protocols.length; ++i)
w.uint32(10).string(m.protocols[i]);
}
return w;
};
/**
* Decodes a Protocols message from the specified reader or buffer.
* @function decode
* @memberof Protocols
* @static
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
* @param {number} [l] Message length if known beforehand
* @returns {Protocols} Protocols
* @throws {Error} If the payload is not a reader or valid buffer
* @throws {$protobuf.util.ProtocolError} If required fields are missing
*/
Protocols.decode = function decode(r, l) {
if (!(r instanceof $Reader))
r = $Reader.create(r);
var c = l === undefined ? r.len : r.pos + l, m = new $root.Protocols();
while (r.pos < c) {
var t = r.uint32();
switch (t >>> 3) {
case 1:
if (!(m.protocols && m.protocols.length))
m.protocols = [];
m.protocols.push(r.string());
break;
default:
r.skipType(t & 7);
break;
}
}
return m;
};
/**
* Creates a Protocols message from a plain object. Also converts values to their respective internal types.
* @function fromObject
* @memberof Protocols
* @static
* @param {Object.<string,*>} d Plain object
* @returns {Protocols} Protocols
*/
Protocols.fromObject = function fromObject(d) {
if (d instanceof $root.Protocols)
return d;
var m = new $root.Protocols();
if (d.protocols) {
if (!Array.isArray(d.protocols))
throw TypeError(".Protocols.protocols: array expected");
m.protocols = [];
for (var i = 0; i < d.protocols.length; ++i) {
m.protocols[i] = String(d.protocols[i]);
}
}
return m;
};
/**
* Creates a plain object from a Protocols message. Also converts values to other types if specified.
* @function toObject
* @memberof Protocols
* @static
* @param {Protocols} m Protocols
* @param {$protobuf.IConversionOptions} [o] Conversion options
* @returns {Object.<string,*>} Plain object
*/
Protocols.toObject = function toObject(m, o) {
if (!o)
o = {};
var d = {};
if (o.arrays || o.defaults) {
d.protocols = [];
}
if (m.protocols && m.protocols.length) {
d.protocols = [];
for (var j = 0; j < m.protocols.length; ++j) {
d.protocols[j] = m.protocols[j];
}
}
return d;
};
/**
* Converts this Protocols to JSON.
* @function toJSON
* @memberof Protocols
* @instance
* @returns {Object.<string,*>} JSON object
*/
Protocols.prototype.toJSON = function toJSON() {
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
};
return Protocols;
})();
module.exports = $root;

View File

@ -1,5 +0,0 @@
syntax = "proto3";
message Protocols {
repeated string protocols = 1;
}

View File

@ -1,171 +1,237 @@
'use strict'
const debug = require('debug')
const errcode = require('err-code')
const { codes } = require('../errors')
const PeerId = require('peer-id')
/**
* @typedef {import('./types').PeerStore} PeerStore
* @typedef {import('./types').ProtoBook} ProtoBook
*/
const log = Object.assign(debug('libp2p:peer-store:proto-book'), {
error: debug('libp2p:peer-store:proto-book:err')
})
const errcode = require('err-code')
const PeerId = require('peer-id')
const Book = require('./book')
const {
codes: { ERR_INVALID_PARAMETERS }
} = require('../errors')
const EVENT_NAME = 'change:protocols'
/**
* @typedef {import('./')} PeerStore
* @implements {ProtoBook}
*/
class PersistentProtoBook {
/**
* @param {PeerStore["emit"]} emit
* @param {import('./types').Store} store
*/
constructor (emit, store) {
this._emit = emit
this._store = store
}
/**
* @extends {Book}
*
* @fires ProtoBook#change:protocols
* @param {PeerId} peerId
*/
class ProtoBook extends Book {
async get (peerId) {
log('get wait for read lock')
const release = await this._store.lock.readLock()
log('get got read lock')
try {
const peer = await this._store.load(peerId)
return peer.protocols
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
} finally {
log('get release read lock')
release()
}
return []
}
/**
* The ProtoBook is responsible for keeping the known supported
* protocols of a peer.
*
* @class
* @param {PeerStore} peerStore
* @param {PeerId} peerId
* @param {string[]} protocols
*/
constructor (peerStore) {
/**
* PeerStore Event emitter, used by the ProtoBook to emit:
* "change:protocols" - emitted when the known protocols of a peer change.
*/
super({
peerStore,
eventName: 'change:protocols',
eventProperty: 'protocols',
eventTransformer: (data) => Array.from(data)
async set (peerId, protocols) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
if (!Array.isArray(protocols)) {
log.error('protocols must be provided to store data')
throw errcode(new Error('protocols must be provided'), codes.ERR_INVALID_PARAMETERS)
}
log('set await write lock')
const release = await this._store.lock.writeLock()
log('set got write lock')
let updatedPeer
try {
try {
const peer = await this._store.load(peerId)
if (new Set([
...protocols
]).size === peer.protocols.length) {
return
}
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
}
updatedPeer = await this._store.patchOrCreate(peerId, {
protocols
})
/**
* Map known peers to their known protocols.
*
* @type {Map<string, Set<string>>}
*/
this.data = new Map()
log(`stored provided protocols for ${peerId.toB58String()}`)
} finally {
log('set release write lock')
release()
}
this._emit(EVENT_NAME, { peerId, protocols: updatedPeer.protocols })
}
/**
* Set known protocols of a provided peer.
* If the peer was not known before, it will be added.
*
* @override
* @param {PeerId} peerId
* @param {string[]} protocols
* @returns {ProtoBook}
*/
set (peerId, protocols) {
async add (peerId, protocols) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
if (!protocols) {
if (!Array.isArray(protocols)) {
log.error('protocols must be provided to store data')
throw errcode(new Error('protocols must be provided'), ERR_INVALID_PARAMETERS)
throw errcode(new Error('protocols must be provided'), codes.ERR_INVALID_PARAMETERS)
}
const id = peerId.toB58String()
const recSet = this.data.get(id)
const newSet = new Set(protocols)
log('add await write lock')
const release = await this._store.lock.writeLock()
log('add got write lock')
/**
* @param {Set<string>} a
* @param {Set<string>} b
*/
const isSetEqual = (a, b) => a.size === b.size && [...a].every(value => b.has(value))
let updatedPeer
// Already knows the peer and the recorded protocols are the same?
// If yes, no changes needed!
if (recSet && isSetEqual(recSet, newSet)) {
log(`the protocols provided to store are equal to the already stored for ${id}`)
return this
try {
try {
const peer = await this._store.load(peerId)
if (new Set([
...peer.protocols,
...protocols
]).size === peer.protocols.length) {
return
}
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
}
this._setData(peerId, newSet)
log(`stored provided protocols for ${id}`)
updatedPeer = await this._store.mergeOrCreate(peerId, {
protocols
})
return this
log(`added provided protocols for ${peerId.toB58String()}`)
} finally {
log('add release write lock')
release()
}
this._emit(EVENT_NAME, { peerId, protocols: updatedPeer.protocols })
}
/**
* Adds known protocols of a provided peer.
* If the peer was not known before, it will be added.
*
* @param {PeerId} peerId
* @param {string[]} protocols
* @returns {ProtoBook}
*/
add (peerId, protocols) {
async remove (peerId, protocols) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
if (!protocols) {
if (!Array.isArray(protocols)) {
log.error('protocols must be provided to store data')
throw errcode(new Error('protocols must be provided'), ERR_INVALID_PARAMETERS)
throw errcode(new Error('protocols must be provided'), codes.ERR_INVALID_PARAMETERS)
}
const id = peerId.toB58String()
const recSet = this.data.get(id) || new Set()
const newSet = new Set([...recSet, ...protocols]) // Set Union
log('remove await write lock')
const release = await this._store.lock.writeLock()
log('remove got write lock')
// Any new protocol added?
if (recSet.size === newSet.size) {
log(`the protocols provided to store are already stored for ${id}`)
return this
let updatedPeer
try {
try {
const peer = await this._store.load(peerId)
const protocolSet = new Set(peer.protocols)
for (const protocol of protocols) {
protocolSet.delete(protocol)
}
this._setData(peerId, newSet)
log(`added provided protocols for ${id}`)
if (peer.protocols.length === protocolSet.size) {
return
}
return this
protocols = Array.from(protocolSet)
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
}
updatedPeer = await this._store.patchOrCreate(peerId, {
protocols
})
} finally {
log('remove release write lock')
release()
}
this._emit(EVENT_NAME, { peerId, protocols: updatedPeer.protocols })
}
/**
* Removes known protocols of a provided peer.
* If the protocols did not exist before, nothing will be done.
*
* @param {PeerId} peerId
* @param {string[]} protocols
* @returns {ProtoBook}
*/
remove (peerId, protocols) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
async delete (peerId) {
log('delete await write lock')
const release = await this._store.lock.writeLock()
log('delete got write lock')
let has
try {
has = await this._store.has(peerId)
await this._store.patchOrCreate(peerId, {
protocols: []
})
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
} finally {
log('delete release write lock')
release()
}
if (!protocols) {
log.error('protocols must be provided to store data')
throw errcode(new Error('protocols must be provided'), ERR_INVALID_PARAMETERS)
if (has) {
this._emit(EVENT_NAME, { peerId, protocols: [] })
}
const id = peerId.toB58String()
const recSet = this.data.get(id)
if (recSet) {
const newSet = new Set([
...recSet
].filter((p) => !protocols.includes(p)))
// Any protocol removed?
if (recSet.size === newSet.size) {
return this
}
this._setData(peerId, newSet)
log(`removed provided protocols for ${id}`)
}
return this
}
}
module.exports = ProtoBook
module.exports = PersistentProtoBook
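Since the *Book methods are now async, callers have to await them; a hedged usage sketch where the peerStore and peerId variables are illustrative:
// Assumes a peer store whose protoBook is the PersistentProtoBook above
await peerStore.protoBook.set(peerId, ['/echo/1.0.0'])
await peerStore.protoBook.add(peerId, ['/chat/1.0.0'])
const protocols = await peerStore.protoBook.get(peerId) // ['/chat/1.0.0', '/echo/1.0.0']
await peerStore.protoBook.remove(peerId, ['/echo/1.0.0'])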

250
src/peer-store/store.js Normal file
View File

@ -0,0 +1,250 @@
'use strict'
const debug = require('debug')
const PeerId = require('peer-id')
const errcode = require('err-code')
const { codes } = require('../errors')
const { Key } = require('interface-datastore/key')
const { base32 } = require('multiformats/bases/base32')
const { keys: { unmarshalPublicKey, marshalPublicKey } } = require('libp2p-crypto')
const { Multiaddr } = require('multiaddr')
const { Peer: PeerPB } = require('./pb/peer')
// @ts-expect-error no types
const mortice = require('mortice')
const { equals: uint8arrayEquals } = require('uint8arrays/equals')
const log = Object.assign(debug('libp2p:peer-store:store'), {
error: debug('libp2p:peer-store:store:err')
})
/**
* @typedef {import('./types').PeerStore} PeerStore
* @typedef {import('./types').EventName} EventName
* @typedef {import('./types').Peer} Peer
*/
const NAMESPACE_COMMON = '/peers/'
class PersistentStore {
/**
* @param {import('interface-datastore').Datastore} datastore
*/
constructor (datastore) {
this._datastore = datastore
this.lock = mortice('peer-store', {
singleProcess: true
})
}
/**
* @param {PeerId} peerId
* @returns {Key}
*/
_peerIdToDatastoreKey (peerId) {
if (!PeerId.isPeerId(peerId)) {
log.error('peerId must be an instance of peer-id to store data')
throw errcode(new Error('peerId must be an instance of peer-id'), codes.ERR_INVALID_PARAMETERS)
}
const b32key = peerId.toString()
return new Key(`${NAMESPACE_COMMON}${b32key}`)
}
/**
* @param {PeerId} peerId
*/
async has (peerId) {
return this._datastore.has(this._peerIdToDatastoreKey(peerId))
}
/**
* @param {PeerId} peerId
*/
async delete (peerId) {
await this._datastore.delete(this._peerIdToDatastoreKey(peerId))
}
/**
* @param {PeerId} peerId
* @returns {Promise<import('./types').Peer>} peer
*/
async load (peerId) {
const buf = await this._datastore.get(this._peerIdToDatastoreKey(peerId))
const peer = PeerPB.decode(buf)
const pubKey = peer.pubKey ? unmarshalPublicKey(peer.pubKey) : peerId.pubKey
const metadata = new Map()
for (const meta of peer.metadata) {
metadata.set(meta.key, meta.value)
}
return {
...peer,
id: peerId,
pubKey,
addresses: peer.addresses.map(({ multiaddr, isCertified }) => ({
multiaddr: new Multiaddr(multiaddr),
isCertified: isCertified || false
})),
metadata,
peerRecordEnvelope: peer.peerRecordEnvelope || undefined
}
}
/**
* @param {Peer} peer
*/
async save (peer) {
if (peer.pubKey != null && peer.id.pubKey != null && !uint8arrayEquals(peer.pubKey.bytes, peer.id.pubKey.bytes)) {
log.error('peer publicKey bytes do not match peer id publicKey bytes')
throw errcode(new Error('publicKey bytes do not match peer id publicKey bytes'), codes.ERR_INVALID_PARAMETERS)
}
const buf = PeerPB.encode({
addresses: peer.addresses.sort((a, b) => {
return a.multiaddr.toString().localeCompare(b.multiaddr.toString())
}).map(({ multiaddr, isCertified }) => ({
multiaddr: multiaddr.bytes,
isCertified
})),
protocols: peer.protocols.sort(),
pubKey: peer.pubKey ? marshalPublicKey(peer.pubKey) : undefined,
metadata: [...peer.metadata.keys()].sort().map(key => ({ key, value: peer.metadata.get(key) })),
peerRecordEnvelope: peer.peerRecordEnvelope
}).finish()
await this._datastore.put(this._peerIdToDatastoreKey(peer.id), buf)
return this.load(peer.id)
}
/**
* @param {PeerId} peerId
* @param {Partial<Peer>} data
*/
async patch (peerId, data) {
const peer = await this.load(peerId)
return await this._patch(peerId, data, peer)
}
/**
* @param {PeerId} peerId
* @param {Partial<Peer>} data
*/
async patchOrCreate (peerId, data) {
/** @type {Peer} */
let peer
try {
peer = await this.load(peerId)
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
peer = { id: peerId, addresses: [], protocols: [], metadata: new Map() }
}
return await this._patch(peerId, data, peer)
}
/**
* @param {PeerId} peerId
* @param {Partial<Peer>} data
* @param {Peer} peer
*/
async _patch (peerId, data, peer) {
return await this.save({
...peer,
...data,
id: peerId
})
}
/**
* @param {PeerId} peerId
* @param {Partial<Peer>} data
*/
async merge (peerId, data) {
const peer = await this.load(peerId)
return this._merge(peerId, data, peer)
}
/**
* @param {PeerId} peerId
* @param {Partial<Peer>} data
*/
async mergeOrCreate (peerId, data) {
/** @type {Peer} */
let peer
try {
peer = await this.load(peerId)
} catch (/** @type {any} */ err) {
if (err.code !== codes.ERR_NOT_FOUND) {
throw err
}
peer = { id: peerId, addresses: [], protocols: [], metadata: new Map() }
}
return await this._merge(peerId, data, peer)
}
/**
* @param {PeerId} peerId
* @param {Partial<Peer>} data
* @param {Peer} peer
*/
async _merge (peerId, data, peer) {
// if the peer has certified addresses, use those in
// favour of the supplied versions
/** @type {Map<string, boolean>} */
const addresses = new Map()
;(data.addresses || []).forEach(addr => {
addresses.set(addr.multiaddr.toString(), addr.isCertified)
})
peer.addresses.forEach(({ multiaddr, isCertified }) => {
const addrStr = multiaddr.toString()
addresses.set(addrStr, Boolean(addresses.get(addrStr) || isCertified))
})
return await this.save({
id: peerId,
addresses: Array.from(addresses.entries()).map(([addrStr, isCertified]) => {
return {
multiaddr: new Multiaddr(addrStr),
isCertified
}
}),
protocols: Array.from(new Set([
...(peer.protocols || []),
...(data.protocols || [])
])),
metadata: new Map([
...(peer.metadata ? peer.metadata.entries() : []),
...(data.metadata ? data.metadata.entries() : [])
]),
pubKey: data.pubKey || (peer != null ? peer.pubKey : undefined),
peerRecordEnvelope: data.peerRecordEnvelope || (peer != null ? peer.peerRecordEnvelope : undefined)
})
}
async * all () {
for await (const key of this._datastore.queryKeys({
prefix: NAMESPACE_COMMON
})) {
// /peers/${peer-id-as-libp2p-key-cid-string-in-base-32}
const base32Str = key.toString().split('/')[2]
const buf = base32.decode(base32Str)
yield this.load(PeerId.createFromBytes(buf))
}
}
}
module.exports = PersistentStore
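A minimal usage sketch of the store above, assuming an in-memory datastore from datastore-core; the import paths and peer id creation are illustrative and not part of this commit:
'use strict'
const { MemoryDatastore } = require('datastore-core/memory') // assumed import path
const PeerId = require('peer-id')
const PersistentStore = require('./store') // the module above

async function main () {
  const store = new PersistentStore(new MemoryDatastore())
  const peerId = await PeerId.create({ keyType: 'Ed25519' })

  // merge some protocols into the (possibly not yet existing) peer entry
  await store.mergeOrCreate(peerId, { protocols: ['/echo/1.0.0'] })

  // read the full peer entry back out of the datastore
  const peer = await store.load(peerId)
  console.log(peer.protocols) // ['/echo/1.0.0']
}

main().catch(console.error)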

245
src/peer-store/types.ts Normal file
View File

@ -0,0 +1,245 @@
import type PeerId from 'peer-id'
import type { Multiaddr } from 'multiaddr'
import type Envelope from '../record/envelope'
import type { PublicKey } from 'libp2p-interfaces/src/keys/types'
export interface Address {
/**
* Peer multiaddr
*/
multiaddr: Multiaddr
/**
* Obtained from a signed peer record
*/
isCertified: boolean
}
export interface Peer {
/**
* Peer's peer-id instance
*/
id: PeerId
/**
* Peer's addresses containing its multiaddrs and metadata
*/
addresses: Address[]
/**
* Peer's supported protocols
*/
protocols: string[]
/**
* Peer's metadata map
*/
metadata: Map<string, Uint8Array>
/**
 * May be set if this peer's key is an RSA key, as RSA public keys cannot be derived from the peer id
*/
pubKey?: PublicKey
/**
* The last peer record envelope received
*/
peerRecordEnvelope?: Uint8Array
}
export interface CertifiedRecord {
raw: Uint8Array
seqNumber: number
}
export interface AddressBookEntry {
addresses: Address[]
record: CertifiedRecord
}
export interface Book<Type> {
/**
* Get the known data of a peer
*/
get: (peerId: PeerId) => Promise<Type>
/**
* Set the known data of a peer
*/
set: (peerId: PeerId, data: Type) => Promise<void>
/**
* Remove the known data of a peer
*/
delete: (peerId: PeerId) => Promise<void>
}
/**
* AddressBook containing a map of peerIdStr to Address.
*/
export interface AddressBook {
/**
* ConsumePeerRecord adds addresses from a signed peer record contained in a record envelope.
* This will return a boolean that indicates if the record was successfully processed and added
* into the AddressBook
*/
consumePeerRecord: (envelope: Envelope) => Promise<boolean>
/**
* Get the raw Envelope for a peer. Returns
* undefined if no Envelope is found
*/
getRawEnvelope: (peerId: PeerId) => Promise<Uint8Array | undefined>
/**
* Get an Envelope containing a PeerRecord for the given peer.
* Returns undefined if no record exists.
*/
getPeerRecord: (peerId: PeerId) => Promise<Envelope | undefined>
/**
* Add known addresses of a provided peer.
* If the peer is not known, it is set with the given addresses.
*/
add: (peerId: PeerId, multiaddrs: Multiaddr[]) => Promise<void>
/**
* Set the known addresses of a peer
*/
set: (peerId: PeerId, data: Multiaddr[]) => Promise<void>
/**
* Return the known addresses of a peer
*/
get: (peerId: PeerId) => Promise<Address[]>
/**
* Get the known multiaddrs for a given peer. All returned multiaddrs
* will include the encapsulated `PeerId` of the peer.
*/
getMultiaddrsForPeer: (peerId: PeerId, addressSorter?: (ms: Address[]) => Address[]) => Promise<Multiaddr[]>
}
/**
* KeyBook containing a map of peerIdStr to their PeerId with public keys.
*/
export interface KeyBook {
/**
* Get the known data of a peer
*/
get: (peerId: PeerId) => Promise<PublicKey | undefined>
/**
* Set the known data of a peer
*/
set: (peerId: PeerId, data: PublicKey) => Promise<void>
/**
* Remove the known data of a peer
*/
delete: (peerId: PeerId) => Promise<void>
}
/**
* MetadataBook containing a map of peerIdStr to their metadata Map.
*/
export interface MetadataBook extends Book<Map<string, Uint8Array>> {
/**
* Set a specific metadata value
*/
setValue: (peerId: PeerId, key: string, value: Uint8Array) => Promise<void>
/**
 * Get a specific metadata value, if it exists
*/
getValue: (peerId: PeerId, key: string) => Promise<Uint8Array | undefined>
/**
* Deletes the provided peer metadata key from the book
*/
deleteValue: (peerId: PeerId, key: string) => Promise<void>
}
/**
* ProtoBook containing a map of peerIdStr to supported protocols.
*/
export interface ProtoBook extends Book<string[]> {
/**
* Adds known protocols of a provided peer.
* If the peer was not known before, it will be added.
*/
add: (peerId: PeerId, protocols: string[]) => Promise<void>
/**
* Removes known protocols of a provided peer.
* If the protocols did not exist before, nothing will be done.
*/
remove: (peerId: PeerId, protocols: string[]) => Promise<void>
}
export interface PeerProtocolsChangeEvent {
peerId: PeerId
protocols: string[]
}
export interface PeerMultiaddrsChangeEvent {
peerId: PeerId
multiaddrs: Multiaddr[]
}
export interface PeerPublicKeyChangeEvent {
peerId: PeerId
pubKey?: PublicKey
}
export interface PeerMetadataChangeEvent {
peerId: PeerId
metadata: Map<string, Uint8Array>
}
export type EventName = 'peer' | 'change:protocols' | 'change:multiaddrs' | 'change:pubkey' | 'change:metadata'
export interface PeerStoreEvents {
'peer': (event: PeerId) => void
'change:protocols': (event: PeerProtocolsChangeEvent) => void
'change:multiaddrs': (event: PeerMultiaddrsChangeEvent) => void
'change:pubkey': (event: PeerPublicKeyChangeEvent) => void
'change:metadata': (event: PeerMetadataChangeEvent) => void
}
export interface PeerStore {
addressBook: AddressBook
keyBook: KeyBook
metadataBook: MetadataBook
protoBook: ProtoBook
getPeers: () => AsyncIterable<Peer>
delete: (peerId: PeerId) => Promise<void>
has: (peerId: PeerId) => Promise<boolean>
get: (peerId: PeerId) => Promise<Peer>
on: <U extends keyof PeerStoreEvents>(
event: U, listener: PeerStoreEvents[U]
) => this
once: <U extends keyof PeerStoreEvents>(
event: U, listener: PeerStoreEvents[U]
) => this
emit: <U extends keyof PeerStoreEvents>(
event: U, ...args: Parameters<PeerStoreEvents[U]>
) => boolean
}
export interface Store {
has: (peerId: PeerId) => Promise<boolean>
save: (peer: Peer) => Promise<Peer>
load: (peerId: PeerId) => Promise<Peer>
merge: (peerId: PeerId, data: Partial<Peer>) => Promise<Peer>
mergeOrCreate: (peerId: PeerId, data: Partial<Peer>) => Promise<Peer>
patch: (peerId: PeerId, data: Partial<Peer>) => Promise<Peer>
patchOrCreate: (peerId: PeerId, data: Partial<Peer>) => Promise<Peer>
all: () => AsyncIterable<Peer>
lock: {
readLock: () => Promise<() => void>
writeLock: () => Promise<() => void>
}
}
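A hedged usage sketch of the interfaces declared above; every book call now returns a promise. The require path for PeerStore and the surrounding function are assumptions for illustration (the tests in this commit construct the store the same way), and only methods declared in this file are used.

const PeerStore = require('libp2p/src/peer-store') // path assumed for illustration
const { MemoryDatastore } = require('datastore-core/memory')

async function example (peerId, remotePeerId, remoteAddrs) {
  // peer data is read from and written to the supplied datastore as it arrives
  const peerStore = new PeerStore({ peerId, datastore: new MemoryDatastore() })

  // change events still fire as data is updated
  peerStore.on('change:protocols', ({ peerId: id, protocols }) => {
    console.log(`${id.toB58String()} now supports`, protocols)
  })

  // every book method is asynchronous and must be awaited
  await peerStore.addressBook.add(remotePeerId, remoteAddrs)
  await peerStore.protoBook.set(remotePeerId, ['/my-app/1.0.0'])

  const addresses = await peerStore.addressBook.get(remotePeerId)
  console.log(addresses.map(a => a.multiaddr.toString()))

  // iterate over every stored peer
  for await (const peer of peerStore.getPeers()) {
    console.log('known peer', peer.id.toB58String())
  }
}

The low-level Store interface also exposes a read/write lock where each call resolves to a release function; a sketch of the expected usage, assuming a `store` implementing the interface above:

async function withWriteLock (store, fn) {
  const release = await store.lock.writeLock()
  try {
    return await fn()
  } finally {
    release()
  }
}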

View File

@ -63,6 +63,9 @@ async function ping (node, peer) {
*/
function mount (node) {
node.handle(`/${node._config.protocolPrefix}/${PROTOCOL_NAME}/${PROTOCOL_VERSION}`, ({ stream }) => pipe(stream, stream))
.catch(err => {
log.error(err)
})
}
/**
@ -72,6 +75,9 @@ function mount (node) {
*/
function unmount (node) {
node.unhandle(`/${node._config.protocolPrefix}/${PROTOCOL_NAME}/${PROTOCOL_VERSION}`)
.catch(err => {
log.error(err)
})
}
exports = module.exports = ping
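A hedged sketch of the pattern above: `handle`/`unhandle` now return promises, so synchronous call sites attach a catch handler (as the mount/unmount hooks do) while asynchronous call sites can simply await them. The import form of it-pipe is assumed to match the rest of this codebase.

const pipe = require('it-pipe')

async function mountEcho (node) {
  // registering a handler now updates the protoBook, so it is asynchronous
  await node.handle('/echo/1.0.0', ({ stream }) => pipe(stream, stream))
}

async function unmountEcho (node) {
  await node.unhandle('/echo/1.0.0')
}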

View File

@ -11,7 +11,7 @@ const {
} = require('./consts')
/**
* @typedef {import('../../peer-store/address-book.js').Address} Address
* @typedef {import('../../peer-store/types').Address} Address
* @typedef {import('libp2p-interfaces/src/record/types').Record} Record
*/

View File

@ -19,7 +19,7 @@ async function updateSelfPeerRecord (libp2p) {
multiaddrs: libp2p.multiaddrs
})
const envelope = await Envelope.seal(peerRecord, libp2p.peerId)
libp2p.peerStore.addressBook.consumePeerRecord(envelope)
await libp2p.peerStore.addressBook.consumePeerRecord(envelope)
}
module.exports.updateSelfPeerRecord = updateSelfPeerRecord

View File

@ -13,7 +13,7 @@ const Topology = require('libp2p-interfaces/src/topology')
/**
* @typedef {import('peer-id')} PeerId
* @typedef {import('./peer-store')} PeerStore
* @typedef {import('./peer-store/types').PeerStore} PeerStore
* @typedef {import('./connection-manager')} ConnectionManager
* @typedef {import('libp2p-interfaces/src/connection').Connection} Connection
* @typedef {import('./').HandlerProps} HandlerProps
@ -82,9 +82,9 @@ class Registrar {
* Register handlers for a set of multicodecs given
*
* @param {Topology} topology - protocol topology
* @returns {string} registrar identifier
* @returns {Promise<string>} registrar identifier
*/
register (topology) {
async register (topology) {
if (!Topology.isTopology(topology)) {
log.error('topology must be an instance of interfaces/topology')
throw errcode(new Error('topology must be an instance of interfaces/topology'), ERR_INVALID_PARAMETERS)
@ -96,7 +96,7 @@ class Registrar {
this.topologies.set(id, topology)
// Set registrar
topology.registrar = this
await topology.setRegistrar(this)
return id
}
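A short hedged sketch of the change above: `registrar.register` now resolves to the registrar identifier because `topology.setRegistrar` is awaited. The surrounding function and the topology construction are assumed to happen elsewhere.

async function trackProtocol (libp2p, topology) {
  // register() now returns Promise<string>
  const id = await libp2p.registrar.register(topology)
  return id // the registrar identifier for this topology
}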

View File

@ -288,7 +288,9 @@ class Upgrader {
} finally {
this.onConnectionEnd(connection)
}
})()
})().catch(err => {
log.error(err)
})
}
return Reflect.set(...args)

View File

@ -10,6 +10,10 @@ const { baseOptions } = require('./utils')
describe('Protocol prefix is configurable', () => {
let libp2p
afterEach(async () => {
libp2p && await libp2p.stop()
})
it('protocolPrefix is provided', async () => {
const testProtocol = 'test-protocol'
libp2p = await create(mergeOptions(baseOptions, {
@ -17,31 +21,27 @@ describe('Protocol prefix is configurable', () => {
protocolPrefix: testProtocol
}
}))
await libp2p.start()
const protocols = libp2p.peerStore.protoBook.get(libp2p.peerId);
[
const protocols = await libp2p.peerStore.protoBook.get(libp2p.peerId)
expect(protocols).to.include.members([
'/libp2p/circuit/relay/0.1.0',
`/${testProtocol}/id/1.0.0`,
`/${testProtocol}/id/push/1.0.0`,
`/${testProtocol}/ping/1.0.0`
].forEach((i, idx) => {
expect(protocols[idx]).equals(i)
})
await libp2p.stop()
])
})
it('protocolPrefix is not provided', async () => {
libp2p = await create(baseOptions)
await libp2p.start()
const protocols = libp2p.peerStore.protoBook.get(libp2p.peerId);
[
const protocols = await libp2p.peerStore.protoBook.get(libp2p.peerId)
expect(protocols).to.include.members([
'/libp2p/circuit/relay/0.1.0',
'/ipfs/id/1.0.0',
'/ipfs/id/push/1.0.0',
'/ipfs/ping/1.0.0'
].forEach((i, idx) => {
expect(protocols[idx]).equals(i)
})
await libp2p.stop()
])
})
})

View File

@ -71,7 +71,7 @@ describe('Connection Manager', () => {
sinon.spy(libp2p.connectionManager, 'emit')
sinon.spy(remoteLibp2p.connectionManager, 'emit')
libp2p.peerStore.addressBook.set(remoteLibp2p.peerId, remoteLibp2p.multiaddrs)
await libp2p.peerStore.addressBook.set(remoteLibp2p.peerId, remoteLibp2p.multiaddrs)
await libp2p.dial(remoteLibp2p.peerId)
// check connect event
@ -219,9 +219,9 @@ describe('libp2p.connections', () => {
})
// Populate PeerStore before starting
libp2p.peerStore.addressBook.set(nodes[0].peerId, nodes[0].multiaddrs)
libp2p.peerStore.addressBook.set(nodes[1].peerId, nodes[1].multiaddrs)
libp2p.peerStore.protoBook.set(nodes[1].peerId, ['/protocol-min-conns'])
await libp2p.peerStore.addressBook.set(nodes[0].peerId, nodes[0].multiaddrs)
await libp2p.peerStore.addressBook.set(nodes[1].peerId, nodes[1].multiaddrs)
await libp2p.peerStore.protoBook.set(nodes[1].peerId, ['/protocol-min-conns'])
await libp2p.start()

View File

@ -77,7 +77,7 @@ describe('Connection Manager', () => {
const value = Math.random()
spies.set(value, spy)
libp2p.connectionManager.setPeerValue(connection.remotePeer, value)
libp2p.connectionManager.onConnect(connection)
await libp2p.connectionManager.onConnect(connection)
}))
// get the lowest value
@ -109,8 +109,8 @@ describe('Connection Manager', () => {
const spy = sinon.spy()
await Promise.all([...new Array(max + 1)].map(async () => {
const connection = await mockConnection()
sinon.stub(connection, 'close').callsFake(() => spy()) // eslint-disable-line
libp2p.connectionManager.onConnect(connection)
sinon.stub(connection, 'close').callsFake(async () => spy()) // eslint-disable-line
await libp2p.connectionManager.onConnect(connection)
}))
expect(libp2p.connectionManager._maybeDisconnectOne).to.have.property('callCount', 1)

View File

@ -291,11 +291,11 @@ describe('content-routing', () => {
yield result
})
expect(node.peerStore.addressBook.get(providerPeerId)).to.not.be.ok()
expect(await node.peerStore.has(providerPeerId)).to.not.be.ok()
await drain(node.contentRouting.findProviders('a cid'))
expect(node.peerStore.addressBook.get(providerPeerId)).to.deep.include({
expect(await node.peerStore.addressBook.get(providerPeerId)).to.deep.include({
isCertified: false,
multiaddr: result.multiaddrs[0]
})
@ -377,7 +377,7 @@ describe('content-routing', () => {
await drain(node.contentRouting.findProviders('a cid'))
expect(node.peerStore.addressBook.get(providerPeerId)).to.deep.include({
expect(await node.peerStore.addressBook.get(providerPeerId)).to.deep.include({
isCertified: false,
multiaddr: result1.multiaddrs[0]
}).and.to.deep.include({

View File

@ -45,8 +45,8 @@ describe('DHT subsystem operates correctly', () => {
remoteLibp2p.start()
])
libp2p.peerStore.addressBook.set(remotePeerId, [remoteListenAddr])
remAddr = libp2p.peerStore.addressBook.getMultiaddrsForPeer(remotePeerId)[0]
await libp2p.peerStore.addressBook.set(remotePeerId, [remoteListenAddr]);
[remAddr] = await libp2p.peerStore.addressBook.getMultiaddrsForPeer(remotePeerId)
})
afterEach(() => Promise.all([
@ -106,8 +106,8 @@ describe('DHT subsystem operates correctly', () => {
await libp2p.start()
await remoteLibp2p.start()
libp2p.peerStore.addressBook.set(remotePeerId, [remoteListenAddr])
remAddr = libp2p.peerStore.addressBook.getMultiaddrsForPeer(remotePeerId)[0]
await libp2p.peerStore.addressBook.set(remotePeerId, [remoteListenAddr])
remAddr = (await libp2p.peerStore.addressBook.getMultiaddrsForPeer(remotePeerId))[0]
})
afterEach(() => Promise.all([

View File

@ -18,7 +18,7 @@ const AggregateError = require('aggregate-error')
const { Connection } = require('libp2p-interfaces/src/connection')
const { AbortError } = require('libp2p-interfaces/src/transport/errors')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const { MemoryDatastore } = require('datastore-core/memory')
const Libp2p = require('../../src')
const Dialer = require('../../src/dialer')
const AddressManager = require('../../src/address-manager')
@ -48,7 +48,10 @@ describe('Dialing (direct, TCP)', () => {
PeerId.createFromJSON(Peers[1])
])
peerStore = new PeerStore({ peerId: remotePeerId })
peerStore = new PeerStore({
peerId: remotePeerId,
datastore: new MemoryDatastore()
})
remoteTM = new TransportManager({
libp2p: {
addressManager: new AddressManager(remotePeerId, { listen: [listenAddr] }),
@ -62,7 +65,10 @@ describe('Dialing (direct, TCP)', () => {
localTM = new TransportManager({
libp2p: {
peerId: localPeerId,
peerStore: new PeerStore({ peerId: localPeerId })
peerStore: new PeerStore({
peerId: localPeerId,
datastore: new MemoryDatastore()
})
},
upgrader: mockUpgrader
})
@ -113,7 +119,10 @@ describe('Dialing (direct, TCP)', () => {
it('should be able to connect to a given peer id', async () => {
const peerId = await PeerId.createFromJSON(Peers[0])
const peerStore = new PeerStore({ peerId })
const peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
const dialer = new Dialer({
transportManager: localTM,
peerStore
@ -249,7 +258,7 @@ describe('Dialing (direct, TCP)', () => {
connEncryption: [Crypto]
}
})
remoteLibp2p.handle('/echo/1.0.0', ({ stream }) => pipe(stream, stream))
await remoteLibp2p.handle('/echo/1.0.0', ({ stream }) => pipe(stream, stream))
await remoteLibp2p.start()
remoteAddr = remoteLibp2p.transportManager.getAddrs()[0].encapsulate(`/p2p/${remotePeerId.toB58String()}`)
@ -339,12 +348,12 @@ describe('Dialing (direct, TCP)', () => {
})
// register some stream handlers to simulate several protocols
libp2p.handle('/stream-count/1', ({ stream }) => pipe(stream, stream))
libp2p.handle('/stream-count/2', ({ stream }) => pipe(stream, stream))
remoteLibp2p.handle('/stream-count/3', ({ stream }) => pipe(stream, stream))
remoteLibp2p.handle('/stream-count/4', ({ stream }) => pipe(stream, stream))
await libp2p.handle('/stream-count/1', ({ stream }) => pipe(stream, stream))
await libp2p.handle('/stream-count/2', ({ stream }) => pipe(stream, stream))
await remoteLibp2p.handle('/stream-count/3', ({ stream }) => pipe(stream, stream))
await remoteLibp2p.handle('/stream-count/4', ({ stream }) => pipe(stream, stream))
libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.multiaddrs)
await libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.multiaddrs)
const connection = await libp2p.dial(remotePeerId)
// Create local to remote streams
@ -363,8 +372,8 @@ describe('Dialing (direct, TCP)', () => {
// Verify stream count
const remoteConn = remoteLibp2p.connectionManager.get(libp2p.peerId)
expect(connection.streams).to.have.length(5)
expect(remoteConn.streams).to.have.length(5)
expect(connection.streams).to.have.length(6)
expect(remoteConn.streams).to.have.length(6)
// Close the connection and verify all streams have been closed
await connection.close()

View File

@ -13,7 +13,7 @@ const { NOISE: Crypto } = require('@chainsafe/libp2p-noise')
const { Multiaddr } = require('multiaddr')
const AggregateError = require('aggregate-error')
const { AbortError } = require('libp2p-interfaces/src/transport/errors')
const { MemoryDatastore } = require('datastore-core/memory')
const { codes: ErrorCodes } = require('../../src/errors')
const Constants = require('../../src/constants')
const Dialer = require('../../src/dialer')
@ -36,7 +36,10 @@ describe('Dialing (direct, WebSockets)', () => {
before(async () => {
[peerId] = await createPeerId()
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
localTM = new TransportManager({
libp2p: {},
upgrader: mockUpgrader,
@ -215,7 +218,7 @@ describe('Dialing (direct, WebSockets)', () => {
})
// Inject data in the AddressBook
peerStore.addressBook.add(peerId, peerMultiaddrs)
await peerStore.addressBook.add(peerId, peerMultiaddrs)
// Perform 3 multiaddr dials
await dialer.connectToPeer(peerId)

View File

@ -10,7 +10,6 @@ const duplexPair = require('it-pair/duplex')
const { Multiaddr } = require('multiaddr')
const pWaitFor = require('p-wait-for')
const { toString: unit8ArrayToString } = require('uint8arrays/to-string')
const { codes: Errors } = require('../../src/errors')
const IdentifyService = require('../../src/identify')
const multicodecs = IdentifyService.multicodecs
@ -22,7 +21,7 @@ const baseOptions = require('../utils/base-options.browser')
const { updateSelfPeerRecord } = require('../../src/record/utils')
const pkg = require('../../package.json')
const AddressManager = require('../../src/address-manager')
const { MemoryDatastore } = require('datastore-core/memory')
const { MULTIADDRS_WEBSOCKETS } = require('../fixtures/browser')
const remoteAddr = MULTIADDRS_WEBSOCKETS[0]
const listenMaddrs = [new Multiaddr('/ip4/127.0.0.1/tcp/15002/ws')]
@ -38,10 +37,16 @@ describe('Identify', () => {
PeerId.createFromJSON(Peers[1])
]))
localPeerStore = new PeerStore({ peerId: localPeer })
localPeerStore = new PeerStore({
peerId: localPeer,
datastore: new MemoryDatastore()
})
localPeerStore.protoBook.set(localPeer, protocols)
remotePeerStore = new PeerStore({ peerId: remotePeer })
remotePeerStore = new PeerStore({
peerId: remotePeer,
datastore: new MemoryDatastore()
})
remotePeerStore.protoBook.set(remotePeer, protocols)
localAddressManager = new AddressManager(localPeer)
@ -103,7 +108,7 @@ describe('Identify', () => {
expect(localIdentify.peerStore.protoBook.set.callCount).to.equal(1)
// Validate the remote peer gets updated in the peer store
const addresses = localIdentify.peerStore.addressBook.get(remotePeer)
const addresses = await localIdentify.peerStore.addressBook.get(remotePeer)
expect(addresses).to.exist()
expect(addresses).have.lengthOf(listenMaddrs.length)
expect(addresses.map((a) => a.multiaddr)[0].equals(listenMaddrs[0]))
@ -149,7 +154,7 @@ describe('Identify', () => {
sinon.spy(localIdentify.peerStore.addressBook, 'set')
sinon.spy(localIdentify.peerStore.protoBook, 'set')
sinon.spy(localIdentify.peerStore.metadataBook, 'set')
sinon.spy(localIdentify.peerStore.metadataBook, 'setValue')
// Run identify
await Promise.all([
@ -164,7 +169,7 @@ describe('Identify', () => {
expect(localIdentify.peerStore.addressBook.set.callCount).to.equal(1)
expect(localIdentify.peerStore.protoBook.set.callCount).to.equal(1)
const metadataArgs = localIdentify.peerStore.metadataBook.set.firstCall.args
const metadataArgs = localIdentify.peerStore.metadataBook.setValue.firstCall.args
expect(metadataArgs[0].id.bytes).to.equal(remotePeer.bytes)
expect(metadataArgs[1]).to.equal('AgentVersion')
expect(unit8ArrayToString(metadataArgs[2])).to.equal(agentVersion)
@ -221,13 +226,16 @@ describe('Identify', () => {
.and.to.have.property('code', Errors.ERR_INVALID_PEER)
})
it('should store host data and protocol version into metadataBook', () => {
it('should store host data and protocol version into metadataBook', async () => {
const agentVersion = 'js-project/1.0.0'
const peerStore = new PeerStore({ peerId: localPeer })
const peerStore = new PeerStore({
peerId: localPeer,
datastore: new MemoryDatastore()
})
sinon.spy(peerStore.metadataBook, 'set')
sinon.spy(peerStore.metadataBook, 'setValue')
new IdentifyService({ // eslint-disable-line no-new
const service = new IdentifyService({ // eslint-disable-line no-new
libp2p: {
peerId: localPeer,
connectionManager: new EventEmitter(),
@ -243,23 +251,30 @@ describe('Identify', () => {
protocols
})
expect(peerStore.metadataBook.set.callCount).to.eql(2)
await service.start()
const storedAgentVersion = peerStore.metadataBook.getValue(localPeer, 'AgentVersion')
const storedProtocolVersion = peerStore.metadataBook.getValue(localPeer, 'ProtocolVersion')
expect(peerStore.metadataBook.setValue.callCount).to.eql(2)
const storedAgentVersion = await peerStore.metadataBook.getValue(localPeer, 'AgentVersion')
const storedProtocolVersion = await peerStore.metadataBook.getValue(localPeer, 'ProtocolVersion')
expect(agentVersion).to.eql(unit8ArrayToString(storedAgentVersion))
expect(storedProtocolVersion).to.exist()
await service.stop()
})
describe('push', () => {
it('should be able to push identify updates to another peer', async () => {
const storedProtocols = [multicodecs.IDENTIFY, multicodecs.IDENTIFY_PUSH, '/echo/1.0.0']
const storedProtocols = [multicodecs.IDENTIFY, multicodecs.IDENTIFY_PUSH, '/echo/1.0.0'].sort()
const connectionManager = new EventEmitter()
connectionManager.getConnection = () => { }
const localPeerStore = new PeerStore({ peerId: localPeer })
localPeerStore.protoBook.set(localPeer, storedProtocols)
const localPeerStore = new PeerStore({
peerId: localPeer,
datastore: new MemoryDatastore()
})
await localPeerStore.protoBook.set(localPeer, storedProtocols)
const localIdentify = new IdentifyService({
libp2p: {
@ -273,8 +288,11 @@ describe('Identify', () => {
}
})
const remotePeerStore = new PeerStore({ peerId: remotePeer })
remotePeerStore.protoBook.set(remotePeer, storedProtocols)
const remotePeerStore = new PeerStore({
peerId: remotePeer,
datastore: new MemoryDatastore()
})
await remotePeerStore.protoBook.set(remotePeer, storedProtocols)
const remoteIdentify = new IdentifyService({
libp2p: {
@ -316,7 +334,7 @@ describe('Identify', () => {
expect(remoteIdentify.peerStore.addressBook.consumePeerRecord.callCount).to.equal(2)
expect(remoteIdentify.peerStore.protoBook.set.callCount).to.equal(1)
const addresses = localIdentify.peerStore.addressBook.get(localPeer)
const addresses = await localIdentify.peerStore.addressBook.get(localPeer)
expect(addresses).to.exist()
expect(addresses).have.lengthOf(listenMaddrs.length)
expect(addresses.map((a) => a.multiaddr)).to.eql(listenMaddrs)
@ -328,12 +346,15 @@ describe('Identify', () => {
// LEGACY
it('should be able to push identify updates to another peer with no certified peer records support', async () => {
const storedProtocols = [multicodecs.IDENTIFY, multicodecs.IDENTIFY_PUSH, '/echo/1.0.0']
const storedProtocols = [multicodecs.IDENTIFY, multicodecs.IDENTIFY_PUSH, '/echo/1.0.0'].sort()
const connectionManager = new EventEmitter()
connectionManager.getConnection = () => { }
const localPeerStore = new PeerStore({ peerId: localPeer })
localPeerStore.protoBook.set(localPeer, storedProtocols)
const localPeerStore = new PeerStore({
peerId: localPeer,
datastore: new MemoryDatastore()
})
await localPeerStore.protoBook.set(localPeer, storedProtocols)
const localIdentify = new IdentifyService({
libp2p: {
@ -347,14 +368,17 @@ describe('Identify', () => {
}
})
const remotePeerStore = new PeerStore({ peerId: remotePeer })
const remotePeerStore = new PeerStore({
peerId: remotePeer,
datastore: new MemoryDatastore()
})
remotePeerStore.protoBook.set(remotePeer, storedProtocols)
const remoteIdentify = new IdentifyService({
libp2p: {
peerId: remotePeer,
connectionManager,
peerStore: new PeerStore({ peerId: remotePeer }),
peerStore: remotePeerStore,
multiaddrs: [],
_options: { host: {} },
_config: { protocolPrefix: 'ipfs' },
@ -492,11 +516,15 @@ describe('Identify', () => {
await libp2p.identifyService.identify.firstCall.returnValue
sinon.stub(libp2p, 'isStarted').returns(true)
libp2p.handle('/echo/2.0.0', () => {})
libp2p.unhandle('/echo/2.0.0')
await libp2p.handle('/echo/2.0.0', () => {})
await libp2p.unhandle('/echo/2.0.0')
// the protocol change event listener in the identify service is async
await pWaitFor(() => libp2p.identifyService.push.callCount === 2)
// Verify the remote peer is notified of both changes
expect(libp2p.identifyService.push.callCount).to.equal(2)
for (const call of libp2p.identifyService.push.getCalls()) {
const [connections] = call.args
expect(connections.length).to.equal(1)
@ -509,7 +537,7 @@ describe('Identify', () => {
await pWaitFor(() => connection.streams.length === 0)
})
it('should store host data and protocol version into metadataBook', () => {
it('should store host data and protocol version into metadataBook', async () => {
const agentVersion = 'js-project/1.0.0'
libp2p = new Libp2p({
@ -519,9 +547,10 @@ describe('Identify', () => {
agentVersion
}
})
await libp2p.start()
const storedAgentVersion = libp2p.peerStore.metadataBook.getValue(localPeer, 'AgentVersion')
const storedProtocolVersion = libp2p.peerStore.metadataBook.getValue(localPeer, 'ProtocolVersion')
const storedAgentVersion = await libp2p.peerStore.metadataBook.getValue(localPeer, 'AgentVersion')
const storedProtocolVersion = await libp2p.peerStore.metadataBook.getValue(localPeer, 'ProtocolVersion')
expect(agentVersion).to.eql(unit8ArrayToString(storedAgentVersion))
expect(storedProtocolVersion).to.exist()
@ -545,7 +574,10 @@ describe('Identify', () => {
await libp2p.identifyService.identify.firstCall.returnValue
sinon.stub(libp2p, 'isStarted').returns(true)
libp2p.peerStore.addressBook.add(libp2p.peerId, [new Multiaddr('/ip4/180.0.0.1/tcp/15001/ws')])
await libp2p.peerStore.addressBook.add(libp2p.peerId, [new Multiaddr('/ip4/180.0.0.1/tcp/15001/ws')])
// the address change event listener in the identify service is async
await pWaitFor(() => libp2p.identifyService.push.callCount === 1)
// Verify the remote peer is notified of change
expect(libp2p.identifyService.push.callCount).to.equal(1)

View File

@ -266,10 +266,10 @@ describe('Metrics', () => {
const metric = 'some-metric'
const value = 1
metrics.updateComponentMetric(component, metric, value)
metrics.updateComponentMetric({ component, metric, value })
expect(metrics.getComponentMetrics()).to.have.lengthOf(1)
expect(metrics.getComponentMetrics().get(component)).to.have.lengthOf(1)
expect(metrics.getComponentMetrics().get(component).get(metric)).to.equal(value)
expect(metrics.getComponentMetrics().get('libp2p').get(component)).to.have.lengthOf(1)
expect(metrics.getComponentMetrics().get('libp2p').get(component).get(metric)).to.equal(value)
})
})
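A hedged sketch of the new call shape exercised by the test above: `updateComponentMetric` now takes a single options object, and component metrics are grouped under the 'libp2p' system key. The component and metric names below are illustrative, and `metrics` is assumed to be an existing Metrics instance.

function recordDatastorePut (metrics) {
  metrics.updateComponentMetric({ component: 'peer-store', metric: 'datastore-puts', value: 1 })
  // values are read back from the per-system map
  return metrics.getComponentMetrics().get('libp2p').get('peer-store').get('datastore-puts')
}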

View File

@ -658,10 +658,8 @@ describe('peer-routing', () => {
await node.start()
await delay(300)
expect(node._dht.getClosestPeers.callCount).to.eql(1)
await delay(500)
expect(node._dht.getClosestPeers.callCount).to.eql(2)
// should run more than once
await pWaitFor(() => node._dht.getClosestPeers.callCount === 2)
})
})
})

View File

@ -9,7 +9,7 @@ const arrayEquals = require('libp2p-utils/src/array-equals')
const addressSort = require('libp2p-utils/src/address-sort')
const PeerId = require('peer-id')
const pDefer = require('p-defer')
const { MemoryDatastore } = require('datastore-core/memory')
const PeerStore = require('../../src/peer-store')
const Envelope = require('../../src/record/envelope')
const PeerRecord = require('../../src/record/peer-record')
@ -19,6 +19,11 @@ const {
codes: { ERR_INVALID_PARAMETERS }
} = require('../../src/errors')
/**
* @typedef {import('../../src/peer-store/types').PeerStore} PeerStore
* @typedef {import('../../src/peer-store/types').AddressBook} AddressBook
*/
const addr1 = new Multiaddr('/ip4/127.0.0.1/tcp/8000')
const addr2 = new Multiaddr('/ip4/20.0.0.1/tcp/8001')
const addr3 = new Multiaddr('/ip4/127.0.0.1/tcp/8002')
@ -31,10 +36,16 @@ describe('addressBook', () => {
})
describe('addressBook.set', () => {
let peerStore, ab
/** @type {PeerStore} */
let peerStore
/** @type {AddressBook} */
let ab
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
ab = peerStore.addressBook
})
@ -42,9 +53,9 @@ describe('addressBook', () => {
peerStore.removeAllListeners()
})
it('throwns invalid parameters error if invalid PeerId is provided', () => {
it('throws invalid parameters error if invalid PeerId is provided', async () => {
try {
ab.set('invalid peerId')
await ab.set('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -52,9 +63,9 @@ describe('addressBook', () => {
throw new Error('invalid peerId should throw error')
})
it('throwns invalid parameters error if no addresses provided', () => {
it('throws invalid parameters error if no addresses provided', async () => {
try {
ab.set(peerId)
await ab.set(peerId)
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -62,9 +73,9 @@ describe('addressBook', () => {
throw new Error('no addresses should throw error')
})
it('throwns invalid parameters error if invalid multiaddrs are provided', () => {
it('throws invalid parameters error if invalid multiaddrs are provided', async () => {
try {
ab.set(peerId, ['invalid multiaddr'])
await ab.set(peerId, ['invalid multiaddr'])
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -72,7 +83,7 @@ describe('addressBook', () => {
throw new Error('invalid multiaddrs should throw error')
})
it('replaces the stored content by default and emit change event', () => {
it('replaces the stored content by default and emit change event', async () => {
const defer = pDefer()
const supportedMultiaddrs = [addr1, addr2]
@ -82,8 +93,8 @@ describe('addressBook', () => {
defer.resolve()
})
ab.set(peerId, supportedMultiaddrs)
const addresses = ab.get(peerId)
await ab.set(peerId, supportedMultiaddrs)
const addresses = await ab.get(peerId)
const multiaddrs = addresses.map((mi) => mi.multiaddr)
expect(multiaddrs).to.have.deep.members(supportedMultiaddrs)
@ -105,11 +116,11 @@ describe('addressBook', () => {
})
// set 1
ab.set(peerId, supportedMultiaddrsA)
await ab.set(peerId, supportedMultiaddrsA)
// set 2 (same content)
ab.set(peerId, supportedMultiaddrsB)
const addresses = ab.get(peerId)
await ab.set(peerId, supportedMultiaddrsB)
const addresses = await ab.get(peerId)
const multiaddrs = addresses.map((mi) => mi.multiaddr)
expect(multiaddrs).to.have.deep.members(supportedMultiaddrsB)
@ -130,10 +141,10 @@ describe('addressBook', () => {
})
// set 1
ab.set(peerId, supportedMultiaddrs)
await ab.set(peerId, supportedMultiaddrs)
// set 2 (same content)
ab.set(peerId, supportedMultiaddrs)
await ab.set(peerId, supportedMultiaddrs)
// Wait 50ms for incorrect second event
setTimeout(() => {
@ -145,10 +156,16 @@ describe('addressBook', () => {
})
describe('addressBook.add', () => {
let peerStore, ab
/** @type {PeerStore} */
let peerStore
/** @type {AddressBook} */
let ab
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
ab = peerStore.addressBook
})
@ -156,9 +173,9 @@ describe('addressBook', () => {
peerStore.removeAllListeners()
})
it('throwns invalid parameters error if invalid PeerId is provided', () => {
it('throws invalid parameters error if invalid PeerId is provided', async () => {
try {
ab.add('invalid peerId')
await ab.add('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -166,9 +183,9 @@ describe('addressBook', () => {
throw new Error('invalid peerId should throw error')
})
it('throwns invalid parameters error if no addresses provided', () => {
it('throws invalid parameters error if no addresses provided', async () => {
try {
ab.add(peerId)
await ab.add(peerId)
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -176,9 +193,9 @@ describe('addressBook', () => {
throw new Error('no addresses provided should throw error')
})
it('throwns invalid parameters error if invalid multiaddrs are provided', () => {
it('throws invalid parameters error if invalid multiaddrs are provided', async () => {
try {
ab.add(peerId, ['invalid multiaddr'])
await ab.add(peerId, ['invalid multiaddr'])
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -193,7 +210,7 @@ describe('addressBook', () => {
defer.reject()
})
ab.add(peerId, [])
await ab.add(peerId, [])
// Wait 50ms for incorrect second event
setTimeout(() => {
@ -203,7 +220,7 @@ describe('addressBook', () => {
await defer.promise
})
it('adds the new content and emits change event', () => {
it('adds the new content and emits change event', async () => {
const defer = pDefer()
const supportedMultiaddrsA = [addr1, addr2]
@ -219,14 +236,14 @@ describe('addressBook', () => {
})
// Replace
ab.set(peerId, supportedMultiaddrsA)
let addresses = ab.get(peerId)
await ab.set(peerId, supportedMultiaddrsA)
let addresses = await ab.get(peerId)
let multiaddrs = addresses.map((mi) => mi.multiaddr)
expect(multiaddrs).to.have.deep.members(supportedMultiaddrsA)
// Add
ab.add(peerId, supportedMultiaddrsB)
addresses = ab.get(peerId)
await ab.add(peerId, supportedMultiaddrsB)
addresses = await ab.get(peerId)
multiaddrs = addresses.map((mi) => mi.multiaddr)
expect(multiaddrs).to.have.deep.members(finalMultiaddrs)
@ -249,11 +266,11 @@ describe('addressBook', () => {
})
// set 1
ab.set(peerId, supportedMultiaddrsA)
await ab.set(peerId, supportedMultiaddrsA)
// set 2 (content already existing)
ab.add(peerId, supportedMultiaddrsB)
const addresses = ab.get(peerId)
await ab.add(peerId, supportedMultiaddrsB)
const addresses = await ab.get(peerId)
const multiaddrs = addresses.map((mi) => mi.multiaddr)
expect(multiaddrs).to.have.deep.members(finalMultiaddrs)
@ -275,10 +292,10 @@ describe('addressBook', () => {
})
// set 1
ab.set(peerId, supportedMultiaddrsA)
await ab.set(peerId, supportedMultiaddrsA)
// set 2 (content already existing)
ab.add(peerId, supportedMultiaddrsB)
await ab.add(peerId, supportedMultiaddrsB)
// Wait 50ms for incorrect second event
setTimeout(() => {
@ -288,26 +305,32 @@ describe('addressBook', () => {
await defer.promise
})
it('does not add replicated content', () => {
it('does not add replicated content', async () => {
// set 1
ab.set(peerId, [addr1, addr1])
await ab.set(peerId, [addr1, addr1])
const addresses = ab.get(peerId)
const addresses = await ab.get(peerId)
expect(addresses).to.have.lengthOf(1)
})
})
describe('addressBook.get', () => {
let peerStore, ab
/** @type {PeerStore} */
let peerStore
/** @type {AddressBook} */
let ab
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
ab = peerStore.addressBook
})
it('throwns invalid parameters error if invalid PeerId is provided', () => {
it('throws invalid parameters error if invalid PeerId is provided', async () => {
try {
ab.get('invalid peerId')
await ab.get('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -315,34 +338,40 @@ describe('addressBook', () => {
throw new Error('invalid peerId should throw error')
})
it('returns undefined if no multiaddrs are known for the provided peer', () => {
const addresses = ab.get(peerId)
it('returns empty if no multiaddrs are known for the provided peer', async () => {
const addresses = await ab.get(peerId)
expect(addresses).to.not.exist()
expect(addresses).to.be.empty()
})
it('returns the multiaddrs stored', () => {
it('returns the multiaddrs stored', async () => {
const supportedMultiaddrs = [addr1, addr2]
ab.set(peerId, supportedMultiaddrs)
await ab.set(peerId, supportedMultiaddrs)
const addresses = ab.get(peerId)
const addresses = await ab.get(peerId)
const multiaddrs = addresses.map((mi) => mi.multiaddr)
expect(multiaddrs).to.have.deep.members(supportedMultiaddrs)
})
})
describe('addressBook.getMultiaddrsForPeer', () => {
let peerStore, ab
/** @type {PeerStore} */
let peerStore
/** @type {AddressBook} */
let ab
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
ab = peerStore.addressBook
})
it('throwns invalid parameters error if invalid PeerId is provided', () => {
it('throws invalid parameters error if invalid PeerId is provided', async () => {
try {
ab.getMultiaddrsForPeer('invalid peerId')
await ab.getMultiaddrsForPeer('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -350,28 +379,28 @@ describe('addressBook', () => {
throw new Error('invalid peerId should throw error')
})
it('returns undefined if no multiaddrs are known for the provided peer', () => {
const addresses = ab.getMultiaddrsForPeer(peerId)
it('returns empty if no multiaddrs are known for the provided peer', async () => {
const addresses = await ab.getMultiaddrsForPeer(peerId)
expect(addresses).to.not.exist()
expect(addresses).to.be.empty()
})
it('returns the multiaddrs stored', () => {
it('returns the multiaddrs stored', async () => {
const supportedMultiaddrs = [addr1, addr2]
ab.set(peerId, supportedMultiaddrs)
await ab.set(peerId, supportedMultiaddrs)
const multiaddrs = ab.getMultiaddrsForPeer(peerId)
const multiaddrs = await ab.getMultiaddrsForPeer(peerId)
multiaddrs.forEach((m) => {
expect(m.getPeerId()).to.equal(peerId.toB58String())
})
})
it('can sort multiaddrs providing a sorter', () => {
it('can sort multiaddrs providing a sorter', async () => {
const supportedMultiaddrs = [addr1, addr2]
ab.set(peerId, supportedMultiaddrs)
await ab.set(peerId, supportedMultiaddrs)
const multiaddrs = ab.getMultiaddrsForPeer(peerId, addressSort.publicAddressesFirst)
const multiaddrs = await ab.getMultiaddrsForPeer(peerId, addressSort.publicAddressesFirst)
const sortedAddresses = addressSort.publicAddressesFirst(supportedMultiaddrs.map((m) => ({ multiaddr: m })))
multiaddrs.forEach((m, index) => {
@ -381,16 +410,22 @@ describe('addressBook', () => {
})
describe('addressBook.delete', () => {
let peerStore, ab
/** @type {PeerStore} */
let peerStore
/** @type {AddressBook} */
let ab
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
ab = peerStore.addressBook
})
it('throwns invalid parameters error if invalid PeerId is provided', () => {
it('throws invalid parameters error if invalid PeerId is provided', async () => {
try {
ab.delete('invalid peerId')
await ab.delete('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -398,16 +433,14 @@ describe('addressBook', () => {
throw new Error('invalid peerId should throw error')
})
it('returns false if no records exist for the peer and no event is emitted', () => {
it('does not emit an event if no records exist for the peer', async () => {
const defer = pDefer()
peerStore.on('change:multiaddrs', () => {
defer.reject()
})
const deleted = ab.delete(peerId)
expect(deleted).to.equal(false)
await ab.delete(peerId)
// Wait 50ms for incorrect invalid event
setTimeout(() => {
@ -417,11 +450,11 @@ describe('addressBook', () => {
return defer.promise
})
it('returns true if the record exists and an event is emitted', () => {
it('emits an event if the record exists', async () => {
const defer = pDefer()
const supportedMultiaddrs = [addr1, addr2]
ab.set(peerId, supportedMultiaddrs)
await ab.set(peerId, supportedMultiaddrs)
// Listen after set
peerStore.on('change:multiaddrs', ({ multiaddrs }) => {
@ -429,20 +462,24 @@ describe('addressBook', () => {
defer.resolve()
})
const deleted = ab.delete(peerId)
expect(deleted).to.equal(true)
await ab.delete(peerId)
return defer.promise
})
})
describe('certified records', () => {
let peerStore, ab
/** @type {PeerStore} */
let peerStore
/** @type {AddressBook} */
let ab
describe('consumes a valid peer record and stores its data', () => {
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
ab = peerStore.addressBook
})
@ -455,15 +492,11 @@ describe('addressBook', () => {
const envelope = await Envelope.seal(peerRecord, peerId)
// consume peer record
const consumed = ab.consumePeerRecord(envelope)
const consumed = await ab.consumePeerRecord(envelope)
expect(consumed).to.eql(true)
// Validate stored envelope
const storedEnvelope = await ab.getPeerRecord(peerId)
expect(envelope.equals(storedEnvelope)).to.eql(true)
// Validate AddressBook addresses
const addrs = ab.get(peerId)
const addrs = await ab.get(peerId)
expect(addrs).to.exist()
expect(addrs).to.have.lengthOf(multiaddrs.length)
addrs.forEach((addr, index) => {
@ -488,7 +521,7 @@ describe('addressBook', () => {
})
// consume peer record
const consumed = ab.consumePeerRecord(envelope)
const consumed = await ab.consumePeerRecord(envelope)
expect(consumed).to.eql(true)
return defer.promise
@ -499,10 +532,10 @@ describe('addressBook', () => {
const multiaddrs = [addr1, addr2]
// Set addressBook data
ab.set(peerId, multiaddrs)
await ab.set(peerId, multiaddrs)
// Validate data exists, but not certified
let addrs = ab.get(peerId)
let addrs = await ab.get(peerId)
expect(addrs).to.exist()
expect(addrs).to.have.lengthOf(multiaddrs.length)
@ -525,14 +558,14 @@ describe('addressBook', () => {
})
// consume peer record
const consumed = ab.consumePeerRecord(envelope)
const consumed = await ab.consumePeerRecord(envelope)
expect(consumed).to.eql(true)
// Wait event
await defer.promise
// Validate data exists and certified
addrs = ab.get(peerId)
addrs = await ab.get(peerId)
expect(addrs).to.exist()
expect(addrs).to.have.lengthOf(multiaddrs.length)
addrs.forEach((addr, index) => {
@ -546,10 +579,10 @@ describe('addressBook', () => {
const multiaddrs = [addr1, addr2]
// Set addressBook data
ab.set(peerId, [addr1])
await ab.set(peerId, [addr1])
// Validate data exists, but not certified
let addrs = ab.get(peerId)
let addrs = await ab.get(peerId)
expect(addrs).to.exist()
expect(addrs).to.have.lengthOf(1)
expect(addrs[0].isCertified).to.eql(false)
@ -569,14 +602,14 @@ describe('addressBook', () => {
})
// consume peer record
const consumed = ab.consumePeerRecord(envelope)
const consumed = await ab.consumePeerRecord(envelope)
expect(consumed).to.eql(true)
// Wait event
await defer.promise
// Validate data exists and certified
addrs = ab.get(peerId)
addrs = await ab.get(peerId)
expect(addrs).to.exist()
expect(addrs).to.have.lengthOf(multiaddrs.length)
addrs.forEach((addr, index) => {
@ -591,10 +624,10 @@ describe('addressBook', () => {
const multiaddrsCertified = [addr1, addr2]
// Set addressBook data
ab.set(peerId, multiaddrsUncertified)
await ab.set(peerId, multiaddrsUncertified)
// Validate data exists, but not certified
let addrs = ab.get(peerId)
let addrs = await ab.get(peerId)
expect(addrs).to.exist()
expect(addrs).to.have.lengthOf(multiaddrsUncertified.length)
addrs.forEach((addr, index) => {
@ -616,14 +649,14 @@ describe('addressBook', () => {
})
// consume peer record
const consumed = ab.consumePeerRecord(envelope)
const consumed = await ab.consumePeerRecord(envelope)
expect(consumed).to.eql(true)
// Wait event
await defer.promise
// Validate data exists and certified
addrs = ab.get(peerId)
addrs = await ab.get(peerId)
expect(addrs).to.exist()
expect(addrs).to.have.lengthOf(multiaddrsCertified.length)
addrs.forEach((addr, index) => {
@ -635,16 +668,19 @@ describe('addressBook', () => {
describe('fails to consume invalid peer records', () => {
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
ab = peerStore.addressBook
})
it('invalid peer record', () => {
it('invalid peer record', async () => {
const invalidEnvelope = {
payload: Buffer.from('invalid-peerRecord')
}
const consumed = ab.consumePeerRecord(invalidEnvelope)
const consumed = await ab.consumePeerRecord(invalidEnvelope)
expect(consumed).to.eql(false)
})
@ -659,7 +695,7 @@ describe('addressBook', () => {
})
const envelope = await Envelope.seal(peerRecord, peerId)
const consumed = ab.consumePeerRecord(envelope)
const consumed = await ab.consumePeerRecord(envelope)
expect(consumed).to.eql(false)
})
@ -679,10 +715,10 @@ describe('addressBook', () => {
const envelope2 = await Envelope.seal(peerRecord2, peerId)
// Consume envelope1 (bigger seqNumber)
let consumed = ab.consumePeerRecord(envelope1)
let consumed = await ab.consumePeerRecord(envelope1)
expect(consumed).to.eql(true)
consumed = ab.consumePeerRecord(envelope2)
consumed = await ab.consumePeerRecord(envelope2)
expect(consumed).to.eql(false)
})
@ -693,7 +729,7 @@ describe('addressBook', () => {
})
const envelope = await Envelope.seal(peerRecord, peerId)
const consumed = ab.consumePeerRecord(envelope)
const consumed = await ab.consumePeerRecord(envelope)
expect(consumed).to.eql(false)
})
})

View File

@ -3,26 +3,43 @@
const { expect } = require('aegir/utils/chai')
const sinon = require('sinon')
const { MemoryDatastore } = require('datastore-core/memory')
const PeerStore = require('../../src/peer-store')
const pDefer = require('p-defer')
const peerUtils = require('../utils/creators/peer')
const {
codes: { ERR_INVALID_PARAMETERS }
} = require('../../src/errors')
/**
* @typedef {import('../../src/peer-store/types').PeerStore} PeerStore
* @typedef {import('../../src/peer-store/types').KeyBook} KeyBook
* @typedef {import('peer-id')} PeerId
*/
describe('keyBook', () => {
let peerId, peerStore, kb
/** @type {PeerId} */
let peerId
/** @type {PeerStore} */
let peerStore
/** @type {KeyBook} */
let kb
/** @type {MemoryDatastore} */
let datastore
beforeEach(async () => {
[peerId] = await peerUtils.createPeerId()
peerStore = new PeerStore({ peerId })
datastore = new MemoryDatastore()
peerStore = new PeerStore({
peerId,
datastore
})
kb = peerStore.keyBook
})
it('throws invalid parameters error if invalid PeerId is provided in set', () => {
it('throws invalid parameters error if invalid PeerId is provided in set', async () => {
try {
kb.set('invalid peerId')
await kb.set('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -30,9 +47,9 @@ describe('keyBook', () => {
throw new Error('invalid peerId should throw error')
})
it('throws invalid parameters error if invalid PeerId is provided in get', () => {
it('throws invalid parameters error if invalid PeerId is provided in get', async () => {
try {
kb.get('invalid peerId')
await kb.get('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -40,22 +57,58 @@ describe('keyBook', () => {
throw new Error('invalid peerId should throw error')
})
it('stores the peerId in the book and returns the public key', () => {
it('stores the peerId in the book and returns the public key', async () => {
// Set PeerId
kb.set(peerId, peerId.pubKey)
await kb.set(peerId, peerId.pubKey)
// Get public key
const pubKey = kb.get(peerId)
const pubKey = await kb.get(peerId)
expect(peerId.pubKey.bytes).to.equalBytes(pubKey.bytes)
})
it('should not store if already stored', () => {
const spy = sinon.spy(kb, '_setData')
it('should not store if already stored', async () => {
const spy = sinon.spy(datastore, 'put')
// Set PeerId
kb.set(peerId, peerId.pubKey)
kb.set(peerId, peerId.pubKey)
await kb.set(peerId, peerId.pubKey)
await kb.set(peerId, peerId.pubKey)
expect(spy).to.have.property('callCount', 1)
})
it('should emit an event when setting a key', async () => {
const defer = pDefer()
peerStore.on('change:pubkey', ({ peerId: id, pubKey }) => {
expect(id.toB58String()).to.equal(peerId.toB58String())
expect(pubKey.bytes).to.equalBytes(peerId.pubKey.bytes)
defer.resolve()
})
// Set PeerId
await kb.set(peerId, peerId.pubKey)
await defer.promise
})
it('should not set when key does not match', async () => {
const [edKey] = await peerUtils.createPeerId({ fixture: false, opts: { keyType: 'Ed25519' } })
// Set PeerId
await expect(kb.set(edKey, peerId.pubKey)).to.eventually.be.rejectedWith(/bytes do not match/)
})
it('should emit an event when deleting a key', async () => {
const defer = pDefer()
await kb.set(peerId, peerId.pubKey)
peerStore.on('change:pubkey', ({ peerId: id, pubKey }) => {
expect(id.toB58String()).to.equal(peerId.toB58String())
expect(pubKey).to.be.undefined()
defer.resolve()
})
await kb.delete(peerId)
await defer.promise
})
})
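A brief hedged sketch consistent with the keyBook tests above: `set` validates the supplied public key against the peer id (and is a no-op when the same key is already stored), while `get` resolves with the stored key. `peerStore` and `peerId` are assumed to exist.

async function rememberKey (peerStore, peerId) {
  // rejects if the key bytes do not match the peer id
  await peerStore.keyBook.set(peerId, peerId.pubKey)
  return peerStore.keyBook.get(peerId)
}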

View File

@ -3,7 +3,7 @@
const { expect } = require('aegir/utils/chai')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const { MemoryDatastore } = require('datastore-core/memory')
const pDefer = require('p-defer')
const PeerStore = require('../../src/peer-store')
@ -12,7 +12,14 @@ const {
codes: { ERR_INVALID_PARAMETERS }
} = require('../../src/errors')
/**
* @typedef {import('../../src/peer-store/types').PeerStore} PeerStore
* @typedef {import('../../src/peer-store/types').MetadataBook} MetadataBook
* @typedef {import('peer-id')} PeerId
*/
describe('metadataBook', () => {
/** @type {PeerId} */
let peerId
before(async () => {
@ -20,10 +27,16 @@ describe('metadataBook', () => {
})
describe('metadataBook.set', () => {
let peerStore, mb
/** @type {PeerStore} */
let peerStore
/** @type {MetadataBook} */
let mb
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
mb = peerStore.metadataBook
})
@ -31,9 +44,9 @@ describe('metadataBook', () => {
peerStore.removeAllListeners()
})
it('throws invalid parameters error if invalid PeerId is provided', () => {
it('throws invalid parameters error if invalid PeerId is provided', async () => {
try {
mb.set('invalid peerId')
await mb.set('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -41,9 +54,9 @@ describe('metadataBook', () => {
throw new Error('invalid peerId should throw error')
})
it('throws invalid parameters error if no key provided', () => {
it('throws invalid parameters error if no metadata provided', async () => {
try {
mb.set(peerId)
await mb.set(peerId)
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -51,9 +64,9 @@ describe('metadataBook', () => {
throw new Error('no key provided should throw error')
})
it('throws invalid parameters error if no value provided', () => {
it('throws invalid parameters error if no value provided', async () => {
try {
mb.set(peerId, 'location')
await mb.setValue(peerId, 'location')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -61,9 +74,9 @@ describe('metadataBook', () => {
throw new Error('no value provided should throw error')
})
it('throws invalid parameters error if value is not a buffer', () => {
it('throws invalid parameters error if value is not a buffer', async () => {
try {
mb.set(peerId, 'location', 'mars')
await mb.setValue(peerId, 'location', 'mars')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -71,30 +84,30 @@ describe('metadataBook', () => {
throw new Error('invalid value provided should throw error')
})
it('stores the content and emit change event', () => {
it('stores the content and emit change event', async () => {
const defer = pDefer()
const metadataKey = 'location'
const metadataValue = uint8ArrayFromString('mars')
peerStore.once('change:metadata', ({ peerId, metadata }) => {
expect(peerId).to.exist()
expect(metadata).to.equal(metadataKey)
expect(metadata.get(metadataKey)).to.equalBytes(metadataValue)
defer.resolve()
})
mb.set(peerId, metadataKey, metadataValue)
await mb.setValue(peerId, metadataKey, metadataValue)
const value = mb.getValue(peerId, metadataKey)
const value = await mb.getValue(peerId, metadataKey)
expect(value).to.equalBytes(metadataValue)
const peerMetadata = mb.get(peerId)
const peerMetadata = await mb.get(peerId)
expect(peerMetadata).to.exist()
expect(peerMetadata.get(metadataKey)).to.equalBytes(metadataValue)
return defer.promise
})
it('emits on set if not storing the exact same content', () => {
it('emits on set if not storing the exact same content', async () => {
const defer = pDefer()
const metadataKey = 'location'
const metadataValue1 = uint8ArrayFromString('mars')
@ -109,22 +122,22 @@ describe('metadataBook', () => {
})
// set 1
mb.set(peerId, metadataKey, metadataValue1)
await mb.setValue(peerId, metadataKey, metadataValue1)
// set 2 (same content)
mb.set(peerId, metadataKey, metadataValue2)
await mb.setValue(peerId, metadataKey, metadataValue2)
const value = mb.getValue(peerId, metadataKey)
const value = await mb.getValue(peerId, metadataKey)
expect(value).to.equalBytes(metadataValue2)
const peerMetadata = mb.get(peerId)
const peerMetadata = await mb.get(peerId)
expect(peerMetadata).to.exist()
expect(peerMetadata.get(metadataKey)).to.equalBytes(metadataValue2)
return defer.promise
})
it('does not emit on set if it is storing the exact same content', () => {
it('does not emit on set if it is storing the exact same content', async () => {
const defer = pDefer()
const metadataKey = 'location'
const metadataValue = uint8ArrayFromString('mars')
@ -138,10 +151,10 @@ describe('metadataBook', () => {
})
// set 1
mb.set(peerId, metadataKey, metadataValue)
await mb.setValue(peerId, metadataKey, metadataValue)
// set 2 (same content)
mb.set(peerId, metadataKey, metadataValue)
await mb.setValue(peerId, metadataKey, metadataValue)
// Wait 50ms for incorrect second event
setTimeout(() => {
@ -153,16 +166,22 @@ describe('metadataBook', () => {
})
describe('metadataBook.get', () => {
let peerStore, mb
/** @type {PeerStore} */
let peerStore
/** @type {MetadataBook} */
let mb
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
mb = peerStore.metadataBook
})
it('throws invalid parameters error if invalid PeerId is provided', () => {
it('throws invalid parameters error if invalid PeerId is provided', async () => {
try {
mb.get('invalid peerId')
await mb.get('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -170,35 +189,43 @@ describe('metadataBook', () => {
throw new Error('invalid peerId should throw error')
})
it('returns undefined if no metadata is known for the provided peer', () => {
const metadata = mb.get(peerId)
it('returns empty if no metadata is known for the provided peer', async () => {
const metadata = await mb.get(peerId)
expect(metadata).to.not.exist()
expect(metadata).to.be.empty()
})
it('returns the metadata stored', () => {
it('returns the metadata stored', async () => {
const metadataKey = 'location'
const metadataValue = uint8ArrayFromString('mars')
const metadata = new Map()
metadata.set(metadataKey, metadataValue)
mb.set(peerId, metadataKey, metadataValue)
await mb.set(peerId, metadata)
const peerMetadata = mb.get(peerId)
const peerMetadata = await mb.get(peerId)
expect(peerMetadata).to.exist()
expect(peerMetadata.get(metadataKey)).to.equalBytes(metadataValue)
})
})
describe('metadataBook.getValue', () => {
let peerStore, mb
/** @type {PeerStore} */
let peerStore
/** @type {MetadataBook} */
let mb
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
mb = peerStore.metadataBook
})
it('throws invalid parameters error if invalid PeerId is provided', () => {
it('throws invalid parameters error if invalid PeerId is provided', async () => {
try {
mb.getValue('invalid peerId')
await mb.getValue('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -206,48 +233,53 @@ describe('metadataBook', () => {
throw new Error('invalid peerId should throw error')
})
it('returns undefined if no metadata is known for the provided peer', () => {
it('returns undefined if no metadata is known for the provided peer', async () => {
const metadataKey = 'location'
const metadata = mb.getValue(peerId, metadataKey)
const metadata = await mb.getValue(peerId, metadataKey)
expect(metadata).to.not.exist()
})
it('returns the metadata value stored for the given key', () => {
it('returns the metadata value stored for the given key', async () => {
const metadataKey = 'location'
const metadataValue = uint8ArrayFromString('mars')
mb.set(peerId, metadataKey, metadataValue)
await mb.setValue(peerId, metadataKey, metadataValue)
const value = mb.getValue(peerId, metadataKey)
const value = await mb.getValue(peerId, metadataKey)
expect(value).to.exist()
expect(value).to.equalBytes(metadataValue)
})
it('returns undefined if no metadata is known for the provided peer and key', () => {
it('returns undefined if no metadata is known for the provided peer and key', async () => {
const metadataKey = 'location'
const metadataBadKey = 'nickname'
const metadataValue = uint8ArrayFromString('mars')
mb.set(peerId, metadataKey, metadataValue)
const metadata = mb.getValue(peerId, metadataBadKey)
await mb.setValue(peerId, metadataKey, metadataValue)
const metadata = await mb.getValue(peerId, metadataBadKey)
expect(metadata).to.not.exist()
})
})
describe('metadataBook.delete', () => {
let peerStore, mb
/** @type {PeerStore} */
let peerStore
/** @type {MetadataBook} */
let mb
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
mb = peerStore.metadataBook
})
it('throwns invalid parameters error if invalid PeerId is provided', () => {
it('throws invalid parameters error if invalid PeerId is provided', async () => {
try {
mb.delete('invalid peerId')
await mb.delete('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -255,16 +287,14 @@ describe('metadataBook', () => {
throw new Error('invalid peerId should throw error')
})
it('returns false if no records exist for the peer and no event is emitted', () => {
it('should not emit event if no records exist for the peer', async () => {
const defer = pDefer()
peerStore.on('change:metadata', () => {
defer.reject()
})
const deleted = mb.delete(peerId)
expect(deleted).to.equal(false)
await mb.delete(peerId)
// Wait 50ms for an incorrect event
setTimeout(() => {
@ -274,37 +304,41 @@ describe('metadataBook', () => {
return defer.promise
})
it('returns true if the record exists and an event is emitted', () => {
it('should emit an event if the record exists for the peer', async () => {
const defer = pDefer()
const metadataKey = 'location'
const metadataValue = uint8ArrayFromString('mars')
mb.set(peerId, metadataKey, metadataValue)
await mb.setValue(peerId, metadataKey, metadataValue)
// Listen after set
peerStore.on('change:metadata', () => {
defer.resolve()
})
const deleted = mb.delete(peerId)
expect(deleted).to.equal(true)
await mb.delete(peerId)
return defer.promise
})
})
describe('metadataBook.deleteValue', () => {
let peerStore, mb
/** @type {PeerStore} */
let peerStore
/** @type {MetadataBook} */
let mb
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
mb = peerStore.metadataBook
})
it('throws invalid parameters error if invalid PeerId is provided', () => {
it('throws invalid parameters error if invalid PeerId is provided', async () => {
try {
mb.deleteValue('invalid peerId')
await mb.deleteValue('invalid peerId')
} catch (/** @type {any} */ err) {
expect(err.code).to.equal(ERR_INVALID_PARAMETERS)
return
@ -312,7 +346,7 @@ describe('metadataBook', () => {
throw new Error('invalid peerId should throw error')
})
it('returns false if no records exist for the peer and no event is emitted', () => {
it('should not emit event if no records exist for the peer', async () => {
const defer = pDefer()
const metadataKey = 'location'
@ -320,9 +354,7 @@ describe('metadataBook', () => {
defer.reject()
})
const deleted = mb.deleteValue(peerId, metadataKey)
expect(deleted).to.equal(false)
await mb.deleteValue(peerId, metadataKey)
// Wait 50ms for an incorrect event
setTimeout(() => {
@ -332,45 +364,19 @@ describe('metadataBook', () => {
return defer.promise
})
it('returns true if the record exists and an event is emitted', () => {
it('should emit event if a record exists for the peer', async () => {
const defer = pDefer()
const metadataKey = 'location'
const metadataValue = uint8ArrayFromString('mars')
mb.set(peerId, metadataKey, metadataValue)
await mb.setValue(peerId, metadataKey, metadataValue)
// Listen after set
peerStore.on('change:metadata', () => {
defer.resolve()
})
const deleted = mb.deleteValue(peerId, metadataKey)
expect(deleted).to.equal(true)
return defer.promise
})
it('returns false if there is a record for the peer but not the given metadata key', () => {
const defer = pDefer()
const metadataKey = 'location'
const metadataBadKey = 'nickname'
const metadataValue = uint8ArrayFromString('mars')
mb.set(peerId, metadataKey, metadataValue)
peerStore.on('change:metadata', () => {
defer.reject()
})
const deleted = mb.deleteValue(peerId, metadataBadKey)
expect(deleted).to.equal(false)
// Wait 50ms for incorrect invalid event
setTimeout(() => {
defer.resolve()
}, 50)
await mb.deleteValue(peerId, metadataKey)
return defer.promise
})
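The tests above capture the shape of the new promise-based MetadataBook: values are written with setValue, read back with get (which now resolves to an empty Map rather than undefined) or getValue, and every call is awaited. A minimal usage sketch along the same lines, reusing the requires from this spec; the helper name and peer ids are illustrative only:

const { MemoryDatastore } = require('datastore-core/memory')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const PeerStore = require('../../src/peer-store')

async function metadataExample (ownPeerId, remotePeerId) {
  const peerStore = new PeerStore({ peerId: ownPeerId, datastore: new MemoryDatastore() })

  // every metadataBook method now returns a promise
  await peerStore.metadataBook.setValue(remotePeerId, 'location', uint8ArrayFromString('mars'))

  const metadata = await peerStore.metadataBook.get(remotePeerId) // Map, empty when nothing is stored
  const location = await peerStore.metadataBook.getValue(remotePeerId, 'location') // Uint8Array or undefined

  // delete resolves either way and emits change:metadata only if a record existed
  await peerStore.metadataBook.delete(remotePeerId)

  return { metadata, location }
}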

View File

@ -6,6 +6,7 @@ const sinon = require('sinon')
const baseOptions = require('../utils/base-options')
const peerUtils = require('../utils/creators/peer')
const all = require('it-all')
describe('libp2p.peerStore', () => {
let libp2p, remoteLibp2p
@ -35,13 +36,14 @@ describe('libp2p.peerStore', () => {
expect(spyAddressBook).to.have.property('called', true)
expect(spyKeyBook).to.have.property('called', true)
const localPeers = libp2p.peerStore.peers
expect(localPeers.size).to.equal(1)
const localPeers = await all(libp2p.peerStore.getPeers())
const publicKeyInLocalPeer = localPeers.get(remoteIdStr).id.pubKey
expect(localPeers.length).to.equal(1)
const publicKeyInLocalPeer = localPeers[0].id.pubKey
expect(publicKeyInLocalPeer.bytes).to.equalBytes(remoteLibp2p.peerId.pubKey.bytes)
const publicKeyInRemotePeer = remoteLibp2p.peerStore.keyBook.get(libp2p.peerId)
const publicKeyInRemotePeer = await remoteLibp2p.peerStore.keyBook.get(libp2p.peerId)
expect(publicKeyInRemotePeer).to.exist()
expect(publicKeyInRemotePeer.bytes).to.equalBytes(libp2p.peerId.pubKey.bytes)
})

View File

@ -2,11 +2,11 @@
/* eslint-env mocha */
const { expect } = require('aegir/utils/chai')
const all = require('it-all')
const PeerStore = require('../../src/peer-store')
const { Multiaddr } = require('multiaddr')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const { MemoryDatastore } = require('datastore-core/memory')
const peerUtils = require('../utils/creators/peer')
const addr1 = new Multiaddr('/ip4/127.0.0.1/tcp/8000')
@ -18,6 +18,10 @@ const proto1 = '/protocol1'
const proto2 = '/protocol2'
const proto3 = '/protocol3'
/**
* @typedef {import('../../src/peer-store/types').PeerStore} PeerStore
*/
describe('peer-store', () => {
let peerIds
before(async () => {
@ -27,62 +31,65 @@ describe('peer-store', () => {
})
describe('empty books', () => {
/** @type {PeerStore} */
let peerStore
beforeEach(() => {
peerStore = new PeerStore({ peerId: peerIds[4] })
peerStore = new PeerStore({
peerId: peerIds[4],
datastore: new MemoryDatastore()
})
})
it('has an empty map of peers', () => {
const peers = peerStore.peers
expect(peers.size).to.equal(0)
it('has an empty map of peers', async () => {
const peers = await all(peerStore.getPeers())
expect(peers.length).to.equal(0)
})
it('returns false on trying to delete a non existant peerId', () => {
const deleted = peerStore.delete(peerIds[0])
expect(deleted).to.equal(false)
it('deletes a peerId', async () => {
await peerStore.addressBook.set(peerIds[0], [new Multiaddr('/ip4/127.0.0.1/tcp/4001')])
await expect(peerStore.has(peerIds[0])).to.eventually.be.true()
await peerStore.delete(peerIds[0])
await expect(peerStore.has(peerIds[0])).to.eventually.be.false()
})
it('returns undefined on trying to find a non existant peerId', () => {
const peer = peerStore.get(peerIds[0])
expect(peer).to.not.exist()
})
it('sets the peer\'s public key to the KeyBook', () => {
peerStore.keyBook.set(peerIds[0], peerIds[0].pubKey)
const pubKey = peerStore.keyBook.get(peerIds[0])
expect(pubKey).to.exist()
it('sets the peer\'s public key to the KeyBook', async () => {
await peerStore.keyBook.set(peerIds[0], peerIds[0].pubKey)
await expect(peerStore.keyBook.get(peerIds[0])).to.eventually.deep.equal(peerIds[0].pubKey)
})
})
describe('previously populated books', () => {
/** @type {PeerStore} */
let peerStore
beforeEach(() => {
peerStore = new PeerStore({ peerId: peerIds[4] })
// Add peer0 with { addr1, addr2 } and { proto1 }
peerStore.addressBook.set(peerIds[0], [addr1, addr2])
peerStore.protoBook.set(peerIds[0], [proto1])
// Add peer1 with { addr3 } and { proto2, proto3 }
peerStore.addressBook.set(peerIds[1], [addr3])
peerStore.protoBook.set(peerIds[1], [proto2, proto3])
// Add peer2 with { addr4 }
peerStore.addressBook.set(peerIds[2], [addr4])
// Add peer3 with { addr4 } and { proto2 }
peerStore.addressBook.set(peerIds[3], [addr4])
peerStore.protoBook.set(peerIds[3], [proto2])
beforeEach(async () => {
peerStore = new PeerStore({
peerId: peerIds[4],
datastore: new MemoryDatastore()
})
it('has peers', () => {
const peers = peerStore.peers
// Add peer0 with { addr1, addr2 } and { proto1 }
await peerStore.addressBook.set(peerIds[0], [addr1, addr2])
await peerStore.protoBook.set(peerIds[0], [proto1])
expect(peers.size).to.equal(4)
expect(Array.from(peers.keys())).to.have.members([
// Add peer1 with { addr3 } and { proto2, proto3 }
await peerStore.addressBook.set(peerIds[1], [addr3])
await peerStore.protoBook.set(peerIds[1], [proto2, proto3])
// Add peer2 with { addr4 }
await peerStore.addressBook.set(peerIds[2], [addr4])
// Add peer3 with { addr4 } and { proto2 }
await peerStore.addressBook.set(peerIds[3], [addr4])
await peerStore.protoBook.set(peerIds[3], [proto2])
})
it('has peers', async () => {
const peers = await all(peerStore.getPeers())
expect(peers.length).to.equal(4)
expect(peers.map(peer => peer.id.toB58String())).to.have.members([
peerIds[0].toB58String(),
peerIds[1].toB58String(),
peerIds[2].toB58String(),
@ -90,47 +97,45 @@ describe('peer-store', () => {
])
})
it('returns true on deleting a stored peer', () => {
const deleted = peerStore.delete(peerIds[0])
expect(deleted).to.equal(true)
it('deletes a stored peer', async () => {
await peerStore.delete(peerIds[0])
const peers = peerStore.peers
expect(peers.size).to.equal(3)
const peers = await all(peerStore.getPeers())
expect(peers.length).to.equal(3)
expect(Array.from(peers.keys())).to.not.have.members([peerIds[0].toB58String()])
})
it('returns true on deleting a stored peer which is only on one book', () => {
const deleted = peerStore.delete(peerIds[2])
expect(deleted).to.equal(true)
it('deletes a stored peer which is only on one book', async () => {
await peerStore.delete(peerIds[2])
const peers = peerStore.peers
expect(peers.size).to.equal(3)
const peers = await all(peerStore.getPeers())
expect(peers.length).to.equal(3)
})
it('gets the stored information of a peer in all its books', () => {
const peer = peerStore.get(peerIds[0])
it('gets the stored information of a peer in all its books', async () => {
const peer = await peerStore.get(peerIds[0])
expect(peer).to.exist()
expect(peer.protocols).to.have.members([proto1])
const peerMultiaddrs = peer.addresses.map((mi) => mi.multiaddr)
expect(peerMultiaddrs).to.have.members([addr1, addr2])
expect(peerMultiaddrs).to.have.deep.members([addr1, addr2])
expect(peer.id).to.exist()
expect(peer.id.toB58String()).to.equal(peerIds[0].toB58String())
})
it('gets the stored information of a peer that is not present in all its books', () => {
const peers = peerStore.get(peerIds[2])
it('gets the stored information of a peer that is not present in all its books', async () => {
const peers = await peerStore.get(peerIds[2])
expect(peers).to.exist()
expect(peers.protocols.length).to.eql(0)
const peerMultiaddrs = peers.addresses.map((mi) => mi.multiaddr)
expect(peerMultiaddrs).to.have.members([addr4])
expect(peerMultiaddrs).to.have.deep.members([addr4])
})
it('can find all the peers supporting a protocol', () => {
it('can find all the peers supporting a protocol', async () => {
const peerSupporting2 = []
for (const [, peer] of peerStore.peers.entries()) {
for await (const peer of peerStore.getPeers()) {
if (peer.protocols.includes(proto2)) {
peerSupporting2.push(peer)
}
@ -141,67 +146,71 @@ describe('peer-store', () => {
expect(peerSupporting2[1].id.toB58String()).to.eql(peerIds[3].toB58String())
})
it('can find all the peers listening on a given address', () => {
const peerListenint4 = []
it('can find all the peers listening on a given address', async () => {
const peerListening4 = []
for (const [, peer] of peerStore.peers.entries()) {
const multiaddrs = peer.addresses.map((mi) => mi.multiaddr)
for await (const peer of peerStore.getPeers()) {
const multiaddrs = peer.addresses.map((mi) => mi.multiaddr.toString())
if (multiaddrs.includes(addr4)) {
peerListenint4.push(peer)
if (multiaddrs.includes(addr4.toString())) {
peerListening4.push(peer)
}
}
expect(peerListenint4.length).to.eql(2)
expect(peerListenint4[0].id.toB58String()).to.eql(peerIds[2].toB58String())
expect(peerListenint4[1].id.toB58String()).to.eql(peerIds[3].toB58String())
expect(peerListening4.length).to.eql(2)
expect(peerListening4[0].id.toB58String()).to.eql(peerIds[2].toB58String())
expect(peerListening4[1].id.toB58String()).to.eql(peerIds[3].toB58String())
})
})
describe('peerStore.peers', () => {
describe('peerStore.getPeers', () => {
/** @type {PeerStore} */
let peerStore
beforeEach(() => {
peerStore = new PeerStore({ peerId: peerIds[4] })
peerStore = new PeerStore({
peerId: peerIds[4],
datastore: new MemoryDatastore()
})
})
it('returns peers if only addresses are known', () => {
peerStore.addressBook.set(peerIds[0], [addr1])
it('returns peers if only addresses are known', async () => {
await peerStore.addressBook.set(peerIds[0], [addr1])
const peers = peerStore.peers
expect(peers.size).to.equal(1)
const peers = await all(peerStore.getPeers())
expect(peers.length).to.equal(1)
const peerData = peers.get(peerIds[0].toB58String())
const peerData = peers[0]
expect(peerData).to.exist()
expect(peerData.id).to.exist()
expect(peerData.addresses).to.have.lengthOf(1)
expect(peerData.protocols).to.have.lengthOf(0)
expect(peerData.metadata).to.not.exist()
expect(peerData.metadata).to.be.empty()
})
it('returns peers if only protocols are known', () => {
peerStore.protoBook.set(peerIds[0], [proto1])
it('returns peers if only protocols are known', async () => {
await peerStore.protoBook.set(peerIds[0], [proto1])
const peers = peerStore.peers
expect(peers.size).to.equal(1)
const peers = await all(peerStore.getPeers())
expect(peers.length).to.equal(1)
const peerData = peers.get(peerIds[0].toB58String())
const peerData = peers[0]
expect(peerData).to.exist()
expect(peerData.id).to.exist()
expect(peerData.addresses).to.have.lengthOf(0)
expect(peerData.protocols).to.have.lengthOf(1)
expect(peerData.metadata).to.not.exist()
expect(peerData.metadata).to.be.empty()
})
it('returns peers if only metadata is known', () => {
it('returns peers if only metadata is known', async () => {
const metadataKey = 'location'
const metadataValue = uint8ArrayFromString('earth')
peerStore.metadataBook.set(peerIds[0], metadataKey, metadataValue)
await peerStore.metadataBook.setValue(peerIds[0], metadataKey, metadataValue)
const peers = peerStore.peers
expect(peers.size).to.equal(1)
const peers = await all(peerStore.getPeers())
expect(peers.length).to.equal(1)
const peerData = peers.get(peerIds[0].toB58String())
const peerData = peers[0]
expect(peerData).to.exist()
expect(peerData.id).to.exist()
expect(peerData.addresses).to.have.lengthOf(0)
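As this spec shows, the synchronous peerStore.peers Map is replaced by an async-iterable peerStore.getPeers(), usually collected with it-all. A short sketch of both access patterns, assuming a peerStore constructed with a datastore as in the tests above; the function name is illustrative:

const all = require('it-all')

async function listPeers (peerStore) {
  // iterate lazily over everything the store knows about
  for await (const peer of peerStore.getPeers()) {
    console.log(peer.id.toB58String(), peer.protocols, peer.addresses.map((a) => a.multiaddr.toString()))
  }

  // or collect into an array when a count or snapshot is needed
  const peers = await all(peerStore.getPeers())
  return peers.length
}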

View File

@ -1,608 +0,0 @@
'use strict'
/* eslint-env mocha */
const { expect } = require('aegir/utils/chai')
const sinon = require('sinon')
const Envelope = require('../../src/record/envelope')
const PeerRecord = require('../../src/record/peer-record')
const PeerStore = require('../../src/peer-store/persistent')
const { Multiaddr } = require('multiaddr')
const { MemoryDatastore } = require('datastore-core/memory')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const peerUtils = require('../utils/creators/peer')
describe('Persisted PeerStore', () => {
let datastore, peerStore
let peerId
before(async () => {
[peerId] = await peerUtils.createPeerId({ fixture: false })
})
describe('start and stop flows', () => {
beforeEach(() => {
datastore = new MemoryDatastore()
peerStore = new PeerStore({ datastore, peerId })
})
afterEach(() => peerStore.stop())
it('should try to load content from an empty datastore on start', async () => {
const spyQuery = sinon.spy(datastore, 'query')
const spyProcessEntry = sinon.spy(peerStore, '_processDatastoreEntry')
await peerStore.start()
expect(spyQuery).to.have.property('callCount', 1)
expect(spyProcessEntry).to.have.property('callCount', 0)
// No data to populate
expect(peerStore.peers.size).to.eq(0)
})
it('should try to commit data on stop but should not add to batch if not exists', async () => {
const spyDs = sinon.spy(peerStore, '_commitData')
const spyBatch = sinon.spy(datastore, 'batch')
await peerStore.start()
expect(spyDs).to.have.property('callCount', 0)
await peerStore.stop()
expect(spyBatch).to.have.property('callCount', 0)
expect(spyDs).to.have.property('callCount', 1)
})
})
describe('simple setup with content stored per change (threshold 1)', () => {
beforeEach(() => {
datastore = new MemoryDatastore()
peerStore = new PeerStore({ datastore, peerId, threshold: 1 })
})
afterEach(() => peerStore.stop())
it('should store peerStore content on datastore after peer marked as dirty (threshold 1)', async () => {
const [peer] = await peerUtils.createPeerId({ number: 2 })
const multiaddrs = [new Multiaddr('/ip4/156.10.1.22/tcp/1000')]
const protocols = ['/ping/1.0.0']
const spyDirty = sinon.spy(peerStore, '_addDirtyPeer')
const spyDs = sinon.spy(datastore, 'batch')
const commitSpy = sinon.spy(peerStore, '_commitData')
await peerStore.start()
// AddressBook
peerStore.addressBook.set(peer, multiaddrs)
expect(spyDirty).to.have.property('callCount', 1) // Address
expect(spyDs).to.have.property('callCount', 1)
// let batch commit complete
await Promise.all(commitSpy.returnValues)
// ProtoBook
peerStore.protoBook.set(peer, protocols)
expect(spyDirty).to.have.property('callCount', 2) // Protocol
expect(spyDs).to.have.property('callCount', 2)
// let batch commit complete
await Promise.all(commitSpy.returnValues)
// Should have three peer records stored in the datastore
const queryParams = {
prefix: '/peers/'
}
let count = 0
for await (const _ of datastore.query(queryParams)) { // eslint-disable-line
count++
}
expect(count).to.equal(2)
// Validate data
const storedPeer = peerStore.get(peer)
expect(storedPeer.id.toB58String()).to.eql(peer.toB58String())
expect(storedPeer.protocols).to.have.members(protocols)
expect(storedPeer.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[0].toString()])
expect(storedPeer.addresses.map((a) => a.isCertified)).to.have.members([false])
})
it('should load content to the peerStore when restart but not put in datastore again', async () => {
const spyDs = sinon.spy(datastore, 'batch')
const peers = await peerUtils.createPeerId({ number: 2 })
const commitSpy = sinon.spy(peerStore, '_commitData')
const multiaddrs = [
new Multiaddr('/ip4/156.10.1.22/tcp/1000'),
new Multiaddr('/ip4/156.10.1.23/tcp/1000')
]
const protocols = ['/ping/1.0.0']
await peerStore.start()
// AddressBook
peerStore.addressBook.set(peers[0], [multiaddrs[0]])
peerStore.addressBook.set(peers[1], [multiaddrs[1]])
// let batch commit complete
await Promise.all(commitSpy.returnValues)
// KeyBook
peerStore.keyBook.set(peers[0], peers[0].pubKey)
peerStore.keyBook.set(peers[1], peers[1].pubKey)
// no batch commit as public key inline
// ProtoBook
peerStore.protoBook.set(peers[0], protocols)
peerStore.protoBook.set(peers[1], protocols)
// let batch commit complete
await Promise.all(commitSpy.returnValues)
// MetadataBook
peerStore.metadataBook.set(peers[0], 'location', uint8ArrayFromString('earth'))
// let batch commit complete
await Promise.all(commitSpy.returnValues)
expect(spyDs).to.have.property('callCount', 5) // 2 Address + 2 Proto + 1 Metadata
expect(peerStore.peers.size).to.equal(2)
await peerStore.stop()
peerStore.keyBook.data.clear()
peerStore.addressBook.data.clear()
peerStore.protoBook.data.clear()
// Load on restart
const spy = sinon.spy(peerStore, '_processDatastoreEntry')
await peerStore.start()
expect(spy).to.have.property('callCount', 5)
expect(spyDs).to.have.property('callCount', 5)
expect(peerStore.peers.size).to.equal(2)
expect(peerStore.addressBook.data.size).to.equal(2)
expect(peerStore.keyBook.data.size).to.equal(0)
expect(peerStore.protoBook.data.size).to.equal(2)
expect(peerStore.metadataBook.data.size).to.equal(1)
})
it('should delete content from the datastore on delete', async () => {
const [peer] = await peerUtils.createPeerId()
const multiaddrs = [new Multiaddr('/ip4/156.10.1.22/tcp/1000')]
const protocols = ['/ping/1.0.0']
const commitSpy = sinon.spy(peerStore, '_commitData')
await peerStore.start()
// AddressBook
peerStore.addressBook.set(peer, multiaddrs)
// ProtoBook
peerStore.protoBook.set(peer, protocols)
// MetadataBook
peerStore.metadataBook.set(peer, 'location', uint8ArrayFromString('earth'))
// let batch commit complete
await Promise.all(commitSpy.returnValues)
const spyDs = sinon.spy(datastore, 'batch')
const spyAddressBook = sinon.spy(peerStore.addressBook, 'delete')
const spyKeyBook = sinon.spy(peerStore.keyBook, 'delete')
const spyProtoBook = sinon.spy(peerStore.protoBook, 'delete')
const spyMetadataBook = sinon.spy(peerStore.metadataBook, 'delete')
// Delete from PeerStore
peerStore.delete(peer)
// let batch commit complete
await Promise.all(commitSpy.returnValues)
await peerStore.stop()
expect(spyAddressBook).to.have.property('callCount', 1)
expect(spyKeyBook).to.have.property('callCount', 1)
expect(spyProtoBook).to.have.property('callCount', 1)
expect(spyMetadataBook).to.have.property('callCount', 1)
expect(spyDs).to.have.property('callCount', 3)
// Should have zero peer records stored in the datastore
const queryParams = {
prefix: '/peers/'
}
for await (const _ of datastore.query(queryParams)) { // eslint-disable-line
throw new Error('Datastore should be empty')
}
})
it('should store certified peer records after peer marked as dirty (threshold 1)', async () => {
const [peerId] = await peerUtils.createPeerId()
const multiaddrs = [new Multiaddr('/ip4/156.10.1.22/tcp/1000')]
const spyDirty = sinon.spy(peerStore, '_addDirtyPeer')
const spyDs = sinon.spy(datastore, 'batch')
const commitSpy = sinon.spy(peerStore, '_commitData')
await peerStore.start()
const peerRecord = new PeerRecord({
peerId,
multiaddrs
})
const envelope = await Envelope.seal(peerRecord, peerId)
// consume peer record
const consumed = peerStore.addressBook.consumePeerRecord(envelope)
expect(consumed).to.eql(true)
expect(spyDirty).to.have.property('callCount', 1) // Address
expect(spyDs).to.have.property('callCount', 1)
// let batch commit complete
await Promise.all(commitSpy.returnValues)
// Should have three peer records stored in the datastore
const queryParams = {
prefix: '/peers/'
}
let count = 0
for await (const _ of datastore.query(queryParams)) { // eslint-disable-line
count++
}
expect(count).to.equal(1)
// Validate data
const storedPeer = peerStore.get(peerId)
expect(storedPeer.id.toB58String()).to.eql(peerId.toB58String())
expect(storedPeer.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[0].toString()])
expect(storedPeer.addresses.map((a) => a.isCertified)).to.have.members([true])
})
it('should load certified peer records to the peerStore when restart but not put in datastore again', async () => {
const spyDs = sinon.spy(datastore, 'batch')
const peers = await peerUtils.createPeerId({ number: 2 })
const commitSpy = sinon.spy(peerStore, '_commitData')
const multiaddrs = [
new Multiaddr('/ip4/156.10.1.22/tcp/1000'),
new Multiaddr('/ip4/156.10.1.23/tcp/1000')
]
const peerRecord0 = new PeerRecord({
peerId: peers[0],
multiaddrs: [multiaddrs[0]]
})
const envelope0 = await Envelope.seal(peerRecord0, peers[0])
const peerRecord1 = new PeerRecord({
peerId: peers[1],
multiaddrs: [multiaddrs[1]]
})
const envelope1 = await Envelope.seal(peerRecord1, peers[1])
await peerStore.start()
// AddressBook
let consumed = peerStore.addressBook.consumePeerRecord(envelope0)
expect(consumed).to.eql(true)
consumed = peerStore.addressBook.consumePeerRecord(envelope1)
expect(consumed).to.eql(true)
// let batch commit complete
await Promise.all(commitSpy.returnValues)
expect(spyDs).to.have.property('callCount', 2) // 2 Address + 2 Key + 2 Proto + 1 Metadata
expect(peerStore.peers.size).to.equal(2)
await peerStore.stop()
peerStore.addressBook.data.clear()
// Load on restart
const spy = sinon.spy(peerStore, '_processDatastoreEntry')
await peerStore.start()
expect(spy).to.have.property('callCount', 2)
expect(spyDs).to.have.property('callCount', 2)
expect(peerStore.peers.size).to.equal(2)
expect(peerStore.addressBook.data.size).to.equal(2)
expect(peerStore.addressBook.getRawEnvelope(peers[0])).to.exist()
expect(peerStore.addressBook.getRawEnvelope(peers[1])).to.exist()
// Validate stored envelopes
const storedEnvelope0 = await peerStore.addressBook.getPeerRecord(peers[0])
expect(envelope0.equals(storedEnvelope0)).to.eql(true)
const storedEnvelope1 = await peerStore.addressBook.getPeerRecord(peers[1])
expect(envelope1.equals(storedEnvelope1)).to.eql(true)
// Validate multiaddrs
const storedPeer0 = peerStore.get(peers[0])
expect(storedPeer0.id.toB58String()).to.eql(peers[0].toB58String())
expect(storedPeer0.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[0].toString()])
expect(storedPeer0.addresses.map((a) => a.isCertified)).to.have.members([true])
const storedPeer1 = peerStore.get(peers[1])
expect(storedPeer1.id.toB58String()).to.eql(peers[1].toB58String())
expect(storedPeer1.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[1].toString()])
expect(storedPeer1.addresses.map((a) => a.isCertified)).to.have.members([true])
})
it('should delete certified peer records from the datastore on delete', async () => {
const [peer] = await peerUtils.createPeerId()
const multiaddrs = [new Multiaddr('/ip4/156.10.1.22/tcp/1000')]
const commitSpy = sinon.spy(peerStore, '_commitData')
await peerStore.start()
// AddressBook
const peerRecord = new PeerRecord({
peerId: peer,
multiaddrs
})
const envelope = await Envelope.seal(peerRecord, peer)
// consume peer record
const consumed = peerStore.addressBook.consumePeerRecord(envelope)
expect(consumed).to.eql(true)
// let batch commit complete
await Promise.all(commitSpy.returnValues)
expect(peerStore.addressBook.getRawEnvelope(peer)).to.exist()
const spyDs = sinon.spy(datastore, 'batch')
const spyAddressBook = sinon.spy(peerStore.addressBook, 'delete')
// Delete from PeerStore
peerStore.delete(peer)
// let batch commit complete
await Promise.all(commitSpy.returnValues)
await peerStore.stop()
expect(spyAddressBook).to.have.property('callCount', 1)
expect(spyDs).to.have.property('callCount', 1)
// Should have zero peer records stored in the datastore
const queryParams = {
prefix: '/peers/'
}
for await (const _ of datastore.query(queryParams)) { // eslint-disable-line
throw new Error('Datastore should be empty')
}
expect(peerStore.addressBook.getRawEnvelope(peer)).to.not.exist()
})
})
describe('setup with content not stored per change (threshold 2)', () => {
beforeEach(() => {
datastore = new MemoryDatastore()
peerStore = new PeerStore({ datastore, peerId, threshold: 2 })
})
afterEach(() => peerStore.stop())
it('should not commit until threshold is reached', async () => {
const spyDirty = sinon.spy(peerStore, '_addDirtyPeer')
const spyDirtyMetadata = sinon.spy(peerStore, '_addDirtyPeerMetadata')
const spyDs = sinon.spy(datastore, 'batch')
const commitSpy = sinon.spy(peerStore, '_commitData')
const peers = await peerUtils.createPeerId({ number: 2 })
const multiaddrs = [new Multiaddr('/ip4/156.10.1.22/tcp/1000')]
const protocols = ['/ping/1.0.0']
await peerStore.start()
expect(spyDirty).to.have.property('callCount', 0)
expect(spyDs).to.have.property('callCount', 0)
// Add Peer0 data in multiple books
peerStore.addressBook.set(peers[0], multiaddrs)
peerStore.protoBook.set(peers[0], protocols)
peerStore.metadataBook.set(peers[0], 'location', uint8ArrayFromString('earth'))
// let batch commit complete
await Promise.all(commitSpy.returnValues)
// Remove data from the same Peer
peerStore.addressBook.delete(peers[0])
// let batch commit complete
await Promise.all(commitSpy.returnValues)
expect(spyDirty).to.have.property('callCount', 3) // 2 AddrBook ops, 1 ProtoBook op
expect(spyDirtyMetadata).to.have.property('callCount', 1) // 1 MetadataBook op
expect(peerStore._dirtyPeers.size).to.equal(1)
expect(spyDs).to.have.property('callCount', 0)
const queryParams = {
prefix: '/peers/'
}
for await (const _ of datastore.query(queryParams)) { // eslint-disable-line
throw new Error('Datastore should be empty')
}
// Add data for second book
peerStore.addressBook.set(peers[1], multiaddrs)
// let batch commit complete
await Promise.all(commitSpy.returnValues)
expect(spyDirty).to.have.property('callCount', 4)
expect(spyDirtyMetadata).to.have.property('callCount', 1)
expect(spyDs).to.have.property('callCount', 1)
// Should have three peer records stored in the datastore
let count = 0
for await (const _ of datastore.query(queryParams)) { // eslint-disable-line
count++
}
expect(count).to.equal(3)
expect(peerStore.peers.size).to.equal(2)
})
it('should commit on stop if threshold was not reached', async () => {
const spyDirty = sinon.spy(peerStore, '_addDirtyPeer')
const spyDs = sinon.spy(datastore, 'batch')
const protocols = ['/ping/1.0.0']
const [peer] = await peerUtils.createPeerId()
await peerStore.start()
// Add Peer data in a book
peerStore.protoBook.set(peer, protocols)
expect(spyDs).to.have.property('callCount', 0)
expect(spyDirty).to.have.property('callCount', 1) // ProtoBook
expect(peerStore._dirtyPeers.size).to.equal(1)
const queryParams = {
prefix: '/peers/'
}
for await (const _ of datastore.query(queryParams)) { // eslint-disable-line
throw new Error('Datastore should be empty')
}
await peerStore.stop()
expect(spyDirty).to.have.property('callCount', 1)
expect(spyDs).to.have.property('callCount', 1)
expect(peerStore._dirtyPeers.size).to.equal(0) // Reset
// Should have one peer record stored in the datastore
let count = 0
for await (const _ of datastore.query(queryParams)) { // eslint-disable-line
count++
}
expect(count).to.equal(1)
expect(peerStore.peers.size).to.equal(1)
})
})
})
describe('libp2p.peerStore (Persisted)', () => {
describe('disabled by default', () => {
let libp2p
before(async () => {
[libp2p] = await peerUtils.createPeer({
started: false
})
})
afterEach(() => libp2p.stop())
it('should not have have persistence capabilities', async () => {
await libp2p.start()
expect(libp2p.peerStore._dirtyPeers).to.not.exist()
expect(libp2p.peerStore.threshold).to.not.exist()
})
})
describe('enabled', () => {
let libp2p
let memoryDatastore
beforeEach(async () => {
memoryDatastore = new MemoryDatastore()
;[libp2p] = await peerUtils.createPeer({
started: false,
config: {
datastore: memoryDatastore,
peerStore: {
persistence: true,
threshold: 2 // trigger on second peer changed
}
}
})
})
afterEach(() => libp2p.stop())
it('should start on libp2p start and load content', async () => {
const spyPeerStore = sinon.spy(libp2p.peerStore, 'start')
const spyDs = sinon.spy(memoryDatastore, 'query')
await libp2p.start()
expect(spyPeerStore).to.have.property('callCount', 1)
expect(spyDs).to.have.property('callCount', 1)
})
it('should load content to the peerStore when a new node is started with the same datastore', async () => {
const commitSpy = sinon.spy(libp2p.peerStore, '_commitData')
const peers = await peerUtils.createPeerId({ number: 3 })
const multiaddrs = [
new Multiaddr('/ip4/156.10.1.22/tcp/1000'),
new Multiaddr('/ip4/156.10.1.23/tcp/1000')
]
const protocols = ['/ping/1.0.0']
await libp2p.start()
// AddressBook
libp2p.peerStore.addressBook.set(peers[1], [multiaddrs[0]])
libp2p.peerStore.addressBook.set(peers[2], [multiaddrs[1]])
// let batch commit complete
await Promise.all(commitSpy.returnValues)
// ProtoBook
libp2p.peerStore.protoBook.set(peers[1], protocols)
libp2p.peerStore.protoBook.set(peers[2], protocols)
// let batch commit complete
await Promise.all(commitSpy.returnValues)
expect(libp2p.peerStore.peers.size).to.equal(2)
await libp2p.stop()
// Use a new node with the previously populated datastore
const [newNode] = await peerUtils.createPeer({
started: false,
config: {
datastore: memoryDatastore,
peerStore: {
persistence: true
},
config: {
peerDiscovery: {
autoDial: false
}
}
}
})
expect(newNode.peerStore.peers.size).to.equal(0)
const spy = sinon.spy(newNode.peerStore, '_processDatastoreEntry')
await newNode.start()
expect(spy).to.have.property('callCount', 4) // 4 datastore entries
expect(newNode.peerStore.peers.size).to.equal(2)
// Validate data
const peer0 = newNode.peerStore.get(peers[1])
expect(peer0.id.toB58String()).to.eql(peers[1].toB58String())
expect(peer0.protocols).to.have.members(protocols)
expect(peer0.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[0].toString()])
const peer1 = newNode.peerStore.get(peers[2])
expect(peer1.id.toB58String()).to.eql(peers[2].toB58String())
expect(peer1.protocols).to.have.members(protocols)
expect(peer1.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[1].toString()])
await newNode.stop()
})
})
})

View File

@ -3,7 +3,7 @@
const { expect } = require('aegir/utils/chai')
const sinon = require('sinon')
const { MemoryDatastore } = require('datastore-core/memory')
const pDefer = require('p-defer')
const pWaitFor = require('p-wait-for')
@ -11,12 +11,19 @@ const PeerStore = require('../../src/peer-store')
const peerUtils = require('../utils/creators/peer')
const {
ERR_INVALID_PARAMETERS
codes: { ERR_INVALID_PARAMETERS }
} = require('../../src/errors')
/**
* @typedef {import('../../src/peer-store/types').PeerStore} PeerStore
* @typedef {import('../../src/peer-store/types').ProtoBook} ProtoBook
* @typedef {import('peer-id')} PeerId
*/
const arraysAreEqual = (a, b) => a.length === b.length && a.sort().every((item, index) => b[index] === item)
describe('protoBook', () => {
/** @type {PeerId} */
let peerId
before(async () => {
@ -24,10 +31,16 @@ describe('protoBook', () => {
})
describe('protoBook.set', () => {
let peerStore, pb
/** @type {PeerStore} */
let peerStore
/** @type {ProtoBook} */
let pb
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
pb = peerStore.protoBook
})
@ -35,19 +48,15 @@ describe('protoBook', () => {
peerStore.removeAllListeners()
})
it('throwns invalid parameters error if invalid PeerId is provided', () => {
expect(() => {
pb.set('invalid peerId')
}).to.throw(ERR_INVALID_PARAMETERS)
it('throws invalid parameters error if invalid PeerId is provided', async () => {
await expect(pb.set('invalid peerId')).to.eventually.be.rejected().with.property('code', ERR_INVALID_PARAMETERS)
})
it('throwns invalid parameters error if no protocols provided', () => {
expect(() => {
pb.set(peerId)
}).to.throw(ERR_INVALID_PARAMETERS)
it('throws invalid parameters error if no protocols provided', async () => {
await expect(pb.set(peerId)).to.eventually.be.rejected().with.property('code', ERR_INVALID_PARAMETERS)
})
it('replaces the stored content by default and emit change event', () => {
it('replaces the stored content by default and emit change event', async () => {
const defer = pDefer()
const supportedProtocols = ['protocol1', 'protocol2']
@ -57,14 +66,14 @@ describe('protoBook', () => {
defer.resolve()
})
pb.set(peerId, supportedProtocols)
const protocols = pb.get(peerId)
await pb.set(peerId, supportedProtocols)
const protocols = await pb.get(peerId)
expect(protocols).to.have.deep.members(supportedProtocols)
return defer.promise
await defer.promise
})
it('emits on set if not storing the exact same content', () => {
it('emits on set if not storing the exact same content', async () => {
const defer = pDefer()
const supportedProtocolsA = ['protocol1', 'protocol2']
@ -79,17 +88,17 @@ describe('protoBook', () => {
})
// set 1
pb.set(peerId, supportedProtocolsA)
await pb.set(peerId, supportedProtocolsA)
// set 2 (same content)
pb.set(peerId, supportedProtocolsB)
const protocols = pb.get(peerId)
await pb.set(peerId, supportedProtocolsB)
const protocols = await pb.get(peerId)
expect(protocols).to.have.deep.members(supportedProtocolsB)
return defer.promise
await defer.promise
})
it('does not emit on set if it is storing the exact same content', () => {
it('does not emit on set if it is storing the exact same content', async () => {
const defer = pDefer()
const supportedProtocols = ['protocol1', 'protocol2']
@ -103,10 +112,10 @@ describe('protoBook', () => {
})
// set 1
pb.set(peerId, supportedProtocols)
await pb.set(peerId, supportedProtocols)
// set 2 (same content)
pb.set(peerId, supportedProtocols)
await pb.set(peerId, supportedProtocols)
// Wait 50ms for incorrect second event
setTimeout(() => {
@ -118,10 +127,16 @@ describe('protoBook', () => {
})
describe('protoBook.add', () => {
let peerStore, pb
/** @type {PeerStore} */
let peerStore
/** @type {ProtoBook} */
let pb
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
pb = peerStore.protoBook
})
@ -129,19 +144,15 @@ describe('protoBook', () => {
peerStore.removeAllListeners()
})
it('throwns invalid parameters error if invalid PeerId is provided', () => {
expect(() => {
pb.add('invalid peerId')
}).to.throw(ERR_INVALID_PARAMETERS)
it('throws invalid parameters error if invalid PeerId is provided', async () => {
await expect(pb.add('invalid peerId')).to.eventually.be.rejected().with.property('code', ERR_INVALID_PARAMETERS)
})
it('throwns invalid parameters error if no protocols provided', () => {
expect(() => {
pb.add(peerId)
}).to.throw(ERR_INVALID_PARAMETERS)
it('throws invalid parameters error if no protocols provided', async () => {
await expect(pb.add(peerId)).to.eventually.be.rejected().with.property('code', ERR_INVALID_PARAMETERS)
})
it('adds the new content and emits change event', () => {
it('adds the new content and emits change event', async () => {
const defer = pDefer()
const supportedProtocolsA = ['protocol1', 'protocol2']
@ -157,19 +168,19 @@ describe('protoBook', () => {
})
// Replace
pb.set(peerId, supportedProtocolsA)
let protocols = pb.get(peerId)
await pb.set(peerId, supportedProtocolsA)
let protocols = await pb.get(peerId)
expect(protocols).to.have.deep.members(supportedProtocolsA)
// Add
pb.add(peerId, supportedProtocolsB)
protocols = pb.get(peerId)
await pb.add(peerId, supportedProtocolsB)
protocols = await pb.get(peerId)
expect(protocols).to.have.deep.members(finalProtocols)
return defer.promise
})
it('emits on add if the content to add not exists', () => {
it('emits on add if the content to add not exists', async () => {
const defer = pDefer()
const supportedProtocolsA = ['protocol1']
@ -185,17 +196,17 @@ describe('protoBook', () => {
})
// set 1
pb.set(peerId, supportedProtocolsA)
await pb.set(peerId, supportedProtocolsA)
// set 2 (content already existing)
pb.add(peerId, supportedProtocolsB)
const protocols = pb.get(peerId)
await pb.add(peerId, supportedProtocolsB)
const protocols = await pb.get(peerId)
expect(protocols).to.have.deep.members(finalProtocols)
return defer.promise
})
it('does not emit on add if the content to add already exists', () => {
it('does not emit on add if the content to add already exists', async () => {
const defer = pDefer()
const supportedProtocolsA = ['protocol1', 'protocol2']
@ -210,10 +221,10 @@ describe('protoBook', () => {
})
// set 1
pb.set(peerId, supportedProtocolsA)
await pb.set(peerId, supportedProtocolsA)
// set 2 (content already existing)
pb.add(peerId, supportedProtocolsB)
await pb.add(peerId, supportedProtocolsB)
// Wait 50ms for incorrect second event
setTimeout(() => {
@ -225,10 +236,16 @@ describe('protoBook', () => {
})
describe('protoBook.remove', () => {
let peerStore, pb
/** @type {PeerStore} */
let peerStore
/** @type {ProtoBook} */
let pb
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
pb = peerStore.protoBook
})
@ -236,16 +253,12 @@ describe('protoBook', () => {
peerStore.removeAllListeners()
})
it('throws invalid parameters error if invalid PeerId is provided', () => {
expect(() => {
pb.remove('invalid peerId')
}).to.throw(ERR_INVALID_PARAMETERS)
it('throws invalid parameters error if invalid PeerId is provided', async () => {
await expect(pb.remove('invalid peerId')).to.eventually.be.rejected().with.property('code', ERR_INVALID_PARAMETERS)
})
it('throws invalid parameters error if no protocols provided', () => {
expect(() => {
pb.remove(peerId)
}).to.throw(ERR_INVALID_PARAMETERS)
it('throws invalid parameters error if no protocols provided', async () => {
await expect(pb.remove(peerId)).to.eventually.be.rejected().with.property('code', ERR_INVALID_PARAMETERS)
})
it('removes the given protocol and emits change event', async () => {
@ -258,13 +271,13 @@ describe('protoBook', () => {
peerStore.on('change:protocols', spy)
// Replace
pb.set(peerId, supportedProtocols)
let protocols = pb.get(peerId)
await pb.set(peerId, supportedProtocols)
let protocols = await pb.get(peerId)
expect(protocols).to.have.deep.members(supportedProtocols)
// Remove
pb.remove(peerId, removedProtocols)
protocols = pb.get(peerId)
await pb.remove(peerId, removedProtocols)
protocols = await pb.get(peerId)
expect(protocols).to.have.deep.members(finalProtocols)
await pWaitFor(() => spy.callCount === 2)
@ -275,7 +288,7 @@ describe('protoBook', () => {
expect(arraysAreEqual(secondCallArgs.protocols, finalProtocols))
})
it('emits on remove if the content changes', () => {
it('emits on remove if the content changes', async () => {
const spy = sinon.spy()
const supportedProtocols = ['protocol1', 'protocol2']
@ -285,17 +298,17 @@ describe('protoBook', () => {
peerStore.on('change:protocols', spy)
// set
pb.set(peerId, supportedProtocols)
await pb.set(peerId, supportedProtocols)
// remove (content already existing)
pb.remove(peerId, removedProtocols)
const protocols = pb.get(peerId)
await pb.remove(peerId, removedProtocols)
const protocols = await pb.get(peerId)
expect(protocols).to.have.deep.members(finalProtocols)
return pWaitFor(() => spy.callCount === 2)
})
it('does not emit on remove if the content does not change', () => {
it('does not emit on remove if the content does not change', async () => {
const spy = sinon.spy()
const supportedProtocols = ['protocol1', 'protocol2']
@ -304,10 +317,10 @@ describe('protoBook', () => {
peerStore.on('change:protocols', spy)
// set
pb.set(peerId, supportedProtocols)
await pb.set(peerId, supportedProtocols)
// remove
pb.remove(peerId, removedProtocols)
await pb.remove(peerId, removedProtocols)
// Only one event
expect(spy.callCount).to.eql(1)
@ -315,73 +328,79 @@ describe('protoBook', () => {
})
describe('protoBook.get', () => {
let peerStore, pb
/** @type {PeerStore} */
let peerStore
/** @type {ProtoBook} */
let pb
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
pb = peerStore.protoBook
})
it('throwns invalid parameters error if invalid PeerId is provided', () => {
expect(() => {
pb.get('invalid peerId')
}).to.throw(ERR_INVALID_PARAMETERS)
it('throws invalid parameters error if invalid PeerId is provided', async () => {
await expect(pb.get('invalid peerId')).to.eventually.be.rejected().with.property('code', ERR_INVALID_PARAMETERS)
})
it('returns undefined if no protocols are known for the provided peer', () => {
const protocols = pb.get(peerId)
it('returns empty if no protocols are known for the provided peer', async () => {
const protocols = await pb.get(peerId)
expect(protocols).to.not.exist()
expect(protocols).to.be.empty()
})
it('returns the protocols stored', () => {
it('returns the protocols stored', async () => {
const supportedProtocols = ['protocol1', 'protocol2']
pb.set(peerId, supportedProtocols)
await pb.set(peerId, supportedProtocols)
const protocols = pb.get(peerId)
const protocols = await pb.get(peerId)
expect(protocols).to.have.deep.members(supportedProtocols)
})
})
describe('protoBook.delete', () => {
let peerStore, pb
/** @type {PeerStore} */
let peerStore
/** @type {ProtoBook} */
let pb
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
pb = peerStore.protoBook
})
it('throwns invalid parameters error if invalid PeerId is provided', () => {
expect(() => {
pb.delete('invalid peerId')
}).to.throw(ERR_INVALID_PARAMETERS)
it('throws invalid parameters error if invalid PeerId is provided', async () => {
await expect(pb.delete('invalid peerId')).to.eventually.be.rejected().with.property('code', ERR_INVALID_PARAMETERS)
})
it('returns false if no records exist for the peer and no event is emitted', () => {
it('should not emit event if no records exist for the peer', async () => {
const defer = pDefer()
peerStore.on('change:protocols', () => {
defer.reject()
})
const deleted = pb.delete(peerId)
expect(deleted).to.equal(false)
await pb.delete(peerId)
// Wait 50ms for an incorrect event
setTimeout(() => {
defer.resolve()
}, 50)
return defer.promise
await defer.promise
})
it('returns true if the record exists and an event is emitted', () => {
it('should emit event if a record exists for the peer', async () => {
const defer = pDefer()
const supportedProtocols = ['protocol1', 'protocol2']
pb.set(peerId, supportedProtocols)
await pb.set(peerId, supportedProtocols)
// Listen after set
peerStore.on('change:protocols', ({ protocols }) => {
@ -389,11 +408,9 @@ describe('protoBook', () => {
defer.resolve()
})
const deleted = pb.delete(peerId)
await pb.delete(peerId)
expect(deleted).to.equal(true)
return defer.promise
await defer.promise
})
})
})
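A second pattern worth noting from this spec: invalid arguments no longer throw synchronously, they reject the returned promise, which the tests assert with await expect(...).to.eventually.be.rejected(). A hedged sketch of handling that in calling code, using the same error codes module required above; the wrapper name is illustrative:

const { codes: { ERR_INVALID_PARAMETERS } } = require('../../src/errors')

async function trySetProtocols (peerStore, peerId, protocols) {
  try {
    await peerStore.protoBook.set(peerId, protocols)
    return true
  } catch (err) {
    if (err.code === ERR_INVALID_PARAMETERS) {
      // invalid PeerId or missing protocol list
      return false
    }
    throw err
  }
}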

View File

@ -5,7 +5,7 @@ const { expect } = require('aegir/utils/chai')
const pDefer = require('p-defer')
const { EventEmitter } = require('events')
const { MemoryDatastore } = require('datastore-core/memory')
const Topology = require('libp2p-interfaces/src/topology/multicodec-topology')
const PeerStore = require('../../src/peer-store')
const Registrar = require('../../src/registrar')
@ -27,19 +27,23 @@ describe('registrar', () => {
describe('errors', () => {
beforeEach(() => {
peerStore = new PeerStore({ peerId })
peerStore = new PeerStore({
peerId,
datastore: new MemoryDatastore()
})
registrar = new Registrar({ peerStore, connectionManager: new EventEmitter() })
})
it('should fail to register a protocol if no multicodec is provided', () => {
expect(() => registrar.register()).to.throw()
return expect(registrar.register()).to.eventually.be.rejected()
})
it('should fail to register a protocol if an invalid topology is provided', () => {
const fakeTopology = {
random: 1
}
expect(() => registrar.register(fakeTopology)).to.throw()
return expect(registrar.register(fakeTopology)).to.eventually.be.rejected()
})
})
@ -57,7 +61,7 @@ describe('registrar', () => {
afterEach(() => libp2p.stop())
it('should be able to register a protocol', () => {
it('should be able to register a protocol', async () => {
const topologyProps = new Topology({
multicodecs: multicodec,
handlers: {
@ -66,12 +70,12 @@ describe('registrar', () => {
}
})
const identifier = libp2p.registrar.register(topologyProps)
const identifier = await libp2p.registrar.register(topologyProps)
expect(identifier).to.exist()
})
it('should be able to unregister a protocol', () => {
it('should be able to unregister a protocol', async () => {
const topologyProps = new Topology({
multicodecs: multicodec,
handlers: {
@ -80,7 +84,7 @@ describe('registrar', () => {
}
})
const identifier = libp2p.registrar.register(topologyProps)
const identifier = await libp2p.registrar.register(topologyProps)
const success = libp2p.registrar.unregister(identifier)
expect(success).to.eql(true)
@ -100,12 +104,6 @@ describe('registrar', () => {
const conn = await createMockConnection()
const remotePeerId = conn.remotePeer
// Add connected peer with protocol to peerStore and registrar
libp2p.peerStore.protoBook.add(remotePeerId, [multicodec])
libp2p.connectionManager.onConnect(conn)
expect(libp2p.connectionManager.size).to.eql(1)
const topologyProps = new Topology({
multicodecs: multicodec,
handlers: {
@ -124,12 +122,18 @@ describe('registrar', () => {
})
// Register protocol
const identifier = libp2p.registrar.register(topologyProps)
const identifier = await libp2p.registrar.register(topologyProps)
const topology = libp2p.registrar.topologies.get(identifier)
// Topology created
expect(topology).to.exist()
// Add connected peer with protocol to peerStore and registrar
await libp2p.peerStore.protoBook.add(remotePeerId, [multicodec])
await libp2p.connectionManager.onConnect(conn)
expect(libp2p.connectionManager.size).to.eql(1)
await conn.close()
libp2p.connectionManager.onDisconnect(conn)
@ -159,7 +163,7 @@ describe('registrar', () => {
})
// Register protocol
const identifier = libp2p.registrar.register(topologyProps)
const identifier = await libp2p.registrar.register(topologyProps)
const topology = libp2p.registrar.topologies.get(identifier)
// Topology created
@ -171,16 +175,16 @@ describe('registrar', () => {
const remotePeerId = conn.remotePeer
// Add connected peer to peerStore and registrar
libp2p.peerStore.protoBook.set(remotePeerId, [])
libp2p.connectionManager.onConnect(conn)
await libp2p.peerStore.protoBook.set(remotePeerId, [])
// Add protocol to peer and update it
libp2p.peerStore.protoBook.add(remotePeerId, [multicodec])
await libp2p.peerStore.protoBook.add(remotePeerId, [multicodec])
await libp2p.connectionManager.onConnect(conn)
await onConnectDefer.promise
// Remove protocol to peer and update it
libp2p.peerStore.protoBook.set(remotePeerId, [])
await libp2p.peerStore.protoBook.set(remotePeerId, [])
await onDisconnectDefer.promise
})
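The registrar follows the same shape: register() is now async and resolves to the topology identifier, while unregister() stays synchronous. A small sketch using the Topology class imported in this spec; the multicodec string and handler bodies are placeholders, not taken from the diff:

const Topology = require('libp2p-interfaces/src/topology/multicodec-topology')

async function watchProtocol (libp2p) {
  const topology = new Topology({
    multicodecs: '/echo/1.0.0', // illustrative protocol id
    handlers: {
      onConnect: (peerId, connection) => { /* a peer supporting the protocol connected */ },
      onDisconnect: (peerId) => { /* that peer went away */ }
    }
  })

  const identifier = await libp2p.registrar.register(topology)
  // later, stop tracking the protocol
  libp2p.registrar.unregister(identifier)
}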

View File

@ -82,7 +82,7 @@ describe('auto-relay', () => {
const originalMultiaddrsLength = relayLibp2p.multiaddrs.length
// Discover relay
libp2p.peerStore.addressBook.add(relayLibp2p.peerId, relayLibp2p.multiaddrs)
await libp2p.peerStore.addressBook.add(relayLibp2p.peerId, relayLibp2p.multiaddrs)
await libp2p.dial(relayLibp2p.peerId)
// Wait for peer added as listen relay
@ -94,7 +94,7 @@ describe('auto-relay', () => {
expect(libp2p.multiaddrs[originalMultiaddrsLength].getPeerId()).to.eql(relayLibp2p.peerId.toB58String())
// Peer has relay multicodec
const knownProtocols = libp2p.peerStore.protoBook.get(relayLibp2p.peerId)
const knownProtocols = await libp2p.peerStore.protoBook.get(relayLibp2p.peerId)
expect(knownProtocols).to.include(relayMulticodec)
})
})
@ -165,7 +165,7 @@ describe('auto-relay', () => {
sinon.spy(autoRelay1, '_addListenRelay')
// Discover relay
relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
const originalMultiaddrs1Length = relayLibp2p1.multiaddrs.length
const originalMultiaddrs2Length = relayLibp2p2.multiaddrs.length
@ -184,7 +184,7 @@ describe('auto-relay', () => {
expect(relayLibp2p1.multiaddrs[originalMultiaddrs1Length].getPeerId()).to.eql(relayLibp2p2.peerId.toB58String())
// Peer has relay multicodec
const knownProtocols = relayLibp2p1.peerStore.protoBook.get(relayLibp2p2.peerId)
const knownProtocols = await relayLibp2p1.peerStore.protoBook.get(relayLibp2p2.peerId)
expect(knownProtocols).to.include(relayMulticodec)
})
@ -193,7 +193,7 @@ describe('auto-relay', () => {
const originalMultiaddrs2Length = relayLibp2p2.multiaddrs.length
// Discover relay
relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.dial(relayLibp2p2.peerId)
@ -206,7 +206,7 @@ describe('auto-relay', () => {
// Dial from the other through a relay
const relayedMultiaddr2 = new Multiaddr(`${relayLibp2p1.multiaddrs[0]}/p2p/${relayLibp2p1.peerId.toB58String()}/p2p-circuit`)
libp2p.peerStore.addressBook.add(relayLibp2p2.peerId, [relayedMultiaddr2])
await libp2p.peerStore.addressBook.add(relayLibp2p2.peerId, [relayedMultiaddr2])
await libp2p.dial(relayLibp2p2.peerId)
})
@ -220,7 +220,7 @@ describe('auto-relay', () => {
sinon.spy(autoRelay1._listenRelays, 'add')
// Discover one relay and connect
relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.dial(relayLibp2p2.peerId)
expect(relayLibp2p1.connectionManager.size).to.eql(1)
@ -237,11 +237,11 @@ describe('auto-relay', () => {
expect(relayLibp2p1.multiaddrs[originalMultiaddrs1Length].getPeerId()).to.eql(relayLibp2p2.peerId.toB58String())
// Relay2 has relay multicodec
const knownProtocols2 = relayLibp2p1.peerStore.protoBook.get(relayLibp2p2.peerId)
const knownProtocols2 = await relayLibp2p1.peerStore.protoBook.get(relayLibp2p2.peerId)
expect(knownProtocols2).to.include(relayMulticodec)
// Discover an extra relay and connect
relayLibp2p1.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p1.dial(relayLibp2p3.peerId)
// Wait to guarantee the dialed peer is not added as a listen relay
@ -253,7 +253,7 @@ describe('auto-relay', () => {
expect(relayLibp2p1.connectionManager.size).to.eql(2)
// Relay2 has relay multicodec
const knownProtocols3 = relayLibp2p1.peerStore.protoBook.get(relayLibp2p3.peerId)
const knownProtocols3 = await relayLibp2p1.peerStore.protoBook.get(relayLibp2p3.peerId)
expect(knownProtocols3).to.include(relayMulticodec)
})
@ -264,7 +264,7 @@ describe('auto-relay', () => {
sinon.spy(relayLibp2p1.identifyService, 'pushToPeerStore')
// Discover one relay and connect
relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.dial(relayLibp2p2.peerId)
// Wait for listening on the relay
@ -283,7 +283,7 @@ describe('auto-relay', () => {
expect(autoRelay1._listenRelays.size).to.equal(0)
// Identify push for removing listen relay multiaddr
expect(relayLibp2p1.identifyService.pushToPeerStore.callCount).to.equal(2)
await pWaitFor(() => relayLibp2p1.identifyService.pushToPeerStore.callCount === 2)
})
it('should try to listen on other connected peers relayed address if one used relay disconnects', async () => {
@ -294,11 +294,11 @@ describe('auto-relay', () => {
sinon.spy(relayLibp2p1.transportManager, 'listen')
// Discover one relay and connect
relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.dial(relayLibp2p2.peerId)
// Discover an extra relay and connect
relayLibp2p1.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p1.dial(relayLibp2p3.peerId)
// Wait for attempts to add both peers as listen relays
@ -337,11 +337,11 @@ describe('auto-relay', () => {
sinon.spy(relayLibp2p1.transportManager, 'listen')
// Discover one relay and connect
relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.dial(relayLibp2p2.peerId)
// Discover an extra relay and connect to gather its Hop support
relayLibp2p1.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p1.dial(relayLibp2p3.peerId)
// Wait for attempts to add both peers as listen relays
@ -382,11 +382,11 @@ describe('auto-relay', () => {
sinon.spy(relayLibp2p1.transportManager, 'listen')
// Discover one relay and connect
relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, relayLibp2p2.multiaddrs)
await relayLibp2p1.dial(relayLibp2p2.peerId)
// Discover an extra relay and connect to gather its Hop support
relayLibp2p1.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p1.dial(relayLibp2p3.peerId)
// Wait for attempts to add both peers as listen relays
@ -479,7 +479,7 @@ describe('auto-relay', () => {
sinon.spy(autoRelay2, '_addListenRelay')
// Relay 1 discovers Relay 3 and connect
relayLibp2p1.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p1.dial(relayLibp2p3.peerId)
// Wait for peer added as listen relay
@ -487,7 +487,7 @@ describe('auto-relay', () => {
expect(autoRelay1._listenRelays.size).to.equal(1)
// Relay 2 discovers Relay 3 and connects
relayLibp2p2.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p2.peerStore.addressBook.add(relayLibp2p3.peerId, relayLibp2p3.multiaddrs)
await relayLibp2p2.dial(relayLibp2p3.peerId)
// Wait for peer added as listen relay
@ -496,7 +496,7 @@ describe('auto-relay', () => {
// Relay 1 discovers Relay 2's relayed multiaddr via Relay 3
const ma2RelayedBy3 = relayLibp2p2.multiaddrs[relayLibp2p2.multiaddrs.length - 1]
relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, [ma2RelayedBy3])
await relayLibp2p1.peerStore.addressBook.add(relayLibp2p2.peerId, [ma2RelayedBy3])
await relayLibp2p1.dial(relayLibp2p2.peerId)
// Peer not added as listen relay

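The hunks above exercise the address book and proto book after they became promise-based; a minimal usage sketch, assuming a started libp2p node (the function name, peer id and multiaddrs are placeholders, not part of this diff):

// Sketch only: *book reads and writes must now be awaited
async function trackRelay (libp2p, relayPeerId, relayMultiaddrs) {
  // writing addresses resolves asynchronously
  await libp2p.peerStore.addressBook.add(relayPeerId, relayMultiaddrs)
  // reads are asynchronous too
  const protocols = await libp2p.peerStore.protoBook.get(relayPeerId)
  return protocols
}
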
@ -8,7 +8,6 @@ const { Multiaddr } = require('multiaddr')
const { collect } = require('streaming-iterables')
const pipe = require('it-pipe')
const AggregateError = require('aggregate-error')
const PeerId = require('peer-id')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const { createPeerId } = require('../utils/creators/peer')
@ -46,14 +45,13 @@ describe('Dialing (via relay, TCP)', () => {
return Promise.all([srcLibp2p, relayLibp2p, dstLibp2p].map(libp2p => libp2p.start()))
})
afterEach(() => {
afterEach(async () => {
// Stop each node
return Promise.all([srcLibp2p, relayLibp2p, dstLibp2p].map(async libp2p => {
await libp2p.stop()
// Clear the peer stores
for (const peerIdStr of libp2p.peerStore.peers.keys()) {
const peerId = PeerId.createFromB58String(peerIdStr)
libp2p.peerStore.delete(peerId)
for await (const peer of libp2p.peerStore.getPeers()) {
libp2p.peerStore.delete(peer.id)
}
}))
})

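The teardown above replaces the synchronous peer-map iteration with the peer store's async iterator; a sketch of the same pattern in isolation (awaiting delete is an assumption added for completeness — the test above calls it without awaiting):

// Sketch only: enumerate and remove every stored peer
async function clearPeerStore (libp2p) {
  for await (const peer of libp2p.peerStore.getPeers()) {
    await libp2p.peerStore.delete(peer.id)
  }
}
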
@ -2,7 +2,7 @@
/* eslint-env mocha */
const { expect } = require('aegir/utils/chai')
const { MemoryDatastore } = require('datastore-core/memory')
const AddressManager = require('../../src/address-manager')
const TransportManager = require('../../src/transport-manager')
const PeerStore = require('../../src/peer-store')
@ -33,7 +33,10 @@ describe('Transport Manager (TCP)', () => {
peerId: localPeer,
multiaddrs: addrs,
addressManager: new AddressManager({ listen: addrs }),
peerStore: new PeerStore({ peerId: localPeer })
peerStore: new PeerStore({
peerId: localPeer,
datastore: new MemoryDatastore()
})
},
upgrader: mockUpgrader,
onConnection: () => {}
@ -67,7 +70,7 @@ describe('Transport Manager (TCP)', () => {
})
it('should create self signed peer record on listen', async () => {
let signedPeerRecord = await tm.libp2p.peerStore.addressBook.getPeerRecord(localPeer)
let signedPeerRecord = await tm.libp2p.peerStore.addressBook.getRawEnvelope(localPeer)
expect(signedPeerRecord).to.not.exist()
tm.add(Transport.prototype[Symbol.toStringTag], Transport)

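The test setup above now hands the peer store an explicit datastore; a minimal sketch of the same construction (localPeer stands in for any PeerId instance):

// Sketch only: peer store backed by an in-memory datastore
const { MemoryDatastore } = require('datastore-core/memory')
const PeerStore = require('../../src/peer-store')

const peerStore = new PeerStore({
  peerId: localPeer, // any PeerId instance
  datastore: new MemoryDatastore() // a persistent datastore can be used instead
})
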
@ -2,17 +2,17 @@
"name": "ts-use",
"private": true,
"dependencies": {
"datastore-level": "^6.0.0",
"ipfs-http-client": "^50.1.2",
"@achingbrain/libp2p-gossipsub": "^0.12.2",
"@chainsafe/libp2p-noise": "^5.0.0",
"datastore-level": "^7.0.1",
"ipfs-http-client": "^55.0.0",
"libp2p": "file:../..",
"libp2p-bootstrap": "^0.13.0",
"libp2p-bootstrap": "^0.14.0",
"libp2p-delegated-content-routing": "^0.11.0",
"libp2p-delegated-peer-routing": "^0.11.1",
"libp2p-gossipsub": "^0.9.0",
"libp2p-interfaces": "^1.0.1",
"libp2p-kad-dht": "^0.26.5",
"libp2p-interfaces": "^4.0.0",
"libp2p-kad-dht": "^0.28.6",
"libp2p-mplex": "^0.10.4",
"@chainsafe/libp2p-noise": "^4.1.0",
"libp2p-record": "^0.10.4",
"libp2p-tcp": "^0.17.1",
"libp2p-websockets": "^0.16.1",

@ -105,8 +105,7 @@ async function main() {
},
datastore: new LevelStore('path/to/store'),
peerStore: {
persistence: false,
threshold: 5
persistence: false
},
keychain: {
pass: 'notsafepassword123456789',

@ -58,12 +58,13 @@ function _populateAddressBooks (peers) {
* @param {Object} [properties]
* @param {number} [properties.number] - number of peers (default: 1).
* @param {boolean} [properties.fixture] - use fixture for peer-id generation (default: true)
* @param {PeerId.CreateOptions} [properties.opts]
* @returns {Promise<Array<PeerId>>}
*/
function createPeerId ({ number = 1, fixture = true } = {}) {
function createPeerId ({ number = 1, fixture = true, opts = {} } = {}) {
return pTimes(number, (i) => fixture
? PeerId.createFromJSON(Peers[i])
: PeerId.create()
: PeerId.create(opts)
)
}

@ -48,7 +48,7 @@ module.exports = async (properties = {}) => {
protocol: protocols[0]
}
},
close: () => { },
close: async () => { },
getStreams: () => openStreams,
...properties
})

@ -10,8 +10,7 @@
"src/circuit/protocol/index.js", // exclude generated file
"src/identify/message.js", // exclude generated file
"src/insecure/proto.js", // exclude generated file
"src/peer-store/persistent/pb/address-book.js", // exclude generated file
"src/peer-store/persistent/pb/proto-book.js", // exclude generated file
"src/peer-store/pb/peer.js", // exclude generated file
"src/record/peer-record/peer-record.js", // exclude generated file
"src/record/envelope/envelope.js" // exclude generated file
]