mirror of
https://github.com/fluencelabs/js-libp2p
synced 2025-07-08 05:11:33 +00:00
Compare commits: fix/return ... v0.29.0 (55 commits)
SHA1

bbf8ef7065
d6d1a746ea
28b79a76a8
81e70df742
e9478cee2e
7be17a3ce1
93dda74085
cfbd52d7f7
6cd23ea6c9
9b75a0f184
b606ce0e91
64c8c0f097
9be582e222
55c9bfac44
1e869717ff
9107efe121
cd09327eb6
ca57e65ecc
f574e82a5d
15613ccf19
dab1c8b2a5
d437defede
74d414c21f
8f2e69048f
b0a36ccbc8
3158366579
1d318e12d8
8a97dded26
e50f0eeb7b
ee57a643cc
89658dd655
4ab125e017
71daac24b1
02a5095b9c
3e5d450eca
098f3d1dd3
689f90a698
0e3cc5866b
4851680c4d
0e18735b8c
f68ff35625
8c56ec0d23
fa5ee873e3
51d7ca44c1
726a746479
a331b84f13
78d152dd68
7e14aa19b5
2440c872df
6c7e5e5eef
388df6b6e6
7dbfe6ab1a
cea59a1fe4
7da9ad44ab
3896941128
@ -45,9 +45,15 @@ const after = async () => {
}

module.exports = {
  bundlesize: { maxSize: '202kB' },
  bundlesize: { maxSize: '225kB' },
  hooks: {
    pre: before,
    post: after
  },
  webpack: {
    node: {
      // needed by bcrypto
      Buffer: true
    }
  }
}
@ -6,8 +6,8 @@ stages:
  - cov

node_js:
  - '10'
  - '12'
  - 'lts/*'
  - 'stable'

os:
  - linux
CHANGELOG.md (209 changed lines)
@ -1,3 +1,212 @@
<a name="0.29.0"></a>
# [0.29.0](https://github.com/libp2p/js-libp2p/compare/v0.28.10...v0.29.0) (2020-08-27)

### Bug Fixes

* do not return self on peerstore.peers ([15613cc](https://github.com/libp2p/js-libp2p/commit/15613cc))
* peer record interop with go ([#739](https://github.com/libp2p/js-libp2p/issues/739)) ([93dda74](https://github.com/libp2p/js-libp2p/commit/93dda74))
* replace node buffers with uint8arrays ([#730](https://github.com/libp2p/js-libp2p/issues/730)) ([1e86971](https://github.com/libp2p/js-libp2p/commit/1e86971))
* revert new identify protocol versions ([3158366](https://github.com/libp2p/js-libp2p/commit/3158366))
* signature compliant with spec ([4ab125e](https://github.com/libp2p/js-libp2p/commit/4ab125e))

### Chores

* update travis to use node lts and stable ([098f3d1](https://github.com/libp2p/js-libp2p/commit/098f3d1))

### Features

* certified addressbook ([8f2e690](https://github.com/libp2p/js-libp2p/commit/8f2e690))
* create self peer record in identify ([8a97dde](https://github.com/libp2p/js-libp2p/commit/8a97dde))
* exchange signed peer records in identify ([e50f0ee](https://github.com/libp2p/js-libp2p/commit/e50f0ee))
* gossipsub 1.1 ([#733](https://github.com/libp2p/js-libp2p/issues/733)) ([55c9bfa](https://github.com/libp2p/js-libp2p/commit/55c9bfa))
* signed peer records record manager ([3e5d450](https://github.com/libp2p/js-libp2p/commit/3e5d450))

### Reverts

* reapply "fix: throw if no conn encryption module provided ([#665](https://github.com/libp2p/js-libp2p/issues/665))" ([689f90a](https://github.com/libp2p/js-libp2p/commit/689f90a))

### BREAKING CHANGES

* pubsub implementation is now directly exposed and its API was updated according to the new pubsub interface in js-libp2p-interfaces repo

* chore: use gossipsub branch with src added

* fix: add pubsub handlers adapter

* chore: fix deps

* chore: update pubsub docs and examples

* chore: apply suggestions from code review

Co-authored-by: Jacob Heun <jacobheun@gmail.com>

* chore: use new floodsub

* chore: change validator doc set

Co-authored-by: Jacob Heun <jacobheun@gmail.com>

* chore: add new gossipsub src

Co-authored-by: Jacob Heun <jacobheun@gmail.com>

* - All deps used by this module now use Uint8Arrays in place of node Buffers

* chore: browser fixes

* chore: remove .only

* chore: stringify uint8array before parsing

* chore: update interop suite

* chore: remove ts from build command

* chore: update deps

* fix: update records to use uint8array

* chore: fix lint

* chore: update deps

Co-authored-by: Jacob Heun <jacobheun@gmail.com>

* this drops testing support in node 10.


<a name="0.29.0-rc.1"></a>
# [0.29.0-rc.1](https://github.com/libp2p/js-libp2p/compare/v0.29.0-rc.0...v0.29.0-rc.1) (2020-08-27)

### Bug Fixes

* peer record interop with go ([#739](https://github.com/libp2p/js-libp2p/issues/739)) ([c4c7ef9](https://github.com/libp2p/js-libp2p/commit/c4c7ef9))


<a name="0.29.0-rc.0"></a>
# [0.29.0-rc.0](https://github.com/libp2p/js-libp2p/compare/v0.28.10...v0.29.0-rc.0) (2020-08-25)

### Bug Fixes

* do not return self on peerstore.peers ([e1b8edc](https://github.com/libp2p/js-libp2p/commit/e1b8edc))
* replace node buffers with uint8arrays ([#730](https://github.com/libp2p/js-libp2p/issues/730)) ([507f8c4](https://github.com/libp2p/js-libp2p/commit/507f8c4))
* revert new identify protocol versions ([a798c65](https://github.com/libp2p/js-libp2p/commit/a798c65))
* signature compliant with spec ([97b5d2a](https://github.com/libp2p/js-libp2p/commit/97b5d2a))

### Chores

* update travis to use node lts and stable ([c272288](https://github.com/libp2p/js-libp2p/commit/c272288))

### Features

* certified addressbook ([e0ed258](https://github.com/libp2p/js-libp2p/commit/e0ed258))
* create self peer record in identify ([83922a7](https://github.com/libp2p/js-libp2p/commit/83922a7))
* exchange signed peer records in identify ([f835457](https://github.com/libp2p/js-libp2p/commit/f835457))
* gossipsub 1.1 ([#733](https://github.com/libp2p/js-libp2p/issues/733)) ([e14ce40](https://github.com/libp2p/js-libp2p/commit/e14ce40))
* signed peer records record manager ([f95edf1](https://github.com/libp2p/js-libp2p/commit/f95edf1))

### Reverts

* reapply "fix: throw if no conn encryption module provided ([#665](https://github.com/libp2p/js-libp2p/issues/665))" ([ad7f02e](https://github.com/libp2p/js-libp2p/commit/ad7f02e))

### BREAKING CHANGES

* pubsub implementation is now directly exposed and its API was updated according to the new pubsub interface in js-libp2p-interfaces repo

* chore: use gossipsub branch with src added

* fix: add pubsub handlers adapter

* chore: fix deps

* chore: update pubsub docs and examples

* chore: apply suggestions from code review

Co-authored-by: Jacob Heun <jacobheun@gmail.com>

* chore: use new floodsub

* chore: change validator doc set

Co-authored-by: Jacob Heun <jacobheun@gmail.com>

* chore: add new gossipsub src

Co-authored-by: Jacob Heun <jacobheun@gmail.com>

* - All deps used by this module now use Uint8Arrays in place of node Buffers

* chore: browser fixes

* chore: remove .only

* chore: stringify uint8array before parsing

* chore: update interop suite

* chore: remove ts from build command

* chore: update deps

* fix: update records to use uint8array

* chore: fix lint

* chore: update deps

Co-authored-by: Jacob Heun <jacobheun@gmail.com>

* this drops testing support in node 10.


<a name="0.28.10"></a>
## [0.28.10](https://github.com/libp2p/js-libp2p/compare/v0.28.9...v0.28.10) (2020-08-05)

### Bug Fixes

* allow certain keychain operations without a password ([#726](https://github.com/libp2p/js-libp2p/issues/726)) ([8c56ec0](https://github.com/libp2p/js-libp2p/commit/8c56ec0))
* **identify:** make agentversion dynamic and add it to the peerstore ([#724](https://github.com/libp2p/js-libp2p/issues/724)) ([726a746](https://github.com/libp2p/js-libp2p/commit/726a746))

### Features

* **keychain:** add support for ed25519 and secp keys ([#725](https://github.com/libp2p/js-libp2p/issues/725)) ([51d7ca4](https://github.com/libp2p/js-libp2p/commit/51d7ca4))


<a name="0.28.9"></a>
## [0.28.9](https://github.com/libp2p/js-libp2p/compare/v0.28.8...v0.28.9) (2020-07-27)

### Bug Fixes

* ping multiaddr from peer not previously stored in peerstore ([#719](https://github.com/libp2p/js-libp2p/issues/719)) ([2440c87](https://github.com/libp2p/js-libp2p/commit/2440c87))


<a name="0.28.8"></a>
## [0.28.8](https://github.com/libp2p/js-libp2p/compare/v0.28.7...v0.28.8) (2020-07-20)

### Bug Fixes

* create dial target for peer with no known addrs ([#715](https://github.com/libp2p/js-libp2p/issues/715)) ([7da9ad4](https://github.com/libp2p/js-libp2p/commit/7da9ad4))


<a name="0.28.7"></a>
## [0.28.7](https://github.com/libp2p/js-libp2p/compare/v0.28.6...v0.28.7) (2020-07-14)
@ -23,8 +23,8 @@
|
||||
<a href="https://david-dm.org/libp2p/js-libp2p"><img src="https://david-dm.org/libp2p/js-libp2p.svg?style=flat-square" /></a>
|
||||
<a href="https://github.com/feross/standard"><img src="https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square"></a>
|
||||
<a href="https://github.com/RichardLitt/standard-readme"><img src="https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square" /></a>
|
||||
<a href=""><img src="https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square" /></a>
|
||||
<a href=""><img src="https://img.shields.io/badge/Node.js-%3E%3D6.0.0-orange.svg?style=flat-square" /></a>
|
||||
<a href=""><img src="https://img.shields.io/badge/npm-%3E%3D6.0.0-orange.svg?style=flat-square" /></a>
|
||||
<a href=""><img src="https://img.shields.io/badge/Node.js-%3E%3D10.0.0-orange.svg?style=flat-square" /></a>
|
||||
<br>
|
||||
</p>
|
||||
|
||||
|
doc/API.md (213 changed lines)
@ -46,6 +46,8 @@
|
||||
* [`pubsub.publish`](#pubsubpublish)
|
||||
* [`pubsub.subscribe`](#pubsubsubscribe)
|
||||
* [`pubsub.unsubscribe`](#pubsubunsubscribe)
|
||||
* [`pubsub.on`](#pubsubon)
|
||||
* [`pubsub.removeListener`](#pubsubremovelistener)
|
||||
* [`connectionManager.get`](#connectionmanagerget)
|
||||
* [`connectionManager.setPeerValue`](#connectionmanagersetpeervalue)
|
||||
* [`connectionManager.size`](#connectionmanagersize)
|
||||
@ -85,17 +87,17 @@ Creates an instance of Libp2p.
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| options | `object` | libp2p options |
|
||||
| options.modules | [`Array<object>`](./CONFIGURATION.md#modules) | libp2p modules to use |
|
||||
| options.modules | [`Array<object>`](./CONFIGURATION.md#modules) | libp2p [modules](./CONFIGURATION.md#modules) to use |
|
||||
| [options.addresses] | `{ listen: Array<string>, announce: Array<string>, noAnnounce: Array<string> }` | Addresses for transport listening and to advertise to the network |
|
||||
| [options.config] | `object` | libp2p modules configuration and core configuration |
|
||||
| [options.connectionManager] | [`object`](./CONFIGURATION.md#configuring-connection-manager) | libp2p Connection Manager configuration |
|
||||
| [options.transportManager] | [`object`](./CONFIGURATION.md#configuring-transport-manager) | libp2p transport manager configuration |
|
||||
| [options.connectionManager] | [`object`](./CONFIGURATION.md#configuring-connection-manager) | libp2p Connection Manager [configuration](./CONFIGURATION.md#configuring-connection-manager) |
|
||||
| [options.transportManager] | [`object`](./CONFIGURATION.md#configuring-transport-manager) | libp2p transport manager [configuration](./CONFIGURATION.md#configuring-transport-manager) |
|
||||
| [options.datastore] | `object` | must implement [ipfs/interface-datastore](https://github.com/ipfs/interface-datastore) (in memory datastore will be used if not provided) |
|
||||
| [options.dialer] | [`object`](./CONFIGURATION.md#configuring-dialing) | libp2p Dialer configuration
|
||||
| [options.keychain] | [`object`](./CONFIGURATION.md#setup-with-keychain) | keychain configuration |
|
||||
| [options.metrics] | [`object`](./CONFIGURATION.md#configuring-metrics) | libp2p Metrics configuration
|
||||
| [options.dialer] | [`object`](./CONFIGURATION.md#configuring-dialing) | libp2p Dialer [configuration](./CONFIGURATION.md#configuring-dialing)
|
||||
| [options.keychain] | [`object`](./CONFIGURATION.md#setup-with-keychain) | keychain [configuration](./CONFIGURATION.md#setup-with-keychain) |
|
||||
| [options.metrics] | [`object`](./CONFIGURATION.md#configuring-metrics) | libp2p Metrics [configuration](./CONFIGURATION.md#configuring-metrics) |
|
||||
| [options.peerId] | [`PeerId`][peer-id] | peerId instance (it will be created if not provided) |
|
||||
| [options.peerStore] | [`object`](./CONFIGURATION.md#configuring-peerstore) | libp2p PeerStore configuration |
|
||||
| [options.peerStore] | [`object`](./CONFIGURATION.md#configuring-peerstore) | libp2p PeerStore [configuration](./CONFIGURATION.md#configuring-peerstore) |
|
||||
|
||||
For Libp2p configurations and modules details read the [Configuration Document](./CONFIGURATION.md).
|
||||
|
||||
@ -306,7 +308,7 @@ Dials to another peer in the network and selects a protocol to communicate with
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Promise<{ stream:*, protocol:string }>` | Promise resolves with a [duplex stream](https://gist.github.com/alanshaw/591dc7dd54e4f99338a347ef568d6ee9#duplex-it) and the protocol used |
|
||||
| `Promise<{ stream:*, protocol:string }>` | Promise resolves with a [duplex stream](https://github.com/libp2p/js-libp2p/blob/master/doc/STREAMING_ITERABLES.md#duplex) and the protocol used |
|
||||
|
||||
#### Example
|
||||
|
||||
@ -418,7 +420,7 @@ const latency = await libp2p.ping(otherPeerId)
|
||||
|
||||
## multiaddrs
|
||||
|
||||
Gets the multiaddrs the libp2p node announces to the network. This computes the advertising multiaddrs
of the peer by joining the multiaddrs that libp2p transports are listening on with the announce multiaddrs
|
||||
provided in the libp2p config. Multiaddrs configured under `noAnnounce` will be filtered out of the advertised addresses.
|
||||
|
||||
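As an editorial aside (not part of the API diff): inspecting the announced addresses after start could look like the sketch below, assuming `libp2p-tcp`, `libp2p-mplex` and `libp2p-noise` are the configured modules and `/dns4/example.com/tcp/4001` is a hypothetical announce address.

```js
const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const Mplex = require('libp2p-mplex')
const { NOISE } = require('libp2p-noise')

;(async () => {
  const node = await Libp2p.create({
    addresses: {
      listen: ['/ip4/0.0.0.0/tcp/0'],
      announce: ['/dns4/example.com/tcp/4001'] // hypothetical announce address
    },
    modules: { transport: [TCP], streamMuxer: [Mplex], connEncryption: [NOISE] }
  })
  await node.start()

  // node.multiaddrs joins the transport listen addresses with the announce
  // addresses, minus anything configured under noAnnounce.
  node.multiaddrs.forEach((ma) => console.log(ma.toString()))

  await node.stop()
})()
```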
@ -585,7 +587,7 @@ Writes a value to a key in the DHT.
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| key | `string` | key to add to the dht |
|
||||
| value | `Buffer` | value to add to the dht |
|
||||
| value | `Uint8Array` | value to add to the dht |
|
||||
| [options] | `object` | put options |
|
||||
| [options.minPeers] | `number` | minimum number of peers required to successfully put (default: closestPeers.length) |
|
||||
|
||||
@ -600,7 +602,7 @@ Writes a value to a key in the DHT.
|
||||
```js
|
||||
// ...
|
||||
const key = '/key'
|
||||
const value = Buffer.from('oh hello there')
|
||||
const value = uint8ArrayFromString('oh hello there')
|
||||
|
||||
await libp2p.contentRouting.put(key, value)
|
||||
```
|
||||
@ -623,7 +625,7 @@ Queries the DHT for a value stored for a given key.
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Promise<Buffer>` | Value obtained from the DHT |
|
||||
| `Promise<Uint8Array>` | Value obtained from the DHT |
|
||||
|
||||
#### Example
|
||||
|
||||
@ -653,7 +655,7 @@ Queries the DHT for the n values stored for the given key (without sorting).
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Promise<Array<{from: PeerId, val: Buffer}>>` | Array of records obtained from the DHT |
|
||||
| `Promise<Array<{from: PeerId, val: Uint8Array}>>` | Array of records obtained from the DHT |
|
||||
|
||||
#### Example
|
||||
|
||||
@ -760,7 +762,7 @@ Get the known [`Addresses`][address] of a provided peer.
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Array<Address>` | Array of peer's [`Addresses`][address] containing the multiaddr and its metadata |
|
||||
| `Array<Address>|undefined` | Array of peer's [`Addresses`][address] containing the multiaddr and its metadata if available, otherwise undefined |
|
||||
|
||||
#### Example
|
||||
|
||||
@ -797,7 +799,7 @@ Get the known `Multiaddr` of a provided peer. All returned multiaddrs will inclu
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Array<Multiaddr>` | Array of peer's multiaddr |
|
||||
| `Array<Multiaddr>|undefined` | Array of peer's multiaddr if available, otherwise undefined |
|
||||
|
||||
#### Example
|
||||
|
||||
@ -814,7 +816,9 @@ peerStore.addressBook.getMultiaddrsForPeer(peerId)
|
||||
|
||||
### peerStore.addressBook.set
|
||||
|
||||
Set known `multiaddrs` of a given peer.
|
||||
Set known `multiaddrs` of a given peer. This will replace previously stored multiaddrs, if available.
|
||||
Replacing stored multiaddrs might result in losing obtained certified addresses, which is not desirable.
|
||||
Consider using `addressBook.add()` if you're not sure this is what you want to do.
|
||||
|
||||
`peerStore.addressBook.set(peerId, multiaddrs)`
|
||||
|
||||
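As a brief editorial illustration (not part of the diff): given a `peerStore` and a `peerId`, the difference between `add` and `set` is roughly the sketch below, assuming `multiaddr@8` is available.

```js
const multiaddr = require('multiaddr')

// add() merges new multiaddrs with what is already stored for the peer,
// so previously obtained (possibly certified) addresses are kept.
peerStore.addressBook.add(peerId, [multiaddr('/ip4/192.0.2.1/tcp/4001')])

// set() replaces the stored multiaddrs outright, which can drop certified addresses.
peerStore.addressBook.set(peerId, [multiaddr('/ip4/192.0.2.2/tcp/4002')])
```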
@ -968,7 +972,7 @@ Delete the provided peer from the book.
|
||||
```js
|
||||
peerStore.metadataBook.delete(peerId)
|
||||
// false
|
||||
peerStore.metadataBook.set(peerId, 'nickname', Buffer.from('homePeer'))
|
||||
peerStore.metadataBook.set(peerId, 'nickname', uint8ArrayFromString('homePeer'))
|
||||
peerStore.metadataBook.delete(peerId)
|
||||
// true
|
||||
```
|
||||
@ -997,7 +1001,7 @@ Deletes the provided peer metadata key-value pair from the book.
|
||||
```js
|
||||
peerStore.metadataBook.deleteValue(peerId, 'location')
|
||||
// false
|
||||
peerStore.metadataBook.set(peerId, 'location', Buffer.from('Berlin'))
|
||||
peerStore.metadataBook.set(peerId, 'location', uint8ArrayFromString('Berlin'))
|
||||
peerStore.metadataBook.deleteValue(peerId, 'location')
|
||||
// true
|
||||
```
|
||||
@ -1018,14 +1022,14 @@ Get the known metadata of a provided peer.
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Map<string, Buffer>` | Peer Metadata |
|
||||
| `Map<string, Uint8Array>` | Peer Metadata |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
peerStore.metadataBook.get(peerId)
|
||||
// undefined
|
||||
peerStore.metadataBook.set(peerId, 'location', Buffer.from('Berlin'))
|
||||
peerStore.metadataBook.set(peerId, 'location', uint8ArrayFromString('Berlin'))
|
||||
peerStore.metadataBook.get(peerId)
|
||||
// Metadata Map
|
||||
```
|
||||
@ -1047,14 +1051,14 @@ Get specific metadata of a provided peer.
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Map<string, Buffer>` | Peer Metadata |
|
||||
| `Map<string, Uint8Array>` | Peer Metadata |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
peerStore.metadataBook.getValue(peerId, 'location')
|
||||
// undefined
|
||||
peerStore.metadataBook.set(peerId, 'location', Buffer.from('Berlin'))
|
||||
peerStore.metadataBook.set(peerId, 'location', uint8ArrayFromString('Berlin'))
|
||||
peerStore.metadataBook.getValue(peerId, 'location')
|
||||
// Metadata Map
|
||||
```
|
||||
@ -1071,7 +1075,7 @@ Set known metadata of a given `peerId`.
|
||||
|------|------|-------------|
|
||||
| peerId | [`PeerId`][peer-id] | peerId to set |
|
||||
| key | `string` | key of the metadata value to store |
|
||||
| value | `Buffer` | metadata value to store |
|
||||
| value | `Uint8Array` | metadata value to store |
|
||||
|
||||
#### Returns
|
||||
|
||||
@ -1082,7 +1086,7 @@ Set known metadata of a given `peerId`.
|
||||
#### Example
|
||||
|
||||
```js
|
||||
peerStore.metadataBook.set(peerId, 'location', Buffer.from('Berlin'))
|
||||
peerStore.metadataBook.set(peerId, 'location', uint8ArrayFromString('Berlin'))
|
||||
```
|
||||
|
||||
### peerStore.protoBook.delete
|
||||
@ -1216,7 +1220,7 @@ Get the stored information of a given peer, namely its [`PeerId`][peer-id], know
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `{ id: PeerId, addresses: Array<Address>, protocols: Array<string> }` | Peer information of the provided peer |
|
||||
| `{ id: PeerId, addresses: Array<Address>, metadata: Map<string, Buffer>}, protocols: Array<string> }` | Peer information of the provided peer |
|
||||
|
||||
#### Example
|
||||
|
||||
@ -1243,13 +1247,13 @@ Get all the stored information of every peer.
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Map<string, { id: PeerId, addresses: Array<Address>, protocols: Array<string> }>` | Peer data of every peer known |
|
||||
| `Map<string, { id: PeerId, addresses: Array<Address>, metadata: Map<string, Buffer>}, protocols: Array<string> }>` | Peer data of every peer known |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
for (let [peerIdString, peer] of peerStore.peers.entries()) {
|
||||
// peer { id, addresses, protocols }
|
||||
// peer { id, addresses, metadata, protocols }
|
||||
}
|
||||
```
|
||||
|
||||
@ -1306,7 +1310,7 @@ Publishes messages to the given topics.
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| topic | `string` | topic to publish |
|
||||
| data | `Buffer` | data to publish |
|
||||
| data | `Uint8Array` | data to publish |
|
||||
|
||||
#### Returns
|
||||
|
||||
@ -1318,23 +1322,22 @@ Publishes messages to the given topics.
|
||||
|
||||
```js
|
||||
const topic = 'topic'
|
||||
const data = Buffer.from('data')
|
||||
const data = uint8ArrayFromString('data')
|
||||
|
||||
await libp2p.pubsub.publish(topic, data)
|
||||
```
|
||||
|
||||
### pubsub.subscribe
|
||||
|
||||
Subscribes the given handler to a pubsub topic.
|
||||
Subscribes to a pubsub topic.
|
||||
|
||||
`libp2p.pubsub.subscribe(topic, handler)`
|
||||
`libp2p.pubsub.subscribe(topic)`
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| topic | `string` | topic to subscribe |
|
||||
| handler | `function({ from: string, data: Buffer, seqno: Buffer, topicIDs: Array<string>, signature: Buffer, key: Buffer })` | handler for new data on topic |
|
||||
|
||||
#### Returns
|
||||
|
||||
@ -1350,21 +1353,21 @@ const handler = (msg) => {
|
||||
// msg.data - pubsub data received
|
||||
}
|
||||
|
||||
libp2p.pubsub.subscribe(topic, handler)
|
||||
libp2p.pubsub.on(topic, handler)
|
||||
libp2p.pubsub.subscribe(topic)
|
||||
```
|
||||
|
||||
### pubsub.unsubscribe
|
||||
|
||||
Unsubscribes the given handler from a pubsub topic. If no handler is provided, all handlers for the topic are removed.
|
||||
Unsubscribes from a pubsub topic.
|
||||
|
||||
`libp2p.pubsub.unsubscribe(topic, handler)`
|
||||
`libp2p.pubsub.unsubscribe(topic)`
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| topic | `string` | topic to unsubscribe |
|
||||
| handler | `function(<object>)` | handler subscribed |
|
||||
|
||||
#### Returns
|
||||
|
||||
@ -1380,7 +1383,129 @@ const handler = (msg) => {
|
||||
// msg.data - pubsub data received
|
||||
}
|
||||
|
||||
libp2p.pubsub.unsubscribe(topic, handler)
|
||||
libp2p.pubsub.removeListener(topic, handler)
|
||||
libp2p.pubsub.unsubscribe(topic)
|
||||
```
|
||||
|
||||
## pubsub.on
|
||||
|
||||
A Pubsub router is an [EventEmitter](https://nodejs.org/api/events.html#events_class_eventemitter) and uses its events for pubsub message handlers.
|
||||
|
||||
`libp2p.pubsub.on(topic, handler)`
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| topic | `string` | topic to listen |
|
||||
| handler | `function({ from: string, data: Uint8Array, seqno: Uint8Array, topicIDs: Array<string>, signature: Uint8Array, key: Uint8Array })` | handler for new data on topic |
|
||||
|
||||
#### Returns
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `void` | |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
const topic = 'topic'
|
||||
const handler = (msg) => {
|
||||
// msg.data - pubsub data received
|
||||
}
|
||||
|
||||
libp2p.pubsub.on(topic, handler)
|
||||
libp2p.pubsub.subscribe(topic)
|
||||
```
|
||||
|
||||
## pubsub.removeListener
|
||||
|
||||
A Pubsub router is an [EventEmitter](https://nodejs.org/api/events.html#events_class_eventemitter) and uses its events for pubsub message handlers.
|
||||
|
||||
`libp2p.pubsub.removeListener(topic, handler)`
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| topic | `string` | topic to remove listener |
|
||||
| handler | `function({ from: string, data: Uint8Array, seqno: Uint8Array, topicIDs: Array<string>, signature: Uint8Array, key: Uint8Array })` | handler for new data on topic |
|
||||
|
||||
#### Returns
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `void` | |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
const topic = 'topic'
|
||||
const handler = (msg) => {
|
||||
// msg.data - pubsub data received
|
||||
}
|
||||
|
||||
libp2p.pubsub.removeListener(topic, handler)
|
||||
libp2p.pubsub.unsubscribe(topic)
|
||||
```
|
||||
|
||||
## pubsub.topicValidators.set
|
||||
|
||||
Pubsub routers support message validators per topic, which validate a message before it is propagated. Set is used to specify a validator for a topic.
|
||||
|
||||
`libp2p.pubsub.topicValidators.set(topic, validator)`
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| topic | `string` | topic to bind a validator |
|
||||
| handler | `function({ topic: string, msg: RPC })` | validator for new data on topic |
|
||||
|
||||
#### Returns
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Map<string, function(string, RPC)>` | The `Map` object |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
const topic = 'topic'
|
||||
const validateMessage = (msgTopic, msg) => {
|
||||
const input = uint8ArrayToString(msg.data)
|
||||
const validInputs = ['a', 'b', 'c']
|
||||
|
||||
if (!validInputs.includes(input)) {
|
||||
throw new Error('no valid input received')
|
||||
}
|
||||
}
|
||||
libp2p.pubsub.topicValidators.set(topic, validateMessage)
|
||||
```
|
||||
|
||||
## pubsub.topicValidators.delete
|
||||
|
||||
Pubsub routers support message validators per topic, which validate a message before it is propagated. Delete is used to remove a validator for a topic.
|
||||
|
||||
`libp2p.pubsub.topicValidators.delete(topic)`
|
||||
|
||||
#### Parameters
|
||||
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| topic | `string` | topic to remove a validator |
|
||||
|
||||
#### Returns
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `boolean` | `true` if an element in the Map object existed and has been removed, or `false` if the element does not exist. |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
const topic = 'topic'
|
||||
libp2p.pubsub.topicValidators.delete(topic)
|
||||
```
|
||||
|
||||
### connectionManager.get
|
||||
@ -1454,7 +1579,7 @@ Create a key in the keychain.
|
||||
|------|------|-------------|
|
||||
| name | `string` | The local key name. It cannot already exist. |
|
||||
| type | `string` | One of the key types; 'rsa' |
|
||||
| size | `number` | The key size in bits. |
|
||||
| [size] | `number` | The key size in bits. Must be provided for rsa keys. |
|
||||
|
||||
#### Returns
|
||||
|
||||
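An editorial sketch (not part of the diff) of how the changed signature might be used, given a started `libp2p` instance; the size-less ed25519 call is an assumption based on the key-type support added in 0.28.10.

```js
// RSA keys still require an explicit size.
const rsaInfo = await libp2p.keychain.createKey('my-rsa-key', 'rsa', 2048)

// Assumption: for ed25519 keys the size argument can be omitted.
const edInfo = await libp2p.keychain.createKey('my-ed25519-key', 'ed25519')

console.log(rsaInfo.name, edInfo.name) // key info includes the key's name and id
```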
@ -1677,19 +1802,19 @@ Encrypt protected data using the Cryptographic Message Syntax (CMS).
|
||||
| Name | Type | Description |
|
||||
|------|------|-------------|
|
||||
| name | `string` | The local key name. |
|
||||
| data | `Buffer` | The data to encrypt. |
|
||||
| data | `Uint8Array` | The data to encrypt. |
|
||||
|
||||
#### Returns
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Promise<Buffer>` | Encrypted data as a PKCS #7 message in DER. |
|
||||
| `Promise<Uint8Array>` | Encrypted data as a PKCS #7 message in DER. |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
const keyInfo = await libp2p.keychain.createKey('keyTest', 'rsa', 4096)
|
||||
const enc = await libp2p.keychain.cms.encrypt('keyTest', Buffer.from('data'))
|
||||
const enc = await libp2p.keychain.cms.encrypt('keyTest', uint8ArrayFromString('data'))
|
||||
```
|
||||
|
||||
### keychain.cms.decrypt
|
||||
@ -1709,13 +1834,13 @@ The keychain must contain one of the keys used to encrypt the data. If none of
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `Promise<Buffer>` | Decrypted data. |
|
||||
| `Promise<Uint8Array>` | Decrypted data. |
|
||||
|
||||
#### Example
|
||||
|
||||
```js
|
||||
const keyInfo = await libp2p.keychain.createKey('keyTest', 'rsa', 4096)
|
||||
const enc = await libp2p.keychain.cms.encrypt('keyTest', Buffer.from('data'))
|
||||
const enc = await libp2p.keychain.cms.encrypt('keyTest', uint8ArrayFromString('data'))
|
||||
const decData = await libp2p.keychain.cms.decrypt(enc)
|
||||
```
|
||||
|
||||
@ -1801,7 +1926,7 @@ console.log(peerStats.toJSON())
|
||||
|
||||
## Events
|
||||
|
||||
Once you have a libp2p instance, you can listen to several events it emits, so that you can be notified of relevant network events.
|
||||
### libp2p
|
||||
|
||||
|
@ -52,7 +52,7 @@ The libp2p ecosystem contains at least one module for each of these subsystems.

After selecting the modules to use, it is also possible to configure each one according to your needs.

Bear in mind that only a **transport** and **connection encryption** are required, while all the other subsystems are optional.
Bear in mind that a **transport** and **connection encryption** module are **required**, while all the other subsystems are optional.

### Transport

@ -304,7 +304,7 @@ const node = await Libp2p.create({
  },
  config: {
    peerDiscovery: {
      webRTCStar: {
      [WebRTCStar.tag]: {
        enabled: true
      }
    }
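An illustrative aside (not part of the diff above): a minimal configuration that satisfies the required-module rule might look like the sketch below, assuming `libp2p-tcp` and `libp2p-noise` provide the transport and connection encryption.

```js
const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const { NOISE } = require('libp2p-noise')

const createMinimalNode = async () => {
  // Only a transport and a connection encryption module are required;
  // stream muxers, peer discovery, pubsub and the DHT are optional extras.
  const node = await Libp2p.create({
    addresses: {
      listen: ['/ip4/0.0.0.0/tcp/0']
    },
    modules: {
      transport: [TCP],
      connEncryption: [NOISE]
    }
  })

  await node.start()
  return node
}
```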
doc/migrations/v0.28-v0.29.md (282 lines, new file)
@ -0,0 +1,282 @@
|
||||
<!--Specify versions for migration below-->
|
||||
# Migrating to libp2p@0.29
|
||||
|
||||
A migration guide for refactoring your application code from libp2p v0.28.x to v0.29.0.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [API](#api)
|
||||
- [Pubsub](#pubsub)
|
||||
- [Uint8Arrays replace node Buffers](#uint8arrays-replace-node-buffers)
|
||||
- [Module Updates](#module-updates)
|
||||
|
||||
## API
|
||||
|
||||
### Pubsub
|
||||
|
||||
The [`libp2p-gossipsub`](https://github.com/ChainSafe/js-libp2p-gossipsub) javascript implementation is now upgraded according to the Gossipsub v1.1 [spec](https://github.com/libp2p/specs/blob/master/pubsub/gossipsub/gossipsub-v1.1.md) and it packs several security hardening extensions. You can read more about it in its [blogpost](https://blog.ipfs.io/2020-05-20-gossipsub-v1.1/).
|
||||
|
||||
We leveraged this update to rethink the pubsub interface, in order to make it easier, as well as to be consistent with the API of the routers. Moreover, the interface was also reconstructed to ease new pubsub router implementations.
|
||||
|
||||
#### Access router instance
|
||||
|
||||
Libp2p prior to 0.29 unnecessarily added a layer of abstraction over the pubsub routers. We now expose the pubsub router API directly and have a test suite in the [interface-pubsub](https://github.com/libp2p/js-libp2p-interfaces/tree/master/src/pubsub) to guarantee routers compliance. This enables more advanced usage of the underlying router.
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
libp2p.pubsub._pubsub.*
|
||||
libp2p.pubsub._pubsub.topicValidators.set(topic, validator)
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
libp2p.pubsub.*
|
||||
libp2p.pubsub.topicValidators.set(topic, validator)
|
||||
```
|
||||
|
||||
#### Publish
|
||||
|
||||
Publish uses `Uint8Array` data instead of `Buffer`.
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
const topic = 'topic'
|
||||
const data = Buffer.from('data')
|
||||
|
||||
await libp2p.pubsub.publish(topic, data)
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
const topic = 'topic'
|
||||
const data = uint8ArrayFromString('data')
|
||||
|
||||
await libp2p.pubsub.publish(topic, data)
|
||||
```
|
||||
|
||||
#### Subscribe
|
||||
|
||||
Handlers should no longer be passed when subscribing. Instead, applications should bind event handlers for each topic they wish to subscribe to. This enables more flexibility at the application level without changing the underlying subscriptions.
|
||||
Message data is now a `Uint8Array` instead of `Buffer`.
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
const topic = 'topic'
|
||||
const handler = (msg) => {
|
||||
// msg.data - pubsub data received
|
||||
const data = msg.data.toString()
|
||||
}
|
||||
libp2p.pubsub.subscribe(topic, handler)
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
const topic = 'topic'
|
||||
const handler = (msg) => {
|
||||
// msg.data - pubsub data received
|
||||
const data = uint8ArrayToString(msg.data)
|
||||
}
|
||||
libp2p.pubsub.on(topic, handler)
|
||||
libp2p.pubsub.subscribe(topic)
|
||||
```
|
||||
|
||||
#### Unsubscribe
|
||||
|
||||
Handlers should not be directly bound to the subscription anymore.
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
const topic = 'topic'
|
||||
const handler = (msg) => {
|
||||
// msg.data - pubsub data received
|
||||
}
|
||||
libp2p.pubsub.unsubscribe(topic, handler)
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
const topic = 'topic'
|
||||
const handler = (msg) => {
|
||||
// msg.data - pubsub data received
|
||||
}
|
||||
libp2p.pubsub.removeListener(topic, handler)
|
||||
libp2p.pubsub.unsubscribe(topic)
|
||||
```
|
||||
|
||||
#### Topic Validators
|
||||
|
||||
The validator function does not include the peer parameter anymore. It was redundant since it is included in the message, and it could lead to issues as the peer that sent the message might not be the one who created the message in the first place. The validator function should also throw an error instead of returning `false` when the message is not valid.
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
const validator = (msgTopic, peer, msg) => {
|
||||
// process message
|
||||
return false
|
||||
}
|
||||
libp2p.pubsub._pubsub.topicValidators.set(topic, validator)
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
const validator = (msgTopic, msg) => {
|
||||
const from = msg.from
|
||||
// process message
|
||||
throw new Error('not a valid message')
|
||||
}
|
||||
libp2p.pubsub.topicValidators.set(topic, validator)
|
||||
```
|
||||
|
||||
### Uint8Arrays replace node Buffers
|
||||
|
||||
Aiming to improve libp2p browser support, we are moving away from node core modules unless we can guarantee that the code we are writing will not run in a browser. It is worth mentioning that modern JavaScript runtimes have TypedArrays such as Uint8Array backed by ArrayBuffers. All libp2p dependencies were also updated to use Uint8Array.
|
||||
|
||||
We use the [uint8arrays](https://www.npmjs.com/package/uint8arrays) utilities module to deal with `Uint8Arrays` easily, and we recommend using it in the application layer. Thanks to [@achingbrain](https://github.com/achingbrain) for the module! It includes utilities like `compare`, `concat`, `equals`, `fromString` and `toString`. In the examples of this migration guide, we will be using the following:
|
||||
|
||||
```js
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
```
|
||||
|
||||
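As a quick editorial illustration (not part of the migration guide itself), a round trip with these helpers could look like this, assuming `uint8arrays@1.x` is installed:

```js
const uint8ArrayFromString = require('uint8arrays/from-string')
const uint8ArrayToString = require('uint8arrays/to-string')
const uint8ArrayEquals = require('uint8arrays/equals')

// Both helpers default to utf8, so a string survives the round trip.
const bytes = uint8ArrayFromString('hello libp2p') // Uint8Array
const text = uint8ArrayToString(bytes)             // 'hello libp2p'

// equals() compares two Uint8Arrays byte by byte.
console.log(uint8ArrayEquals(bytes, uint8ArrayFromString(text))) // true
```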
#### contentRouting.put
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
const key = '/key'
|
||||
const value = Buffer.from('oh hello there')
|
||||
|
||||
await libp2p.contentRouting.put(key, value)
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
const key = '/key'
|
||||
const value = uint8ArrayFromString('oh hello there')
|
||||
|
||||
await libp2p.contentRouting.put(key, value)
|
||||
```
|
||||
|
||||
#### contentRouting.get
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
const key = '/key'
|
||||
const value = await libp2p.contentRouting.get(key)
|
||||
|
||||
console.log('store value is: ', value.toString())
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
const key = '/key'
|
||||
const value = await libp2p.contentRouting.get(key)
|
||||
|
||||
console.log('store value is: ', uint8ArrayToString(value))
|
||||
```
|
||||
|
||||
#### metadataBook.set
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
peerStore.metadataBook.set(peerId, 'location', Buffer.from('Saturn'))
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
peerStore.metadataBook.set(peerId, 'location', uint8ArrayFromString('Saturn'))
|
||||
```
|
||||
|
||||
#### metadataBook.get
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
const data = peerStore.metadataBook.get(peerId)
|
||||
|
||||
console.log('stored location: ', data.get('location').toString())
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
const data = peerStore.metadataBook.get(peerId)
|
||||
|
||||
console.log('stored location: ', uint8ArrayToString(data.get('location')))
|
||||
```
|
||||
|
||||
#### metadataBook.getValue
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
const location = peerStore.metadataBook.getValue(peerId, 'location')
|
||||
|
||||
console.log('stored location: ', location.toString())
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
const location = peerStore.metadataBook.getValue(peerId, 'location')
|
||||
|
||||
console.log('stored location: ', uint8ArrayToString(location))
|
||||
```
|
||||
|
||||
#### keychain.cms.encrypt
|
||||
|
||||
**Before**
|
||||
|
||||
```js
|
||||
const keyInfo = await libp2p.keychain.createKey('keyTest', 'rsa', 4096)
|
||||
const enc = await libp2p.keychain.cms.encrypt('keyTest', Buffer.from('data'))
|
||||
```
|
||||
|
||||
**After**
|
||||
|
||||
```js
|
||||
const keyInfo = await libp2p.keychain.createKey('keyTest', 'rsa', 4096)
|
||||
const enc = await libp2p.keychain.cms.encrypt('keyTest', uint8ArrayFromString('data'))
|
||||
```
|
||||
|
||||
#### pubsub
|
||||
|
||||
Already specified in its own chapter above.
|
||||
|
||||
## Module Updates
|
||||
|
||||
With this release you should update the following libp2p modules if you are relying on them:
|
||||
|
||||
```json
|
||||
"libp2p-bootstrap": "^0.12.0",
|
||||
"libp2p-delegated-content-routing": "^0.6.0",
|
||||
"libp2p-delegated-peer-routing": "^0.6.0",
|
||||
"libp2p-floodsub": "^0.23.0",
|
||||
"libp2p-gossipsub": "^0.6.0",
|
||||
"libp2p-kad-dht": "^0.20.0",
|
||||
"libp2p-mdns": "^0.15.0",
|
||||
"libp2p-mplex": "^0.10.0",
|
||||
"libp2p-noise": "^2.0.0",
|
||||
"libp2p-secio": "^0.13.1",
|
||||
"libp2p-tcp": "^0.15.1",
|
||||
"libp2p-webrtc-star": "^0.20.0",
|
||||
"libp2p-websockets": "^0.14.0",
|
||||
```
|
@ -3,6 +3,7 @@
|
||||
|
||||
const pipe = require('it-pipe')
|
||||
const lp = require('it-length-prefixed')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
function stdinToStream(stream) {
|
||||
// Read utf-8 from stdin
|
||||
@ -28,7 +29,7 @@ function streamToConsole(stream) {
|
||||
// For each chunk of data
|
||||
for await (const msg of source) {
|
||||
// Output the data as a utf8 string
|
||||
console.log('> ' + msg.toString('utf8').replace('\n', ''))
|
||||
console.log('> ' + uint8ArrayToString(msg).replace('\n', ''))
|
||||
}
|
||||
}
|
||||
)
|
||||
|
@ -21,7 +21,7 @@ const createNode = async () => {
|
||||
},
|
||||
config: {
|
||||
peerDiscovery: {
|
||||
mdns: {
|
||||
[MulticastDNS.tag]: {
|
||||
interval: 20e3,
|
||||
enabled: true
|
||||
}
|
||||
|
@ -1,18 +1,17 @@
|
||||
/* eslint no-console: ["off"] */
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const { generate } = require('libp2p/src/pnet')
|
||||
const privateLibp2pNode = require('./libp2p-node')
|
||||
|
||||
const pipe = require('it-pipe')
|
||||
|
||||
// Create a buffer and write the swarm key to it
|
||||
const swarmKey = Buffer.alloc(95)
|
||||
// Create a Uint8Array and write the swarm key to it
|
||||
const swarmKey = new Uint8Array(95)
|
||||
generate(swarmKey)
|
||||
|
||||
// This key is for testing a different key not working
|
||||
const otherSwarmKey = Buffer.alloc(95)
|
||||
const otherSwarmKey = new Uint8Array(95)
|
||||
generate(otherSwarmKey)
|
||||
|
||||
;(async () => {
|
||||
|
@ -11,7 +11,7 @@ const Protector = require('libp2p/src/pnet')
|
||||
* privateLibp2pNode returns a libp2p node function that will use the swarm
|
||||
* key with the given `swarmKey` to create the Protector
|
||||
*
|
||||
* @param {Buffer} swarmKey
|
||||
* @param {Uint8Array} swarmKey
|
||||
* @returns {Promise<libp2p>} Returns a libp2pNode function for use in IPFS creation
|
||||
*/
|
||||
const privateLibp2pNode = async (swarmKey) => {
|
||||
|
@ -1,13 +1,14 @@
|
||||
/* eslint-disable no-console */
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const Libp2p = require('../../')
|
||||
const TCP = require('libp2p-tcp')
|
||||
const Mplex = require('libp2p-mplex')
|
||||
const { NOISE } = require('libp2p-noise')
|
||||
const SECIO = require('libp2p-secio')
|
||||
const Gossipsub = require('libp2p-gossipsub')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
const createNode = async () => {
|
||||
const node = await Libp2p.create({
|
||||
@ -38,16 +39,18 @@ const createNode = async () => {
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.dial(node2.peerId)
|
||||
|
||||
await node1.pubsub.subscribe(topic, (msg) => {
|
||||
console.log(`node1 received: ${msg.data.toString()}`)
|
||||
node1.pubsub.on(topic, (msg) => {
|
||||
console.log(`node1 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node1.pubsub.subscribe(topic)
|
||||
|
||||
await node2.pubsub.subscribe(topic, (msg) => {
|
||||
console.log(`node2 received: ${msg.data.toString()}`)
|
||||
node2.pubsub.on(topic, (msg) => {
|
||||
console.log(`node2 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node2.pubsub.subscribe(topic)
|
||||
|
||||
// node2 publishes "news" every second
|
||||
setInterval(() => {
|
||||
node2.pubsub.publish(topic, Buffer.from('Bird bird bird, bird is the word!'))
|
||||
node2.pubsub.publish(topic, uint8ArrayFromString('Bird bird bird, bird is the word!'))
|
||||
}, 1000)
|
||||
})()
|
||||
|
@ -47,17 +47,19 @@ node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
|
||||
await node1.dial(node2.peerId)
|
||||
|
||||
await node1.pubsub.subscribe(topic, (msg) => {
|
||||
console.log(`node1 received: ${msg.data.toString()}`)
|
||||
node1.pubsub.on(topic, (msg) => {
|
||||
console.log(`node1 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node1.pubsub.subscribe(topic)
|
||||
|
||||
await node2.pubsub.subscribe(topic, (msg) => {
|
||||
console.log(`node2 received: ${msg.data.toString()}`)
|
||||
node2.pubsub.on(topic, (msg) => {
|
||||
console.log(`node2 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node2.pubsub.subscribe(topic)
|
||||
|
||||
// node2 publishes "news" every second
|
||||
setInterval(() => {
|
||||
node2.pubsub.publish(topic, Buffer.from('Bird bird bird, bird is the word!'))
|
||||
node2.pubsub.publish(topic, uint8ArrayFromString('Bird bird bird, bird is the word!'))
|
||||
}, 1000)
|
||||
```
|
||||
|
||||
|
examples/pubsub/message-filtering/1.js (88 lines, new file)
@ -0,0 +1,88 @@
|
||||
/* eslint-disable no-console */
|
||||
'use strict'
|
||||
|
||||
const Libp2p = require('../../../')
|
||||
const TCP = require('libp2p-tcp')
|
||||
const Mplex = require('libp2p-mplex')
|
||||
const { NOISE } = require('libp2p-noise')
|
||||
const SECIO = require('libp2p-secio')
|
||||
const Gossipsub = require('libp2p-gossipsub')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
const createNode = async () => {
|
||||
const node = await Libp2p.create({
|
||||
addresses: {
|
||||
listen: ['/ip4/0.0.0.0/tcp/0']
|
||||
},
|
||||
modules: {
|
||||
transport: [TCP],
|
||||
streamMuxer: [Mplex],
|
||||
connEncryption: [NOISE, SECIO],
|
||||
pubsub: Gossipsub
|
||||
}
|
||||
})
|
||||
|
||||
await node.start()
|
||||
return node
|
||||
}
|
||||
|
||||
(async () => {
|
||||
const topic = 'fruit'
|
||||
|
||||
const [node1, node2, node3] = await Promise.all([
|
||||
createNode(),
|
||||
createNode(),
|
||||
createNode(),
|
||||
])
|
||||
|
||||
// node1 connects to node2 and node2 connects to node3
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.dial(node2.peerId)
|
||||
|
||||
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node2.dial(node3.peerId)
|
||||
|
||||
//subscribe
|
||||
node1.pubsub.on(topic, (msg) => {
|
||||
console.log(`node1 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node1.pubsub.subscribe(topic)
|
||||
|
||||
node2.pubsub.on(topic, (msg) => {
|
||||
console.log(`node2 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node2.pubsub.subscribe(topic)
|
||||
|
||||
node3.pubsub.on(topic, (msg) => {
|
||||
console.log(`node3 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node3.pubsub.subscribe(topic)
|
||||
|
||||
const validateFruit = (msgTopic, msg) => {
|
||||
const fruit = uint8ArrayToString(msg.data)
|
||||
const validFruit = ['banana', 'apple', 'orange']
|
||||
|
||||
if (!validFruit.includes(fruit)) {
|
||||
throw new Error('no valid fruit received')
|
||||
}
|
||||
}
|
||||
|
||||
//validate fruit
|
||||
node1.pubsub.topicValidators.set(topic, validateFruit)
|
||||
node2.pubsub.topicValidators.set(topic, validateFruit)
|
||||
node3.pubsub.topicValidators.set(topic, validateFruit)
|
||||
|
||||
// node1 publishes "fruits" every five seconds
|
||||
var count = 0;
|
||||
const myFruits = ['banana', 'apple', 'car', 'orange'];
|
||||
// car is not a fruit !
|
||||
setInterval(() => {
|
||||
console.log('############## fruit ' + myFruits[count] + ' ##############')
|
||||
node1.pubsub.publish(topic, uint8ArrayFromString(myFruits[count]))
|
||||
count++
|
||||
if (count == myFruits.length) {
|
||||
count = 0
|
||||
}
|
||||
}, 5000)
|
||||
})()
|
examples/pubsub/message-filtering/README.md (113 lines, new file)
@ -0,0 +1,113 @@
|
||||
# Filter Messages
|
||||
|
||||
To prevent undesired data from being propagated on the network, we can apply a filter to Gossipsub. Messages that fail validation in the filter will not be re-shared.
|
||||
|
||||
## 1. Setting up a PubSub network with three nodes
|
||||
|
||||
First, let's update our libp2p configuration with a pubsub implementation.
|
||||
|
||||
```JavaScript
|
||||
const Libp2p = require('libp2p')
|
||||
const Gossipsub = require('libp2p-gossipsub')
|
||||
|
||||
const node = await Libp2p.create({
|
||||
addresses: {
|
||||
listen: ['/ip4/0.0.0.0/tcp/0']
|
||||
},
|
||||
modules: {
|
||||
transport: [ TCP ],
|
||||
streamMuxer: [ Mplex ],
|
||||
connEncryption: [ NOISE, SECIO ],
|
||||
pubsub: Gossipsub
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
Then, create three nodes and connect them together. In this example, we will connect the nodes in series: node 1 connects to node 2, and node 2 connects to node 3.
|
||||
|
||||
```JavaScript
|
||||
const [node1, node2, node3] = await Promise.all([
|
||||
createNode(),
|
||||
createNode(),
|
||||
createNode(),
|
||||
])
|
||||
|
||||
node1.peerStore.addressBook.set(node2.peerId, node2.multiaddrs)
|
||||
await node1.dial(node2.peerId)
|
||||
|
||||
node2.peerStore.addressBook.set(node3.peerId, node3.multiaddrs)
|
||||
await node2.dial(node3.peerId)
|
||||
```
|
||||
|
||||
Now we can subscribe to the fruit topic and log incoming messages.
|
||||
|
||||
```JavaScript
|
||||
const topic = 'fruit'
|
||||
|
||||
node1.pubsub.on(topic, (msg) => {
|
||||
console.log(`node1 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node1.pubsub.subscribe(topic)
|
||||
|
||||
node2.pubsub.on(topic, (msg) => {
|
||||
console.log(`node2 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node2.pubsub.subscribe(topic)
|
||||
|
||||
node3.pubsub.on(topic, (msg) => {
|
||||
console.log(`node3 received: ${uint8ArrayToString(msg.data)}`)
|
||||
})
|
||||
await node3.pubsub.subscribe(topic)
|
||||
```
|
||||
Finally, let's define the validation filter for the fruit topic.
|
||||
|
||||
```JavaScript
|
||||
const validateFruit = (msgTopic, msg) => {
|
||||
const fruit = uint8ArrayToString(msg.data)
|
||||
const validFruit = ['banana', 'apple', 'orange']
|
||||
|
||||
if (!validFruit.includes(fruit)) {
|
||||
throw new Error('no valid fruit received')
|
||||
}
|
||||
}
|
||||
|
||||
node1.pubsub.topicValidators.set(topic, validateFruit)
|
||||
node2.pubsub.topicValidators.set(topic, validateFruit)
|
||||
node3.pubsub.topicValidators.set(topic, validateFruit)
|
||||
```
|
||||
|
||||
In this example, node 1 has an outdated version of the system, or is a malicious node. When it tries to publish fruit, the messages are re-shared and all the nodes receive the message. However, when it tries to publish a vehicle, the message is not re-shared.
|
||||
|
||||
```JavaScript
|
||||
var count = 0;
|
||||
const myFruits = ['banana', 'apple', 'car', 'orange'];
|
||||
|
||||
setInterval(() => {
|
||||
console.log('############## fruit ' + myFruits[count] + ' ##############')
|
||||
node1.pubsub.publish(topic, new TextEncoder().encode(myFruits[count]))
|
||||
count++
|
||||
if (count == myFruits.length) {
|
||||
count = 0
|
||||
}
|
||||
}, 5000)
|
||||
```
|
||||
|
||||
Result
|
||||
|
||||
```
|
||||
> node 1.js
|
||||
############## fruit banana ##############
|
||||
node1 received: banana
|
||||
node2 received: banana
|
||||
node3 received: banana
|
||||
############## fruit apple ##############
|
||||
node1 received: apple
|
||||
node2 received: apple
|
||||
node3 received: apple
|
||||
############## fruit car ##############
|
||||
node1 received: car
|
||||
############## fruit orange ##############
|
||||
node1 received: orange
|
||||
node2 received: orange
|
||||
node3 received: orange
|
||||
```
|
package.json (120 changed lines)
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "libp2p",
|
||||
"version": "0.28.7",
|
||||
"version": "0.29.0",
|
||||
"description": "JavaScript implementation of libp2p, a modular peer to peer network stack",
|
||||
"leadMaintainer": "Jacob Heun <jacobheun@gmail.com>",
|
||||
"main": "src/index.js",
|
||||
@ -37,7 +37,7 @@
|
||||
"homepage": "https://libp2p.io",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10.0.0",
|
||||
"node": ">=12.0.0",
|
||||
"npm": ">=6.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
@ -50,7 +50,7 @@
|
||||
"err-code": "^2.0.0",
|
||||
"events": "^3.1.0",
|
||||
"hashlru": "^2.3.0",
|
||||
"interface-datastore": "^1.0.4",
|
||||
"interface-datastore": "^2.0.0",
|
||||
"ipfs-utils": "^2.2.0",
|
||||
"it-all": "^1.0.1",
|
||||
"it-buffer": "^0.1.2",
|
||||
@ -58,111 +58,113 @@
|
||||
"it-length-prefixed": "^3.0.1",
|
||||
"it-pipe": "^1.1.0",
|
||||
"it-protocol-buffers": "^0.2.0",
|
||||
"libp2p-crypto": "^0.17.6",
|
||||
"libp2p-interfaces": "^0.3.1",
|
||||
"libp2p-utils": "^0.1.2",
|
||||
"mafmt": "^7.0.0",
|
||||
"libp2p-crypto": "^0.18.0",
|
||||
"libp2p-interfaces": "^0.5.1",
|
||||
"libp2p-utils": "^0.2.0",
|
||||
"mafmt": "^8.0.0",
|
||||
"merge-options": "^2.0.0",
|
||||
"moving-average": "^1.0.0",
|
||||
"multiaddr": "^7.4.3",
|
||||
"multistream-select": "^0.15.0",
|
||||
"multiaddr": "^8.0.0",
|
||||
"multicodec": "^2.0.0",
|
||||
"multistream-select": "^1.0.0",
|
||||
"mutable-proxy": "^1.0.0",
|
||||
"node-forge": "^0.9.1",
|
||||
"p-any": "^3.0.0",
|
||||
"p-fifo": "^1.0.0",
|
||||
"p-settle": "^4.0.1",
|
||||
"peer-id": "^0.13.11",
|
||||
"protons": "^1.0.1",
|
||||
"peer-id": "^0.14.0",
|
||||
"protons": "^2.0.0",
|
||||
"retimer": "^2.0.0",
|
||||
"sanitize-filename": "^1.6.3",
|
||||
"streaming-iterables": "^4.1.0",
|
||||
"timeout-abort-controller": "^1.0.0",
|
||||
"streaming-iterables": "^5.0.2",
|
||||
"timeout-abort-controller": "^1.1.1",
|
||||
"varint": "^5.0.0",
|
||||
"xsalsa20": "^1.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nodeutils/defaults-deep": "^1.1.0",
|
||||
"abortable-iterator": "^3.0.0",
|
||||
"aegir": "^22.0.0",
|
||||
"aegir": "^26.0.0",
|
||||
"chai": "^4.2.0",
|
||||
"chai-as-promised": "^7.1.1",
|
||||
"chai-bytes": "^0.1.2",
|
||||
"chai-string": "^1.5.0",
|
||||
"cids": "^0.8.0",
|
||||
"datastore-fs": "^1.1.0",
|
||||
"datastore-level": "^1.1.0",
|
||||
"cids": "^1.0.0",
|
||||
"delay": "^4.3.0",
|
||||
"dirty-chai": "^2.0.1",
|
||||
"interop-libp2p": "^0.1.0",
|
||||
"ipfs-http-client": "^44.0.0",
|
||||
"interop-libp2p": "^0.3.0",
|
||||
"ipfs-http-client": "^46.0.0",
|
||||
"it-concat": "^1.0.0",
|
||||
"it-pair": "^1.0.0",
|
||||
"it-pushable": "^1.4.0",
|
||||
"level": "^6.0.1",
|
||||
"libp2p-bootstrap": "^0.11.0",
|
||||
"libp2p-delegated-content-routing": "^0.5.0",
|
||||
"libp2p-delegated-peer-routing": "^0.5.0",
|
||||
"libp2p-floodsub": "^0.21.0",
|
||||
"libp2p-gossipsub": "^0.4.6",
|
||||
"libp2p-kad-dht": "^0.19.1",
|
||||
"libp2p-mdns": "^0.14.1",
|
||||
"libp2p-mplex": "^0.9.5",
|
||||
"libp2p-noise": "^1.1.1",
|
||||
"libp2p-secio": "^0.12.4",
|
||||
"libp2p-tcp": "^0.14.1",
|
||||
"libp2p-webrtc-star": "^0.18.0",
|
||||
"libp2p-websockets": "^0.13.1",
|
||||
"multihashes": "^0.4.19",
|
||||
"nock": "^12.0.3",
|
||||
"libp2p": ".",
|
||||
"libp2p-bootstrap": "^0.12.0",
|
||||
"libp2p-delegated-content-routing": "^0.6.0",
|
||||
"libp2p-delegated-peer-routing": "^0.6.0",
|
||||
"libp2p-floodsub": "^0.23.0",
|
||||
"libp2p-gossipsub": "^0.6.0",
|
||||
"libp2p-kad-dht": "^0.20.0",
|
||||
"libp2p-mdns": "^0.15.0",
|
||||
"libp2p-mplex": "^0.10.0",
|
||||
"libp2p-noise": "^2.0.0",
|
||||
"libp2p-secio": "^0.13.1",
|
||||
"libp2p-tcp": "^0.15.1",
|
||||
"libp2p-webrtc-star": "^0.20.0",
|
||||
"libp2p-websockets": "^0.14.0",
|
||||
"multihashes": "^3.0.1",
|
||||
"nock": "^13.0.3",
|
||||
"p-defer": "^3.0.0",
|
||||
"p-times": "^3.0.0",
|
||||
"p-wait-for": "^3.1.0",
|
||||
"promisify-es6": "^1.0.3",
|
||||
"rimraf": "^3.0.2",
|
||||
"sinon": "^9.0.2"
|
||||
"sinon": "^9.0.2",
|
||||
"uint8arrays": "^1.1.0"
|
||||
},
|
||||
"contributors": [
|
||||
"David Dias <daviddias.p@gmail.com>",
|
||||
"Jacob Heun <jacobheun@gmail.com>",
|
||||
"Vasco Santos <vasco.santos@moxy.studio>",
|
||||
"Alan Shaw <alan@tableflip.io>",
|
||||
"Alex Potsides <alex@achingbrain.net>",
|
||||
"Cayman <caymannava@gmail.com>",
|
||||
"Pedro Teixeira <i@pgte.me>",
|
||||
"Friedel Ziegelmayer <dignifiedquire@gmail.com>",
|
||||
"Alex Potsides <alex@achingbrain.net>",
|
||||
"Maciej Krüger <mkg20001@gmail.com>",
|
||||
"Hugo Dias <mail@hugodias.me>",
|
||||
"Volker Mische <volker.mische@gmail.com>",
|
||||
"dirkmc <dirkmdev@gmail.com>",
|
||||
"Richard Littauer <richard.littauer@gmail.com>",
|
||||
"Thomas Eizinger <thomas@eizinger.io>",
|
||||
"Ryan Bell <ryan@piing.net>",
|
||||
"Giovanni T. Parra <fiatjaf@gmail.com>",
|
||||
"Andrew Nesbitt <andrewnez@gmail.com>",
|
||||
"ᴠɪᴄᴛᴏʀ ʙᴊᴇʟᴋʜᴏʟᴍ <victorbjelkholm@gmail.com>",
|
||||
"Andrew Nesbitt <andrewnez@gmail.com>",
|
||||
"Elven <mon.samuel@qq.com>",
|
||||
"Didrik Nordström <didrik.nordstrom@gmail.com>",
|
||||
"Tiago Alves <alvesjtiago@gmail.com>",
|
||||
"Yusef Napora <yusef@napora.org>",
|
||||
"Zane Starr <zcstarr@gmail.com>",
|
||||
"ebinks <elizabethjbinks@gmail.com>",
|
||||
"isan_rivkin <isanrivkin@gmail.com>",
|
||||
"robertkiel <robert.kiel@validitylabs.org>",
|
||||
"RasmusErik Voel Jensen <github@solsort.com>",
|
||||
"Bernd Strehl <bernd.strehl@gmail.com>",
|
||||
"Chris Bratlien <chrisbratlien@gmail.com>",
|
||||
"Daijiro Wachi <daijiro.wachi@gmail.com>",
|
||||
"Diogo Silva <fsdiogo@gmail.com>",
|
||||
"Dmitriy Ryajov <dryajov@gmail.com>",
|
||||
"Fei Liu <liu.feiwood@gmail.com>",
|
||||
"Florian-Merle <florian.david.merle@gmail.com>",
|
||||
"Giovanni T. Parra <fiatjaf@gmail.com>",
|
||||
"Ryan Bell <ryan@piing.net>",
|
||||
"Thomas Eizinger <thomas@eizinger.io>",
|
||||
"Didrik Nordström <didrik@betamos.se>",
|
||||
"Francis Gulotta <wizard@roborooter.com>",
|
||||
"Henrique Dias <hacdias@gmail.com>",
|
||||
"Irakli Gozalishvili <rfobic@gmail.com>",
|
||||
"Florian-Merle <florian.david.merle@gmail.com>",
|
||||
"Joel Gustafson <joelg@mit.edu>",
|
||||
"Julien Bouquillon <contact@revolunet.com>",
|
||||
"Kevin Kwok <antimatter15@gmail.com>",
|
||||
"Felipe Martins <felipebrasil93@gmail.com>",
|
||||
"Nuno Nogueira <nunofmn@gmail.com>",
|
||||
"Fei Liu <liu.feiwood@gmail.com>",
|
||||
"RasmusErik Voel Jensen <github@solsort.com>",
|
||||
"Dmitriy Ryajov <dryajov@gmail.com>",
|
||||
"Soeren <nikorpoulsen@gmail.com>",
|
||||
"Sönke Hahn <soenkehahn@gmail.com>"
|
||||
"Sönke Hahn <soenkehahn@gmail.com>",
|
||||
"Tiago Alves <alvesjtiago@gmail.com>",
|
||||
"Diogo Silva <fsdiogo@gmail.com>",
|
||||
"Yusef Napora <yusef@napora.org>",
|
||||
"Zane Starr <zcstarr@gmail.com>",
|
||||
"Daijiro Wachi <daijiro.wachi@gmail.com>",
|
||||
"Chris Bratlien <chrisbratlien@gmail.com>",
|
||||
"ebinks <elizabethjbinks@gmail.com>",
|
||||
"Bernd Strehl <bernd.strehl@gmail.com>",
|
||||
"isan_rivkin <isanrivkin@gmail.com>",
|
||||
"Henrique Dias <hacdias@gmail.com>",
|
||||
"robertkiel <robert.kiel@validitylabs.org>",
|
||||
"Irakli Gozalishvili <rfobic@gmail.com>"
|
||||
]
|
||||
}
|
||||
|
@@ -122,11 +122,11 @@ class Circuit {
  type: CircuitPB.Type.HOP,
  srcPeer: {
    id: this.peerId.toBytes(),
    addrs: this._libp2p.multiaddrs.map(addr => addr.buffer)
    addrs: this._libp2p.multiaddrs.map(addr => addr.bytes)
  },
  dstPeer: {
    id: destinationPeer.toBytes(),
    addrs: [multiaddr(destinationAddr).buffer]
    addrs: [multiaddr(destinationAddr).bytes]
  }
}
})

@@ -64,8 +64,8 @@ module.exports = (node) => {

  /**
   * Store the given key/value pair in the DHT.
   * @param {Buffer} key
   * @param {Buffer} value
   * @param {Uint8Array} key
   * @param {Uint8Array} value
   * @param {Object} [options] - put options
   * @param {number} [options.minPeers] - minimum number of peers required to successfully put
   * @returns {Promise<void>}

@@ -81,10 +81,10 @@ module.exports = (node) => {
  /**
   * Get the value to the given key.
   * Times out after 1 minute by default.
   * @param {Buffer} key
   * @param {Uint8Array} key
   * @param {Object} [options] - get options
   * @param {number} [options.timeout] - optional timeout (default: 60000)
   * @returns {Promise<{from: PeerId, val: Buffer}>}
   * @returns {Promise<{from: PeerId, val: Uint8Array}>}
   */
  async get (key, options) { // eslint-disable-line require-await
    if (!node.isStarted() || !dht.isStarted) {

@@ -96,11 +96,11 @@ module.exports = (node) => {

  /**
   * Get the `n` values to the given key without sorting.
   * @param {Buffer} key
   * @param {Uint8Array} key
   * @param {number} nVals
   * @param {Object} [options] - get options
   * @param {number} [options.timeout] - optional timeout (default: 60000)
   * @returns {Promise<Array<{from: PeerId, val: Buffer}>>}
   * @returns {Promise<Array<{from: PeerId, val: Uint8Array}>>}
   */
  async getMany (key, nVals, options) { // eslint-disable-line require-await
    if (!node.isStarted() || !dht.isStarted) {

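These JSDoc changes switch the DHT-backed key/value API from Node `Buffer`s to `Uint8Array`s. A minimal calling sketch, assuming a started node `libp2p` that exposes these methods on `contentRouting` (as documented for this release), run inside an async context; the key and value strings are purely illustrative:

```js
const uint8ArrayFromString = require('uint8arrays/from-string')
const uint8ArrayToString = require('uint8arrays/to-string')

// keys and values are now plain Uint8Arrays rather than Buffers
const key = uint8ArrayFromString('/my-app/best-fruit') // illustrative key
const value = uint8ArrayFromString('bananas')          // illustrative value

await libp2p.contentRouting.put(key, value, { minPeers: 2 })

// get resolves to { from, val } per the JSDoc above
const { val } = await libp2p.contentRouting.get(key, { timeout: 60000 })
console.log(uint8ArrayToString(val)) // 'bananas'
```
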
@@ -112,7 +112,7 @@ class Dialer {
      this.peerStore.addressBook.add(id, multiaddrs)
    }

    let addrs = this.peerStore.addressBook.getMultiaddrsForPeer(id)
    let addrs = this.peerStore.addressBook.getMultiaddrsForPeer(id) || []

    // If received a multiaddr to dial, it should be the first to use
    // But, if we know other multiaddrs for the peer, we should try them too.

@@ -2,13 +2,15 @@

exports.messages = {
  NOT_STARTED_YET: 'The libp2p node is not started yet',
  DHT_DISABLED: 'DHT is not available'
  DHT_DISABLED: 'DHT is not available',
  CONN_ENCRYPTION_REQUIRED: 'At least one connection encryption module is required'
}

exports.codes = {
  DHT_DISABLED: 'ERR_DHT_DISABLED',
  PUBSUB_NOT_STARTED: 'ERR_PUBSUB_NOT_STARTED',
  DHT_NOT_STARTED: 'ERR_DHT_NOT_STARTED',
  CONN_ENCRYPTION_REQUIRED: 'ERR_CONN_ENCRYPTION_REQUIRED',
  ERR_CONNECTION_ENDED: 'ERR_CONNECTION_ENDED',
  ERR_CONNECTION_FAILED: 'ERR_CONNECTION_FAILED',
  ERR_NODE_NOT_STARTED: 'ERR_NODE_NOT_STARTED',

@@ -27,5 +29,6 @@ exports.codes = {
  ERR_TRANSPORT_UNAVAILABLE: 'ERR_TRANSPORT_UNAVAILABLE',
  ERR_TRANSPORT_DIAL_FAILED: 'ERR_TRANSPORT_DIAL_FAILED',
  ERR_UNSUPPORTED_PROTOCOL: 'ERR_UNSUPPORTED_PROTOCOL',
  ERR_INVALID_MULTIADDR: 'ERR_INVALID_MULTIADDR'
  ERR_INVALID_MULTIADDR: 'ERR_INVALID_MULTIADDR',
  ERR_SIGNATURE_NOT_VALID: 'ERR_SIGNATURE_NOT_VALID'
}

@@ -1,6 +1,8 @@
'use strict'

const libp2pVersion = require('../../package.json').version

module.exports.PROTOCOL_VERSION = 'ipfs/0.1.0'
module.exports.AGENT_VERSION = 'js-libp2p/0.1.0'
module.exports.AGENT_VERSION = `js-libp2p/${libp2pVersion}`
module.exports.MULTICODEC_IDENTIFY = '/ipfs/id/1.0.0'
module.exports.MULTICODEC_IDENTIFY_PUSH = '/ipfs/id/push/1.0.0'

@ -1,11 +1,15 @@
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const debug = require('debug')
|
||||
const log = debug('libp2p:identify')
|
||||
log.error = debug('libp2p:identify:error')
|
||||
|
||||
const errCode = require('err-code')
|
||||
const pb = require('it-protocol-buffers')
|
||||
const lp = require('it-length-prefixed')
|
||||
const pipe = require('it-pipe')
|
||||
const { collect, take, consume } = require('streaming-iterables')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
const PeerId = require('peer-id')
|
||||
const multiaddr = require('multiaddr')
|
||||
@ -13,8 +17,8 @@ const { toBuffer } = require('it-buffer')
|
||||
|
||||
const Message = require('./message')
|
||||
|
||||
const log = debug('libp2p:identify')
|
||||
log.error = debug('libp2p:identify:error')
|
||||
const Envelope = require('../record/envelope')
|
||||
const PeerRecord = require('../record/peer-record')
|
||||
|
||||
const {
|
||||
MULTICODEC_IDENTIFY,
|
||||
@ -23,13 +27,12 @@ const {
|
||||
PROTOCOL_VERSION
|
||||
} = require('./consts')
|
||||
|
||||
const errCode = require('err-code')
|
||||
const { codes } = require('../errors')
|
||||
|
||||
class IdentifyService {
|
||||
/**
|
||||
* Takes the `addr` and converts it to a Multiaddr if possible
|
||||
* @param {Buffer|String} addr
|
||||
* @param {Uint8Array|String} addr
|
||||
* @returns {Multiaddr|null}
|
||||
*/
|
||||
static getCleanMultiaddr (addr) {
|
||||
@ -86,15 +89,20 @@ class IdentifyService {
|
||||
* @param {Array<Connection>} connections
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
push (connections) {
|
||||
async push (connections) {
|
||||
const signedPeerRecord = await this._getSelfPeerRecord()
|
||||
const listenAddrs = this._libp2p.multiaddrs.map((ma) => ma.bytes)
|
||||
const protocols = Array.from(this._protocols.keys())
|
||||
|
||||
const pushes = connections.map(async connection => {
|
||||
try {
|
||||
const { stream } = await connection.newStream(MULTICODEC_IDENTIFY_PUSH)
|
||||
|
||||
await pipe(
|
||||
[{
|
||||
listenAddrs: this._libp2p.multiaddrs.map((ma) => ma.buffer),
|
||||
protocols: Array.from(this._protocols.keys())
|
||||
listenAddrs,
|
||||
signedPeerRecord,
|
||||
protocols
|
||||
}],
|
||||
pb.encode(Message),
|
||||
stream,
|
||||
@ -160,7 +168,8 @@ class IdentifyService {
|
||||
publicKey,
|
||||
listenAddrs,
|
||||
protocols,
|
||||
observedAddr
|
||||
observedAddr,
|
||||
signedPeerRecord
|
||||
} = message
|
||||
|
||||
const id = await PeerId.createFromPubKey(publicKey)
|
||||
@ -172,9 +181,25 @@ class IdentifyService {
|
||||
// Get the observedAddr if there is one
|
||||
observedAddr = IdentifyService.getCleanMultiaddr(observedAddr)
|
||||
|
||||
// Update peers data in PeerStore
|
||||
this.peerStore.addressBook.set(id, listenAddrs.map((addr) => multiaddr(addr)))
|
||||
try {
|
||||
const envelope = await Envelope.openAndCertify(signedPeerRecord, PeerRecord.DOMAIN)
|
||||
if (this.peerStore.addressBook.consumePeerRecord(envelope)) {
|
||||
this.peerStore.protoBook.set(id, protocols)
|
||||
return
|
||||
}
|
||||
} catch (err) {
|
||||
log('received invalid envelope, discard it and fallback to listenAddrs is available', err)
|
||||
}
|
||||
|
||||
// LEGACY: Update peers data in PeerStore
|
||||
try {
|
||||
this.peerStore.addressBook.set(id, listenAddrs.map((addr) => multiaddr(addr)))
|
||||
} catch (err) {
|
||||
log.error('received invalid addrs', err)
|
||||
}
|
||||
|
||||
this.peerStore.protoBook.set(id, protocols)
|
||||
this.peerStore.metadataBook.set(id, 'AgentVersion', uint8ArrayFromString(message.agentVersion))
|
||||
|
||||
// TODO: Track our observed address so that we can score it
|
||||
log('received observed address of %s', observedAddr)
|
||||
@ -201,25 +226,28 @@ class IdentifyService {
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends the `Identify` response to the requesting peer over the
|
||||
* given `connection`
|
||||
* Sends the `Identify` response with the Signed Peer Record
|
||||
* to the requesting peer over the given `connection`
|
||||
* @private
|
||||
* @param {object} options
|
||||
* @param {*} options.stream
|
||||
* @param {Connection} options.connection
|
||||
*/
|
||||
async _handleIdentify ({ connection, stream }) {
|
||||
let publicKey = Buffer.alloc(0)
|
||||
let publicKey = new Uint8Array(0)
|
||||
if (this.peerId.pubKey) {
|
||||
publicKey = this.peerId.pubKey.bytes
|
||||
}
|
||||
|
||||
const signedPeerRecord = await this._getSelfPeerRecord()
|
||||
|
||||
const message = Message.encode({
|
||||
protocolVersion: PROTOCOL_VERSION,
|
||||
agentVersion: AGENT_VERSION,
|
||||
publicKey,
|
||||
listenAddrs: this._libp2p.multiaddrs.map((ma) => ma.buffer),
|
||||
observedAddr: connection.remoteAddr.buffer,
|
||||
listenAddrs: this._libp2p.multiaddrs.map((ma) => ma.bytes),
|
||||
signedPeerRecord,
|
||||
observedAddr: connection.remoteAddr.bytes,
|
||||
protocols: Array.from(this._protocols.keys())
|
||||
})
|
||||
|
||||
@ -258,17 +286,55 @@ class IdentifyService {
|
||||
return log.error('received invalid message', err)
|
||||
}
|
||||
|
||||
// Update peers data in PeerStore
|
||||
const id = connection.remotePeer
|
||||
|
||||
try {
|
||||
const envelope = await Envelope.openAndCertify(message.signedPeerRecord, PeerRecord.DOMAIN)
|
||||
if (this.peerStore.addressBook.consumePeerRecord(envelope)) {
|
||||
this.peerStore.protoBook.set(id, message.protocols)
|
||||
return
|
||||
}
|
||||
} catch (err) {
|
||||
log('received invalid envelope, discard it and fallback to listenAddrs is available', err)
|
||||
}
|
||||
|
||||
// LEGACY: Update peers data in PeerStore
|
||||
try {
|
||||
this.peerStore.addressBook.set(id, message.listenAddrs.map((addr) => multiaddr(addr)))
|
||||
} catch (err) {
|
||||
return log.error('received invalid listen addrs', err)
|
||||
log.error('received invalid addrs', err)
|
||||
}
|
||||
|
||||
// Update the protocols
|
||||
this.peerStore.protoBook.set(id, message.protocols)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get self signed peer record raw envelope.
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
async _getSelfPeerRecord () {
|
||||
const selfSignedPeerRecord = this.peerStore.addressBook.getRawEnvelope(this.peerId)
|
||||
|
||||
// TODO: support invalidation when dynamic multiaddrs are supported
|
||||
if (selfSignedPeerRecord) {
|
||||
return selfSignedPeerRecord
|
||||
}
|
||||
|
||||
try {
|
||||
const peerRecord = new PeerRecord({
|
||||
peerId: this.peerId,
|
||||
multiaddrs: this._libp2p.multiaddrs
|
||||
})
|
||||
const envelope = await Envelope.seal(peerRecord, this.peerId)
|
||||
this.peerStore.addressBook.consumePeerRecord(envelope)
|
||||
|
||||
return this.peerStore.addressBook.getRawEnvelope(this.peerId)
|
||||
} catch (err) {
|
||||
log.error('failed to get self peer record')
|
||||
}
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.IdentifyService = IdentifyService
|
||||
|
@@ -24,6 +24,11 @@ message Identify {
  optional bytes observedAddr = 4;

  repeated string protocols = 3;

  // signedPeerRecord contains a serialized SignedEnvelope containing a PeerRecord,
  // signed by the sending node. It contains the same addresses as the listenAddrs field, but
  // in a form that lets us share authenticated addrs with other peers.
  optional bytes signedPeerRecord = 8;
}
`

36
src/index.js
@ -6,14 +6,14 @@ const globalThis = require('ipfs-utils/src/globalthis')
|
||||
const log = debug('libp2p')
|
||||
log.error = debug('libp2p:error')
|
||||
|
||||
const errCode = require('err-code')
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
const peerRouting = require('./peer-routing')
|
||||
const contentRouting = require('./content-routing')
|
||||
const pubsub = require('./pubsub')
|
||||
const getPeer = require('./get-peer')
|
||||
const { validate: validateConfig } = require('./config')
|
||||
const { codes } = require('./errors')
|
||||
const { codes, messages } = require('./errors')
|
||||
|
||||
const AddressManager = require('./address-manager')
|
||||
const ConnectionManager = require('./connection-manager')
|
||||
@ -24,6 +24,7 @@ const Metrics = require('./metrics')
|
||||
const TransportManager = require('./transport-manager')
|
||||
const Upgrader = require('./upgrader')
|
||||
const PeerStore = require('./peer-store')
|
||||
const PubsubAdapter = require('./pubsub-adapter')
|
||||
const PersistentPeerStore = require('./peer-store/persistent')
|
||||
const Registrar = require('./registrar')
|
||||
const ping = require('./ping')
|
||||
@ -50,10 +51,11 @@ class Libp2p extends EventEmitter {
|
||||
|
||||
this.peerStore = (this.datastore && this._options.peerStore.persistence)
|
||||
? new PersistentPeerStore({
|
||||
peerId: this.peerId,
|
||||
datastore: this.datastore,
|
||||
...this._options.peerStore
|
||||
})
|
||||
: new PeerStore()
|
||||
: new PeerStore({ peerId: this.peerId })
|
||||
|
||||
// Addresses {listen, announce, noAnnounce}
|
||||
this.addresses = this._options.addresses
|
||||
@ -82,7 +84,7 @@ class Libp2p extends EventEmitter {
|
||||
}
|
||||
|
||||
// Create keychain
|
||||
if (this._options.keychain && this._options.keychain.pass && this._options.keychain.datastore) {
|
||||
if (this._options.keychain && this._options.keychain.datastore) {
|
||||
log('creating keychain')
|
||||
|
||||
const keychainOpts = Keychain.generateOptions()
|
||||
@ -121,12 +123,13 @@ class Libp2p extends EventEmitter {
|
||||
this.registrar.handle = this.handle
|
||||
|
||||
// Attach crypto channels
|
||||
if (this._modules.connEncryption) {
|
||||
const cryptos = this._modules.connEncryption
|
||||
cryptos.forEach((crypto) => {
|
||||
this.upgrader.cryptos.set(crypto.protocol, crypto)
|
||||
})
|
||||
if (!this._modules.connEncryption || !this._modules.connEncryption.length) {
|
||||
throw errCode(new Error(messages.CONN_ENCRYPTION_REQUIRED), codes.CONN_ENCRYPTION_REQUIRED)
|
||||
}
|
||||
const cryptos = this._modules.connEncryption
|
||||
cryptos.forEach((crypto) => {
|
||||
this.upgrader.cryptos.set(crypto.protocol, crypto)
|
||||
})
|
||||
|
||||
this.dialer = new Dialer({
|
||||
transportManager: this.transportManager,
|
||||
@ -182,9 +185,11 @@ class Libp2p extends EventEmitter {
|
||||
})
|
||||
}
|
||||
|
||||
// start pubsub
|
||||
// Create pubsub if provided
|
||||
if (this._modules.pubsub) {
|
||||
this.pubsub = pubsub(this, this._modules.pubsub, this._config.pubsub)
|
||||
const Pubsub = this._modules.pubsub
|
||||
// using pubsub adapter with *DEPRECATED* handlers functionality
|
||||
this.pubsub = PubsubAdapter(Pubsub, this, this._config.pubsub)
|
||||
}
|
||||
|
||||
// Attach remaining APIs
|
||||
@ -324,7 +329,7 @@ class Libp2p extends EventEmitter {
|
||||
* @returns {Promise<Connection|*>}
|
||||
*/
|
||||
async dialProtocol (peer, protocols, options) {
|
||||
const { id, multiaddrs } = getPeer(peer, this.peerStore)
|
||||
const { id, multiaddrs } = getPeer(peer)
|
||||
let connection = this.connectionManager.get(id)
|
||||
|
||||
if (!connection) {
|
||||
@ -396,7 +401,12 @@ class Libp2p extends EventEmitter {
|
||||
* @returns {Promise<number>}
|
||||
*/
|
||||
ping (peer) {
|
||||
const { id } = getPeer(peer)
|
||||
const { id, multiaddrs } = getPeer(peer)
|
||||
|
||||
// If received multiaddr, ping it
|
||||
if (multiaddrs) {
|
||||
return ping(this, multiaddrs[0])
|
||||
}
|
||||
|
||||
return ping(this, id)
|
||||
}
|
||||
|
@@ -43,7 +43,7 @@ async function encrypt (localId, conn, remoteId) {
    throw new InvalidCryptoExchangeError('Remote did not provide its public key')
  }

  if (remoteId && !peerId.isEqual(remoteId)) {
  if (remoteId && !peerId.equals(remoteId)) {
    throw new UnexpectedPeerError()
  }

@ -5,6 +5,8 @@ require('node-forge/lib/pbe')
|
||||
const forge = require('node-forge/lib/forge')
|
||||
const { certificateForKey, findAsync } = require('./util')
|
||||
const errcode = require('err-code')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
/**
|
||||
* Cryptographic Message Syntax (aka PKCS #7)
|
||||
@ -32,15 +34,15 @@ class CMS {
|
||||
/**
|
||||
* Creates some protected data.
|
||||
*
|
||||
* The output Buffer contains the PKCS #7 message in DER.
|
||||
* The output Uint8Array contains the PKCS #7 message in DER.
|
||||
*
|
||||
* @param {string} name - The local key name.
|
||||
* @param {Buffer} plain - The data to encrypt.
|
||||
* @param {Uint8Array} plain - The data to encrypt.
|
||||
* @returns {undefined}
|
||||
*/
|
||||
async encrypt (name, plain) {
|
||||
if (!Buffer.isBuffer(plain)) {
|
||||
throw errcode(new Error('Plain data must be a Buffer'), 'ERR_INVALID_PARAMS')
|
||||
if (!(plain instanceof Uint8Array)) {
|
||||
throw errcode(new Error('Plain data must be a Uint8Array'), 'ERR_INVALID_PARAMS')
|
||||
}
|
||||
|
||||
const key = await this.keychain.findKeyByName(name)
|
||||
@ -56,7 +58,7 @@ class CMS {
|
||||
|
||||
// convert message to DER
|
||||
const der = forge.asn1.toDer(p7.toAsn1()).getBytes()
|
||||
return Buffer.from(der, 'binary')
|
||||
return uint8ArrayFromString(der, 'ascii')
|
||||
}
|
||||
|
||||
/**
|
||||
@ -65,17 +67,17 @@ class CMS {
|
||||
* The keychain must contain one of the keys used to encrypt the data. If none of the keys
|
||||
* exists, an Error is returned with the property 'missingKeys'. It is array of key ids.
|
||||
*
|
||||
* @param {Buffer} cmsData - The CMS encrypted data to decrypt.
|
||||
* @param {Uint8Array} cmsData - The CMS encrypted data to decrypt.
|
||||
* @returns {undefined}
|
||||
*/
|
||||
async decrypt (cmsData) {
|
||||
if (!Buffer.isBuffer(cmsData)) {
|
||||
if (!(cmsData instanceof Uint8Array)) {
|
||||
throw errcode(new Error('CMS data is required'), 'ERR_INVALID_PARAMS')
|
||||
}
|
||||
|
||||
let cms
|
||||
try {
|
||||
const buf = forge.util.createBuffer(cmsData.toString('binary'))
|
||||
const buf = forge.util.createBuffer(uint8ArrayToString(cmsData, 'ascii'))
|
||||
const obj = forge.asn1.fromDer(buf)
|
||||
cms = forge.pkcs7.messageFromAsn1(obj)
|
||||
} catch (err) {
|
||||
@ -115,7 +117,7 @@ class CMS {
|
||||
const pem = await this.keychain._getPrivateKey(key.name)
|
||||
const privateKey = forge.pki.decryptRsaPrivateKey(pem, this.keychain._())
|
||||
cms.decrypt(r.recipient, privateKey)
|
||||
return Buffer.from(cms.content.getBytes(), 'binary')
|
||||
return uint8ArrayFromString(cms.content.getBytes(), 'ascii')
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -7,6 +7,11 @@ const crypto = require('libp2p-crypto')
|
||||
const DS = require('interface-datastore')
|
||||
const CMS = require('./cms')
|
||||
const errcode = require('err-code')
|
||||
const { Number } = require('ipfs-utils/src/globalthis')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
require('node-forge/lib/sha512')
|
||||
|
||||
const keyPrefix = '/pkcs8/'
|
||||
const infoPrefix = '/info/'
|
||||
@ -107,7 +112,7 @@ class Keychain {
|
||||
this.opts = mergeOptions(defaultOptions, options)
|
||||
|
||||
// Enforce NIST SP 800-132
|
||||
if (!this.opts.passPhrase || this.opts.passPhrase.length < 20) {
|
||||
if (this.opts.passPhrase && this.opts.passPhrase.length < 20) {
|
||||
throw new Error('passPhrase must be least 20 characters')
|
||||
}
|
||||
if (this.opts.dek.keyLength < NIST.minKeyLength) {
|
||||
@ -120,13 +125,13 @@ class Keychain {
|
||||
throw new Error(`dek.iterationCount must be least ${NIST.minIterationCount}`)
|
||||
}
|
||||
|
||||
// Create the derived encrypting key
|
||||
const dek = crypto.pbkdf2(
|
||||
const dek = this.opts.passPhrase ? crypto.pbkdf2(
|
||||
this.opts.passPhrase,
|
||||
this.opts.dek.salt,
|
||||
this.opts.dek.iterationCount,
|
||||
this.opts.dek.keyLength,
|
||||
this.opts.dek.hash)
|
||||
this.opts.dek.hash) : ''
|
||||
|
||||
Object.defineProperty(this, '_', { value: () => dek })
|
||||
}
|
||||
|
||||
@ -152,7 +157,7 @@ class Keychain {
|
||||
static generateOptions () {
|
||||
const options = Object.assign({}, defaultOptions)
|
||||
const saltLength = Math.ceil(NIST.minSaltLength / 3) * 3 // no base64 padding
|
||||
options.dek.salt = crypto.randomBytes(saltLength).toString('base64')
|
||||
options.dek.salt = uint8ArrayToString(crypto.randomBytes(saltLength), 'base64')
|
||||
return options
|
||||
}
|
||||
|
||||
@ -171,8 +176,8 @@ class Keychain {
|
||||
*
|
||||
* @param {string} name - The local key name; cannot already exist.
|
||||
* @param {string} type - One of the key types; 'rsa'.
|
||||
* @param {int} size - The key size in bits.
|
||||
* @returns {KeyInfo}
|
||||
* @param {int} [size] - The key size in bits. Used for rsa keys only.
|
||||
* @returns {KeyInfo}
|
||||
*/
|
||||
async createKey (name, type, size) {
|
||||
const self = this
|
||||
@ -185,17 +190,13 @@ class Keychain {
|
||||
return throwDelayed(errcode(new Error(`Invalid key type '${type}'`), 'ERR_INVALID_KEY_TYPE'))
|
||||
}
|
||||
|
||||
if (!Number.isSafeInteger(size)) {
|
||||
return throwDelayed(errcode(new Error(`Invalid key size '${size}'`), 'ERR_INVALID_KEY_SIZE'))
|
||||
}
|
||||
|
||||
const dsname = DsName(name)
|
||||
const exists = await self.store.has(dsname)
|
||||
if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS'))
|
||||
|
||||
switch (type.toLowerCase()) {
|
||||
case 'rsa':
|
||||
if (size < 2048) {
|
||||
if (!Number.isSafeInteger(size) || size < 2048) {
|
||||
return throwDelayed(errcode(new Error(`Invalid RSA key size ${size}`), 'ERR_INVALID_KEY_SIZE'))
|
||||
}
|
||||
break
|
||||
@ -213,8 +214,8 @@ class Keychain {
|
||||
id: kid
|
||||
}
|
||||
const batch = self.store.batch()
|
||||
batch.put(dsname, pem)
|
||||
batch.put(DsInfoName(name), JSON.stringify(keyInfo))
|
||||
batch.put(dsname, uint8ArrayFromString(pem))
|
||||
batch.put(DsInfoName(name), uint8ArrayFromString(JSON.stringify(keyInfo)))
|
||||
|
||||
await batch.commit()
|
||||
} catch (err) {
|
||||
@ -237,7 +238,7 @@ class Keychain {
|
||||
|
||||
const info = []
|
||||
for await (const value of self.store.query(query)) {
|
||||
info.push(JSON.parse(value.value))
|
||||
info.push(JSON.parse(uint8ArrayToString(value.value)))
|
||||
}
|
||||
|
||||
return info
|
||||
@ -272,7 +273,7 @@ class Keychain {
|
||||
const dsname = DsInfoName(name)
|
||||
try {
|
||||
const res = await this.store.get(dsname)
|
||||
return JSON.parse(res.toString())
|
||||
return JSON.parse(uint8ArrayToString(res))
|
||||
} catch (err) {
|
||||
return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND'))
|
||||
}
|
||||
@ -322,15 +323,14 @@ class Keychain {
|
||||
if (exists) return throwDelayed(errcode(new Error(`Key '${newName}' already exists`), 'ERR_KEY_ALREADY_EXISTS'))
|
||||
|
||||
try {
|
||||
let res = await this.store.get(oldDsname)
|
||||
const pem = res.toString()
|
||||
res = await self.store.get(oldInfoName)
|
||||
const pem = await self.store.get(oldDsname)
|
||||
const res = await self.store.get(oldInfoName)
|
||||
|
||||
const keyInfo = JSON.parse(res.toString())
|
||||
const keyInfo = JSON.parse(uint8ArrayToString(res))
|
||||
keyInfo.name = newName
|
||||
const batch = self.store.batch()
|
||||
batch.put(newDsname, pem)
|
||||
batch.put(newInfoName, JSON.stringify(keyInfo))
|
||||
batch.put(newInfoName, uint8ArrayFromString(JSON.stringify(keyInfo)))
|
||||
batch.delete(oldDsname)
|
||||
batch.delete(oldInfoName)
|
||||
await batch.commit()
|
||||
@ -358,7 +358,7 @@ class Keychain {
|
||||
const dsname = DsName(name)
|
||||
try {
|
||||
const res = await this.store.get(dsname)
|
||||
const pem = res.toString()
|
||||
const pem = uint8ArrayToString(res)
|
||||
const privateKey = await crypto.keys.import(pem, this._())
|
||||
return privateKey.export(password)
|
||||
} catch (err) {
|
||||
@ -406,8 +406,8 @@ class Keychain {
|
||||
id: kid
|
||||
}
|
||||
const batch = self.store.batch()
|
||||
batch.put(dsname, pem)
|
||||
batch.put(DsInfoName(name), JSON.stringify(keyInfo))
|
||||
batch.put(dsname, uint8ArrayFromString(pem))
|
||||
batch.put(DsInfoName(name), uint8ArrayFromString(JSON.stringify(keyInfo)))
|
||||
await batch.commit()
|
||||
|
||||
return keyInfo
|
||||
@ -435,8 +435,8 @@ class Keychain {
|
||||
id: kid
|
||||
}
|
||||
const batch = self.store.batch()
|
||||
batch.put(dsname, pem)
|
||||
batch.put(DsInfoName(name), JSON.stringify(keyInfo))
|
||||
batch.put(dsname, uint8ArrayFromString(pem))
|
||||
batch.put(DsInfoName(name), uint8ArrayFromString(JSON.stringify(keyInfo)))
|
||||
await batch.commit()
|
||||
return keyInfo
|
||||
} catch (err) {
|
||||
@ -459,7 +459,7 @@ class Keychain {
|
||||
try {
|
||||
const dsname = DsName(name)
|
||||
const res = await this.store.get(dsname)
|
||||
return res.toString()
|
||||
return uint8ArrayToString(res)
|
||||
} catch (err) {
|
||||
return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND'))
|
||||
}
|
||||
|
@@ -8,13 +8,13 @@ exports = module.exports
/**
 * Gets a self-signed X.509 certificate for the key.
 *
 * The output Buffer contains the PKCS #7 message in DER.
 * The output Uint8Array contains the PKCS #7 message in DER.
 *
 * TODO: move to libp2p-crypto package
 *
 * @param {KeyInfo} key - The id and name of the key
 * @param {RsaPrivateKey} privateKey - The naked key
 * @returns {undefined}
 * @returns {Uint8Array}
 */
exports.certificateForKey = (key, privateKey) => {
  const publicKey = pki.setRsaPublicKey(privateKey.n, privateKey.e)

@@ -75,9 +75,9 @@ A `peerId.toB58String()` identifier mapping to a `Set` of protocol identifier st

#### Metadata Book

The `metadataBook` keeps track of the known metadata of a peer. Its metadata is stored in a key value fashion, where a key identifier (`string`) represents a metadata value (`Buffer`).
The `metadataBook` keeps track of the known metadata of a peer. Its metadata is stored in a key value fashion, where a key identifier (`string`) represents a metadata value (`Uint8Array`).

`Map<string, Map<string, Buffer>>`
`Map<string, Map<string, Uint8Array>>`

A `peerId.toB58String()` identifier mapping to the peer metadata Map.

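Since metadata values are now `Uint8Array`s, here is a minimal read/write sketch for the metadata book documented above, assuming a running node `libp2p` and a known `peerId`; the `'location'` key and its value are purely illustrative:

```js
const uint8ArrayFromString = require('uint8arrays/from-string')
const uint8ArrayToString = require('uint8arrays/to-string')

// values must be Uint8Arrays (Buffers are no longer assumed)
libp2p.peerStore.metadataBook.set(peerId, 'location', uint8ArrayFromString('saturn'))

// getValue returns the stored Uint8Array, or undefined if the key is unknown
const value = libp2p.peerStore.metadataBook.getValue(peerId, 'location')
console.log(value && uint8ArrayToString(value)) // 'saturn'
```
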
@ -9,10 +9,12 @@ const multiaddr = require('multiaddr')
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
const Book = require('./book')
|
||||
const PeerRecord = require('../record/peer-record')
|
||||
|
||||
const {
|
||||
codes: { ERR_INVALID_PARAMETERS }
|
||||
} = require('../errors')
|
||||
const Envelope = require('../record/envelope')
|
||||
|
||||
/**
|
||||
* The AddressBook is responsible for keeping the known multiaddrs
|
||||
@ -23,8 +25,23 @@ class AddressBook extends Book {
|
||||
* Address object
|
||||
* @typedef {Object} Address
|
||||
* @property {Multiaddr} multiaddr peer multiaddr.
|
||||
* @property {boolean} isCertified obtained from a signed peer record.
|
||||
*/
|
||||
|
||||
/**
|
||||
* CertifiedRecord object
|
||||
* @typedef {Object} CertifiedRecord
|
||||
* @property {Uint8Array} raw raw envelope.
|
||||
* @property {number} seqNumber seq counter.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Entry object for the addressBook
|
||||
* @typedef {Object} Entry
|
||||
* @property {Array<Address>} addresses peer Addresses.
|
||||
* @property {CertifiedRecord} record certified peer record.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
* @param {PeerStore} peerStore
|
||||
@ -39,18 +56,111 @@ class AddressBook extends Book {
|
||||
peerStore,
|
||||
eventName: 'change:multiaddrs',
|
||||
eventProperty: 'multiaddrs',
|
||||
eventTransformer: (data) => data.map((address) => address.multiaddr)
|
||||
eventTransformer: (data) => {
|
||||
if (!data.addresses) {
|
||||
return []
|
||||
}
|
||||
return data.addresses.map((address) => address.multiaddr)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* Map known peers to their known Addresses.
|
||||
* @type {Map<string, Array<Address>>}
|
||||
* Map known peers to their known Address Entries.
|
||||
* @type {Map<string, Array<Entry>>}
|
||||
*/
|
||||
this.data = new Map()
|
||||
}
|
||||
|
||||
/**
|
||||
* ConsumePeerRecord adds addresses from a signed peer record contained in a record envelope.
|
||||
* This will return a boolean that indicates if the record was successfully processed and added
|
||||
* into the AddressBook.
|
||||
* @param {Envelope} envelope
|
||||
* @return {boolean}
|
||||
*/
|
||||
consumePeerRecord (envelope) {
|
||||
let peerRecord
|
||||
try {
|
||||
peerRecord = PeerRecord.createFromProtobuf(envelope.payload)
|
||||
} catch (err) {
|
||||
log.error('invalid peer record received')
|
||||
return false
|
||||
}
|
||||
|
||||
// Verify peerId
|
||||
if (!peerRecord.peerId.equals(envelope.peerId)) {
|
||||
log('signing key does not match PeerId in the PeerRecord')
|
||||
return false
|
||||
}
|
||||
|
||||
// ensure the record has multiaddrs
|
||||
if (!peerRecord.multiaddrs || !peerRecord.multiaddrs.length) {
|
||||
return false
|
||||
}
|
||||
|
||||
const peerId = peerRecord.peerId
|
||||
const id = peerId.toB58String()
|
||||
const entry = this.data.get(id) || {}
|
||||
const storedRecord = entry.record
|
||||
|
||||
// ensure seq is greater than, or equal to, the last received
|
||||
if (storedRecord && storedRecord.seqNumber >= peerRecord.seqNumber) {
|
||||
return false
|
||||
}
|
||||
|
||||
const addresses = this._toAddresses(peerRecord.multiaddrs, true)
|
||||
|
||||
// Replace unsigned addresses by the new ones from the record
|
||||
// TODO: Once we have ttls for the addresses, we should merge these in.
|
||||
this._setData(peerId, {
|
||||
addresses,
|
||||
record: {
|
||||
raw: envelope.marshal(),
|
||||
seqNumber: peerRecord.seqNumber
|
||||
}
|
||||
})
|
||||
log(`stored provided peer record for ${id}`)
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the raw Envelope for a peer. Returns
|
||||
* undefined if no Envelope is found.
|
||||
* @param {PeerId} peerId
|
||||
* @return {Uint8Array|undefined}
|
||||
*/
|
||||
getRawEnvelope (peerId) {
|
||||
const entry = this.data.get(peerId.toB58String())
|
||||
|
||||
if (!entry || !entry.record || !entry.record.raw) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
return entry.record.raw
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an Envelope containing a PeerRecord for the given peer.
|
||||
* Returns undefined if no record exists.
|
||||
* @param {PeerId} peerId
|
||||
* @return {Promise<Envelope|void>}
|
||||
*/
|
||||
getPeerRecord (peerId) {
|
||||
const raw = this.getRawEnvelope(peerId)
|
||||
|
||||
if (!raw) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
return Envelope.createFromProtobuf(raw)
|
||||
}
|
||||
|
||||
/**
|
||||
* Set known multiaddrs of a provided peer.
|
||||
* This will replace previously stored multiaddrs, if available.
|
||||
* Replacing stored multiaddrs might result in losing obtained certified addresses.
|
||||
* If you are not sure, it's recommended to use `add` instead.
|
||||
* @override
|
||||
* @param {PeerId} peerId
|
||||
* @param {Array<Multiaddr>} multiaddrs
|
||||
@ -64,7 +174,8 @@ class AddressBook extends Book {
|
||||
|
||||
const addresses = this._toAddresses(multiaddrs)
|
||||
const id = peerId.toB58String()
|
||||
const rec = this.data.get(id)
|
||||
const entry = this.data.get(id) || {}
|
||||
const rec = entry.addresses
|
||||
|
||||
// Not replace multiaddrs
|
||||
if (!addresses.length) {
|
||||
@ -73,7 +184,7 @@ class AddressBook extends Book {
|
||||
|
||||
// Already knows the peer
|
||||
if (rec && rec.length === addresses.length) {
|
||||
const intersection = rec.filter((mi) => addresses.some((newMi) => mi.multiaddr.equals(newMi.multiaddr)))
|
||||
const intersection = rec.filter((addr) => addresses.some((newAddr) => addr.multiaddr.equals(newAddr.multiaddr)))
|
||||
|
||||
// Are new addresses equal to the old ones?
|
||||
// If yes, no changes needed!
|
||||
@ -83,7 +194,10 @@ class AddressBook extends Book {
|
||||
}
|
||||
}
|
||||
|
||||
this._setData(peerId, addresses)
|
||||
this._setData(peerId, {
|
||||
addresses,
|
||||
record: entry.record
|
||||
})
|
||||
log(`stored provided multiaddrs for ${id}`)
|
||||
|
||||
// Notify the existance of a new peer
|
||||
@ -109,12 +223,14 @@ class AddressBook extends Book {
|
||||
|
||||
const addresses = this._toAddresses(multiaddrs)
|
||||
const id = peerId.toB58String()
|
||||
const rec = this.data.get(id)
|
||||
|
||||
const entry = this.data.get(id) || {}
|
||||
const rec = entry.addresses || []
|
||||
|
||||
// Add recorded uniquely to the new array (Union)
|
||||
rec && rec.forEach((mi) => {
|
||||
if (!addresses.find(r => r.multiaddr.equals(mi.multiaddr))) {
|
||||
addresses.push(mi)
|
||||
rec.forEach((addr) => {
|
||||
if (!addresses.find(r => r.multiaddr.equals(addr.multiaddr))) {
|
||||
addresses.push(addr)
|
||||
}
|
||||
})
|
||||
|
||||
@ -125,25 +241,45 @@ class AddressBook extends Book {
|
||||
return this
|
||||
}
|
||||
|
||||
this._setData(peerId, addresses)
|
||||
this._setData(peerId, {
|
||||
addresses,
|
||||
record: entry.record
|
||||
})
|
||||
|
||||
log(`added provided multiaddrs for ${id}`)
|
||||
|
||||
// Notify the existance of a new peer
|
||||
if (!rec) {
|
||||
if (!entry.addresses) {
|
||||
this._ps.emit('peer', peerId)
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the known data of a provided peer.
|
||||
* @override
|
||||
* @param {PeerId} peerId
|
||||
* @returns {Array<data>}
|
||||
*/
|
||||
get (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const entry = this.data.get(peerId.toB58String())
|
||||
|
||||
return entry && entry.addresses ? [...entry.addresses] : undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms received multiaddrs into Address.
|
||||
* @private
|
||||
* @param {Array<Multiaddr>} multiaddrs
|
||||
* @param {boolean} [isCertified]
|
||||
* @returns {Array<Address>}
|
||||
*/
|
||||
_toAddresses (multiaddrs) {
|
||||
_toAddresses (multiaddrs, isCertified = false) {
|
||||
if (!multiaddrs) {
|
||||
log.error('multiaddrs must be provided to store data')
|
||||
throw errcode(new Error('multiaddrs must be provided'), ERR_INVALID_PARAMETERS)
|
||||
@ -158,7 +294,8 @@ class AddressBook extends Book {
|
||||
}
|
||||
|
||||
addresses.push({
|
||||
multiaddr: addr
|
||||
multiaddr: addr,
|
||||
isCertified
|
||||
})
|
||||
})
|
||||
|
||||
@ -168,21 +305,22 @@ class AddressBook extends Book {
|
||||
/**
|
||||
* Get the known multiaddrs for a given peer. All returned multiaddrs
|
||||
* will include the encapsulated `PeerId` of the peer.
|
||||
* Returns `undefined` if there are no known multiaddrs for the given peer.
|
||||
* @param {PeerId} peerId
|
||||
* @returns {Array<Multiaddr>}
|
||||
* @returns {Array<Multiaddr>|undefined}
|
||||
*/
|
||||
getMultiaddrsForPeer (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
const record = this.data.get(peerId.toB58String())
|
||||
const entry = this.data.get(peerId.toB58String())
|
||||
|
||||
if (!record) {
|
||||
if (!entry || !entry.addresses) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
return record.map((address) => {
|
||||
return entry.addresses.map((address) => {
|
||||
const multiaddr = address.multiaddr
|
||||
|
||||
const idString = multiaddr.getPeerId()
|
||||
|
@@ -77,8 +77,9 @@ class Book {

  /**
   * Get the known data of a provided peer.
   * Returns `undefined` if there is no available data for the given peer.
   * @param {PeerId} peerId
   * @returns {Array<Data>}
   * @returns {Array<Data>|undefined}
   */
  get (peerId) {
    if (!PeerId.isPeerId(peerId)) {

@ -32,14 +32,17 @@ class PeerStore extends EventEmitter {
|
||||
* @property {PeerId} id peer's peer-id instance.
|
||||
* @property {Array<Address>} addresses peer's addresses containing its multiaddrs and metadata.
|
||||
* @property {Array<string>} protocols peer's supported protocols.
|
||||
* @property {Map<string, Buffer>} metadata peer's metadata map.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
*/
|
||||
constructor () {
|
||||
constructor ({ peerId }) {
|
||||
super()
|
||||
|
||||
this._peerId = peerId
|
||||
|
||||
/**
|
||||
* AddressBook containing a map of peerIdStr to Address.
|
||||
*/
|
||||
@ -72,7 +75,7 @@ class PeerStore extends EventEmitter {
|
||||
stop () {}
|
||||
|
||||
/**
|
||||
* Get all the stored information of every peer.
|
||||
* Get all the stored information of every peer known.
|
||||
* @returns {Map<string, Peer>}
|
||||
*/
|
||||
get peers () {
|
||||
@ -83,6 +86,9 @@ class PeerStore extends EventEmitter {
|
||||
...this.metadataBook.data.keys()
|
||||
])
|
||||
|
||||
// Remove self peer if present
|
||||
this._peerId && storedPeers.delete(this._peerId.toB58String())
|
||||
|
||||
const peersData = new Map()
|
||||
storedPeers.forEach((idStr) => {
|
||||
peersData.set(idStr, this.get(PeerId.createFromCID(idStr)))
|
||||
|
@ -4,8 +4,7 @@ const errcode = require('err-code')
|
||||
const debug = require('debug')
|
||||
const log = debug('libp2p:peer-store:proto-book')
|
||||
log.error = debug('libp2p:peer-store:proto-book:error')
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const uint8ArrayEquals = require('uint8arrays/equals')
|
||||
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
@ -38,7 +37,7 @@ class MetadataBook extends Book {
|
||||
|
||||
/**
|
||||
* Map known peers to their known protocols.
|
||||
* @type {Map<string, Map<string, Buffer>>}
|
||||
* @type {Map<string, Map<string, Uint8Array>>}
|
||||
*/
|
||||
this.data = new Map()
|
||||
}
|
||||
@ -48,7 +47,7 @@ class MetadataBook extends Book {
|
||||
* @override
|
||||
* @param {PeerId} peerId
|
||||
* @param {string} key metadata key
|
||||
* @param {Buffer} value metadata value
|
||||
* @param {Uint8Array} value metadata value
|
||||
* @returns {ProtoBook}
|
||||
*/
|
||||
set (peerId, key, value) {
|
||||
@ -57,7 +56,7 @@ class MetadataBook extends Book {
|
||||
throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
|
||||
if (typeof key !== 'string' || !Buffer.isBuffer(value)) {
|
||||
if (typeof key !== 'string' || !(value instanceof Uint8Array)) {
|
||||
log.error('valid key and value must be provided to store data')
|
||||
throw errcode(new Error('valid key and value must be provided'), ERR_INVALID_PARAMETERS)
|
||||
}
|
||||
@ -77,7 +76,7 @@ class MetadataBook extends Book {
|
||||
const recMap = rec.get(key)
|
||||
|
||||
// Already exists and is equal
|
||||
if (recMap && value.equals(recMap)) {
|
||||
if (recMap && uint8ArrayEquals(value, recMap)) {
|
||||
log(`the metadata provided to store is equal to the already stored for ${id} on ${key}`)
|
||||
return
|
||||
}
|
||||
@ -91,7 +90,7 @@ class MetadataBook extends Book {
|
||||
/**
|
||||
* Get the known data of a provided peer.
|
||||
* @param {PeerId} peerId
|
||||
* @returns {Map<string, Buffer>}
|
||||
* @returns {Map<string, Uint8Array>}
|
||||
*/
|
||||
get (peerId) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
@ -105,7 +104,7 @@ class MetadataBook extends Book {
|
||||
* Get specific metadata value, if it exists
|
||||
* @param {PeerId} peerId
|
||||
* @param {string} key
|
||||
* @returns {Buffer}
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
getValue (peerId, key) {
|
||||
if (!PeerId.isPeerId(peerId)) {
|
||||
|
@ -28,11 +28,12 @@ class PersistentPeerStore extends PeerStore {
|
||||
/**
|
||||
* @constructor
|
||||
* @param {Object} properties
|
||||
* @param {PeerId} properties.peerId
|
||||
* @param {Datastore} properties.datastore Datastore to persist data.
|
||||
* @param {number} [properties.threshold = 5] Number of dirty peers allowed before commit data.
|
||||
*/
|
||||
constructor ({ datastore, threshold = 5 }) {
|
||||
super()
|
||||
constructor ({ peerId, datastore, threshold = 5 }) {
|
||||
super({ peerId })
|
||||
|
||||
/**
|
||||
* Backend datastore used to persist data.
|
||||
@ -177,19 +178,24 @@ class PersistentPeerStore extends PeerStore {
|
||||
const b32key = peerId.toString()
|
||||
const key = new Key(`${NAMESPACE_ADDRESS}${b32key}`)
|
||||
|
||||
const addresses = this.addressBook.get(peerId)
|
||||
const entry = this.addressBook.data.get(peerId.toB58String())
|
||||
|
||||
try {
|
||||
// Deleted from the book
|
||||
if (!addresses) {
|
||||
if (!entry) {
|
||||
batch.delete(key)
|
||||
return
|
||||
}
|
||||
|
||||
const encodedData = Addresses.encode({
|
||||
addrs: addresses.map((address) => ({
|
||||
multiaddr: address.multiaddr.buffer
|
||||
}))
|
||||
addrs: entry.addresses.map((address) => ({
|
||||
multiaddr: address.multiaddr.bytes,
|
||||
isCertified: address.isCertified
|
||||
})),
|
||||
certified_record: entry.record ? {
|
||||
seq: entry.record.seqNumber,
|
||||
raw: entry.record.raw
|
||||
} : undefined
|
||||
})
|
||||
|
||||
batch.put(key, encodedData)
|
||||
@ -281,7 +287,7 @@ class PersistentPeerStore extends PeerStore {
|
||||
* @private
|
||||
* @param {Object} params
|
||||
* @param {Key} params.key datastore key
|
||||
* @param {Buffer} params.value datastore value stored
|
||||
* @param {Uint8Array} params.value datastore value stored
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async _processDatastoreEntry ({ key, value }) {
|
||||
@ -296,9 +302,16 @@ class PersistentPeerStore extends PeerStore {
|
||||
|
||||
this.addressBook._setData(
|
||||
peerId,
|
||||
decoded.addrs.map((address) => ({
|
||||
multiaddr: multiaddr(address.multiaddr)
|
||||
})),
|
||||
{
|
||||
addresses: decoded.addrs.map((address) => ({
|
||||
multiaddr: multiaddr(address.multiaddr),
|
||||
isCertified: Boolean(address.isCertified)
|
||||
})),
|
||||
record: decoded.certified_record ? {
|
||||
raw: decoded.certified_record.raw,
|
||||
seqNumber: decoded.certified_record.seq
|
||||
} : undefined
|
||||
},
|
||||
{ emit: false })
|
||||
break
|
||||
case 'keys':
|
||||
|
@@ -4,11 +4,29 @@ const protons = require('protons')

const message = `
message Addresses {
  // Address represents a single multiaddr.
  message Address {
    required bytes multiaddr = 1;

    // Flag to indicate if the address comes from a certified source.
    optional bool isCertified = 2;
  }

  // CertifiedRecord contains a serialized signed PeerRecord used to
  // populate the signedAddrs list.
  message CertifiedRecord {
    // The Seq counter from the signed PeerRecord envelope
    uint64 seq = 1;

    // The serialized bytes of the SignedEnvelope containing the PeerRecord.
    bytes raw = 2;
  }

  // The known multiaddrs.
  repeated Address addrs = 1;

  // The most recently received signed PeerRecord.
  CertifiedRecord certified_record = 2;
}
`

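A small sketch of how this schema round-trips with `protons`, mirroring the `Addresses.encode` call made by the persistent peer store; `someMultiaddr` and `rawEnvelope` are hypothetical placeholders for a peer multiaddr and a marshaled signed envelope:

```js
const protons = require('protons')
const { Addresses } = protons(message) // `message` is the schema string defined above

// encode one certified address together with the signed record it came from
const encoded = Addresses.encode({
  addrs: [{ multiaddr: someMultiaddr.bytes, isCertified: true }],
  certified_record: { seq: 1, raw: rawEnvelope }
})

// decode restores the same structure
const decoded = Addresses.decode(encoded)
```
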
@@ -15,11 +15,11 @@ const { PROTOCOL, PING_LENGTH } = require('./constants')
/**
 * Ping a given peer and wait for its response, getting the operation latency.
 * @param {Libp2p} node
 * @param {PeerId} peer
 * @param {PeerId|multiaddr} peer
 * @returns {Promise<Number>}
 */
async function ping (node, peer) {
  log('dialing %s to %s', PROTOCOL, peer.toB58String())
  log('dialing %s to %s', PROTOCOL, peer.toB58String ? peer.toB58String() : peer)

  const { stream } = await node.dialProtocol(peer, PROTOCOL)

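With this change, ping accepts either a `PeerId` or a multiaddr. A minimal usage sketch, assuming a started node `libp2p` inside an async context; the address below is only a placeholder:

```js
const multiaddr = require('multiaddr')

// ping by multiaddr (or pass a PeerId instead)
const ma = multiaddr('/ip4/127.0.0.1/tcp/4001') // placeholder address
const latency = await libp2p.ping(ma)
console.log(`round trip took ${latency} ms`)
```
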
@@ -64,7 +64,7 @@ node -e "require('libp2p/src/pnet').generate(process.stdout)" > swarm.key

```js
const writeKey = require('libp2p/src/pnet').generate
const swarmKey = Buffer.alloc(95)
const swarmKey = new Uint8Array(95)
writeKey(swarmKey)
fs.writeFileSync('swarm.key', swarmKey)
```

@ -1,10 +1,11 @@
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const debug = require('debug')
|
||||
const Errors = require('./errors')
|
||||
const xsalsa20 = require('xsalsa20')
|
||||
const KEY_LENGTH = require('./key-generator').KEY_LENGTH
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
const log = debug('libp2p:pnet')
|
||||
log.trace = debug('libp2p:pnet:trace')
|
||||
@ -13,15 +14,15 @@ log.error = debug('libp2p:pnet:err')
|
||||
/**
|
||||
* Creates a stream iterable to encrypt messages in a private network
|
||||
*
|
||||
* @param {Buffer} nonce The nonce to use in encryption
|
||||
* @param {Buffer} psk The private shared key to use in encryption
|
||||
* @param {Uint8Array} nonce The nonce to use in encryption
|
||||
* @param {Uint8Array} psk The private shared key to use in encryption
|
||||
* @returns {*} a through iterable
|
||||
*/
|
||||
module.exports.createBoxStream = (nonce, psk) => {
|
||||
const xor = xsalsa20(nonce, psk)
|
||||
return (source) => (async function * () {
|
||||
for await (const chunk of source) {
|
||||
yield Buffer.from(xor.update(chunk.slice()))
|
||||
yield Uint8Array.from(xor.update(chunk.slice()))
|
||||
}
|
||||
})()
|
||||
}
|
||||
@ -29,8 +30,8 @@ module.exports.createBoxStream = (nonce, psk) => {
|
||||
/**
|
||||
* Creates a stream iterable to decrypt messages in a private network
|
||||
*
|
||||
* @param {Buffer} nonce The nonce of the remote peer
|
||||
* @param {Buffer} psk The private shared key to use in decryption
|
||||
* @param {Uint8Array} nonce The nonce of the remote peer
|
||||
* @param {Uint8Array} psk The private shared key to use in decryption
|
||||
* @returns {*} a through iterable
|
||||
*/
|
||||
module.exports.createUnboxStream = (nonce, psk) => {
|
||||
@ -39,15 +40,15 @@ module.exports.createUnboxStream = (nonce, psk) => {
|
||||
log.trace('Decryption enabled')
|
||||
|
||||
for await (const chunk of source) {
|
||||
yield Buffer.from(xor.update(chunk.slice()))
|
||||
yield Uint8Array.from(xor.update(chunk.slice()))
|
||||
}
|
||||
})()
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode the version 1 psk from the given Buffer
|
||||
* Decode the version 1 psk from the given Uint8Array
|
||||
*
|
||||
* @param {Buffer} pskBuffer
|
||||
* @param {Uint8Array} pskBuffer
|
||||
* @throws {INVALID_PSK}
|
||||
* @returns {Object} The PSK metadata (tag, codecName, psk)
|
||||
*/
|
||||
@ -58,10 +59,10 @@ module.exports.decodeV1PSK = (pskBuffer) => {
|
||||
// from the buffer line by line to evaluate the next line
|
||||
// programmatically instead of making assumptions about the
|
||||
// encodings of each line.
|
||||
const metadata = pskBuffer.toString().split(/(?:\r\n|\r|\n)/g)
|
||||
const metadata = uint8ArrayToString(pskBuffer).split(/(?:\r\n|\r|\n)/g)
|
||||
const pskTag = metadata.shift()
|
||||
const codec = metadata.shift()
|
||||
const psk = Buffer.from(metadata.shift(), 'hex')
|
||||
const psk = uint8ArrayFromString(metadata.shift(), 'base16')
|
||||
|
||||
if (psk.byteLength !== KEY_LENGTH) {
|
||||
throw new Error(Errors.INVALID_PSK)
|
||||
|
@@ -25,7 +25,7 @@ log.error = debug('libp2p:pnet:err')
 */
class Protector {
  /**
   * @param {Buffer} keyBuffer The private shared key buffer
   * @param {Uint8Array} keyBuffer The private shared key buffer
   * @constructor
   */
  constructor (keyBuffer) {

@@ -2,15 +2,19 @@

const crypto = require('libp2p-crypto')
const KEY_LENGTH = 32
const uint8ArrayToString = require('uint8arrays/to-string')
const uint8ArrayFromString = require('uint8arrays/from-string')

/**
 * Generates a PSK that can be used in a libp2p-pnet private network
 * @param {Writer} writer An object containing a `write` method
 * @param {Uint8Array} bytes An object to write the psk into
 * @returns {void}
 */
function generate (writer) {
  const psk = crypto.randomBytes(KEY_LENGTH).toString('hex')
  writer.write('/key/swarm/psk/1.0.0/\n/base16/\n' + psk)
function generate (bytes) {
  const psk = uint8ArrayToString(crypto.randomBytes(KEY_LENGTH), 'base16')
  const key = uint8ArrayFromString('/key/swarm/psk/1.0.0/\n/base16/\n' + psk)

  bytes.set(key)
}

module.exports = generate

40
src/pubsub-adapter.js
Normal file
@@ -0,0 +1,40 @@
'use strict'

// Pubsub adapter to keep API with handlers while not removed.
module.exports = (PubsubRouter, libp2p, options) => {
  class Pubsub extends PubsubRouter {
    /**
     * Subscribes to a given topic.
     * @override
     * @param {string} topic
     * @param {function(msg: InMessage)} [handler]
     * @returns {void}
     */
    subscribe (topic, handler) {
      // Bind provided handler
      handler && this.on(topic, handler)
      super.subscribe(topic)
    }

    /**
     * Unsubscribe from the given topic.
     * @override
     * @param {string} topic
     * @param {function(msg: InMessage)} [handler]
     * @returns {void}
     */
    unsubscribe (topic, handler) {
      if (!handler) {
        this.removeAllListeners(topic)
      } else {
        this.removeListener(topic, handler)
      }

      if (this.listenerCount(topic) === 0) {
        super.unsubscribe(topic)
      }
    }
  }

  return new Pubsub(libp2p, options)
}
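The adapter keeps the deprecated handler-based `subscribe`/`unsubscribe` signature working on top of the new pubsub interface, where per-topic handlers are plain event listeners. A minimal sketch of both call styles, assuming a node configured with a pubsub router (e.g. gossipsub); the topic name is illustrative:

```js
const topic = 'fruit-of-the-week'
const handler = (msg) => console.log(msg.data)

// deprecated style, still supported through the adapter
libp2p.pubsub.subscribe(topic, handler)
libp2p.pubsub.unsubscribe(topic, handler)

// new style: handlers are event listeners, subscribe takes only the topic
libp2p.pubsub.on(topic, handler)
libp2p.pubsub.subscribe(topic)
```
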
105
src/pubsub.js
@ -1,105 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const errCode = require('err-code')
|
||||
const { messages, codes } = require('./errors')
|
||||
|
||||
module.exports = (node, Pubsub, config) => {
|
||||
const pubsub = new Pubsub(node.peerId, node.registrar, config)
|
||||
|
||||
return {
|
||||
/**
|
||||
* Subscribe the given handler to a pubsub topic
|
||||
* @param {string} topic
|
||||
* @param {function} handler The handler to subscribe
|
||||
* @returns {void}
|
||||
*/
|
||||
subscribe: (topic, handler) => {
|
||||
if (!node.isStarted() && !pubsub.started) {
|
||||
throw errCode(new Error(messages.NOT_STARTED_YET), codes.PUBSUB_NOT_STARTED)
|
||||
}
|
||||
|
||||
if (pubsub.listenerCount(topic) === 0) {
|
||||
pubsub.subscribe(topic)
|
||||
}
|
||||
|
||||
pubsub.on(topic, handler)
|
||||
},
|
||||
|
||||
/**
|
||||
* Unsubscribes from a pubsub topic
|
||||
* @param {string} topic
|
||||
* @param {function} [handler] The handler to unsubscribe from
|
||||
*/
|
||||
unsubscribe: (topic, handler) => {
|
||||
if (!node.isStarted() && !pubsub.started) {
|
||||
throw errCode(new Error(messages.NOT_STARTED_YET), codes.PUBSUB_NOT_STARTED)
|
||||
}
|
||||
|
||||
if (!handler) {
|
||||
pubsub.removeAllListeners(topic)
|
||||
} else {
|
||||
pubsub.removeListener(topic, handler)
|
||||
}
|
||||
|
||||
if (pubsub.listenerCount(topic) === 0) {
|
||||
pubsub.unsubscribe(topic)
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Publish messages to the given topics.
|
||||
* @param {Array<string>|string} topic
|
||||
* @param {Buffer} data
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
publish: (topic, data) => {
|
||||
if (!node.isStarted() && !pubsub.started) {
|
||||
throw errCode(new Error(messages.NOT_STARTED_YET), codes.PUBSUB_NOT_STARTED)
|
||||
}
|
||||
|
||||
try {
|
||||
data = Buffer.from(data)
|
||||
} catch (err) {
|
||||
throw errCode(new Error('data must be convertible to a Buffer'), 'ERR_DATA_IS_NOT_VALID')
|
||||
}
|
||||
|
||||
return pubsub.publish(topic, data)
|
||||
},
|
||||
|
||||
/**
|
||||
* Get a list of topics the node is subscribed to.
|
||||
* @returns {Array<String>} topics
|
||||
*/
|
||||
getTopics: () => {
|
||||
if (!node.isStarted() && !pubsub.started) {
|
||||
throw errCode(new Error(messages.NOT_STARTED_YET), codes.PUBSUB_NOT_STARTED)
|
||||
}
|
||||
|
||||
return pubsub.getTopics()
|
||||
},
|
||||
|
||||
/**
|
||||
* Get a list of the peer-ids that are subscribed to one topic.
|
||||
* @param {string} topic
|
||||
* @returns {Array<string>}
|
||||
*/
|
||||
getSubscribers: (topic) => {
|
||||
if (!node.isStarted() && !pubsub.started) {
|
||||
throw errCode(new Error(messages.NOT_STARTED_YET), codes.PUBSUB_NOT_STARTED)
|
||||
}
|
||||
|
||||
return pubsub.getSubscribers(topic)
|
||||
},
|
||||
|
||||
setMaxListeners (n) {
|
||||
return pubsub.setMaxListeners(n)
|
||||
},
|
||||
|
||||
_pubsub: pubsub,
|
||||
|
||||
start: () => pubsub.start(),
|
||||
|
||||
stop: () => pubsub.stop()
|
||||
}
|
||||
}
|
130
src/record/README.md
Normal file
@ -0,0 +1,130 @@
# Libp2p Records

Libp2p nodes need to store data in a public location (e.g. a DHT), or rely on potentially untrustworthy intermediaries to relay information over their lifetime. Accordingly, libp2p nodes need to be able to verify that the data came from a specific peer and that it hasn't been tampered with.

## Envelope

Libp2p provides an all-purpose data container called **envelope**. It was created to enable the distribution of verifiable records, which we can prove originated from the addressed peer itself. The envelope includes a signature of the data, so that its authenticity is verified.

This envelope stores a marshaled record implementing the [interface-record](https://github.com/libp2p/js-libp2p-interfaces/tree/master/src/record). These Records are designed to be serialized to bytes and placed inside of the envelopes before being shared with other peers.

You can read further about the envelope in [libp2p/specs#217](https://github.com/libp2p/specs/pull/217).

### Usage

- create an envelope with an instance of an [interface-record](https://github.com/libp2p/js-libp2p-interfaces/tree/master/src/record) implementation and prepare it for being exchanged:

```js
// interface-record implementation example with the "libp2p-example" namespace
const Record = require('libp2p-interfaces/src/record')
const fromString = require('uint8arrays/from-string')

class ExampleRecord extends Record {
  constructor () {
    super('libp2p-example', fromString('0302', 'hex'))
  }

  marshal () {}

  equals (other) {}
}

ExampleRecord.createFromProtobuf = () => {}
```

```js
const Envelope = require('libp2p/src/record/envelope')
const ExampleRecord = require('./example-record')

const rec = new ExampleRecord()
const e = await Envelope.seal(rec, peerId)
const wireData = e.marshal()
```

- consume a received envelope (`wireData`) and transform it back to a record:

```js
const Envelope = require('libp2p/src/record/envelope')
const ExampleRecord = require('./example-record')

const domain = 'libp2p-example'
let e

try {
  e = await Envelope.openAndCertify(wireData, domain)
} catch (err) {}

const rec = ExampleRecord.createFromProtobuf(e.payload)
```

## Peer Record

All libp2p nodes keep a `PeerStore` that, among other information, stores a set of known addresses for each peer, which can come from a variety of sources.

Libp2p peer records were created to enable the distribution of verifiable address records, which we can prove originated from the addressed peer itself. With such guarantees, libp2p is able to prioritize addresses based on their authenticity, with the most strict strategy being to only dial certified addresses (no strategies have been implemented at the time of writing).

A peer record contains the peer's publicly reachable listen addresses, and may be extended in the future to contain additional metadata relevant to routing. It also contains a `seqNumber` field, a timestamp per the spec, so that we can verify the most recent record.

You can read further about the Peer Record in [libp2p/specs#217](https://github.com/libp2p/specs/pull/217).

### Usage

- create a new Peer Record

```js
const PeerRecord = require('libp2p/src/record/peer-record')

const pr = new PeerRecord({
  peerId: node.peerId,
  multiaddrs: node.multiaddrs
})
```

- create a Peer Record from a protobuf

```js
const PeerRecord = require('libp2p/src/record/peer-record')

const pr = PeerRecord.createFromProtobuf(data)
```

### Libp2p Flows

#### Self Record

Once a libp2p node has started and is listening on a set of multiaddrs, its own peer record can be created.

The identify service is responsible for creating the self record when the identify protocol kicks in for the first time. This record will be stored for future needs of the identify protocol when connecting with other peers.

#### Self record Updates

**_NOT_YET_IMPLEMENTED_**

While creating peer records is fairly trivial, addresses are not static and might be modified at arbitrary times. This can happen via an Address Manager API, or even through AutoRelay/AutoNAT.

When a libp2p node changes its listen addresses, the identify service will be informed. Once that happens, the identify service creates a new self record and stores it. With the new record, the identify push/delta protocol will be used to communicate this change to the connected peers.

#### Subsystem receiving a record

Considering that a node can discover other peers' addresses from a variety of sources, the libp2p PeerStore can differentiate the addresses that were obtained through a signed peer record.

Once a record is received and its signature properly validated, its envelope is stored in the AddressBook in its byte representation. The `seqNumber` remains unmarshalled so that we can quickly compare it against incoming records to determine the most recent record.

The AddressBook addresses will be updated with the content of the envelope with a certified property. This allows other subsystems to identify the known certified addresses of a peer.
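As an illustrative sketch (assumptions: `wireData` is a received envelope and `libp2p` is a running node; the exact call site varies by subsystem), handing a signed peer record to the PeerStore might look like this:

```js
// Sketch only: a subsystem passing a received signed peer record to the AddressBook
const Envelope = require('libp2p/src/record/envelope')
const PeerRecord = require('libp2p/src/record/peer-record')

// Verify the signature for the peer-record domain before trusting the payload
const envelope = await Envelope.openAndCertify(wireData, PeerRecord.DOMAIN)

// Stores the envelope bytes and marks the contained addresses as certified
libp2p.peerStore.addressBook.consumePeerRecord(envelope)
```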
#### Subsystem providing a record

Libp2p subsystems that exchange other peers' information will provide the envelope that they received from those peers. As a result, other peers can verify if the envelope was really created by the addressed peer.

When a subsystem wants to provide a record, it will get it from the AddressBook, if it exists. Other subsystems are also able to provide the self record, since it is also stored in the AddressBook.

### Future Work

- Persistence only considering certified addresses?
- Peers may not know their own addresses. It's often impossible to automatically infer one's own public address, and peers may need to rely on third party peers to inform them of their observed public addresses.
- A peer may inadvertently or maliciously sign an address that they do not control. In other words, a signature isn't a guarantee that a given address is valid.
- Some addresses may be ambiguous. For example, addresses on a private subnet are valid within that subnet but are useless on the public internet.
- Once all these pieces are in place, we will also need a way to prioritize addresses based on their authenticity, that is, the dialer can prioritize self-certified addresses over addresses from an unknown origin.
- Modular dialer? (taken from go PR notes)
  - With the modular dialer, users should easily be able to configure precedence. With dialer v1, anything we do to prioritise dials is going to be spaghetti and ad hoc. With the modular dialer, you'd be able to specify the order of dials when instantiating the pipeline.
  - Multiple parallel dials. We already have the issue where new addresses aren't added to existing dials.
25  src/record/envelope/envelope.proto.js  Normal file
@ -0,0 +1,25 @@
'use strict'

const protons = require('protons')

const message = `
message Envelope {
  // public_key is the public key of the keypair the enclosed payload was
  // signed with.
  bytes public_key = 1;

  // payload_type encodes the type of payload, so that it can be deserialized
  // deterministically.
  bytes payload_type = 2;

  // payload is the actual payload carried inside this envelope.
  bytes payload = 3;

  // signature is the signature produced by the private key corresponding to
  // the enclosed public key, over the payload, prefixing a domain string for
  // additional security.
  bytes signature = 5;
}
`

module.exports = protons(message).Envelope
175  src/record/envelope/index.js  Normal file
@ -0,0 +1,175 @@
'use strict'

const debug = require('debug')
const log = debug('libp2p:envelope')
log.error = debug('libp2p:envelope:error')
const errCode = require('err-code')

const uint8arraysConcat = require('uint8arrays/concat')
const uint8arraysFromString = require('uint8arrays/from-string')
const cryptoKeys = require('libp2p-crypto/src/keys')
const PeerId = require('peer-id')
const varint = require('varint')
const uint8arraysEquals = require('uint8arrays/equals')

const { codes } = require('../../errors')
const Protobuf = require('./envelope.proto')

/**
 * The Envelope is responsible for keeping an arbitrary signed record
 * by a libp2p peer.
 */
class Envelope {
  /**
   * @constructor
   * @param {object} params
   * @param {PeerId} params.peerId
   * @param {Uint8Array} params.payloadType
   * @param {Uint8Array} params.payload marshaled record
   * @param {Uint8Array} params.signature signature of the domain string :: type hint :: payload.
   */
  constructor ({ peerId, payloadType, payload, signature }) {
    this.peerId = peerId
    this.payloadType = payloadType
    this.payload = payload
    this.signature = signature

    // Cache
    this._marshal = undefined
  }

  /**
   * Marshal the envelope content.
   * @return {Uint8Array}
   */
  marshal () {
    if (this._marshal) {
      return this._marshal
    }

    const publicKey = cryptoKeys.marshalPublicKey(this.peerId.pubKey)

    this._marshal = Protobuf.encode({
      public_key: publicKey,
      payload_type: this.payloadType,
      payload: this.payload,
      signature: this.signature
    })

    return this._marshal
  }

  /**
   * Verifies if the other Envelope is identical to this one.
   * @param {Envelope} other
   * @return {boolean}
   */
  equals (other) {
    return uint8arraysEquals(this.peerId.pubKey.bytes, other.peerId.pubKey.bytes) &&
      uint8arraysEquals(this.payloadType, other.payloadType) &&
      uint8arraysEquals(this.payload, other.payload) &&
      uint8arraysEquals(this.signature, other.signature)
  }

  /**
   * Validate envelope data signature for the given domain.
   * @param {string} domain
   * @return {Promise<boolean>}
   */
  validate (domain) {
    const signData = formatSignaturePayload(domain, this.payloadType, this.payload)

    return this.peerId.pubKey.verify(signData, this.signature)
  }
}

/**
 * Helper function that prepares a Uint8Array to sign or verify a signature.
 * @param {string} domain
 * @param {Uint8Array} payloadType
 * @param {Uint8Array} payload
 * @return {Uint8Array}
 */
const formatSignaturePayload = (domain, payloadType, payload) => {
  // When signing, a peer will prepare a Uint8Array by concatenating the following:
  // - The length of the domain separation string in bytes
  // - The domain separation string, encoded as UTF-8
  // - The length of the payload_type field in bytes
  // - The value of the payload_type field
  // - The length of the payload field in bytes
  // - The value of the payload field

  domain = uint8arraysFromString(domain)
  const domainLength = varint.encode(domain.byteLength)
  const payloadTypeLength = varint.encode(payloadType.length)
  const payloadLength = varint.encode(payload.length)

  return uint8arraysConcat([
    new Uint8Array(domainLength),
    domain,
    new Uint8Array(payloadTypeLength),
    payloadType,
    new Uint8Array(payloadLength),
    payload
  ])
}

/**
 * Unmarshal a serialized Envelope protobuf message.
 * @param {Uint8Array} data
 * @return {Promise<Envelope>}
 */
Envelope.createFromProtobuf = async (data) => {
  const envelopeData = Protobuf.decode(data)
  const peerId = await PeerId.createFromPubKey(envelopeData.public_key)

  return new Envelope({
    peerId,
    payloadType: envelopeData.payload_type,
    payload: envelopeData.payload,
    signature: envelopeData.signature
  })
}

/**
 * Seal marshals the given Record, places the marshaled bytes inside an Envelope
 * and signs it with the given peerId's private key.
 * @async
 * @param {Record} record
 * @param {PeerId} peerId
 * @return {Promise<Envelope>}
 */
Envelope.seal = async (record, peerId) => {
  const domain = record.domain
  const payloadType = record.codec
  const payload = record.marshal()

  const signData = formatSignaturePayload(domain, payloadType, payload)
  const signature = await peerId.privKey.sign(signData)

  return new Envelope({
    peerId,
    payloadType,
    payload,
    signature
  })
}

/**
 * Open and certify a given marshalled envelope.
 * Data is unmarshalled and the signature validated for the given domain.
 * @param {Uint8Array} data
 * @param {string} domain
 * @return {Promise<Envelope>}
 */
Envelope.openAndCertify = async (data, domain) => {
  const envelope = await Envelope.createFromProtobuf(data)
  const valid = await envelope.validate(domain)

  if (!valid) {
    throw errCode(new Error('envelope signature is not valid for the given domain'), codes.ERR_SIGNATURE_NOT_VALID)
  }

  return envelope
}

module.exports = Envelope
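To make the length-prefixed layout described in `formatSignaturePayload` concrete, here is a small illustrative computation; the domain, type hint and payload values below are made up:

```js
const varint = require('varint')
const concat = require('uint8arrays/concat')
const fromString = require('uint8arrays/from-string')

const domain = fromString('libp2p-example')         // domain separation string
const payloadType = Uint8Array.from([3, 1])         // type hint bytes
const payload = fromString('some marshaled record') // record bytes

// length-prefix each field with its unsigned varint length, then concatenate
const signData = concat([
  new Uint8Array(varint.encode(domain.byteLength)),
  domain,
  new Uint8Array(varint.encode(payloadType.length)),
  payloadType,
  new Uint8Array(varint.encode(payload.length)),
  payload
])
// signData is what the private key signs and the public key later verifies
```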
11  src/record/peer-record/consts.js  Normal file
@ -0,0 +1,11 @@
'use strict'

const multicodec = require('multicodec')

// The domain string used for peer records contained in an Envelope.
module.exports.ENVELOPE_DOMAIN_PEER_RECORD = multicodec.getName(multicodec.LIBP2P_PEER_RECORD)

// The type hint used to identify peer records in an Envelope.
// Defined in https://github.com/multiformats/multicodec/blob/master/table.csv
// with name "libp2p-peer-record"
module.exports.ENVELOPE_PAYLOAD_TYPE_PEER_RECORD = Uint8Array.from([3, 1])
100  src/record/peer-record/index.js  Normal file
@ -0,0 +1,100 @@
'use strict'

const multiaddr = require('multiaddr')
const PeerId = require('peer-id')
const Record = require('libp2p-interfaces/src/record')
const arrayEquals = require('libp2p-utils/src/array-equals')

const Protobuf = require('./peer-record.proto')
const {
  ENVELOPE_DOMAIN_PEER_RECORD,
  ENVELOPE_PAYLOAD_TYPE_PEER_RECORD
} = require('./consts')

/**
 * The PeerRecord is used for distributing peer routing records across the network.
 * It contains the peer's reachable listen addresses.
 */
class PeerRecord extends Record {
  /**
   * @constructor
   * @param {object} params
   * @param {PeerId} params.peerId
   * @param {Array<multiaddr>} params.multiaddrs addresses of the associated peer.
   * @param {number} [params.seqNumber] monotonically-increasing sequence counter that's used to order PeerRecords in time.
   */
  constructor ({ peerId, multiaddrs = [], seqNumber = Date.now() }) {
    super(ENVELOPE_DOMAIN_PEER_RECORD, ENVELOPE_PAYLOAD_TYPE_PEER_RECORD)

    this.peerId = peerId
    this.multiaddrs = multiaddrs
    this.seqNumber = seqNumber

    // Cache
    this._marshal = undefined
  }

  /**
   * Marshal a record to be used in an envelope.
   * @return {Uint8Array}
   */
  marshal () {
    if (this._marshal) {
      return this._marshal
    }

    this._marshal = Protobuf.encode({
      peer_id: this.peerId.toBytes(),
      seq: this.seqNumber,
      addresses: this.multiaddrs.map((m) => ({
        multiaddr: m.bytes
      }))
    })

    return this._marshal
  }

  /**
   * Returns true if `this` record equals the `other`.
   * @param {Record} other
   * @return {boolean}
   */
  equals (other) {
    // Validate PeerId
    if (!this.peerId.equals(other.peerId)) {
      return false
    }

    // Validate seqNumber
    if (this.seqNumber !== other.seqNumber) {
      return false
    }

    // Validate multiaddrs
    if (!arrayEquals(this.multiaddrs, other.multiaddrs)) {
      return false
    }

    return true
  }
}

/**
 * Unmarshal Peer Record Protobuf.
 * @param {Uint8Array} buf marshaled peer record.
 * @return {PeerRecord}
 */
PeerRecord.createFromProtobuf = (buf) => {
  // Decode
  const peerRecord = Protobuf.decode(buf)

  const peerId = PeerId.createFromBytes(peerRecord.peer_id)
  const multiaddrs = (peerRecord.addresses || []).map((a) => multiaddr(a.multiaddr))
  const seqNumber = peerRecord.seq

  return new PeerRecord({ peerId, multiaddrs, seqNumber })
}

PeerRecord.DOMAIN = ENVELOPE_DOMAIN_PEER_RECORD

module.exports = PeerRecord
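A short, hedged sketch (not from the repository) of the full peer-record flow enabled by this module, assuming `node` is a started libp2p instance whose addresses are being shared:

```js
// Sketch: build, sign and later recover a peer record
const PeerRecord = require('libp2p/src/record/peer-record')
const Envelope = require('libp2p/src/record/envelope')

const record = new PeerRecord({
  peerId: node.peerId,
  multiaddrs: node.multiaddrs
})

// Seal the record with the node's private key and get the wire bytes
const envelope = await Envelope.seal(record, node.peerId)
const wireData = envelope.marshal()

// On the receiving side, verify the signature against PeerRecord.DOMAIN
// before trusting and unmarshalling the payload
const opened = await Envelope.openAndCertify(wireData, PeerRecord.DOMAIN)
const receivedRecord = PeerRecord.createFromProtobuf(opened.payload)
```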
29  src/record/peer-record/peer-record.proto.js  Normal file
@ -0,0 +1,29 @@
'use strict'

const protons = require('protons')

// PeerRecord messages contain information that is useful to share with other peers.
// Currently, a PeerRecord contains the public listen addresses for a peer, but this
// is expected to expand to include other information in the future.
// PeerRecords are designed to be serialized to bytes and placed inside of
// SignedEnvelopes before sharing with other peers.
const message = `
message PeerRecord {
  // AddressInfo is a wrapper around a binary multiaddr. It is defined as a
  // separate message to allow us to add per-address metadata in the future.
  message AddressInfo {
    bytes multiaddr = 1;
  }

  // peer_id contains a libp2p peer id in its binary representation.
  bytes peer_id = 1;

  // seq contains a monotonically-increasing sequence counter to order PeerRecords in time.
  uint64 seq = 2;

  // addresses is a list of public listen addresses for the peer.
  repeated AddressInfo addresses = 3;
}
`

module.exports = protons(message).PeerRecord
@ -75,7 +75,7 @@ describe('Connection Manager', () => {
|
||||
expect(libp2p.connectionManager.emit.callCount).to.equal(1)
|
||||
const [event, connection] = libp2p.connectionManager.emit.getCall(0).args
|
||||
expect(event).to.equal('peer:connect')
|
||||
expect(connection.remotePeer.isEqual(remoteLibp2p.peerId)).to.equal(true)
|
||||
expect(connection.remotePeer.equals(remoteLibp2p.peerId)).to.equal(true)
|
||||
|
||||
const libp2pConn = libp2p.connectionManager.get(remoteLibp2p.peerId)
|
||||
expect(libp2pConn).to.exist()
|
||||
|
@ -3,6 +3,7 @@
|
||||
|
||||
const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
chai.use(require('chai-as-promised'))
|
||||
const { expect } = chai
|
||||
const nock = require('nock')
|
||||
const sinon = require('sinon')
|
||||
@ -96,7 +97,7 @@ describe('content-routing', () => {
|
||||
let delegate
|
||||
|
||||
beforeEach(async () => {
|
||||
const [peerId] = await peerUtils.createPeerId({ fixture: false })
|
||||
const [peerId] = await peerUtils.createPeerId({ fixture: true })
|
||||
|
||||
delegate = new DelegatedContentRouter(peerId, {
|
||||
host: '0.0.0.0',
|
||||
@ -227,7 +228,7 @@ describe('content-routing', () => {
|
||||
let delegate
|
||||
|
||||
beforeEach(async () => {
|
||||
const [peerId] = await peerUtils.createPeerId({ fixture: false })
|
||||
const [peerId] = await peerUtils.createPeerId({ fixture: true })
|
||||
|
||||
delegate = new DelegatedContentRouter(peerId, {
|
||||
host: '0.0.0.0',
|
||||
|
@ -1,14 +1,12 @@
|
||||
'use strict'
|
||||
/* eslint-env mocha */
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
const { expect } = chai
|
||||
const { expect } = require('aegir/utils/chai')
|
||||
|
||||
const multiaddr = require('multiaddr')
|
||||
const pWaitFor = require('p-wait-for')
|
||||
const mergeOptions = require('merge-options')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
const { create } = require('../../../src')
|
||||
const { subsystemOptions, subsystemMulticodecs } = require('./utils')
|
||||
@ -68,8 +66,8 @@ describe('DHT subsystem operates correctly', () => {
|
||||
})
|
||||
|
||||
it('should put on a peer and get from the other', async () => {
|
||||
const key = Buffer.from('hello')
|
||||
const value = Buffer.from('world')
|
||||
const key = uint8ArrayFromString('hello')
|
||||
const value = uint8ArrayFromString('world')
|
||||
|
||||
await libp2p.dialProtocol(remAddr, subsystemMulticodecs)
|
||||
await Promise.all([
|
||||
@ -131,8 +129,8 @@ describe('DHT subsystem operates correctly', () => {
|
||||
it('should put on a peer and get from the other', async () => {
|
||||
await libp2p.dial(remAddr)
|
||||
|
||||
const key = Buffer.from('hello')
|
||||
const value = Buffer.from('world')
|
||||
const key = uint8ArrayFromString('hello')
|
||||
const value = uint8ArrayFromString('world')
|
||||
|
||||
await remoteLibp2p._dht.start()
|
||||
await pWaitFor(() => libp2p._dht.routingTable.size === 1)
|
||||
|
57  test/core/encryption.spec.js  Normal file
@ -0,0 +1,57 @@
'use strict'
/* eslint-env mocha */

const chai = require('chai')
chai.use(require('dirty-chai'))
chai.use(require('chai-as-promised'))
const { expect } = chai

const Transport = require('libp2p-websockets')
const { NOISE: Crypto } = require('libp2p-noise')

const Libp2p = require('../../src')
const { codes: ErrorCodes } = require('../../src/errors')
const { createPeerId } = require('../utils/creators/peer')

describe('Connection encryption configuration', () => {
  let peerId

  before(async () => {
    [peerId] = await createPeerId()
  })

  it('is required', async () => {
    const config = {
      peerId,
      modules: {
        transport: [Transport]
      }
    }

    await expect(Libp2p.create(config)).to.eventually.be.rejected()
      .and.to.have.property('code', ErrorCodes.CONN_ENCRYPTION_REQUIRED)
  })

  it('is required and needs at least one module', async () => {
    const config = {
      peerId,
      modules: {
        transport: [Transport],
        connEncryption: []
      }
    }
    await expect(Libp2p.create(config)).to.eventually.be.rejected()
      .and.to.have.property('code', ErrorCodes.CONN_ENCRYPTION_REQUIRED)
  })

  it('can be created', async () => {
    const config = {
      peerId,
      modules: {
        transport: [Transport],
        connEncryption: [Crypto]
      }
    }
    await Libp2p.create(config)
  })
})
@ -6,6 +6,7 @@ chai.use(require('dirty-chai'))
|
||||
const { expect } = chai
|
||||
|
||||
const Transport = require('libp2p-tcp')
|
||||
const { NOISE: Crypto } = require('libp2p-noise')
|
||||
|
||||
const { create } = require('../../src')
|
||||
const peerUtils = require('../utils/creators/peer')
|
||||
@ -31,7 +32,8 @@ describe('Listening', () => {
|
||||
listen: [listenAddr]
|
||||
},
|
||||
modules: {
|
||||
transport: [Transport]
|
||||
transport: [Transport],
|
||||
connEncryption: [Crypto]
|
||||
}
|
||||
})
|
||||
|
||||
|
@ -17,7 +17,7 @@ describe('ping', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
nodes = await peerUtils.createPeer({
|
||||
number: 2,
|
||||
number: 3,
|
||||
config: baseOptions
|
||||
})
|
||||
|
||||
@ -25,7 +25,14 @@ describe('ping', () => {
|
||||
nodes[1].peerStore.addressBook.set(nodes[0].peerId, nodes[0].multiaddrs)
|
||||
})
|
||||
|
||||
it('ping once from peer0 to peer1', async () => {
|
||||
it('ping once from peer0 to peer1 using a multiaddr', async () => {
|
||||
const ma = `${nodes[2].multiaddrs[0]}/p2p/${nodes[2].peerId.toB58String()}`
|
||||
const latency = await nodes[0].ping(ma)
|
||||
|
||||
expect(latency).to.be.a('Number')
|
||||
})
|
||||
|
||||
it('ping once from peer0 to peer1 using a peerId', async () => {
|
||||
const latency = await nodes[0].ping(nodes[1].peerId)
|
||||
|
||||
expect(latency).to.be.a('Number')
|
||||
|
@ -1,11 +1,7 @@
|
||||
'use strict'
|
||||
/* eslint-env mocha */
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
chai.use(require('chai-as-promised'))
|
||||
const { expect } = chai
|
||||
const { expect } = require('aegir/utils/chai')
|
||||
const sinon = require('sinon')
|
||||
const Transport = require('libp2p-tcp')
|
||||
const Muxer = require('libp2p-mplex')
|
||||
@ -19,6 +15,7 @@ const pipe = require('it-pipe')
|
||||
const AggregateError = require('aggregate-error')
|
||||
const { Connection } = require('libp2p-interfaces/src/connection')
|
||||
const { AbortError } = require('libp2p-interfaces/src/transport/errors')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
const Libp2p = require('../../src')
|
||||
const Dialer = require('../../src/dialer')
|
||||
@ -27,7 +24,7 @@ const PeerStore = require('../../src/peer-store')
|
||||
const TransportManager = require('../../src/transport-manager')
|
||||
const { codes: ErrorCodes } = require('../../src/errors')
|
||||
const Protector = require('../../src/pnet')
|
||||
const swarmKeyBuffer = Buffer.from(require('../fixtures/swarm.key'))
|
||||
const swarmKeyBuffer = uint8ArrayFromString(require('../fixtures/swarm.key'))
|
||||
|
||||
const mockUpgrader = require('../utils/mockUpgrader')
|
||||
const createMockConnection = require('../utils/mockConnection')
|
||||
@ -55,7 +52,7 @@ describe('Dialing (direct, TCP)', () => {
|
||||
})
|
||||
remoteTM.add(Transport.prototype[Symbol.toStringTag], Transport)
|
||||
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId: remotePeerId })
|
||||
localTM = new TransportManager({
|
||||
libp2p: {},
|
||||
upgrader: mockUpgrader
|
||||
@ -96,14 +93,23 @@ describe('Dialing (direct, TCP)', () => {
|
||||
.and.to.have.nested.property('._errors[0].code', ErrorCodes.ERR_TRANSPORT_UNAVAILABLE)
|
||||
})
|
||||
|
||||
it('should fail to connect if peer has no known addresses', async () => {
|
||||
const dialer = new Dialer({ transportManager: localTM, peerStore })
|
||||
const peerId = await PeerId.createFromJSON(Peers[1])
|
||||
|
||||
await expect(dialer.connectToPeer(peerId))
|
||||
.to.eventually.be.rejectedWith(Error)
|
||||
.and.to.have.nested.property('.code', ErrorCodes.ERR_NO_VALID_ADDRESSES)
|
||||
})
|
||||
|
||||
it('should be able to connect to a given peer id', async () => {
|
||||
const peerStore = new PeerStore()
|
||||
const peerId = await PeerId.createFromJSON(Peers[0])
|
||||
const peerStore = new PeerStore({ peerId })
|
||||
const dialer = new Dialer({
|
||||
transportManager: localTM,
|
||||
peerStore
|
||||
})
|
||||
|
||||
const peerId = await PeerId.createFromJSON(Peers[0])
|
||||
peerStore.addressBook.set(peerId, [remoteAddr])
|
||||
|
||||
const connection = await dialer.connectToPeer(peerId)
|
||||
|
@ -13,7 +13,6 @@ const Transport = require('libp2p-websockets')
|
||||
const Muxer = require('libp2p-mplex')
|
||||
const { NOISE: Crypto } = require('libp2p-noise')
|
||||
const multiaddr = require('multiaddr')
|
||||
const PeerId = require('peer-id')
|
||||
const AggregateError = require('aggregate-error')
|
||||
const { AbortError } = require('libp2p-interfaces/src/transport/errors')
|
||||
|
||||
@ -24,7 +23,6 @@ const PeerStore = require('../../src/peer-store')
|
||||
const TransportManager = require('../../src/transport-manager')
|
||||
const Libp2p = require('../../src')
|
||||
|
||||
const Peers = require('../fixtures/peers')
|
||||
const { MULTIADDRS_WEBSOCKETS } = require('../fixtures/browser')
|
||||
const mockUpgrader = require('../utils/mockUpgrader')
|
||||
const createMockConnection = require('../utils/mockConnection')
|
||||
@ -35,9 +33,11 @@ const remoteAddr = MULTIADDRS_WEBSOCKETS[0]
|
||||
describe('Dialing (direct, WebSockets)', () => {
|
||||
let localTM
|
||||
let peerStore
|
||||
let peerId
|
||||
|
||||
before(() => {
|
||||
peerStore = new PeerStore()
|
||||
before(async () => {
|
||||
[peerId] = await createPeerId()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
localTM = new TransportManager({
|
||||
libp2p: {},
|
||||
upgrader: mockUpgrader,
|
||||
@ -132,7 +132,6 @@ describe('Dialing (direct, WebSockets)', () => {
|
||||
}
|
||||
}
|
||||
})
|
||||
const peerId = await PeerId.createFromJSON(Peers[0])
|
||||
|
||||
const connection = await dialer.connectToPeer(peerId)
|
||||
expect(connection).to.exist()
|
||||
@ -149,7 +148,6 @@ describe('Dialing (direct, WebSockets)', () => {
|
||||
}
|
||||
}
|
||||
})
|
||||
const peerId = await PeerId.createFromJSON(Peers[0])
|
||||
|
||||
await expect(dialer.connectToPeer(peerId))
|
||||
.to.eventually.be.rejectedWith(AggregateError)
|
||||
@ -198,7 +196,6 @@ describe('Dialing (direct, WebSockets)', () => {
|
||||
const deferredDial = pDefer()
|
||||
sinon.stub(localTM, 'dial').callsFake(() => deferredDial.promise)
|
||||
|
||||
const [peerId] = await createPeerId()
|
||||
// Perform 3 multiaddr dials
|
||||
dialer.connectToPeer(peerId)
|
||||
|
||||
@ -245,7 +242,6 @@ describe('Dialing (direct, WebSockets)', () => {
|
||||
})
|
||||
|
||||
// Perform 3 multiaddr dials
|
||||
const [peerId] = await createPeerId()
|
||||
const dialPromise = dialer.connectToPeer(peerId)
|
||||
|
||||
// Let the call stack run
|
||||
@ -266,14 +262,9 @@ describe('Dialing (direct, WebSockets)', () => {
|
||||
})
|
||||
|
||||
describe('libp2p.dialer', () => {
|
||||
let peerId
|
||||
let libp2p
|
||||
let remoteLibp2p
|
||||
|
||||
before(async () => {
|
||||
peerId = await PeerId.createFromJSON(Peers[0])
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
sinon.restore()
|
||||
libp2p && await libp2p.stop()
|
||||
@ -363,7 +354,7 @@ describe('Dialing (direct, WebSockets)', () => {
|
||||
const connection = await libp2p.dial(remoteAddr)
|
||||
expect(connection).to.exist()
|
||||
|
||||
sinon.spy(libp2p.peerStore.addressBook, 'set')
|
||||
sinon.spy(libp2p.peerStore.addressBook, 'consumePeerRecord')
|
||||
sinon.spy(libp2p.peerStore.protoBook, 'set')
|
||||
|
||||
// Wait for onConnection to be called
|
||||
@ -372,7 +363,8 @@ describe('Dialing (direct, WebSockets)', () => {
|
||||
expect(libp2p.identifyService.identify.callCount).to.equal(1)
|
||||
await libp2p.identifyService.identify.firstCall.returnValue
|
||||
|
||||
expect(libp2p.peerStore.addressBook.set.callCount).to.equal(1)
|
||||
// Self + New peer
|
||||
expect(libp2p.peerStore.addressBook.consumePeerRecord.callCount).to.equal(2)
|
||||
expect(libp2p.peerStore.protoBook.set.callCount).to.equal(1)
|
||||
})
|
||||
|
||||
|
@ -1,7 +1,6 @@
|
||||
'use strict'
|
||||
/* eslint-env mocha */
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
chai.use(require('chai-as-promised'))
|
||||
@ -13,6 +12,7 @@ const { collect } = require('streaming-iterables')
|
||||
const pipe = require('it-pipe')
|
||||
const AggregateError = require('aggregate-error')
|
||||
const PeerId = require('peer-id')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
const { createPeerId } = require('../utils/creators/peer')
|
||||
const baseOptions = require('../utils/base-options')
|
||||
@ -88,7 +88,7 @@ describe('Dialing (via relay, TCP)', () => {
|
||||
)
|
||||
|
||||
const { stream: echoStream } = await connection.newStream('/echo/1.0.0')
|
||||
const input = Buffer.from('hello')
|
||||
const input = uint8ArrayFromString('hello')
|
||||
const [output] = await pipe(
|
||||
[input],
|
||||
echoStream,
|
||||
@ -162,7 +162,7 @@ describe('Dialing (via relay, TCP)', () => {
|
||||
|
||||
// Tamper with the our multiaddrs for the circuit message
|
||||
sinon.stub(srcLibp2p, 'multiaddrs').value([{
|
||||
buffer: Buffer.from('an invalid multiaddr')
|
||||
bytes: uint8ArrayFromString('an invalid multiaddr')
|
||||
}])
|
||||
|
||||
await expect(srcLibp2p.dial(dialAddr))
|
||||
|
@ -13,15 +13,20 @@ const PeerId = require('peer-id')
|
||||
const duplexPair = require('it-pair/duplex')
|
||||
const multiaddr = require('multiaddr')
|
||||
const pWaitFor = require('p-wait-for')
|
||||
const unit8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
const { codes: Errors } = require('../../src/errors')
|
||||
const { IdentifyService, multicodecs } = require('../../src/identify')
|
||||
const Peers = require('../fixtures/peers')
|
||||
const Libp2p = require('../../src')
|
||||
const Envelope = require('../../src/record/envelope')
|
||||
const PeerStore = require('../../src/peer-store')
|
||||
const baseOptions = require('../utils/base-options.browser')
|
||||
const pkg = require('../../package.json')
|
||||
|
||||
const { MULTIADDRS_WEBSOCKETS } = require('../fixtures/browser')
|
||||
const remoteAddr = MULTIADDRS_WEBSOCKETS[0]
|
||||
const listenMaddrs = [multiaddr('/ip4/127.0.0.1/tcp/15002/ws')]
|
||||
|
||||
describe('Identify', () => {
|
||||
let localPeer
|
||||
@ -47,23 +52,18 @@ describe('Identify', () => {
|
||||
libp2p: {
|
||||
peerId: localPeer,
|
||||
connectionManager: new EventEmitter(),
|
||||
peerStore: {
|
||||
addressBook: {
|
||||
set: () => { }
|
||||
},
|
||||
protoBook: {
|
||||
set: () => { }
|
||||
}
|
||||
},
|
||||
multiaddrs: []
|
||||
peerStore: new PeerStore({ peerId: localPeer }),
|
||||
multiaddrs: listenMaddrs
|
||||
},
|
||||
protocols
|
||||
})
|
||||
|
||||
const remoteIdentify = new IdentifyService({
|
||||
libp2p: {
|
||||
peerId: remotePeer,
|
||||
connectionManager: new EventEmitter(),
|
||||
multiaddrs: []
|
||||
peerStore: new PeerStore({ peerId: remotePeer }),
|
||||
multiaddrs: listenMaddrs
|
||||
},
|
||||
protocols
|
||||
})
|
||||
@ -75,8 +75,63 @@ describe('Identify', () => {
|
||||
const [local, remote] = duplexPair()
|
||||
sinon.stub(localConnectionMock, 'newStream').returns({ stream: local, protocol: multicodecs.IDENTIFY })
|
||||
|
||||
sinon.spy(localIdentify.peerStore.addressBook, 'consumePeerRecord')
|
||||
sinon.spy(localIdentify.peerStore.protoBook, 'set')
|
||||
|
||||
// Run identify
|
||||
await Promise.all([
|
||||
localIdentify.identify(localConnectionMock),
|
||||
remoteIdentify.handleMessage({
|
||||
connection: remoteConnectionMock,
|
||||
stream: remote,
|
||||
protocol: multicodecs.IDENTIFY
|
||||
})
|
||||
])
|
||||
|
||||
expect(localIdentify.peerStore.addressBook.consumePeerRecord.callCount).to.equal(1)
|
||||
expect(localIdentify.peerStore.protoBook.set.callCount).to.equal(1)
|
||||
|
||||
// Validate the remote peer gets updated in the peer store
|
||||
const addresses = localIdentify.peerStore.addressBook.get(remotePeer)
|
||||
expect(addresses).to.exist()
|
||||
expect(addresses).have.lengthOf(listenMaddrs.length)
|
||||
expect(addresses.map((a) => a.multiaddr)[0].equals(listenMaddrs[0]))
|
||||
expect(addresses.map((a) => a.isCertified)[0]).to.eql(true)
|
||||
})
|
||||
|
||||
// LEGACY
|
||||
it('should be able to identify another peer with no certified peer records support', async () => {
|
||||
const localIdentify = new IdentifyService({
|
||||
libp2p: {
|
||||
peerId: localPeer,
|
||||
connectionManager: new EventEmitter(),
|
||||
peerStore: new PeerStore({ peerId: localPeer }),
|
||||
multiaddrs: listenMaddrs
|
||||
},
|
||||
protocols
|
||||
})
|
||||
|
||||
const remoteIdentify = new IdentifyService({
|
||||
libp2p: {
|
||||
peerId: remotePeer,
|
||||
connectionManager: new EventEmitter(),
|
||||
peerStore: new PeerStore({ peerId: remotePeer }),
|
||||
multiaddrs: listenMaddrs
|
||||
},
|
||||
protocols
|
||||
})
|
||||
|
||||
const observedAddr = multiaddr('/ip4/127.0.0.1/tcp/1234')
|
||||
const localConnectionMock = { newStream: () => {}, remotePeer }
|
||||
const remoteConnectionMock = { remoteAddr: observedAddr }
|
||||
|
||||
const [local, remote] = duplexPair()
|
||||
sinon.stub(localConnectionMock, 'newStream').returns({ stream: local, protocol: multicodecs.IDENTIFY })
|
||||
sinon.stub(Envelope, 'openAndCertify').throws()
|
||||
|
||||
sinon.spy(localIdentify.peerStore.addressBook, 'set')
|
||||
sinon.spy(localIdentify.peerStore.protoBook, 'set')
|
||||
sinon.spy(localIdentify.peerStore.metadataBook, 'set')
|
||||
|
||||
// Run identify
|
||||
await Promise.all([
|
||||
@ -90,9 +145,18 @@ describe('Identify', () => {
|
||||
|
||||
expect(localIdentify.peerStore.addressBook.set.callCount).to.equal(1)
|
||||
expect(localIdentify.peerStore.protoBook.set.callCount).to.equal(1)
|
||||
|
||||
const metadataArgs = localIdentify.peerStore.metadataBook.set.firstCall.args
|
||||
expect(metadataArgs[0].id.bytes).to.equal(remotePeer.bytes)
|
||||
expect(metadataArgs[1]).to.equal('AgentVersion')
|
||||
expect(unit8ArrayToString(metadataArgs[2])).to.equal(`js-libp2p/${pkg.version}`)
|
||||
|
||||
// Validate the remote peer gets updated in the peer store
|
||||
const call = localIdentify.peerStore.addressBook.set.firstCall
|
||||
expect(call.args[0].id.bytes).to.equal(remotePeer.bytes)
|
||||
expect(call.args[1]).to.exist()
|
||||
expect(call.args[1]).have.lengthOf(listenMaddrs.length)
|
||||
expect(call.args[1][0].equals(listenMaddrs[0]))
|
||||
})
|
||||
|
||||
it('should throw if identified peer is the wrong peer', async () => {
|
||||
@ -100,14 +164,7 @@ describe('Identify', () => {
|
||||
libp2p: {
|
||||
peerId: localPeer,
|
||||
connectionManager: new EventEmitter(),
|
||||
peerStore: {
|
||||
addressBook: {
|
||||
set: () => { }
|
||||
},
|
||||
protoBook: {
|
||||
set: () => { }
|
||||
}
|
||||
},
|
||||
peerStore: new PeerStore({ peerId: localPeer }),
|
||||
multiaddrs: []
|
||||
},
|
||||
protocols
|
||||
@ -116,6 +173,7 @@ describe('Identify', () => {
|
||||
libp2p: {
|
||||
peerId: remotePeer,
|
||||
connectionManager: new EventEmitter(),
|
||||
peerStore: new PeerStore({ peerId: remotePeer }),
|
||||
multiaddrs: []
|
||||
},
|
||||
protocols
|
||||
@ -145,15 +203,15 @@ describe('Identify', () => {
|
||||
|
||||
describe('push', () => {
|
||||
it('should be able to push identify updates to another peer', async () => {
|
||||
const listeningAddr = multiaddr('/ip4/127.0.0.1/tcp/1234')
|
||||
const connectionManager = new EventEmitter()
|
||||
connectionManager.getConnection = () => {}
|
||||
connectionManager.getConnection = () => { }
|
||||
|
||||
const localIdentify = new IdentifyService({
|
||||
libp2p: {
|
||||
peerId: localPeer,
|
||||
connectionManager: new EventEmitter(),
|
||||
multiaddrs: [listeningAddr]
|
||||
peerStore: new PeerStore({ peerId: localPeer }),
|
||||
multiaddrs: listenMaddrs
|
||||
},
|
||||
protocols: new Map([
|
||||
[multicodecs.IDENTIFY],
|
||||
@ -165,14 +223,68 @@ describe('Identify', () => {
|
||||
libp2p: {
|
||||
peerId: remotePeer,
|
||||
connectionManager,
|
||||
peerStore: {
|
||||
addressBook: {
|
||||
set: () => { }
|
||||
},
|
||||
protoBook: {
|
||||
set: () => { }
|
||||
}
|
||||
},
|
||||
peerStore: new PeerStore({ peerId: remotePeer }),
|
||||
multiaddrs: []
|
||||
}
|
||||
})
|
||||
|
||||
// Setup peer protocols and multiaddrs
|
||||
const localProtocols = new Set([multicodecs.IDENTIFY, multicodecs.IDENTIFY_PUSH, '/echo/1.0.0'])
|
||||
const localConnectionMock = { newStream: () => { } }
|
||||
const remoteConnectionMock = { remotePeer: localPeer }
|
||||
|
||||
const [local, remote] = duplexPair()
|
||||
sinon.stub(localConnectionMock, 'newStream').returns({ stream: local, protocol: multicodecs.IDENTIFY_PUSH })
|
||||
|
||||
sinon.spy(remoteIdentify.peerStore.addressBook, 'consumePeerRecord')
|
||||
sinon.spy(remoteIdentify.peerStore.protoBook, 'set')
|
||||
|
||||
// Run identify
|
||||
await Promise.all([
|
||||
localIdentify.push([localConnectionMock]),
|
||||
remoteIdentify.handleMessage({
|
||||
connection: remoteConnectionMock,
|
||||
stream: remote,
|
||||
protocol: multicodecs.IDENTIFY_PUSH
|
||||
})
|
||||
])
|
||||
|
||||
expect(remoteIdentify.peerStore.addressBook.consumePeerRecord.callCount).to.equal(1)
|
||||
expect(remoteIdentify.peerStore.protoBook.set.callCount).to.equal(1)
|
||||
|
||||
const addresses = localIdentify.peerStore.addressBook.get(localPeer)
|
||||
expect(addresses).to.exist()
|
||||
expect(addresses).have.lengthOf(listenMaddrs.length)
|
||||
expect(addresses.map((a) => a.multiaddr)).to.eql(listenMaddrs)
|
||||
|
||||
const [peerId2, protocols] = remoteIdentify.peerStore.protoBook.set.firstCall.args
|
||||
expect(peerId2.bytes).to.eql(localPeer.bytes)
|
||||
expect(protocols).to.eql(Array.from(localProtocols))
|
||||
})
|
||||
|
||||
// LEGACY
|
||||
it('should be able to push identify updates to another peer with no certified peer records support', async () => {
|
||||
const connectionManager = new EventEmitter()
|
||||
connectionManager.getConnection = () => { }
|
||||
|
||||
const localIdentify = new IdentifyService({
|
||||
libp2p: {
|
||||
peerId: localPeer,
|
||||
connectionManager: new EventEmitter(),
|
||||
peerStore: new PeerStore({ peerId: localPeer }),
|
||||
multiaddrs: listenMaddrs
|
||||
},
|
||||
protocols: new Map([
|
||||
[multicodecs.IDENTIFY],
|
||||
[multicodecs.IDENTIFY_PUSH],
|
||||
['/echo/1.0.0']
|
||||
])
|
||||
})
|
||||
const remoteIdentify = new IdentifyService({
|
||||
libp2p: {
|
||||
peerId: remotePeer,
|
||||
connectionManager,
|
||||
peerStore: new PeerStore({ peerId: remotePeer }),
|
||||
multiaddrs: []
|
||||
}
|
||||
})
|
||||
@ -184,6 +296,7 @@ describe('Identify', () => {
|
||||
|
||||
const [local, remote] = duplexPair()
|
||||
sinon.stub(localConnectionMock, 'newStream').returns({ stream: local, protocol: multicodecs.IDENTIFY_PUSH })
|
||||
sinon.stub(Envelope, 'openAndCertify').throws()
|
||||
|
||||
sinon.spy(remoteIdentify.peerStore.addressBook, 'set')
|
||||
sinon.spy(remoteIdentify.peerStore.protoBook, 'set')
|
||||
@ -200,9 +313,11 @@ describe('Identify', () => {
|
||||
|
||||
expect(remoteIdentify.peerStore.addressBook.set.callCount).to.equal(1)
|
||||
expect(remoteIdentify.peerStore.protoBook.set.callCount).to.equal(1)
|
||||
|
||||
const [peerId, multiaddrs] = remoteIdentify.peerStore.addressBook.set.firstCall.args
|
||||
expect(peerId.bytes).to.eql(localPeer.bytes)
|
||||
expect(multiaddrs).to.eql([listeningAddr])
|
||||
expect(multiaddrs).to.eql(listenMaddrs)
|
||||
|
||||
const [peerId2, protocols] = remoteIdentify.peerStore.protoBook.set.firstCall.args
|
||||
expect(peerId2.bytes).to.eql(localPeer.bytes)
|
||||
expect(protocols).to.eql(Array.from(localProtocols))
|
||||
@ -234,16 +349,18 @@ describe('Identify', () => {
|
||||
peerId
|
||||
})
|
||||
|
||||
await libp2p.start()
|
||||
|
||||
sinon.spy(libp2p.identifyService, 'identify')
|
||||
const peerStoreSpySet = sinon.spy(libp2p.peerStore.addressBook, 'set')
|
||||
const peerStoreSpyConsumeRecord = sinon.spy(libp2p.peerStore.addressBook, 'consumePeerRecord')
|
||||
const peerStoreSpyAdd = sinon.spy(libp2p.peerStore.addressBook, 'add')
|
||||
|
||||
const connection = await libp2p.dialer.connectToPeer(remoteAddr)
|
||||
expect(connection).to.exist()
|
||||
|
||||
// Wait for peer store to be updated
|
||||
// Dialer._createDialTarget (add), Identify (replace)
|
||||
await pWaitFor(() => peerStoreSpySet.callCount === 1 && peerStoreSpyAdd.callCount === 1)
|
||||
// Dialer._createDialTarget (add), Identify (consume), Create self (consume)
|
||||
await pWaitFor(() => peerStoreSpyConsumeRecord.callCount === 2 && peerStoreSpyAdd.callCount === 1)
|
||||
expect(libp2p.identifyService.identify.callCount).to.equal(1)
|
||||
|
||||
// The connection should have no open streams
|
||||
@ -257,6 +374,8 @@ describe('Identify', () => {
|
||||
peerId
|
||||
})
|
||||
|
||||
await libp2p.start()
|
||||
|
||||
sinon.spy(libp2p.identifyService, 'identify')
|
||||
sinon.spy(libp2p.identifyService, 'push')
|
||||
|
||||
|
@ -1,7 +1,6 @@
|
||||
'use strict'
|
||||
/* eslint-env mocha */
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
const { expect } = chai
|
||||
@ -56,7 +55,7 @@ describe('plaintext', () => {
|
||||
|
||||
// When we attempt to get the remote peer key, return the wrong peers pub key
|
||||
sinon.stub(remotePeer, 'marshalPubKey').callsFake(() => {
|
||||
return Buffer.alloc(0)
|
||||
return new Uint8Array(0)
|
||||
})
|
||||
|
||||
return Promise.all([
|
||||
|
@ -7,13 +7,9 @@ const dirtyChai = require('dirty-chai')
|
||||
const expect = chai.expect
|
||||
chai.use(dirtyChai)
|
||||
chai.use(require('chai-string'))
|
||||
|
||||
const os = require('os')
|
||||
const path = require('path')
|
||||
const { isNode } = require('ipfs-utils/src/env')
|
||||
const FsStore = require('datastore-fs')
|
||||
const LevelStore = require('datastore-level')
|
||||
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
const { MemoryDatastore } = require('interface-datastore')
|
||||
const Keychain = require('../../src/keychain')
|
||||
|
||||
describe('cms interop', () => {
|
||||
@ -22,13 +18,11 @@ describe('cms interop', () => {
|
||||
let ks
|
||||
|
||||
before(() => {
|
||||
const datastore = isNode
|
||||
? new FsStore(path.join(os.tmpdir(), 'test-keystore-1-' + Date.now()))
|
||||
: new LevelStore('test-keystore-1', { db: require('level') })
|
||||
const datastore = new MemoryDatastore()
|
||||
ks = new Keychain(datastore, { passPhrase: passPhrase })
|
||||
})
|
||||
|
||||
const plainData = Buffer.from('This is a message from Alice to Bob')
|
||||
const plainData = uint8ArrayFromString('This is a message from Alice to Bob')
|
||||
|
||||
it('imports openssl key', async function () {
|
||||
this.timeout(10 * 1000)
|
||||
@ -67,8 +61,8 @@ knU1yykWGkdlbclCuu0NaAfmb8o0OX50CbEKZB7xmsv8tnqn0H0jMF4GCSqGSIb3
|
||||
DQEHATAdBglghkgBZQMEASoEEP/PW1JWehQx6/dsLkp/Mf+gMgQwFM9liLTqC56B
|
||||
nHILFmhac/+a/StQOKuf9dx5qXeGvt9LnwKuGGSfNX4g+dTkoa6N
|
||||
`
|
||||
const plain = await ks.cms.decrypt(Buffer.from(example, 'base64'))
|
||||
const plain = await ks.cms.decrypt(uint8ArrayFromString(example.replace(/\s/g, ''), 'base64'))
|
||||
expect(plain).to.exist()
|
||||
expect(plain.toString()).to.equal(plainData.toString())
|
||||
expect(uint8ArrayToString(plain)).to.equal(uint8ArrayToString(plainData))
|
||||
})
|
||||
})
|
||||
|
@ -2,20 +2,15 @@
|
||||
/* eslint-env mocha */
|
||||
'use strict'
|
||||
|
||||
const chai = require('chai')
|
||||
const { expect } = chai
|
||||
const { chai, expect } = require('aegir/utils/chai')
|
||||
const fail = expect.fail
|
||||
chai.use(require('dirty-chai'))
|
||||
chai.use(require('chai-string'))
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
const peerUtils = require('../utils/creators/peer')
|
||||
|
||||
const os = require('os')
|
||||
const path = require('path')
|
||||
const { isNode } = require('ipfs-utils/src/env')
|
||||
const { MemoryDatastore } = require('interface-datastore')
|
||||
const FsStore = require('datastore-fs')
|
||||
const LevelStore = require('datastore-level')
|
||||
const Keychain = require('../../src/keychain')
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
@ -28,20 +23,24 @@ describe('keychain', () => {
|
||||
let ks
|
||||
let datastore1, datastore2
|
||||
|
||||
before(() => {
|
||||
datastore1 = isNode
|
||||
? new FsStore(path.join(os.tmpdir(), 'test-keystore-1-' + Date.now()))
|
||||
: new LevelStore('test-keystore-1', { db: require('level') })
|
||||
datastore2 = isNode
|
||||
? new FsStore(path.join(os.tmpdir(), 'test-keystore-2-' + Date.now()))
|
||||
: new LevelStore('test-keystore-2', { db: require('level') })
|
||||
before(async () => {
|
||||
datastore1 = new MemoryDatastore()
|
||||
datastore2 = new MemoryDatastore()
|
||||
|
||||
ks = new Keychain(datastore2, { passPhrase: passPhrase })
|
||||
emptyKeystore = new Keychain(datastore1, { passPhrase: passPhrase })
|
||||
|
||||
await datastore1.open()
|
||||
await datastore2.open()
|
||||
})
|
||||
|
||||
it('needs a pass phrase to encrypt a key', () => {
|
||||
expect(() => new Keychain(datastore2)).to.throw()
|
||||
after(async () => {
|
||||
await datastore2.close()
|
||||
await datastore2.close()
|
||||
})
|
||||
|
||||
it('can start without a password', () => {
|
||||
expect(() => new Keychain(datastore2)).to.not.throw()
|
||||
})
|
||||
|
||||
it('needs a NIST SP 800-132 non-weak pass phrase', () => {
|
||||
@ -56,12 +55,48 @@ describe('keychain', () => {
|
||||
expect(Keychain.options).to.exist()
|
||||
})
|
||||
|
||||
it('needs a supported hashing alorithm', () => {
|
||||
it('supports supported hashing alorithms', () => {
|
||||
const ok = new Keychain(datastore2, { passPhrase: passPhrase, dek: { hash: 'sha2-256' } })
|
||||
expect(ok).to.exist()
|
||||
})
|
||||
|
||||
it('does not support unsupported hashing alorithms', () => {
|
||||
expect(() => new Keychain(datastore2, { passPhrase: passPhrase, dek: { hash: 'my-hash' } })).to.throw()
|
||||
})
|
||||
|
||||
it('can list keys without a password', async () => {
|
||||
const keychain = new Keychain(datastore2)
|
||||
|
||||
expect(await keychain.listKeys()).to.have.lengthOf(0)
|
||||
})
|
||||
|
||||
it('can find a key without a password', async () => {
|
||||
const keychain = new Keychain(datastore2)
|
||||
const keychainWithPassword = new Keychain(datastore2, { passPhrase: `hello-${Date.now()}-${Date.now()}` })
|
||||
const id = `key-${Math.random()}`
|
||||
|
||||
await keychainWithPassword.createKey(id, 'ed25519')
|
||||
|
||||
await expect(keychain.findKeyById(id)).to.eventually.be.ok()
|
||||
})
|
||||
|
||||
it('can remove a key without a password', async () => {
|
||||
const keychainWithoutPassword = new Keychain(datastore2)
|
||||
const keychainWithPassword = new Keychain(datastore2, { passPhrase: `hello-${Date.now()}-${Date.now()}` })
|
||||
const name = `key-${Math.random()}`
|
||||
|
||||
expect(await keychainWithPassword.createKey(name, 'ed25519')).to.have.property('name', name)
|
||||
expect(await keychainWithoutPassword.findKeyByName(name)).to.have.property('name', name)
|
||||
await keychainWithoutPassword.removeKey(name)
|
||||
await expect(keychainWithoutPassword.findKeyByName(name)).to.be.rejectedWith(/does not exist/)
|
||||
})
|
||||
|
||||
it('requires a key to create a password', async () => {
|
||||
const keychain = new Keychain(datastore2)
|
||||
|
||||
await expect(keychain.createKey('derp')).to.be.rejected()
|
||||
})
|
||||
|
||||
it('can generate options', () => {
|
||||
const options = Keychain.generateOptions()
|
||||
options.passPhrase = passPhrase
|
||||
@ -149,6 +184,70 @@ describe('keychain', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('ed25519 keys', () => {
|
||||
const keyName = 'my custom key'
|
||||
it('can be an ed25519 key', async () => {
|
||||
const keyInfo = await ks.createKey(keyName, 'ed25519')
|
||||
expect(keyInfo).to.exist()
|
||||
expect(keyInfo).to.have.property('name', keyName)
|
||||
expect(keyInfo).to.have.property('id')
|
||||
})
|
||||
|
||||
it('does not overwrite existing key', async () => {
|
||||
const err = await ks.createKey(keyName, 'ed25519').then(fail, err => err)
|
||||
expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS')
|
||||
})
|
||||
|
||||
it('can export/import a key', async () => {
|
||||
const keyName = 'a new key'
|
||||
const password = 'my sneaky password'
|
||||
const keyInfo = await ks.createKey(keyName, 'ed25519')
|
||||
const exportedKey = await ks.exportKey(keyName, password)
|
||||
// remove it so we can import it
|
||||
await ks.removeKey(keyName)
|
||||
const importedKey = await ks.importKey(keyName, exportedKey, password)
|
||||
expect(importedKey.id).to.eql(keyInfo.id)
|
||||
})
|
||||
|
||||
it('cannot create the "self" key', async () => {
|
||||
const err = await ks.createKey('self', 'ed25519').then(fail, err => err)
|
||||
expect(err).to.exist()
|
||||
expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME')
|
||||
})
|
||||
})
|
||||
|
||||
describe('secp256k1 keys', () => {
|
||||
const keyName = 'my secp256k1 key'
|
||||
it('can be an secp256k1 key', async () => {
|
||||
const keyInfo = await ks.createKey(keyName, 'secp256k1')
|
||||
expect(keyInfo).to.exist()
|
||||
expect(keyInfo).to.have.property('name', keyName)
|
||||
expect(keyInfo).to.have.property('id')
|
||||
})
|
||||
|
||||
it('does not overwrite existing key', async () => {
|
||||
const err = await ks.createKey(keyName, 'secp256k1').then(fail, err => err)
|
||||
expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS')
|
||||
})
|
||||
|
||||
it('can export/import a key', async () => {
|
||||
const keyName = 'a new secp256k1 key'
|
||||
const password = 'my sneaky password'
|
||||
const keyInfo = await ks.createKey(keyName, 'secp256k1')
|
||||
const exportedKey = await ks.exportKey(keyName, password)
|
||||
// remove it so we can import it
|
||||
await ks.removeKey(keyName)
|
||||
const importedKey = await ks.importKey(keyName, exportedKey, password)
|
||||
expect(importedKey.id).to.eql(keyInfo.id)
|
||||
})
|
||||
|
||||
it('cannot create the "self" key', async () => {
|
||||
const err = await ks.createKey('self', 'secp256k1').then(fail, err => err)
|
||||
expect(err).to.exist()
|
||||
expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME')
|
||||
})
|
||||
})
|
||||
|
||||
describe('query', () => {
|
||||
it('finds all existing keys', async () => {
|
||||
const keys = await ks.listKeys()
|
||||
@ -180,7 +279,7 @@ describe('keychain', () => {
|
||||
})
|
||||
|
||||
describe('CMS protected data', () => {
|
||||
const plainData = Buffer.from('This is a message from Alice to Bob')
|
||||
const plainData = uint8ArrayFromString('This is a message from Alice to Bob')
|
||||
let cms
|
||||
|
||||
it('service is available', () => {
|
||||
@ -193,7 +292,7 @@ describe('keychain', () => {
|
||||
expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND')
|
||||
})
|
||||
|
||||
it('requires plain data as a Buffer', async () => {
|
||||
it('requires plain data as a Uint8Array', async () => {
|
||||
const err = await ks.cms.encrypt(rsaKeyName, 'plain data').then(fail, err => err)
|
||||
expect(err).to.exist()
|
||||
expect(err).to.have.property('code', 'ERR_INVALID_PARAMS')
|
||||
@ -202,7 +301,7 @@ describe('keychain', () => {
|
||||
it('encrypts', async () => {
|
||||
cms = await ks.cms.encrypt(rsaKeyName, plainData)
|
||||
expect(cms).to.exist()
|
||||
expect(cms).to.be.instanceOf(Buffer)
|
||||
expect(cms).to.be.instanceOf(Uint8Array)
|
||||
})
|
||||
|
||||
it('is a PKCS #7 message', async () => {
|
||||
@ -228,7 +327,7 @@ describe('keychain', () => {
|
||||
it('can be read with the key', async () => {
|
||||
const plain = await ks.cms.decrypt(cms)
|
||||
expect(plain).to.exist()
|
||||
expect(plain.toString()).to.equal(plainData.toString())
|
||||
expect(uint8ArrayToString(plain)).to.equal(uint8ArrayToString(plainData))
|
||||
})
|
||||
})
|
||||
|
||||
@ -282,7 +381,7 @@ describe('keychain', () => {
|
||||
let alice
|
||||
|
||||
before(async function () {
|
||||
const encoded = Buffer.from(alicePrivKey, 'base64')
|
||||
const encoded = uint8ArrayFromString(alicePrivKey, 'base64pad')
|
||||
alice = await PeerId.createFromPrivKey(encoded)
|
||||
})
|
||||
|
||||
@ -411,7 +510,7 @@ describe('libp2p.keychain', () => {
|
||||
throw new Error('should throw an error using the keychain if no passphrase provided')
|
||||
})
|
||||
|
||||
it('can be used if a passphrase is provided', async () => {
|
||||
it('can be used when a passphrase is provided', async () => {
|
||||
const [libp2p] = await peerUtils.createPeer({
|
||||
started: false,
|
||||
config: {
|
||||
@ -424,7 +523,23 @@ describe('libp2p.keychain', () => {
|
||||
|
||||
await libp2p.loadKeychain()
|
||||
|
||||
const kInfo = await libp2p.keychain.createKey('keyName', 'rsa', 2048)
|
||||
const kInfo = await libp2p.keychain.createKey('keyName', 'ed25519')
|
||||
expect(kInfo).to.exist()
|
||||
})
|
||||
|
||||
it('does not require a keychain passphrase', async () => {
|
||||
const [libp2p] = await peerUtils.createPeer({
|
||||
started: false,
|
||||
config: {
|
||||
keychain: {
|
||||
datastore: new MemoryDatastore()
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
await libp2p.loadKeychain()
|
||||
|
||||
const kInfo = await libp2p.keychain.createKey('keyName', 'ed25519')
|
||||
expect(kInfo).to.exist()
|
||||
})
|
||||
|
||||
@ -441,7 +556,7 @@ describe('libp2p.keychain', () => {
|
||||
})
|
||||
await libp2p.loadKeychain()
|
||||
|
||||
const kInfo = await libp2p.keychain.createKey('keyName', 'rsa', 2048)
|
||||
const kInfo = await libp2p.keychain.createKey('keyName', 'ed25519')
|
||||
expect(kInfo).to.exist()
|
||||
|
||||
const [libp2p2] = await peerUtils.createPeer({
|
||||
|
@ -10,6 +10,7 @@ const multihash = require('multihashes')
|
||||
const crypto = require('libp2p-crypto')
|
||||
const rsaUtils = require('libp2p-crypto/src/keys/rsa-utils')
|
||||
const rsaClass = require('libp2p-crypto/src/keys/rsa-class')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
const sample = {
|
||||
id: '122019318b6e5e0cf93a2314bf01269a2cc23cd3dcd452d742cdb9379d8646f6e4a9',
|
||||
@ -22,7 +23,7 @@ describe('peer ID', () => {
|
||||
let publicKeyDer // a buffer
|
||||
|
||||
before(async () => {
|
||||
const encoded = Buffer.from(sample.privKey, 'base64')
|
||||
const encoded = uint8ArrayFromString(sample.privKey, 'base64pad')
|
||||
peer = await PeerId.createFromPrivKey(encoded)
|
||||
})
|
||||
|
||||
|
@ -13,6 +13,7 @@ const crypto = require('libp2p-crypto')
|
||||
const KadDht = require('libp2p-kad-dht')
|
||||
const MulticastDNS = require('libp2p-mdns')
|
||||
const multiaddr = require('multiaddr')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
const Libp2p = require('../../src')
|
||||
const baseOptions = require('../utils/base-options')
|
||||
@ -110,7 +111,7 @@ describe('peer discovery scenarios', () => {
|
||||
enabled: true,
|
||||
interval: 200, // discover quickly
|
||||
// use a random tag to prevent CI collision
|
||||
serviceTag: crypto.randomBytes(10).toString('hex')
|
||||
serviceTag: uint8ArrayToString(crypto.randomBytes(10), 'base16')
|
||||
}
|
||||
}
|
||||
}
|
||||
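The serviceTag change above swaps Buffer's 'hex' encoding for the 'base16' name used by uint8arrays; both render the same bytes as the same lowercase hex string. A minimal standalone sketch of that equivalence, assuming only the libp2p-crypto and uint8arrays packages this test file already requires:

'use strict'

const crypto = require('libp2p-crypto')
const uint8ArrayToString = require('uint8arrays/to-string')

// randomBytes returns raw bytes; 'base16' renders them as lowercase hex,
// the same output toString('hex') produced when the value was a Buffer
const bytes = crypto.randomBytes(10)
const serviceTag = uint8ArrayToString(bytes, 'base16')
console.log(serviceTag) // e.g. 'a1b2c3...' (20 hex characters)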
|
@ -1,14 +1,20 @@
|
||||
'use strict'
|
||||
/* eslint-env mocha */
|
||||
/* eslint max-nested-callbacks: ["error", 6] */
|
||||
|
||||
const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
const { expect } = chai
|
||||
|
||||
const pDefer = require('p-defer')
|
||||
const { Buffer } = require('buffer')
|
||||
const multiaddr = require('multiaddr')
|
||||
const arrayEquals = require('libp2p-utils/src/array-equals')
|
||||
const PeerId = require('peer-id')
|
||||
const pDefer = require('p-defer')
|
||||
|
||||
const PeerStore = require('../../src/peer-store')
|
||||
const Envelope = require('../../src/record/envelope')
|
||||
const PeerRecord = require('../../src/record/peer-record')
|
||||
|
||||
const peerUtils = require('../utils/creators/peer')
|
||||
const {
|
||||
@ -19,8 +25,6 @@ const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000')
|
||||
const addr2 = multiaddr('/ip4/127.0.0.1/tcp/8001')
|
||||
const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002')
|
||||
|
||||
const arraysAreEqual = (a, b) => a.length === b.length && a.sort().every((item, index) => b[index] === item)
|
||||
|
||||
describe('addressBook', () => {
|
||||
let peerId
|
||||
|
||||
@ -32,7 +36,7 @@ describe('addressBook', () => {
|
||||
let peerStore, ab
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
ab = peerStore.addressBook
|
||||
})
|
||||
|
||||
@ -146,7 +150,7 @@ describe('addressBook', () => {
|
||||
let peerStore, ab
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
ab = peerStore.addressBook
|
||||
})
|
||||
|
||||
@ -194,7 +198,7 @@ describe('addressBook', () => {
|
||||
let changeTrigger = 2
|
||||
peerStore.on('change:multiaddrs', ({ multiaddrs }) => {
|
||||
changeTrigger--
|
||||
if (changeTrigger === 0 && arraysAreEqual(multiaddrs, finalMultiaddrs)) {
|
||||
if (changeTrigger === 0 && arrayEquals(multiaddrs, finalMultiaddrs)) {
|
||||
defer.resolve()
|
||||
}
|
||||
})
|
||||
@ -274,7 +278,7 @@ describe('addressBook', () => {
|
||||
let peerStore, ab
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
ab = peerStore.addressBook
|
||||
})
|
||||
|
||||
@ -309,7 +313,7 @@ describe('addressBook', () => {
|
||||
let peerStore, ab
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
ab = peerStore.addressBook
|
||||
})
|
||||
|
||||
@ -345,7 +349,7 @@ describe('addressBook', () => {
|
||||
let peerStore, ab
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
ab = peerStore.addressBook
|
||||
})
|
||||
|
||||
@ -397,4 +401,266 @@ describe('addressBook', () => {
|
||||
return defer.promise
|
||||
})
|
||||
})
|
||||
|
||||
describe('certified records', () => {
|
||||
let peerStore, ab
|
||||
|
||||
describe('consumes a valid peer record and stores its data', () => {
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore({ peerId })
|
||||
ab = peerStore.addressBook
|
||||
})
|
||||
|
||||
it('no previous data in AddressBook', async () => {
|
||||
const multiaddrs = [addr1, addr2]
|
||||
const peerRecord = new PeerRecord({
|
||||
peerId,
|
||||
multiaddrs
|
||||
})
|
||||
const envelope = await Envelope.seal(peerRecord, peerId)
|
||||
|
||||
// consume peer record
|
||||
const consumed = ab.consumePeerRecord(envelope)
|
||||
expect(consumed).to.eql(true)
|
||||
|
||||
// Validate stored envelope
|
||||
const storedEnvelope = await ab.getPeerRecord(peerId)
|
||||
expect(envelope.equals(storedEnvelope)).to.eql(true)
|
||||
|
||||
// Validate AddressBook addresses
|
||||
const addrs = ab.get(peerId)
|
||||
expect(addrs).to.exist()
|
||||
expect(addrs).to.have.lengthOf(multiaddrs.length)
|
||||
addrs.forEach((addr, index) => {
|
||||
expect(addr.isCertified).to.eql(true)
|
||||
expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true)
|
||||
})
|
||||
})
|
||||
|
||||
it('emits change:multiaddrs event when adding multiaddrs', async () => {
|
||||
const defer = pDefer()
|
||||
const multiaddrs = [addr1, addr2]
|
||||
const peerRecord = new PeerRecord({
|
||||
peerId,
|
||||
multiaddrs
|
||||
})
|
||||
const envelope = await Envelope.seal(peerRecord, peerId)
|
||||
|
||||
peerStore.once('change:multiaddrs', ({ peerId, multiaddrs }) => {
|
||||
expect(peerId).to.exist()
|
||||
expect(multiaddrs).to.eql(multiaddrs)
|
||||
defer.resolve()
|
||||
})
|
||||
|
||||
// consume peer record
|
||||
const consumed = ab.consumePeerRecord(envelope)
|
||||
expect(consumed).to.eql(true)
|
||||
|
||||
return defer.promise
|
||||
})
|
||||
|
||||
it('emits change:multiaddrs event with same data currently in AddressBook (not certified)', async () => {
|
||||
const defer = pDefer()
|
||||
const multiaddrs = [addr1, addr2]
|
||||
|
||||
// Set addressBook data
|
||||
ab.set(peerId, multiaddrs)
|
||||
|
||||
// Validate data exists, but not certified
|
||||
let addrs = ab.get(peerId)
|
||||
expect(addrs).to.exist()
|
||||
expect(addrs).to.have.lengthOf(multiaddrs.length)
|
||||
|
||||
addrs.forEach((addr, index) => {
|
||||
expect(addr.isCertified).to.eql(false)
|
||||
expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true)
|
||||
})
|
||||
|
||||
// Create peer record
|
||||
const peerRecord = new PeerRecord({
|
||||
peerId,
|
||||
multiaddrs
|
||||
})
|
||||
const envelope = await Envelope.seal(peerRecord, peerId)
|
||||
|
||||
peerStore.once('change:multiaddrs', ({ peerId, multiaddrs }) => {
|
||||
expect(peerId).to.exist()
|
||||
expect(multiaddrs).to.eql(multiaddrs)
|
||||
defer.resolve()
|
||||
})
|
||||
|
||||
// consume peer record
|
||||
const consumed = ab.consumePeerRecord(envelope)
|
||||
expect(consumed).to.eql(true)
|
||||
|
||||
// Wait event
|
||||
await defer.promise
|
||||
|
||||
// Validate data exists and certified
|
||||
addrs = ab.get(peerId)
|
||||
expect(addrs).to.exist()
|
||||
expect(addrs).to.have.lengthOf(multiaddrs.length)
|
||||
addrs.forEach((addr, index) => {
|
||||
expect(addr.isCertified).to.eql(true)
|
||||
expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true)
|
||||
})
|
||||
})
|
||||
|
||||
it('emits change:multiaddrs event with previous partial data in AddressBook (not certified)', async () => {
|
||||
const defer = pDefer()
|
||||
const multiaddrs = [addr1, addr2]
|
||||
|
||||
// Set addressBook data
|
||||
ab.set(peerId, [addr1])
|
||||
|
||||
// Validate data exists, but not certified
|
||||
let addrs = ab.get(peerId)
|
||||
expect(addrs).to.exist()
|
||||
expect(addrs).to.have.lengthOf(1)
|
||||
expect(addrs[0].isCertified).to.eql(false)
|
||||
expect(addrs[0].multiaddr.equals(addr1)).to.eql(true)
|
||||
|
||||
// Create peer record
|
||||
const peerRecord = new PeerRecord({
|
||||
peerId,
|
||||
multiaddrs
|
||||
})
|
||||
const envelope = await Envelope.seal(peerRecord, peerId)
|
||||
|
||||
peerStore.once('change:multiaddrs', ({ peerId, multiaddrs }) => {
|
||||
expect(peerId).to.exist()
|
||||
expect(multiaddrs).to.eql(multiaddrs)
|
||||
defer.resolve()
|
||||
})
|
||||
|
||||
// consume peer record
|
||||
const consumed = ab.consumePeerRecord(envelope)
|
||||
expect(consumed).to.eql(true)
|
||||
|
||||
// Wait event
|
||||
await defer.promise
|
||||
|
||||
// Validate data exists and certified
|
||||
addrs = ab.get(peerId)
|
||||
expect(addrs).to.exist()
|
||||
expect(addrs).to.have.lengthOf(multiaddrs.length)
|
||||
addrs.forEach((addr, index) => {
|
||||
expect(addr.isCertified).to.eql(true)
|
||||
expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true)
|
||||
})
|
||||
})
|
||||
|
||||
it('with previous different data in AddressBook (not certified)', async () => {
|
||||
const defer = pDefer()
|
||||
const multiaddrsUncertified = [addr3]
|
||||
const multiaddrsCertified = [addr1, addr2]
|
||||
|
||||
// Set addressBook data
|
||||
ab.set(peerId, multiaddrsUncertified)
|
||||
|
||||
// Validate data exists, but not certified
|
||||
let addrs = ab.get(peerId)
|
||||
expect(addrs).to.exist()
|
||||
expect(addrs).to.have.lengthOf(multiaddrsUncertified.length)
|
||||
addrs.forEach((addr, index) => {
|
||||
expect(addr.isCertified).to.eql(false)
|
||||
expect(multiaddrsUncertified[index].equals(addr.multiaddr)).to.eql(true)
|
||||
})
|
||||
|
||||
// Create peer record
|
||||
const peerRecord = new PeerRecord({
|
||||
peerId,
|
||||
multiaddrs: multiaddrsCertified
|
||||
})
|
||||
const envelope = await Envelope.seal(peerRecord, peerId)
|
||||
|
||||
peerStore.once('change:multiaddrs', ({ peerId, multiaddrs }) => {
|
||||
expect(peerId).to.exist()
|
||||
expect(multiaddrs).to.eql(multiaddrs)
|
||||
defer.resolve()
|
||||
})
|
||||
|
||||
// consume peer record
|
||||
const consumed = ab.consumePeerRecord(envelope)
|
||||
expect(consumed).to.eql(true)
|
||||
|
||||
// Wait event
|
||||
await defer.promise
|
||||
|
||||
// Validate data exists and certified
|
||||
addrs = ab.get(peerId)
|
||||
expect(addrs).to.exist()
|
||||
expect(addrs).to.have.lengthOf(multiaddrsCertified.length)
|
||||
addrs.forEach((addr, index) => {
|
||||
expect(addr.isCertified).to.eql(true)
|
||||
expect(multiaddrsCertified[index].equals(addr.multiaddr)).to.eql(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('fails to consume invalid peer records', () => {
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore({ peerId })
|
||||
ab = peerStore.addressBook
|
||||
})
|
||||
|
||||
it('invalid peer record', () => {
|
||||
const invalidEnvelope = {
|
||||
payload: Buffer.from('invalid-peerRecord')
|
||||
}
|
||||
|
||||
const consumed = ab.consumePeerRecord(invalidEnvelope)
|
||||
expect(consumed).to.eql(false)
|
||||
})
|
||||
|
||||
it('peer that created the envelope is not the same as the peer record', async () => {
|
||||
const multiaddrs = [addr1, addr2]
|
||||
|
||||
// Create peer record
|
||||
const peerId2 = await PeerId.create()
|
||||
const peerRecord = new PeerRecord({
|
||||
peerId: peerId2,
|
||||
multiaddrs
|
||||
})
|
||||
const envelope = await Envelope.seal(peerRecord, peerId)
|
||||
|
||||
const consumed = ab.consumePeerRecord(envelope)
|
||||
expect(consumed).to.eql(false)
|
||||
})
|
||||
|
||||
it('does not store an outdated record', async () => {
|
||||
const multiaddrs = [addr1, addr2]
|
||||
const peerRecord1 = new PeerRecord({
|
||||
peerId,
|
||||
multiaddrs,
|
||||
seqNumber: Date.now()
|
||||
})
|
||||
const peerRecord2 = new PeerRecord({
|
||||
peerId,
|
||||
multiaddrs,
|
||||
seqNumber: Date.now() - 1
|
||||
})
|
||||
const envelope1 = await Envelope.seal(peerRecord1, peerId)
|
||||
const envelope2 = await Envelope.seal(peerRecord2, peerId)
|
||||
|
||||
// Consume envelope1 (bigger seqNumber)
|
||||
let consumed = ab.consumePeerRecord(envelope1)
|
||||
expect(consumed).to.eql(true)
|
||||
|
||||
consumed = ab.consumePeerRecord(envelope2)
|
||||
expect(consumed).to.eql(false)
|
||||
})
|
||||
|
||||
it('empty multiaddrs', async () => {
|
||||
const peerRecord = new PeerRecord({
|
||||
peerId,
|
||||
multiaddrs: []
|
||||
})
|
||||
const envelope = await Envelope.seal(peerRecord, peerId)
|
||||
|
||||
const consumed = ab.consumePeerRecord(envelope)
|
||||
expect(consumed).to.eql(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
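Taken together, the certified-record cases above exercise one flow: a peer builds a PeerRecord for its listen addresses, seals it into a signed Envelope, and whoever receives that envelope hands it to the AddressBook, which then reports the addresses as certified. A rough standalone sketch of that flow; the relative require paths follow this test file's layout (src/peer-store and src/record are internal modules, not a published entry point), and the address is a placeholder:

'use strict'

const PeerId = require('peer-id')
const multiaddr = require('multiaddr')
const PeerStore = require('../../src/peer-store')
const Envelope = require('../../src/record/envelope')
const PeerRecord = require('../../src/record/peer-record')

async function main () {
  const selfId = await PeerId.create()
  const remoteId = await PeerId.create()
  const peerStore = new PeerStore({ peerId: selfId })

  // the remote peer signs a record of its own addresses
  const record = new PeerRecord({
    peerId: remoteId,
    multiaddrs: [multiaddr('/ip4/127.0.0.1/tcp/8000')]
  })
  const envelope = await Envelope.seal(record, remoteId)

  // consuming the envelope stores the addresses as certified
  const consumed = peerStore.addressBook.consumePeerRecord(envelope)
  console.log(consumed) // true
  console.log(peerStore.addressBook.get(remoteId).map((a) => a.isCertified)) // [ true ]
}

main()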
||||
|
@ -19,7 +19,7 @@ describe('keyBook', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
[peerId] = await peerUtils.createPeerId()
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
kb = peerStore.keyBook
|
||||
})
|
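Every constructor change in these hunks is the same 0.29 adjustment: PeerStore is now created with the node's own PeerId instead of with no arguments. A minimal sketch, assuming the same internal require path the tests use:

'use strict'

const PeerId = require('peer-id')
const PeerStore = require('../../src/peer-store')

async function main () {
  // `new PeerStore()` with no options is the pre-0.29 form these tests moved away from
  const selfId = await PeerId.create()
  const peerStore = new PeerStore({ peerId: selfId })

  console.log(peerStore.peers.size) // 0 — no other peers known yet
}

main()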
||||
|
||||
|
@ -5,6 +5,7 @@ const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
chai.use(require('chai-bytes'))
|
||||
const { expect } = chai
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
const pDefer = require('p-defer')
|
||||
const PeerStore = require('../../src/peer-store')
|
||||
@ -25,7 +26,7 @@ describe('metadataBook', () => {
|
||||
let peerStore, mb
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
mb = peerStore.metadataBook
|
||||
})
|
||||
|
||||
@ -76,7 +77,7 @@ describe('metadataBook', () => {
|
||||
it('stores the content and emit change event', () => {
|
||||
const defer = pDefer()
|
||||
const metadataKey = 'location'
|
||||
const metadataValue = Buffer.from('mars')
|
||||
const metadataValue = uint8ArrayFromString('mars')
|
||||
|
||||
peerStore.once('change:metadata', ({ peerId, metadata }) => {
|
||||
expect(peerId).to.exist()
|
||||
@ -99,8 +100,8 @@ describe('metadataBook', () => {
|
||||
it('emits on set if not storing the exact same content', () => {
|
||||
const defer = pDefer()
|
||||
const metadataKey = 'location'
|
||||
const metadataValue1 = Buffer.from('mars')
|
||||
const metadataValue2 = Buffer.from('saturn')
|
||||
const metadataValue1 = uint8ArrayFromString('mars')
|
||||
const metadataValue2 = uint8ArrayFromString('saturn')
|
||||
|
||||
let changeCounter = 0
|
||||
peerStore.on('change:metadata', () => {
|
||||
@ -129,7 +130,7 @@ describe('metadataBook', () => {
|
||||
it('does not emit on set if it is storing the exact same content', () => {
|
||||
const defer = pDefer()
|
||||
const metadataKey = 'location'
|
||||
const metadataValue = Buffer.from('mars')
|
||||
const metadataValue = uint8ArrayFromString('mars')
|
||||
|
||||
let changeCounter = 0
|
||||
peerStore.on('change:metadata', () => {
|
||||
@ -158,7 +159,7 @@ describe('metadataBook', () => {
|
||||
let peerStore, mb
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
mb = peerStore.metadataBook
|
||||
})
|
||||
|
||||
@ -180,7 +181,7 @@ describe('metadataBook', () => {
|
||||
|
||||
it('returns the metadata stored', () => {
|
||||
const metadataKey = 'location'
|
||||
const metadataValue = Buffer.from('mars')
|
||||
const metadataValue = uint8ArrayFromString('mars')
|
||||
|
||||
mb.set(peerId, metadataKey, metadataValue)
|
||||
|
||||
@ -194,7 +195,7 @@ describe('metadataBook', () => {
|
||||
let peerStore, mb
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
mb = peerStore.metadataBook
|
||||
})
|
||||
|
||||
@ -217,7 +218,7 @@ describe('metadataBook', () => {
|
||||
|
||||
it('returns the metadata value stored for the given key', () => {
|
||||
const metadataKey = 'location'
|
||||
const metadataValue = Buffer.from('mars')
|
||||
const metadataValue = uint8ArrayFromString('mars')
|
||||
|
||||
mb.set(peerId, metadataKey, metadataValue)
|
||||
|
||||
@ -229,7 +230,7 @@ describe('metadataBook', () => {
|
||||
it('returns undefined if no metadata is known for the provided peer and key', () => {
|
||||
const metadataKey = 'location'
|
||||
const metadataBadKey = 'nickname'
|
||||
const metadataValue = Buffer.from('mars')
|
||||
const metadataValue = uint8ArrayFromString('mars')
|
||||
|
||||
mb.set(peerId, metadataKey, metadataValue)
|
||||
|
||||
@ -243,7 +244,7 @@ describe('metadataBook', () => {
|
||||
let peerStore, mb
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
mb = peerStore.metadataBook
|
||||
})
|
||||
|
||||
@ -279,7 +280,7 @@ describe('metadataBook', () => {
|
||||
it('returns true if the record exists and an event is emitted', () => {
|
||||
const defer = pDefer()
|
||||
const metadataKey = 'location'
|
||||
const metadataValue = Buffer.from('mars')
|
||||
const metadataValue = uint8ArrayFromString('mars')
|
||||
|
||||
mb.set(peerId, metadataKey, metadataValue)
|
||||
|
||||
@ -300,7 +301,7 @@ describe('metadataBook', () => {
|
||||
let peerStore, mb
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
mb = peerStore.metadataBook
|
||||
})
|
||||
|
||||
@ -337,7 +338,7 @@ describe('metadataBook', () => {
|
||||
it('returns true if the record exists and an event is emitted', () => {
|
||||
const defer = pDefer()
|
||||
const metadataKey = 'location'
|
||||
const metadataValue = Buffer.from('mars')
|
||||
const metadataValue = uint8ArrayFromString('mars')
|
||||
|
||||
mb.set(peerId, metadataKey, metadataValue)
|
||||
|
||||
@ -357,7 +358,7 @@ describe('metadataBook', () => {
|
||||
const defer = pDefer()
|
||||
const metadataKey = 'location'
|
||||
const metadataBadKey = 'nickname'
|
||||
const metadataValue = Buffer.from('mars')
|
||||
const metadataValue = uint8ArrayFromString('mars')
|
||||
|
||||
mb.set(peerId, metadataKey, metadataValue)
|
||||
|
||||
|
@ -7,6 +7,7 @@ const { expect } = chai
|
||||
|
||||
const PeerStore = require('../../src/peer-store')
|
||||
const multiaddr = require('multiaddr')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
const peerUtils = require('../utils/creators/peer')
|
||||
|
||||
@ -23,7 +24,7 @@ describe('peer-store', () => {
|
||||
let peerIds
|
||||
before(async () => {
|
||||
peerIds = await peerUtils.createPeerId({
|
||||
number: 4
|
||||
number: 5
|
||||
})
|
||||
})
|
||||
|
||||
@ -31,7 +32,7 @@ describe('peer-store', () => {
|
||||
let peerStore
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId: peerIds[4] })
|
||||
})
|
||||
|
||||
it('has an empty map of peers', () => {
|
||||
@ -61,7 +62,7 @@ describe('peer-store', () => {
|
||||
let peerStore
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId: peerIds[4] })
|
||||
|
||||
// Add peer0 with { addr1, addr2 } and { proto1 }
|
||||
peerStore.addressBook.set(peerIds[0], [addr1, addr2])
|
||||
@ -163,7 +164,7 @@ describe('peer-store', () => {
|
||||
let peerStore
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId: peerIds[4] })
|
||||
})
|
||||
|
||||
it('returns peers if only addresses are known', () => {
|
||||
@ -196,7 +197,7 @@ describe('peer-store', () => {
|
||||
|
||||
it('returns peers if only metadata is known', () => {
|
||||
const metadataKey = 'location'
|
||||
const metadataValue = Buffer.from('earth')
|
||||
const metadataValue = uint8ArrayFromString('earth')
|
||||
peerStore.metadataBook.set(peerIds[0], metadataKey, metadataValue)
|
||||
|
||||
const peers = peerStore.peers
|
||||
|
@ -5,19 +5,29 @@ const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
const { expect } = chai
|
||||
const sinon = require('sinon')
|
||||
|
||||
const Envelope = require('../../src/record/envelope')
|
||||
const PeerRecord = require('../../src/record/peer-record')
|
||||
const PeerStore = require('../../src/peer-store/persistent')
|
||||
|
||||
const multiaddr = require('multiaddr')
|
||||
const { MemoryDatastore } = require('interface-datastore')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
const peerUtils = require('../utils/creators/peer')
|
||||
|
||||
describe('Persisted PeerStore', () => {
|
||||
let datastore, peerStore
|
||||
let peerId
|
||||
|
||||
before(async () => {
|
||||
[peerId] = await peerUtils.createPeerId({ fixture: false })
|
||||
})
|
||||
|
||||
describe('start and stop flows', () => {
|
||||
beforeEach(() => {
|
||||
datastore = new MemoryDatastore()
|
||||
peerStore = new PeerStore({ datastore })
|
||||
peerStore = new PeerStore({ datastore, peerId })
|
||||
})
|
||||
|
||||
afterEach(() => peerStore.stop())
|
||||
@ -50,7 +60,7 @@ describe('Persisted PeerStore', () => {
|
||||
describe('simple setup with content stored per change (threshold 1)', () => {
|
||||
beforeEach(() => {
|
||||
datastore = new MemoryDatastore()
|
||||
peerStore = new PeerStore({ datastore, threshold: 1 })
|
||||
peerStore = new PeerStore({ datastore, peerId, threshold: 1 })
|
||||
})
|
||||
|
||||
afterEach(() => peerStore.stop())
|
||||
@ -99,6 +109,7 @@ describe('Persisted PeerStore', () => {
|
||||
expect(storedPeer.id.toB58String()).to.eql(peer.toB58String())
|
||||
expect(storedPeer.protocols).to.have.members(protocols)
|
||||
expect(storedPeer.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[0].toString()])
|
||||
expect(storedPeer.addresses.map((a) => a.isCertified)).to.have.members([false])
|
||||
})
|
||||
|
||||
it('should load content to the peerStore when restart but not put in datastore again', async () => {
|
||||
@ -135,7 +146,7 @@ describe('Persisted PeerStore', () => {
|
||||
await Promise.all(commitSpy.returnValues)
|
||||
|
||||
// MetadataBook
|
||||
peerStore.metadataBook.set(peers[0], 'location', Buffer.from('earth'))
|
||||
peerStore.metadataBook.set(peers[0], 'location', uint8ArrayFromString('earth'))
|
||||
|
||||
// let batch commit complete
|
||||
await Promise.all(commitSpy.returnValues)
|
||||
@ -176,7 +187,7 @@ describe('Persisted PeerStore', () => {
|
||||
// ProtoBook
|
||||
peerStore.protoBook.set(peer, protocols)
|
||||
// MetadataBook
|
||||
peerStore.metadataBook.set(peer, 'location', Buffer.from('earth'))
|
||||
peerStore.metadataBook.set(peer, 'location', uint8ArrayFromString('earth'))
|
||||
|
||||
// let batch commit complete
|
||||
await Promise.all(commitSpy.returnValues)
|
||||
@ -210,12 +221,171 @@ describe('Persisted PeerStore', () => {
|
||||
throw new Error('Datastore should be empty')
|
||||
}
|
||||
})
|
||||
|
||||
it('should store certified peer records after peer marked as dirty (threshold 1)', async () => {
|
||||
const [peerId] = await peerUtils.createPeerId()
|
||||
const multiaddrs = [multiaddr('/ip4/156.10.1.22/tcp/1000')]
|
||||
const spyDirty = sinon.spy(peerStore, '_addDirtyPeer')
|
||||
const spyDs = sinon.spy(datastore, 'batch')
|
||||
const commitSpy = sinon.spy(peerStore, '_commitData')
|
||||
|
||||
await peerStore.start()
|
||||
|
||||
const peerRecord = new PeerRecord({
|
||||
peerId,
|
||||
multiaddrs
|
||||
})
|
||||
const envelope = await Envelope.seal(peerRecord, peerId)
|
||||
|
||||
// consume peer record
|
||||
const consumed = peerStore.addressBook.consumePeerRecord(envelope)
|
||||
expect(consumed).to.eql(true)
|
||||
expect(spyDirty).to.have.property('callCount', 1) // Address
|
||||
expect(spyDs).to.have.property('callCount', 1)
|
||||
|
||||
// let batch commit complete
|
||||
await Promise.all(commitSpy.returnValues)
|
||||
|
||||
// Should have one peer record stored in the datastore
|
||||
const queryParams = {
|
||||
prefix: '/peers/'
|
||||
}
|
||||
|
||||
let count = 0
|
||||
for await (const _ of datastore.query(queryParams)) { // eslint-disable-line
|
||||
count++
|
||||
}
|
||||
expect(count).to.equal(1)
|
||||
|
||||
// Validate data
|
||||
const storedPeer = peerStore.get(peerId)
|
||||
expect(storedPeer.id.toB58String()).to.eql(peerId.toB58String())
|
||||
expect(storedPeer.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[0].toString()])
|
||||
expect(storedPeer.addresses.map((a) => a.isCertified)).to.have.members([true])
|
||||
})
|
||||
|
||||
it('should load certified peer records to the peerStore when restart but not put in datastore again', async () => {
|
||||
const spyDs = sinon.spy(datastore, 'batch')
|
||||
const peers = await peerUtils.createPeerId({ number: 2 })
|
||||
const commitSpy = sinon.spy(peerStore, '_commitData')
|
||||
const multiaddrs = [
|
||||
multiaddr('/ip4/156.10.1.22/tcp/1000'),
|
||||
multiaddr('/ip4/156.10.1.23/tcp/1000')
|
||||
]
|
||||
const peerRecord0 = new PeerRecord({
|
||||
peerId: peers[0],
|
||||
multiaddrs: [multiaddrs[0]]
|
||||
})
|
||||
const envelope0 = await Envelope.seal(peerRecord0, peers[0])
|
||||
const peerRecord1 = new PeerRecord({
|
||||
peerId: peers[1],
|
||||
multiaddrs: [multiaddrs[1]]
|
||||
})
|
||||
const envelope1 = await Envelope.seal(peerRecord1, peers[1])
|
||||
|
||||
await peerStore.start()
|
||||
|
||||
// AddressBook
|
||||
let consumed = peerStore.addressBook.consumePeerRecord(envelope0)
|
||||
expect(consumed).to.eql(true)
|
||||
consumed = peerStore.addressBook.consumePeerRecord(envelope1)
|
||||
expect(consumed).to.eql(true)
|
||||
|
||||
// let batch commit complete
|
||||
await Promise.all(commitSpy.returnValues)
|
||||
|
||||
expect(spyDs).to.have.property('callCount', 2) // 2 Address + 2 Key + 2 Proto + 1 Metadata
|
||||
expect(peerStore.peers.size).to.equal(2)
|
||||
|
||||
await peerStore.stop()
|
||||
peerStore.addressBook.data.clear()
|
||||
|
||||
// Load on restart
|
||||
const spy = sinon.spy(peerStore, '_processDatastoreEntry')
|
||||
|
||||
await peerStore.start()
|
||||
|
||||
expect(spy).to.have.property('callCount', 2)
|
||||
expect(spyDs).to.have.property('callCount', 2)
|
||||
|
||||
expect(peerStore.peers.size).to.equal(2)
|
||||
expect(peerStore.addressBook.data.size).to.equal(2)
|
||||
|
||||
expect(peerStore.addressBook.getRawEnvelope(peers[0])).to.exist()
|
||||
expect(peerStore.addressBook.getRawEnvelope(peers[1])).to.exist()
|
||||
|
||||
// Validate stored envelopes
|
||||
const storedEnvelope0 = await peerStore.addressBook.getPeerRecord(peers[0])
|
||||
expect(envelope0.equals(storedEnvelope0)).to.eql(true)
|
||||
|
||||
const storedEnvelope1 = await peerStore.addressBook.getPeerRecord(peers[1])
|
||||
expect(envelope1.equals(storedEnvelope1)).to.eql(true)
|
||||
|
||||
// Validate multiaddrs
|
||||
const storedPeer0 = peerStore.get(peers[0])
|
||||
expect(storedPeer0.id.toB58String()).to.eql(peers[0].toB58String())
|
||||
expect(storedPeer0.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[0].toString()])
|
||||
expect(storedPeer0.addresses.map((a) => a.isCertified)).to.have.members([true])
|
||||
|
||||
const storedPeer1 = peerStore.get(peers[1])
|
||||
expect(storedPeer1.id.toB58String()).to.eql(peers[1].toB58String())
|
||||
expect(storedPeer1.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[1].toString()])
|
||||
expect(storedPeer1.addresses.map((a) => a.isCertified)).to.have.members([true])
|
||||
})
|
||||
|
||||
it('should delete certified peer records from the datastore on delete', async () => {
|
||||
const [peer] = await peerUtils.createPeerId()
|
||||
const multiaddrs = [multiaddr('/ip4/156.10.1.22/tcp/1000')]
|
||||
const commitSpy = sinon.spy(peerStore, '_commitData')
|
||||
|
||||
await peerStore.start()
|
||||
|
||||
// AddressBook
|
||||
const peerRecord = new PeerRecord({
|
||||
peerId: peer,
|
||||
multiaddrs
|
||||
})
|
||||
const envelope = await Envelope.seal(peerRecord, peer)
|
||||
|
||||
// consume peer record
|
||||
const consumed = peerStore.addressBook.consumePeerRecord(envelope)
|
||||
expect(consumed).to.eql(true)
|
||||
|
||||
// let batch commit complete
|
||||
await Promise.all(commitSpy.returnValues)
|
||||
expect(peerStore.addressBook.getRawEnvelope(peer)).to.exist()
|
||||
|
||||
const spyDs = sinon.spy(datastore, 'batch')
|
||||
const spyAddressBook = sinon.spy(peerStore.addressBook, 'delete')
|
||||
|
||||
// Delete from PeerStore
|
||||
peerStore.delete(peer)
|
||||
|
||||
// let batch commit complete
|
||||
await Promise.all(commitSpy.returnValues)
|
||||
|
||||
await peerStore.stop()
|
||||
|
||||
expect(spyAddressBook).to.have.property('callCount', 1)
|
||||
expect(spyDs).to.have.property('callCount', 1)
|
||||
|
||||
// Should have zero peer records stored in the datastore
|
||||
const queryParams = {
|
||||
prefix: '/peers/'
|
||||
}
|
||||
|
||||
for await (const _ of datastore.query(queryParams)) { // eslint-disable-line
|
||||
throw new Error('Datastore should be empty')
|
||||
}
|
||||
|
||||
expect(peerStore.addressBook.getRawEnvelope(peer)).to.not.exist()
|
||||
})
|
||||
})
|
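The persisted variant adds two more constructor options on top of peerId: the datastore to mirror into and a commit threshold. With threshold: 1, as in the block above, every dirty peer is written out as soon as the pending commit resolves, which is why these tests can query the /peers/ prefix straight after consuming a record. A short sketch of that setup (MemoryDatastore stands in for any interface-datastore implementation; the require path follows this test file's layout):

'use strict'

const PeerId = require('peer-id')
const { MemoryDatastore } = require('interface-datastore')
const PersistentPeerStore = require('../../src/peer-store/persistent')

async function main () {
  const peerId = await PeerId.create()
  const datastore = new MemoryDatastore()

  // threshold is the number of dirty peers batched before a commit;
  // 1 writes on every change, larger values mean fewer datastore batches
  const peerStore = new PersistentPeerStore({ peerId, datastore, threshold: 1 })

  await peerStore.start()
  // ...address/proto/key/metadata books are used as usual here...
  await peerStore.stop()
}

main()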
||||
|
||||
describe('setup with content not stored per change (threshold 2)', () => {
|
||||
beforeEach(() => {
|
||||
datastore = new MemoryDatastore()
|
||||
peerStore = new PeerStore({ datastore, threshold: 2 })
|
||||
peerStore = new PeerStore({ datastore, peerId, threshold: 2 })
|
||||
})
|
||||
|
||||
afterEach(() => peerStore.stop())
|
||||
@ -239,7 +409,7 @@ describe('Persisted PeerStore', () => {
|
||||
// Add Peer0 data in multiple books
|
||||
peerStore.addressBook.set(peers[0], multiaddrs)
|
||||
peerStore.protoBook.set(peers[0], protocols)
|
||||
peerStore.metadataBook.set(peers[0], 'location', Buffer.from('earth'))
|
||||
peerStore.metadataBook.set(peers[0], 'location', uint8ArrayFromString('earth'))
|
||||
|
||||
// let batch commit complete
|
||||
await Promise.all(commitSpy.returnValues)
|
||||
@ -371,7 +541,7 @@ describe('libp2p.peerStore (Persisted)', () => {
|
||||
|
||||
it('should load content to the peerStore when a new node is started with the same datastore', async () => {
|
||||
const commitSpy = sinon.spy(libp2p.peerStore, '_commitData')
|
||||
const peers = await peerUtils.createPeerId({ number: 2 })
|
||||
const peers = await peerUtils.createPeerId({ number: 3 })
|
||||
const multiaddrs = [
|
||||
multiaddr('/ip4/156.10.1.22/tcp/1000'),
|
||||
multiaddr('/ip4/156.10.1.23/tcp/1000')
|
||||
@ -381,15 +551,15 @@ describe('libp2p.peerStore (Persisted)', () => {
|
||||
await libp2p.start()
|
||||
|
||||
// AddressBook
|
||||
libp2p.peerStore.addressBook.set(peers[0], [multiaddrs[0]])
|
||||
libp2p.peerStore.addressBook.set(peers[1], [multiaddrs[1]])
|
||||
libp2p.peerStore.addressBook.set(peers[1], [multiaddrs[0]])
|
||||
libp2p.peerStore.addressBook.set(peers[2], [multiaddrs[1]])
|
||||
|
||||
// let batch commit complete
|
||||
await Promise.all(commitSpy.returnValues)
|
||||
|
||||
// ProtoBook
|
||||
libp2p.peerStore.protoBook.set(peers[0], protocols)
|
||||
libp2p.peerStore.protoBook.set(peers[1], protocols)
|
||||
libp2p.peerStore.protoBook.set(peers[2], protocols)
|
||||
|
||||
// let batch commit complete
|
||||
await Promise.all(commitSpy.returnValues)
|
||||
@ -420,13 +590,13 @@ describe('libp2p.peerStore (Persisted)', () => {
|
||||
expect(newNode.peerStore.peers.size).to.equal(2)
|
||||
|
||||
// Validate data
|
||||
const peer0 = newNode.peerStore.get(peers[0])
|
||||
expect(peer0.id.toB58String()).to.eql(peers[0].toB58String())
|
||||
const peer0 = newNode.peerStore.get(peers[1])
|
||||
expect(peer0.id.toB58String()).to.eql(peers[1].toB58String())
|
||||
expect(peer0.protocols).to.have.members(protocols)
|
||||
expect(peer0.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[0].toString()])
|
||||
|
||||
const peer1 = newNode.peerStore.get(peers[1])
|
||||
expect(peer1.id.toB58String()).to.eql(peers[1].toB58String())
|
||||
const peer1 = newNode.peerStore.get(peers[2])
|
||||
expect(peer1.id.toB58String()).to.eql(peers[2].toB58String())
|
||||
expect(peer1.protocols).to.have.members(protocols)
|
||||
expect(peer1.addresses.map((a) => a.multiaddr.toString())).to.have.members([multiaddrs[1].toString()])
|
||||
|
||||
|
@ -27,7 +27,7 @@ describe('protoBook', () => {
|
||||
let peerStore, pb
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
pb = peerStore.protoBook
|
||||
})
|
||||
|
||||
@ -121,7 +121,7 @@ describe('protoBook', () => {
|
||||
let peerStore, pb
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
pb = peerStore.protoBook
|
||||
})
|
||||
|
||||
@ -228,7 +228,7 @@ describe('protoBook', () => {
|
||||
let peerStore, pb
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
pb = peerStore.protoBook
|
||||
})
|
||||
|
||||
@ -258,7 +258,7 @@ describe('protoBook', () => {
|
||||
let peerStore, pb
|
||||
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
pb = peerStore.protoBook
|
||||
})
|
||||
|
||||
|
@ -1,7 +1,6 @@
|
||||
/* eslint-env mocha */
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const chai = require('chai')
|
||||
const dirtyChai = require('dirty-chai')
|
||||
chai.use(dirtyChai)
|
||||
@ -9,13 +8,14 @@ const expect = chai.expect
|
||||
const duplexPair = require('it-pair/duplex')
|
||||
const pipe = require('it-pipe')
|
||||
const { collect } = require('streaming-iterables')
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
|
||||
const Protector = require('../../src/pnet')
|
||||
const Errors = Protector.errors
|
||||
const generate = Protector.generate
|
||||
|
||||
const swarmKeyBuffer = Buffer.alloc(95)
|
||||
const wrongSwarmKeyBuffer = Buffer.alloc(95)
|
||||
const swarmKeyBuffer = new Uint8Array(95)
|
||||
const wrongSwarmKeyBuffer = new Uint8Array(95)
|
||||
|
||||
// Write new psk files to the buffers
|
||||
generate(swarmKeyBuffer)
|
||||
@ -39,7 +39,7 @@ describe('private network', () => {
|
||||
])
|
||||
|
||||
pipe(
|
||||
[Buffer.from('hello world'), Buffer.from('doo dah')],
|
||||
[uint8ArrayFromString('hello world'), uint8ArrayFromString('doo dah')],
|
||||
aToB
|
||||
)
|
||||
|
||||
@ -53,7 +53,7 @@ describe('private network', () => {
|
||||
collect
|
||||
)
|
||||
|
||||
expect(output).to.eql([Buffer.from('hello world'), Buffer.from('doo dah')])
|
||||
expect(output).to.eql([uint8ArrayFromString('hello world'), uint8ArrayFromString('doo dah')])
|
||||
})
|
||||
|
||||
it('should not be able to share correct data with different keys', async () => {
|
||||
@ -67,7 +67,7 @@ describe('private network', () => {
|
||||
])
|
||||
|
||||
pipe(
|
||||
[Buffer.from('hello world'), Buffer.from('doo dah')],
|
||||
[uint8ArrayFromString('hello world'), uint8ArrayFromString('doo dah')],
|
||||
aToB
|
||||
)
|
||||
|
||||
@ -76,19 +76,19 @@ describe('private network', () => {
|
||||
collect
|
||||
)
|
||||
|
||||
expect(output).to.not.eql([Buffer.from('hello world'), Buffer.from('doo dah')])
|
||||
expect(output).to.not.eql([uint8ArrayFromString('hello world'), uint8ArrayFromString('doo dah')])
|
||||
})
|
||||
|
||||
describe('invalid psks', () => {
|
||||
it('should not accept a bad psk', () => {
|
||||
expect(() => {
|
||||
return new Protector(Buffer.from('not-a-key'))
|
||||
return new Protector(uint8ArrayFromString('not-a-key'))
|
||||
}).to.throw(Errors.INVALID_PSK)
|
||||
})
|
||||
|
||||
it('should not accept a psk of incorrect length', () => {
|
||||
expect(() => {
|
||||
return new Protector(Buffer.from('/key/swarm/psk/1.0.0/\n/base16/\ndffb7e'))
|
||||
return new Protector(uint8ArrayFromString('/key/swarm/psk/1.0.0/\n/base16/\ndffb7e'))
|
||||
}).to.throw(Errors.INVALID_PSK)
|
||||
})
|
||||
})
|
||||
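The private-network changes follow the same Buffer-to-Uint8Array migration as the rest of the release: the pre-shared key lives in a plain Uint8Array and anything that does not parse as a 95-byte swarm key is rejected with INVALID_PSK. A minimal generate-and-construct sketch mirroring the calls in this test file (the require path assumes the same src/pnet layout):

'use strict'

const Protector = require('../../src/pnet')
const generate = Protector.generate

// a swarm key file is 95 bytes: the /key/swarm/psk/1.0.0/ and /base16/
// header lines plus 64 hex characters of key material
const swarmKey = new Uint8Array(95)
generate(swarmKey)

// peers constructed with the same key can talk; different or malformed keys cannot
const protector = new Protector(swarmKey)
console.log(protector instanceof Protector) // true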
|
@ -42,13 +42,13 @@ describe('Pubsub subsystem is configurable', () => {
|
||||
})
|
||||
|
||||
libp2p = await create(customOptions)
|
||||
expect(libp2p.pubsub._pubsub.started).to.equal(false)
|
||||
expect(libp2p.pubsub.started).to.equal(false)
|
||||
|
||||
await libp2p.start()
|
||||
expect(libp2p.pubsub._pubsub.started).to.equal(true)
|
||||
expect(libp2p.pubsub.started).to.equal(true)
|
||||
|
||||
await libp2p.stop()
|
||||
expect(libp2p.pubsub._pubsub.started).to.equal(false)
|
||||
expect(libp2p.pubsub.started).to.equal(false)
|
||||
})
|
||||
|
||||
it('should not start if disabled once libp2p starts', async () => {
|
||||
@ -67,10 +67,10 @@ describe('Pubsub subsystem is configurable', () => {
|
||||
})
|
||||
|
||||
libp2p = await create(customOptions)
|
||||
expect(libp2p.pubsub._pubsub.started).to.equal(false)
|
||||
expect(libp2p.pubsub.started).to.equal(false)
|
||||
|
||||
await libp2p.start()
|
||||
expect(libp2p.pubsub._pubsub.started).to.equal(false)
|
||||
expect(libp2p.pubsub.started).to.equal(false)
|
||||
})
|
||||
|
||||
it('should allow a manual start', async () => {
|
||||
@ -90,9 +90,9 @@ describe('Pubsub subsystem is configurable', () => {
|
||||
|
||||
libp2p = await create(customOptions)
|
||||
await libp2p.start()
|
||||
expect(libp2p.pubsub._pubsub.started).to.equal(false)
|
||||
expect(libp2p.pubsub.started).to.equal(false)
|
||||
|
||||
await libp2p.pubsub.start()
|
||||
expect(libp2p.pubsub._pubsub.started).to.equal(true)
|
||||
expect(libp2p.pubsub.started).to.equal(true)
|
||||
})
|
||||
})
|
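What these assertions pin down is the 0.29 surface change: the router is no longer reached through libp2p.pubsub._pubsub, so started, peers, start() and stop() live directly on libp2p.pubsub. A hedged sketch of the manual-start flow this test drives; `libp2p` stands for a node created with a pubsub router configured but not auto-started, as in the test setup above:

'use strict'

// `libp2p` is a created node whose pubsub was configured but left un-started
async function startPubsubManually (libp2p) {
  await libp2p.start()
  console.log(libp2p.pubsub.started) // false — the router did not start with the node

  await libp2p.pubsub.start()
  console.log(libp2p.pubsub.started) // true
  console.log(libp2p.pubsub.peers.size) // reached directly, no _pubsub indirection
}

module.exports = startPubsubManually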
||||
|
@ -13,6 +13,7 @@ const Floodsub = require('libp2p-floodsub')
|
||||
const Gossipsub = require('libp2p-gossipsub')
|
||||
const { multicodec: floodsubMulticodec } = require('libp2p-floodsub')
|
||||
const { multicodec: gossipsubMulticodec } = require('libp2p-gossipsub')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
const multiaddr = require('multiaddr')
|
||||
|
||||
@ -81,7 +82,7 @@ describe('Pubsub subsystem is able to use different implementations', () => {
|
||||
expect(connection).to.exist()
|
||||
|
||||
libp2p.pubsub.subscribe(topic, (msg) => {
|
||||
expect(msg.data.toString()).to.equal(data)
|
||||
expect(uint8ArrayToString(msg.data)).to.equal(data)
|
||||
defer.resolve()
|
||||
})
|
||||
|
||||
|
@ -10,6 +10,7 @@ const pWaitFor = require('p-wait-for')
|
||||
const pDefer = require('p-defer')
|
||||
const mergeOptions = require('merge-options')
|
||||
const multiaddr = require('multiaddr')
|
||||
const uint8ArrayToString = require('uint8arrays/to-string')
|
||||
|
||||
const { create } = require('../../src')
|
||||
const { subsystemOptions, subsystemMulticodecs } = require('./utils')
|
||||
@ -65,8 +66,8 @@ describe('Pubsub subsystem operates correctly', () => {
|
||||
expect(connection).to.exist()
|
||||
|
||||
return Promise.all([
|
||||
pWaitFor(() => libp2p.pubsub._pubsub.peers.size === 1),
|
||||
pWaitFor(() => remoteLibp2p.pubsub._pubsub.peers.size === 1)
|
||||
pWaitFor(() => libp2p.pubsub.peers.size === 1),
|
||||
pWaitFor(() => remoteLibp2p.pubsub.peers.size === 1)
|
||||
])
|
||||
})
|
||||
|
||||
@ -82,7 +83,7 @@ describe('Pubsub subsystem operates correctly', () => {
|
||||
expect(subscribedTopics).to.not.include(topic)
|
||||
|
||||
libp2p.pubsub.subscribe(topic, (msg) => {
|
||||
expect(msg.data.toString()).to.equal(data)
|
||||
expect(uint8ArrayToString(msg.data)).to.equal(data)
|
||||
defer.resolve()
|
||||
})
|
||||
|
||||
@ -140,14 +141,14 @@ describe('Pubsub subsystem operates correctly', () => {
|
||||
const connection = await libp2p.dial(remotePeerId)
|
||||
|
||||
expect(connection).to.exist()
|
||||
expect(libp2p.pubsub._pubsub.peers.size).to.be.eql(0)
|
||||
expect(remoteLibp2p.pubsub._pubsub.peers.size).to.be.eql(0)
|
||||
expect(libp2p.pubsub.peers.size).to.be.eql(0)
|
||||
expect(remoteLibp2p.pubsub.peers.size).to.be.eql(0)
|
||||
|
||||
remoteLibp2p.pubsub.start()
|
||||
|
||||
return Promise.all([
|
||||
pWaitFor(() => libp2p.pubsub._pubsub.peers.size === 1),
|
||||
pWaitFor(() => remoteLibp2p.pubsub._pubsub.peers.size === 1)
|
||||
pWaitFor(() => libp2p.pubsub.peers.size === 1),
|
||||
pWaitFor(() => remoteLibp2p.pubsub.peers.size === 1)
|
||||
])
|
||||
})
|
||||
|
||||
@ -163,15 +164,15 @@ describe('Pubsub subsystem operates correctly', () => {
|
||||
remoteLibp2p.pubsub.start()
|
||||
|
||||
await Promise.all([
|
||||
pWaitFor(() => libp2p.pubsub._pubsub.peers.size === 1),
|
||||
pWaitFor(() => remoteLibp2p.pubsub._pubsub.peers.size === 1)
|
||||
pWaitFor(() => libp2p.pubsub.peers.size === 1),
|
||||
pWaitFor(() => remoteLibp2p.pubsub.peers.size === 1)
|
||||
])
|
||||
|
||||
let subscribedTopics = libp2p.pubsub.getTopics()
|
||||
expect(subscribedTopics).to.not.include(topic)
|
||||
|
||||
libp2p.pubsub.subscribe(topic, (msg) => {
|
||||
expect(msg.data.toString()).to.equal(data)
|
||||
expect(uint8ArrayToString(msg.data)).to.equal(data)
|
||||
defer.resolve()
|
||||
})
|
||||
|
||||
@ -242,7 +243,7 @@ describe('Pubsub subsystem operates correctly', () => {
|
||||
const defer1 = pDefer()
|
||||
const defer2 = pDefer()
|
||||
const handler = (msg) => {
|
||||
expect(msg.data.toString()).to.equal(data)
|
||||
expect(uint8ArrayToString(msg.data)).to.equal(data)
|
||||
counter++
|
||||
counter === 1 ? defer1.resolve() : defer2.resolve()
|
||||
}
|
||||
@ -307,17 +308,21 @@ describe('Pubsub subsystem operates correctly', () => {
|
||||
libp2p.pubsub.publish(topic, 'message1')
|
||||
remoteLibp2p.pubsub.publish(topic, 'message2')
|
||||
await pWaitFor(() => handlerSpy.callCount === 2)
|
||||
expect(handlerSpy.args.map(([message]) => message.data.toString())).to.include.members(['message1', 'message2'])
|
||||
expect(handlerSpy.args.map(([message]) => uint8ArrayToString(message.data))).to.include.members(['message1', 'message2'])
|
||||
|
||||
// Disconnect the first connection (this acts as a delayed reconnect)
|
||||
const libp2pConnUpdateSpy = sinon.spy(libp2p.connectionManager.connections, 'set')
|
||||
const remoteLibp2pConnUpdateSpy = sinon.spy(remoteLibp2p.connectionManager.connections, 'set')
|
||||
|
||||
await originalConnection.close()
|
||||
await pWaitFor(() => libp2pConnUpdateSpy.callCount === 1 && remoteLibp2pConnUpdateSpy.callCount === 1)
|
||||
|
||||
// Verify messages go both ways after the disconnect
|
||||
handlerSpy.resetHistory()
|
||||
libp2p.pubsub.publish(topic, 'message3')
|
||||
remoteLibp2p.pubsub.publish(topic, 'message4')
|
||||
await pWaitFor(() => handlerSpy.callCount === 2)
|
||||
expect(handlerSpy.args.map(([message]) => message.data.toString())).to.include.members(['message3', 'message4'])
|
||||
expect(handlerSpy.args.map(([message]) => uint8ArrayToString(message.data))).to.include.members(['message3', 'message4'])
|
||||
})
|
||||
})
|
||||
})
|
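Message payloads are Uint8Arrays in this release, which is why every handler above decodes msg.data with uint8ArrayToString rather than calling toString on a Buffer. A minimal subscribe/publish sketch in the same style; node1 and node2 are placeholders for two started, already-dialled libp2p nodes sharing a pubsub router, as set up by these tests:

'use strict'

const uint8ArrayToString = require('uint8arrays/to-string')

function exchange (node1, node2) {
  const topic = 'news'

  node1.pubsub.subscribe(topic, (msg) => {
    // msg.data is a Uint8Array, not a Buffer
    console.log('received:', uint8ArrayToString(msg.data))
  })

  // publishing a plain string is accepted here, exactly as the tests above do
  node2.pubsub.publish(topic, 'hello from node2')
}

module.exports = exchange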
||||
|
92 test/record/envelope.spec.js Normal file
@ -0,0 +1,92 @@
|
||||
'use strict'
|
||||
/* eslint-env mocha */
|
||||
|
||||
const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
chai.use(require('chai-bytes'))
|
||||
chai.use(require('chai-as-promised'))
|
||||
const { expect } = chai
|
||||
|
||||
const uint8arrayFromString = require('uint8arrays/from-string')
|
||||
const uint8arrayEquals = require('uint8arrays/equals')
|
||||
const Envelope = require('../../src/record/envelope')
|
||||
const Record = require('libp2p-interfaces/src/record')
|
||||
const { codes: ErrorCodes } = require('../../src/errors')
|
||||
|
||||
const peerUtils = require('../utils/creators/peer')
|
||||
|
||||
const domain = 'libp2p-testing'
|
||||
const codec = uint8arrayFromString('/libp2p/testdata')
|
||||
|
||||
class TestRecord extends Record {
|
||||
constructor (data) {
|
||||
super(domain, codec)
|
||||
this.data = data
|
||||
}
|
||||
|
||||
marshal () {
|
||||
return uint8arrayFromString(this.data)
|
||||
}
|
||||
|
||||
equals (other) {
|
||||
return uint8arrayEquals(this.data, other.data)
|
||||
}
|
||||
}
|
||||
|
||||
describe('Envelope', () => {
|
||||
const payloadType = codec
|
||||
let peerId
|
||||
let testRecord
|
||||
|
||||
before(async () => {
|
||||
[peerId] = await peerUtils.createPeerId()
|
||||
testRecord = new TestRecord('test-data')
|
||||
})
|
||||
|
||||
it('creates an envelope with a random key', () => {
|
||||
const payload = testRecord.marshal()
|
||||
const signature = uint8arrayFromString(Math.random().toString(36).substring(7))
|
||||
|
||||
const envelope = new Envelope({
|
||||
peerId,
|
||||
payloadType,
|
||||
payload,
|
||||
signature
|
||||
})
|
||||
|
||||
expect(envelope).to.exist()
|
||||
expect(envelope.peerId.equals(peerId)).to.eql(true)
|
||||
expect(envelope.payloadType).to.equalBytes(payloadType)
|
||||
expect(envelope.payload).to.equalBytes(payload)
|
||||
expect(envelope.signature).to.equalBytes(signature)
|
||||
})
|
||||
|
||||
it('can seal a record', async () => {
|
||||
const envelope = await Envelope.seal(testRecord, peerId)
|
||||
expect(envelope).to.exist()
|
||||
expect(envelope.peerId.equals(peerId)).to.eql(true)
|
||||
expect(envelope.payloadType).to.eql(payloadType)
|
||||
expect(envelope.payload).to.exist()
|
||||
expect(envelope.signature).to.exist()
|
||||
})
|
||||
|
||||
it('can open and verify a sealed record', async () => {
|
||||
const envelope = await Envelope.seal(testRecord, peerId)
|
||||
const rawEnvelope = envelope.marshal()
|
||||
|
||||
const unmarshalledEnvelope = await Envelope.openAndCertify(rawEnvelope, testRecord.domain)
|
||||
expect(unmarshalledEnvelope).to.exist()
|
||||
|
||||
const equals = envelope.equals(unmarshalledEnvelope)
|
||||
expect(equals).to.eql(true)
|
||||
})
|
||||
|
||||
it('throw on open and verify when a different domain is used', async () => {
|
||||
const envelope = await Envelope.seal(testRecord, peerId)
|
||||
const rawEnvelope = envelope.marshal()
|
||||
|
||||
await expect(Envelope.openAndCertify(rawEnvelope, '/bad-domain'))
|
||||
.to.eventually.be.rejected()
|
||||
.and.to.have.property('code', ErrorCodes.ERR_SIGNATURE_NOT_VALID)
|
||||
})
|
||||
})
|
159 test/record/peer-record.spec.js Normal file
@ -0,0 +1,159 @@
|
||||
'use strict'
|
||||
/* eslint-env mocha */
|
||||
|
||||
const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
const { expect } = chai
|
||||
|
||||
const tests = require('libp2p-interfaces/src/record/tests')
|
||||
const multiaddr = require('multiaddr')
|
||||
const PeerId = require('peer-id')
|
||||
|
||||
const Envelope = require('../../src/record/envelope')
|
||||
const PeerRecord = require('../../src/record/peer-record')
|
||||
|
||||
const peerUtils = require('../utils/creators/peer')
|
||||
|
||||
describe('interface-record compliance', () => {
|
||||
tests({
|
||||
async setup () {
|
||||
const [peerId] = await peerUtils.createPeerId()
|
||||
return new PeerRecord({ peerId })
|
||||
},
|
||||
async teardown () {
|
||||
// cleanup resources created by setup()
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('PeerRecord', () => {
|
||||
let peerId
|
||||
|
||||
before(async () => {
|
||||
[peerId] = await peerUtils.createPeerId()
|
||||
})
|
||||
|
||||
it('de/serializes the same as a go record', async () => {
|
||||
const privKey = Uint8Array.from([8, 1, 18, 64, 133, 251, 231, 43, 96, 100, 40, 144, 4, 165, 49, 249, 103, 137, 141, 245, 49, 158, 224, 41, 146, 253, 216, 64, 33, 250, 80, 82, 67, 75, 246, 238, 17, 187, 163, 237, 23, 33, 148, 140, 239, 180, 229, 11, 10, 11, 181, 202, 216, 166, 181, 45, 199, 177, 164, 15, 79, 102, 82, 16, 92, 145, 226, 196])
|
||||
const rawEnvelope = Uint8Array.from([10, 36, 8, 1, 18, 32, 17, 187, 163, 237, 23, 33, 148, 140, 239, 180, 229, 11, 10, 11, 181, 202, 216, 166, 181, 45, 199, 177, 164, 15, 79, 102, 82, 16, 92, 145, 226, 196, 18, 2, 3, 1, 26, 170, 1, 10, 38, 0, 36, 8, 1, 18, 32, 17, 187, 163, 237, 23, 33, 148, 140, 239, 180, 229, 11, 10, 11, 181, 202, 216, 166, 181, 45, 199, 177, 164, 15, 79, 102, 82, 16, 92, 145, 226, 196, 16, 216, 184, 224, 191, 147, 145, 182, 151, 22, 26, 10, 10, 8, 4, 1, 2, 3, 4, 6, 0, 0, 26, 10, 10, 8, 4, 1, 2, 3, 4, 6, 0, 1, 26, 10, 10, 8, 4, 1, 2, 3, 4, 6, 0, 2, 26, 10, 10, 8, 4, 1, 2, 3, 4, 6, 0, 3, 26, 10, 10, 8, 4, 1, 2, 3, 4, 6, 0, 4, 26, 10, 10, 8, 4, 1, 2, 3, 4, 6, 0, 5, 26, 10, 10, 8, 4, 1, 2, 3, 4, 6, 0, 6, 26, 10, 10, 8, 4, 1, 2, 3, 4, 6, 0, 7, 26, 10, 10, 8, 4, 1, 2, 3, 4, 6, 0, 8, 26, 10, 10, 8, 4, 1, 2, 3, 4, 6, 0, 9, 42, 64, 177, 151, 247, 107, 159, 40, 138, 242, 180, 103, 254, 102, 111, 119, 68, 118, 40, 112, 73, 180, 36, 183, 57, 117, 200, 134, 14, 251, 2, 55, 45, 2, 106, 121, 149, 132, 84, 26, 215, 47, 38, 84, 52, 100, 133, 188, 163, 236, 227, 100, 98, 183, 209, 177, 57, 28, 141, 39, 109, 196, 171, 139, 202, 11])
|
||||
const peerId = await PeerId.createFromPrivKey(privKey)
|
||||
|
||||
const env = await Envelope.openAndCertify(rawEnvelope, PeerRecord.DOMAIN)
|
||||
expect(peerId.equals(env.peerId))
|
||||
|
||||
const record = PeerRecord.createFromProtobuf(env.payload)
|
||||
|
||||
// The payload isn't going to match because of how the protobuf encodes uint64 values
|
||||
// They are marshalled correctly on both sides, but will be off by 1 value
|
||||
// Signatures will still be validated
|
||||
const jsEnv = await Envelope.seal(record, peerId)
|
||||
expect(env.payloadType).to.eql(jsEnv.payloadType)
|
||||
})
|
||||
|
||||
it('creates a peer record with peerId', () => {
|
||||
const peerRecord = new PeerRecord({ peerId })
|
||||
|
||||
expect(peerRecord).to.exist()
|
||||
expect(peerRecord.peerId).to.exist()
|
||||
expect(peerRecord.multiaddrs).to.exist()
|
||||
expect(peerRecord.multiaddrs).to.have.lengthOf(0)
|
||||
expect(peerRecord.seqNumber).to.exist()
|
||||
})
|
||||
|
||||
it('creates a peer record with provided data', () => {
|
||||
const multiaddrs = [
|
||||
multiaddr('/ip4/127.0.0.1/tcp/2000')
|
||||
]
|
||||
const seqNumber = Date.now()
|
||||
const peerRecord = new PeerRecord({ peerId, multiaddrs, seqNumber })
|
||||
|
||||
expect(peerRecord).to.exist()
|
||||
expect(peerRecord.peerId).to.exist()
|
||||
expect(peerRecord.multiaddrs).to.exist()
|
||||
expect(peerRecord.multiaddrs).to.eql(multiaddrs)
|
||||
expect(peerRecord.seqNumber).to.exist()
|
||||
expect(peerRecord.seqNumber).to.eql(seqNumber)
|
||||
})
|
||||
|
||||
it('marshals and unmarshals a peer record', () => {
|
||||
const multiaddrs = [
|
||||
multiaddr('/ip4/127.0.0.1/tcp/2000')
|
||||
]
|
||||
const seqNumber = Date.now()
|
||||
const peerRecord = new PeerRecord({ peerId, multiaddrs, seqNumber })
|
||||
|
||||
// Marshal
|
||||
const rawData = peerRecord.marshal()
|
||||
expect(rawData).to.exist()
|
||||
|
||||
// Unmarshal
|
||||
const unmarshalPeerRecord = PeerRecord.createFromProtobuf(rawData)
|
||||
expect(unmarshalPeerRecord).to.exist()
|
||||
|
||||
const equals = peerRecord.equals(unmarshalPeerRecord)
|
||||
expect(equals).to.eql(true)
|
||||
})
|
||||
|
||||
it('equals returns false if the peer record has a different peerId', async () => {
|
||||
const peerRecord0 = new PeerRecord({ peerId })
|
||||
|
||||
const [peerId1] = await peerUtils.createPeerId({ fixture: false })
|
||||
const peerRecord1 = new PeerRecord({ peerId: peerId1 })
|
||||
|
||||
const equals = peerRecord0.equals(peerRecord1)
|
||||
expect(equals).to.eql(false)
|
||||
})
|
||||
|
||||
it('equals returns false if the peer record has a different seqNumber', () => {
|
||||
const ts0 = Date.now()
|
||||
const peerRecord0 = new PeerRecord({ peerId, seqNumber: ts0 })
|
||||
|
||||
const ts1 = ts0 + 20
|
||||
const peerRecord1 = new PeerRecord({ peerId, seqNumber: ts1 })
|
||||
|
||||
const equals = peerRecord0.equals(peerRecord1)
|
||||
expect(equals).to.eql(false)
|
||||
})
|
||||
|
||||
it('equals returns false if the peer record has a different multiaddrs', () => {
|
||||
const multiaddrs = [
|
||||
multiaddr('/ip4/127.0.0.1/tcp/2000')
|
||||
]
|
||||
const peerRecord0 = new PeerRecord({ peerId, multiaddrs })
|
||||
|
||||
const multiaddrs1 = [
|
||||
multiaddr('/ip4/127.0.0.1/tcp/2001')
|
||||
]
|
||||
const peerRecord1 = new PeerRecord({ peerId, multiaddrs: multiaddrs1 })
|
||||
|
||||
const equals = peerRecord0.equals(peerRecord1)
|
||||
expect(equals).to.eql(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('PeerRecord inside Envelope', () => {
|
||||
let peerId
|
||||
let peerRecord
|
||||
|
||||
before(async () => {
|
||||
[peerId] = await peerUtils.createPeerId()
|
||||
const multiaddrs = [
|
||||
multiaddr('/ip4/127.0.0.1/tcp/2000')
|
||||
]
|
||||
const seqNumber = Date.now()
|
||||
peerRecord = new PeerRecord({ peerId, multiaddrs, seqNumber })
|
||||
})
|
||||
|
||||
it('creates an envelope with the PeerRecord and can unmarshal it', async () => {
|
||||
const e = await Envelope.seal(peerRecord, peerId)
|
||||
const byteE = e.marshal()
|
||||
|
||||
const decodedE = await Envelope.openAndCertify(byteE, peerRecord.domain)
|
||||
expect(decodedE).to.exist()
|
||||
|
||||
const decodedPeerRecord = PeerRecord.createFromProtobuf(decodedE.payload)
|
||||
|
||||
const equals = peerRecord.equals(decodedPeerRecord)
|
||||
expect(equals).to.eql(true)
|
||||
})
|
||||
})
|
@ -21,10 +21,15 @@ const multicodec = '/test/1.0.0'
|
||||
describe('registrar', () => {
|
||||
let peerStore
|
||||
let registrar
|
||||
let peerId
|
||||
|
||||
before(async () => {
|
||||
[peerId] = await peerUtils.createPeerId()
|
||||
})
|
||||
|
||||
describe('errors', () => {
|
||||
beforeEach(() => {
|
||||
peerStore = new PeerStore()
|
||||
peerStore = new PeerStore({ peerId })
|
||||
registrar = new Registrar({ peerStore, connectionManager: new EventEmitter() })
|
||||
})
|
||||
|
||||
|
@ -9,6 +9,7 @@ const sinon = require('sinon')
|
||||
|
||||
const multiaddr = require('multiaddr')
|
||||
const Transport = require('libp2p-websockets')
|
||||
const { NOISE: Crypto } = require('libp2p-noise')
|
||||
const AddressManager = require('../../src/address-manager')
|
||||
const TransportManager = require('../../src/transport-manager')
|
||||
const mockUpgrader = require('../utils/mockUpgrader')
|
||||
@ -110,7 +111,8 @@ describe('libp2p.transportManager', () => {
|
||||
libp2p = new Libp2p({
|
||||
peerId,
|
||||
modules: {
|
||||
transport: [Transport]
|
||||
transport: [Transport],
|
||||
connEncryption: [Crypto]
|
||||
}
|
||||
})
|
||||
|
||||
@ -128,7 +130,8 @@ describe('libp2p.transportManager', () => {
|
||||
libp2p = new Libp2p({
|
||||
peerId,
|
||||
modules: {
|
||||
transport: [spy]
|
||||
transport: [spy],
|
||||
connEncryption: [Crypto]
|
||||
},
|
||||
config: {
|
||||
transport: {
|
||||
@ -152,7 +155,8 @@ describe('libp2p.transportManager', () => {
|
||||
libp2p = new Libp2p({
|
||||
peerId,
|
||||
modules: {
|
||||
transport: [Transport]
|
||||
transport: [Transport],
|
||||
connEncryption: [Crypto]
|
||||
}
|
||||
})
|
||||
|
||||
@ -188,7 +192,8 @@ describe('libp2p.transportManager (dial only)', () => {
|
||||
listen: [multiaddr('/ip4/127.0.0.1/tcp/0')]
|
||||
},
|
||||
modules: {
|
||||
transport: [Transport]
|
||||
transport: [Transport],
|
||||
connEncryption: [Crypto]
|
||||
}
|
||||
})
|
||||
|
||||
@ -212,7 +217,8 @@ describe('libp2p.transportManager (dial only)', () => {
|
||||
faultTolerance: FaultTolerance.NO_FATAL
|
||||
},
|
||||
modules: {
|
||||
transport: [Transport]
|
||||
transport: [Transport],
|
||||
connEncryption: [Crypto]
|
||||
}
|
||||
})
|
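The recurring connEncryption addition in these fixtures reflects that a libp2p node now requires a connection-encryption module alongside its transports; constructing one with a transport alone throws. A sketch of the minimal module set the tests settle on, reusing the libp2p-websockets and libp2p-noise imports added at the top of this file (the require path to the library follows the test layout):

'use strict'

const Libp2p = require('../../src')
const PeerId = require('peer-id')
const Transport = require('libp2p-websockets')
const { NOISE: Crypto } = require('libp2p-noise')

async function createNode () {
  const peerId = await PeerId.create()

  // omitting `connEncryption` here makes the constructor throw in 0.29
  return new Libp2p({
    peerId,
    modules: {
      transport: [Transport],
      connEncryption: [Crypto]
    }
  })
}

module.exports = createNode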
||||
|
||||
|
@ -1,11 +1,7 @@
|
||||
'use strict'
|
||||
/* eslint-env mocha */
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const chai = require('chai')
|
||||
chai.use(require('dirty-chai'))
|
||||
chai.use(require('chai-as-promised'))
|
||||
const { expect } = chai
|
||||
const { expect } = require('aegir/utils/chai')
|
||||
const sinon = require('sinon')
|
||||
const Muxer = require('libp2p-mplex')
|
||||
const multiaddr = require('multiaddr')
|
||||
@ -16,7 +12,8 @@ const pSettle = require('p-settle')
|
||||
const Transport = require('libp2p-websockets')
|
||||
const { NOISE: Crypto } = require('libp2p-noise')
|
||||
const Protector = require('../../src/pnet')
|
||||
const swarmKeyBuffer = Buffer.from(require('../fixtures/swarm.key'))
|
||||
const uint8ArrayFromString = require('uint8arrays/from-string')
|
||||
const swarmKeyBuffer = uint8ArrayFromString(require('../fixtures/swarm.key'))
|
||||
|
||||
const Libp2p = require('../../src')
|
||||
const Upgrader = require('../../src/upgrader')
|
||||
@ -102,7 +99,7 @@ describe('Upgrader', () => {
|
||||
const { stream, protocol } = await connections[0].newStream('/echo/1.0.0')
|
||||
expect(protocol).to.equal('/echo/1.0.0')
|
||||
|
||||
const hello = Buffer.from('hello there!')
|
||||
const hello = uint8ArrayFromString('hello there!')
|
||||
const result = await pipe(
|
||||
[hello],
|
||||
stream,
|
||||
@ -172,7 +169,7 @@ describe('Upgrader', () => {
|
||||
const { stream, protocol } = await connections[0].newStream('/echo/1.0.0')
|
||||
expect(protocol).to.equal('/echo/1.0.0')
|
||||
|
||||
const hello = Buffer.from('hello there!')
|
||||
const hello = uint8ArrayFromString('hello there!')
|
||||
const result = await pipe(
|
||||
[hello],
|
||||
stream,
|
||||
@ -467,13 +464,13 @@ describe('libp2p.upgrader', () => {
|
||||
|
||||
let [event, connection] = libp2p.connectionManager.emit.getCall(0).args
|
||||
expect(event).to.equal('peer:connect')
|
||||
expect(connection.remotePeer.isEqual(remotePeer)).to.equal(true)
|
||||
expect(connection.remotePeer.equals(remotePeer)).to.equal(true)
|
||||
|
||||
// Close and check the disconnect event
|
||||
await Promise.all(connections.map(conn => conn.close()))
|
||||
expect(libp2p.connectionManager.emit.callCount).to.equal(2)
|
||||
;([event, connection] = libp2p.connectionManager.emit.getCall(1).args)
|
||||
expect(event).to.equal('peer:disconnect')
|
||||
expect(connection.remotePeer.isEqual(remotePeer)).to.equal(true)
|
||||
expect(connection.remotePeer.equals(remotePeer)).to.equal(true)
|
||||
})
|
||||
})
|
||||
|