2020-05-01 15:52:06 +02:00
|
|
|
'use strict'
|
|
|
|
|
|
|
|
const debug = require('debug')
|
|
|
|
const log = debug('libp2p:persistent-peer-store')
|
|
|
|
log.error = debug('libp2p:persistent-peer-store:error')
|
|
|
|
|
|
|
|
const { Key } = require('interface-datastore')
|
|
|
|
const multiaddr = require('multiaddr')
|
|
|
|
const PeerId = require('peer-id')
|
|
|
|
|
|
|
|
const PeerStore = require('..')
|
|
|
|
|
|
|
|
const {
|
2020-05-05 14:49:17 +02:00
|
|
|
NAMESPACE_ADDRESS,
|
|
|
|
NAMESPACE_COMMON,
|
2020-04-28 15:03:16 +02:00
|
|
|
NAMESPACE_KEYS,
|
2020-05-15 19:39:13 +02:00
|
|
|
NAMESPACE_METADATA,
|
2020-05-05 14:49:17 +02:00
|
|
|
NAMESPACE_PROTOCOL
|
2020-05-01 15:52:06 +02:00
|
|
|
} = require('./consts')
|
|
|
|
|
|
|
|
const Addresses = require('./pb/address-book.proto')
|
|
|
|
const Protocols = require('./pb/proto-book.proto')
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Responsible for managing the persistence of data in the PeerStore.
|
|
|
|
*/
|
|
|
|
class PersistentPeerStore extends PeerStore {
  /**
   * Responsible for managing the persistence of data in the PeerStore.
   * Dirty peers are tracked via the PeerStore change events and flushed to
   * the datastore in batches, either when `threshold` dirty peers accumulate
   * or when the store is stopped.
   *
   * @constructor
   * @param {Object} properties
   * @param {PeerId} properties.peerId
   * @param {Datastore} properties.datastore Datastore to persist data.
   * @param {number} [properties.threshold = 5] Number of dirty peers allowed before commit data.
   */
  constructor ({ peerId, datastore, threshold = 5 }) {
    super({ peerId })

    /**
     * Backend datastore used to persist data.
     */
    this._datastore = datastore

    /**
     * Peers modified after the latest data persisted.
     */
    this._dirtyPeers = new Set()

    /**
     * Peers metadata changed mapping peer identifiers to metadata changed.
     * @type {Map<string, Set<string>>}
     */
    this._dirtyMetadata = new Map()

    this.threshold = threshold

    // Bind the event handlers so `this` is preserved when they are
    // registered as listeners in `start()`.
    this._addDirtyPeer = this._addDirtyPeer.bind(this)
    this._addDirtyPeerMetadata = this._addDirtyPeerMetadata.bind(this)
  }

  /**
   * Start Persistent PeerStore: subscribe to change events and load all
   * previously persisted peer data from the datastore into the books.
   * @return {Promise<void>}
   */
  async start () {
    log('PeerStore is starting')

    // Handlers for dirty peers
    this.on('change:protocols', this._addDirtyPeer)
    this.on('change:multiaddrs', this._addDirtyPeer)
    this.on('change:pubkey', this._addDirtyPeer)
    this.on('change:metadata', this._addDirtyPeerMetadata)

    // Load data
    for await (const entry of this._datastore.query({ prefix: NAMESPACE_COMMON })) {
      await this._processDatastoreEntry(entry)
    }

    log('PeerStore started')
  }

  /**
   * Stop Persistent PeerStore: stop listening to change events and flush
   * any remaining dirty peers to the datastore.
   * @return {Promise<void>}
   */
  async stop () {
    log('PeerStore is stopping')
    this.removeAllListeners()
    await this._commitData()
    log('PeerStore stopped')
  }

  /**
   * Add modified peer to the dirty set
   * @private
   * @param {Object} params
   * @param {PeerId} params.peerId
   */
  _addDirtyPeer ({ peerId }) {
    const peerIdstr = peerId.toB58String()

    log('add dirty peer', peerIdstr)
    this._dirtyPeers.add(peerIdstr)

    this._commitIfThresholdReached()
  }

  /**
   * Add modified metadata peer to the set.
   * @private
   * @param {Object} params
   * @param {PeerId} params.peerId
   * @param {string} params.metadata
   */
  _addDirtyPeerMetadata ({ peerId, metadata }) {
    const peerIdstr = peerId.toB58String()

    log('add dirty metadata peer', peerIdstr)
    this._dirtyPeers.add(peerIdstr)

    // Add dirty metadata key
    const mData = this._dirtyMetadata.get(peerIdstr) || new Set()
    mData.add(metadata)
    this._dirtyMetadata.set(peerIdstr, mData)

    this._commitIfThresholdReached()
  }

  /**
   * Commit the current dirty data if the configured threshold was reached.
   * Runs as fire-and-forget from synchronous event handlers, so commit
   * errors are logged rather than thrown.
   * @private
   */
  _commitIfThresholdReached () {
    if (this._dirtyPeers.size >= this.threshold) {
      // Commit current data
      this._commitData().catch(err => {
        log.error('error committing data', err)
      })
    }
  }

  /**
   * Add all the peers current data to a datastore batch and commit it.
   * @private
   * @return {Promise<void>}
   */
  async _commitData () {
    const commitPeers = Array.from(this._dirtyPeers)

    if (!commitPeers.length) {
      return
    }

    // Clear Dirty Peers set
    this._dirtyPeers.clear()

    log('create batch commit')
    const batch = this._datastore.batch()
    for (const peerIdStr of commitPeers) {
      // PeerId
      const peerId = this.keyBook.data.get(peerIdStr) || PeerId.createFromCID(peerIdStr)

      // Address Book
      this._batchAddressBook(peerId, batch)

      // Key Book
      this._batchKeyBook(peerId, batch)

      // Metadata Book
      this._batchMetadataBook(peerId, batch)

      // Proto Book
      this._batchProtoBook(peerId, batch)
    }

    await batch.commit()
    log('batch committed')
  }

  /**
   * Add address book data of the peer to the batch.
   * Deletes the key when the peer is no longer present in the book.
   * @private
   * @param {PeerId} peerId
   * @param {Object} batch
   */
  _batchAddressBook (peerId, batch) {
    const b32key = peerId.toString()
    const key = new Key(`${NAMESPACE_ADDRESS}${b32key}`)

    const entry = this.addressBook.data.get(peerId.toB58String())

    try {
      // Deleted from the book
      if (!entry) {
        batch.delete(key)
        return
      }

      const encodedData = Addresses.encode({
        addrs: entry.addresses.map((address) => ({
          multiaddr: address.multiaddr.bytes,
          isCertified: address.isCertified
        })),
        certified_record: entry.record ? {
          seq: entry.record.seqNumber,
          raw: entry.record.raw
        } : undefined
      })

      batch.put(key, encodedData)
    } catch (err) {
      log.error(err)
    }
  }

  /**
   * Add Key book data of the peer to the batch.
   * Deletes the key when the peer has no known public key.
   * @private
   * @param {PeerId} peerId
   * @param {Object} batch
   */
  _batchKeyBook (peerId, batch) {
    const b32key = peerId.toString()
    const key = new Key(`${NAMESPACE_KEYS}${b32key}`)

    try {
      // Deleted from the book
      if (!peerId.pubKey) {
        batch.delete(key)
        return
      }

      const encodedData = peerId.marshalPubKey()

      batch.put(key, encodedData)
    } catch (err) {
      log.error(err)
    }
  }

  /**
   * Add metadata book data of the peer to the batch.
   * Only the metadata keys marked dirty since the last commit are written;
   * a dirty key with no current value is deleted from the datastore.
   * @private
   * @param {PeerId} peerId
   * @param {Object} batch
   */
  _batchMetadataBook (peerId, batch) {
    const b32key = peerId.toString()
    const peerIdStr = peerId.toB58String()
    const dirtyMetadata = this._dirtyMetadata.get(peerIdStr) || []

    try {
      dirtyMetadata.forEach((dirtyKey) => {
        const key = new Key(`${NAMESPACE_METADATA}${b32key}/${dirtyKey}`)
        const dirtyValue = this.metadataBook.getValue(peerId, dirtyKey)

        if (dirtyValue) {
          batch.put(key, dirtyValue)
        } else {
          batch.delete(key)
        }
      })

      // Batched keys are no longer dirty — drop them so future commits
      // for this peer do not re-write unchanged metadata.
      this._dirtyMetadata.delete(peerIdStr)
    } catch (err) {
      log.error(err)
    }
  }

  /**
   * Add proto book data of the peer to the batch.
   * Deletes the key when the peer is no longer present in the book.
   * @private
   * @param {PeerId} peerId
   * @param {Object} batch
   */
  _batchProtoBook (peerId, batch) {
    const b32key = peerId.toString()
    const key = new Key(`${NAMESPACE_PROTOCOL}${b32key}`)

    const protocols = this.protoBook.get(peerId)

    try {
      // Deleted from the book
      if (!protocols) {
        batch.delete(key)
        return
      }

      const encodedData = Protocols.encode({ protocols })

      batch.put(key, encodedData)
    } catch (err) {
      log.error(err)
    }
  }

  /**
   * Process datastore entry and add its data to the correct book.
   * Keys look like `/peers/<book>/<b32 peer id>[/<metadata key>]`, so
   * `keyParts[2]` selects the book and `keyParts[3]` is the peer id.
   * @private
   * @param {Object} params
   * @param {Key} params.key datastore key
   * @param {Uint8Array} params.value datastore value stored
   * @return {Promise<void>}
   */
  async _processDatastoreEntry ({ key, value }) {
    try {
      const keyParts = key.toString().split('/')
      const peerId = PeerId.createFromCID(keyParts[3])

      let decoded
      switch (keyParts[2]) {
        case 'addrs':
          decoded = Addresses.decode(value)

          this.addressBook._setData(
            peerId,
            {
              addresses: decoded.addrs.map((address) => ({
                multiaddr: multiaddr(address.multiaddr),
                isCertified: Boolean(address.isCertified)
              })),
              record: decoded.certified_record ? {
                raw: decoded.certified_record.raw,
                seqNumber: decoded.certified_record.seq
              } : undefined
            },
            { emit: false })
          break
        case 'keys':
          decoded = await PeerId.createFromPubKey(value)

          this.keyBook._setData(
            decoded,
            decoded,
            { emit: false })
          break
        case 'metadata':
          this.metadataBook._setValue(
            peerId,
            keyParts[4],
            value,
            { emit: false })
          break
        case 'protos':
          decoded = Protocols.decode(value)

          this.protoBook._setData(
            peerId,
            new Set(decoded.protocols),
            { emit: false })
          break
        default:
          log('invalid data persisted for: ', key.toString())
      }
    } catch (err) {
      log.error(err)
    }
  }
}
|
|
|
|
|
|
|
|
// Datastore-backed PeerStore implementation.
module.exports = PersistentPeerStore
|