Mirror of https://github.com/fluencelabs/fluence-js.git (synced 2025-04-25 17:52:13 +00:00)
Extend error handling in FluencePeer (#98)
This commit is contained in:
parent 337a3f45de
commit aa21abe465

14  package-lock.json (generated)
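The commit replaces the per-particle timeout handler (`regHandler.timeout`) with an `onStageChange` callback passed to `initiateParticle`, which reports stages such as `sent`, `expired`, and `interpreterError`. A minimal sketch of how a caller might use the new API after this change — the relative import paths and the `(null)` AIR script are illustrative, mirroring the tests in the diff below:

import { FluencePeer } from '../../index';
import { Particle } from '../../internal/Particle';
import { handleTimeout } from '../../internal/utils';

async function demo(): Promise<void> {
    const peer = new FluencePeer();
    await peer.start();

    // React to individual execution stages of a particle.
    const first = Particle.createNew(`(null)`);
    peer.internals.initiateParticle(first, (stage) => {
        if (stage.stage === 'interpreterError') {
            console.error('AIR interpretation failed:', stage.errorMessage);
        }
        if (stage.stage === 'expired') {
            console.error('particle expired before completing');
        }
    });

    // Or react only to expiration, using the helper added in this commit.
    const second = Particle.createNew(`(null)`);
    peer.internals.initiateParticle(second, handleTimeout(() => console.error('timed out')));

    await peer.stop();
}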
@@ -10,7 +10,7 @@
     "license": "Apache-2.0",
     "dependencies": {
       "@chainsafe/libp2p-noise": "4.0.0",
-      "@fluencelabs/avm": "0.15.4",
+      "@fluencelabs/avm": "0.16.6",
       "async": "3.2.0",
       "base64-js": "1.5.1",
       "bs58": "4.0.1",

@@ -646,9 +646,9 @@
       }
     },
     "node_modules/@fluencelabs/avm": {
-      "version": "0.15.4",
-      "resolved": "https://registry.npmjs.org/@fluencelabs/avm/-/avm-0.15.4.tgz",
-      "integrity": "sha512-NLZDq83ocJ1Helm0D8kPMSSkjxH0y+Tujg0px773zjIShbh3jgiJOjAW1xCYgTt9K0LqepjP0bWX4/8nUZfr7g==",
+      "version": "0.16.6",
+      "resolved": "https://registry.npmjs.org/@fluencelabs/avm/-/avm-0.16.6.tgz",
+      "integrity": "sha512-RDNXW/VYAXh+E7B7+S4pTTc/1IcvtlID2xyBs/3TDlxkjbVxM7+vMcFL6cJZOzZZl+3oAWXL3ibDhE5Elcq6ug==",
       "dependencies": {
         "base64-js": "1.5.1"
       }

@@ -8689,9 +8689,9 @@
       }
     },
     "@fluencelabs/avm": {
-      "version": "0.15.4",
-      "resolved": "https://registry.npmjs.org/@fluencelabs/avm/-/avm-0.15.4.tgz",
-      "integrity": "sha512-NLZDq83ocJ1Helm0D8kPMSSkjxH0y+Tujg0px773zjIShbh3jgiJOjAW1xCYgTt9K0LqepjP0bWX4/8nUZfr7g==",
+      "version": "0.16.6",
+      "resolved": "https://registry.npmjs.org/@fluencelabs/avm/-/avm-0.16.6.tgz",
+      "integrity": "sha512-RDNXW/VYAXh+E7B7+S4pTTc/1IcvtlID2xyBs/3TDlxkjbVxM7+vMcFL6cJZOzZZl+3oAWXL3ibDhE5Elcq6ug==",
       "requires": {
         "base64-js": "1.5.1"
       }

@@ -21,7 +21,7 @@
     "license": "Apache-2.0",
     "dependencies": {
       "@chainsafe/libp2p-noise": "4.0.0",
-      "@fluencelabs/avm": "0.15.4",
+      "@fluencelabs/avm": "0.16.6",
       "async": "3.2.0",
       "base64-js": "1.5.1",
       "bs58": "4.0.1",
@@ -1,5 +1,6 @@
 import { FluencePeer } from '../../index';
 import { Particle } from '../../internal/Particle';
+import { handleTimeout } from '../../internal/utils';
 import { registerHandlersHelper } from '../util';

 describe('Avm spec', () => {

@@ -21,10 +22,9 @@ describe('Avm spec', () => {
                     resolve(res);
                 },
             },
-            _timeout: reject,
         });

-        peer.internals.initiateParticle(particle);
+        peer.internals.initiateParticle(particle, handleTimeout(reject));
     });

     // assert

@@ -61,10 +61,9 @@ describe('Avm spec', () => {
                     }
                 },
             },
-            _timeout: reject,
         });

-        peer.internals.initiateParticle(particle);
+        peer.internals.initiateParticle(particle, handleTimeout(reject));
     });

     // assert
@@ -2,6 +2,8 @@ import { Fluence, FluencePeer } from '../../..';
 import { Particle } from '../../../internal/Particle';
 import { registerHandlersHelper } from '../../util';
 import { callMeBack, registerHelloWorld } from './gen1';
+import { callFunction } from '../../../internal/compilerSupport/v2';
+import { handleTimeout } from '../../../internal/utils';

 describe('Compiler support infrastructure tests', () => {
     it('Compiled code for function should work', async () => {

@@ -78,10 +80,9 @@ describe('Compiler support infrastructure tests', () => {
                     resolve(val);
                 },
             },
-            _timeout: reject,
         });

-        Fluence.getPeer().internals.initiateParticle(particle);
+        Fluence.getPeer().internals.initiateParticle(particle, handleTimeout(reject));
     });

     // assert

@@ -166,9 +167,8 @@ describe('Compiler support infrastructure tests', () => {
                     resolve(val);
                 },
             },
-            _timeout: reject,
         });
-        anotherPeer.internals.initiateParticle(particle);
+        anotherPeer.internals.initiateParticle(particle, handleTimeout(reject));
     });

     // assert

@@ -177,4 +177,33 @@ describe('Compiler support infrastructure tests', () => {

         await anotherPeer.stop();
     });

+    it('Should throw error if particle with incorrect AIR script is initiated', async () => {
+        // arrange;
+        const anotherPeer = new FluencePeer();
+        await anotherPeer.start();
+
+        // act
+        const action = callFunction(
+            [anotherPeer],
+            {
+                functionName: 'dontcare',
+                argDefs: [],
+                returnType: { tag: 'void' },
+                names: {
+                    relay: '-relay-',
+                    getDataSrv: 'getDataSrv',
+                    callbackSrv: 'callbackSrv',
+                    responseSrv: 'callbackSrv',
+                    responseFnName: 'response',
+                    errorHandlingSrv: 'errorHandlingSrv',
+                    errorFnName: 'error',
+                },
+            },
+            'incorrect air script',
+        );
+
+        // assert
+        await expect(action).rejects.toMatch(/incorrect air script/);
+    });
 });
@@ -1,7 +1,7 @@
 import { Multiaddr } from 'multiaddr';
 import { nodes } from '../connection';
 import { Fluence, FluencePeer, setLogLevel } from '../../index';
-import { checkConnection } from '../../internal/utils';
+import { checkConnection, doNothing, handleTimeout } from '../../internal/utils';
 import { Particle } from '../../internal/Particle';
 import { registerHandlersHelper } from '../util';

@@ -121,10 +121,9 @@ describe('Typescript usage suite', () => {
                     reject(error);
                 },
             },
-            _timeout: reject,
         });

-        anotherPeer.internals.initiateParticle(particle);
+        anotherPeer.internals.initiateParticle(particle, handleTimeout(reject));
     });

     // assert

@@ -169,7 +168,7 @@ describe('Typescript usage suite', () => {
             )
         `;
         const particle = Particle.createNew(script);
-        await peer1.internals.initiateParticle(particle);
+        await peer1.internals.initiateParticle(particle, doNothing);

         // assert
         const res = await resMakingPromise;

@@ -309,10 +308,9 @@ describe('Typescript usage suite', () => {
                     resolve(res);
                 },
             },
-            _timeout: reject,
         });

-        anotherPeer.internals.initiateParticle(particle);
+        anotherPeer.internals.initiateParticle(particle, handleTimeout(reject));
     });

     // assert

@@ -368,10 +366,9 @@ describe('Typescript usage suite', () => {
                     reject(error);
                 },
             },
-            _timeout: reject,
         });

-        anotherPeer.internals.initiateParticle(particle);
+        anotherPeer.internals.initiateParticle(particle, handleTimeout(reject));
     });

     // assert
@@ -381,7 +378,6 @@ describe('Typescript usage suite', () => {

     it('Should not crash if an error ocurred in user-defined handler', async () => {
         // arrange;
-        setLogLevel('trace');
         await anotherPeer.start();

         // act

@@ -405,10 +401,9 @@ describe('Typescript usage suite', () => {
                     reject(error);
                 },
             },
-            _timeout: reject,
         });

-        anotherPeer.internals.initiateParticle(particle);
+        anotherPeer.internals.initiateParticle(particle, handleTimeout(reject));
     });

     // assert

@@ -417,6 +412,22 @@ describe('Typescript usage suite', () => {
         });
     });

+    it('Should throw error if particle is initiated on a stopped peer', async () => {
+        // arrange;
+        const stoppedPeer = new FluencePeer();
+
+        // act
+        const action = () => {
+            const script = `(null)`;
+            const particle = Particle.createNew(script);
+
+            stoppedPeer.internals.initiateParticle(particle, doNothing);
+        };
+
+        // assert
+        await expect(action).toThrow('Cannot initiate new particle: peer is not initialized');
+    });
+
     it.skip('Should throw correct error when the client tries to send a particle not to the relay', async () => {
         // arrange;
         await anotherPeer.start({ connectTo: nodes[0] });

@@ -439,7 +450,7 @@ describe('Typescript usage suite', () => {
             },
         });

-        anotherPeer.internals.initiateParticle(particle);
+        anotherPeer.internals.initiateParticle(particle, doNothing);
     });

     // assert

@@ -468,10 +479,9 @@ async function callIncorrectService(peer: FluencePeer): Promise<string[]> {
                     reject(error);
                 },
             },
-            _timeout: reject,
        });

-        peer.internals.initiateParticle(particle);
+        peer.internals.initiateParticle(particle, handleTimeout(reject));
    });

    return promise;
@@ -3,14 +3,10 @@ import { Particle } from '../internal/Particle';
 import { MakeServiceCall } from '../internal/utils';

 export const registerHandlersHelper = (peer: FluencePeer, particle: Particle, handlers) => {
-    const { _timeout, ...rest } = handlers;
-    if (_timeout) {
-        peer.internals.regHandler.timeout(particle.id, _timeout);
-    }
-    for (let serviceId in rest) {
-        for (let fnName in rest[serviceId]) {
+    for (let serviceId in handlers) {
+        for (let fnName in handlers[serviceId]) {
             // of type [args] => result
-            const h = rest[serviceId][fnName];
+            const h = handlers[serviceId][fnName];
             peer.internals.regHandler.forParticle(particle.id, serviceId, fnName, MakeServiceCall(h));
         }
     }
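After this change the helper takes a plain serviceId -> fnName -> handler map; the special `_timeout` key is gone, and expiration is reported through the `initiateParticle` callback instead. A hypothetical test fragment using the updated helper — the AIR script and service names are illustrative, and `peer` stands for an already-started FluencePeer:

import { FluencePeer } from '../../index';
import { Particle } from '../../internal/Particle';
import { handleTimeout } from '../../internal/utils';
import { registerHandlersHelper } from '../util';

function echoOk(peer: FluencePeer): Promise<string> {
    return new Promise<string>((resolve, reject) => {
        // Illustrative AIR: call back into the local peer's callbackSrv/response handler.
        const particle = Particle.createNew(`(call %init_peer_id% ("callbackSrv" "response") ["ok"])`);

        // Handlers are a plain nested map now; no `_timeout` entry.
        registerHandlersHelper(peer, particle, {
            callbackSrv: {
                response: (args: any[]) => {
                    resolve(args[0]);
                },
            },
        });

        // Expiration is reported through the stage-change callback.
        peer.internals.initiateParticle(particle, handleTimeout(reject));
    });
}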
@@ -44,7 +44,7 @@ export const Fluence = {
     },

     /**
-     * Uninitializes the default peer: stops all the underltying workflows, stops the Aqua VM
+     * Un-initializes the default peer: stops all the underlying workflows, stops the Aqua VM
      * and disconnects from the Fluence network
      */
     stop: (): Promise<void> => {
@@ -27,13 +27,14 @@ import { CallServiceData, CallServiceResult, GenericCallServiceHandler, ResultCo
 import { CallServiceHandler as LegacyCallServiceHandler } from './compilerSupport/LegacyCallServiceHandler';
 import { PeerIdB58 } from './commonTypes';
 import { FluenceConnection } from './FluenceConnection';
-import { Particle } from './Particle';
+import { Particle, ParticleExecutionStage, ParticleQueueItem } from './Particle';
 import { KeyPair } from './KeyPair';
 import { createInterpreter, dataToString } from './utils';
 import { filter, pipe, Subject, tap } from 'rxjs';
 import { RequestFlow } from './compilerSupport/v1';
 import log from 'loglevel';
 import { defaultServices } from './defaultServices';
+import { instanceOf } from 'ts-pattern';

 /**
  * Node of the Fluence network specified as a pair of node's multiaddr and it's peer id

@@ -202,7 +203,7 @@ export class FluencePeer {
             peerId: this._keyPair.Libp2pPeerId,
             relayAddress: connectToMultiAddr,
             dialTimeoutMs: config.dialTimeoutMs,
-            onIncomingParticle: (p) => this._incomingParticles.next(p),
+            onIncomingParticle: (p) => this._incomingParticles.next({ particle: p, onStageChange: () => {} }),
         });

         await this._connect();

@@ -226,7 +227,6 @@ export class FluencePeer {

         this._particleSpecificHandlers.clear();
         this._commonHandlers.clear();
-        this._timeoutHandlers.clear();
     }

     // internal api
@@ -240,7 +240,11 @@ export class FluencePeer {
          * Initiates a new particle execution starting from local peer
          * @param particle - particle to start execution of
          */
-        initiateParticle: (particle: Particle): void => {
+        initiateParticle: (particle: Particle, onStageChange: (stage: ParticleExecutionStage) => void): void => {
+            if (!this.getStatus().isInitialized) {
+                throw 'Cannot initiate new particle: peer is not initialized';
+            }
+
             if (particle.initPeerId === undefined) {
                 particle.initPeerId = this.getStatus().peerId;
             }

@@ -249,8 +253,12 @@ export class FluencePeer {
                 particle.ttl = this._defaultTTL;
             }

-            this._incomingParticles.next(particle);
+            this._incomingParticles.next({
+                particle: particle,
+                onStageChange: onStageChange,
+            });
         },

         /**
          * Register Call Service handler functions
          */

@@ -283,12 +291,6 @@ export class FluencePeer {

                 psh.set(serviceFnKey(serviceId, fnName), handler);
             },
-            /**
-             * Register handler which will be called upon particle timeout
-             */
-            timeout: (particleId: string, handler: () => void) => {
-                this._timeoutHandlers.set(particleId, handler);
-            },
         },

         /**
@@ -303,7 +305,15 @@ export class FluencePeer {
             timeout: request.timeout,
         });

-        this.internals.initiateParticle(particle);
+        this.internals.initiateParticle(particle, (stage) => {
+            if (stage.stage === 'interpreterError') {
+                request?.error(stage.errorMessage);
+            }
+
+            if (stage.stage === 'expired') {
+                request?.timeout();
+            }
+        });
     },

     /**

@@ -334,14 +344,13 @@ export class FluencePeer {

     // Queues for incoming and outgoing particles

-    private _incomingParticles = new Subject<Particle>();
-    private _outgoingParticles = new Subject<Particle>();
+    private _incomingParticles = new Subject<ParticleQueueItem>();
+    private _outgoingParticles = new Subject<ParticleQueueItem>();

     // Call service handler

     private _particleSpecificHandlers = new Map<string, Map<string, GenericCallServiceHandler>>();
     private _commonHandlers = new Map<string, GenericCallServiceHandler>();
-    private _timeoutHandlers = new Map<string, () => void>();

     // Internal peer state

@@ -351,15 +360,18 @@ export class FluencePeer {
     private _connection: FluenceConnection;
     private _interpreter: AirInterpreter;
     private _timeouts: Array<NodeJS.Timeout> = [];
-    private _particleQueues = new Map<string, Subject<Particle>>();
+    private _particleQueues = new Map<string, Subject<ParticleQueueItem>>();

     private _startParticleProcessing() {
         this._incomingParticles
             .pipe(
-                tap((x) => x.logTo('debug', 'particle received:')),
+                tap((x) => {
+                    x.particle.logTo('debug', 'particle received:');
+                }),
                 filterExpiredParticles(this._expireParticle.bind(this)),
             )
-            .subscribe((p) => {
+            .subscribe((item) => {
+                const p = item.particle;
                 let particlesQueue = this._particleQueues.get(p.id);

                 if (!particlesQueue) {

@@ -367,34 +379,35 @@ export class FluencePeer {
                     this._particleQueues.set(p.id, particlesQueue);

                     const timeout = setTimeout(() => {
-                        this._expireParticle(p.id);
+                        this._expireParticle(item);
                     }, p.actualTtl());

                     this._timeouts.push(timeout);
                 }

-                particlesQueue.next(p);
+                particlesQueue.next(item);
             });

-        this._outgoingParticles.subscribe((p) => {
-            this._connection.sendParticle(p);
+        this._outgoingParticles.subscribe(async (item) => {
+            await this._connection.sendParticle(item.particle);
+            item.onStageChange({ stage: 'sent' });
         });
     }

-    private _expireParticle(particleId: string) {
-        log.debug(`particle ${particleId} has expired. Deleting particle-related queues and handlers`);
+    private _expireParticle(item: ParticleQueueItem) {
+        const particleId = item.particle.id;
+        log.debug(
+            `particle ${particleId} has expired after ${item.particle.ttl}. Deleting particle-related queues and handlers`,
+        );
+
         this._particleQueues.delete(particleId);
-        const timeoutHandler = this._timeoutHandlers.get(particleId);
-        if (timeoutHandler) {
-            timeoutHandler();
-        }
         this._particleSpecificHandlers.delete(particleId);
-        this._timeoutHandlers.delete(particleId);
+        item.onStageChange({ stage: 'expired' });
     }

     private _createParticlesProcessingQueue() {
-        let particlesQueue = new Subject<Particle>();
+        let particlesQueue = new Subject<ParticleQueueItem>();
         let prevData: Uint8Array = Buffer.from([]);

         particlesQueue
@@ -402,28 +415,42 @@ export class FluencePeer {
                 // force new line
                 filterExpiredParticles(this._expireParticle.bind(this)),
             )
-            .subscribe((x) => {
-                const result = runInterpreter(this.getStatus().peerId, this._interpreter, x, prevData);
+            .subscribe((item) => {
+                const particle = item.particle;
+                const result = runInterpreter(this.getStatus().peerId, this._interpreter, particle, prevData);
+
+                // Do not continue if there was an error in particle interpretation
+                if (!isInterpretationSuccessful(result)) {
+                    item.onStageChange({ stage: 'interpreterError', errorMessage: result.errorMessage });
+                    return;
+                }
+
+                setTimeout(() => {
+                    item.onStageChange({ stage: 'interpreted' });
+                }, 0);

-                prevData = Buffer.from(result.data);
+                const newData = Buffer.from(result.data);
+                prevData = newData;

                 // send particle further if requested
                 if (result.nextPeerPks.length > 0) {
-                    const newParticle = x.clone();
-                    newParticle.data = prevData;
-                    this._outgoingParticles.next(newParticle);
+                    const newParticle = particle.clone();
+                    newParticle.data = newData;
+                    this._outgoingParticles.next({ ...item, particle: newParticle });
                 }

                 // execute call requests if needed
                 // and put particle with the results back to queue
                 if (result.callRequests.length > 0) {
-                    this._execCallRequests(x, result.callRequests).then((callResults) => {
-                        const newParticle = x.clone();
+                    this._execCallRequests(particle, result.callRequests).then((callResults) => {
+                        const newParticle = particle.clone();
                         newParticle.callResults = callResults;
                         newParticle.data = Buffer.from([]);

-                        particlesQueue.next(newParticle);
+                        particlesQueue.next({ ...item, particle: newParticle });
                     });
+                } else {
+                    item.onStageChange({ stage: 'localWorkDone' });
                 }
             });

@@ -547,6 +574,10 @@ export class FluencePeer {
     private _legacyCallServiceHandler: LegacyCallServiceHandler;
 }

+function isInterpretationSuccessful(result: InterpreterResult) {
+    return result.retCode === 0 && !(result?.errorMessage?.length > 0);
+}
+
 function serviceFnKey(serviceId: string, fnName: string) {
     return `${serviceId}/${fnName}`;
 }
@@ -582,17 +613,22 @@ function runInterpreter(

     const toLog: any = { ...interpreterResult };
     toLog.data = dataToString(toLog.data);
-    log.debug('Interpreter result: ', toLog);
+
+    if (isInterpretationSuccessful(interpreterResult)) {
+        log.debug('Interpreter result: ', toLog);
+    } else {
+        log.error('Interpreter failed: ', toLog);
+    }
     return interpreterResult;
 }

-function filterExpiredParticles(onParticleExpiration: (particleId: string) => void) {
+function filterExpiredParticles(onParticleExpiration: (item: ParticleQueueItem) => void) {
     return pipe(
-        tap((p: Particle) => {
-            if (p.hasExpired()) {
-                onParticleExpiration(p.id);
+        tap((item: ParticleQueueItem) => {
+            if (item.particle.hasExpired()) {
+                onParticleExpiration(item);
             }
         }),
-        filter((x: Particle) => !x.hasExpired()),
+        filter((x: ParticleQueueItem) => !x.particle.hasExpired()),
     );
 }
@@ -140,6 +140,19 @@ export class Particle {
     }
 }

+export type ParticleExecutionStage =
+    | { stage: 'received' }
+    | { stage: 'interpreted' }
+    | { stage: 'interpreterError'; errorMessage: string }
+    | { stage: 'localWorkDone' }
+    | { stage: 'sent' }
+    | { stage: 'expired' };
+
+export interface ParticleQueueItem {
+    particle: Particle;
+    onStageChange: (state: ParticleExecutionStage) => void;
+}
+
 function genUUID() {
     return uuidv4();
 }
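Because `ParticleExecutionStage` is a discriminated union on `stage`, callbacks can branch over it exhaustively. A small sketch, not part of the commit (the import path assumes the package's src layout):

import { ParticleExecutionStage } from './internal/Particle';

function describeStage(stage: ParticleExecutionStage): string {
    switch (stage.stage) {
        case 'received':
            return 'particle arrived from the network';
        case 'interpreted':
            return 'AIR interpretation finished';
        case 'interpreterError':
            // errorMessage is only available on this variant
            return `AIR interpretation failed: ${stage.errorMessage}`;
        case 'localWorkDone':
            return 'all local call requests were executed';
        case 'sent':
            return 'particle was sent to the network';
        case 'expired':
            return 'particle TTL elapsed';
    }
}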
@@ -68,12 +68,28 @@ export enum ResultCodes {
  */
 export interface ParticleContext {
     /**
-     * The particle ID
+     * The identifier of particle which triggered the call
      */
     particleId: string;

+    /**
+     * The peer id which created the particle
+     */
     initPeerId: PeerIdB58;

+    /**
+     * Particle's timestamp when it was created
+     */
     timestamp: number;

+    /**
+     * Time to live in milliseconds. The time after which the particle should be expired
+     */
     ttl: number;

+    /**
+     * Particle's signature
+     */
     signature: string;
 }

@@ -320,22 +320,25 @@ export function callFunction(rawFnArgs: Array<any>, def: FunctionCallDef, script
         };
     });

-    // registering handler for particle timeout
-    peer.internals.regHandler.timeout(particle.id, () => {
-        reject(`Request timed out for ${def.functionName}`);
-    });
-
-    peer.internals.initiateParticle(particle);
+    peer.internals.initiateParticle(particle, (stage) => {
+        // If function is void, then it's completed when one of the two conditions is met:
+        // 1. The particle is sent to the network (state 'sent')
+        // 2. All CallRequests are executed, e.g., all variable loading and local function calls are completed (state 'localWorkDone')
+        if (def.returnType.tag === 'void' && (stage.stage === 'sent' || stage.stage === 'localWorkDone')) {
+            resolve(undefined);
+        }
+
+        if (stage.stage === 'expired') {
+            reject(`Request timed out after ${particle.ttl} for ${def.functionName}`);
+        }
+
+        if (stage.stage === 'interpreterError') {
+            reject(`Script interpretation failed for ${def.functionName}: ${stage.errorMessage}`);
+        }
+    });
 });

-    // if the function has void type we should resolve immediately for API symmetry with non-void types
-    // to help with debugging we are returning a promise which can be used to track particle errors
-    // we cannot return a bare promise because JS will lift it, so returning an array with the promise
-    if (def.returnType.tag === 'void') {
-        return Promise.resolve([promise]);
-    } else {
-        return promise;
-    }
+    return promise;
 }

 /**
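From a caller's point of view, the compiled function's promise now settles purely from these stage notifications: for a void Aqua function it resolves once the particle is sent or all local work is done, and it rejects on expiration or an interpretation error. A hedged consumer-side sketch — `someVoidAquaFunction` is a hypothetical generated wrapper, not part of this commit:

import { FluencePeer } from './internal/FluencePeer';

// Hypothetical compiled Aqua function with a void return type.
declare function someVoidAquaFunction(peer: FluencePeer, arg: string): Promise<void>;

async function callAndReport(peer: FluencePeer): Promise<void> {
    try {
        // Resolves once the particle was sent or all local call requests completed.
        await someVoidAquaFunction(peer, 'arg');
    } catch (e) {
        // Rejected with a string such as:
        //   `Request timed out after <ttl> for someVoidAquaFunction`
        //   `Script interpretation failed for someVoidAquaFunction: <errorMessage>`
        console.error(e);
    }
}

Note that the old code returned `Promise.resolve([promise])` for void functions; after this change a plain promise is returned in all cases.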
@@ -18,7 +18,7 @@ import { AirInterpreter, LogLevel as AvmLogLevel } from '@fluencelabs/avm';
 import log from 'loglevel';
 import { CallServiceData, CallServiceResult, CallServiceResultType, ResultCodes } from './commonTypes';
 import { AvmLoglevel, FluencePeer } from './FluencePeer';
-import { Particle } from './Particle';
+import { Particle, ParticleExecutionStage } from './Particle';

 export const createInterpreter = (logLevel: AvmLoglevel): Promise<AirInterpreter> => {
     const logFn = (level: AvmLogLevel, msg: string) => {

@@ -53,6 +53,14 @@ export const MakeServiceCall = (fn: (args: any[]) => CallServiceResultType) => {
     };
 };

+export const handleTimeout = (fn: Function) => (stage: ParticleExecutionStage) => {
+    if (stage.stage === 'expired') {
+        fn();
+    }
+};
+
+export const doNothing = (stage: ParticleExecutionStage) => {};
+
 /**
  * Checks the network connection by sending a ping-like request to relay node
  * @param { FluenceClient } peer - The Fluence Client instance.
@@ -127,11 +135,12 @@ export const checkConnection = async (peer: FluencePeer, ttl?: number): Promise<
             }),
         );

-        peer.internals.regHandler.timeout(particle.id, () => {
-            reject('particle timed out');
-        });
-
-        peer.internals.initiateParticle(particle);
+        peer.internals.initiateParticle(
+            particle,
+            handleTimeout(() => {
+                reject('particle timed out');
+            }),
+        );
     });

     try {