intro: 4-ipfs-code-execution (#15)

Author: folex
Date: 2021-07-21 11:37:25 +03:00
Committed by: GitHub
Parent: 5ecce263a6
Commit: e5e0d59835

35 changed files with 31554 additions and 0 deletions

File diff suppressed because it is too large


@@ -0,0 +1,42 @@
{
  "name": "@fluencelabs/ipfs-execution",
  "version": "0.1.0",
  "description": "An example of executing WASM code from IPFS on files stored in IPFS",
  "main": "dist/index.js",
  "scripts": {
    "compile-aqua": "aqua-cli -i . -o .",
    "prebuild": "npm run compile-aqua",
    "build": "tsc",
    "prestart:local": "npm run build",
    "start:local": "node dist/index.js local",
    "prestart:remote": "npm run build",
    "start:remote": "node dist/index.js stage",
    "start": "npm run start:remote"
  },
  "keywords": [
    "fluence",
    "wasm",
    "ipfs",
    "functions",
    "faas",
    "decentralization",
    "p2p",
    "libp2p"
  ],
  "author": "Fluence Labs",
  "license": "MIT",
  "dependencies": {
    "@fluencelabs/aqua-ipfs": "^0.1.8",
    "@fluencelabs/fluence": "0.9.53",
    "@fluencelabs/fluence-network-environment": "1.0.10",
    "@fluencelabs/aqua-lib": "0.1.9",
    "ipfs-http-client": "^50.1.2",
    "it-all": "^1.0.5",
    "uint8arrays": "^2.1.5",
    "multiaddr": "^10.0.0"
  },
  "devDependencies": {
    "typescript": "^3.9.5",
    "@fluencelabs/aqua-cli": "0.1.9-164"
  }
}


@@ -0,0 +1,7 @@
data SizeResult:
    size: u32
    success: bool
    error: string

service ProcessFiles:
    file_size(file_path: string) -> SizeResult
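
On the TypeScript side, the Aqua compiler surfaces SizeResult as a plain object with size, success and error fields (this is the return type of the generated get_file_size binding further down). A minimal consumption sketch; the unwrapSize helper is hypothetical and not part of this commit:

// Shape of SizeResult as seen from TypeScript; matches the return type of the
// generated get_file_size binding below.
interface SizeResult {
    size: number;
    success: boolean;
    error: string;
}

// Hypothetical helper: collapse the success/error envelope into a number or a thrown Error.
function unwrapSize(result: SizeResult): number {
    if (!result.success) {
        throw new Error(`file_size failed: ${result.error}`);
    }
    return result.size;
}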


@@ -0,0 +1,84 @@
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { set_timeout } from "@fluencelabs/aqua-ipfs";
import {createClient, FluenceClient, setLogLevel} from "@fluencelabs/fluence";
import {stage, krasnodar, Node, testNet} from "@fluencelabs/fluence-network-environment";
import { provideFile, globSource, urlSource } from "./provider";
import { deploy_service, get_file_size, remove_service } from "./process";
async function main(environment: Node[]) {
    // setLogLevel('DEBUG');
    let providerHost = environment[0];
    let providerClient = await createClient(providerHost);
    console.log("📘 uploading .wasm to node %s", providerHost.multiaddr);

    let path = globSource('../service/artifacts/process_files.wasm');
    let { file, swarmAddr, rpcAddr } = await provideFile(path, providerClient);
    console.log("📗 swarmAddr", swarmAddr);
    console.log("📗 rpcAddr", rpcAddr);

    const fluence = await createClient(environment[1]);
    console.log("📗 created a fluence client %s with relay %s", fluence.selfPeerId, fluence.relayPeerId);

    // default IPFS timeout is 1 sec, set to 10 secs to retrieve file from remote node
    await set_timeout(fluence, environment[2].peerId, 10);

    console.log("\n\n📘 Will deploy ProcessFiles service");
    let service_id = await deploy_service(
        fluence,
        environment[2].peerId, file.cid.toString(), rpcAddr,
        (msg, value) => console.log(msg, value),
        { ttl: 10000 }
    );
    console.log("📗 ProcessFiles service is now deployed and available as", service_id);

    console.log("\n\n📘 Will upload file & calculate its size");
    let { file: newFile } = await provideFile(urlSource("https://i.imgur.com/NZgK6DB.png"), providerClient);
    let fileSize = await get_file_size(
        fluence,
        environment[2].peerId, newFile.cid.toString(), rpcAddr, service_id,
        { ttl: 10000 }
    );
    console.log("📗 Calculated file size:", fileSize);

    let result = await remove_service(fluence, environment[2].peerId, service_id);
    console.log("📕 ProcessFiles service removed", result);
    return;
}

let args = process.argv.slice(2);
var environment: Node[];
if (args.length >= 1 && args[0] == "testnet") {
    environment = testNet;
    console.log("📘 Will connect to testNet");
} else if (args[0] == "stage") {
    environment = stage;
    console.log("📘 Will connect to stage");
} else if (args[0] == "krasnodar") {
    environment = krasnodar;
    console.log("📘 Will connect to krasnodar");
} else {
    throw new Error("Specify environment");
}

main(environment)
    .then(() => process.exit(0))
    .catch(error => {
        console.error(error);
        process.exit(1);
    });
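
Note that deploy_service, get_file_size and remove_service are independent calls, so a failure in the middle of main() would leave the ProcessFiles service deployed. A sketch of the same sequence with guaranteed cleanup, using the generated bindings from ./process; measureFileSize is a hypothetical helper, and the relay/TTL choices simply mirror main() above:

import { FluenceClient } from "@fluencelabs/fluence";
import { deploy_service, get_file_size, remove_service } from "./process";

// Hypothetical helper: deploy ProcessFiles from wasmCid, measure fileCid,
// and always remove the service afterwards, even if a step throws.
async function measureFileSize(
    client: FluenceClient,
    relayPeerId: string,
    wasmCid: string,
    fileCid: string,
    providerRpcAddr: string
): Promise<number> {
    const serviceId = await deploy_service(
        client, relayPeerId, wasmCid, providerRpcAddr,
        (msg, value) => console.log(msg, value),
        { ttl: 10000 }
    );
    try {
        const result = await get_file_size(
            client, relayPeerId, fileCid, providerRpcAddr, serviceId,
            { ttl: 10000 }
        );
        if (!result.success) {
            throw new Error(result.error);
        }
        return result.size;
    } finally {
        await remove_service(client, relayPeerId, serviceId);
    }
}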


@@ -0,0 +1,2 @@
export * from './provider'
export * from './process'


@@ -0,0 +1,45 @@
import "@fluencelabs/aqua-lib/builtin.aqua"
import "@fluencelabs/aqua-ipfs/ipfs.aqua"
import "process_files.aqua"

alias PeerId : string
alias CID : string
alias Multiaddr : string
alias Hash : string
alias ServiceID : string

service NewDist("dist"):
    default_module_config(name: string) -> ModuleConfig
    add_module_from_vault(path: string, config: ModuleConfig) -> Hash

service NewOp("op"):
    concat_strings(a: string, b: string) -> string
    array(s: string) -> []string

func deploy_service(relay: PeerId, cid: CID, provider_ipfs: Multiaddr, log: string, u32 -> ()) -> ServiceID:
    on relay:
        get_result <- Ipfs.get_from(cid, provider_ipfs)
        config <- NewDist.default_module_config("process_files")
        module_hash <- NewDist.add_module_from_vault(get_result.path, config)
        prefixed_hash <- NewOp.concat_strings("hash:", module_hash)
        dependencies <- NewOp.array(prefixed_hash)
        blueprint <- Dist.make_blueprint("process_files", dependencies)
        blueprint_id <- Dist.add_blueprint(blueprint)
        service_id <- Srv.create(blueprint_id)
        ProcessFiles service_id
        size <- ProcessFiles.file_size(get_result.path)
    log("Size of the .wasm module is", size.size)
    <- service_id

func get_file_size(relay: PeerId, cid: CID, provider_ipfs: Multiaddr, service_id: ServiceID) -> SizeResult:
    ProcessFiles service_id
    on relay:
        get_result <- Ipfs.get_from(cid, provider_ipfs)
        size <- ProcessFiles.file_size(get_result.path)
    <- size

func remove_service(relay: PeerId, service_id: ServiceID) -> bool:
    on relay:
        Srv.remove(service_id)
    <- true
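
aqua-cli compiles each func above into a TypeScript function (the generated module follows): a connected FluenceClient is prepended to the Aqua arguments, the log callback becomes an ordinary function parameter, and an optional { ttl } config is appended. For reference, the signatures as they appear in the generated code below:

import { FluenceClient } from "@fluencelabs/fluence";

// Signatures produced by the Aqua compiler for the three functions above
// (copied from the generated module that follows).
declare function deploy_service(
    client: FluenceClient, relay: string, cid: string, provider_ipfs: string,
    log: (arg0: string, arg1: number) => void, config?: { ttl?: number }
): Promise<string>;

declare function get_file_size(
    client: FluenceClient, relay: string, cid: string, provider_ipfs: string,
    service_id: string, config?: { ttl?: number }
): Promise<{ error: string; size: number; success: boolean }>;

declare function remove_service(
    client: FluenceClient, relay: string, service_id: string, config?: { ttl?: number }
): Promise<boolean>;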


@@ -0,0 +1,274 @@
/**
*
* This file is auto-generated. Do not edit manually: changes may be erased.
* Generated by Aqua compiler: https://github.com/fluencelabs/aqua/.
* If you find any bugs, please write an issue on GitHub: https://github.com/fluencelabs/aqua/issues
* Aqua version: 0.1.9-164
*
*/
import { FluenceClient, PeerIdB58 } from '@fluencelabs/fluence';
import { RequestFlowBuilder } from '@fluencelabs/fluence/dist/api.unstable';
import { RequestFlow } from '@fluencelabs/fluence/dist/internal/RequestFlow';
export async function deploy_service(client: FluenceClient, relay: string, cid: string, provider_ipfs: string, log: (arg0: string, arg1: number) => void, config?: {ttl?: number}): Promise<string> {
let request: RequestFlow;
const promise = new Promise<string>((resolve, reject) => {
const r = new RequestFlowBuilder()
.disableInjections()
.withRawScript(
`
(xor
(seq
(seq
(seq
(seq
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(call %init_peer_id% ("getDataSrv" "relay") [] relay)
)
(call %init_peer_id% ("getDataSrv" "cid") [] cid)
)
(call %init_peer_id% ("getDataSrv" "provider_ipfs") [] provider_ipfs)
)
(call -relay- ("op" "noop") [])
)
(xor
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(call relay ("ipfs-adapter" "get_from") [cid provider_ipfs] get_result)
(call relay ("dist" "default_module_config") ["process_files"] config)
)
(call relay ("dist" "add_module_from_vault") [get_result.$.path! config] module_hash)
)
(call relay ("op" "concat_strings") ["hash:" module_hash] prefixed_hash)
)
(call relay ("op" "array") [prefixed_hash] dependencies)
)
(call relay ("dist" "make_blueprint") ["process_files" dependencies] blueprint)
)
(call relay ("dist" "add_blueprint") [blueprint] blueprint_id)
)
(call relay ("srv" "create") [blueprint_id] service_id)
)
(call relay (service_id "file_size") [get_result.$.path!] size)
)
(call -relay- ("op" "noop") [])
)
(xor
(call %init_peer_id% ("callbackSrv" "log") ["Size of the .wasm module is" size.$.size!])
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 1])
)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 2])
)
)
(xor
(call %init_peer_id% ("callbackSrv" "response") [service_id])
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 3])
)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 4])
)
`,
)
.configHandler((h) => {
h.on('getDataSrv', '-relay-', () => {
return client.relayPeerId!;
});
h.on('getDataSrv', 'relay', () => {return relay;});
h.on('getDataSrv', 'cid', () => {return cid;});
h.on('getDataSrv', 'provider_ipfs', () => {return provider_ipfs;});
h.on('callbackSrv', 'log', (args) => {log(args[0], args[1]); return {};});
h.onEvent('callbackSrv', 'response', (args) => {
const [res] = args;
resolve(res);
});
h.onEvent('errorHandlingSrv', 'error', (args) => {
// assuming error is the single argument
const [err] = args;
reject(err);
});
})
.handleScriptError(reject)
.handleTimeout(() => {
reject('Request timed out for deploy_service');
})
if(config && config.ttl) {
r.withTTL(config.ttl)
}
request = r.build();
});
await client.initiateFlow(request!);
return promise;
}
export async function get_file_size(client: FluenceClient, relay: string, cid: string, provider_ipfs: string, service_id: string, config?: {ttl?: number}): Promise<{error:string;size:number;success:boolean}> {
let request: RequestFlow;
const promise = new Promise<{error:string;size:number;success:boolean}>((resolve, reject) => {
const r = new RequestFlowBuilder()
.disableInjections()
.withRawScript(
`
(xor
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(call %init_peer_id% ("getDataSrv" "relay") [] relay)
)
(call %init_peer_id% ("getDataSrv" "cid") [] cid)
)
(call %init_peer_id% ("getDataSrv" "provider_ipfs") [] provider_ipfs)
)
(call %init_peer_id% ("getDataSrv" "service_id") [] service_id)
)
(call -relay- ("op" "noop") [])
)
(xor
(seq
(call relay ("ipfs-adapter" "get_from") [cid provider_ipfs] get_result)
(call relay (service_id "file_size") [get_result.$.path!] size)
)
(seq
(call -relay- ("op" "noop") [])
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 1])
)
)
)
(call -relay- ("op" "noop") [])
)
(xor
(call %init_peer_id% ("callbackSrv" "response") [size])
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 2])
)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 3])
)
`,
)
.configHandler((h) => {
h.on('getDataSrv', '-relay-', () => {
return client.relayPeerId!;
});
h.on('getDataSrv', 'relay', () => {return relay;});
h.on('getDataSrv', 'cid', () => {return cid;});
h.on('getDataSrv', 'provider_ipfs', () => {return provider_ipfs;});
h.on('getDataSrv', 'service_id', () => {return service_id;});
h.onEvent('callbackSrv', 'response', (args) => {
const [res] = args;
resolve(res);
});
h.onEvent('errorHandlingSrv', 'error', (args) => {
// assuming error is the single argument
const [err] = args;
reject(err);
});
})
.handleScriptError(reject)
.handleTimeout(() => {
reject('Request timed out for get_file_size');
})
if(config && config.ttl) {
r.withTTL(config.ttl)
}
request = r.build();
});
await client.initiateFlow(request!);
return promise;
}
export async function remove_service(client: FluenceClient, relay: string, service_id: string, config?: {ttl?: number}): Promise<boolean> {
let request: RequestFlow;
const promise = new Promise<boolean>((resolve, reject) => {
const r = new RequestFlowBuilder()
.disableInjections()
.withRawScript(
`
(xor
(seq
(seq
(seq
(seq
(seq
(seq
(call %init_peer_id% ("getDataSrv" "-relay-") [] -relay-)
(call %init_peer_id% ("getDataSrv" "relay") [] relay)
)
(call %init_peer_id% ("getDataSrv" "service_id") [] service_id)
)
(call -relay- ("op" "noop") [])
)
(xor
(call relay ("srv" "remove") [service_id])
(seq
(call -relay- ("op" "noop") [])
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 1])
)
)
)
(call -relay- ("op" "noop") [])
)
(xor
(call %init_peer_id% ("callbackSrv" "response") [true])
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 2])
)
)
(call %init_peer_id% ("errorHandlingSrv" "error") [%last_error% 3])
)
`,
)
.configHandler((h) => {
h.on('getDataSrv', '-relay-', () => {
return client.relayPeerId!;
});
h.on('getDataSrv', 'relay', () => {return relay;});
h.on('getDataSrv', 'service_id', () => {return service_id;});
h.onEvent('callbackSrv', 'response', (args) => {
const [res] = args;
resolve(res);
});
h.onEvent('errorHandlingSrv', 'error', (args) => {
// assuming error is the single argument
const [err] = args;
reject(err);
});
})
.handleScriptError(reject)
.handleTimeout(() => {
reject('Request timed out for remove_service');
})
if(config && config.ttl) {
r.withTTL(config.ttl)
}
request = r.build();
});
await client.initiateFlow(request!);
return promise;
}


@@ -0,0 +1,7 @@
data SizeResult:
    size: u32
    success: bool
    error: string

service ProcessFiles:
    file_size(file_path: string) -> SizeResult


@@ -0,0 +1,43 @@
export const { create, globSource, urlSource, CID } = require('ipfs-http-client');
import { Multiaddr, protocols } from 'multiaddr';
import { get_external_swarm_multiaddr, get_external_api_multiaddr } from "@fluencelabs/aqua-ipfs";
import { FluenceClient } from "@fluencelabs/fluence";

export async function provideFile(source: any, provider: FluenceClient): Promise<{ file: typeof CID, swarmAddr: string, rpcAddr: string }> {
    var swarmAddr;
    var result = await get_external_swarm_multiaddr(provider, provider.relayPeerId!);
    if (result.success) {
        swarmAddr = result.multiaddr;
    } else {
        console.error("Failed to retrieve external swarm multiaddr from %s: ", provider.relayPeerId);
        throw result.error;
    }

    var rpcAddr;
    var result = await get_external_api_multiaddr(provider, provider.relayPeerId!);
    if (result.success) {
        rpcAddr = result.multiaddr;
    } else {
        console.error("Failed to retrieve external api multiaddr from %s: ", provider.relayPeerId);
        throw result.error;
    }

    var rpcMaddr = new Multiaddr(rpcAddr).decapsulateCode(protocols.names.p2p.code);

    const ipfs = create(rpcMaddr);
    console.log("📗 created ipfs client to %s", rpcMaddr);

    await ipfs.id();
    console.log("📗 connected to ipfs");

    const file = await ipfs.add(source);
    console.log("📗 uploaded file:", file);

    // To download the file, uncomment the following code:
    // let files = await ipfs.get(file.cid);
    // for await (const file of files) {
    //     const content = uint8ArrayConcat(await all(file.content));
    //     console.log("📗 downloaded file of length ", content.length);
    // }

    return { file, swarmAddr, rpcAddr };
}
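
A usage sketch for provideFile, mirroring how index.ts calls it: the connected client determines which relay's co-located IPFS daemon receives the upload, and the returned swarm/RPC multiaddrs point at that daemon. The choice of krasnodar[0] here is only an example:

import { createClient } from "@fluencelabs/fluence";
import { krasnodar } from "@fluencelabs/fluence-network-environment";
import { provideFile, urlSource } from "./provider";

// Example only: upload a file to the IPFS daemon next to the relay we connect to,
// and get back its CID plus the node's swarm and RPC multiaddrs.
async function uploadExample() {
    const client = await createClient(krasnodar[0]);
    const { file, swarmAddr, rpcAddr } = await provideFile(
        urlSource("https://i.imgur.com/NZgK6DB.png"),
        client
    );
    console.log("CID:", file.cid.toString(), "swarm:", swarmAddr, "rpc:", rpcAddr);
}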


@@ -0,0 +1,62 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig.json to read more about this file */
/* Basic Options */
// "incremental": true, /* Enable incremental compilation */
"target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
// "lib": [], /* Specify library files to be included in the compilation. */
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
"declaration": true, /* Generates corresponding '.d.ts' file. */
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./dist", /* Redirect output structure to the directory. */
//"rootDir": ".", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* Enable strict null checks. */
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
/* Source Map Options */
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
/* Advanced Options */
"skipLibCheck": true, /* Skip type checking of declaration files. */
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
}
}