dcp-client 4.4.9-0 → 4.4.10-0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/dcp-client-bundle.js +1 -1
- package/dist/dcp-client-bundle.js.map +1 -1
- package/docs/CODEOWNERS +30 -44
- package/examples/nodejs/remote-data/simple-job-remote-data-pattern.js +121 -0
- package/examples/nodejs/remote-data/simple-job-remote-function.js +111 -0
- package/examples/nodejs/remote-data/simple-job-remote-input.js +127 -0
- package/examples/nodejs/simple-job-es6.mjs +60 -0
- package/examples/nodejs/simple-job.js +62 -0
- package/examples/nodejs/simple-worker.js +120 -8
- package/libexec/sandbox/bootstrap.js +1 -1
- package/libexec/sandbox/bravojs-env.js +1 -4
- package/libexec/sandbox/lift-webgpu.js +8 -37
- package/package.json +1 -1
- package/examples/nodejs/deploy-job.mjs +0 -28
- package/examples/nodejs/events.js +0 -67
- package/examples/nodejs/minimal.js +0 -39
- package/examples/nodejs/remote-data/dataPattern.js +0 -92
- package/examples/nodejs/remote-data/dataSet.js +0 -98
- package/examples/nodejs/remote-data/dataURL.js +0 -99
- package/examples/nodejs/workFunctionURL.js +0 -66
package/examples/nodejs/simple-worker.js
CHANGED

@@ -1,15 +1,127 @@
 #! /usr/bin/env node
+/**
+ * @file simple-worker.js
+ *
+ * Simple NodeJS application showing how to implement a simple DCP worker using the native evaluator.
+ * Look at https://gitlab.com/Distributed-Compute-Protocol/dcp-native/-/releases to download the evaluator
+ * and start it using one of the dcp-evaluator-* scripts from the dcp-worker npm package.
+ *
+ * *********************************** NOTE 1 ***********************************
+ * Your keystore should be placed in your home directory in .dcp/default.keystore.
+ * When using the dcp-client API in NodeJS, this keystore will be used for communicating over DCP.
+ *
+ * *********************************** NOTE 2 ***********************************
+ * Executing Job with DCP Worker
+ *
+ * Run the following commands in your terminal:
+ * ```
+ * npm add --global dcp-worker
+ * dcp-worker --allowedOrigins http://localhost:<port number>
+ * ```
+ *
+ * @author Wes Garland <wes@distributive.network>
+ * @author Kevin Yu <kevin@distributive.network>
+ * @date Aug 2019, April 2020, June 2024
+ */
 
-
+'use strict';
 
-
-
-
-
-
+/**
+ * Setup event listeners for jobs
+ *
+ * @param {object} job - the job handle object
+ * @returns {void}
+ */
+function addWorkerEventListeners(worker)
+{
+  // The start event fires after the worker is ready to fetch tasks, but
+  // before the first task has been fetched.
+  worker.on('start', () => {
+    console.log('Worker ready to fetch tasks');
+  });
 
-
+  // The fetch event is fired when the worker has fetched a task from the
+  // task distributor.
+  worker.on('fetch', (task) => {
+    if (task.slices)
+      console.log(`Worker has fetched ${Object.keys(task.slices).length} slices`);
+  });
+
+  // The sandbox event is emitted when a new sandbox is created. The event
+  // handler receives as its sole argument an EventEmitter which is used
+  // to emit sandbox events
+  worker.on('sandbox', (sandbox) => {
+    // The progress event is fired to indicate progress throughout a job this is triggered by the progress() call in the work function, however, quickly-repeating calls to progress() may be composited into a single event.
+    sandbox.on('progress', (event) => {
+      console.log('Sandbox progress', event);
+    });
+  });
+
+  // After a result is sent to the result-submitter for consideration
+  worker.on('payment', (event) => {
+    console.log('Work payment', event);
+  });
+
+  // Result event is fired immediately after the worker sends a result to
+  // the the result data sink
+  worker.on('result', (event) => {
+    console.log('Worker has sent a result back to the scheduler');
+  });
+
+  // Worker has stopped working and no new results will be sent to the
+  // result submitter or the result data sink and no more slices will be
+  // returned to the scheduler
+  worker.on('end', () => {
+    console.log('Worker has stopped working');
+  });
+
+  // Log errors
+  worker.on('error', console.error);
+}
+
+/**
+ * Main function to deploy a job with remote work function
+ *
+ * @returns {void}
+ */
+async function main()
+{
+  const wallet = require('dcp/wallet');
+  const { Worker: DCPWorker } = require('dcp/worker');
+
+  // Bank account funds will deposit into
+  const paymentAddress = (await wallet.get()).address;
+
+  // Retrieve auth keystore object
+  const identity = await wallet.getId();
+
+  // Define stand-alone worker options
+  const sawOptions = {
+    hostname: dcpConfig.evaluator.location.hostname,
+    port: Number(dcpConfig.evaluator.location.port),
+  };
+
+  // Instantiate DCP worker
+  const worker = new DCPWorker(
+    identity,
+    {
+      cores: { cpu: 1 },
+      paymentAddress,
+      sandboxOptions: {
+        SandboxConstructor: require('../../lib/standaloneWorker').workerFactory(sawOptions),
+      },
+      allowOrigins: {
+        any: ['http://localhost:12345', 'http://localhost:12346'],
+      },
+    });
+
+  // Listen for work emitted events
+  addWorkerEventListeners(worker);
+
+
+  // Start the worker
+  await worker.start();
 }
 
-require('
+require('../../').init().then(main);
 
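The new simple-worker.js reads the evaluator address from the global dcpConfig that dcp-client sets up during init(). A minimal sketch of the shape it expects, with placeholder hostname and port values (these are assumptions, not defaults shipped with dcp-client):

```js
// Hypothetical illustration of the config shape simple-worker.js reads; the
// real values come from dcp-client's own dcpConfig, not from this snippet.
const dcpConfig = {
  evaluator: {
    location: {
      hostname: 'localhost', // assumption: evaluator running on the same machine
      port: 9000,            // assumption: the port your dcp-evaluator-* script listens on
    },
  },
};

// The example then builds its stand-alone worker options from that location:
const sawOptions = {
  hostname: dcpConfig.evaluator.location.hostname,
  port: Number(dcpConfig.evaluator.location.port),
};
console.log(sawOptions); // { hostname: 'localhost', port: 9000 }
```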
package/libexec/sandbox/bootstrap.js
CHANGED

@@ -145,7 +145,7 @@ self.wrapScriptLoading({ scriptName: 'bootstrap', finalScript: true }, function
   });
 }
 
-function workerBootstrap$work$reject(reason
+function workerBootstrap$work$reject(reason) {
   protectedStorage.workRejectReason = reason; // Memoize reason
   throw Symbol.for('workReject');
 }
package/libexec/sandbox/bravojs-env.js
CHANGED

@@ -359,11 +359,8 @@ prepPyodide`);
 }
 
 if (error === Symbol.for('workReject')) {
-  err['message'] = protectedStorage.workRejectReason;
-  err['name'] = 'EWORKREJECT';
-  err['stack'] = 'Slice was rejected in the sandbox by work.reject'
   reportTimes(metrics);
-  ring3PostMessage({ request: '
+  ring3PostMessage({ request: 'workReject', reason: protectedStorage.workRejectReason });
 }
 else
   ring3PostMessage({request: 'workError', error: err});
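Taken together, the bootstrap.js and bravojs-env.js hunks show the work.reject flow these sandbox scripts rely on: memoize the rejection reason, throw a well-known Symbol to unwind the work function, and have the catch site post the memoized reason instead of a synthesized Error. A self-contained sketch of that pattern, using generic stand-ins rather than the actual dcp-client internals:

```js
// protectedStorage and postMessage below are stand-ins for the sandbox's
// protectedStorage and ring3PostMessage; names and shapes are assumptions.
const protectedStorage = {};
const postMessage = (msg) => console.log(msg);

function rejectWork(reason)
{
  protectedStorage.workRejectReason = reason; // memoize reason
  throw Symbol.for('workReject');             // sentinel value, not an Error
}

function runWork(workFunction)
{
  try {
    return workFunction();
  } catch (error) {
    if (error === Symbol.for('workReject'))
      postMessage({ request: 'workReject', reason: protectedStorage.workRejectReason });
    else
      postMessage({ request: 'workError', error });
  }
}

runWork(() => rejectWork('input out of range'));
// -> { request: 'workReject', reason: 'input out of range' }
```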
package/libexec/sandbox/lift-webgpu.js
CHANGED

@@ -224,13 +224,9 @@ self.wrapScriptLoading({ scriptName: 'lift-webgpu' }, function liftWebGPU$$fn(pr
   }
 }
 
-// Want to use the original
-const underlyingGPUQueue = GPUQueue;
-const underlyingGPUQueueProto = GPUQueue.prototype;
+// Want to use the submit/onSubmittedWorkDone original functions for timing.
 const underlyingOnSubmittedWorkDone = GPUQueue.prototype.onSubmittedWorkDone;
 const underlyingSubmit = GPUQueue.prototype.submit;
-const underlyingRequestDevice = GPUAdapter.prototype.requestDevice;
-const underlyingDestroy = GPUDevice.prototype.destroy;
 
 // some of them will get re-wrapped, that's fine, we always refer to the original function
 const requiredWrappingGPUClasses = [
@@ -254,35 +250,13 @@ self.wrapScriptLoading({ scriptName: 'lift-webgpu' }, function liftWebGPU$$fn(pr
 requiredWrappingGPUClasses.forEach(liftWebGPUPrototype);
 
 let locked = false;
-const
+const submittedDonePromises = [];
 protectedStorage.webGPU = {
   lock: () => { locked = true; },
   unlock: () => { locked = false; },
-  waitAllCommandToFinish: () => { return Promise.allSettled(
+  waitAllCommandToFinish: () => { return Promise.allSettled(submittedDonePromises); },
 };
 
-// currently, the only queue exposed is the default queue
-GPUAdapter.prototype.requestDevice = async function requestDevice(...args)
-{
-  const device = await underlyingRequestDevice.call(this, ...args);
-  gpuQueueRegistry.push(device.queue);
-  return device;
-}
-
-/**
- * Redefine GPUQueue constructor in order to add the queue to our webGPUQueueRegistry.
- * Need to redefine the `GPUQueue` function for instanceof checks to work with the changed constructor,
- * then add the original prototype to the new object.
- */
-GPUQueue = function GPUQueue$$constructor(...args)
-{
-  const queue = new underlyingGPUQueue(...args);
-  gpuQueueRegistry.push(queue);
-  return queue;
-}
-GPUQueue.prototype = underlyingGPUQueueProto;
-GPUQueue.prototype.constructor = GPUQueue;
-
 // our submit keeps a global tracker of all submissions, so we can track the time of each submission
 GPUQueue.prototype.submit = function submit(commandBuffers)
 {
@@ -291,19 +265,16 @@ self.wrapScriptLoading({ scriptName: 'lift-webgpu' }, function liftWebGPU$$fn(pr
   underlyingSubmit.call(this, commandBuffers);
 
   const submitTime = performance.now();
-  underlyingOnSubmittedWorkDone.call(this).then(() => {
+  const submitDonePromise = underlyingOnSubmittedWorkDone.call(this).then(() => {
+    const idx = submittedDonePromises.indexOf(submitDonePromise);
+    submittedDonePromises.splice(idx);
+
     const completedAt = performance.now();
     const duration = new TimeInterval();
     duration.overrideInterval(submitTime, completedAt);
     webGPUTimer.push(duration);
   });
+  submittedDonePromises.push(submitDonePromise);
 }
 
-GPUDevice.prototype.destroy = function destroy()
-{
-  const idx = gpuQueueRegistry.indexOf(this.queue);
-  gpuQueueRegistry.splice(idx);
-  underlyingDestroy.call(this);
-}
 });
-
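The net effect of the lift-webgpu.js hunks is to drop the GPU queue registry and instead track one completion promise per submit(): the wrapped submit records the promise returned by the underlying onSubmittedWorkDone(), removes it once it settles, and waitAllCommandToFinish() awaits whatever is still outstanding. A self-contained sketch of that tracking pattern, with a mock queue standing in for GPUQueue (names here are illustrative, not the package's API):

```js
// Mock "queue" whose work completes asynchronously; stands in for a GPUQueue.
const mockQueue = {
  onSubmittedWorkDone: () => new Promise((resolve) => setTimeout(resolve, 10)),
};

const submittedDonePromises = [];

function trackedSubmit(queue)
{
  // Record the completion promise, and drop it from the list once it settles.
  const submitDonePromise = queue.onSubmittedWorkDone().then(() => {
    const idx = submittedDonePromises.indexOf(submitDonePromise);
    if (idx !== -1)
      submittedDonePromises.splice(idx, 1);
  });
  submittedDonePromises.push(submitDonePromise);
}

async function waitAllCommandToFinish()
{
  // Settle everything still outstanding, as the new waitAllCommandToFinish does.
  await Promise.allSettled(submittedDonePromises);
}

trackedSubmit(mockQueue);
trackedSubmit(mockQueue);
waitAllCommandToFinish().then(() => console.log('all submitted work done'));
```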
package/package.json
CHANGED
package/examples/nodejs/deploy-job.mjs
DELETED

@@ -1,28 +0,0 @@
-// How to deploy a job using ES6 modules.
-
-import { init } from '../../index.js';
-
-const { compute } = await init();
-
-const inputSet = [1, 2, 3, 4];
-
-const workFunction = (input) => {
-  progress();
-  return input;
-};
-
-const job = compute.for(inputSet, workFunction);
-
-job.public.name = 'ESM example';
-
-job.on('accepted', ({ job: { id } }) => {
-  console.log('Job accepted with id', id);
-});
-
-job.on('result', ({ result }) => {
-  console.log('Received a result:', result);
-});
-
-const results = await job.exec();
-
-console.log(results);
package/examples/nodejs/events.js
DELETED

@@ -1,67 +0,0 @@
-#!/usr/bin/env node
-
-/**
- * @file events.js
- * Sample node application showing how to deploy a DCP job whilst receiving
- * events describing the current state of the job, processing results
- * as they are received, and so on.
- *
- * Note: Your keystore should be placed in your home directory in .dcp/default.keystore.
- * When using the dcp-client API in NodeJS, this keystore will be used for communicating over DCP.
- *
- * @author Wes Garland, wes@kingsds.network
- * @date Aug 2019, April 2020
- */
-
-const SCHEDULER_URL = new URL('https://scheduler.distributed.computer');
-
-/** Main program entry point */
-async function main() {
-  const compute = require('dcp/compute');
-  const wallet = require('dcp/wallet');
-  let startTime;
-
-  const job = compute.for(
-    ['red', 'green', 'yellow', 'blue', 'brown', 'orange', 'pink'],
-    (colour) => {
-      progress(0);
-      let sum = 0;
-      for (let i = 0; i < 10000000; i += 1) {
-        progress(i / 10000000);
-        sum += Math.random();
-      }
-      return { colour, sum };
-    },
-  );
-
-  job.on('accepted', () => {
-    console.log(` - Job accepted by scheduler, waiting for results`);
-    console.log(` - Job has id ${job.id}`);
-    startTime = Date.now();
-  });
-
-  job.on('readystatechange', (arg) => {
-    console.log(`new ready state: ${arg}`);
-  });
-
-  job.on('result', (ev) => {
-    console.log(
-      ` - Received result for slice ${ev.sliceNumber} at ${
-        Math.round((Date.now() - startTime) / 100) / 10
-      }s`,
-    );
-    console.log(` * Wow! ${ev.result.colour} is such a pretty colour!`);
-  });
-
-  job.public.name = 'events example, nodejs';
-
-  const ks = await wallet.get(); /* usually loads ~/.dcp/default.keystore */
-  job.setPaymentAccountKeystore(ks);
-  const results = await job.exec(compute.marketValue);
-  console.log('results=', Array.from(results));
-}
-
-/* Initialize DCP Client and run main() */
-require('../..')
-  .init(SCHEDULER_URL)
-  .then(main)
package/examples/nodejs/minimal.js
DELETED

@@ -1,39 +0,0 @@
-#! /usr/bin/env node
-/**
- * @file minimal.html
- *
- * Sample NodeJS application showing how to deploy a minimal DCP job.
- *
- * Note: Your keystore should be placed in your home directory in .dcp/default.keystore.
- * When using the dcp-client API in NodeJS, this keystore will be used for communicating over DCP.
- *
- * @author Wes Garland, wes@kingsds.network
- * @date Aug 2019, April 2020
- */
-
-const SCHEDULER_URL = new URL('https://scheduler.distributed.computer') ;
-
-async function main() {
-  const compute = require('dcp/compute');
-
-  let job = compute.for(1, 10,
-    function(i) {
-      progress(0);
-      let sum = 0;
-      for (let i =0; i < 10000000; i++) {
-        sum += Math.random();
-        progress(i/10000000);
-      }
-      return i*3
-    }
-  )
-
-  job.on('result',function(ev) {
-    console.log('received result', ev.result);
-  })
-
-  job.public.name = 'minimal example, nodejs';
-  await job.exec(compute.marketValue);
-}
-
-require('dcp-client').init(SCHEDULER_URL, true).then(main);
package/examples/nodejs/remote-data/dataPattern.js
DELETED

@@ -1,92 +0,0 @@
-#! /usr/bin/env node
-/*
- * @file dataPattern.js - An example on using RemoteDataPattern class that works perfectly for
- * slices which their uri's has similar patterns. For more information please refer
- * to https://gitlab.com/Distributed-Compute-Protocol/dcp-docs-wes/-/blob/wip/scheduler/remote-storage.md#data-movement
- *
- * There are two ways to stringify input data before sending on the wire
- * - JSON: It is a default method
- * - KVIN: if kvin.serialize(inputData)is being used we need to define the Content-Type `res.header("Content-Type", "application/x-kvin")`
- *
- * Note that to allow workers fetch data from URLs,
- * - in the node worker: add `-a 'http://localhost:12345'` at the end of starting worker command.
- *
- * - in the localExec:
- * ```dcpConfig = require('dcp/dcp-config');
- * dcpConfig.worker.allowOrigins.any.push('http://localhost:12345');
- * ```
- * - in the browser worker: in the console run `dcpConfig.worker.allowOrigins.any.push('http://localhost:12345')` also,
- * you need to add following lines in the response function in slice.get
- * ```res.header("Access-Control-Allow-Headers", "content-type");
- * res.header("Access-Control-Allow-Origin", "*");
- * ```
- * @author Nazila Akhavan <nazila@kingsds.network>
- * @date Sep. 2021
- *
- */
-
-const SCHEDULER_URL = new URL('https://scheduler.distributed.computer');
-const express = require('express');
-
-/** Main program entry point */
-async function main() {
-  const compute = require('dcp/compute');
-
-  const app = express();
-  const port1 = 12345;
-  app.get('/hello-world-1.json', (req, res) => {
-    let a = {a:1, b:2, c:3};
-    res.send(a);
-  })
-  app.get('/hello-world-2.json', (req, res) => {
-    let a = {x:1, y:2, z:3};
-    res.send(a);
-  })
-  app.listen(port1, () => {
-    console.log(`port ${port1} is ready!`)
-  })
-
-  const { RemoteDataPattern } = require('dcp/compute');
-  let remoteData = new RemoteDataPattern('http://localhost:12345/hello-world-{slice}.json',2)
-
-  let workerFunction = `async function(c){
-    let sum = 0;
-    for (let i = 0; i < 10000000; i += 1) {
-      progress(i / 10000000);
-      sum += Math.random();
-    }
-    return c;
-  }`
-
-  const job = compute.for(
-    remoteData,
-    workerFunction,
-  );
-
-  job.on('accepted', () => {
-    console.log(` - Job accepted by scheduler, waiting for results`);
-    console.log(` - Job has id ${job.id}`);
-  });
-
-  job.on('readystatechange', (arg) => {
-    console.log(`new ready state: ${arg}`);
-  });
-
-  job.on('result', (ev) => {
-    console.log(ev);
-  });
-
-  job.public.name = 'RemoteDataPattern-example';
-
-  const results = await job.exec(compute.marketValue);
-
-  console.log('results=', Array.from(results));
-}
-
-/* Initialize DCP Client and run main() */
-require('../../..')
-  .init(SCHEDULER_URL)
-  .then(main)
-  .catch(console.error)
-  .finally(process.exit);
-
package/examples/nodejs/remote-data/dataSet.js
DELETED

@@ -1,98 +0,0 @@
-#! /usr/bin/env node
-/**
- * @file dataSet.js - An example on using RemoteDataSet class
- * For more information please refer to https://gitlab.com/Distributed-Compute-Protocol/dcp-docs-wes/-/blob/wip/scheduler/remote-storage.md#data-movement
- *
- * @author Nazila Akhavan <nazila@kingsds.network>
- * @date Sep. 2021
- *
- *
- * There are two ways to stringify input data before sending on the wire
- * - JSON: It is a default method
- * - KVIN: if kvin.serialize(inputData)is being used we need to define the Content-Type `res.header("Content-Type", "application/x-kvin")`
- *
- * Note that to allow workers fetch data from URLs,
- * - in the node worker: add `-a 'http://localhost:12345' 'http://localhost:12346'` at the end of starting worker command.
- *
- * - in the localExec:
- * ```dcpConfig = require('dcp/dcp-config');
- * dcpConfig.worker.allowOrigins.any.push('http://localhost:12345', 'http://localhost:12346');
- * ```
- * - in the browser worker: in the console run `dcpConfig.worker.allowOrigins.any.push('http://localhost:12345', 'http://localhost:12346')` also,
- * you need to add following lines in the response function in slice.get
- * ```res.header("Access-Control-Allow-Headers", "content-type");
- * res.header("Access-Control-Allow-Origin", "*");
- * ```
- */
-
-const SCHEDULER_URL = new URL('https://scheduler.distributed.computer');
-const express = require('express');
-
-/** Main program entry point */
-async function main() {
-  const compute = require('dcp/compute');
-
-  const slice1 = express();
-  const port1 = 12345;
-  slice1.get('/hello.json', (req, res) => {
-    let a = "Hello world!";
-    res.send(a);
-  })
-  slice1.listen(port1, () => {
-    console.log(`port ${port1} is ready!`)
-  })
-
-  const slice2 = express();
-  const port2 = 12346;
-  slice2.get('/', (req, res) => {
-    let a = {x:1, y:2, z:3};
-    res.send(a);
-  })
-  slice2.listen(port2, () => {
-    console.log(`port ${port2} is ready!`)
-  })
-
-  const { RemoteDataSet } = require('dcp/compute');
-  let remoteDataSet = new RemoteDataSet([`http://localhost:12345/hello.json`, `http://localhost:12346`])
-
-  let workerFunction = `async function(c){
-    let sum = 0;
-    for (let i = 0; i < 10000000; i += 1) {
-      progress(i / 10000000);
-      sum += Math.random();
-    }
-    return c;
-  }`
-
-  const job = compute.for(
-    remoteDataSet,
-    workerFunction,
-  );
-
-  job.on('accepted', () => {
-    console.log(` - Job accepted by scheduler, waiting for results`);
-    console.log(` - Job has id ${job.id}`);
-  });
-
-  job.on('readystatechange', (arg) => {
-    console.log(`new ready state: ${arg}`);
-  });
-
-  job.on('result', (ev) => {
-    console.log(ev);
-  });
-
-  job.public.name = 'RemoteDataSet-example';
-
-  const results = await job.exec(compute.marketValue);
-
-  console.log('results=', Array.from(results));
-}
-
-/* Initialize DCP Client and run main() */
-require('../../..')
-  .init(SCHEDULER_URL)
-  .then(main)
-  .catch(console.error)
-  .finally(process.exit);
-
package/examples/nodejs/remote-data/dataURL.js
DELETED

@@ -1,99 +0,0 @@
-#! /usr/bin/env node
-/**
- * @file dataURL.js - An example on deploying a job using URL for input data
- *
- * @author Nazila Akhavan <nazila@kingsds.network>
- * @date Sep. 2021
- *
- *
- * There are two ways to stringify input data before sending on the wire
- * - JSON: It is a default method
- * - KVIN: if kvin.serialize(inputData)is being used we need to define the Content-Type `res.header("Content-Type", "application/x-kvin")`
- *
- * Note that to allow workers fetch data from URLs,
- * - in the node worker: add `-a 'http://localhost:12345' 'http://localhost:12346'` at the end of starting worker command.
- *
- * - in the localExec:
- * ```dcpConfig = require('dcp/dcp-config');
- * dcpConfig.worker.allowOrigins.any.push(['http://localhost:12345' 'http://localhost:12346']);
- * ```
- * - in the browser worker: in the console run `dcpConfig.worker.allowOrigins.any.push('http://localhost:12345', 'http://localhost:12346')` also,
- * you need to add following lines in the response function in slice.get
- * ```res.header("Access-Control-Allow-Headers", "content-type");
- * res.header("Access-Control-Allow-Origin", "*");
- * ```
- */
-
-const SCHEDULER_URL = new URL('https://scheduler.distributed.computer');
-const kvin = require('kvin');
-const express = require('express');
-
-/** Main program entry point */
-async function main() {
-  const compute = require('dcp/compute');
-
-  /* example of using the default */
-  const slice1 = express();
-  const port1 = 12345;
-  slice1.get('/', (req, res) => {
-    let a = {x:1, y:2};
-    res.send(a);
-  })
-  slice1.listen(port1, () => {
-    console.log(`port ${port1} is ready!`)
-  })
-
-  /* example of using the kvin */
-  const slice2 = express();
-  const port2 = 12346;
-  slice2.get('/', (req, res) => {
-    let a = {x:1, y:2};
-    res.header("Content-Type", "application/x-kvin");
-    res.send(kvin.serialize(a));
-  })
-  slice2.listen(port2, () => {
-    console.log(`port ${port2} is ready!`);
-  })
-
-  let dcp_inputDataArray_urls = [new URL(`http://localhost:${port1}/`) , new URL(`http://localhost:${port2}/`) ];
-
-  let workerFunction = `async function(c){
-    let sum = 0;
-    for (let i = 0; i < 10000000; i += 1) {
-      progress(i / 10000000);
-      sum += Math.random();
-    }
-    return c;
-  }`
-
-  const job = compute.for(
-    dcp_inputDataArray_urls,
-    workerFunction,
-  );
-
-  job.on('accepted', () => {
-    console.log(` - Job accepted by scheduler, waiting for results`);
-    console.log(` - Job has id ${job.id}`);
-  });
-
-  job.on('readystatechange', (arg) => {
-    console.log(`new ready state: ${arg}`);
-  });
-
-  job.on('result', (ev) => {
-    console.log(ev);
-  });
-
-  job.public.name = 'URL-data-example';
-
-  const results = await job.exec(compute.marketValue);
-  console.log('results=', Array.from(results));
-}
-
-/* Initialize DCP Client and run main() */
-require('../../..')
-  .init(SCHEDULER_URL)
-  .then(main)
-  .catch(console.error)
-  .finally(process.exit);
-