concurrency.js 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.todo +1 -1
- package/README.md +86 -105
- package/demos/concurrency.async.js +22 -0
- package/demos/concurrency.process.js +24 -0
- package/demos/concurrency.threaded.js +25 -0
- package/demos/libmap.js +30 -0
- package/index.js +12 -46
- package/package.json +8 -4
- package/tasks.async.js +72 -0
- package/tasks.process.js +65 -0
- package/tasks.thread.js +92 -0
- package/test/test.process.js +85 -0
- package/test/test.promise.js +86 -0
- package/test/test.thread.js +86 -0
- package/worker.js +24 -0
- package/worker_process.js +29 -0
- package/demos/cluster.js +0 -22
- package/demos/demos.cluster.js +0 -66
- package/demos/demos.js +0 -21
- package/demos/demos.process.js +0 -37
- package/demos/demos.threads.async.js +0 -22
- package/demos/demos.threads.js +0 -40
- package/docs/Concurrency.js.Process.jpg +0 -0
- package/docs/Concurrency.js.Process.pdf +0 -0
- package/docs/Concurrency.js.Threads.jpg +0 -0
- package/docs/Concurrency.js.Threads.pdf +0 -0
- package/index.mjs +0 -39
- package/src/worker.cluster.js +0 -122
- package/src/worker.cluster.threads.js +0 -20
- package/src/worker.process.js +0 -128
- package/src/worker.thread.async.js +0 -84
- package/src/worker.threads.js +0 -181
- package/test/_.test-template.js +0 -44
- package/test/demos.cluster.js +0 -46
- package/test/demos.process.js +0 -42
- package/test/demos.threads.js +0 -43
- package/test/test_demos_cluster.js +0 -78
- package/test/test_demos_process.js +0 -53
- package/test/test_demos_promise.js +0 -36
- package/test/test_demos_threads.js +0 -40
- package/test/test_demos_threads_async.js +0 -68
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
const { expect } = require('chai');
const { runProcessTasks } = require('../tasks.process'); // The process-based runner
const path = require('path');

/**
 * Integration tests for the child-process task runner.
 * A plan entry is either a task name (serial step) or an array of task
 * names (parallel step); every completed task appends
 * { index, name, result } to context.results.
 */
describe('Process-Based Task Runner', function() {
  // Spawning processes is heavier than threads; 10s timeout recommended
  this.timeout(10000);

  let resultContext;
  const taskLibraryPath = path.resolve(process.cwd(), './demos/libmap.js');

  beforeEach(() => {
    // Ensure a clean context before every test case
    resultContext = { results: [] };
  });

  it('should execute a serial task in a child process', async () => {
    const plan = ["taskone"];
    const finalContext = await runProcessTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(1);
    expect(finalContext.results[0]).to.deep.include({
      index: 0,
      name: 'taskone'
    });
    expect(finalContext.results[0].result).to.contain('Data from taskone');
  });

  it('should execute parallel tasks in separate child processes', async () => {
    const plan = [["tasktwo", "taskthree"]];
    const finalContext = await runProcessTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(2);

    // Both parallel results must share step index 0.
    // (Fixed: `to.members` was misused here; a deep-equal against [0, 0]
    // states the expectation exactly.)
    const indices = finalContext.results.map(r => r.index);
    expect(indices).to.deep.equal([0, 0]);

    const names = finalContext.results.map(r => r.name);
    expect(names).to.include.members(['tasktwo', 'taskthree']);
  });

  it('should maintain order across complex transitions (Serial -> Parallel)', async () => {
    const plan = [
      "taskone",
      ["tasktwo", "taskthree"],
      "taskfour"
    ];

    const finalContext = await runProcessTasks(plan, resultContext, taskLibraryPath);

    // Total 4 results: 1 (index 0) + 2 (index 1) + 1 (index 2)
    expect(finalContext.results).to.have.lengthOf(4);

    expect(finalContext.results[0].index).to.equal(0);
    expect(finalContext.results[1].index).to.equal(1);
    expect(finalContext.results[2].index).to.equal(1);
    expect(finalContext.results[3].index).to.equal(2);
  });

  it('should return an error if the process encounters an invalid task name', async () => {
    const plan = ["missingTask"];

    // Capture the rejection instead of throwing a sentinel inside `try`:
    // a sentinel would itself be caught by the same `catch` and produce a
    // misleading assertion failure.
    let caught = null;
    try {
      await runProcessTasks(plan, resultContext, taskLibraryPath);
    } catch (err) {
      caught = err;
    }
    expect(caught, 'expected runProcessTasks to reject').to.exist;
    expect(caught.message).to.contain("Task 'missingTask' not found");
  });

  it('should recover if a child process exits unexpectedly', async () => {
    // This tests the logic handling process exits in the coordinator
    const plan = ["taskone"];
    // Simulate a failure by passing a non-existent task file path
    const badPath = './non_existent_file.js';

    let caught = null;
    try {
      await runProcessTasks(plan, resultContext, badPath);
    } catch (err) {
      caught = err;
    }
    // Depending on OS, this might be a module-not-found error or exit code 1;
    // only the fact that it rejects is asserted.
    expect(caught, 'expected the bad task file to cause a rejection').to.exist;
  });
});
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
const { expect } = require('chai');
const { runPromiseTasks } = require('../tasks.async');
const path = require('path');

/**
 * Integration tests for the in-process (promise-based) task runner.
 * Same plan format as the thread/process runners: strings run serially,
 * arrays run in parallel, and results accumulate in context.results.
 */
describe('Promise-Based Task Runner', () => {
  let resultContext;
  // Resolve against cwd like the sibling suites; previously `path` was
  // required but unused and a bare relative string was passed instead,
  // which silently depends on the working directory.
  const taskLibraryPath = path.resolve(process.cwd(), './demos/libmap.js');

  beforeEach(() => {
    // Reset context before each test
    resultContext = { results: [] };
  });

  it('should execute a single serial task and update the context', async () => {
    const plan = ['taskone'];
    const finalContext = await runPromiseTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(1);
    expect(finalContext.results[0]).to.deep.include({
      index: 0,
      name: 'taskone'
    });
    expect(finalContext.results[0].result).to.contain('Data from taskone');
  });

  it('should execute tasks in the correct serial order', async () => {
    const plan = ['taskone', 'taskfour'];
    const finalContext = await runPromiseTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results[0].name).to.equal('taskone');
    expect(finalContext.results[1].name).to.equal('taskfour');
    expect(finalContext.results[1].index).to.equal(1);
  });

  it('should execute parallel blocks correctly', async () => {
    const plan = [['tasktwo', 'taskthree']];
    const finalContext = await runPromiseTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(2);
    // Both parallel tasks should share the same step index
    expect(finalContext.results[0].index).to.equal(0);
    expect(finalContext.results[1].index).to.equal(0);

    const names = finalContext.results.map(r => r.name);
    expect(names).to.include('tasktwo');
    expect(names).to.include('taskthree');
  });

  it('should maintain order for complex plans (Serial -> Parallel -> Serial)', async () => {
    const plan = [
      'taskone',
      ['tasktwo', 'taskthree'],
      'taskfour'
    ];
    const finalContext = await runPromiseTasks(plan, resultContext, taskLibraryPath);

    // Total 4 result entries (1 + 2 + 1)
    expect(finalContext.results).to.have.lengthOf(4);

    // Check indices
    expect(finalContext.results[0].index).to.equal(0); // taskone
    expect(finalContext.results[1].index).to.equal(1); // parallel part 1
    expect(finalContext.results[2].index).to.equal(1); // parallel part 2
    expect(finalContext.results[3].index).to.equal(2); // taskfour
  });

  it('should allow repeated tasks and distinguish them by index', async () => {
    const plan = ['taskone', 'taskone'];
    const finalContext = await runPromiseTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(2);
    expect(finalContext.results[0].index).to.equal(0);
    expect(finalContext.results[1].index).to.equal(1);
    expect(finalContext.results[0].name).to.equal(finalContext.results[1].name);
  });

  it('should throw an error if a task name is not found in the map', async () => {
    const plan = ['nonExistentTask'];

    // Capture the rejection instead of throwing a sentinel inside `try`:
    // a sentinel would itself be caught by the same `catch` and produce a
    // misleading assertion failure.
    let caught = null;
    try {
      await runPromiseTasks(plan, resultContext, taskLibraryPath);
    } catch (err) {
      caught = err;
    }
    expect(caught, 'expected runPromiseTasks to reject').to.exist;
    expect(caught.message).to.contain("Task 'nonExistentTask' not found");
  });
});
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
const { expect } = require('chai');
const { runThreadedTasks } = require('../tasks.thread'); // The threaded version
const path = require('path');

/**
 * Integration tests for the worker-thread task runner.
 * Same plan format as the other runners: strings run serially, arrays run
 * in parallel worker threads, and results accumulate in context.results.
 */
describe('Threaded Task Runner', function() {
  // Increase timeout because spawning threads has overhead
  this.timeout(5000);

  let resultContext;
  const taskLibraryPath = path.resolve(process.cwd(), './demos/libmap.js');

  beforeEach(() => {
    resultContext = { results: [] };
  });

  it('should execute a serial task in a thread and return results', async () => {
    const plan = ["taskone"];
    const finalContext = await runThreadedTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(1);
    expect(finalContext.results[0].name).to.equal('taskone');
    expect(finalContext.results[0].index).to.equal(0);
    expect(finalContext.results[0].result).to.contain('Data from taskone');
  });

  it('should execute parallel tasks across multiple threads', async () => {
    const plan = [["tasktwo", "taskthree", "taskone"]];
    const finalContext = await runThreadedTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(3);

    // All tasks in a parallel block should share the same index
    finalContext.results.forEach(res => {
      expect(res.index).to.equal(0);
    });

    const names = finalContext.results.map(r => r.name);
    expect(names).to.include('tasktwo');
    expect(names).to.include('taskthree');
    expect(names).to.include('taskone');
  });

  it('should preserve execution order and context across serial and parallel boundaries', async () => {
    const plan = [
      "taskone",                // Index 0
      ["tasktwo", "taskthree"], // Index 1
      "taskfour"                // Index 2
    ];

    const finalContext = await runThreadedTasks(plan, resultContext, taskLibraryPath);

    // Total 4 results (1 serial + 2 parallel + 1 serial)
    expect(finalContext.results).to.have.lengthOf(4);

    // Verify correct indexing sequence
    expect(finalContext.results[0].index).to.equal(0);
    expect(finalContext.results[1].index).to.equal(1);
    expect(finalContext.results[2].index).to.equal(1);
    expect(finalContext.results[3].index).to.equal(2);

    expect(finalContext.results[3].name).to.equal('taskfour');
  });

  it('should distinguish between multiple runs of the same task name', async () => {
    const plan = ["taskone", "taskone"];
    const finalContext = await runThreadedTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(2);
    expect(finalContext.results[0].name).to.equal('taskone');
    expect(finalContext.results[1].name).to.equal('taskone');

    // They must have different indices
    expect(finalContext.results[0].index).to.equal(0);
    expect(finalContext.results[1].index).to.equal(1);
  });

  it('should report errors if a string name does not exist in libmap.js', async () => {
    const plan = ["invalidTaskName"];

    // Capture the rejection instead of throwing a sentinel inside `try`:
    // a sentinel would itself be caught by the same `catch` and produce a
    // misleading assertion failure.
    let caught = null;
    try {
      await runThreadedTasks(plan, resultContext, taskLibraryPath);
    } catch (err) {
      caught = err;
    }
    expect(caught, 'expected runThreadedTasks to reject').to.exist;
    expect(caught.message).to.contain("Task 'invalidTaskName' not found");
  });
});
|
package/worker.js
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
// Worker-thread entry point: loads the task library named in workerData
// once at startup, then executes one task per incoming message and replies
// on parentPort with either a success payload or an error message.
const { parentPort, workerData } = require('worker_threads');
const path = require('path');

// Resolve and load the task map a single time when the thread boots.
const resolvedTaskFile = path.resolve(workerData.taskFilePath);
const tasks = require(resolvedTaskFile);

async function handleTaskMessage({ taskName, context, index }) {
  try {
    const fn = tasks[taskName];
    if (!fn) {
      throw new Error(`Task '${taskName}' not found in file: ${workerData.taskFilePath}`);
    }

    const value = await fn(context, index);

    parentPort.postMessage({
      status: 'success',
      result: { index, name: taskName, result: value }
    });
  } catch (error) {
    // Report failures to the coordinator instead of crashing the thread.
    parentPort.postMessage({ status: 'error', error: error.message });
  }
}

parentPort.on('message', handleTaskMessage);
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
// worker_process.js
// Child-process entry point: receives one task descriptor over IPC,
// executes it, sends the outcome back to the parent, then exits.
const path = require('path');

// Listen for messages from the parent process
process.on('message', async ({ taskName, context, index, taskFilePath }) => {
  let payload;
  try {
    // Dynamically load the task map
    const taskMap = require(path.resolve(taskFilePath));
    const taskFunction = taskMap[taskName];

    if (!taskFunction) {
      throw new Error(`Task '${taskName}' not found in ${taskFilePath}`);
    }

    // Execute the task
    const resultValue = await taskFunction(context, index);

    payload = {
      status: 'success',
      result: { index, name: taskName, result: resultValue }
    };
  } catch (error) {
    payload = { status: 'error', error: error.message };
  }

  // Send the result back to the parent, then exit to free system memory.
  // Exiting inside the send() completion callback guarantees the IPC
  // message is flushed first — calling process.exit() in a `finally`
  // block (as before) can race the asynchronous send and drop the reply.
  process.send(payload, () => {
    process.exit(0);
  });
});
|
package/demos/cluster.js
DELETED
|
@@ -1,22 +0,0 @@
|
|
|
1
|
-
|
|
2
|
-
const path = require("path");
|
|
3
|
-
const { _concurrencyClusters } = require("../index.js");
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
async function cluster() {
|
|
7
|
-
let filename = "C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.cluster.js";
|
|
8
|
-
return _concurrencyClusters(
|
|
9
|
-
path.join(filename),
|
|
10
|
-
// __filename,
|
|
11
|
-
8,
|
|
12
|
-
{
|
|
13
|
-
data: {
|
|
14
|
-
url: "https://www.google.com",
|
|
15
|
-
message: "Testing parent data"
|
|
16
|
-
},
|
|
17
|
-
childData: "Test data from child"
|
|
18
|
-
}
|
|
19
|
-
)
|
|
20
|
-
}
|
|
21
|
-
|
|
22
|
-
module.exports = cluster;
|
package/demos/demos.cluster.js
DELETED
|
@@ -1,66 +0,0 @@
|
|
|
1
|
-
|
|
2
|
-
const path = require("path");
|
|
3
|
-
let { _concurrencyClusters } = require("../index.js");
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
// console.log(_concurrencyClusters(
|
|
7
|
-
// path.join("C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.process.js"),
|
|
8
|
-
// 8,
|
|
9
|
-
// { url: "https://www.google.com", data: "Testing parent data", childData: "Test data from child" }
|
|
10
|
-
// ).then((d) => {
|
|
11
|
-
// console.log("Data fetched", JSON.stringify(d));
|
|
12
|
-
// }).catch((e) => {
|
|
13
|
-
// console.log(e.toString());
|
|
14
|
-
// }))
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
// async function concurrency() {
|
|
20
|
-
// let result = await _concurrencyClusters(
|
|
21
|
-
// path.join("C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.cluster.js"),
|
|
22
|
-
// 8,
|
|
23
|
-
// { url: "https://www.google.com", data: "Testing parent data", childData: "Test data from child" }
|
|
24
|
-
// )
|
|
25
|
-
// console.log(result);
|
|
26
|
-
// }
|
|
27
|
-
|
|
28
|
-
// function concurrency() {
|
|
29
|
-
// _concurrencyClusters(
|
|
30
|
-
// path.join("C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.cluster.js"),
|
|
31
|
-
// 8,
|
|
32
|
-
// { url: "https://www.google.com", data: "Testing parent data", childData: "Test data from child" }
|
|
33
|
-
// ).then((d) => {
|
|
34
|
-
// console.log("Data fetched", JSON.stringify(d));
|
|
35
|
-
// }).catch((e) => {
|
|
36
|
-
// console.log(e.toString());
|
|
37
|
-
// })
|
|
38
|
-
// }
|
|
39
|
-
|
|
40
|
-
function concurrency() {
|
|
41
|
-
let filename = "C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.cluster.js";
|
|
42
|
-
return new Promise(function (resolve, reject) {
|
|
43
|
-
_concurrencyClusters(
|
|
44
|
-
path.join(filename),
|
|
45
|
-
8,
|
|
46
|
-
{
|
|
47
|
-
data: {
|
|
48
|
-
message: "Testing parent data",
|
|
49
|
-
url: "https://www.google.com",
|
|
50
|
-
},
|
|
51
|
-
childData: "Test data from child"
|
|
52
|
-
}
|
|
53
|
-
).then((d) => {
|
|
54
|
-
console.log("Data fetched", JSON.stringify(d));
|
|
55
|
-
resolve(d);
|
|
56
|
-
}).catch((e) => {
|
|
57
|
-
console.log(e.toString());
|
|
58
|
-
reject(e);
|
|
59
|
-
});
|
|
60
|
-
});
|
|
61
|
-
}
|
|
62
|
-
|
|
63
|
-
concurrency();
|
|
64
|
-
|
|
65
|
-
setTimeout(() => console.log(`demo.cluster.js: run file PID ${process.pid}: Interval 2: 10000 `, process.pid), 10000);
|
|
66
|
-
setTimeout(() => console.log(`demo.cluster.js: Closing process ${process.pid}: Timeout 1: 10000 `, process.exit()), 20000);
|
package/demos/demos.js
DELETED
|
@@ -1,21 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
*
|
|
3
|
-
* Package: concurrency.js
|
|
4
|
-
* Author: Ganesh B
|
|
5
|
-
* Description: npm module to work with concurrency - worker threads and worker processes easily using simple functions and script files
|
|
6
|
-
* Install: npm i concurrency.js --save
|
|
7
|
-
* Github: https://github.com/ganeshkbhat/concurrency
|
|
8
|
-
* npmjs Link: https://www.npmjs.com/package/
|
|
9
|
-
* File: index.js
|
|
10
|
-
* File Description:
|
|
11
|
-
*
|
|
12
|
-
*/
|
|
13
|
-
|
|
14
|
-
/* eslint no-console: 0 */
|
|
15
|
-
|
|
16
|
-
'use strict';
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
let { _concurrencyThreads, _concurrencyProcesses, _concurrencyClusters, _concurrencyThreadsAsync } = require("../index.js");
|
|
20
|
-
console.log(_concurrencyThreads, _concurrencyProcesses, _concurrencyClusters, _concurrencyThreadsAsync);
|
|
21
|
-
|
package/demos/demos.process.js
DELETED
|
@@ -1,37 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
*
|
|
3
|
-
* Package: concurrency.js
|
|
4
|
-
* Author: Ganesh B
|
|
5
|
-
* Description: npm module to work with concurrency - worker threads and worker processes easily using simple functions and script files
|
|
6
|
-
* Install: npm i concurrency.js --save
|
|
7
|
-
* Github: https://github.com/ganeshkbhat/concurrency
|
|
8
|
-
* npmjs Link: https://www.npmjs.com/package/
|
|
9
|
-
* File: demo.processes.js
|
|
10
|
-
* File Description:
|
|
11
|
-
*
|
|
12
|
-
*/
|
|
13
|
-
|
|
14
|
-
/* eslint no-console: 0 */
|
|
15
|
-
|
|
16
|
-
'use strict';
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
const path = require("path");
|
|
20
|
-
let { _concurrencyProcesses } = require("../index.js");
|
|
21
|
-
|
|
22
|
-
let filename = "C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.process.js";
|
|
23
|
-
_concurrencyProcesses(
|
|
24
|
-
path.join(filename), {
|
|
25
|
-
data: {
|
|
26
|
-
message: "Testing data",
|
|
27
|
-
url: "https://www.google.com"
|
|
28
|
-
}
|
|
29
|
-
}, true).then((d) => {
|
|
30
|
-
console.log("Data fetched: ", JSON.stringify(d));
|
|
31
|
-
}).catch((e) => {
|
|
32
|
-
console.log(e.toString()); setTimeout(() => { process.exit(e); }, 5000)
|
|
33
|
-
});
|
|
34
|
-
|
|
35
|
-
setTimeout(() => console.log(`demo.processes.js: Run file PID ${process.pid}: Interval 2: 10000 `, process.pid), 10000);
|
|
36
|
-
setTimeout(() => console.log(`demo.processes.js: Closing process ${process.pid}: Timeout 1: 10000 `, process.exit()), 20000);
|
|
37
|
-
|
|
@@ -1,22 +0,0 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
const path = require("path");
|
|
4
|
-
let { _concurrencyThreadsAsync } = require("../index.js");
|
|
5
|
-
|
|
6
|
-
let filename = "C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\demos\\demos.threads.js";
|
|
7
|
-
|
|
8
|
-
let threads = _concurrencyThreadsAsync(filename, {
|
|
9
|
-
data: {
|
|
10
|
-
message: "Testing parent data",
|
|
11
|
-
url: "https://www.google.com"
|
|
12
|
-
},
|
|
13
|
-
childData: "Test data from child"
|
|
14
|
-
});
|
|
15
|
-
|
|
16
|
-
console.log(` STDOUT: console.log(threads.stderr); console.log(threads.stdout); `);
|
|
17
|
-
|
|
18
|
-
console.log(threads.stderr);
|
|
19
|
-
console.log(threads.stdout);
|
|
20
|
-
|
|
21
|
-
setTimeout(() => console.log(`demo.threads.async.js: run file PID ${process.pid}: Interval 2: 10000 `, process.pid), 10000);
|
|
22
|
-
setTimeout(() => console.log(`demo.threads.async.js: Closing process ${process.pid}: Timeout 1: 10000 `, process.exit()), 20000);
|
package/demos/demos.threads.js
DELETED
|
@@ -1,40 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
*
|
|
3
|
-
* Package: concurrency.js
|
|
4
|
-
* Author: Ganesh B
|
|
5
|
-
* Description: npm module to work with concurrency - worker threads and worker processes easily using simple functions and script files
|
|
6
|
-
* Install: npm i concurrency.js --save
|
|
7
|
-
* Github: https://github.com/ganeshkbhat/concurrency
|
|
8
|
-
* npmjs Link: https://www.npmjs.com/package/
|
|
9
|
-
* File: demo.threads.js
|
|
10
|
-
* File Description:
|
|
11
|
-
*
|
|
12
|
-
*/
|
|
13
|
-
|
|
14
|
-
/* eslint no-console: 0 */
|
|
15
|
-
|
|
16
|
-
'use strict';
|
|
17
|
-
|
|
18
|
-
const path = require("path");
|
|
19
|
-
let { _concurrencyThreads } = require("../index.js");
|
|
20
|
-
|
|
21
|
-
// _concurrencyThreads(path.join("C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.threads.js"), { data: { url: "https://www.google.com", message: "Testing data" } });
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
_concurrencyThreads(__filename, {
|
|
25
|
-
data: {
|
|
26
|
-
url: "https://www.google.com",
|
|
27
|
-
message: "Testing data"
|
|
28
|
-
},
|
|
29
|
-
childData: "Testing child data"
|
|
30
|
-
}, true).then((d) => console.log(JSON.stringify(d)))
|
|
31
|
-
// .catch((e) => { console.log(e.toString()); setTimeout(() => {process.exit(e);}, 5000) })
|
|
32
|
-
|
|
33
|
-
// setTimeout(() => {
|
|
34
|
-
// console.log(`demo.threads.js: Closing process ${process.pid}: Timeout 1: 20000 `, __filename);
|
|
35
|
-
// process.exit(0);
|
|
36
|
-
// }, 20000);
|
|
37
|
-
|
|
38
|
-
setTimeout(() => console.log(`demo.threads.js: Run file PID ${process.pid}: Interval 2: 10000 `, process.pid), 10000);
|
|
39
|
-
setTimeout(() => console.log(`demo.threads.js: Closing process ${process.pid}: Timeout 1: 10000 `, process.exit()), 20000);
|
|
40
|
-
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
package/index.mjs
DELETED
|
@@ -1,39 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
*
|
|
3
|
-
* Package: concurrency.js
|
|
4
|
-
* Author: Ganesh B
|
|
5
|
-
* Description: npm module to work with concurrency - worker threads and worker processes easily using simple functions and script files
|
|
6
|
-
* Install: npm i concurrency.js --save
|
|
7
|
-
* Github: https://github.com/ganeshkbhat/concurrency
|
|
8
|
-
* npmjs Link: https://www.npmjs.com/package/
|
|
9
|
-
* File: index.mjs
|
|
10
|
-
* File Description:
|
|
11
|
-
*
|
|
12
|
-
*/
|
|
13
|
-
|
|
14
|
-
/* eslint no-console: 0 */
|
|
15
|
-
|
|
16
|
-
'use strict';
|
|
17
|
-
|
|
18
|
-
import {
|
|
19
|
-
_concurrencyThreads,
|
|
20
|
-
_concurrencyProcesses,
|
|
21
|
-
_concurrencyThreadsAsync,
|
|
22
|
-
_concurrencyClusters,
|
|
23
|
-
concurrencyThreads,
|
|
24
|
-
concurrencyProcesses,
|
|
25
|
-
concurrencyClusters,
|
|
26
|
-
concurrencyThreadsAsync
|
|
27
|
-
} from "./index.js";
|
|
28
|
-
|
|
29
|
-
export {
|
|
30
|
-
_concurrencyThreads,
|
|
31
|
-
_concurrencyProcesses,
|
|
32
|
-
_concurrencyClusters,
|
|
33
|
-
_concurrencyThreadsAsync,
|
|
34
|
-
concurrencyThreads,
|
|
35
|
-
concurrencyProcesses,
|
|
36
|
-
concurrencyClusters,
|
|
37
|
-
concurrencyThreadsAsync
|
|
38
|
-
};
|
|
39
|
-
export default _concurrencyProcesses;
|