concurrency.js 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.todo +1 -1
- package/README.md +86 -105
- package/demos/concurrency.async.js +22 -0
- package/demos/concurrency.process.js +24 -0
- package/demos/concurrency.threaded.js +25 -0
- package/demos/libmap.js +30 -0
- package/index.js +12 -46
- package/package.json +8 -4
- package/tasks.async.js +72 -0
- package/tasks.process.js +65 -0
- package/tasks.thread.js +92 -0
- package/test/test.process.js +85 -0
- package/test/test.promise.js +86 -0
- package/test/test.thread.js +86 -0
- package/worker.js +24 -0
- package/worker_process.js +29 -0
- package/demos/cluster.js +0 -22
- package/demos/demos.cluster.js +0 -66
- package/demos/demos.js +0 -21
- package/demos/demos.process.js +0 -37
- package/demos/demos.threads.async.js +0 -22
- package/demos/demos.threads.js +0 -40
- package/docs/Concurrency.js.Process.jpg +0 -0
- package/docs/Concurrency.js.Process.pdf +0 -0
- package/docs/Concurrency.js.Threads.jpg +0 -0
- package/docs/Concurrency.js.Threads.pdf +0 -0
- package/index.mjs +0 -39
- package/src/worker.cluster.js +0 -122
- package/src/worker.cluster.threads.js +0 -20
- package/src/worker.process.js +0 -128
- package/src/worker.thread.async.js +0 -84
- package/src/worker.threads.js +0 -181
- package/test/_.test-template.js +0 -44
- package/test/demos.cluster.js +0 -46
- package/test/demos.process.js +0 -42
- package/test/demos.threads.js +0 -43
- package/test/test_demos_cluster.js +0 -78
- package/test/test_demos_process.js +0 -53
- package/test/test_demos_promise.js +0 -36
- package/test/test_demos_threads.js +0 -40
- package/test/test_demos_threads_async.js +0 -68
package/.todo
CHANGED
@@ -6,7 +6,7 @@
 [*] Easy Cluster (Processess) functionality
 [*] Add Cluster numbers to Concurrency Threading
 [*] Add Demos for all functions
-[] Add Tests for all functions
+[*] Add Tests for all functions
 [] Add thread numbers to Concurrency Threading. Create different function
 [] Add Process numbers to Concurrency Threading. Create different function
 [] Add Thread Async numbers to Concurrency Async Threading. Create different function

package/README.md
CHANGED
@@ -2,139 +2,120 @@
 
 npm module to work with concurrency - worker threads and worker processes easily using simple functions and script files
 
-Find the demos in the [demos folder](./demos)
 
+# Installation
 
-#### Cluster Methods
-
-
-`_concurrencyClusters(filename = __filename, num = cpus().length, options = {}, greet = false)`
 
 ```
-
-const path = require("path");
-let { _concurrencyClusters } = require("concurrency.js");
-
-function concurrency() {
-  return new Promise(function (resolve, reject) {
-    _concurrencyClusters(
-      path.join("C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.cluster.js"),
-      8,
-      {
-        data: {
-          data: "Testing parent data",
-          url: "https://www.google.com"
-        },
-        childData: "Test data from child"
-      }
-    ).then((d) => {
-      console.log("Data fetched", JSON.stringify(d));
-      resolve(d);
-    }).catch((e) => {
-      console.log(e.toString());
-      reject(e);
-    });
-  });
-}
-concurrency();
-
+npm install concurrency.js --save
 ```
 
+Find the demos in the [demos folder](./demos). Works but is experimental. Use at your own risk. Please provide as much feedbacks as possible
 
-#### Process Methods
 
+<br/>
 
-
+Run tasks in async mode using promises as simple as below:
 
 ```
-
-const
-
-
-
-"
-
-
-
-
-
-
-
-
-
-
+// This repository's root folder
+const { runPromiseTasks } = require("concurrency.js") // require("../tasks.async.js");
+
+// --- Execution Plan using String Names ---
+const executionPlan = [
+  "taskone", // Index 0
+  ["tasktwo", "taskthree", "taskone"], // Index 1 (Parallel)
+  "taskfour", // Index 2
+  "taskthree", // Index 3
+  "taskone" // Index 4
+];
+
+const resultContext = {};
+// This repository's demos folder
+const taskLibraryPath = './demos/libmap.js';
+
+runPromiseTasks(executionPlan, resultContext, taskLibraryPath)
+  .then((finalContext) => {
+    console.log("\n--- Promises Based Final Accumulated Results Context ---");
+    console.log(JSON.stringify(finalContext, null, 2));
+  })
+  .catch((err) => {
+    console.error("Task execution failed:", err);
+  });
 ```
 
-
-
-
-
-#### Threads Methods
-
-
-`_concurrencyThreads(filename = __filename, options = {}, greet = false)`
+Run tasks in a mix of nodejs code and run threaded code of nodejs execution plan
 
 ```
-
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// This repository's root folder
+const {runThreadedTasks, createWorkerPool} = require("concurrency.js") // require("../tasks.thread")
+
+// --- Execution Plan ---
+const executionPlan = [
+  "taskone",
+  ["tasktwo", "taskthree", "taskone"], // Threaded parallel
+  "taskfour",
+  "taskthree",
+  "taskone"
+];
+
+const resultContext = { results: [] };
+// This repository's demos folder
+const taskFileName = './demos/libmap.js';
+
+// The file to be imported dynamically
+// from concurrent-tasks\worker.js
+
+runThreadedTasks(executionPlan, resultContext, taskFileName)
+  .then(final => {
+    console.log("\n--- Threads Based Final Accumulated Results ---");
+    console.log(JSON.stringify(final, null, 2));
+    process.exit(0);
+  })
+  .catch(err => {
+    console.error("Fatal Error:", err);
+    process.exit(1);
+  });
 ```
 
-
-
-
-
-#### Thread Async Methods
-
-
-`_concurrencyThreadsAsync(command, options)`
-
+Run tasks in a mix of nodejs code and run process worker code of nodejs execution plan
 
 ```
-
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
+// This repository's root folder
+const { runProcessTasks, runInProcess } = require("concurrency.js") // require("../tasks.process");
+
+// --- Execution Plan ---
+const executionPlan = [
+  "taskone", // Index 0
+  ["tasktwo", "taskthree", "taskone"], // Index 1 (Parallel Processes)
+  "taskfour", // Index 2
+  "taskthree", // Index 3
+  "taskone" // Index 4
+];
+
+const resultContext = { results: [] };
+const taskLibraryPath = './demos/libmap.js';
+// This repository's demos folder
+
+runProcessTasks(executionPlan, resultContext, taskLibraryPath)
+  .then(final => {
+    console.log("\n--- Final Accumulated Results (Process-Based) ---");
+    console.log(JSON.stringify(final, null, 2));
+  })
+  .catch(err => {
+    console.error("Execution Error:", err);
+  });
 
 ```
 
-
-
-### Contributions
+## Contributions
 
 Contributions, Feature Improvements, Bugs, and Issues are invited. [raising an issue](https://github.com/ganeshkbhat/concurrency.js/issues)
 
-
-
-### TODO
+## TODO
 
 [Todo](./todo)
 
-
-
 # License
 
 [MIT License](./LICENSE)
-

package/demos/concurrency.async.js
ADDED
@@ -0,0 +1,22 @@
+const { runPromiseTasks } = require("../tasks.async.js");
+
+// --- Execution Plan using String Names ---
+const executionPlan = [
+  "taskone", // Index 0
+  ["tasktwo", "taskthree", "taskone"], // Index 1 (Parallel)
+  "taskfour", // Index 2
+  "taskthree", // Index 3
+  "taskone" // Index 4
+];
+
+const resultContext = {};
+const taskLibraryPath = './demos/libmap.js';
+
+runPromiseTasks(executionPlan, resultContext, taskLibraryPath)
+  .then((finalContext) => {
+    console.log("\n--- Promises Based Final Accumulated Results Context ---");
+    console.log(JSON.stringify(finalContext, null, 2));
+  })
+  .catch((err) => {
+    console.error("Task execution failed:", err);
+  });

package/demos/concurrency.process.js
ADDED
@@ -0,0 +1,24 @@
+
+const { runProcessTasks, runInProcess } = require("../tasks.process");
+
+// --- Execution Plan ---
+const executionPlan = [
+  "taskone", // Index 0
+  ["tasktwo", "taskthree", "taskone"], // Index 1 (Parallel Processes)
+  "taskfour", // Index 2
+  "taskthree", // Index 3
+  "taskone" // Index 4
+];
+
+const resultContext = { results: [] };
+const taskLibraryPath = './demos/libmap.js';
+
+runProcessTasks(executionPlan, resultContext, taskLibraryPath)
+  .then(final => {
+    console.log("\n--- Final Accumulated Results (Process-Based) ---");
+    console.log(JSON.stringify(final, null, 2));
+  })
+  .catch(err => {
+    console.error("Execution Error:", err);
+  });
+

package/demos/concurrency.threaded.js
ADDED
@@ -0,0 +1,25 @@
+
+const {runThreadedTasks, createWorkerPool} = require("../tasks.thread")
+
+// --- Execution Plan ---
+const executionPlan = [
+  "taskone",
+  ["tasktwo", "taskthree", "taskone"],
+  "taskfour",
+  "taskthree",
+  "taskone"
+];
+
+const resultContext = { results: [] };
+const taskFileName = './demos/libmap.js'; // The file to be imported dynamically
+// concurrent-tasks\worker.js
+runThreadedTasks(executionPlan, resultContext, taskFileName)
+  .then(final => {
+    console.log("\n--- Threads Based Final Accumulated Results ---");
+    console.log(JSON.stringify(final, null, 2));
+    process.exit(0);
+  })
+  .catch(err => {
+    console.error("Fatal Error:", err);
+    process.exit(1);
+  });

package/demos/libmap.js
ADDED
@@ -0,0 +1,30 @@
+
+// --- Task Definitions ---
+const taskone = async (context, index) => {
+  console.log(`[Step ${index}] Running: taskone`);
+  await new Promise(resolve => setTimeout(resolve, 50));
+  return `Data from taskone`;
+};
+
+const tasktwo = async (context, index) => {
+  console.log(`[Step ${index}] Running: tasktwo`);
+  await new Promise(resolve => setTimeout(resolve, 50));
+  return "Data from tasktwo";
+};
+
+const taskthree = async (context, index) => {
+  console.log(`[Step ${index}] Running: taskthree`);
+  await new Promise(resolve => setTimeout(resolve, 50));
+  return "Data from taskthree";
+};
+
+const taskfour = async (context, index) => {
+  console.log(`[Step ${index}] Running: taskfour`);
+  await new Promise(resolve => setTimeout(resolve, 50));
+  return "Data from taskfour";
+};
+
+// Map of tasks for the Worker to reference by name
+const taskMap = { taskone, tasktwo, taskthree, taskfour };
+
+module.exports = taskMap;

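The task library is plain CommonJS: each task is an async function of `(context, index)` that returns a value, and the module's export is a name-to-function map that all three runners (promise, process, and thread based) resolve task names against. A minimal sketch of a user-supplied library with the same shape; the file name `mytasks.js` and the task `fetchstats` are illustrative, not part of the package:

```js
// mytasks.js -- hypothetical user task library, same shape as demos/libmap.js
const fetchstats = async (context, index) => {
  // context is the shared resultContext object; index is the step position in the plan
  console.log(`[Step ${index}] Running: fetchstats`);
  await new Promise(resolve => setTimeout(resolve, 10)); // stand-in for real async work
  // Return values should be serializable, since the process/thread runners
  // send them back over IPC / postMessage.
  return { priorResults: (context.results || []).length };
};

module.exports = { fetchstats };
```

Passing the path of such a file as the third argument to runPromiseTasks, runProcessTasks, or runThreadedTasks would make "fetchstats" usable in an execution plan.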
package/index.js
CHANGED
@@ -1,46 +1,12 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-/* eslint no-console: 0 */
-
-'use strict';
-
-
-const { _concurrencyThreads } = require("./src/worker.threads.js");
-const { _concurrencyProcesses } = require("./src/worker.process.js");
-const { _concurrencyClusters } = require("./src/worker.cluster.js");
-const { _concurrencyThreadsAsync } = require("./src/worker.thread.async.js");
-
-
-module.exports._concurrencyThreads = _concurrencyThreads;
-module.exports._concurrencyProcesses = _concurrencyProcesses;
-module.exports._concurrencyClusters = _concurrencyClusters;
-module.exports._concurrencyThreadsAsync = _concurrencyThreadsAsync;
-
-module.exports.concurrencyThreads = _concurrencyThreads;
-module.exports.concurrencyProcesses = _concurrencyProcesses;
-module.exports.concurrencyClusters = _concurrencyClusters;
-module.exports.concurrencyThreadsAsync = _concurrencyThreadsAsync;
-
-module.exports.default = {
-  _concurrencyThreads,
-  _concurrencyProcesses,
-  _concurrencyClusters,
-  _concurrencyThreadsAsync,
-
-  concurrencyThreads: _concurrencyThreads,
-  concurrencyProcesses: _concurrencyProcesses,
-  concurrencyClusters: _concurrencyClusters,
-  concurrencyThreadsAsync: _concurrencyThreadsAsync
-
-};
+const { runThreadedTasks, createWorkerPool } = require("./tasks.thread");
+const { runPromiseTasks } = require("./tasks.async");
+const { runProcessTasks, runInProcess } = require("./tasks.process");
+
+// concurrent-tasks\worker_process.js
+// concurrent-tasks\worker.js
+
+module.exports = {
+  runThreadedTasks, createWorkerPool,
+  runPromiseTasks, runProcessTasks,
+  runInProcess
+}

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "concurrency.js",
-  "version": "0.0.3",
+  "version": "0.0.5",
   "description": "npm module to work with concurrency - worker threads and worker processes (currrently only fork method) easily using simple functions and script files",
   "main": "index.js",
   "directories": {
@@ -11,8 +11,9 @@
     "import": "./index.mjs"
   },
   "devDependencies": {
-    "chai": "^4.
-    "
+    "chai": "^4.5.0",
+    "express": "^4.18.2",
+    "mocha": "^10.8.2",
     "npm-check": "^6.0.1",
     "sinon": "^14.0.0",
     "unimported": "^1.22.0"
@@ -34,5 +35,8 @@
     "processes"
   ],
   "author": "Ganesh B",
-  "license": "MIT"
+  "license": "MIT",
+  "dependencies": {
+    "loadbalancerjs": "^0.0.1"
+  }
 }

package/tasks.async.js
ADDED
@@ -0,0 +1,72 @@
+const path = require('path');
+
+/**
+ * Task Runner for Serial and Parallel Execution
+ * * Logic:
+ * - Resolves function names provided as strings using the taskMap.
+ * - Maintains strict order based on the executionPlan.
+ * - Passes the 'resultContext' and current 'index' into every task.
+ * - Accumulates results in a generic list to avoid key-overwriting.
+ */
+async function runPromiseTasks(taskList, resultContext, taskFilePath) {
+  console.log("Starting task execution sequence...");
+
+  // Dynamically load the task map from the provided library file
+  const taskMap = require(path.resolve(taskFilePath));
+
+  // Initialize the results array in context if it doesn't exist
+  if (!resultContext.results) {
+    resultContext.results = [];
+  }
+
+  for (let i = 0; i < taskList.length; i++) {
+    const step = taskList[i];
+
+    if (Array.isArray(step)) {
+      // Parallel Execution
+      console.log(`Step Index ${i}: Executing Parallel Block`);
+
+      const parallelPromises = step.map(async (taskName) => {
+        const taskFunction = taskMap[taskName];
+        if (!taskFunction) {
+          throw new Error(`Task '${taskName}' not found in ${taskFilePath}`);
+        }
+
+        // Execute task and capture result with metadata
+        const data = await taskFunction(resultContext, i);
+        return {
+          index: i,
+          name: taskName,
+          result: data
+        };
+      });
+
+      // Wait for all parallel tasks in this block to complete
+      const parallelResults = await Promise.all(parallelPromises);
+      resultContext.results.push(...parallelResults);
+    } else {
+      // Serial Execution
+      console.log(`Step Index ${i}: Executing Serial Task (${step})`);
+
+      const taskFunction = taskMap[step];
+      if (!taskFunction) {
+        throw new Error(`Task '${step}' not found in ${taskFilePath}`);
+      }
+
+      const data = await taskFunction(resultContext, i);
+
+      // Accumulate serial result into the context immediately
+      resultContext.results.push({
+        index: i,
+        name: step,
+        result: data
+      });
+    }
+  }
+
+  return resultContext;
+}
+
+module.exports = {
+  runPromiseTasks
+};

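For the execution plan used in the demos and the tasks in demos/libmap.js, the context resolved by runPromiseTasks accumulates one `{ index, name, result }` entry per task run; within the parallel block at index 1 the entries keep the order of the names in that block, since Promise.all preserves input order. A sketch of the expected shape (the variable name is illustrative):

```js
// Approximate resolved resultContext for the demo plan (illustrative)
const expectedShape = {
  results: [
    { index: 0, name: "taskone",   result: "Data from taskone" },
    { index: 1, name: "tasktwo",   result: "Data from tasktwo" },   // parallel block
    { index: 1, name: "taskthree", result: "Data from taskthree" }, // parallel block
    { index: 1, name: "taskone",   result: "Data from taskone" },   // parallel block
    { index: 2, name: "taskfour",  result: "Data from taskfour" },
    { index: 3, name: "taskthree", result: "Data from taskthree" },
    { index: 4, name: "taskone",   result: "Data from taskone" }
  ]
};
```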
package/tasks.process.js
ADDED
@@ -0,0 +1,65 @@
+// concurrency.async.js
+const { fork } = require('child_process');
+const path = require('path');
+
+/**
+ * Helper to run a task in a separate Process
+ */
+function runInProcess(taskName, context, index, taskFilePath) {
+  return new Promise((resolve, reject) => {
+    // Spawns a new Node.js process
+    const child = fork(path.join(__dirname, 'worker_process.js'));
+
+    child.on('message', (msg) => {
+      if (msg.status === 'success') resolve(msg.result);
+      else reject(new Error(msg.error));
+    });
+
+    child.on('error', reject);
+
+    child.on('exit', (code) => {
+      if (code !== 0 && code !== null) {
+        reject(new Error(`Process exited with code ${code}`));
+      }
+    });
+
+    // Send the task details to the child process
+    child.send({ taskName, context, index, taskFilePath });
+  });
+}
+
+/**
+ * Main Task Runner
+ */
+async function runProcessTasks(taskList, resultContext, taskFilePath) {
+  console.log("--- Starting Process-Based Execution ---");
+  if (!resultContext.results) resultContext.results = [];
+
+  for (let i = 0; i < taskList.length; i++) {
+    const step = taskList[i];
+
+    if (Array.isArray(step)) {
+      console.log(`Step ${i}: Dispatching Parallel Processes...`);
+
+      // Execute each string in the array as a separate process
+      const parallelPromises = step.map(taskName =>
+        runInProcess(taskName, resultContext, i, taskFilePath)
+      );
+
+      const outputs = await Promise.all(parallelPromises);
+      resultContext.results.push(...outputs);
+    } else {
+      console.log(`Step ${i}: Running Serial Task (${step})`);
+
+      // Serial tasks run in a process to maintain total isolation
+      const output = await runInProcess(step, resultContext, i, taskFilePath);
+      resultContext.results.push(output);
+    }
+  }
+  return resultContext;
+}
+
+module.exports = {
+  runProcessTasks,
+  runInProcess
+}

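runInProcess forks package/worker_process.js (+29 lines in the summary above, not shown in this section) and relies on a small message contract: the parent sends `{ taskName, context, index, taskFilePath }` and expects `{ status: 'success', result }` back, or a message carrying an `error` string on failure. A hedged sketch of a worker that satisfies that contract, not the package's actual worker_process.js:

```js
// Hypothetical worker_process.js compatible with runInProcess's message contract
const path = require('path');

process.on('message', async ({ taskName, context, index, taskFilePath }) => {
  let reply;
  try {
    const taskMap = require(path.resolve(taskFilePath)); // e.g. demos/libmap.js
    const task = taskMap[taskName];
    if (!task) throw new Error(`Task '${taskName}' not found in ${taskFilePath}`);
    reply = { status: 'success', result: await task(context, index) };
  } catch (err) {
    reply = { status: 'error', error: err.message };
  }
  // Exit 0 only after the reply has been flushed over IPC,
  // so the parent's 'exit' handler does not reject the task.
  process.send(reply, () => process.exit(0));
});
```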
package/tasks.thread.js
ADDED
@@ -0,0 +1,92 @@
+const { Worker } = require('worker_threads');
+const path = require('path');
+const os = require('os');
+
+/**
+ * Functional Worker Pool Factory
+ * @param {string} taskFilePath - Path to the file containing task functions
+ */
+function createWorkerPool(size, taskFilePath) {
+  const workers = [];
+  const queue = [];
+
+  for (let i = 0; i < size; i++) {
+    const worker = new Worker(path.join(__dirname, 'worker.js'), {
+      stdout: true,
+      workerData: { taskFilePath } // Sending the filename here
+    });
+
+    worker.stdout.on('data', data => process.stdout.write(data));
+    workers.push({ instance: worker, inUse: false });
+  }
+
+  const execute = (workerObj, taskName, context, index, resolve, reject) => {
+    workerObj.inUse = true;
+
+    const onMessage = (msg) => {
+      workerObj.instance.off('message', onMessage);
+      workerObj.inUse = false;
+
+      if (msg.status === 'success') resolve(msg.result);
+      else reject(new Error(msg.error));
+
+      if (queue.length > 0) {
+        const next = queue.shift();
+        execute(workerObj, next.taskName, next.context, next.index, next.resolve, next.reject);
+      }
+    };
+
+    workerObj.instance.on('message', onMessage);
+    workerObj.instance.postMessage({ taskName, context, index });
+  };
+
+  return {
+    runTask: (taskName, context, index) => {
+      return new Promise((resolve, reject) => {
+        const availableWorker = workers.find(w => !w.inUse);
+        if (availableWorker) {
+          execute(availableWorker, taskName, context, index, resolve, reject);
+        } else {
+          queue.push({ taskName, context, index, resolve, reject });
+        }
+      });
+    },
+    destroy: () => {
+      workers.forEach(w => w.instance.terminate());
+    }
+  };
+}
+
+async function runThreadedTasks(taskList, resultContext, taskFileName) {
+  const poolSize = Math.max(2, Math.floor(os.cpus().length / 2));
+  const pool = createWorkerPool(poolSize, taskFileName);
+
+  console.log(`--- Starting Dynamic Execution (File: ${taskFileName}) ---`);
+  if (!resultContext.results) resultContext.results = [];
+
+  try {
+    for (let i = 0; i < taskList.length; i++) {
+      const taskEntry = taskList[i];
+
+      if (Array.isArray(taskEntry)) {
+        console.log(`[Main] Dispatching Parallel Block at Index ${i}`);
+        const outputs = await Promise.all(
+          taskEntry.map(name => pool.runTask(name, resultContext, i))
+        );
+        resultContext.results.push(...outputs);
+      } else {
+        console.log(`[Main] Running Serial Task '${taskEntry}' at Index ${i}`);
+        const output = await pool.runTask(taskEntry, resultContext, i);
+        resultContext.results.push(output);
+      }
+    }
+  } finally {
+    pool.destroy();
+  }
+  return resultContext;
+}
+
+module.exports = {
+  createWorkerPool,
+  runThreadedTasks
+}

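The pool above loads package/worker.js (+24 lines in the summary, also not shown in this section), hands it the task library path through `workerData`, and posts `{ taskName, context, index }` messages, expecting `{ status: 'success', result }` or an `error` reply. A hedged sketch of a worker_threads script matching that contract, not the package's actual worker.js:

```js
// Hypothetical worker.js compatible with createWorkerPool's message contract
const { parentPort, workerData } = require('worker_threads');
const path = require('path');

// The pool passes { taskFilePath } via workerData when it spawns the worker
const taskMap = require(path.resolve(workerData.taskFilePath)); // e.g. demos/libmap.js

// Pool workers are reused, so keep listening for one task message at a time
parentPort.on('message', async ({ taskName, context, index }) => {
  try {
    const task = taskMap[taskName];
    if (!task) throw new Error(`Task '${taskName}' not found`);
    const result = await task(context, index);
    parentPort.postMessage({ status: 'success', result });
  } catch (err) {
    parentPort.postMessage({ status: 'error', error: err.message });
  }
});
```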