concurrency.js 0.0.3-beta → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
// Tests for the process-based task runner: each task in the plan executes in
// a forked child process, and its result is folded back into the shared
// context object that the runner resolves with.
const { expect } = require('chai');
const { runProcessTasks } = require('../tasks.process'); // The process-based runner
const path = require('path');

describe('Process-Based Task Runner', function() {
  // Spawning processes is heavier than threads; 10s timeout recommended
  this.timeout(10000);

  let resultContext;
  const taskLibraryPath = path.resolve(process.cwd(), './demos/libmap.js');

  beforeEach(() => {
    // Ensure a clean context before every test case
    resultContext = { results: [] };
  });

  it('should execute a serial task in a child process', async () => {
    const plan = ["taskone"];
    const finalContext = await runProcessTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(1);
    expect(finalContext.results[0]).to.deep.include({
      index: 0,
      name: 'taskone'
    });
    expect(finalContext.results[0].result).to.contain('Data from taskone');
  });

  it('should execute parallel tasks in separate child processes', async () => {
    const plan = [["tasktwo", "taskthree"]];
    const finalContext = await runProcessTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(2);

    // Validate that both parallel results are assigned the same step index.
    // FIX: `to.members(...)` is not a callable chai assertion on its own —
    // the `members` matcher must be chained through `have`.
    const indices = finalContext.results.map(r => r.index);
    expect(indices).to.have.members([0, 0]);

    const names = finalContext.results.map(r => r.name);
    expect(names).to.include.members(['tasktwo', 'taskthree']);
  });

  it('should maintain order across complex transitions (Serial -> Parallel)', async () => {
    const plan = [
      "taskone",
      ["tasktwo", "taskthree"],
      "taskfour"
    ];

    const finalContext = await runProcessTasks(plan, resultContext, taskLibraryPath);

    // Total 4 results: 1 (index 0) + 2 (index 1) + 1 (index 2)
    expect(finalContext.results).to.have.lengthOf(4);

    expect(finalContext.results[0].index).to.equal(0);
    expect(finalContext.results[1].index).to.equal(1);
    expect(finalContext.results[2].index).to.equal(1);
    expect(finalContext.results[3].index).to.equal(2);
  });

  it('should return an error if the process encounters an invalid task name', async () => {
    const plan = ["missingTask"];
    try {
      await runProcessTasks(plan, resultContext, taskLibraryPath);
      // Sentinel: reaching this line means the runner did NOT reject.
      throw new Error('Test should have thrown an error');
    } catch (err) {
      expect(err.message).to.contain("Task 'missingTask' not found");
    }
  });

  it('should recover if a child process exits unexpectedly', async () => {
    // This tests the logic handling process exits in the coordinator
    const plan = ["taskone"];
    // Simulate a failure by passing a non-existent task file path
    const badPath = './non_existent_file.js';

    try {
      await runProcessTasks(plan, resultContext, badPath);
      throw new Error('Should have caught process exit error');
    } catch (err) {
      // Depending on OS, this might be a module not found error or exit code 1
      expect(err).to.exist;
    }
  });
});
// Tests for the promise-based (in-process) task runner: serial entries run
// one after another, array entries run concurrently via Promises.
const { expect } = require('chai');
const { runPromiseTasks } = require('../tasks.async');
const path = require('path');

describe('Promise-Based Task Runner', () => {
  let resultContext;
  // FIX: was a bare relative string (and left `path` unused). Resolve it
  // against the working directory, consistent with the process- and
  // thread-runner suites, so the test does not depend on the runner's
  // own module location for path resolution.
  const taskLibraryPath = path.resolve(process.cwd(), './demos/libmap.js');

  beforeEach(() => {
    // Reset context before each test
    resultContext = { results: [] };
  });

  it('should execute a single serial task and update the context', async () => {
    const plan = ['taskone'];
    const finalContext = await runPromiseTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(1);
    expect(finalContext.results[0]).to.deep.include({
      index: 0,
      name: 'taskone'
    });
    expect(finalContext.results[0].result).to.contain('Data from taskone');
  });

  it('should execute tasks in the correct serial order', async () => {
    const plan = ['taskone', 'taskfour'];
    const finalContext = await runPromiseTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results[0].name).to.equal('taskone');
    expect(finalContext.results[1].name).to.equal('taskfour');
    expect(finalContext.results[1].index).to.equal(1);
  });

  it('should execute parallel blocks correctly', async () => {
    const plan = [['tasktwo', 'taskthree']];
    const finalContext = await runPromiseTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(2);
    // Both parallel tasks should share the same step index
    expect(finalContext.results[0].index).to.equal(0);
    expect(finalContext.results[1].index).to.equal(0);

    const names = finalContext.results.map(r => r.name);
    expect(names).to.include('tasktwo');
    expect(names).to.include('taskthree');
  });

  it('should maintain order for complex plans (Serial -> Parallel -> Serial)', async () => {
    const plan = [
      'taskone',
      ['tasktwo', 'taskthree'],
      'taskfour'
    ];
    const finalContext = await runPromiseTasks(plan, resultContext, taskLibraryPath);

    // Total 4 result entries (1 + 2 + 1)
    expect(finalContext.results).to.have.lengthOf(4);

    // Check indices
    expect(finalContext.results[0].index).to.equal(0); // taskone
    expect(finalContext.results[1].index).to.equal(1); // parallel part 1
    expect(finalContext.results[2].index).to.equal(1); // parallel part 2
    expect(finalContext.results[3].index).to.equal(2); // taskfour
  });

  it('should allow repeated tasks and distinguish them by index', async () => {
    const plan = ['taskone', 'taskone'];
    const finalContext = await runPromiseTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(2);
    expect(finalContext.results[0].index).to.equal(0);
    expect(finalContext.results[1].index).to.equal(1);
    expect(finalContext.results[0].name).to.equal(finalContext.results[1].name);
  });

  it('should throw an error if a task name is not found in the map', async () => {
    const plan = ['nonExistentTask'];
    try {
      await runPromiseTasks(plan, resultContext, taskLibraryPath);
      // Sentinel: reaching this line means the runner did NOT reject.
      throw new Error('Should have failed');
    } catch (err) {
      expect(err.message).to.contain("Task 'nonExistentTask' not found");
    }
  });
});
// Test suite for the worker_threads-based runner. Mirrors the behaviour
// checks of the process- and promise-based runner suites.
const { expect } = require('chai');
const { runThreadedTasks } = require('../tasks.thread'); // The threaded version
const path = require('path');

describe('Threaded Task Runner', function() {
  // Thread spawn overhead justifies a larger-than-default timeout.
  this.timeout(5000);

  let resultContext;
  const taskLibraryPath = path.resolve(process.cwd(), './demos/libmap.js');

  beforeEach(() => {
    resultContext = { results: [] };
  });

  it('should execute a serial task in a thread and return results', async () => {
    const finalContext = await runThreadedTasks(["taskone"], resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(1);
    const [first] = finalContext.results;
    expect(first.name).to.equal('taskone');
    expect(first.index).to.equal(0);
    expect(first.result).to.contain('Data from taskone');
  });

  it('should execute parallel tasks across multiple threads', async () => {
    const plan = [["tasktwo", "taskthree", "taskone"]];
    const finalContext = await runThreadedTasks(plan, resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(3);

    // Every entry of a parallel block carries that block's step index.
    for (const entry of finalContext.results) {
      expect(entry.index).to.equal(0);
    }

    const names = finalContext.results.map(r => r.name);
    expect(names).to.include('tasktwo');
    expect(names).to.include('taskthree');
    expect(names).to.include('taskone');
  });

  it('should preserve execution order and context across serial and parallel boundaries', async () => {
    const plan = [
      "taskone", // Index 0
      ["tasktwo", "taskthree"], // Index 1
      "taskfour" // Index 2
    ];

    const finalContext = await runThreadedTasks(plan, resultContext, taskLibraryPath);

    // 1 serial + 2 parallel + 1 serial = 4 result entries in total.
    expect(finalContext.results).to.have.lengthOf(4);

    // Verify the indexing sequence position by position.
    const expectedIndices = [0, 1, 1, 2];
    expectedIndices.forEach((expected, position) => {
      expect(finalContext.results[position].index).to.equal(expected);
    });

    expect(finalContext.results[3].name).to.equal('taskfour');
  });

  it('should distinguish between multiple runs of the same task name', async () => {
    const finalContext = await runThreadedTasks(["taskone", "taskone"], resultContext, taskLibraryPath);

    expect(finalContext.results).to.have.lengthOf(2);
    expect(finalContext.results[0].name).to.equal('taskone');
    expect(finalContext.results[1].name).to.equal('taskone');

    // Identical names, but each run keeps its own step index.
    expect(finalContext.results[0].index).to.equal(0);
    expect(finalContext.results[1].index).to.equal(1);
  });

  it('should report errors if a string name does not exist in libmap.js', async () => {
    try {
      await runThreadedTasks(["invalidTaskName"], resultContext, taskLibraryPath);
      throw new Error('Test should have failed');
    } catch (err) {
      expect(err.message).to.contain("Task 'invalidTaskName' not found");
    }
  });
});
package/worker.js ADDED
// worker.js — worker_threads entry point. The task map is loaded once at
// startup from the file path supplied via workerData; the worker then
// serves task-execution requests arriving over the parent port.
const { parentPort, workerData } = require('worker_threads');
const path = require('path');

// Dynamically import the task map using the file path provided in workerData
const taskMap = require(path.resolve(workerData.taskFilePath));

parentPort.on('message', async (message) => {
  const { taskName, context, index } = message;
  try {
    const taskFunction = taskMap[taskName];
    if (!taskFunction) {
      throw new Error(`Task '${taskName}' not found in file: ${workerData.taskFilePath}`);
    }

    // Run the task and reply with its result tagged by step index and name.
    const resultValue = await taskFunction(context, index);
    parentPort.postMessage({
      status: 'success',
      result: { index, name: taskName, result: resultValue }
    });
  } catch (error) {
    // Errors are serialized as plain message strings for the coordinator.
    parentPort.postMessage({ status: 'error', error: error.message });
  }
});
// worker_process.js — child_process entry point. Each forked child runs a
// single task described by the parent's message, replies over IPC, and
// then exits.
const path = require('path');

// Listen for messages from the parent process
process.on('message', async ({ taskName, context, index, taskFilePath }) => {
  // FIX: the original called process.exit(0) in a `finally` block, which
  // runs synchronously right after process.send(). process.send() is
  // asynchronous, so exiting immediately can drop the reply before the
  // IPC channel flushes. Exit from send()'s completion callback instead.
  const replyAndExit = (payload) => {
    // Process exits after task completion to free up system memory
    process.send(payload, () => process.exit(0));
  };

  try {
    // Dynamically load the task map
    const taskMap = require(path.resolve(taskFilePath));
    const taskFunction = taskMap[taskName];

    if (!taskFunction) {
      throw new Error(`Task '${taskName}' not found in ${taskFilePath}`);
    }

    // Execute the task
    const resultValue = await taskFunction(context, index);

    // Send the result back to the parent
    replyAndExit({
      status: 'success',
      result: { index, name: taskName, result: resultValue }
    });
  } catch (error) {
    replyAndExit({ status: 'error', error: error.message });
  }
});
package/demos/cluster.js DELETED
@@ -1,22 +0,0 @@
1
-
2
- const path = require("path");
3
- const { _concurrencyClusters } = require("../index.js");
4
-
5
-
6
- async function cluster() {
7
- let filename = "C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.cluster.js";
8
- return _concurrencyClusters(
9
- path.join(filename),
10
- // __filename,
11
- 8,
12
- {
13
- data: {
14
- url: "https://www.google.com",
15
- message: "Testing parent data"
16
- },
17
- childData: "Test data from child"
18
- }
19
- )
20
- }
21
-
22
- module.exports = cluster;
@@ -1,66 +0,0 @@
1
-
2
- const path = require("path");
3
- let { _concurrencyClusters } = require("../index.js");
4
-
5
-
6
- // console.log(_concurrencyClusters(
7
- // path.join("C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.process.js"),
8
- // 8,
9
- // { url: "https://www.google.com", data: "Testing parent data", childData: "Test data from child" }
10
- // ).then((d) => {
11
- // console.log("Data fetched", JSON.stringify(d));
12
- // }).catch((e) => {
13
- // console.log(e.toString());
14
- // }))
15
-
16
-
17
-
18
-
19
- // async function concurrency() {
20
- // let result = await _concurrencyClusters(
21
- // path.join("C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.cluster.js"),
22
- // 8,
23
- // { url: "https://www.google.com", data: "Testing parent data", childData: "Test data from child" }
24
- // )
25
- // console.log(result);
26
- // }
27
-
28
- // function concurrency() {
29
- // _concurrencyClusters(
30
- // path.join("C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.cluster.js"),
31
- // 8,
32
- // { url: "https://www.google.com", data: "Testing parent data", childData: "Test data from child" }
33
- // ).then((d) => {
34
- // console.log("Data fetched", JSON.stringify(d));
35
- // }).catch((e) => {
36
- // console.log(e.toString());
37
- // })
38
- // }
39
-
40
- function concurrency() {
41
- let filename = "C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.cluster.js";
42
- return new Promise(function (resolve, reject) {
43
- _concurrencyClusters(
44
- path.join(filename),
45
- 8,
46
- {
47
- data: {
48
- message: "Testing parent data",
49
- url: "https://www.google.com",
50
- },
51
- childData: "Test data from child"
52
- }
53
- ).then((d) => {
54
- console.log("Data fetched", JSON.stringify(d));
55
- resolve(d);
56
- }).catch((e) => {
57
- console.log(e.toString());
58
- reject(e);
59
- });
60
- });
61
- }
62
-
63
- concurrency();
64
-
65
- setTimeout(() => console.log(`demo.cluster.js: run file PID ${process.pid}: Interval 2: 10000 `, process.pid), 10000);
66
- setTimeout(() => console.log(`demo.cluster.js: Closing process ${process.pid}: Timeout 1: 10000 `, process.exit()), 20000);
package/demos/demos.js DELETED
@@ -1,21 +0,0 @@
1
- /**
2
- *
3
- * Package: concurrency.js
4
- * Author: Ganesh B
5
- * Description: npm module to work with concurrency - worker threads and worker processes easily using simple functions and script files
6
- * Install: npm i concurrency.js --save
7
- * Github: https://github.com/ganeshkbhat/concurrency
8
- * npmjs Link: https://www.npmjs.com/package/
9
- * File: index.js
10
- * File Description:
11
- *
12
- */
13
-
14
- /* eslint no-console: 0 */
15
-
16
- 'use strict';
17
-
18
-
19
- let { _concurrencyThreads, _concurrencyProcesses, _concurrencyClusters, _concurrencyThreadsAsync } = require("../index.js");
20
- console.log(_concurrencyThreads, _concurrencyProcesses, _concurrencyClusters, _concurrencyThreadsAsync);
21
-
@@ -1,37 +0,0 @@
1
- /**
2
- *
3
- * Package: concurrency.js
4
- * Author: Ganesh B
5
- * Description: npm module to work with concurrency - worker threads and worker processes easily using simple functions and script files
6
- * Install: npm i concurrency.js --save
7
- * Github: https://github.com/ganeshkbhat/concurrency
8
- * npmjs Link: https://www.npmjs.com/package/
9
- * File: demo.processes.js
10
- * File Description:
11
- *
12
- */
13
-
14
- /* eslint no-console: 0 */
15
-
16
- 'use strict';
17
-
18
-
19
- const path = require("path");
20
- let { _concurrencyProcesses } = require("../index.js");
21
-
22
- let filename = "C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.process.js";
23
- _concurrencyProcesses(
24
- path.join(filename), {
25
- data: {
26
- message: "Testing data",
27
- url: "https://www.google.com"
28
- }
29
- }, true).then((d) => {
30
- console.log("Data fetched: ", JSON.stringify(d));
31
- }).catch((e) => {
32
- console.log(e.toString()); setTimeout(() => { process.exit(e); }, 5000)
33
- });
34
-
35
- setTimeout(() => console.log(`demo.processes.js: Run file PID ${process.pid}: Interval 2: 10000 `, process.pid), 10000);
36
- setTimeout(() => console.log(`demo.processes.js: Closing process ${process.pid}: Timeout 1: 10000 `, process.exit()), 20000);
37
-
@@ -1,22 +0,0 @@
1
-
2
-
3
- const path = require("path");
4
- let { _concurrencyThreadsAsync } = require("../index.js");
5
-
6
- let filename = "C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\demos\\demos.threads.js";
7
-
8
- let threads = _concurrencyThreadsAsync(filename, {
9
- data: {
10
- message: "Testing parent data",
11
- url: "https://www.google.com"
12
- },
13
- childData: "Test data from child"
14
- });
15
-
16
- console.log(` STDOUT: console.log(threads.stderr); console.log(threads.stdout); `);
17
-
18
- console.log(threads.stderr);
19
- console.log(threads.stdout);
20
-
21
- // setTimeout(() => console.log(`demo.cluster.js: run file PID ${process.pid}: Interval 2: 10000 `, process.pid), 10000);
22
- // setTimeout(() => console.log(`demo.cluster.js: Closing process ${process.pid}: Timeout 1: 10000 `, process.exit()), 20000);
@@ -1,40 +0,0 @@
1
- /**
2
- *
3
- * Package: concurrency.js
4
- * Author: Ganesh B
5
- * Description: npm module to work with concurrency - worker threads and worker processes easily using simple functions and script files
6
- * Install: npm i concurrency.js --save
7
- * Github: https://github.com/ganeshkbhat/concurrency
8
- * npmjs Link: https://www.npmjs.com/package/
9
- * File: demo.threads.js
10
- * File Description:
11
- *
12
- */
13
-
14
- /* eslint no-console: 0 */
15
-
16
- 'use strict';
17
-
18
- const path = require("path");
19
- let { _concurrencyThreads } = require("../index.js");
20
-
21
- // _concurrencyThreads(path.join("C:\\Users\\GB\\Documents\\projects\\requireurl\\concurrency\\src\\worker.threads.js"), { data: { url: "https://www.google.com", message: "Testing data" } });
22
-
23
-
24
- _concurrencyThreads(__filename, {
25
- data: {
26
- url: "https://www.google.com",
27
- message: "Testing data"
28
- },
29
- childData: "Testing child data"
30
- }, true).then((d) => console.log(JSON.stringify(d)))
31
- // .catch((e) => { console.log(e.toString()); setTimeout(() => {process.exit(e);}, 5000) })
32
-
33
- // setTimeout(() => {
34
- // console.log(`demo.threads.js: Closing process ${process.pid}: Timeout 1: 20000 `, __filename);
35
- // process.exit(0);
36
- // }, 20000);
37
-
38
- setTimeout(() => console.log(`demo.processes.js: Run file PID ${process.pid}: Interval 2: 10000 `, process.pid), 10000);
39
- setTimeout(() => console.log(`demo.processes.js: Closing process ${process.pid}: Timeout 1: 10000 `, process.exit()), 20000);
40
-
package/index.mjs DELETED
@@ -1,21 +0,0 @@
1
- /**
2
- *
3
- * Package: concurrency.js
4
- * Author: Ganesh B
5
- * Description: npm module to work with concurrency - worker threads and worker processes easily using simple functions and script files
6
- * Install: npm i concurrency.js --save
7
- * Github: https://github.com/ganeshkbhat/concurrency
8
- * npmjs Link: https://www.npmjs.com/package/
9
- * File: index.mjs
10
- * File Description:
11
- *
12
- */
13
-
14
- /* eslint no-console: 0 */
15
-
16
- 'use strict';
17
-
18
- import { _concurrencyThreads, _concurrencyProcesses, _concurrencyThreadsAsync, _concurrencyThreadsAsync } from "./index.js";
19
-
20
- export { _concurrencyThreads, _concurrencyProcesses, _concurrencyClusters, _concurrencyThreadsAsync };
21
- export default _concurrencyProcesses;
@@ -1,122 +0,0 @@
1
- /**
2
- *
3
- * Package: concurrency.js
4
- * Author: Ganesh B
5
- * Description: npm module to work with concurrency - worker threads and worker processes easily using simple functions and script files
6
- * Install: npm i concurrency.js --save
7
- * Github: https://github.com/ganeshkbhat/concurrency
8
- * npmjs Link: https://www.npmjs.com/package/
9
- * File: worker.process.js
10
- * File Description:
11
- *
12
- */
13
-
14
- /* eslint no-console: 0 */
15
-
16
- 'use strict';
17
-
18
- const cluster = require("node:cluster");
19
- const http = require("node:http");
20
- const { cpus } = require("node:os");
21
- const process = require("node:process");
22
-
23
- function _concurrencyClusters(filename = __filename, num = cpus().length, options = {}, greet = false) {
24
- var worker, workers = {}, result = [];
25
- var messageData = {}, childMessageData = [];
26
-
27
- if (!options.handlers) {
28
- options["handlers"] = {};
29
- }
30
-
31
- return new Promise((resolve, reject) => {
32
- if (cluster.isPrimary) {
33
- num = num || cpus().length;
34
- for (let i = 0; i < num; i++) {
35
- cluster.fork(filename, { env: { ...process.env, FORK: 1, childData: options.childData, handlers: { ...options.handlers } } });
36
- }
37
-
38
- for (const id in cluster.workers) {
39
- messageData[id] = [];
40
- cluster.workers[id].on("message", (msg) => {
41
- if (!messageData[id]) {
42
- messageData[id] = [];
43
- }
44
- messageData[id].push(msg);
45
- if (!!options.handlers.message) {
46
- const cbFunction = require(options.handlers.message);
47
- result.push({ return: cbFunction(msg), id: id, pid: process.pid, event: "message" });
48
- }
49
- if (!!msg.closeChild) {
50
- childMessageData.push(msg);
51
- cluster.workers[id].disconnect();
52
- }
53
- if (!Object.keys(cluster.workers).length) {
54
- resolve({ message: messageData, result: result });
55
- }
56
- });
57
-
58
- if (!!greet) {
59
- cluster.workers[id].send({ pid: process.pid, message: "Message from Parent: " + process.pid.toString() });
60
- }
61
-
62
- (!!options.data) ? cluster.workers[id].send({ id: id, pid: process.pid, message: options.data }) : null;
63
-
64
- cluster.workers[id].on("error", function (e) {
65
- if (!!options.handlers.error) {
66
- const cbFunction = require(options.handlers.error);
67
- result.push({ return: cbFunction(e), id: id, pid: process.pid, event: "error" });
68
- }
69
- reject(e);
70
- });
71
-
72
- cluster.workers[id].on("close", function (code, signal) {
73
- if (!!options.handlers.close) {
74
- let connected = cluster.workers[id].isConnected();
75
- const cbFunction = require(options.handlers.close);
76
- // result.push(cbFunction(code, signal, pid, connected));
77
- result.push({ return: cbFunction(code, signal, pid, connected), id: id, pid: process.pid, event: "close" });
78
- }
79
- });
80
-
81
- cluster.workers[id].on("exit", (code) => {
82
- if (!!options.handlers.exit) {
83
- const cbFunction = require(options.handlers.exit);
84
- result.push({ return: cbFunction(code), id: id, pid: process.pid, event: "exit" });
85
- }
86
- if (!Object.keys(cluster.workers).length) {
87
- // console.log("exit called");
88
- }
89
- });
90
-
91
- cluster.workers[id].send({ closeChild: true })
92
- }
93
- } else if (cluster.isWorker) {
94
- // } else {
95
- // return new Promise((resolve, reject) => {
96
- process.on("message", (msg) => {
97
- childMessageData.push(msg);
98
- // if (!!process.env.handlers.childMessage) {
99
- // const childCBFunction = require(process.env.handlers.childMessage);
100
- // result.push({ return: cbFunction(msg), pid: process.pid, event: "message" });
101
- // }
102
- if (!!msg.closeChild) {
103
- process.send({ closeChild: true, pid: process.pid, childMessageData: childMessageData, result: result });
104
- }
105
- });
106
-
107
- if (!!greet) {
108
- process.send({ pid: process.pid, message: "Message from worker: " + process.pid.toString() });
109
- }
110
-
111
- (!!process.env.childData) ? child.send({ pid: process.pid, message: process.env.childData }) : null;
112
- }
113
- });
114
-
115
- }
116
-
117
- if (process.env.FORK) {
118
- _concurrencyClusters();
119
- }
120
-
121
-
122
- module.exports._concurrencyClusters = _concurrencyClusters;