@nxtedition/scheduler 3.0.2 → 3.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/LICENSE +21 -0
  2. package/README.md +162 -0
  3. package/lib/index.js +18 -1
  4. package/package.json +14 -6
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) nxtedition
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,162 @@
1
+ # @nxtedition/scheduler
2
+
3
+ A high-performance, priority-based task scheduler for Node.js with support for concurrency limiting and multi-worker coordination via `SharedArrayBuffer`.
4
+
5
+ ## Install
6
+
7
+ ```sh
8
+ npm install @nxtedition/scheduler
9
+ ```
10
+
11
+ ## Usage
12
+
13
+ ### Basic
14
+
15
+ ```js
16
+ import { Scheduler } from '@nxtedition/scheduler'
17
+
18
+ const scheduler = new Scheduler({ concurrency: 4 })
19
+
20
+ const result = await scheduler.run(async () => {
21
+ const res = await fetch('https://example.com')
22
+ return res.json()
23
+ })
24
+ ```
25
+
26
+ ### Priority
27
+
28
+ Seven priority levels are available:
29
+
30
+ | Name | Value |
31
+ | --------- | ----- |
32
+ | `lowest` | -3 |
33
+ | `lower` | -2 |
34
+ | `low` | -1 |
35
+ | `normal` | 0 |
36
+ | `high` | 1 |
37
+ | `higher` | 2 |
38
+ | `highest` | 3 |
39
+
40
+ ```js
41
+ // Using string priority
42
+ await scheduler.run(() => importantWork(), 'high')
43
+
44
+ // Using static constants
45
+ await scheduler.run(() => backgroundWork(), Scheduler.LOW)
46
+ ```
47
+
48
+ ### Low-Level API
49
+
50
+ For more control, use `acquire` / `release` directly:
51
+
52
+ ```js
53
+ scheduler.acquire(
54
+ (opaque) => {
55
+ try {
56
+ doWork(opaque)
57
+ } finally {
58
+ scheduler.release()
59
+ }
60
+ },
61
+ Scheduler.NORMAL,
62
+ opaqueData,
63
+ )
64
+ ```
65
+
66
+ ### Multi-Worker Coordination
67
+
68
+ Share a concurrency limit across worker threads using `SharedArrayBuffer`:
69
+
70
+ ```js
71
+ // Main thread
72
+ import { Scheduler } from '@nxtedition/scheduler'
73
+ import { Worker } from 'node:worker_threads'
74
+
75
+ const sharedState = Scheduler.makeSharedState(8)
76
+ const worker = new Worker('./worker.js', { workerData: sharedState })
77
+
78
+ // Worker thread
79
+ import { Scheduler } from '@nxtedition/scheduler'
80
+ import { workerData } from 'node:worker_threads'
81
+
82
+ const scheduler = new Scheduler(workerData)
83
+ await scheduler.run(() => work())
84
+ ```
85
+
86
+ ### UV Thread Pool Scheduling
87
+
88
+ A practical use case is coordinating access to the libuv thread pool (`UV_THREADPOOL_SIZE`) across multiple worker threads. For example, several HTTP file-serving workers can share a scheduler so that file-system operations (which consume UV thread pool slots) are prioritized and throttled globally:
89
+
90
+ ```js
91
+ // main.js
92
+ import { Scheduler } from '@nxtedition/scheduler'
93
+ import { Worker } from 'node:worker_threads'
94
+
95
+ const UV_THREADPOOL_SIZE = parseInt(process.env.UV_THREADPOOL_SIZE || '4', 10)
96
+ const sharedState = Scheduler.makeSharedState(UV_THREADPOOL_SIZE)
97
+
98
+ for (let i = 0; i < 4; i++) {
99
+ new Worker('./http-worker.js', { workerData: { sharedState } })
100
+ }
101
+ ```
102
+
103
+ ```js
104
+ // http-worker.js
105
+ import { Scheduler, parsePriority } from '@nxtedition/scheduler'
106
+ import { workerData } from 'node:worker_threads'
107
+ import fs from 'node:fs/promises'
108
+
109
+ const scheduler = new Scheduler(workerData.sharedState)
110
+
111
+ async function handleRequest(req, res) {
112
+ // Derive priority from a request header, e.g. "X-Priority: high"
113
+ const priority = parsePriority(req.headers['x-priority'] || 'normal')
114
+
115
+ const data = await scheduler.run(() => fs.readFile(filePath), priority)
116
+ res.end(data)
117
+ }
118
+ ```
119
+
120
+ This ensures that high-priority requests get file-system access first, while low-priority background work (thumbnails, transcoding, etc.) yields thread pool capacity without starving entirely — thanks to the built-in starvation prevention.
121
+
122
+ ### Monitoring
123
+
124
+ ```js
125
+ const { running, pending, deferred, queues } = scheduler.stats
126
+ ```
127
+
128
+ - `running` — currently executing tasks
129
+ - `pending` — tasks waiting in queues
130
+ - `deferred` — total tasks that were queued (lifetime counter)
131
+ - `queues` — per-priority queue counts
132
+
133
+ ## API
134
+
135
+ ### `new Scheduler(opts)`
136
+
137
+ - `opts.concurrency` — max concurrent tasks (default: `Infinity`)
138
+ - `opts` may also be a `SharedArrayBuffer` created by `Scheduler.makeSharedState()`
139
+
140
+ ### `scheduler.run(fn, priority?, opaque?): Promise<T>`
141
+
142
+ Execute `fn` within the scheduler. Returns a promise that resolves with the return value of `fn`.
143
+
144
+ ### `scheduler.acquire(fn, priority?, opaque?): void`
145
+
146
+ Low-level task acquisition. You **must** call `scheduler.release()` when done.
147
+
148
+ ### `scheduler.release(): void`
149
+
150
+ Signal task completion. Dequeues the next pending task if concurrency allows.
151
+
152
+ ### `Scheduler.makeSharedState(concurrency): SharedArrayBuffer`
153
+
154
+ Create shared state for cross-worker scheduling.
155
+
156
+ ### `parsePriority(value): number`
157
+
158
+ Parse a string or number into a normalized priority value.
159
+
160
+ ## License
161
+
162
+ MIT
package/lib/index.js CHANGED
@@ -1,8 +1,15 @@
1
+ import os from 'node:os'
2
+
1
3
  const RUNNING_INDEX = 0
2
4
  const CONCURRENCY_INDEX = 1
3
5
 
4
6
  const maxInt = 2147483647
5
7
 
8
+ // On x86/x64, aligned 32-bit reads do not tear, so a plain array access is safe
9
+ // and avoids the overhead of Atomics.load() in V8. On other architectures (e.g. ARM),
10
+ // use Atomics.load() to prevent tearing.
11
+ const useAtomicLoad = os.arch() !== 'x64' && os.arch() !== 'ia32'
12
+
6
13
  class FastQueue {
7
14
  idx = 0
8
15
  cnt = 0
@@ -145,7 +152,17 @@ export class Scheduler {
145
152
  const queue = this.#queues[p + 3]
146
153
 
147
154
  if (this.#stateView) {
148
- if (this.#running < 1 || this.#stateView[RUNNING_INDEX] < this.#concurrency) {
155
+ // NOTE: The read of stateView followed by Atomics.add is a TOCTOU race. Multiple
156
+ // workers may simultaneously read a value below the concurrency limit and all
157
+ // proceed, briefly exceeding it by up to N-1 (where N is the number of workers).
158
+ // This is by design — the concurrency limit is a soft / best-effort constraint.
159
+ // Small, transient over-subscriptions are acceptable and self-correcting on the
160
+ // next release() cycle.
161
+ // Plain array read on x86 (aligned 32-bit reads don't tear); Atomics.load elsewhere.
162
+ const running = useAtomicLoad
163
+ ? Atomics.load(this.#stateView, RUNNING_INDEX)
164
+ : this.#stateView[RUNNING_INDEX]
165
+ if (this.#running < 1 || running < this.#concurrency) {
149
166
  Atomics.add(this.#stateView, RUNNING_INDEX, 1)
150
167
  this.#running += 1
151
168
  fn(opaque)
package/package.json CHANGED
@@ -1,23 +1,31 @@
1
1
  {
2
2
  "name": "@nxtedition/scheduler",
3
- "version": "3.0.2",
3
+ "version": "3.0.4",
4
4
  "type": "module",
5
5
  "main": "lib/index.js",
6
6
  "types": "lib/index.d.ts",
7
7
  "files": [
8
- "lib"
8
+ "lib",
9
+ "README.md",
10
+ "LICENSE"
9
11
  ],
10
- "license": "UNLICENSED",
12
+ "license": "MIT",
13
+ "publishConfig": {
14
+ "access": "public"
15
+ },
11
16
  "scripts": {
12
17
  "build": "rimraf lib && tsc && amaroc ./src/index.ts && mv src/index.js lib/",
13
18
  "prepublishOnly": "yarn build",
14
19
  "typecheck": "tsc --noEmit",
15
- "test": "node --test",
16
- "test:ci": "node --test"
20
+ "test": "yarn build && node --test",
21
+ "test:ci": "yarn build && node --test"
17
22
  },
18
23
  "devDependencies": {
24
+ "@types/node": "^25.2.3",
19
25
  "amaroc": "^1.0.1",
26
+ "oxlint-tsgolint": "^0.12.2",
20
27
  "rimraf": "^6.1.2",
21
28
  "typescript": "^5.9.3"
22
- }
29
+ },
30
+ "gitHead": "17807cefcac092e20ebf99befddf0e742e5fc0e2"
23
31
  }