arcway 0.1.13 → 0.1.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,253 @@
1
+ import { Worker } from 'node:worker_threads';
2
+ import { fileURLToPath } from 'node:url';
3
+ import path from 'node:path';
4
+
5
+ const WORKER_ENTRY_URL = new URL('./worker-entry.js', import.meta.url);
6
+
7
// Sent from workers so the pool can correlate replies back to the matching
// in-flight task. Tasks time out (terminating the worker) if the worker never
// produces a settlement message for this id.
let taskCounter = 0;
function nextTaskId() {
  // Wrap at 2^32 so ids stay small uint32 values even in long-lived processes.
  const next = (taskCounter + 1) >>> 0;
  taskCounter = next;
  return next;
}
15
+
16
// Structured-clone-safe snapshot of an Error. Worker threads can structured-
// clone Error instances, but across `postMessage` non-enumerable fields (cause,
// custom props) survive unevenly across Node versions — normalise explicitly.
//
// Non-Error values become a generic { name: 'Error', message: String(value) }.
// The `cause` chain is serialized recursively, capped at a small depth so a
// self-referential or pathological chain cannot recurse forever.
function serializeError(err, depth = 0) {
  if (!(err instanceof Error)) return { name: 'Error', message: String(err) };
  const out = { name: err.name, message: err.message, stack: err.stack };
  if (err.cause !== undefined && depth < 5) {
    out.cause = serializeError(err.cause, depth + 1);
  }
  return out;
}
23
+
24
// Rehydrate an Error from the plain-object snapshot produced by
// `serializeError`. Tolerates null/malformed input (falls back to a generic
// message) and restores the `cause` chain, capped at the same depth as the
// serializer so malformed payloads cannot recurse forever.
function deserializeError(raw, depth = 0) {
  const err = new Error(raw?.message ?? 'Worker task failed');
  if (raw?.name) err.name = raw.name;
  if (raw?.stack) err.stack = raw.stack;
  if (raw?.cause !== undefined && depth < 5) {
    err.cause = deserializeError(raw.cause, depth + 1);
  }
  return err;
}
30
+
31
// Each pending task carries its own resolve/reject plus a timeout timer that
// nukes the worker if the handler wedges (esbuild-style infinite loops, deadlocks
// against an external service, etc.). staleTimeoutMs defaults to the dispatcher's
// stale timeout but can be overridden per-job.
//
// Lifecycle (as implemented below):
//  - Workers spawn lazily, up to `size`, and are parked on `_idle` between
//    tasks. A worker runs at most one task at a time, tracked via the
//    `worker._activeTask` expando set in `_dispatch`.
//  - Timed-out or errored workers are discarded, never reused; replacements
//    spawn on the next `run()` or via `_fillFromPending()`.
class WorkerPool {
  // Worker entry script: a file-path string or a file:// URL.
  _entryUrl;
  // Maximum number of concurrently live workers (always >= 1).
  _size;
  // Passed as `workerData` to every spawned worker.
  _workerData;
  // All live workers, busy or idle.
  _workers = new Set();
  // Workers with no active task, available for immediate dispatch (LIFO).
  _idle = [];
  // Tasks queued while every worker slot is busy (FIFO via shift()).
  _pending = [];
  // Set by shutdown(); makes run() throw and stops respawning workers.
  _closed = false;
  // Fallback per-task timeout in ms; null/undefined disables timeouts.
  _defaultTimeoutMs;

  // options: { entryUrl?, size?, workerData?, defaultTimeoutMs? }.
  // `size` is truncated to an integer and clamped to at least 1.
  constructor(options = {}) {
    this._entryUrl = options.entryUrl ?? WORKER_ENTRY_URL;
    const requested = options.size ?? 1;
    this._size = Math.max(1, requested | 0);
    this._workerData = options.workerData ?? null;
    this._defaultTimeoutMs = options.defaultTimeoutMs ?? null;
  }

  // Configured maximum worker count.
  get size() {
    return this._size;
  }

  // Number of currently live workers (busy + idle).
  get workerCount() {
    return this._workers.size;
  }

  // Number of parked, immediately-dispatchable workers.
  get idleCount() {
    return this._idle.length;
  }

  // Number of tasks waiting for a worker slot.
  get pendingCount() {
    return this._pending.length;
  }

  // Run `task` on a worker. Resolves with the worker's result or rejects on
  // worker error/exit/timeout. options.timeoutMs overrides the pool default.
  // Throws synchronously-as-rejection if the pool is already closed.
  async run(task, options = {}) {
    if (this._closed) throw new Error('Worker pool is closed');
    const timeoutMs = options.timeoutMs ?? this._defaultTimeoutMs;
    return new Promise((resolve, reject) => {
      const entry = {
        id: nextTaskId(),
        task,
        timeoutMs,
        resolve,
        reject,
        worker: null,
        timer: null,
      };
      // Prefer an idle worker; otherwise spawn (if under capacity); otherwise queue.
      const worker = this._idle.pop();
      if (worker) {
        this._dispatch(worker, entry);
        return;
      }
      if (this._workers.size < this._size) {
        const fresh = this._spawnWorker();
        this._dispatch(fresh, entry);
        return;
      }
      this._pending.push(entry);
    });
  }

  // Create a Worker for the configured entry script, wire its lifecycle
  // events back into the pool, and register it in `_workers`.
  _spawnWorker() {
    const entryPath =
      typeof this._entryUrl === 'string' ? this._entryUrl : fileURLToPath(this._entryUrl);
    const worker = new Worker(entryPath, {
      workerData: this._workerData,
    });
    // Expando tracking the single in-flight task (null when idle).
    worker._activeTask = null;
    worker.on('message', (msg) => this._onMessage(worker, msg));
    worker.on('error', (err) => this._onError(worker, err));
    worker.on('exit', (code) => this._onExit(worker, code));
    this._workers.add(worker);
    return worker;
  }

  // Bind `entry` to `worker`, arm the timeout timer (if any), and post the task.
  _dispatch(worker, entry) {
    entry.worker = worker;
    worker._activeTask = entry;
    if (entry.timeoutMs && entry.timeoutMs > 0) {
      entry.timer = setTimeout(() => this._onTimeout(entry), entry.timeoutMs);
      // Don't let a pending timer keep the process alive during shutdown.
      if (typeof entry.timer.unref === 'function') entry.timer.unref();
    }
    worker.postMessage({ id: entry.id, task: entry.task });
  }

  // Settle the worker's active task from a reply message, then reuse the worker.
  _onMessage(worker, msg) {
    const entry = worker._activeTask;
    // Worker sent a reply after we already timed it out or terminated it.
    if (!entry || entry.id !== msg?.id) return;
    this._clearTimer(entry);
    worker._activeTask = null;
    if (msg.status === 'ok') entry.resolve(msg.result);
    else entry.reject(deserializeError(msg.error));
    this._drainOrPark(worker);
  }

  // Worker-level 'error': reject the in-flight task (if any) and discard the worker.
  _onError(worker, err) {
    const entry = worker._activeTask;
    if (entry) {
      this._clearTimer(entry);
      worker._activeTask = null;
      entry.reject(err instanceof Error ? err : new Error(String(err)));
    }
    // An 'error' event means the worker is going down — Node will emit 'exit'
    // shortly. Don't park or reuse it.
    this._workers.delete(worker);
    this._removeFromIdle(worker);
    if (!this._closed) this._fillFromPending();
  }

  // Worker exited: reject any task it still owned and forget the worker.
  _onExit(worker) {
    const entry = worker._activeTask;
    if (entry) {
      this._clearTimer(entry);
      worker._activeTask = null;
      entry.reject(new Error('Worker exited before producing a result'));
    }
    this._workers.delete(worker);
    this._removeFromIdle(worker);
    if (!this._closed) this._fillFromPending();
  }

  // Timeout timer fired for `entry`: reject it and kill its worker.
  _onTimeout(entry) {
    const worker = entry.worker;
    // Stale timer: the task already settled or moved on.
    if (!worker || worker._activeTask !== entry) return;
    worker._activeTask = null;
    entry.reject(new Error(`Worker task timed out after ${entry.timeoutMs}ms`));
    // A wedged handler will never return; terminate the worker so it doesn't
    // keep holding a DB connection / file handle / etc. A replacement spawns
    // lazily on the next `run()`.
    this._workers.delete(worker);
    this._removeFromIdle(worker);
    worker.terminate().catch(() => {});
    if (!this._closed) this._fillFromPending();
  }

  // After a task settles: hand the worker the next queued task, or park it idle.
  _drainOrPark(worker) {
    const next = this._pending.shift();
    if (next) {
      this._dispatch(worker, next);
    } else {
      this._idle.push(worker);
    }
  }

  // Spawn replacement workers for queued tasks while under capacity
  // (called after a worker is discarded via error/exit/timeout).
  _fillFromPending() {
    while (this._pending.length > 0 && this._workers.size < this._size) {
      const entry = this._pending.shift();
      const worker = this._spawnWorker();
      this._dispatch(worker, entry);
    }
  }

  // Drop `worker` from the idle list if it is parked there.
  _removeFromIdle(worker) {
    const idx = this._idle.indexOf(worker);
    if (idx !== -1) this._idle.splice(idx, 1);
  }

  // Cancel and clear a task's timeout timer, if armed.
  _clearTimer(entry) {
    if (entry.timer) {
      clearTimeout(entry.timer);
      entry.timer = null;
    }
  }

  // Ask a worker to tear down its infrastructure (DB pool, redis) and exit
  // cleanly. Workers that don't ack within `gracefulExitMs` are terminated.
  // Resolves once the worker has exited (or been terminated); never rejects.
  _gracefulShutdownWorker(worker, gracefulExitMs) {
    return new Promise((resolve) => {
      let settled = false;
      const done = () => {
        if (settled) return;
        settled = true;
        clearTimeout(timer);
        resolve();
      };
      const timer = setTimeout(() => {
        worker.terminate().catch(() => {}).finally(done);
      }, gracefulExitMs);
      if (typeof timer.unref === 'function') timer.unref();
      worker.once('exit', done);
      // Zero-id control message — workers react to task.__shutdown.
      try {
        worker.postMessage({ id: 0, task: { __shutdown: true } });
      } catch {
        // Worker already down or channel closed — terminate to be safe.
        worker.terminate().catch(() => {}).finally(done);
      }
    });
  }

  // Close the pool: reject queued and in-flight tasks, then shut every worker
  // down gracefully (terminating stragglers after `gracefulExitMs`).
  async shutdown({ gracefulExitMs = 2000 } = {}) {
    this._closed = true;
    for (const entry of this._pending) {
      entry.reject(new Error('Worker pool is closed'));
    }
    this._pending = [];
    // Reject in-flight tasks — the worker will be torn down either via the
    // graceful shutdown message or via terminate() below.
    for (const worker of this._workers) {
      const entry = worker._activeTask;
      if (entry) {
        this._clearTimer(entry);
        worker._activeTask = null;
        entry.reject(new Error('Worker pool is closed'));
      }
    }
    const workers = [...this._workers];
    const exits = workers.map((w) => this._gracefulShutdownWorker(w, gracefulExitMs));
    this._workers.clear();
    this._idle = [];
    await Promise.all(exits);
  }
}
250
+
251
+ // Exposed for tests and for callers who need to refer to the default entry
252
+ // path (e.g. alternate workerData).
253
+ export { WorkerPool, WORKER_ENTRY_URL, serializeError, deserializeError };
@@ -74,4 +74,4 @@ class McpRouter {
74
74
  }
75
75
  }
76
76
 
77
- export { McpRouter, createDebugHandler, createMcpRuntime, isDebugRequest, startMcpServer };
77
+ export { McpRouter };
@@ -0,0 +1,274 @@
1
import path from 'node:path';
import fs from 'node:fs/promises';
import { existsSync } from 'node:fs';
import crypto from 'node:crypto';
import { createRequire } from 'node:module';
5
+
6
+ const require_ = createRequire(import.meta.url);
7
+ const ESBUILD_VERSION = require_('esbuild/package.json').version;
8
+
9
// Hex-encoded SHA-256 digest of `data` (string or Buffer).
function sha256(data) {
  const hash = crypto.createHash('sha256');
  hash.update(data);
  return hash.digest('hex');
}
12
+
13
// Hash of everything besides the input file contents that can change a build's
// output: the installed esbuild version plus the normalised build options.
// Missing string options collapse to '' and flags to booleans so the hash is
// stable across equivalent option objects.
function computeConfigHash({ target, minify, devMode, kind, nodeEnv }) {
  const payload = {
    esbuildVersion: ESBUILD_VERSION,
    target: target ?? '',
    minify: Boolean(minify),
    devMode: Boolean(devMode),
    kind: kind ?? '',
    nodeEnv: nodeEnv ?? process.env.NODE_ENV ?? '',
  };
  return sha256(JSON.stringify(payload));
}
25
+
26
// Cache artifacts live under node_modules/.cache/arcway-pages so that wiping
// node_modules also wipes the cache.
function cacheRoot(rootDir) {
  const segments = ['node_modules', '.cache', 'arcway-pages'];
  return path.join(rootDir, ...segments);
}
29
+
30
// Stable cache key for an entry point: hash of its absolute path.
function entryKey(entryPath) {
  const absolute = path.resolve(entryPath);
  return sha256(absolute);
}
33
+
34
// Per-kind subdirectory of the cache root (e.g. "client", "server").
function bucketDir(rootDir, kind) {
  const root = cacheRoot(rootDir);
  return path.join(root, kind);
}
37
+
38
// SHA-256 of a file's bytes, or null when the file can't be read (missing,
// permission error, …) — callers treat null as "cache invalid".
async function hashFileContent(filePath) {
  let bytes;
  try {
    bytes = await fs.readFile(filePath);
  } catch {
    return null;
  }
  return sha256(bytes);
}
46
+
47
// Resolve a (possibly relative) esbuild metafile input path to an absolute
// path. Metafile paths are normally relative to the current working directory,
// so try cwd-relative resolution first; if that candidate does not exist on
// disk, fall back to resolving against `rootDir` (when provided) so builds
// launched from a different cwd still hash the right files.
function resolveInputPath(p, rootDir) {
  if (path.isAbsolute(p)) return p;
  const fromCwd = path.resolve(p);
  if (existsSync(fromCwd)) return fromCwd;
  // Fall back to rootDir only when it was actually supplied; otherwise keep
  // the cwd-relative candidate (which will simply hash as missing upstream).
  return rootDir ? path.resolve(rootDir, p) : fromCwd;
}
53
+
54
// Combined content hash over a set of input files. Inputs are sorted first so
// the result is order-independent. Returns null as soon as any input can no
// longer be read — callers treat that as a cache miss.
async function hashInputs(inputs) {
  const files = [...inputs].sort();
  const lines = [];
  for (const file of files) {
    const digest = await hashFileContent(file);
    if (digest === null) return null;
    lines.push(`${file}:${digest}`);
  }
  return sha256(lines.join('\n'));
}
65
+
66
// Parse a JSON file, returning null on any failure (missing file, unreadable,
// malformed JSON).
async function readJson(filePath) {
  let text;
  try {
    text = await fs.readFile(filePath, 'utf-8');
  } catch {
    return null;
  }
  try {
    return JSON.parse(text);
  } catch {
    return null;
  }
}
73
+
74
// Serialize `value` to JSON via a uniquely named temp sibling + rename, so a
// concurrent reader never observes a partially written file.
async function writeJsonAtomic(filePath, value) {
  const suffix = crypto.randomBytes(4).toString('hex');
  const tmpPath = `${filePath}.tmp-${suffix}`;
  await fs.writeFile(tmpPath, JSON.stringify(value));
  await fs.rename(tmpPath, filePath);
}
79
+
80
// fs.cp with an existing destination is not atomic (internally unlink + copy),
// so a concurrent reader can observe ENOENT mid-write. Copy into a tmp sibling
// and rename into place — rename is atomic on the same filesystem.
async function cpAtomic(src, dst) {
  const suffix = crypto.randomBytes(4).toString('hex');
  const tmp = `${dst}.tmp-${suffix}`;
  try {
    await fs.cp(src, tmp);
    await fs.rename(tmp, dst);
  } catch (err) {
    // Best-effort cleanup of the temp sibling, then surface the original error.
    await fs.rm(tmp, { force: true }).catch(() => {});
    throw err;
  }
}
93
+
94
// Check whether a cached single-file bundle for `entryPath` is still valid:
// the config hash must match, every recorded input must hash to its stored
// value, and the cached artifact must still exist on disk. The cache paths are
// returned even on a miss so the caller can storeBundle under the same key.
async function lookupBundle({ rootDir, entryPath, configHash, kind }) {
  const bucket = bucketDir(rootDir, kind);
  const key = entryKey(entryPath);
  const metaPath = path.join(bucket, `${key}.meta.json`);
  const jsPath = path.join(bucket, `${key}.js`);
  const mapPath = path.join(bucket, `${key}.js.map`);
  const miss = { hit: false, key, bucket, metaPath, jsPath, mapPath };

  const meta = await readJson(metaPath);
  if (!meta || meta.configHash !== configHash) return miss;

  // Verify every recorded input still hashes to the stored value.
  const currentHash = await hashInputs(meta.inputs);
  if (currentHash === null || currentHash !== meta.inputsHash) return miss;

  // Confirm cached artifact actually exists on disk.
  try {
    await fs.access(jsPath);
  } catch {
    return miss;
  }
  return { hit: true, key, bucket, metaPath, jsPath, mapPath };
}
118
+
119
// Persist a freshly built bundle (plus its optional source map) into the cache
// bucket, then write the metadata needed to validate the entry later.
async function storeBundle({ bucket, key, outFile, inputs, configHash, sourcemap }) {
  await fs.mkdir(bucket, { recursive: true });
  await cpAtomic(outFile, path.join(bucket, `${key}.js`));
  if (sourcemap) {
    try {
      await cpAtomic(`${outFile}.map`, path.join(bucket, `${key}.js.map`));
    } catch {
      // Source map may be absent; non-fatal.
    }
  }
  const inputsHash = await hashInputs(inputs);
  const meta = {
    configHash,
    inputs,
    inputsHash,
    mtime: Date.now(),
  };
  await writeJsonAtomic(path.join(bucket, `${key}.meta.json`), meta);
}
139
+
140
// Copy a cached bundle (and, when requested, its source map) back to the
// build's output location, creating the output directory as needed.
async function restoreBundle({ cacheJs, cacheMap, outFile, sourcemap }) {
  await fs.mkdir(path.dirname(outFile), { recursive: true });
  await fs.cp(cacheJs, outFile);
  if (!sourcemap) return;
  try {
    await fs.cp(cacheMap, outFile + '.map');
  } catch {
    // Source map may be absent in the cache entry; non-fatal.
  }
}
151
+
152
// Build `entryPath` with esbuild, consulting the on-disk cache first. On a hit
// the cached artifact is restored to `outFile` without invoking esbuild and
// `{ cacheHit: true }` is returned; on a miss the bundle is built with a
// metafile (so the exact input files can be recorded), stored in the cache,
// and `{ cacheHit: false, metafile }` is returned.
async function buildWithCache({
  rootDir,
  kind,
  entryPath,
  outFile,
  esbuildOptions,
  esbuild,
  devMode,
}) {
  const absEntry = path.resolve(entryPath);
  const wantSourcemap = !!esbuildOptions.sourcemap;
  const configHash = computeConfigHash({
    target: esbuildOptions.target,
    minify: esbuildOptions.minify,
    devMode,
    kind,
  });

  const lookup = await lookupBundle({ rootDir, entryPath: absEntry, configHash, kind });
  if (lookup.hit) {
    await restoreBundle({
      cacheJs: lookup.jsPath,
      cacheMap: lookup.mapPath,
      outFile,
      sourcemap: wantSourcemap,
    });
    return { cacheHit: true };
  }

  // Cache miss: build with a metafile so we know exactly which files fed the
  // bundle, then record them (as absolute paths) for future validation.
  const result = await esbuild.build({ ...esbuildOptions, metafile: true });
  const rawInputs = Object.keys(result.metafile?.inputs ?? {});
  const inputs = rawInputs.map((p) => resolveInputPath(p, rootDir));

  await storeBundle({
    bucket: lookup.bucket,
    key: lookup.key,
    outFile,
    inputs,
    configHash,
    sourcemap: wantSourcemap,
  });

  return { cacheHit: false, metafile: result.metafile };
}
196
+
197
// Multi-file cache for bundles with several output files (client build with
// code-splitting). Each cache entry lives under <bucket>/<coarseKey>/ and the
// index is a sidecar <bucket>/<coarseKey>.meta.json.
async function lookupMultiFileCache({ rootDir, kind, coarseKey }) {
  const bucket = bucketDir(rootDir, kind);
  const metaPath = path.join(bucket, `${coarseKey}.meta.json`);
  const cacheDir = path.join(bucket, coarseKey);
  const miss = { hit: false, bucket, cacheDir, metaPath };

  const meta = await readJson(metaPath);
  if (!meta) return miss;

  const currentHash = await hashInputs(meta.inputs);
  if (currentHash === null || currentHash !== meta.inputsHash) return miss;

  // Sanity-check every recorded output file still exists in the cache dir.
  for (const rel of meta.outputs) {
    try {
      await fs.access(path.join(cacheDir, rel));
    } catch {
      return miss;
    }
  }
  return { hit: true, bucket, cacheDir, metaPath, meta };
}
220
+
221
// Copy every cached output file back under `destDir`, recreating any nested
// directories. Files are copied concurrently.
async function restoreMultiFileCache({ cacheDir, outputs, destDir }) {
  await fs.mkdir(destDir, { recursive: true });
  const copyOne = async (rel) => {
    const dst = path.join(destDir, rel);
    await fs.mkdir(path.dirname(dst), { recursive: true });
    await fs.cp(path.join(cacheDir, rel), dst);
  };
  await Promise.all(outputs.map(copyOne));
}
232
+
233
// Persist a multi-file build into the cache: replace the entry directory
// wholesale (so stale outputs from a previous build can't linger), copy each
// output in concurrently, then write the sidecar metadata used by
// lookupMultiFileCache to validate the entry.
async function storeMultiFileCache({
  bucket,
  coarseKey,
  destDir,
  outputs,
  inputs,
  metadata,
}) {
  const cacheDir = path.join(bucket, coarseKey);
  await fs.rm(cacheDir, { recursive: true, force: true });
  await fs.mkdir(cacheDir, { recursive: true });

  const storeOne = async (rel) => {
    const dst = path.join(cacheDir, rel);
    await fs.mkdir(path.dirname(dst), { recursive: true });
    await cpAtomic(path.join(destDir, rel), dst);
  };
  await Promise.all(outputs.map(storeOne));

  const inputsHash = await hashInputs(inputs);
  const metaPath = path.join(bucket, `${coarseKey}.meta.json`);
  await writeJsonAtomic(metaPath, {
    inputs,
    inputsHash,
    outputs,
    metadata,
    mtime: Date.now(),
  });
}
262
+
263
+ export {
264
+ buildWithCache,
265
+ computeConfigHash,
266
+ cacheRoot,
267
+ entryKey,
268
+ ESBUILD_VERSION,
269
+ sha256,
270
+ hashInputs,
271
+ lookupMultiFileCache,
272
+ restoreMultiFileCache,
273
+ storeMultiFileCache,
274
+ };