@hanna84/mcp-writing 2.10.1 → 2.10.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -4,11 +4,21 @@ All notable changes to this project will be documented in this file. Dates are d
4
4
 
5
5
  Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
6
6
 
7
+ #### [v2.10.2](https://github.com/hannasdev/mcp-writing
8
+ /compare/v2.10.1...v2.10.2)
9
+
10
+ - refactor(async-jobs): extract startAsyncJob/pruneAsyncJobs/toPublicJob into async-jobs.js [`#106`](https://github.com/hannasdev/mcp-writing
11
+ /pull/106)
12
+
7
13
  #### [v2.10.1](https://github.com/hannasdev/mcp-writing.git
8
14
  /compare/v2.10.0...v2.10.1)
9
15
 
16
+ > 26 April 2026
17
+
10
18
  - refactor(review-bundles): split 997-line module into planner/renderer/writer [`#105`](https://github.com/hannasdev/mcp-writing.git
11
19
  /pull/105)
20
+ - Release 2.10.1 [`6d5702b`](https://github.com/hannasdev/mcp-writing
21
+ /commit/6d5702bb679dcacea9d6a4fe3b5305648e1c1100)
12
22
 
13
23
  #### [v2.10.0](https://github.com/hannasdev/mcp-writing.git
14
24
  /compare/v2.9.9...v2.10.0)
package/async-jobs.js ADDED
@@ -0,0 +1,218 @@
1
+ import fs from "node:fs";
2
+ import path from "node:path";
3
+ import os from "node:os";
4
+ import { spawn } from "node:child_process";
5
+ import { randomUUID } from "node:crypto";
6
+ import { ASYNC_PROGRESS_PREFIX } from "./async-progress.js";
7
+ import { checkpointJobCreate, checkpointJobFinish, pruneJobCheckpoints } from "./db.js";
8
+
9
+ export function readJsonIfExists(filePath) {
10
+ if (!filePath || !fs.existsSync(filePath)) return null;
11
+ try {
12
+ return JSON.parse(fs.readFileSync(filePath, "utf8"));
13
+ } catch {
14
+ return null;
15
+ }
16
+ }
17
+
18
+ export function createAsyncJobManager({ db, asyncJobs, ttlMs, runnerDir }) {
19
+ function pruneAsyncJobs() {
20
+ const now = Date.now();
21
+ let anyPruned = false;
22
+ for (const [id, job] of asyncJobs.entries()) {
23
+ if (!job.finishedAt) continue;
24
+ if (now - Date.parse(job.finishedAt) > ttlMs) {
25
+ try {
26
+ if (job.tmpDir && fs.existsSync(job.tmpDir)) {
27
+ fs.rmSync(job.tmpDir, { recursive: true, force: true });
28
+ } else {
29
+ if (job.requestPath && fs.existsSync(job.requestPath)) fs.unlinkSync(job.requestPath);
30
+ if (job.resultPath && fs.existsSync(job.resultPath)) fs.unlinkSync(job.resultPath);
31
+ }
32
+ } catch {
33
+ // best effort cleanup
34
+ }
35
+ asyncJobs.delete(id);
36
+ anyPruned = true;
37
+ }
38
+ }
39
+ if (anyPruned) {
40
+ try { pruneJobCheckpoints(db, ttlMs); } catch { /* best effort */ }
41
+ }
42
+ }
43
+
44
+ function toPublicJob(job, includeResult = true) {
45
+ return {
46
+ job_id: job.id,
47
+ kind: job.kind,
48
+ status: job.status,
49
+ created_at: job.createdAt,
50
+ started_at: job.startedAt,
51
+ finished_at: job.finishedAt,
52
+ pid: job.pid,
53
+ error: job.error,
54
+ ...(job.progress ? { progress: job.progress } : {}),
55
+ ...(includeResult ? { result: job.result } : {}),
56
+ };
57
+ }
58
+
59
+ function startAsyncJob({ kind, requestPayload, onComplete }) {
60
+ pruneAsyncJobs();
61
+
62
+ const id = randomUUID();
63
+ const tmpPrefix = path.join(os.tmpdir(), "mcp-writing-job-");
64
+ const tmpDir = fs.mkdtempSync(tmpPrefix);
65
+ const requestPath = path.join(tmpDir, `${id}.request.json`);
66
+ const resultPath = path.join(tmpDir, `${id}.result.json`);
67
+
68
+ fs.writeFileSync(requestPath, JSON.stringify(requestPayload, null, 2), "utf8");
69
+
70
+ const runnerPath = path.join(runnerDir, "scripts", "async-job-runner.mjs");
71
+ const child = spawn(
72
+ process.execPath,
73
+ ["--experimental-sqlite", runnerPath, requestPath, resultPath],
74
+ {
75
+ env: process.env,
76
+ stdio: ["ignore", "pipe", "pipe"],
77
+ }
78
+ );
79
+
80
+ const job = {
81
+ id,
82
+ kind,
83
+ status: "running",
84
+ createdAt: new Date().toISOString(),
85
+ startedAt: new Date().toISOString(),
86
+ finishedAt: null,
87
+ pid: child.pid,
88
+ tmpDir,
89
+ requestPath,
90
+ resultPath,
91
+ result: null,
92
+ progress: null,
93
+ error: null,
94
+ onComplete,
95
+ child,
96
+ };
97
+ asyncJobs.set(id, job);
98
+ try {
99
+ checkpointJobCreate(db, job);
100
+ } catch (err) {
101
+ process.stderr.write(`[mcp-writing] WARNING: failed to checkpoint job ${id}: ${err.message}\n`);
102
+ }
103
+
104
+ let stdoutBuffer = "";
105
+ child.stdout.on("data", (chunk) => {
106
+ stdoutBuffer += chunk.toString("utf8");
107
+ const lines = stdoutBuffer.split("\n");
108
+ stdoutBuffer = lines.pop() ?? "";
109
+
110
+ for (const line of lines) {
111
+ const trimmed = line.trim();
112
+ if (!trimmed.startsWith(ASYNC_PROGRESS_PREFIX)) continue;
113
+ const payload = trimmed.slice(ASYNC_PROGRESS_PREFIX.length);
114
+ try {
115
+ const progress = JSON.parse(payload);
116
+ if (progress && typeof progress === "object") {
117
+ const nextProgress = {
118
+ total_scenes: Number(progress.total_scenes ?? 0),
119
+ processed_scenes: Number(progress.processed_scenes ?? 0),
120
+ scenes_changed: Number(progress.scenes_changed ?? 0),
121
+ failed_scenes: Number(progress.failed_scenes ?? 0),
122
+ };
123
+ job.progress = nextProgress;
124
+ }
125
+ } catch {
126
+ // Ignore malformed progress lines; they are best-effort telemetry.
127
+ }
128
+ }
129
+ });
130
+ child.stderr.on("data", () => {
131
+ // avoid crashing on stderr backpressure for noisy runs
132
+ });
133
+
134
+ child.on("error", (error) => {
135
+ if (job.status === "cancelling") {
136
+ job.status = "cancelled";
137
+ job.error = error.message;
138
+ job.finishedAt = new Date().toISOString();
139
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
140
+ pruneAsyncJobs();
141
+ return;
142
+ }
143
+ job.status = "failed";
144
+ job.error = error.message;
145
+ job.finishedAt = new Date().toISOString();
146
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
147
+ pruneAsyncJobs();
148
+ });
149
+
150
+ child.on("exit", (code, signal) => {
151
+ const payload = readJsonIfExists(resultPath);
152
+ const successful = payload?.ok === true;
153
+ const cancelledBySignal = signal === "SIGTERM" || signal === "SIGKILL";
154
+ const cancelledByPayload = payload?.cancelled === true;
155
+
156
+ job.finishedAt = new Date().toISOString();
157
+ job.result = payload;
158
+
159
+ const hasProgressFields = payload && (
160
+ payload.total_scenes !== undefined
161
+ || payload.processed_scenes !== undefined
162
+ || payload.scenes_changed !== undefined
163
+ || payload.failed_scenes !== undefined
164
+ );
165
+
166
+ if (payload && payload.ok === true && hasProgressFields) {
167
+ job.progress = {
168
+ total_scenes: Number(payload.total_scenes ?? job.progress?.total_scenes ?? 0),
169
+ processed_scenes: Number(payload.processed_scenes ?? job.progress?.processed_scenes ?? 0),
170
+ scenes_changed: Number(payload.scenes_changed ?? job.progress?.scenes_changed ?? 0),
171
+ failed_scenes: Number(payload.failed_scenes ?? job.progress?.failed_scenes ?? 0),
172
+ };
173
+ }
174
+
175
+ if (job.status === "cancelling") {
176
+ if (cancelledByPayload) {
177
+ job.status = "cancelled";
178
+ job.error = "Async job cancelled after returning partial results.";
179
+ } else if (successful && !cancelledBySignal) {
180
+ // Race: cancellation was requested as work completed successfully.
181
+ job.status = "completed";
182
+ } else {
183
+ job.status = "cancelled";
184
+ job.error = cancelledBySignal
185
+ ? `Async job cancelled by signal ${signal}.`
186
+ : payload?.error?.message ?? payload?.error ?? "Async job cancelled.";
187
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
188
+ pruneAsyncJobs();
189
+ return;
190
+ }
191
+ } else {
192
+ job.status = successful ? "completed" : "failed";
193
+ if (!successful) {
194
+ job.error = payload?.error?.message
195
+ ?? payload?.error
196
+ ?? (signal
197
+ ? `Async job exited due to signal ${signal}.`
198
+ : `Async job exited with code ${code}.`);
199
+ }
200
+ }
201
+
202
+ if (job.status === "completed" && typeof job.onComplete === "function") {
203
+ try {
204
+ job.onComplete(job);
205
+ } catch (error) {
206
+ job.status = "failed";
207
+ job.error = error instanceof Error ? error.message : String(error);
208
+ }
209
+ }
210
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
211
+ pruneAsyncJobs();
212
+ });
213
+
214
+ return job;
215
+ }
216
+
217
+ return { pruneAsyncJobs, toPublicJob, startAsyncJob };
218
+ }
package/index.js CHANGED
@@ -4,17 +4,15 @@ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"
4
4
  import http from "node:http";
5
5
  import fs from "node:fs";
6
6
  import path from "node:path";
7
- import os from "node:os";
8
7
  import { randomUUID } from "node:crypto";
9
- import { spawn } from "node:child_process";
10
8
  import { fileURLToPath } from "node:url";
11
9
  import matter from "gray-matter";
12
10
  import yaml from "js-yaml";
13
- import { openDb, checkpointJobCreate, checkpointJobFinish, loadStalledJobs, pruneJobCheckpoints } from "./db.js";
11
+ import { openDb, checkpointJobFinish, loadStalledJobs, pruneJobCheckpoints } from "./db.js";
14
12
  import { syncAll, isSyncDirWritable, getSyncOwnershipDiagnostics, sidecarPath, isStructuralProjectId } from "./sync.js";
15
13
  import { isGitAvailable, isGitRepository, initGitRepository, getSceneProseAtCommit } from "./git.js";
16
14
  import { renderCharacterArcTemplate, renderCharacterSheetTemplate, renderPlaceSheetTemplate, slugifyEntityName } from "./world-entity-templates.js";
17
- import { ASYNC_PROGRESS_PREFIX } from "./async-progress.js";
15
+ import { createAsyncJobManager, readJsonIfExists } from "./async-jobs.js";
18
16
  import { STYLEGUIDE_CONFIG_BASENAME } from "./prose-styleguide.js";
19
17
  import { ReviewBundlePlanError } from "./review-bundles.js";
20
18
  import { registerSyncTools } from "./tools/sync.js";
@@ -156,214 +154,6 @@ const MCP_SERVER_VERSION = typeof pkg.version === "string" && pkg.version.trim()
156
154
  : "0.0.0";
157
155
  const asyncJobs = new Map();
158
156
 
159
- function pruneAsyncJobs() {
160
- const now = Date.now();
161
- let anyPruned = false;
162
- for (const [id, job] of asyncJobs.entries()) {
163
- if (!job.finishedAt) continue;
164
- if (now - Date.parse(job.finishedAt) > ASYNC_JOB_TTL_MS) {
165
- try {
166
- if (job.tmpDir && fs.existsSync(job.tmpDir)) {
167
- fs.rmSync(job.tmpDir, { recursive: true, force: true });
168
- } else {
169
- if (job.requestPath && fs.existsSync(job.requestPath)) fs.unlinkSync(job.requestPath);
170
- if (job.resultPath && fs.existsSync(job.resultPath)) fs.unlinkSync(job.resultPath);
171
- }
172
- } catch {
173
- // best effort cleanup
174
- }
175
- asyncJobs.delete(id);
176
- anyPruned = true;
177
- }
178
- }
179
- if (anyPruned) {
180
- try { pruneJobCheckpoints(db, ASYNC_JOB_TTL_MS); } catch { /* best effort */ }
181
- }
182
- }
183
-
184
- function readJsonIfExists(filePath) {
185
- if (!filePath || !fs.existsSync(filePath)) return null;
186
- try {
187
- return JSON.parse(fs.readFileSync(filePath, "utf8"));
188
- } catch {
189
- return null;
190
- }
191
- }
192
-
193
- function toPublicJob(job, includeResult = true) {
194
- return {
195
- job_id: job.id,
196
- kind: job.kind,
197
- status: job.status,
198
- created_at: job.createdAt,
199
- started_at: job.startedAt,
200
- finished_at: job.finishedAt,
201
- pid: job.pid,
202
- error: job.error,
203
- ...(job.progress ? { progress: job.progress } : {}),
204
- ...(includeResult ? { result: job.result } : {}),
205
- };
206
- }
207
-
208
- function startAsyncJob({ kind, requestPayload, onComplete }) {
209
- pruneAsyncJobs();
210
- const progressPrefix = ASYNC_PROGRESS_PREFIX;
211
-
212
- const id = randomUUID();
213
- const tmpPrefix = path.join(os.tmpdir(), "mcp-writing-job-");
214
- const tmpDir = fs.mkdtempSync(tmpPrefix);
215
- const requestPath = path.join(tmpDir, `${id}.request.json`);
216
- const resultPath = path.join(tmpDir, `${id}.result.json`);
217
-
218
- fs.writeFileSync(requestPath, JSON.stringify(requestPayload, null, 2), "utf8");
219
-
220
- const runnerPath = path.join(__dirname, "scripts", "async-job-runner.mjs");
221
- const child = spawn(
222
- process.execPath,
223
- ["--experimental-sqlite", runnerPath, requestPath, resultPath],
224
- {
225
- env: process.env,
226
- stdio: ["ignore", "pipe", "pipe"],
227
- }
228
- );
229
-
230
- const job = {
231
- id,
232
- kind,
233
- status: "running",
234
- createdAt: new Date().toISOString(),
235
- startedAt: new Date().toISOString(),
236
- finishedAt: null,
237
- pid: child.pid,
238
- tmpDir,
239
- requestPath,
240
- resultPath,
241
- result: null,
242
- progress: null,
243
- error: null,
244
- onComplete,
245
- child,
246
- };
247
- asyncJobs.set(id, job);
248
- try {
249
- checkpointJobCreate(db, job);
250
- } catch (err) {
251
- process.stderr.write(`[mcp-writing] WARNING: failed to checkpoint job ${id}: ${err.message}\n`);
252
- }
253
-
254
- let stdoutBuffer = "";
255
- child.stdout.on("data", (chunk) => {
256
- stdoutBuffer += chunk.toString("utf8");
257
- const lines = stdoutBuffer.split("\n");
258
- stdoutBuffer = lines.pop() ?? "";
259
-
260
- for (const line of lines) {
261
- const trimmed = line.trim();
262
- if (!trimmed.startsWith(progressPrefix)) continue;
263
- const payload = trimmed.slice(progressPrefix.length);
264
- try {
265
- const progress = JSON.parse(payload);
266
- if (progress && typeof progress === "object") {
267
- const nextProgress = {
268
- total_scenes: Number(progress.total_scenes ?? 0),
269
- processed_scenes: Number(progress.processed_scenes ?? 0),
270
- scenes_changed: Number(progress.scenes_changed ?? 0),
271
- failed_scenes: Number(progress.failed_scenes ?? 0),
272
- };
273
- job.progress = nextProgress;
274
- }
275
- } catch {
276
- // Ignore malformed progress lines; they are best-effort telemetry.
277
- }
278
- }
279
- });
280
- child.stderr.on("data", () => {
281
- // avoid crashing on stderr backpressure for noisy runs
282
- });
283
-
284
- child.on("error", (error) => {
285
- if (job.status === "cancelling") {
286
- job.status = "cancelled";
287
- job.error = error.message;
288
- job.finishedAt = new Date().toISOString();
289
- try { checkpointJobFinish(db, job); } catch { /* best effort */ }
290
- pruneAsyncJobs();
291
- return;
292
- }
293
- job.status = "failed";
294
- job.error = error.message;
295
- job.finishedAt = new Date().toISOString();
296
- try { checkpointJobFinish(db, job); } catch { /* best effort */ }
297
- pruneAsyncJobs();
298
- });
299
-
300
- child.on("exit", (code, signal) => {
301
- const payload = readJsonIfExists(resultPath);
302
- const successful = payload?.ok === true;
303
- const cancelledBySignal = signal === "SIGTERM" || signal === "SIGKILL";
304
- const cancelledByPayload = payload?.cancelled === true;
305
-
306
- job.finishedAt = new Date().toISOString();
307
- job.result = payload;
308
-
309
- const hasProgressFields = payload && (
310
- payload.total_scenes !== undefined
311
- || payload.processed_scenes !== undefined
312
- || payload.scenes_changed !== undefined
313
- || payload.failed_scenes !== undefined
314
- );
315
-
316
- if (payload && payload.ok === true && hasProgressFields) {
317
- job.progress = {
318
- total_scenes: Number(payload.total_scenes ?? job.progress?.total_scenes ?? 0),
319
- processed_scenes: Number(payload.processed_scenes ?? job.progress?.processed_scenes ?? 0),
320
- scenes_changed: Number(payload.scenes_changed ?? job.progress?.scenes_changed ?? 0),
321
- failed_scenes: Number(payload.failed_scenes ?? job.progress?.failed_scenes ?? 0),
322
- };
323
- }
324
-
325
- if (job.status === "cancelling") {
326
- if (cancelledByPayload) {
327
- job.status = "cancelled";
328
- job.error = "Async job cancelled after returning partial results.";
329
- } else if (successful && !cancelledBySignal) {
330
- // Race: cancellation was requested as work completed successfully.
331
- job.status = "completed";
332
- } else {
333
- job.status = "cancelled";
334
- job.error = cancelledBySignal
335
- ? `Async job cancelled by signal ${signal}.`
336
- : payload?.error?.message ?? payload?.error ?? "Async job cancelled.";
337
- try { checkpointJobFinish(db, job); } catch { /* best effort */ }
338
- pruneAsyncJobs();
339
- return;
340
- }
341
- } else {
342
- job.status = successful ? "completed" : "failed";
343
- if (!successful) {
344
- job.error = payload?.error?.message
345
- ?? payload?.error
346
- ?? (signal
347
- ? `Async job exited due to signal ${signal}.`
348
- : `Async job exited with code ${code}.`);
349
- }
350
- }
351
-
352
- if (job.status === "completed" && typeof job.onComplete === "function") {
353
- try {
354
- job.onComplete(job);
355
- } catch (error) {
356
- job.status = "failed";
357
- job.error = error instanceof Error ? error.message : String(error);
358
- }
359
- }
360
- try { checkpointJobFinish(db, job); } catch { /* best effort */ }
361
- pruneAsyncJobs();
362
- });
363
-
364
- return job;
365
- }
366
-
367
157
  function paginateRows(rows, { page, pageSize, forcePagination = false }) {
368
158
  const totalCount = rows.length;
369
159
  const shouldPaginate = forcePagination || page !== undefined || pageSize !== undefined;
@@ -682,6 +472,13 @@ if (stalledJobs.length > 0) {
682
472
  process.stderr.write(`[mcp-writing] Marked ${stalledJobs.length} stalled job(s) as failed after restart.\n`);
683
473
  }
684
474
 
475
+ const { pruneAsyncJobs, startAsyncJob, toPublicJob } = createAsyncJobManager({
476
+ db,
477
+ asyncJobs,
478
+ ttlMs: ASYNC_JOB_TTL_MS,
479
+ runnerDir: __dirname,
480
+ });
481
+
685
482
  process.stderr.write(`[mcp-writing] Sync dir: ${SYNC_DIR_ABS}\n`);
686
483
  process.stderr.write(`[mcp-writing] DB path: ${DB_PATH_DISPLAY}\n`);
687
484
 
package/package.json CHANGED
@@ -1,11 +1,12 @@
1
1
  {
2
2
  "name": "@hanna84/mcp-writing",
3
- "version": "2.10.1",
3
+ "version": "2.10.2",
4
4
  "description": "MCP service for AI-assisted reasoning and editing on long-form fiction projects",
5
5
  "type": "module",
6
6
  "main": "index.js",
7
7
  "files": [
8
8
  "index.js",
9
+ "async-jobs.js",
9
10
  "async-progress.js",
10
11
  "scene-character-batch.js",
11
12
  "scrivener-direct.js",