@hanna84/mcp-writing 2.9.9 → 2.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4) hide show
  1. package/CHANGELOG.md +10 -0
  2. package/db.js +73 -0
  3. package/index.js +37 -1
  4. package/package.json +1 -1
package/CHANGELOG.md CHANGED
@@ -4,11 +4,21 @@ All notable changes to this project will be documented in this file. Dates are d
4
4
 
5
5
  Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
6
6
 
7
+ #### [v2.10.0](https://github.com/hannasdev/mcp-writing
8
+ /compare/v2.9.9...v2.10.0)
9
+
10
+ - feat(db): persist async job state to SQLite for restart recovery [`#104`](https://github.com/hannasdev/mcp-writing
11
+ /pull/104)
12
+
7
13
  #### [v2.9.9](https://github.com/hannasdev/mcp-writing.git
8
14
  /compare/v2.9.8...v2.9.9)
9
15
 
16
+ > 26 April 2026
17
+
10
18
  - refactor(db): replace ad-hoc migration checks with numbered migration… [`#102`](https://github.com/hannasdev/mcp-writing.git
11
19
  /pull/102)
20
+ - Release 2.9.9 [`22babc1`](https://github.com/hannasdev/mcp-writing.git
21
+ /commit/22babc1e857a3401240c28b5ac59164e01fd3784)
12
22
 
13
23
  #### [v2.9.8](https://github.com/hannasdev/mcp-writing.git
14
24
  /compare/v2.9.7...v2.9.8)
package/db.js CHANGED
@@ -124,6 +124,17 @@ export const SCHEMA = `
124
124
  id INTEGER PRIMARY KEY CHECK (id = 1),
125
125
  version INTEGER NOT NULL
126
126
  );
127
+
128
+ CREATE TABLE IF NOT EXISTS async_jobs (
129
+ job_id TEXT NOT NULL PRIMARY KEY,
130
+ kind TEXT NOT NULL,
131
+ status TEXT NOT NULL,
132
+ created_at TEXT NOT NULL,
133
+ started_at TEXT,
134
+ finished_at TEXT,
135
+ error TEXT,
136
+ result_json TEXT
137
+ );
127
138
  `;
128
139
 
129
140
  // Each function is applied exactly once, in order, when version < its index+1.
@@ -193,3 +204,65 @@ export function openDb(dbPath) {
193
204
  applyMigrations(db);
194
205
  return db;
195
206
  }
207
+
208
/**
 * Record a freshly created async job in the async_jobs checkpoint table so
 * it can be detected (and marked failed) if the server restarts mid-run.
 *
 * INSERT OR IGNORE keeps any existing row with the same job_id untouched,
 * making the call safe to repeat.
 *
 * @param {object} db  - open SQLite handle (better-sqlite3 style API)
 * @param {object} job - in-memory job record ({ id, kind, status, createdAt, startedAt? })
 */
export function checkpointJobCreate(db, job) {
  const { id, kind, status, createdAt, startedAt } = job;
  const insert = db.prepare(`
    INSERT OR IGNORE INTO async_jobs (job_id, kind, status, created_at, started_at)
    VALUES (?, ?, ?, ?, ?)
  `);
  insert.run(id, kind, status, createdAt, startedAt ?? null);
}
214
+
215
/**
 * Persist a job's terminal state (completed / failed / cancelled).
 *
 * Uses an UPSERT so a terminal row is always recorded even if
 * checkpointJobCreate was skipped due to a best-effort failure.
 *
 * Fix: the conflict branch previously did not update started_at, so a
 * start time assigned after checkpointJobCreate wrote NULL was lost —
 * the checkpoint row kept a stale NULL started_at forever. The update
 * now carries started_at through as well.
 *
 * @param {object} db  - open SQLite handle (better-sqlite3 style API)
 * @param {object} job - in-memory job record; result (if any) is serialized to JSON
 */
export function checkpointJobFinish(db, job) {
  db.prepare(`
    INSERT INTO async_jobs
      (job_id, kind, status, created_at, started_at, finished_at, error, result_json)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
    ON CONFLICT(job_id) DO UPDATE SET
      status = excluded.status,
      started_at = excluded.started_at,
      finished_at = excluded.finished_at,
      error = excluded.error,
      result_json = excluded.result_json
  `).run(
    job.id,
    job.kind,
    job.status,
    job.createdAt,
    job.startedAt ?? null,
    job.finishedAt ?? null,
    job.error ?? null,
    // != null deliberately lets falsy results (0, "", false) be serialized.
    job.result != null ? JSON.stringify(job.result) : null
  );
}
238
+
239
/**
 * Delete checkpoint rows for jobs that finished longer than ttlMs ago.
 * In-flight rows (finished_at IS NULL) are never removed — they are the
 * ones restart recovery needs.
 *
 * @param {object} db    - open SQLite handle (better-sqlite3 style API)
 * @param {number} ttlMs - retention window in milliseconds
 */
export function pruneJobCheckpoints(db, ttlMs) {
  const expiryThreshold = new Date(Date.now() - ttlMs).toISOString();
  const purge = db.prepare(`
    DELETE FROM async_jobs WHERE finished_at IS NOT NULL AND finished_at < ?
  `);
  purge.run(expiryThreshold);
}
245
+
246
/**
 * Load checkpoint rows for jobs that never reached a terminal state,
 * rehydrating them into the in-memory job shape used by asyncJobs.
 * Runtime-only fields (child process handle, callbacks, temp paths) are
 * unrecoverable after a restart and come back as null.
 *
 * @param {object} db - open SQLite handle (better-sqlite3 style API)
 * @returns {object[]} in-memory job records for stalled jobs
 */
export function loadStalledJobs(db) {
  // 'cancelling' is matched defensively; in practice only 'running' rows
  // exist, because no 'cancelling' checkpoint is written between create
  // and finish.
  const rows = db.prepare(`
    SELECT job_id, kind, status, created_at, started_at
    FROM async_jobs WHERE status IN ('running', 'cancelling')
  `).all();
  return rows.map((row) => {
    const rehydrated = {
      id: row.job_id,
      kind: row.kind,
      status: row.status,
      createdAt: row.created_at,
      startedAt: row.started_at ?? null,
      finishedAt: null,
      error: null,
      result: null,
      progress: null,
      child: null,
      onComplete: null,
      tmpDir: null,
      requestPath: null,
      resultPath: null,
    };
    return rehydrated;
  });
}
package/index.js CHANGED
@@ -10,7 +10,7 @@ import { spawn } from "node:child_process";
10
10
  import { fileURLToPath } from "node:url";
11
11
  import matter from "gray-matter";
12
12
  import yaml from "js-yaml";
13
- import { openDb } from "./db.js";
13
+ import { openDb, checkpointJobCreate, checkpointJobFinish, loadStalledJobs, pruneJobCheckpoints } from "./db.js";
14
14
  import { syncAll, isSyncDirWritable, getSyncOwnershipDiagnostics, sidecarPath, isStructuralProjectId } from "./sync.js";
15
15
  import { isGitAvailable, isGitRepository, initGitRepository, getSceneProseAtCommit } from "./git.js";
16
16
  import { renderCharacterArcTemplate, renderCharacterSheetTemplate, renderPlaceSheetTemplate, slugifyEntityName } from "./world-entity-templates.js";
@@ -158,6 +158,7 @@ const asyncJobs = new Map();
158
158
 
159
159
  function pruneAsyncJobs() {
160
160
  const now = Date.now();
161
+ let anyPruned = false;
161
162
  for (const [id, job] of asyncJobs.entries()) {
162
163
  if (!job.finishedAt) continue;
163
164
  if (now - Date.parse(job.finishedAt) > ASYNC_JOB_TTL_MS) {
@@ -172,8 +173,12 @@ function pruneAsyncJobs() {
172
173
  // best effort cleanup
173
174
  }
174
175
  asyncJobs.delete(id);
176
+ anyPruned = true;
175
177
  }
176
178
  }
179
+ if (anyPruned) {
180
+ try { pruneJobCheckpoints(db, ASYNC_JOB_TTL_MS); } catch { /* best effort */ }
181
+ }
177
182
  }
178
183
 
179
184
  function readJsonIfExists(filePath) {
@@ -240,6 +245,11 @@ function startAsyncJob({ kind, requestPayload, onComplete }) {
240
245
  child,
241
246
  };
242
247
  asyncJobs.set(id, job);
248
+ try {
249
+ checkpointJobCreate(db, job);
250
+ } catch (err) {
251
+ process.stderr.write(`[mcp-writing] WARNING: failed to checkpoint job ${id}: ${err.message}\n`);
252
+ }
243
253
 
244
254
  let stdoutBuffer = "";
245
255
  child.stdout.on("data", (chunk) => {
@@ -276,12 +286,14 @@ function startAsyncJob({ kind, requestPayload, onComplete }) {
276
286
  job.status = "cancelled";
277
287
  job.error = error.message;
278
288
  job.finishedAt = new Date().toISOString();
289
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
279
290
  pruneAsyncJobs();
280
291
  return;
281
292
  }
282
293
  job.status = "failed";
283
294
  job.error = error.message;
284
295
  job.finishedAt = new Date().toISOString();
296
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
285
297
  pruneAsyncJobs();
286
298
  });
287
299
 
@@ -322,6 +334,7 @@ function startAsyncJob({ kind, requestPayload, onComplete }) {
322
334
  job.error = cancelledBySignal
323
335
  ? `Async job cancelled by signal ${signal}.`
324
336
  : payload?.error?.message ?? payload?.error ?? "Async job cancelled.";
337
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
325
338
  pruneAsyncJobs();
326
339
  return;
327
340
  }
@@ -344,6 +357,7 @@ function startAsyncJob({ kind, requestPayload, onComplete }) {
344
357
  job.error = error instanceof Error ? error.message : String(error);
345
358
  }
346
359
  }
360
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
347
361
  pruneAsyncJobs();
348
362
  });
349
363
 
@@ -646,6 +660,28 @@ function createCanonicalWorldEntity({ kind, name, notes, projectId, universeId,
646
660
  // ---------------------------------------------------------------------------
647
661
  const db = openDb(DB_PATH);
648
662
 
663
// Recover jobs that were still in-flight when the previous server process
// exited: mark each one failed with an explanatory error so clients polling
// the job see a terminal state instead of a job that never finishes.
const stalledJobs = loadStalledJobs(db);
for (const stalled of stalledJobs) {
  stalled.status = "failed";
  stalled.error = "server restarted while job was running";
  stalled.finishedAt = new Date().toISOString();
  try {
    checkpointJobFinish(db, stalled);
  } catch (err) {
    process.stderr.write(`[mcp-writing] WARNING: failed to checkpoint recovered stalled job ${stalled.id}: ${err.message}\n`);
  }
  asyncJobs.set(stalled.id, stalled);
}
// Expired checkpoint rows from earlier sessions are pruned here
// unconditionally: completed/failed jobs from prior runs never enter
// asyncJobs, so the anyPruned flag in pruneAsyncJobs() would never become
// true for them.
try { pruneJobCheckpoints(db, ASYNC_JOB_TTL_MS); } catch { /* best effort */ }

if (stalledJobs.length > 0) {
  process.stderr.write(`[mcp-writing] Marked ${stalledJobs.length} stalled job(s) as failed after restart.\n`);
}
649
685
  process.stderr.write(`[mcp-writing] Sync dir: ${SYNC_DIR_ABS}\n`);
650
686
  process.stderr.write(`[mcp-writing] DB path: ${DB_PATH_DISPLAY}\n`);
651
687
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hanna84/mcp-writing",
3
- "version": "2.9.9",
3
+ "version": "2.10.0",
4
4
  "description": "MCP service for AI-assisted reasoning and editing on long-form fiction projects",
5
5
  "type": "module",
6
6
  "main": "index.js",