@hanna84/mcp-writing 2.9.8 → 2.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4) hide show
  1. package/CHANGELOG.md +20 -0
  2. package/db.js +123 -19
  3. package/index.js +37 -1
  4. package/package.json +1 -1
package/CHANGELOG.md CHANGED
@@ -4,11 +4,31 @@ All notable changes to this project will be documented in this file. Dates are d
4
4
 
5
5
  Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
6
6
 
7
+ #### [v2.10.0](https://github.com/hannasdev/mcp-writing/compare/v2.9.9...v2.10.0)
9
+
10
+ - feat(db): persist async job state to SQLite for restart recovery [`#104`](https://github.com/hannasdev/mcp-writing/pull/104)
12
+
13
+ #### [v2.9.9](https://github.com/hannasdev/mcp-writing/compare/v2.9.8...v2.9.9)
15
+
16
+ > 26 April 2026
17
+
18
+ - refactor(db): replace ad-hoc migration checks with numbered migration… [`#102`](https://github.com/hannasdev/mcp-writing/pull/102)
20
+ - Release 2.9.9 [`22babc1`](https://github.com/hannasdev/mcp-writing/commit/22babc1e857a3401240c28b5ac59164e01fd3784)
22
+
7
23
  #### [v2.9.8](https://github.com/hannasdev/mcp-writing/compare/v2.9.7...v2.9.8)
9
25
 
26
+ > 26 April 2026
27
+
10
28
  - refactor(tools): extract registerEditingTools into tools/editing.js [`#101`](https://github.com/hannasdev/mcp-writing/pull/101)
30
+ - Release 2.9.8 [`6e7d649`](https://github.com/hannasdev/mcp-writing/commit/6e7d64997cd22b6111a7053ef6050b568443c6b5)
12
32
 
13
33
  #### [v2.9.7](https://github.com/hannasdev/mcp-writing/compare/v2.9.6...v2.9.7)
package/db.js CHANGED
@@ -119,28 +119,42 @@ export const SCHEMA = `
119
119
  CREATE VIRTUAL TABLE IF NOT EXISTS scenes_fts USING fts5(
120
120
  scene_id, project_id, logline, title, keywords
121
121
  );
122
- `;
123
122
 
124
- export function openDb(dbPath) {
125
- const db = new DatabaseSync(dbPath);
126
- db.exec(SCHEMA);
123
+ CREATE TABLE IF NOT EXISTS schema_version (
124
+ id INTEGER PRIMARY KEY CHECK (id = 1),
125
+ version INTEGER NOT NULL
126
+ );
127
127
 
128
- const sceneColumns = db.prepare(`PRAGMA table_info(scenes)`).all();
129
- if (!sceneColumns.some(column => column.name === "chapter_title")) {
130
- db.exec(`ALTER TABLE scenes ADD COLUMN chapter_title TEXT;`);
131
- }
128
+ CREATE TABLE IF NOT EXISTS async_jobs (
129
+ job_id TEXT NOT NULL PRIMARY KEY,
130
+ kind TEXT NOT NULL,
131
+ status TEXT NOT NULL,
132
+ created_at TEXT NOT NULL,
133
+ started_at TEXT,
134
+ finished_at TEXT,
135
+ error TEXT,
136
+ result_json TEXT
137
+ );
138
+ `;
132
139
 
133
- // Rebuild legacy FTS table if it predates keyword indexing.
134
- // Preserve existing indexed rows so metadata search remains available
135
- // even before the next sync pass repopulates from source files.
136
- const ftsSql = db.prepare(`
137
- SELECT sql
138
- FROM sqlite_master
139
- WHERE type = 'table' AND name = 'scenes_fts'
140
- `).get()?.sql;
141
- if (typeof ftsSql === "string" && !ftsSql.toLowerCase().includes("keywords")) {
142
- db.exec(`BEGIN IMMEDIATE;`);
143
- try {
140
+ // Each function is applied exactly once, in order, when version < its index+1.
141
+ // Each migration runs inside a transaction with the version bump — crash-safe.
142
+ // Migrations must be idempotent (guard against already-applied state).
143
+ // Never edit existing entries — add new ones at the end.
144
+ const MIGRATIONS = [
145
+ // 1: add chapter_title column to scenes
146
+ (db) => {
147
+ const sceneColumns = db.prepare(`PRAGMA table_info(scenes)`).all();
148
+ if (!sceneColumns.some(c => c.name === "chapter_title")) {
149
+ db.exec(`ALTER TABLE scenes ADD COLUMN chapter_title TEXT;`);
150
+ }
151
+ },
152
+ // 2: rebuild FTS table to include keywords column (preserve existing rows)
153
+ (db) => {
154
+ const ftsSql = db.prepare(`
155
+ SELECT sql FROM sqlite_master WHERE type = 'table' AND name = 'scenes_fts'
156
+ `).get()?.sql;
157
+ if (typeof ftsSql === "string" && !ftsSql.toLowerCase().includes("keywords")) {
144
158
  db.exec(`
145
159
  CREATE VIRTUAL TABLE scenes_fts_migrating USING fts5(
146
160
  scene_id, project_id, logline, title, keywords
@@ -153,12 +167,102 @@ export function openDb(dbPath) {
153
167
  `);
154
168
  db.exec(`DROP TABLE scenes_fts;`);
155
169
  db.exec(`ALTER TABLE scenes_fts_migrating RENAME TO scenes_fts;`);
170
+ }
171
+ },
172
+ ];
173
+
174
+ // The version every database should reach after openDb. Not the current DB value —
175
+ // query schema_version directly if you need the live version of a specific database.
176
+ export const CURRENT_SCHEMA_VERSION = MIGRATIONS.length;
177
+
178
+ function applyMigrations(db) {
179
+ db.prepare(`INSERT OR IGNORE INTO schema_version (id, version) VALUES (1, 0)`).run();
180
+ for (;;) {
181
+ db.exec(`BEGIN IMMEDIATE;`);
182
+ try {
183
+ const { version } = db.prepare(`SELECT version FROM schema_version WHERE id = 1`).get();
184
+ if (version >= MIGRATIONS.length) {
185
+ db.exec(`COMMIT;`);
186
+ break;
187
+ }
188
+ MIGRATIONS[version](db);
189
+ // WHERE version = ? ensures the bump is monotonic: a concurrent opener
190
+ // that advanced the version first will cause this UPDATE to match 0 rows,
191
+ // which is safe — the migration is already applied.
192
+ db.prepare(`UPDATE schema_version SET version = ? WHERE id = 1 AND version = ?`).run(version + 1, version);
156
193
  db.exec(`COMMIT;`);
157
194
  } catch (err) {
158
195
  db.exec(`ROLLBACK;`);
159
196
  throw err;
160
197
  }
161
198
  }
199
+ }
162
200
 
201
+ export function openDb(dbPath) {
202
+ const db = new DatabaseSync(dbPath);
203
+ db.exec(SCHEMA);
204
+ applyMigrations(db);
163
205
  return db;
164
206
  }
207
+
208
+ export function checkpointJobCreate(db, job) {
209
+ db.prepare(`
210
+ INSERT OR IGNORE INTO async_jobs (job_id, kind, status, created_at, started_at)
211
+ VALUES (?, ?, ?, ?, ?)
212
+ `).run(job.id, job.kind, job.status, job.createdAt, job.startedAt ?? null);
213
+ }
214
+
215
+ export function checkpointJobFinish(db, job) {
216
+ // UPSERT so a terminal state is always recorded even if checkpointJobCreate
217
+ // was skipped due to a best-effort failure.
218
+ db.prepare(`
219
+ INSERT INTO async_jobs
220
+ (job_id, kind, status, created_at, started_at, finished_at, error, result_json)
221
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
222
+ ON CONFLICT(job_id) DO UPDATE SET
223
+ status = excluded.status,
224
+ finished_at = excluded.finished_at,
225
+ error = excluded.error,
226
+ result_json = excluded.result_json
227
+ `).run(
228
+ job.id,
229
+ job.kind,
230
+ job.status,
231
+ job.createdAt,
232
+ job.startedAt ?? null,
233
+ job.finishedAt ?? null,
234
+ job.error ?? null,
235
+ job.result != null ? JSON.stringify(job.result) : null
236
+ );
237
+ }
238
+
239
+ export function pruneJobCheckpoints(db, ttlMs) {
240
+ const cutoff = new Date(Date.now() - ttlMs).toISOString();
241
+ db.prepare(`
242
+ DELETE FROM async_jobs WHERE finished_at IS NOT NULL AND finished_at < ?
243
+ `).run(cutoff);
244
+ }
245
+
246
+ export function loadStalledJobs(db) {
247
+ // 'cancelling' included defensively; in practice only 'running' rows exist
248
+ // since we never write a 'cancelling' checkpoint between create and finish.
249
+ return db.prepare(`
250
+ SELECT job_id, kind, status, created_at, started_at
251
+ FROM async_jobs WHERE status IN ('running', 'cancelling')
252
+ `).all().map(row => ({
253
+ id: row.job_id,
254
+ kind: row.kind,
255
+ status: row.status,
256
+ createdAt: row.created_at,
257
+ startedAt: row.started_at ?? null,
258
+ finishedAt: null,
259
+ error: null,
260
+ result: null,
261
+ progress: null,
262
+ child: null,
263
+ onComplete: null,
264
+ tmpDir: null,
265
+ requestPath: null,
266
+ resultPath: null,
267
+ }));
268
+ }
package/index.js CHANGED
@@ -10,7 +10,7 @@ import { spawn } from "node:child_process";
10
10
  import { fileURLToPath } from "node:url";
11
11
  import matter from "gray-matter";
12
12
  import yaml from "js-yaml";
13
- import { openDb } from "./db.js";
13
+ import { openDb, checkpointJobCreate, checkpointJobFinish, loadStalledJobs, pruneJobCheckpoints } from "./db.js";
14
14
  import { syncAll, isSyncDirWritable, getSyncOwnershipDiagnostics, sidecarPath, isStructuralProjectId } from "./sync.js";
15
15
  import { isGitAvailable, isGitRepository, initGitRepository, getSceneProseAtCommit } from "./git.js";
16
16
  import { renderCharacterArcTemplate, renderCharacterSheetTemplate, renderPlaceSheetTemplate, slugifyEntityName } from "./world-entity-templates.js";
@@ -158,6 +158,7 @@ const asyncJobs = new Map();
158
158
 
159
159
  function pruneAsyncJobs() {
160
160
  const now = Date.now();
161
+ let anyPruned = false;
161
162
  for (const [id, job] of asyncJobs.entries()) {
162
163
  if (!job.finishedAt) continue;
163
164
  if (now - Date.parse(job.finishedAt) > ASYNC_JOB_TTL_MS) {
@@ -172,8 +173,12 @@ function pruneAsyncJobs() {
172
173
  // best effort cleanup
173
174
  }
174
175
  asyncJobs.delete(id);
176
+ anyPruned = true;
175
177
  }
176
178
  }
179
+ if (anyPruned) {
180
+ try { pruneJobCheckpoints(db, ASYNC_JOB_TTL_MS); } catch { /* best effort */ }
181
+ }
177
182
  }
178
183
 
179
184
  function readJsonIfExists(filePath) {
@@ -240,6 +245,11 @@ function startAsyncJob({ kind, requestPayload, onComplete }) {
240
245
  child,
241
246
  };
242
247
  asyncJobs.set(id, job);
248
+ try {
249
+ checkpointJobCreate(db, job);
250
+ } catch (err) {
251
+ process.stderr.write(`[mcp-writing] WARNING: failed to checkpoint job ${id}: ${err.message}\n`);
252
+ }
243
253
 
244
254
  let stdoutBuffer = "";
245
255
  child.stdout.on("data", (chunk) => {
@@ -276,12 +286,14 @@ function startAsyncJob({ kind, requestPayload, onComplete }) {
276
286
  job.status = "cancelled";
277
287
  job.error = error.message;
278
288
  job.finishedAt = new Date().toISOString();
289
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
279
290
  pruneAsyncJobs();
280
291
  return;
281
292
  }
282
293
  job.status = "failed";
283
294
  job.error = error.message;
284
295
  job.finishedAt = new Date().toISOString();
296
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
285
297
  pruneAsyncJobs();
286
298
  });
287
299
 
@@ -322,6 +334,7 @@ function startAsyncJob({ kind, requestPayload, onComplete }) {
322
334
  job.error = cancelledBySignal
323
335
  ? `Async job cancelled by signal ${signal}.`
324
336
  : payload?.error?.message ?? payload?.error ?? "Async job cancelled.";
337
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
325
338
  pruneAsyncJobs();
326
339
  return;
327
340
  }
@@ -344,6 +357,7 @@ function startAsyncJob({ kind, requestPayload, onComplete }) {
344
357
  job.error = error instanceof Error ? error.message : String(error);
345
358
  }
346
359
  }
360
+ try { checkpointJobFinish(db, job); } catch { /* best effort */ }
347
361
  pruneAsyncJobs();
348
362
  });
349
363
 
@@ -646,6 +660,28 @@ function createCanonicalWorldEntity({ kind, name, notes, projectId, universeId,
646
660
  // ---------------------------------------------------------------------------
647
661
  const db = openDb(DB_PATH);
648
662
 
663
+ // Recover jobs that were in-flight when the server last exited.
664
+ const stalledJobs = loadStalledJobs(db);
665
+ for (const job of stalledJobs) {
666
+ job.status = "failed";
667
+ job.error = "server restarted while job was running";
668
+ job.finishedAt = new Date().toISOString();
669
+ try {
670
+ checkpointJobFinish(db, job);
671
+ } catch (err) {
672
+ process.stderr.write(`[mcp-writing] WARNING: failed to checkpoint recovered stalled job ${job.id}: ${err.message}\n`);
673
+ }
674
+ asyncJobs.set(job.id, job);
675
+ }
676
+ // Prune expired rows from previous sessions unconditionally — completed/failed
677
+ // jobs from prior runs are never loaded into asyncJobs, so anyPruned in
678
+ // pruneAsyncJobs() would never be true for them.
679
+ try { pruneJobCheckpoints(db, ASYNC_JOB_TTL_MS); } catch { /* best effort */ }
680
+
681
+ if (stalledJobs.length > 0) {
682
+ process.stderr.write(`[mcp-writing] Marked ${stalledJobs.length} stalled job(s) as failed after restart.\n`);
683
+ }
684
+
649
685
  process.stderr.write(`[mcp-writing] Sync dir: ${SYNC_DIR_ABS}\n`);
650
686
  process.stderr.write(`[mcp-writing] DB path: ${DB_PATH_DISPLAY}\n`);
651
687
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hanna84/mcp-writing",
3
- "version": "2.9.8",
3
+ "version": "2.10.0",
4
4
  "description": "MCP service for AI-assisted reasoning and editing on long-form fiction projects",
5
5
  "type": "module",
6
6
  "main": "index.js",