@hanna84/mcp-writing 2.10.1 → 2.10.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -4,11 +4,31 @@ All notable changes to this project will be documented in this file. Dates are d
4
4
 
5
5
  Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
6
6
 
7
+ #### [v2.10.3](https://github.com/hannasdev/mcp-writing.git/compare/v2.10.2...v2.10.3)
+
+ - refactor(helpers): extract domain helpers and path safety utilities from index.js [`#107`](https://github.com/hannasdev/mcp-writing.git/pull/107)
12
+
13
+ #### [v2.10.2](https://github.com/hannasdev/mcp-writing.git/compare/v2.10.1...v2.10.2)
+
+ > 26 April 2026
+
+ - refactor(async-jobs): extract startAsyncJob/pruneAsyncJobs/toPublicJob into async-jobs.js [`#106`](https://github.com/hannasdev/mcp-writing.git/pull/106)
+ - Release 2.10.2 [`7478041`](https://github.com/hannasdev/mcp-writing.git/commit/7478041fb85b7c7ccb35b9cd54e8a75171a8270c)
22
+
7
23
  #### [v2.10.1](https://github.com/hannasdev/mcp-writing.git
8
24
  /compare/v2.10.0...v2.10.1)
9
25
 
26
+ > 26 April 2026
27
+
10
28
  - refactor(review-bundles): split 997-line module into planner/renderer/writer [`#105`](https://github.com/hannasdev/mcp-writing.git
11
29
  /pull/105)
30
+ - Release 2.10.1 [`6d5702b`](https://github.com/hannasdev/mcp-writing.git
31
+ /commit/6d5702bb679dcacea9d6a4fe3b5305648e1c1100)
12
32
 
13
33
  #### [v2.10.0](https://github.com/hannasdev/mcp-writing.git
14
34
  /compare/v2.9.9...v2.10.0)
package/async-jobs.js ADDED
@@ -0,0 +1,218 @@
1
+ import fs from "node:fs";
2
+ import path from "node:path";
3
+ import os from "node:os";
4
+ import { spawn } from "node:child_process";
5
+ import { randomUUID } from "node:crypto";
6
+ import { ASYNC_PROGRESS_PREFIX } from "./async-progress.js";
7
+ import { checkpointJobCreate, checkpointJobFinish, pruneJobCheckpoints } from "./db.js";
8
+
9
/**
 * Best-effort JSON read.
 *
 * @param {string|null|undefined} filePath - Path to a JSON file; may be falsy.
 * @returns {*} The parsed value, or null when the path is falsy, the file is
 *   missing, or its contents are not valid JSON. Never throws.
 */
export function readJsonIfExists(filePath) {
  if (!filePath) return null;
  if (!fs.existsSync(filePath)) return null;
  try {
    const raw = fs.readFileSync(filePath, "utf8");
    return JSON.parse(raw);
  } catch {
    // Unreadable or malformed files are treated the same as absent ones.
    return null;
  }
}
17
+
18
/**
 * Builds the async-job manager used by the MCP server.
 *
 * @param {object} deps
 * @param {object} deps.db - Database handle passed through to the checkpoint helpers.
 * @param {Map<string, object>} deps.asyncJobs - Shared in-memory registry of jobs, keyed by job id.
 * @param {number} deps.ttlMs - How long a finished job (and its scratch files) is retained.
 * @param {string} deps.runnerDir - Directory containing `scripts/async-job-runner.mjs`.
 * @returns {{ pruneAsyncJobs: Function, toPublicJob: Function, startAsyncJob: Function }}
 */
export function createAsyncJobManager({ db, asyncJobs, ttlMs, runnerDir }) {
  // Evict finished jobs whose TTL has elapsed: delete their temp dir (or the
  // individual request/result files as a fallback), drop them from the
  // registry, and best-effort prune the matching DB checkpoints.
  function pruneAsyncJobs() {
    const now = Date.now();
    let anyPruned = false;
    for (const [id, job] of asyncJobs.entries()) {
      if (!job.finishedAt) continue; // still running — never evicted here
      if (now - Date.parse(job.finishedAt) > ttlMs) {
        try {
          if (job.tmpDir && fs.existsSync(job.tmpDir)) {
            fs.rmSync(job.tmpDir, { recursive: true, force: true });
          } else {
            if (job.requestPath && fs.existsSync(job.requestPath)) fs.unlinkSync(job.requestPath);
            if (job.resultPath && fs.existsSync(job.resultPath)) fs.unlinkSync(job.resultPath);
          }
        } catch {
          // best effort cleanup
        }
        asyncJobs.delete(id);
        anyPruned = true;
      }
    }
    if (anyPruned) {
      try { pruneJobCheckpoints(db, ttlMs); } catch { /* best effort */ }
    }
  }

  // Client-safe snapshot of a job: snake_case keys, no child-process handle,
  // no filesystem paths. `progress` is included only once it exists, and
  // `result` only when includeResult is true.
  function toPublicJob(job, includeResult = true) {
    return {
      job_id: job.id,
      kind: job.kind,
      status: job.status,
      created_at: job.createdAt,
      started_at: job.startedAt,
      finished_at: job.finishedAt,
      pid: job.pid,
      error: job.error,
      ...(job.progress ? { progress: job.progress } : {}),
      ...(includeResult ? { result: job.result } : {}),
    };
  }

  // Spawn `scripts/async-job-runner.mjs` as a child Node process. The request
  // payload is handed over via a temp JSON file; the runner writes its result
  // JSON to a sibling file, and streams progress lines (prefixed with
  // ASYNC_PROGRESS_PREFIX) on stdout. Returns the mutable in-memory job record.
  function startAsyncJob({ kind, requestPayload, onComplete }) {
    pruneAsyncJobs();

    const id = randomUUID();
    const tmpPrefix = path.join(os.tmpdir(), "mcp-writing-job-");
    const tmpDir = fs.mkdtempSync(tmpPrefix);
    const requestPath = path.join(tmpDir, `${id}.request.json`);
    const resultPath = path.join(tmpDir, `${id}.result.json`);

    fs.writeFileSync(requestPath, JSON.stringify(requestPayload, null, 2), "utf8");

    const runnerPath = path.join(runnerDir, "scripts", "async-job-runner.mjs");
    const child = spawn(
      process.execPath,
      ["--experimental-sqlite", runnerPath, requestPath, resultPath],
      {
        env: process.env,
        stdio: ["ignore", "pipe", "pipe"],
      }
    );

    const job = {
      id,
      kind,
      status: "running",
      createdAt: new Date().toISOString(),
      startedAt: new Date().toISOString(),
      finishedAt: null,
      pid: child.pid,
      tmpDir,
      requestPath,
      resultPath,
      result: null,
      progress: null,
      error: null,
      onComplete,
      child,
    };
    asyncJobs.set(id, job);
    // Persist a checkpoint so a restarted server can detect stalled jobs.
    try {
      checkpointJobCreate(db, job);
    } catch (err) {
      process.stderr.write(`[mcp-writing] WARNING: failed to checkpoint job ${id}: ${err.message}\n`);
    }

    // Line-buffer stdout: only complete lines are inspected; a trailing
    // partial line is carried over to the next data event.
    let stdoutBuffer = "";
    child.stdout.on("data", (chunk) => {
      stdoutBuffer += chunk.toString("utf8");
      const lines = stdoutBuffer.split("\n");
      stdoutBuffer = lines.pop() ?? "";

      for (const line of lines) {
        const trimmed = line.trim();
        if (!trimmed.startsWith(ASYNC_PROGRESS_PREFIX)) continue;
        const payload = trimmed.slice(ASYNC_PROGRESS_PREFIX.length);
        try {
          const progress = JSON.parse(payload);
          if (progress && typeof progress === "object") {
            // Coerce each counter to a number; missing fields become 0.
            const nextProgress = {
              total_scenes: Number(progress.total_scenes ?? 0),
              processed_scenes: Number(progress.processed_scenes ?? 0),
              scenes_changed: Number(progress.scenes_changed ?? 0),
              failed_scenes: Number(progress.failed_scenes ?? 0),
            };
            job.progress = nextProgress;
          }
        } catch {
          // Ignore malformed progress lines; they are best-effort telemetry.
        }
      }
    });
    // stderr is drained but discarded so the pipe never fills up.
    child.stderr.on("data", () => {
      // avoid crashing on stderr backpressure for noisy runs
    });

    // "error" fires when the child could not be spawned (or similar); a
    // pending cancellation wins over a plain failure status.
    child.on("error", (error) => {
      if (job.status === "cancelling") {
        job.status = "cancelled";
        job.error = error.message;
        job.finishedAt = new Date().toISOString();
        try { checkpointJobFinish(db, job); } catch { /* best effort */ }
        pruneAsyncJobs();
        return;
      }
      job.status = "failed";
      job.error = error.message;
      job.finishedAt = new Date().toISOString();
      try { checkpointJobFinish(db, job); } catch { /* best effort */ }
      pruneAsyncJobs();
    });

    // Normal termination path: read the result file (may be absent on crash),
    // reconcile with any pending cancellation, then checkpoint and prune.
    child.on("exit", (code, signal) => {
      const payload = readJsonIfExists(resultPath);
      const successful = payload?.ok === true;
      const cancelledBySignal = signal === "SIGTERM" || signal === "SIGKILL";
      const cancelledByPayload = payload?.cancelled === true;

      job.finishedAt = new Date().toISOString();
      job.result = payload;

      const hasProgressFields = payload && (
        payload.total_scenes !== undefined
        || payload.processed_scenes !== undefined
        || payload.scenes_changed !== undefined
        || payload.failed_scenes !== undefined
      );

      // Final counters in a successful result supersede streamed progress,
      // falling back to the last streamed values for any missing field.
      if (payload && payload.ok === true && hasProgressFields) {
        job.progress = {
          total_scenes: Number(payload.total_scenes ?? job.progress?.total_scenes ?? 0),
          processed_scenes: Number(payload.processed_scenes ?? job.progress?.processed_scenes ?? 0),
          scenes_changed: Number(payload.scenes_changed ?? job.progress?.scenes_changed ?? 0),
          failed_scenes: Number(payload.failed_scenes ?? job.progress?.failed_scenes ?? 0),
        };
      }

      if (job.status === "cancelling") {
        if (cancelledByPayload) {
          // Runner acknowledged the cancel and wrote partial results.
          job.status = "cancelled";
          job.error = "Async job cancelled after returning partial results.";
        } else if (successful && !cancelledBySignal) {
          // Race: cancellation was requested as work completed successfully.
          job.status = "completed";
        } else {
          job.status = "cancelled";
          job.error = cancelledBySignal
            ? `Async job cancelled by signal ${signal}.`
            : payload?.error?.message ?? payload?.error ?? "Async job cancelled.";
          try { checkpointJobFinish(db, job); } catch { /* best effort */ }
          pruneAsyncJobs();
          return;
        }
      } else {
        job.status = successful ? "completed" : "failed";
        if (!successful) {
          job.error = payload?.error?.message
            ?? payload?.error
            ?? (signal
              ? `Async job exited due to signal ${signal}.`
              : `Async job exited with code ${code}.`);
        }
      }

      // A throwing onComplete callback downgrades the job to "failed".
      if (job.status === "completed" && typeof job.onComplete === "function") {
        try {
          job.onComplete(job);
        } catch (error) {
          job.status = "failed";
          job.error = error instanceof Error ? error.message : String(error);
        }
      }
      try { checkpointJobFinish(db, job); } catch { /* best effort */ }
      pruneAsyncJobs();
    });

    return job;
  }

  return { pruneAsyncJobs, toPublicJob, startAsyncJob };
}
package/helpers.js ADDED
@@ -0,0 +1,345 @@
1
+ import fs from "node:fs";
2
+ import path from "node:path";
3
+ import matter from "gray-matter";
4
+ import yaml from "js-yaml";
5
+ import { sidecarPath, syncAll } from "./sync.js";
6
+ import {
7
+ slugifyEntityName,
8
+ renderCharacterSheetTemplate,
9
+ renderPlaceSheetTemplate,
10
+ renderCharacterArcTemplate,
11
+ } from "./world-entity-templates.js";
12
+ import { ReviewBundlePlanError } from "./review-bundles.js";
13
+
14
/**
 * Derive a one-line logline from prose: the first sentence if one can be
 * found, truncated to at most 220 characters.
 *
 * @param {string} prose - Raw scene prose.
 * @returns {string|null} The logline, or null for whitespace-only input.
 */
export function deriveLoglineFromProse(prose) {
  // Collapse every whitespace run so the logline is a single line.
  const compact = prose.replace(/\s+/g, " ").trim();
  if (!compact) return null;

  // Prefer the first sentence (ending in . ! or ?); otherwise keep it all.
  const firstSentence = compact.match(/^(.+?[.!?])(?:\s|$)/);
  const candidate = (firstSentence?.[1] ?? compact).trim();

  return candidate.length <= 220
    ? candidate
    : `${candidate.slice(0, 217).trimEnd()}...`;
}
22
+
23
/**
 * Guess which known characters appear in a piece of prose.
 *
 * A character matches when every whitespace-separated word of its name occurs
 * (case-insensitively, as a substring) in the prose. Longest names are checked
 * first; at most 12 unique ids are returned.
 *
 * @param {object} dbHandle - better-sqlite3-style handle (prepare().all()).
 * @param {string} prose - Scene prose to scan.
 * @param {string} projectId - Project whose (and whose universe's) characters are candidates.
 * @returns {string[]} Up to 12 matching character ids.
 */
export function inferCharacterIdsFromProse(dbHandle, prose, projectId) {
  const haystack = prose.toLowerCase();
  const candidates = dbHandle.prepare(`
    SELECT character_id, name
    FROM characters
    WHERE project_id = ? OR universe_id = (SELECT universe_id FROM projects WHERE project_id = ?)
    ORDER BY length(name) DESC
  `).all(projectId, projectId);

  const matchedIds = candidates
    .filter((row) => Boolean(row.name))
    .filter((row) => {
      const tokens = row.name.toLowerCase().split(/\s+/).filter(Boolean);
      return tokens.length > 0 && tokens.every((token) => haystack.includes(token));
    })
    .map((row) => row.character_id);

  return [...new Set(matchedIds)].slice(0, 12);
}
42
+
43
/**
 * Collect supporting note files that live next to an entity's `sheet.*` file.
 *
 * Only applies when filePath's basename (without extension) is "sheet";
 * otherwise, or when the directory cannot be listed, returns []. Sibling
 * .md/.txt files (except the sheet itself) are read, front matter stripped
 * via gray-matter, and returned sorted by file name. Unreadable files and
 * files whose body is empty after trimming are skipped.
 *
 * @param {string} filePath - Path to the entity's sheet file.
 * @returns {{file_name: string, content: string}[]} Notes in name order.
 */
export function readSupportingNotesForEntity(filePath) {
  const extension = path.extname(filePath).toLowerCase();
  if (path.basename(filePath, extension).toLowerCase() !== "sheet") return [];

  const dir = path.dirname(filePath);
  let dirEntries;
  try {
    dirEntries = fs.readdirSync(dir, { withFileTypes: true });
  } catch {
    return [];
  }

  const isNoteFile = (name) =>
    /\.(md|txt)$/i.test(name) && !/^sheet\.(md|txt)$/i.test(name);

  const noteNames = dirEntries
    .filter((entry) => entry.isFile())
    .map((entry) => entry.name)
    .filter(isNoteFile)
    .sort((a, b) => a.localeCompare(b));

  const notes = [];
  for (const name of noteNames) {
    try {
      const raw = fs.readFileSync(path.join(dir, name), "utf8");
      const { content } = matter(raw);
      const trimmed = content.trim();
      if (trimmed) {
        notes.push({ file_name: name, content: trimmed });
      }
    } catch {
      // Unreadable or unparsable notes are skipped — best-effort enrichment.
    }
  }
  return notes;
}
78
+
79
/**
 * Read an entity's metadata, preferring its YAML sidecar file.
 *
 * When the sidecar exists, its YAML is parsed; otherwise the prose file's
 * front matter is used. Any read or parse failure — and a null/empty
 * document — yields an empty object. Never throws.
 *
 * @param {string} filePath - Path to the entity's prose file.
 * @returns {object} Parsed metadata mapping, possibly empty.
 */
export function readEntityMetadata(filePath) {
  const metaPath = sidecarPath(filePath);

  if (fs.existsSync(metaPath)) {
    try {
      const parsed = yaml.load(fs.readFileSync(metaPath, "utf8"));
      return parsed ?? {};
    } catch {
      return {};
    }
  }

  // No sidecar: fall back to front matter embedded in the prose file.
  try {
    const { data } = matter(fs.readFileSync(filePath, "utf8"));
    return data ?? {};
  } catch {
    return {};
  }
}
95
+
96
/**
 * Resolve the set of scenes a batch operation should target.
 *
 * Filters by explicit scene ids, part, chapter, and/or staleness. When
 * explicit ids are given, every id must exist in the project or a NOT_FOUND
 * error object is returned instead of rows.
 *
 * @param {object} dbHandle - better-sqlite3-style handle (prepare().get()/.all()).
 * @param {object} opts
 * @param {string} opts.projectId - Target project.
 * @param {string[]} [opts.sceneIds] - Explicit scene ids to target.
 * @param {*} [opts.part] - Restrict to one part when defined.
 * @param {*} [opts.chapter] - Restrict to one chapter when defined.
 * @param {boolean} [opts.onlyStale] - Restrict to scenes with metadata_stale = 1.
 * @returns {{ok: true, rows: object[], project_exists: boolean} |
 *           {ok: false, code: string, message: string, details: object}}
 */
export function resolveBatchTargetScenes(dbHandle, {
  projectId,
  sceneIds,
  part,
  chapter,
  onlyStale,
}) {
  // Lets callers distinguish "no matching scenes" from "unknown project".
  const projectExists = Boolean(
    dbHandle.prepare(`SELECT 1 FROM projects WHERE project_id = ? LIMIT 1`).get(projectId)
  );

  const hasExplicitIds = Boolean(sceneIds?.length);
  const placeholders = hasExplicitIds ? sceneIds.map(() => "?").join(",") : "";

  // Explicitly requested ids must all exist before anything is targeted.
  if (hasExplicitIds) {
    const foundRows = dbHandle.prepare(
      `SELECT scene_id FROM scenes WHERE project_id = ? AND scene_id IN (${placeholders})`
    ).all(projectId, ...sceneIds);
    const foundIds = new Set(foundRows.map((row) => row.scene_id));
    const missing = sceneIds.filter((sceneId) => !foundIds.has(sceneId));
    if (missing.length > 0) {
      return { ok: false, code: "NOT_FOUND", message: `Requested scene IDs were not found in project '${projectId}'.`, details: { missing_scene_ids: missing, project_id: projectId } };
    }
  }

  // Build WHERE clauses and bind parameters in lockstep.
  const conditions = ["project_id = ?"];
  const params = [projectId];

  if (hasExplicitIds) {
    conditions.push(`scene_id IN (${placeholders})`);
    params.push(...sceneIds);
  }
  if (part !== undefined) {
    conditions.push("part = ?");
    params.push(part);
  }
  if (chapter !== undefined) {
    conditions.push("chapter = ?");
    params.push(chapter);
  }
  if (onlyStale) {
    conditions.push("metadata_stale = 1");
  }

  const query = `
    SELECT scene_id, project_id, file_path
    FROM scenes
    WHERE ${conditions.join(" AND ")}
    ORDER BY part, chapter, timeline_position
  `;

  return {
    ok: true,
    rows: dbHandle.prepare(query).all(...params),
    project_exists: projectExists,
  };
}
152
+
153
/**
 * Builds the path-safety and world-entity helpers bound to one sync dir.
 *
 * @param {object} deps
 * @param {string} deps.syncDir - Sync directory as configured (used to build paths).
 * @param {string} deps.syncDirReal - Canonical (realpath) sync dir, used for containment checks.
 * @param {string} deps.syncDirAbs - Absolute sync dir, used in error details.
 * @param {object} deps.db - Database handle, passed through to syncAll.
 * @param {boolean} deps.syncDirWritable - Forwarded to syncAll's `writable` option.
 */
export function createHelpers({ syncDir, syncDirReal, syncDirAbs, db, syncDirWritable }) {
  // True when candidatePath (after symlink resolution, where possible) lies
  // inside the canonical sync dir. Paths that cannot be realpath'd are
  // checked as-resolved.
  function isPathInsideSyncDir(candidatePath) {
    const resolvedCandidate = path.resolve(candidatePath);
    const canonicalCandidate = (() => {
      try {
        return fs.realpathSync(resolvedCandidate);
      } catch {
        return resolvedCandidate;
      }
    })();

    // relative() escaping upward ("..") or to another root means "outside".
    const rel = path.relative(syncDirReal, canonicalCandidate);
    return !(rel.startsWith("..") || path.isAbsolute(rel));
  }

  // Like isPathInsideSyncDir, but works for paths that do not yet exist by
  // walking up to the nearest existing ancestor before canonicalising.
  function isPathCandidateInsideSyncDir(candidatePath) {
    const resolvedCandidate = path.resolve(candidatePath);

    // Find the deepest ancestor that actually exists on disk.
    let existingAncestor = resolvedCandidate;
    while (!fs.existsSync(existingAncestor)) {
      const parent = path.dirname(existingAncestor);
      if (parent === existingAncestor) break; // reached the filesystem root
      existingAncestor = parent;
    }

    const canonicalBase = (() => {
      try {
        return fs.realpathSync(existingAncestor);
      } catch {
        return existingAncestor;
      }
    })();

    // Re-attach the not-yet-existing tail onto the canonical ancestor.
    const canonical = path.resolve(canonicalBase, path.relative(existingAncestor, resolvedCandidate));
    const rel = path.relative(syncDirReal, canonical);
    return !(rel.startsWith("..") || path.isAbsolute(rel));
  }

  // Validate that outputDir resolves inside the sync dir, following symlinks
  // on the nearest existing ancestor; throws ReviewBundlePlanError otherwise.
  // Returns both the canonicalised absolute dir and its sync-dir-relative form.
  function resolveOutputDirWithinSync(outputDir) {
    let resolvedOutputDir = path.resolve(outputDir);
    let existingAncestor = resolvedOutputDir;

    while (!fs.existsSync(existingAncestor)) {
      const parentDir = path.dirname(existingAncestor);
      if (parentDir === existingAncestor) {
        // Walked off the top of the filesystem without finding anything real.
        throw new ReviewBundlePlanError(
          "INVALID_OUTPUT_DIR",
          "output_dir must be inside WRITING_SYNC_DIR.",
          { output_dir: resolvedOutputDir, sync_dir: syncDirAbs }
        );
      }
      existingAncestor = parentDir;
    }

    let realExistingAncestor;
    try {
      realExistingAncestor = fs.realpathSync.native(existingAncestor);
    } catch (err) {
      throw new ReviewBundlePlanError(
        "INVALID_OUTPUT_DIR",
        "output_dir ancestor could not be resolved: path may be inaccessible.",
        { output_dir: outputDir, existing_ancestor: existingAncestor, cause: err.message }
      );
    }
    const relativeFromAncestor = path.relative(existingAncestor, resolvedOutputDir);
    resolvedOutputDir = path.resolve(realExistingAncestor, relativeFromAncestor);

    const relativeToSyncDir = path.relative(syncDirReal, resolvedOutputDir);
    if (relativeToSyncDir.startsWith("..") || path.isAbsolute(relativeToSyncDir)) {
      throw new ReviewBundlePlanError(
        "INVALID_OUTPUT_DIR",
        "output_dir must be inside WRITING_SYNC_DIR.",
        { output_dir: resolvedOutputDir, sync_dir: syncDirAbs }
      );
    }

    return { resolvedOutputDir, relativeToSyncDir };
  }

  // "universe/slug" ids live under universes/<universe>/<slug>; plain ids
  // live under projects/<id>.
  function resolveProjectRoot(projectId) {
    if (projectId.includes("/")) {
      const [universeId, projectSlug] = projectId.split("/");
      return path.join(syncDir, "universes", universeId, projectSlug);
    }
    return path.join(syncDir, "projects", projectId);
  }

  // Directory for a world entity: project-scoped when projectId is given,
  // otherwise universe-scoped. kind selects the characters/places bucket.
  function resolveWorldEntityDir({ kind, projectId, universeId, name }) {
    const slug = slugifyEntityName(name);
    const baseDir = projectId
      ? path.join(resolveProjectRoot(projectId), "world")
      : path.join(syncDir, "universes", universeId, "world");
    const bucket = kind === "character" ? "characters" : "places";
    return {
      slug,
      dir: path.join(baseDir, bucket, slug),
    };
  }

  // Create (or backfill) a canonical character/place on disk: sheet.md,
  // a metadata sidecar, and — for characters — arc.md, then resync the DB.
  // Existing files are never overwritten; the sidecar is only rewritten when
  // its id or name needs backfilling.
  function createCanonicalWorldEntity({ kind, name, notes, projectId, universeId, meta }) {
    const prefix = kind === "character" ? "char" : "place";
    const idKey = kind === "character" ? "character_id" : "place_id";
    const slug = slugifyEntityName(name);
    if (!slug) throw new Error("Name must contain at least one alphanumeric character.");

    const { dir } = resolveWorldEntityDir({ kind, projectId, universeId, name });
    const prosePath = path.join(dir, "sheet.md");
    const metaPath = sidecarPath(prosePath);
    const hadProse = fs.existsSync(prosePath);
    const hadMeta = fs.existsSync(metaPath);

    let shouldWriteMeta = !hadMeta;
    let payload;
    const derivedId = `${prefix}-${slug}`;
    if (hadMeta) {
      // Existing sidecar: invalid YAML or a non-mapping document is a hard
      // error rather than something to silently overwrite.
      let parsedMeta;
      try {
        parsedMeta = yaml.load(fs.readFileSync(metaPath, "utf8"));
      } catch (err) {
        throw new Error(
          `Existing metadata sidecar is invalid YAML at ${metaPath}: ${err.message}`,
          { cause: err }
        );
      }

      if (parsedMeta != null && (typeof parsedMeta !== "object" || Array.isArray(parsedMeta))) {
        throw new Error(`Existing metadata sidecar must be a YAML mapping at ${metaPath}.`);
      }

      const existingMeta = parsedMeta ?? {};

      // Backfill id/name only when missing; note the caller's `meta` is
      // intentionally ignored for pre-existing sidecars.
      const backfilledId = existingMeta[idKey] ?? derivedId;
      const backfilledName = existingMeta.name ?? name;
      shouldWriteMeta = existingMeta[idKey] == null || existingMeta.name == null;
      payload = shouldWriteMeta
        ? {
          ...existingMeta,
          [idKey]: backfilledId,
          name: backfilledName,
        }
        : existingMeta;
    } else {
      payload = {
        [idKey]: derivedId,
        name,
        ...(meta ?? {}),
      };
    }

    fs.mkdirSync(dir, { recursive: true });

    if (!hadProse) {
      const defaultSheet = kind === "character"
        ? renderCharacterSheetTemplate(name)
        : renderPlaceSheetTemplate(name);
      // NOTE(review): an empty-string `notes` produces an empty sheet rather
      // than the template ("" is not nullish) — presumably intentional; confirm.
      const body = notes?.trim() ?? defaultSheet;
      fs.writeFileSync(prosePath, `${body}${body ? "\n" : ""}`, "utf8");
    }

    if (kind === "character") {
      const arcPath = path.join(dir, "arc.md");
      if (!fs.existsSync(arcPath)) {
        fs.writeFileSync(arcPath, `${renderCharacterArcTemplate(name)}\n`, "utf8");
      }
    }

    if (shouldWriteMeta) {
      fs.writeFileSync(metaPath, yaml.dump(payload, { lineWidth: 120 }), "utf8");
    }

    // Re-index the sync dir so the new/updated entity is visible in the DB.
    syncAll(db, syncDir, { writable: syncDirWritable });

    return {
      created: !hadProse && !hadMeta,
      id: payload[idKey],
      prose_path: prosePath,
      meta_path: metaPath,
      project_id: projectId ?? null,
      universe_id: universeId ?? null,
    };
  }

  return {
    isPathInsideSyncDir,
    isPathCandidateInsideSyncDir,
    resolveOutputDirWithinSync,
    resolveProjectRoot,
    resolveWorldEntityDir,
    createCanonicalWorldEntity,
  };
}
package/index.js CHANGED
@@ -4,19 +4,21 @@ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"
4
4
  import http from "node:http";
5
5
  import fs from "node:fs";
6
6
  import path from "node:path";
7
- import os from "node:os";
8
7
  import { randomUUID } from "node:crypto";
9
- import { spawn } from "node:child_process";
10
8
  import { fileURLToPath } from "node:url";
11
- import matter from "gray-matter";
12
- import yaml from "js-yaml";
13
- import { openDb, checkpointJobCreate, checkpointJobFinish, loadStalledJobs, pruneJobCheckpoints } from "./db.js";
14
- import { syncAll, isSyncDirWritable, getSyncOwnershipDiagnostics, sidecarPath, isStructuralProjectId } from "./sync.js";
9
+ import { openDb, checkpointJobFinish, loadStalledJobs, pruneJobCheckpoints } from "./db.js";
10
+ import { syncAll, isSyncDirWritable, getSyncOwnershipDiagnostics, isStructuralProjectId } from "./sync.js";
15
11
  import { isGitAvailable, isGitRepository, initGitRepository, getSceneProseAtCommit } from "./git.js";
16
- import { renderCharacterArcTemplate, renderCharacterSheetTemplate, renderPlaceSheetTemplate, slugifyEntityName } from "./world-entity-templates.js";
17
- import { ASYNC_PROGRESS_PREFIX } from "./async-progress.js";
12
+ import { createAsyncJobManager, readJsonIfExists } from "./async-jobs.js";
13
+ import {
14
+ createHelpers,
15
+ deriveLoglineFromProse,
16
+ inferCharacterIdsFromProse,
17
+ readSupportingNotesForEntity,
18
+ readEntityMetadata,
19
+ resolveBatchTargetScenes,
20
+ } from "./helpers.js";
18
21
  import { STYLEGUIDE_CONFIG_BASENAME } from "./prose-styleguide.js";
19
- import { ReviewBundlePlanError } from "./review-bundles.js";
20
22
  import { registerSyncTools } from "./tools/sync.js";
21
23
  import { registerSearchTools } from "./tools/search.js";
22
24
  import { registerMetadataTools } from "./tools/metadata.js";
@@ -36,86 +38,6 @@ const SYNC_DIR_REAL = (() => {
36
38
  })();
37
39
  const DB_PATH_DISPLAY = DB_PATH === ":memory:" ? DB_PATH : path.resolve(DB_PATH);
38
40
 
39
- function isPathInsideSyncDir(candidatePath) {
40
- const resolvedCandidate = path.resolve(candidatePath);
41
- const canonicalCandidate = (() => {
42
- try {
43
- return fs.realpathSync(resolvedCandidate);
44
- } catch {
45
- return resolvedCandidate;
46
- }
47
- })();
48
-
49
- const rel = path.relative(SYNC_DIR_REAL, canonicalCandidate);
50
- return !(rel.startsWith("..") || path.isAbsolute(rel));
51
- }
52
-
53
- // Like isPathInsideSyncDir, but works for paths that do not yet exist by
54
- // walking up to the nearest existing ancestor before canonicalising.
55
- function isPathCandidateInsideSyncDir(candidatePath) {
56
- const resolvedCandidate = path.resolve(candidatePath);
57
-
58
- let existingAncestor = resolvedCandidate;
59
- while (!fs.existsSync(existingAncestor)) {
60
- const parent = path.dirname(existingAncestor);
61
- if (parent === existingAncestor) break;
62
- existingAncestor = parent;
63
- }
64
-
65
- const canonicalBase = (() => {
66
- try {
67
- return fs.realpathSync(existingAncestor);
68
- } catch {
69
- return existingAncestor;
70
- }
71
- })();
72
-
73
- const canonical = path.resolve(canonicalBase, path.relative(existingAncestor, resolvedCandidate));
74
- const rel = path.relative(SYNC_DIR_REAL, canonical);
75
- return !(rel.startsWith("..") || path.isAbsolute(rel));
76
- }
77
-
78
- function resolveOutputDirWithinSync(outputDir) {
79
- let resolvedOutputDir = path.resolve(outputDir);
80
- let existingAncestor = resolvedOutputDir;
81
-
82
- while (!fs.existsSync(existingAncestor)) {
83
- const parentDir = path.dirname(existingAncestor);
84
- if (parentDir === existingAncestor) {
85
- throw new ReviewBundlePlanError(
86
- "INVALID_OUTPUT_DIR",
87
- "output_dir must be inside WRITING_SYNC_DIR.",
88
- { output_dir: resolvedOutputDir, sync_dir: SYNC_DIR_ABS }
89
- );
90
- }
91
- existingAncestor = parentDir;
92
- }
93
-
94
- let realExistingAncestor;
95
- try {
96
- realExistingAncestor = fs.realpathSync.native(existingAncestor);
97
- } catch (err) {
98
- throw new ReviewBundlePlanError(
99
- "INVALID_OUTPUT_DIR",
100
- "output_dir ancestor could not be resolved: path may be inaccessible.",
101
- { output_dir: outputDir, existing_ancestor: existingAncestor, cause: err.message }
102
- );
103
- }
104
- const relativeFromAncestor = path.relative(existingAncestor, resolvedOutputDir);
105
- resolvedOutputDir = path.resolve(realExistingAncestor, relativeFromAncestor);
106
-
107
- const relativeToSyncDir = path.relative(SYNC_DIR_REAL, resolvedOutputDir);
108
- if (relativeToSyncDir.startsWith("..") || path.isAbsolute(relativeToSyncDir)) {
109
- throw new ReviewBundlePlanError(
110
- "INVALID_OUTPUT_DIR",
111
- "output_dir must be inside WRITING_SYNC_DIR.",
112
- { output_dir: resolvedOutputDir, sync_dir: SYNC_DIR_ABS }
113
- );
114
- }
115
-
116
- return { resolvedOutputDir, relativeToSyncDir };
117
- }
118
-
119
41
  function parsePositiveIntEnv(rawValue, defaultValue) {
120
42
  const parsed = parseInt(rawValue ?? String(defaultValue), 10);
121
43
  return Number.isFinite(parsed) && parsed > 0 ? parsed : defaultValue;
@@ -156,214 +78,6 @@ const MCP_SERVER_VERSION = typeof pkg.version === "string" && pkg.version.trim()
156
78
  : "0.0.0";
157
79
  const asyncJobs = new Map();
158
80
 
159
- function pruneAsyncJobs() {
160
- const now = Date.now();
161
- let anyPruned = false;
162
- for (const [id, job] of asyncJobs.entries()) {
163
- if (!job.finishedAt) continue;
164
- if (now - Date.parse(job.finishedAt) > ASYNC_JOB_TTL_MS) {
165
- try {
166
- if (job.tmpDir && fs.existsSync(job.tmpDir)) {
167
- fs.rmSync(job.tmpDir, { recursive: true, force: true });
168
- } else {
169
- if (job.requestPath && fs.existsSync(job.requestPath)) fs.unlinkSync(job.requestPath);
170
- if (job.resultPath && fs.existsSync(job.resultPath)) fs.unlinkSync(job.resultPath);
171
- }
172
- } catch {
173
- // best effort cleanup
174
- }
175
- asyncJobs.delete(id);
176
- anyPruned = true;
177
- }
178
- }
179
- if (anyPruned) {
180
- try { pruneJobCheckpoints(db, ASYNC_JOB_TTL_MS); } catch { /* best effort */ }
181
- }
182
- }
183
-
184
- function readJsonIfExists(filePath) {
185
- if (!filePath || !fs.existsSync(filePath)) return null;
186
- try {
187
- return JSON.parse(fs.readFileSync(filePath, "utf8"));
188
- } catch {
189
- return null;
190
- }
191
- }
192
-
193
- function toPublicJob(job, includeResult = true) {
194
- return {
195
- job_id: job.id,
196
- kind: job.kind,
197
- status: job.status,
198
- created_at: job.createdAt,
199
- started_at: job.startedAt,
200
- finished_at: job.finishedAt,
201
- pid: job.pid,
202
- error: job.error,
203
- ...(job.progress ? { progress: job.progress } : {}),
204
- ...(includeResult ? { result: job.result } : {}),
205
- };
206
- }
207
-
208
- function startAsyncJob({ kind, requestPayload, onComplete }) {
209
- pruneAsyncJobs();
210
- const progressPrefix = ASYNC_PROGRESS_PREFIX;
211
-
212
- const id = randomUUID();
213
- const tmpPrefix = path.join(os.tmpdir(), "mcp-writing-job-");
214
- const tmpDir = fs.mkdtempSync(tmpPrefix);
215
- const requestPath = path.join(tmpDir, `${id}.request.json`);
216
- const resultPath = path.join(tmpDir, `${id}.result.json`);
217
-
218
- fs.writeFileSync(requestPath, JSON.stringify(requestPayload, null, 2), "utf8");
219
-
220
- const runnerPath = path.join(__dirname, "scripts", "async-job-runner.mjs");
221
- const child = spawn(
222
- process.execPath,
223
- ["--experimental-sqlite", runnerPath, requestPath, resultPath],
224
- {
225
- env: process.env,
226
- stdio: ["ignore", "pipe", "pipe"],
227
- }
228
- );
229
-
230
- const job = {
231
- id,
232
- kind,
233
- status: "running",
234
- createdAt: new Date().toISOString(),
235
- startedAt: new Date().toISOString(),
236
- finishedAt: null,
237
- pid: child.pid,
238
- tmpDir,
239
- requestPath,
240
- resultPath,
241
- result: null,
242
- progress: null,
243
- error: null,
244
- onComplete,
245
- child,
246
- };
247
- asyncJobs.set(id, job);
248
- try {
249
- checkpointJobCreate(db, job);
250
- } catch (err) {
251
- process.stderr.write(`[mcp-writing] WARNING: failed to checkpoint job ${id}: ${err.message}\n`);
252
- }
253
-
254
- let stdoutBuffer = "";
255
- child.stdout.on("data", (chunk) => {
256
- stdoutBuffer += chunk.toString("utf8");
257
- const lines = stdoutBuffer.split("\n");
258
- stdoutBuffer = lines.pop() ?? "";
259
-
260
- for (const line of lines) {
261
- const trimmed = line.trim();
262
- if (!trimmed.startsWith(progressPrefix)) continue;
263
- const payload = trimmed.slice(progressPrefix.length);
264
- try {
265
- const progress = JSON.parse(payload);
266
- if (progress && typeof progress === "object") {
267
- const nextProgress = {
268
- total_scenes: Number(progress.total_scenes ?? 0),
269
- processed_scenes: Number(progress.processed_scenes ?? 0),
270
- scenes_changed: Number(progress.scenes_changed ?? 0),
271
- failed_scenes: Number(progress.failed_scenes ?? 0),
272
- };
273
- job.progress = nextProgress;
274
- }
275
- } catch {
276
- // Ignore malformed progress lines; they are best-effort telemetry.
277
- }
278
- }
279
- });
280
- child.stderr.on("data", () => {
281
- // avoid crashing on stderr backpressure for noisy runs
282
- });
283
-
284
- child.on("error", (error) => {
285
- if (job.status === "cancelling") {
286
- job.status = "cancelled";
287
- job.error = error.message;
288
- job.finishedAt = new Date().toISOString();
289
- try { checkpointJobFinish(db, job); } catch { /* best effort */ }
290
- pruneAsyncJobs();
291
- return;
292
- }
293
- job.status = "failed";
294
- job.error = error.message;
295
- job.finishedAt = new Date().toISOString();
296
- try { checkpointJobFinish(db, job); } catch { /* best effort */ }
297
- pruneAsyncJobs();
298
- });
299
-
300
- child.on("exit", (code, signal) => {
301
- const payload = readJsonIfExists(resultPath);
302
- const successful = payload?.ok === true;
303
- const cancelledBySignal = signal === "SIGTERM" || signal === "SIGKILL";
304
- const cancelledByPayload = payload?.cancelled === true;
305
-
306
- job.finishedAt = new Date().toISOString();
307
- job.result = payload;
308
-
309
- const hasProgressFields = payload && (
310
- payload.total_scenes !== undefined
311
- || payload.processed_scenes !== undefined
312
- || payload.scenes_changed !== undefined
313
- || payload.failed_scenes !== undefined
314
- );
315
-
316
- if (payload && payload.ok === true && hasProgressFields) {
317
- job.progress = {
318
- total_scenes: Number(payload.total_scenes ?? job.progress?.total_scenes ?? 0),
319
- processed_scenes: Number(payload.processed_scenes ?? job.progress?.processed_scenes ?? 0),
320
- scenes_changed: Number(payload.scenes_changed ?? job.progress?.scenes_changed ?? 0),
321
- failed_scenes: Number(payload.failed_scenes ?? job.progress?.failed_scenes ?? 0),
322
- };
323
- }
324
-
325
- if (job.status === "cancelling") {
326
- if (cancelledByPayload) {
327
- job.status = "cancelled";
328
- job.error = "Async job cancelled after returning partial results.";
329
- } else if (successful && !cancelledBySignal) {
330
- // Race: cancellation was requested as work completed successfully.
331
- job.status = "completed";
332
- } else {
333
- job.status = "cancelled";
334
- job.error = cancelledBySignal
335
- ? `Async job cancelled by signal ${signal}.`
336
- : payload?.error?.message ?? payload?.error ?? "Async job cancelled.";
337
- try { checkpointJobFinish(db, job); } catch { /* best effort */ }
338
- pruneAsyncJobs();
339
- return;
340
- }
341
- } else {
342
- job.status = successful ? "completed" : "failed";
343
- if (!successful) {
344
- job.error = payload?.error?.message
345
- ?? payload?.error
346
- ?? (signal
347
- ? `Async job exited due to signal ${signal}.`
348
- : `Async job exited with code ${code}.`);
349
- }
350
- }
351
-
352
- if (job.status === "completed" && typeof job.onComplete === "function") {
353
- try {
354
- job.onComplete(job);
355
- } catch (error) {
356
- job.status = "failed";
357
- job.error = error instanceof Error ? error.message : String(error);
358
- }
359
- }
360
- try { checkpointJobFinish(db, job); } catch { /* best effort */ }
361
- pruneAsyncJobs();
362
- });
363
-
364
- return job;
365
- }
366
-
367
81
  function paginateRows(rows, { page, pageSize, forcePagination = false }) {
368
82
  const totalCount = rows.length;
369
83
  const shouldPaginate = forcePagination || page !== undefined || pageSize !== undefined;
@@ -413,248 +127,6 @@ function errorResponse(code, message, details) {
413
127
  return jsonResponse(payload);
414
128
  }
415
129
 
416
/**
 * Derive a one-line logline from a block of prose.
 *
 * Collapses whitespace, prefers the first full sentence (terminated by
 * `.`, `!`, or `?`), and caps the result at 220 characters with a trailing
 * `...` when truncation is required.
 *
 * @param {string} prose - Raw scene/sheet prose.
 * @returns {string|null} The derived logline, or null for blank input.
 */
function deriveLoglineFromProse(prose) {
  // Collapse every whitespace run so the logline reads as a single line.
  const normalized = prose.replace(/\s+/g, " ").trim();
  if (normalized === "") return null;

  // Lazily match the first sentence; fall back to the whole text when the
  // prose has no terminal punctuation.
  const firstSentence = normalized.match(/^(.+?[.!?])(?:\s|$)/);
  const logline = firstSentence === null ? normalized : firstSentence[1].trim();

  if (logline.length > 220) {
    // 217 + "..." keeps the total within the 220-character cap.
    return `${logline.slice(0, 217).trimEnd()}...`;
  }
  return logline;
}
424
-
425
/**
 * Infer which known characters appear in a piece of prose.
 *
 * A character matches when every whitespace-separated token of its name
 * appears (case-insensitively, as a substring) in the prose. Candidates are
 * drawn from the project and its parent universe.
 *
 * @param {object} dbHandle - Database handle exposing prepare().all().
 * @param {string} prose - Prose text to scan.
 * @param {string} projectId - Owning project ID.
 * @returns {string[]} Up to 12 unique character IDs, longest names first.
 */
function inferCharacterIdsFromProse(dbHandle, prose, projectId) {
  const haystack = prose.toLowerCase();
  // Longest names first so multi-word names are reported before shorter ones.
  const candidates = dbHandle.prepare(`
    SELECT character_id, name
    FROM characters
    WHERE project_id = ? OR universe_id = (SELECT universe_id FROM projects WHERE project_id = ?)
    ORDER BY length(name) DESC
  `).all(projectId, projectId);

  const matched = candidates
    .filter(row => Boolean(row.name))
    .filter(row => {
      const tokens = row.name.toLowerCase().split(/\s+/).filter(Boolean);
      return tokens.length > 0 && tokens.every(token => haystack.includes(token));
    })
    .map(row => row.character_id);

  // Dedupe while preserving order, capped at 12 IDs.
  return [...new Set(matched)].slice(0, 12);
}
444
-
445
/**
 * Collect supporting note files that live next to an entity's sheet file.
 *
 * Only applies when the given file is a "sheet.*" file; sibling .md/.txt
 * files (other than the sheet itself) are read, front matter is stripped via
 * `matter`, and non-empty note bodies are returned in locale-sorted order.
 *
 * @param {string} filePath - Path to the entity's sheet file.
 * @returns {{file_name: string, content: string}[]} Non-empty notes; [] when
 *   the file is not a sheet or the directory is unreadable.
 */
function readSupportingNotesForEntity(filePath) {
  const ext = path.extname(filePath).toLowerCase();
  if (path.basename(filePath, ext).toLowerCase() !== "sheet") return [];

  const dir = path.dirname(filePath);
  let entries;
  try {
    entries = fs.readdirSync(dir, { withFileTypes: true });
  } catch {
    // Best effort: an unreadable directory simply yields no notes.
    return [];
  }

  const noteNames = entries
    .filter(entry => entry.isFile())
    .map(entry => entry.name)
    .filter(name => /\.(md|txt)$/i.test(name) && !/^sheet\.(md|txt)$/i.test(name))
    .sort((a, b) => a.localeCompare(b));

  const notes = [];
  for (const name of noteNames) {
    try {
      const raw = fs.readFileSync(path.join(dir, name), "utf8");
      const body = matter(raw).content.trim();
      if (body) notes.push({ file_name: name, content: body });
    } catch {
      // Skip unreadable notes; remaining files are still collected.
    }
  }
  return notes;
}
480
-
481
/**
 * Read an entity's metadata mapping.
 *
 * Prefers the YAML sidecar (via `sidecarPath`); when no sidecar exists, falls
 * back to front matter parsed out of the file itself. Any read/parse failure
 * degrades to an empty object.
 *
 * @param {string} filePath - Path to the entity's prose file.
 * @returns {object} Parsed metadata, or {} on absence/error.
 */
function readEntityMetadata(filePath) {
  const metaPath = sidecarPath(filePath);
  const hasSidecar = fs.existsSync(metaPath);

  try {
    if (hasSidecar) {
      const parsed = yaml.load(fs.readFileSync(metaPath, "utf8"));
      return parsed ?? {};
    }
    const { data } = matter(fs.readFileSync(filePath, "utf8"));
    return data ?? {};
  } catch {
    // Malformed YAML / unreadable file => empty metadata, never a throw.
    return {};
  }
}
497
-
498
/**
 * Resolve a project's on-disk root directory inside the sync tree.
 *
 * IDs of the form "universe/slug" live under universes/<universe>/<slug>;
 * plain IDs live under projects/<id>.
 *
 * @param {string} projectId - Plain or universe-qualified project ID.
 * @returns {string} Absolute-or-relative path rooted at SYNC_DIR.
 */
function resolveProjectRoot(projectId) {
  if (!projectId.includes("/")) {
    return path.join(SYNC_DIR, "projects", projectId);
  }
  // Destructuring keeps only the first two segments, matching the original
  // "universe/slug" contract.
  const [universeId, projectSlug] = projectId.split("/");
  return path.join(SYNC_DIR, "universes", universeId, projectSlug);
}
505
-
506
/**
 * Resolve the directory for a world entity (character or place).
 *
 * Entities are bucketed as world/characters/<slug> or world/places/<slug>,
 * rooted at either a project tree (when projectId is given) or a universe
 * tree.
 *
 * @param {object} opts
 * @param {string} opts.kind - "character" or anything else (treated as place).
 * @param {string} [opts.projectId] - Project to root under, if any.
 * @param {string} [opts.universeId] - Universe to root under otherwise.
 * @param {string} opts.name - Entity display name, slugified for the path.
 * @returns {{slug: string, dir: string}}
 */
function resolveWorldEntityDir({ kind, projectId, universeId, name }) {
  const slug = slugifyEntityName(name);

  let worldRoot;
  if (projectId) {
    worldRoot = path.join(resolveProjectRoot(projectId), "world");
  } else {
    worldRoot = path.join(SYNC_DIR, "universes", universeId, "world");
  }

  const bucket = kind === "character" ? "characters" : "places";
  return { slug, dir: path.join(worldRoot, bucket, slug) };
}
517
-
518
/**
 * Resolve which scenes a batch operation should target.
 *
 * Validates explicitly requested scene IDs up front, then builds a filtered
 * query over the project's scenes ordered by part/chapter/timeline position.
 *
 * @param {object} dbHandle - DB handle exposing prepare().get()/.all()
 *   (presumably better-sqlite3 — TODO confirm against module imports).
 * @param {object} filters
 * @param {string} filters.projectId - Project whose scenes are targeted.
 * @param {string[]} [filters.sceneIds] - Explicit scene IDs; all must exist.
 * @param {*} [filters.part] - Restrict to one part when provided.
 * @param {*} [filters.chapter] - Restrict to one chapter when provided.
 * @param {boolean} [filters.onlyStale] - Restrict to metadata_stale = 1 rows.
 * @returns {object} Either `{ ok: false, code, message, details }` when some
 *   requested scene IDs are missing, or
 *   `{ ok: true, rows, project_exists }` with the matching scene rows.
 */
function resolveBatchTargetScenes(dbHandle, {
  projectId,
  sceneIds,
  part,
  chapter,
  onlyStale,
}) {
  // Reported to the caller so an empty result set can be distinguished from
  // a nonexistent project.
  const projectExists = Boolean(
    dbHandle.prepare(`SELECT 1 FROM projects WHERE project_id = ? LIMIT 1`).get(projectId)
  );

  // Fail fast when explicit scene IDs were requested but some do not exist
  // in this project.
  if (sceneIds?.length) {
    const placeholders = sceneIds.map(() => "?").join(",");
    const existingRows = dbHandle.prepare(
      `SELECT scene_id FROM scenes WHERE project_id = ? AND scene_id IN (${placeholders})`
    ).all(projectId, ...sceneIds);
    const existing = new Set(existingRows.map(row => row.scene_id));
    const missing = sceneIds.filter(sceneId => !existing.has(sceneId));
    if (missing.length > 0) {
      return { ok: false, code: "NOT_FOUND", message: `Requested scene IDs were not found in project '${projectId}'.`, details: { missing_scene_ids: missing, project_id: projectId } };
    }
  }

  // Build the WHERE clause from whichever optional filters were supplied;
  // params are pushed in the same order as their conditions.
  const conditions = ["project_id = ?"];
  const params = [projectId];

  if (sceneIds?.length) {
    const placeholders = sceneIds.map(() => "?").join(",");
    conditions.push(`scene_id IN (${placeholders})`);
    params.push(...sceneIds);
  }
  if (part !== undefined) {
    conditions.push("part = ?");
    params.push(part);
  }
  if (chapter !== undefined) {
    conditions.push("chapter = ?");
    params.push(chapter);
  }
  if (onlyStale) {
    conditions.push("metadata_stale = 1");
  }

  const query = `
    SELECT scene_id, project_id, file_path
    FROM scenes
    WHERE ${conditions.join(" AND ")}
    ORDER BY part, chapter, timeline_position
  `;

  return {
    ok: true,
    rows: dbHandle.prepare(query).all(...params),
    project_exists: projectExists,
  };
}
574
-
575
/**
 * Create (or backfill) a canonical world entity on disk.
 *
 * Ensures the entity directory exists with a prose sheet (`sheet.md`), a
 * metadata sidecar, and — for characters — an `arc.md` template, then runs a
 * full sync so the database reflects the new files. Existing files are never
 * overwritten; an existing sidecar only gets its ID/name backfilled when
 * those keys are missing.
 *
 * @param {object} opts
 * @param {string} opts.kind - "character" or "place" (selects id key/prefix).
 * @param {string} opts.name - Display name; must slugify to a non-empty slug.
 * @param {string} [opts.notes] - Optional prose used instead of the template.
 * @param {string} [opts.projectId] - Project to root under, if any.
 * @param {string} [opts.universeId] - Universe to root under otherwise.
 * @param {object} [opts.meta] - Extra metadata merged into a NEW sidecar only.
 * @returns {object} { created, id, prose_path, meta_path, project_id, universe_id }.
 * @throws {Error} When the name has no alphanumerics, or an existing sidecar
 *   is invalid YAML / not a mapping.
 */
function createCanonicalWorldEntity({ kind, name, notes, projectId, universeId, meta }) {
  const prefix = kind === "character" ? "char" : "place";
  const idKey = kind === "character" ? "character_id" : "place_id";
  const slug = slugifyEntityName(name);
  if (!slug) throw new Error("Name must contain at least one alphanumeric character.");

  const { dir } = resolveWorldEntityDir({ kind, projectId, universeId, name });
  const prosePath = path.join(dir, "sheet.md");
  const metaPath = sidecarPath(prosePath);
  // Capture existence up front: later writes are gated on these flags, and
  // `created` reports whether the entity was entirely new.
  const hadProse = fs.existsSync(prosePath);
  const hadMeta = fs.existsSync(metaPath);

  let shouldWriteMeta = !hadMeta;
  let payload;
  const derivedId = `${prefix}-${slug}`;
  if (hadMeta) {
    // Existing sidecar: parse strictly and only backfill missing id/name.
    let parsedMeta;
    try {
      parsedMeta = yaml.load(fs.readFileSync(metaPath, "utf8"));
    } catch (err) {
      throw new Error(
        `Existing metadata sidecar is invalid YAML at ${metaPath}: ${err.message}`,
        { cause: err }
      );
    }

    // Reject arrays/scalars; only a mapping (or empty file) is acceptable.
    if (parsedMeta != null && (typeof parsedMeta !== "object" || Array.isArray(parsedMeta))) {
      throw new Error(`Existing metadata sidecar must be a YAML mapping at ${metaPath}.`);
    }

    const existingMeta = parsedMeta ?? {};

    const backfilledId = existingMeta[idKey] ?? derivedId;
    const backfilledName = existingMeta.name ?? name;
    // Rewrite the sidecar only when a backfill is actually needed.
    shouldWriteMeta = existingMeta[idKey] == null || existingMeta.name == null;
    payload = shouldWriteMeta
      ? {
          ...existingMeta,
          [idKey]: backfilledId,
          name: backfilledName,
        }
      : existingMeta;
  } else {
    // Fresh sidecar: derived id + name, with caller-supplied meta merged in
    // (meta may override id/name since it spreads last).
    payload = {
      [idKey]: derivedId,
      name,
      ...(meta ?? {}),
    };
  }

  fs.mkdirSync(dir, { recursive: true });

  if (!hadProse) {
    const defaultSheet = kind === "character"
      ? renderCharacterSheetTemplate(name)
      : renderPlaceSheetTemplate(name);
    const body = notes?.trim() ?? defaultSheet;
    // Trailing newline only when there is content to terminate.
    fs.writeFileSync(prosePath, `${body}${body ? "\n" : ""}`, "utf8");
  }

  // Characters additionally get an arc template, created once.
  if (kind === "character") {
    const arcPath = path.join(dir, "arc.md");
    if (!fs.existsSync(arcPath)) {
      fs.writeFileSync(arcPath, `${renderCharacterArcTemplate(name)}\n`, "utf8");
    }
  }

  if (shouldWriteMeta) {
    fs.writeFileSync(metaPath, yaml.dump(payload, { lineWidth: 120 }), "utf8");
  }

  // Re-index so the DB picks up any files written above.
  syncAll(db, SYNC_DIR, { writable: SYNC_DIR_WRITABLE });

  return {
    created: !hadProse && !hadMeta,
    id: payload[idKey],
    prose_path: prosePath,
    meta_path: metaPath,
    project_id: projectId ?? null,
    universe_id: universeId ?? null,
  };
}
657
-
658
130
  // ---------------------------------------------------------------------------
659
131
  // Database setup
660
132
  // ---------------------------------------------------------------------------
@@ -682,6 +154,13 @@ if (stalledJobs.length > 0) {
682
154
  process.stderr.write(`[mcp-writing] Marked ${stalledJobs.length} stalled job(s) as failed after restart.\n`);
683
155
  }
684
156
 
157
+ const { pruneAsyncJobs, startAsyncJob, toPublicJob } = createAsyncJobManager({
158
+ db,
159
+ asyncJobs,
160
+ ttlMs: ASYNC_JOB_TTL_MS,
161
+ runnerDir: __dirname,
162
+ });
163
+
685
164
  process.stderr.write(`[mcp-writing] Sync dir: ${SYNC_DIR_ABS}\n`);
686
165
  process.stderr.write(`[mcp-writing] DB path: ${DB_PATH_DISPLAY}\n`);
687
166
 
@@ -813,6 +292,20 @@ if (SHOULD_ENFORCE_OWNERSHIP_FAIL_GUARD && SYNC_OWNERSHIP_DIAGNOSTICS.non_runtim
813
292
  process.exit(1);
814
293
  }
815
294
 
295
+ const {
296
+ isPathInsideSyncDir,
297
+ isPathCandidateInsideSyncDir,
298
+ resolveOutputDirWithinSync,
299
+ resolveProjectRoot,
300
+ createCanonicalWorldEntity,
301
+ } = createHelpers({
302
+ syncDir: SYNC_DIR,
303
+ syncDirReal: SYNC_DIR_REAL,
304
+ syncDirAbs: SYNC_DIR_ABS,
305
+ db,
306
+ syncDirWritable: SYNC_DIR_WRITABLE,
307
+ });
308
+
816
309
  // Run sync on startup
817
310
  syncAll(db, SYNC_DIR, { writable: SYNC_DIR_WRITABLE });
818
311
 
package/package.json CHANGED
@@ -1,12 +1,14 @@
1
1
  {
2
2
  "name": "@hanna84/mcp-writing",
3
- "version": "2.10.1",
3
+ "version": "2.10.3",
4
4
  "description": "MCP service for AI-assisted reasoning and editing on long-form fiction projects",
5
5
  "type": "module",
6
6
  "main": "index.js",
7
7
  "files": [
8
8
  "index.js",
9
+ "async-jobs.js",
9
10
  "async-progress.js",
11
+ "helpers.js",
10
12
  "scene-character-batch.js",
11
13
  "scrivener-direct.js",
12
14
  "importer.js",