@hanna84/mcp-writing 2.10.2 → 2.10.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/CHANGELOG.md +10 -0
  2. package/helpers.js +345 -0
  3. package/index.js +23 -327
  4. package/package.json +2 -1
package/CHANGELOG.md CHANGED
@@ -4,11 +4,21 @@ All notable changes to this project will be documented in this file. Dates are d
4
4
 
5
5
  Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
6
6
 
7
+ #### [v2.10.3](https://github.com/hannasdev/mcp-writing.git
8
+ /compare/v2.10.2...v2.10.3)
9
+
10
+ - refactor(helpers): extract domain helpers and path safety utilities from index.js [`#107`](https://github.com/hannasdev/mcp-writing.git
11
+ /pull/107)
12
+
7
13
  #### [v2.10.2](https://github.com/hannasdev/mcp-writing.git
8
14
  /compare/v2.10.1...v2.10.2)
9
15
 
16
+ > 26 April 2026
17
+
10
18
  - refactor(async-jobs): extract startAsyncJob/pruneAsyncJobs/toPublicJob into async-jobs.js [`#106`](https://github.com/hannasdev/mcp-writing.git
11
19
  /pull/106)
20
+ - Release 2.10.2 [`7478041`](https://github.com/hannasdev/mcp-writing.git
21
+ /commit/7478041fb85b7c7ccb35b9cd54e8a75171a8270c)
12
22
 
13
23
  #### [v2.10.1](https://github.com/hannasdev/mcp-writing.git
14
24
  /compare/v2.10.0...v2.10.1)
package/helpers.js ADDED
@@ -0,0 +1,345 @@
1
+ import fs from "node:fs";
2
+ import path from "node:path";
3
+ import matter from "gray-matter";
4
+ import yaml from "js-yaml";
5
+ import { sidecarPath, syncAll } from "./sync.js";
6
+ import {
7
+ slugifyEntityName,
8
+ renderCharacterSheetTemplate,
9
+ renderPlaceSheetTemplate,
10
+ renderCharacterArcTemplate,
11
+ } from "./world-entity-templates.js";
12
+ import { ReviewBundlePlanError } from "./review-bundles.js";
13
+
14
+ export function deriveLoglineFromProse(prose) {
15
+ const compact = prose.replace(/\s+/g, " ").trim();
16
+ if (!compact) return null;
17
+ const sentence = compact.match(/^(.+?[.!?])(?:\s|$)/);
18
+ const candidate = (sentence?.[1] ?? compact).trim();
19
+ if (candidate.length <= 220) return candidate;
20
+ return `${candidate.slice(0, 217).trimEnd()}...`;
21
+ }
22
+
23
+ export function inferCharacterIdsFromProse(dbHandle, prose, projectId) {
24
+ const lower = prose.toLowerCase();
25
+ const rows = dbHandle.prepare(`
26
+ SELECT character_id, name
27
+ FROM characters
28
+ WHERE project_id = ? OR universe_id = (SELECT universe_id FROM projects WHERE project_id = ?)
29
+ ORDER BY length(name) DESC
30
+ `).all(projectId, projectId);
31
+
32
+ const found = [];
33
+ for (const row of rows) {
34
+ if (!row.name) continue;
35
+ const words = row.name.toLowerCase().split(/\s+/).filter(Boolean);
36
+ if (words.length && words.every(w => lower.includes(w))) {
37
+ found.push(row.character_id);
38
+ }
39
+ }
40
+ return [...new Set(found)].slice(0, 12);
41
+ }
42
+
43
+ export function readSupportingNotesForEntity(filePath) {
44
+ const ext = path.extname(filePath).toLowerCase();
45
+ const base = path.basename(filePath, ext).toLowerCase();
46
+ if (base !== "sheet") return [];
47
+
48
+ const dir = path.dirname(filePath);
49
+ let entries;
50
+ try {
51
+ entries = fs.readdirSync(dir, { withFileTypes: true });
52
+ } catch {
53
+ return [];
54
+ }
55
+
56
+ return entries
57
+ .filter(entry => entry.isFile())
58
+ .map(entry => entry.name)
59
+ .filter(name => /\.(md|txt)$/i.test(name))
60
+ .filter(name => !/^sheet\.(md|txt)$/i.test(name))
61
+ .sort((a, b) => a.localeCompare(b))
62
+ .map(name => {
63
+ const notePath = path.join(dir, name);
64
+ try {
65
+ const raw = fs.readFileSync(notePath, "utf8");
66
+ const { content } = matter(raw);
67
+ return {
68
+ file_name: name,
69
+ content: content.trim(),
70
+ };
71
+ } catch {
72
+ return null;
73
+ }
74
+ })
75
+ .filter(Boolean)
76
+ .filter(note => note.content);
77
+ }
78
+
79
+ export function readEntityMetadata(filePath) {
80
+ const metaPath = sidecarPath(filePath);
81
+ if (fs.existsSync(metaPath)) {
82
+ try {
83
+ return yaml.load(fs.readFileSync(metaPath, "utf8")) ?? {};
84
+ } catch {
85
+ return {};
86
+ }
87
+ }
88
+
89
+ try {
90
+ return matter(fs.readFileSync(filePath, "utf8")).data ?? {};
91
+ } catch {
92
+ return {};
93
+ }
94
+ }
95
+
96
+ export function resolveBatchTargetScenes(dbHandle, {
97
+ projectId,
98
+ sceneIds,
99
+ part,
100
+ chapter,
101
+ onlyStale,
102
+ }) {
103
+ const projectExists = Boolean(
104
+ dbHandle.prepare(`SELECT 1 FROM projects WHERE project_id = ? LIMIT 1`).get(projectId)
105
+ );
106
+
107
+ if (sceneIds?.length) {
108
+ const placeholders = sceneIds.map(() => "?").join(",");
109
+ const existingRows = dbHandle.prepare(
110
+ `SELECT scene_id FROM scenes WHERE project_id = ? AND scene_id IN (${placeholders})`
111
+ ).all(projectId, ...sceneIds);
112
+ const existing = new Set(existingRows.map(row => row.scene_id));
113
+ const missing = sceneIds.filter(sceneId => !existing.has(sceneId));
114
+ if (missing.length > 0) {
115
+ return { ok: false, code: "NOT_FOUND", message: `Requested scene IDs were not found in project '${projectId}'.`, details: { missing_scene_ids: missing, project_id: projectId } };
116
+ }
117
+ }
118
+
119
+ const conditions = ["project_id = ?"];
120
+ const params = [projectId];
121
+
122
+ if (sceneIds?.length) {
123
+ const placeholders = sceneIds.map(() => "?").join(",");
124
+ conditions.push(`scene_id IN (${placeholders})`);
125
+ params.push(...sceneIds);
126
+ }
127
+ if (part !== undefined) {
128
+ conditions.push("part = ?");
129
+ params.push(part);
130
+ }
131
+ if (chapter !== undefined) {
132
+ conditions.push("chapter = ?");
133
+ params.push(chapter);
134
+ }
135
+ if (onlyStale) {
136
+ conditions.push("metadata_stale = 1");
137
+ }
138
+
139
+ const query = `
140
+ SELECT scene_id, project_id, file_path
141
+ FROM scenes
142
+ WHERE ${conditions.join(" AND ")}
143
+ ORDER BY part, chapter, timeline_position
144
+ `;
145
+
146
+ return {
147
+ ok: true,
148
+ rows: dbHandle.prepare(query).all(...params),
149
+ project_exists: projectExists,
150
+ };
151
+ }
152
+
153
+ export function createHelpers({ syncDir, syncDirReal, syncDirAbs, db, syncDirWritable }) {
154
+ function isPathInsideSyncDir(candidatePath) {
155
+ const resolvedCandidate = path.resolve(candidatePath);
156
+ const canonicalCandidate = (() => {
157
+ try {
158
+ return fs.realpathSync(resolvedCandidate);
159
+ } catch {
160
+ return resolvedCandidate;
161
+ }
162
+ })();
163
+
164
+ const rel = path.relative(syncDirReal, canonicalCandidate);
165
+ return !(rel.startsWith("..") || path.isAbsolute(rel));
166
+ }
167
+
168
+ // Like isPathInsideSyncDir, but works for paths that do not yet exist by
169
+ // walking up to the nearest existing ancestor before canonicalising.
170
+ function isPathCandidateInsideSyncDir(candidatePath) {
171
+ const resolvedCandidate = path.resolve(candidatePath);
172
+
173
+ let existingAncestor = resolvedCandidate;
174
+ while (!fs.existsSync(existingAncestor)) {
175
+ const parent = path.dirname(existingAncestor);
176
+ if (parent === existingAncestor) break;
177
+ existingAncestor = parent;
178
+ }
179
+
180
+ const canonicalBase = (() => {
181
+ try {
182
+ return fs.realpathSync(existingAncestor);
183
+ } catch {
184
+ return existingAncestor;
185
+ }
186
+ })();
187
+
188
+ const canonical = path.resolve(canonicalBase, path.relative(existingAncestor, resolvedCandidate));
189
+ const rel = path.relative(syncDirReal, canonical);
190
+ return !(rel.startsWith("..") || path.isAbsolute(rel));
191
+ }
192
+
193
+ function resolveOutputDirWithinSync(outputDir) {
194
+ let resolvedOutputDir = path.resolve(outputDir);
195
+ let existingAncestor = resolvedOutputDir;
196
+
197
+ while (!fs.existsSync(existingAncestor)) {
198
+ const parentDir = path.dirname(existingAncestor);
199
+ if (parentDir === existingAncestor) {
200
+ throw new ReviewBundlePlanError(
201
+ "INVALID_OUTPUT_DIR",
202
+ "output_dir must be inside WRITING_SYNC_DIR.",
203
+ { output_dir: resolvedOutputDir, sync_dir: syncDirAbs }
204
+ );
205
+ }
206
+ existingAncestor = parentDir;
207
+ }
208
+
209
+ let realExistingAncestor;
210
+ try {
211
+ realExistingAncestor = fs.realpathSync.native(existingAncestor);
212
+ } catch (err) {
213
+ throw new ReviewBundlePlanError(
214
+ "INVALID_OUTPUT_DIR",
215
+ "output_dir ancestor could not be resolved: path may be inaccessible.",
216
+ { output_dir: outputDir, existing_ancestor: existingAncestor, cause: err.message }
217
+ );
218
+ }
219
+ const relativeFromAncestor = path.relative(existingAncestor, resolvedOutputDir);
220
+ resolvedOutputDir = path.resolve(realExistingAncestor, relativeFromAncestor);
221
+
222
+ const relativeToSyncDir = path.relative(syncDirReal, resolvedOutputDir);
223
+ if (relativeToSyncDir.startsWith("..") || path.isAbsolute(relativeToSyncDir)) {
224
+ throw new ReviewBundlePlanError(
225
+ "INVALID_OUTPUT_DIR",
226
+ "output_dir must be inside WRITING_SYNC_DIR.",
227
+ { output_dir: resolvedOutputDir, sync_dir: syncDirAbs }
228
+ );
229
+ }
230
+
231
+ return { resolvedOutputDir, relativeToSyncDir };
232
+ }
233
+
234
+ function resolveProjectRoot(projectId) {
235
+ if (projectId.includes("/")) {
236
+ const [universeId, projectSlug] = projectId.split("/");
237
+ return path.join(syncDir, "universes", universeId, projectSlug);
238
+ }
239
+ return path.join(syncDir, "projects", projectId);
240
+ }
241
+
242
+ function resolveWorldEntityDir({ kind, projectId, universeId, name }) {
243
+ const slug = slugifyEntityName(name);
244
+ const baseDir = projectId
245
+ ? path.join(resolveProjectRoot(projectId), "world")
246
+ : path.join(syncDir, "universes", universeId, "world");
247
+ const bucket = kind === "character" ? "characters" : "places";
248
+ return {
249
+ slug,
250
+ dir: path.join(baseDir, bucket, slug),
251
+ };
252
+ }
253
+
254
+ function createCanonicalWorldEntity({ kind, name, notes, projectId, universeId, meta }) {
255
+ const prefix = kind === "character" ? "char" : "place";
256
+ const idKey = kind === "character" ? "character_id" : "place_id";
257
+ const slug = slugifyEntityName(name);
258
+ if (!slug) throw new Error("Name must contain at least one alphanumeric character.");
259
+
260
+ const { dir } = resolveWorldEntityDir({ kind, projectId, universeId, name });
261
+ const prosePath = path.join(dir, "sheet.md");
262
+ const metaPath = sidecarPath(prosePath);
263
+ const hadProse = fs.existsSync(prosePath);
264
+ const hadMeta = fs.existsSync(metaPath);
265
+
266
+ let shouldWriteMeta = !hadMeta;
267
+ let payload;
268
+ const derivedId = `${prefix}-${slug}`;
269
+ if (hadMeta) {
270
+ let parsedMeta;
271
+ try {
272
+ parsedMeta = yaml.load(fs.readFileSync(metaPath, "utf8"));
273
+ } catch (err) {
274
+ throw new Error(
275
+ `Existing metadata sidecar is invalid YAML at ${metaPath}: ${err.message}`,
276
+ { cause: err }
277
+ );
278
+ }
279
+
280
+ if (parsedMeta != null && (typeof parsedMeta !== "object" || Array.isArray(parsedMeta))) {
281
+ throw new Error(`Existing metadata sidecar must be a YAML mapping at ${metaPath}.`);
282
+ }
283
+
284
+ const existingMeta = parsedMeta ?? {};
285
+
286
+ const backfilledId = existingMeta[idKey] ?? derivedId;
287
+ const backfilledName = existingMeta.name ?? name;
288
+ shouldWriteMeta = existingMeta[idKey] == null || existingMeta.name == null;
289
+ payload = shouldWriteMeta
290
+ ? {
291
+ ...existingMeta,
292
+ [idKey]: backfilledId,
293
+ name: backfilledName,
294
+ }
295
+ : existingMeta;
296
+ } else {
297
+ payload = {
298
+ [idKey]: derivedId,
299
+ name,
300
+ ...(meta ?? {}),
301
+ };
302
+ }
303
+
304
+ fs.mkdirSync(dir, { recursive: true });
305
+
306
+ if (!hadProse) {
307
+ const defaultSheet = kind === "character"
308
+ ? renderCharacterSheetTemplate(name)
309
+ : renderPlaceSheetTemplate(name);
310
+ const body = notes?.trim() ?? defaultSheet;
311
+ fs.writeFileSync(prosePath, `${body}${body ? "\n" : ""}`, "utf8");
312
+ }
313
+
314
+ if (kind === "character") {
315
+ const arcPath = path.join(dir, "arc.md");
316
+ if (!fs.existsSync(arcPath)) {
317
+ fs.writeFileSync(arcPath, `${renderCharacterArcTemplate(name)}\n`, "utf8");
318
+ }
319
+ }
320
+
321
+ if (shouldWriteMeta) {
322
+ fs.writeFileSync(metaPath, yaml.dump(payload, { lineWidth: 120 }), "utf8");
323
+ }
324
+
325
+ syncAll(db, syncDir, { writable: syncDirWritable });
326
+
327
+ return {
328
+ created: !hadProse && !hadMeta,
329
+ id: payload[idKey],
330
+ prose_path: prosePath,
331
+ meta_path: metaPath,
332
+ project_id: projectId ?? null,
333
+ universe_id: universeId ?? null,
334
+ };
335
+ }
336
+
337
+ return {
338
+ isPathInsideSyncDir,
339
+ isPathCandidateInsideSyncDir,
340
+ resolveOutputDirWithinSync,
341
+ resolveProjectRoot,
342
+ resolveWorldEntityDir,
343
+ createCanonicalWorldEntity,
344
+ };
345
+ }
package/index.js CHANGED
@@ -6,15 +6,19 @@ import fs from "node:fs";
6
6
  import path from "node:path";
7
7
  import { randomUUID } from "node:crypto";
8
8
  import { fileURLToPath } from "node:url";
9
- import matter from "gray-matter";
10
- import yaml from "js-yaml";
11
9
  import { openDb, checkpointJobFinish, loadStalledJobs, pruneJobCheckpoints } from "./db.js";
12
- import { syncAll, isSyncDirWritable, getSyncOwnershipDiagnostics, sidecarPath, isStructuralProjectId } from "./sync.js";
10
+ import { syncAll, isSyncDirWritable, getSyncOwnershipDiagnostics, isStructuralProjectId } from "./sync.js";
13
11
  import { isGitAvailable, isGitRepository, initGitRepository, getSceneProseAtCommit } from "./git.js";
14
- import { renderCharacterArcTemplate, renderCharacterSheetTemplate, renderPlaceSheetTemplate, slugifyEntityName } from "./world-entity-templates.js";
15
12
  import { createAsyncJobManager, readJsonIfExists } from "./async-jobs.js";
13
+ import {
14
+ createHelpers,
15
+ deriveLoglineFromProse,
16
+ inferCharacterIdsFromProse,
17
+ readSupportingNotesForEntity,
18
+ readEntityMetadata,
19
+ resolveBatchTargetScenes,
20
+ } from "./helpers.js";
16
21
  import { STYLEGUIDE_CONFIG_BASENAME } from "./prose-styleguide.js";
17
- import { ReviewBundlePlanError } from "./review-bundles.js";
18
22
  import { registerSyncTools } from "./tools/sync.js";
19
23
  import { registerSearchTools } from "./tools/search.js";
20
24
  import { registerMetadataTools } from "./tools/metadata.js";
@@ -34,86 +38,6 @@ const SYNC_DIR_REAL = (() => {
34
38
  })();
35
39
  const DB_PATH_DISPLAY = DB_PATH === ":memory:" ? DB_PATH : path.resolve(DB_PATH);
36
40
 
37
- function isPathInsideSyncDir(candidatePath) {
38
- const resolvedCandidate = path.resolve(candidatePath);
39
- const canonicalCandidate = (() => {
40
- try {
41
- return fs.realpathSync(resolvedCandidate);
42
- } catch {
43
- return resolvedCandidate;
44
- }
45
- })();
46
-
47
- const rel = path.relative(SYNC_DIR_REAL, canonicalCandidate);
48
- return !(rel.startsWith("..") || path.isAbsolute(rel));
49
- }
50
-
51
- // Like isPathInsideSyncDir, but works for paths that do not yet exist by
52
- // walking up to the nearest existing ancestor before canonicalising.
53
- function isPathCandidateInsideSyncDir(candidatePath) {
54
- const resolvedCandidate = path.resolve(candidatePath);
55
-
56
- let existingAncestor = resolvedCandidate;
57
- while (!fs.existsSync(existingAncestor)) {
58
- const parent = path.dirname(existingAncestor);
59
- if (parent === existingAncestor) break;
60
- existingAncestor = parent;
61
- }
62
-
63
- const canonicalBase = (() => {
64
- try {
65
- return fs.realpathSync(existingAncestor);
66
- } catch {
67
- return existingAncestor;
68
- }
69
- })();
70
-
71
- const canonical = path.resolve(canonicalBase, path.relative(existingAncestor, resolvedCandidate));
72
- const rel = path.relative(SYNC_DIR_REAL, canonical);
73
- return !(rel.startsWith("..") || path.isAbsolute(rel));
74
- }
75
-
76
- function resolveOutputDirWithinSync(outputDir) {
77
- let resolvedOutputDir = path.resolve(outputDir);
78
- let existingAncestor = resolvedOutputDir;
79
-
80
- while (!fs.existsSync(existingAncestor)) {
81
- const parentDir = path.dirname(existingAncestor);
82
- if (parentDir === existingAncestor) {
83
- throw new ReviewBundlePlanError(
84
- "INVALID_OUTPUT_DIR",
85
- "output_dir must be inside WRITING_SYNC_DIR.",
86
- { output_dir: resolvedOutputDir, sync_dir: SYNC_DIR_ABS }
87
- );
88
- }
89
- existingAncestor = parentDir;
90
- }
91
-
92
- let realExistingAncestor;
93
- try {
94
- realExistingAncestor = fs.realpathSync.native(existingAncestor);
95
- } catch (err) {
96
- throw new ReviewBundlePlanError(
97
- "INVALID_OUTPUT_DIR",
98
- "output_dir ancestor could not be resolved: path may be inaccessible.",
99
- { output_dir: outputDir, existing_ancestor: existingAncestor, cause: err.message }
100
- );
101
- }
102
- const relativeFromAncestor = path.relative(existingAncestor, resolvedOutputDir);
103
- resolvedOutputDir = path.resolve(realExistingAncestor, relativeFromAncestor);
104
-
105
- const relativeToSyncDir = path.relative(SYNC_DIR_REAL, resolvedOutputDir);
106
- if (relativeToSyncDir.startsWith("..") || path.isAbsolute(relativeToSyncDir)) {
107
- throw new ReviewBundlePlanError(
108
- "INVALID_OUTPUT_DIR",
109
- "output_dir must be inside WRITING_SYNC_DIR.",
110
- { output_dir: resolvedOutputDir, sync_dir: SYNC_DIR_ABS }
111
- );
112
- }
113
-
114
- return { resolvedOutputDir, relativeToSyncDir };
115
- }
116
-
117
41
  function parsePositiveIntEnv(rawValue, defaultValue) {
118
42
  const parsed = parseInt(rawValue ?? String(defaultValue), 10);
119
43
  return Number.isFinite(parsed) && parsed > 0 ? parsed : defaultValue;
@@ -203,248 +127,6 @@ function errorResponse(code, message, details) {
203
127
  return jsonResponse(payload);
204
128
  }
205
129
 
206
- function deriveLoglineFromProse(prose) {
207
- const compact = prose.replace(/\s+/g, " ").trim();
208
- if (!compact) return null;
209
- const sentence = compact.match(/^(.+?[.!?])(?:\s|$)/);
210
- const candidate = (sentence?.[1] ?? compact).trim();
211
- if (candidate.length <= 220) return candidate;
212
- return `${candidate.slice(0, 217).trimEnd()}...`;
213
- }
214
-
215
- function inferCharacterIdsFromProse(dbHandle, prose, projectId) {
216
- const lower = prose.toLowerCase();
217
- const rows = dbHandle.prepare(`
218
- SELECT character_id, name
219
- FROM characters
220
- WHERE project_id = ? OR universe_id = (SELECT universe_id FROM projects WHERE project_id = ?)
221
- ORDER BY length(name) DESC
222
- `).all(projectId, projectId);
223
-
224
- const found = [];
225
- for (const row of rows) {
226
- if (!row.name) continue;
227
- const words = row.name.toLowerCase().split(/\s+/).filter(Boolean);
228
- if (words.length && words.every(w => lower.includes(w))) {
229
- found.push(row.character_id);
230
- }
231
- }
232
- return [...new Set(found)].slice(0, 12);
233
- }
234
-
235
- function readSupportingNotesForEntity(filePath) {
236
- const ext = path.extname(filePath).toLowerCase();
237
- const base = path.basename(filePath, ext).toLowerCase();
238
- if (base !== "sheet") return [];
239
-
240
- const dir = path.dirname(filePath);
241
- let entries;
242
- try {
243
- entries = fs.readdirSync(dir, { withFileTypes: true });
244
- } catch {
245
- return [];
246
- }
247
-
248
- return entries
249
- .filter(entry => entry.isFile())
250
- .map(entry => entry.name)
251
- .filter(name => /\.(md|txt)$/i.test(name))
252
- .filter(name => !/^sheet\.(md|txt)$/i.test(name))
253
- .sort((a, b) => a.localeCompare(b))
254
- .map(name => {
255
- const notePath = path.join(dir, name);
256
- try {
257
- const raw = fs.readFileSync(notePath, "utf8");
258
- const { content } = matter(raw);
259
- return {
260
- file_name: name,
261
- content: content.trim(),
262
- };
263
- } catch {
264
- return null;
265
- }
266
- })
267
- .filter(Boolean)
268
- .filter(note => note.content);
269
- }
270
-
271
- function readEntityMetadata(filePath) {
272
- const metaPath = sidecarPath(filePath);
273
- if (fs.existsSync(metaPath)) {
274
- try {
275
- return yaml.load(fs.readFileSync(metaPath, "utf8")) ?? {};
276
- } catch {
277
- return {};
278
- }
279
- }
280
-
281
- try {
282
- return matter(fs.readFileSync(filePath, "utf8")).data ?? {};
283
- } catch {
284
- return {};
285
- }
286
- }
287
-
288
- function resolveProjectRoot(projectId) {
289
- if (projectId.includes("/")) {
290
- const [universeId, projectSlug] = projectId.split("/");
291
- return path.join(SYNC_DIR, "universes", universeId, projectSlug);
292
- }
293
- return path.join(SYNC_DIR, "projects", projectId);
294
- }
295
-
296
- function resolveWorldEntityDir({ kind, projectId, universeId, name }) {
297
- const slug = slugifyEntityName(name);
298
- const baseDir = projectId
299
- ? path.join(resolveProjectRoot(projectId), "world")
300
- : path.join(SYNC_DIR, "universes", universeId, "world");
301
- const bucket = kind === "character" ? "characters" : "places";
302
- return {
303
- slug,
304
- dir: path.join(baseDir, bucket, slug),
305
- };
306
- }
307
-
308
- function resolveBatchTargetScenes(dbHandle, {
309
- projectId,
310
- sceneIds,
311
- part,
312
- chapter,
313
- onlyStale,
314
- }) {
315
- const projectExists = Boolean(
316
- dbHandle.prepare(`SELECT 1 FROM projects WHERE project_id = ? LIMIT 1`).get(projectId)
317
- );
318
-
319
- if (sceneIds?.length) {
320
- const placeholders = sceneIds.map(() => "?").join(",");
321
- const existingRows = dbHandle.prepare(
322
- `SELECT scene_id FROM scenes WHERE project_id = ? AND scene_id IN (${placeholders})`
323
- ).all(projectId, ...sceneIds);
324
- const existing = new Set(existingRows.map(row => row.scene_id));
325
- const missing = sceneIds.filter(sceneId => !existing.has(sceneId));
326
- if (missing.length > 0) {
327
- return { ok: false, code: "NOT_FOUND", message: `Requested scene IDs were not found in project '${projectId}'.`, details: { missing_scene_ids: missing, project_id: projectId } };
328
- }
329
- }
330
-
331
- const conditions = ["project_id = ?"];
332
- const params = [projectId];
333
-
334
- if (sceneIds?.length) {
335
- const placeholders = sceneIds.map(() => "?").join(",");
336
- conditions.push(`scene_id IN (${placeholders})`);
337
- params.push(...sceneIds);
338
- }
339
- if (part !== undefined) {
340
- conditions.push("part = ?");
341
- params.push(part);
342
- }
343
- if (chapter !== undefined) {
344
- conditions.push("chapter = ?");
345
- params.push(chapter);
346
- }
347
- if (onlyStale) {
348
- conditions.push("metadata_stale = 1");
349
- }
350
-
351
- const query = `
352
- SELECT scene_id, project_id, file_path
353
- FROM scenes
354
- WHERE ${conditions.join(" AND ")}
355
- ORDER BY part, chapter, timeline_position
356
- `;
357
-
358
- return {
359
- ok: true,
360
- rows: dbHandle.prepare(query).all(...params),
361
- project_exists: projectExists,
362
- };
363
- }
364
-
365
- function createCanonicalWorldEntity({ kind, name, notes, projectId, universeId, meta }) {
366
- const prefix = kind === "character" ? "char" : "place";
367
- const idKey = kind === "character" ? "character_id" : "place_id";
368
- const slug = slugifyEntityName(name);
369
- if (!slug) throw new Error("Name must contain at least one alphanumeric character.");
370
-
371
- const { dir } = resolveWorldEntityDir({ kind, projectId, universeId, name });
372
- const prosePath = path.join(dir, "sheet.md");
373
- const metaPath = sidecarPath(prosePath);
374
- const hadProse = fs.existsSync(prosePath);
375
- const hadMeta = fs.existsSync(metaPath);
376
-
377
- let shouldWriteMeta = !hadMeta;
378
- let payload;
379
- const derivedId = `${prefix}-${slug}`;
380
- if (hadMeta) {
381
- let parsedMeta;
382
- try {
383
- parsedMeta = yaml.load(fs.readFileSync(metaPath, "utf8"));
384
- } catch (err) {
385
- throw new Error(
386
- `Existing metadata sidecar is invalid YAML at ${metaPath}: ${err.message}`,
387
- { cause: err }
388
- );
389
- }
390
-
391
- if (parsedMeta != null && (typeof parsedMeta !== "object" || Array.isArray(parsedMeta))) {
392
- throw new Error(`Existing metadata sidecar must be a YAML mapping at ${metaPath}.`);
393
- }
394
-
395
- const existingMeta = parsedMeta ?? {};
396
-
397
- const backfilledId = existingMeta[idKey] ?? derivedId;
398
- const backfilledName = existingMeta.name ?? name;
399
- shouldWriteMeta = existingMeta[idKey] == null || existingMeta.name == null;
400
- payload = shouldWriteMeta
401
- ? {
402
- ...existingMeta,
403
- [idKey]: backfilledId,
404
- name: backfilledName,
405
- }
406
- : existingMeta;
407
- } else {
408
- payload = {
409
- [idKey]: derivedId,
410
- name,
411
- ...(meta ?? {}),
412
- };
413
- }
414
-
415
- fs.mkdirSync(dir, { recursive: true });
416
-
417
- if (!hadProse) {
418
- const defaultSheet = kind === "character"
419
- ? renderCharacterSheetTemplate(name)
420
- : renderPlaceSheetTemplate(name);
421
- const body = notes?.trim() ?? defaultSheet;
422
- fs.writeFileSync(prosePath, `${body}${body ? "\n" : ""}`, "utf8");
423
- }
424
-
425
- if (kind === "character") {
426
- const arcPath = path.join(dir, "arc.md");
427
- if (!fs.existsSync(arcPath)) {
428
- fs.writeFileSync(arcPath, `${renderCharacterArcTemplate(name)}\n`, "utf8");
429
- }
430
- }
431
-
432
- if (shouldWriteMeta) {
433
- fs.writeFileSync(metaPath, yaml.dump(payload, { lineWidth: 120 }), "utf8");
434
- }
435
-
436
- syncAll(db, SYNC_DIR, { writable: SYNC_DIR_WRITABLE });
437
-
438
- return {
439
- created: !hadProse && !hadMeta,
440
- id: payload[idKey],
441
- prose_path: prosePath,
442
- meta_path: metaPath,
443
- project_id: projectId ?? null,
444
- universe_id: universeId ?? null,
445
- };
446
- }
447
-
448
130
  // ---------------------------------------------------------------------------
449
131
  // Database setup
450
132
  // ---------------------------------------------------------------------------
@@ -610,6 +292,20 @@ if (SHOULD_ENFORCE_OWNERSHIP_FAIL_GUARD && SYNC_OWNERSHIP_DIAGNOSTICS.non_runtim
610
292
  process.exit(1);
611
293
  }
612
294
 
295
+ const {
296
+ isPathInsideSyncDir,
297
+ isPathCandidateInsideSyncDir,
298
+ resolveOutputDirWithinSync,
299
+ resolveProjectRoot,
300
+ createCanonicalWorldEntity,
301
+ } = createHelpers({
302
+ syncDir: SYNC_DIR,
303
+ syncDirReal: SYNC_DIR_REAL,
304
+ syncDirAbs: SYNC_DIR_ABS,
305
+ db,
306
+ syncDirWritable: SYNC_DIR_WRITABLE,
307
+ });
308
+
613
309
  // Run sync on startup
614
310
  syncAll(db, SYNC_DIR, { writable: SYNC_DIR_WRITABLE });
615
311
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hanna84/mcp-writing",
3
- "version": "2.10.2",
3
+ "version": "2.10.3",
4
4
  "description": "MCP service for AI-assisted reasoning and editing on long-form fiction projects",
5
5
  "type": "module",
6
6
  "main": "index.js",
@@ -8,6 +8,7 @@
8
8
  "index.js",
9
9
  "async-jobs.js",
10
10
  "async-progress.js",
11
+ "helpers.js",
11
12
  "scene-character-batch.js",
12
13
  "scrivener-direct.js",
13
14
  "importer.js",