@hanna84/mcp-writing 2.12.9 → 2.12.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -4,11 +4,21 @@ All notable changes to this project will be documented in this file. Dates are d
4
4
 
5
5
  Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
6
6
 
7
+ #### [v2.12.10](https://github.com/hannasdev/mcp-writing.git
8
+ /compare/v2.12.9...v2.12.10)
9
+
10
+ - refactor(scripts): move async job runner under src/scripts [`#125`](https://github.com/hannasdev/mcp-writing.git
11
+ /pull/125)
12
+
7
13
  #### [v2.12.9](https://github.com/hannasdev/mcp-writing.git
8
14
  /compare/v2.12.8...v2.12.9)
9
15
 
16
+ > 29 April 2026
17
+
10
18
  - refactor(tools): move tool modules under src/tools [`#124`](https://github.com/hannasdev/mcp-writing.git
11
19
  /pull/124)
20
+ - Release 2.12.9 [`316b703`](https://github.com/hannasdev/mcp-writing.git
21
+ /commit/316b7039a6f2d6bf13e6770d5a9e134ff1144c88)
12
22
 
13
23
  #### [v2.12.8](https://github.com/hannasdev/mcp-writing.git
14
24
  /compare/v2.12.7...v2.12.8)
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hanna84/mcp-writing",
3
- "version": "2.12.9",
3
+ "version": "2.12.10",
4
4
  "description": "MCP service for AI-assisted reasoning and editing on long-form fiction projects",
5
5
  "homepage": "https://hannasdev.github.io/mcp-writing/",
6
6
  "type": "module",
@@ -1,201 +1 @@
1
- import fs from "node:fs";
2
- import path from "node:path";
3
- import { importScrivenerSync } from "../src/sync/importer.js";
4
- import { mergeScrivenerProjectMetadata } from "../src/sync/scrivener-direct.js";
5
- import { runSceneCharacterBatch } from "../src/sync/scene-character-batch.js";
6
- import { ASYNC_PROGRESS_PREFIX } from "../async-progress.js";
7
-
8
- const PROGRESS_PREFIX = ASYNC_PROGRESS_PREFIX;
9
-
10
- function writeResult(resultPath, payload) {
11
- fs.mkdirSync(path.dirname(resultPath), { recursive: true });
12
- fs.writeFileSync(resultPath, JSON.stringify(payload, null, 2), "utf8");
13
- }
14
-
15
- function writeProgress(payload) {
16
- try {
17
- process.stdout.write(`${PROGRESS_PREFIX}${JSON.stringify(payload)}\n`);
18
- } catch {
19
- // Best-effort only; never fail the job due to progress telemetry.
20
- }
21
- }
22
-
23
- function normalizeImportResult(importResult) {
24
- const importPayload = {
25
- source_dir: importResult.scrivenerDir,
26
- sync_dir: importResult.mcpSyncDir,
27
- scenes_dir: importResult.scenesDir,
28
- project_id: importResult.projectId,
29
- source_files: importResult.sourceFiles,
30
- created: importResult.created,
31
- existing: importResult.existing,
32
- skipped: importResult.skipped,
33
- beat_markers_seen: importResult.beatMarkersSeen,
34
- dry_run: importResult.dryRun,
35
- preflight: importResult.preflight,
36
- ignored_files: importResult.ignoredFiles,
37
- };
38
-
39
- if (importResult.preflight) {
40
- importPayload.files_to_process = importResult.filesToProcess;
41
- importPayload.file_previews = importResult.filePreviews;
42
- importPayload.existing_sidecars = importResult.existingSidecars;
43
- }
44
-
45
- return {
46
- ok: true,
47
- import: importPayload,
48
- sync: null,
49
- };
50
- }
51
-
52
- function normalizeMergeResult(mergeResult) {
53
- return {
54
- ok: true,
55
- merge: {
56
- source_project_dir: mergeResult.scrivPath,
57
- sync_dir: mergeResult.mcpSyncDir,
58
- scenes_dir: mergeResult.scenesDir,
59
- project_id: mergeResult.projectId,
60
- dry_run: mergeResult.dryRun,
61
- sidecar_files: mergeResult.sidecarFiles,
62
- updated: mergeResult.updated,
63
- relocated: mergeResult.relocated,
64
- unchanged: mergeResult.unchanged,
65
- no_data: mergeResult.noData,
66
- field_add_counts: mergeResult.fieldAddCounts,
67
- preview_changes: mergeResult.previewChanges,
68
- warnings: mergeResult.warnings,
69
- warnings_truncated: mergeResult.warningsTruncated,
70
- warning_summary: mergeResult.warningSummary,
71
- stats: {
72
- sync_map_entries: mergeResult.stats.syncMapEntries,
73
- keyword_map_entries: mergeResult.stats.keywordMapEntries,
74
- binder_items: mergeResult.stats.binderItems,
75
- part_chapter_assignments: mergeResult.stats.partChapterAssignments,
76
- },
77
- },
78
- sync: null,
79
- warnings: [],
80
- };
81
- }
82
-
83
- function normalizeSceneCharacterBatchResult(batchResult) {
84
- return {
85
- ok: true,
86
- ...batchResult,
87
- };
88
- }
89
-
90
- async function main() {
91
- const requestPath = process.argv[2];
92
- const resultPath = process.argv[3];
93
-
94
- if (!requestPath || !resultPath) {
95
- throw new Error("Usage: node scripts/async-job-runner.mjs <request.json> <result.json>");
96
- }
97
-
98
- const request = JSON.parse(fs.readFileSync(requestPath, "utf8"));
99
- const syncDir = request.context?.sync_dir;
100
- if (!syncDir) {
101
- throw new Error("Missing sync_dir in async job request context.");
102
- }
103
-
104
- if (request.kind === "import_scrivener_sync") {
105
- const result = importScrivenerSync({
106
- scrivenerDir: request.args?.source_dir,
107
- mcpSyncDir: syncDir,
108
- projectId: request.args?.project_id,
109
- dryRun: Boolean(request.args?.dry_run) || Boolean(request.args?.preflight),
110
- preflight: Boolean(request.args?.preflight),
111
- ignorePatterns: request.args?.ignore_patterns ?? [],
112
- });
113
- writeResult(resultPath, normalizeImportResult(result));
114
- return;
115
- }
116
-
117
- if (request.kind === "merge_scrivener_project_beta") {
118
- const result = mergeScrivenerProjectMetadata({
119
- scrivPath: request.args?.source_project_dir,
120
- mcpSyncDir: syncDir,
121
- projectId: request.args?.project_id,
122
- scenesDir: request.args?.scenes_dir,
123
- dryRun: Boolean(request.args?.dry_run),
124
- organizeByChapters: Boolean(request.args?.organize_by_chapters),
125
- });
126
- writeResult(resultPath, normalizeMergeResult(result));
127
- return;
128
- }
129
-
130
- if (request.kind === "enrich_scene_characters_batch") {
131
- let cancellationRequested = false;
132
- const handleSigterm = () => {
133
- cancellationRequested = true;
134
- };
135
- process.on("SIGTERM", handleSigterm);
136
-
137
- const result = await runSceneCharacterBatch({
138
- syncDir,
139
- args: {
140
- project_id: request.args?.project_id,
141
- dry_run: Boolean(request.args?.dry_run),
142
- replace_mode: request.args?.replace_mode ?? "merge",
143
- include_match_details: Boolean(request.args?.include_match_details),
144
- project_exists: request.args?.project_exists !== false,
145
- target_scenes: request.args?.target_scenes ?? [],
146
- character_rows: request.args?.character_rows ?? [],
147
- },
148
- onProgress: progress => writeProgress({ kind: request.kind, ...progress }),
149
- shouldCancel: () => cancellationRequested,
150
- });
151
- process.off("SIGTERM", handleSigterm);
152
- writeResult(resultPath, normalizeSceneCharacterBatchResult(result));
153
- return;
154
- }
155
-
156
- throw new Error(`Unsupported async job kind '${request.kind}'.`);
157
- }
158
-
159
- try {
160
- await main();
161
- } catch (error) {
162
- const resultPath = process.argv[3];
163
- const requestPath = process.argv[2];
164
- if (resultPath) {
165
- const baseErrorCode = error && typeof error === "object" && typeof error.code === "string"
166
- ? error.code
167
- : "ASYNC_JOB_FAILED";
168
- let requestKind = null;
169
- if (requestPath && fs.existsSync(requestPath)) {
170
- try {
171
- const request = JSON.parse(fs.readFileSync(requestPath, "utf8"));
172
- requestKind = request?.kind ?? null;
173
- } catch {
174
- requestKind = null;
175
- }
176
- }
177
-
178
- const errorCode = requestKind === "merge_scrivener_project_beta" && baseErrorCode === "ASYNC_JOB_FAILED"
179
- ? "SCRIVENER_DIRECT_BETA_FAILED"
180
- : baseErrorCode;
181
-
182
- const errorDetails = {
183
- ...(error && typeof error === "object" && error.pattern ? { pattern: error.pattern } : {}),
184
- ...(error && typeof error === "object" && error.details && typeof error.details === "object" ? error.details : {}),
185
- ...(requestKind === "merge_scrivener_project_beta"
186
- ? {
187
- fallback: "Use import_scrivener_sync with an External Folder Sync export as the stable default path.",
188
- }
189
- : {}),
190
- };
191
- writeResult(resultPath, {
192
- ok: false,
193
- error: {
194
- code: errorCode,
195
- message: error instanceof Error ? error.message : String(error),
196
- ...(Object.keys(errorDetails).length ? { details: errorDetails } : {}),
197
- },
198
- });
199
- }
200
- process.exit(1);
201
- }
1
+ import "../src/scripts/async-job-runner.mjs";
@@ -67,7 +67,7 @@ export function createAsyncJobManager({ db, asyncJobs, ttlMs, runnerDir }) {
67
67
 
68
68
  fs.writeFileSync(requestPath, JSON.stringify(requestPayload, null, 2), "utf8");
69
69
 
70
- const runnerPath = path.join(runnerDir, "scripts", "async-job-runner.mjs");
70
+ const runnerPath = path.join(runnerDir, "src", "scripts", "async-job-runner.mjs");
71
71
  const child = spawn(
72
72
  process.execPath,
73
73
  ["--experimental-sqlite", runnerPath, requestPath, resultPath],
@@ -0,0 +1,201 @@
1
+ import fs from "node:fs";
2
+ import path from "node:path";
3
+ import { importScrivenerSync } from "../sync/importer.js";
4
+ import { mergeScrivenerProjectMetadata } from "../sync/scrivener-direct.js";
5
+ import { runSceneCharacterBatch } from "../sync/scene-character-batch.js";
6
+ import { ASYNC_PROGRESS_PREFIX } from "../runtime/async-progress.js";
7
+
8
// Marker prepended to every progress line on stdout so the parent process
// can distinguish machine-readable telemetry from ordinary output.
const PROGRESS_PREFIX = ASYNC_PROGRESS_PREFIX;
9
+
10
/**
 * Persist a job result as pretty-printed JSON, creating any missing
 * parent directories first.
 *
 * @param {string} resultPath - Destination file path for the result JSON.
 * @param {object} payload - JSON-serializable result payload.
 */
function writeResult(resultPath, payload) {
  const parentDir = path.dirname(resultPath);
  fs.mkdirSync(parentDir, { recursive: true });
  const serialized = JSON.stringify(payload, null, 2);
  fs.writeFileSync(resultPath, serialized, "utf8");
}
14
+
15
/**
 * Emit one progress-telemetry line on stdout, prefixed so the parent can
 * parse it. Serialization and the write both happen inside the try block:
 * progress reporting is best-effort and must never fail the job.
 *
 * @param {object} payload - JSON-serializable progress snapshot.
 */
function writeProgress(payload) {
  try {
    const line = `${PROGRESS_PREFIX}${JSON.stringify(payload)}\n`;
    process.stdout.write(line);
  } catch {
    // Swallowed deliberately — telemetry failures are not job failures.
  }
}
22
+
23
/**
 * Convert an importer result (camelCase internals) into the snake_case
 * payload shape returned to async-job consumers.
 *
 * Preflight (preview) runs additionally expose the preview-only fields;
 * they are appended after the base fields, matching the original key order.
 *
 * @param {object} importResult - Result object from importScrivenerSync.
 * @returns {{ok: true, import: object, sync: null}}
 */
function normalizeImportResult(importResult) {
  // Preview-only fields, present solely when a preflight pass ran.
  const preflightFields = importResult.preflight
    ? {
        files_to_process: importResult.filesToProcess,
        file_previews: importResult.filePreviews,
        existing_sidecars: importResult.existingSidecars,
      }
    : {};

  return {
    ok: true,
    import: {
      source_dir: importResult.scrivenerDir,
      sync_dir: importResult.mcpSyncDir,
      scenes_dir: importResult.scenesDir,
      project_id: importResult.projectId,
      source_files: importResult.sourceFiles,
      created: importResult.created,
      existing: importResult.existing,
      skipped: importResult.skipped,
      beat_markers_seen: importResult.beatMarkersSeen,
      dry_run: importResult.dryRun,
      preflight: importResult.preflight,
      ignored_files: importResult.ignoredFiles,
      ...preflightFields,
    },
    sync: null,
  };
}
51
+
52
/**
 * Map a Scrivener-direct merge result from camelCase internals to the
 * snake_case wire shape expected by async-job consumers.
 *
 * Throws (TypeError) if `stats` is absent, exactly as the original
 * property-chain access would.
 *
 * @param {object} mergeResult - Result from mergeScrivenerProjectMetadata.
 * @returns {{ok: true, merge: object, sync: null, warnings: []}}
 */
function normalizeMergeResult(mergeResult) {
  const {
    scrivPath,
    mcpSyncDir,
    scenesDir,
    projectId,
    dryRun,
    sidecarFiles,
    updated,
    relocated,
    unchanged,
    noData,
    fieldAddCounts,
    previewChanges,
    warnings,
    warningsTruncated,
    warningSummary,
    stats,
  } = mergeResult;

  return {
    ok: true,
    merge: {
      source_project_dir: scrivPath,
      sync_dir: mcpSyncDir,
      scenes_dir: scenesDir,
      project_id: projectId,
      dry_run: dryRun,
      sidecar_files: sidecarFiles,
      updated,
      relocated,
      unchanged,
      no_data: noData,
      field_add_counts: fieldAddCounts,
      preview_changes: previewChanges,
      warnings,
      warnings_truncated: warningsTruncated,
      warning_summary: warningSummary,
      stats: {
        sync_map_entries: stats.syncMapEntries,
        keyword_map_entries: stats.keywordMapEntries,
        binder_items: stats.binderItems,
        part_chapter_assignments: stats.partChapterAssignments,
      },
    },
    sync: null,
    // Top-level warnings are intentionally empty; per-merge warnings live
    // under merge.warnings.
    warnings: [],
  };
}
82
+
83
/**
 * Wrap a scene-character batch result with a success flag.
 *
 * Keys on `batchResult` take precedence — including `ok` itself, if the
 * batch explicitly set one (same precedence as the spread it replaces).
 *
 * @param {object} batchResult - Result from runSceneCharacterBatch.
 * @returns {object} The batch result with `ok: true` as the default.
 */
function normalizeSceneCharacterBatchResult(batchResult) {
  return Object.assign({ ok: true }, batchResult);
}
89
+
90
/**
 * Entry point for the detached async-job runner process.
 *
 * argv[2] — path to a JSON request file ({ kind, args, context }).
 * argv[3] — path where the JSON result file will be written.
 *
 * Dispatches on request.kind, runs the matching sync operation, and
 * writes a normalized snake_case result payload. Throws on a missing
 * sync_dir or an unknown kind; the module-level try/catch converts the
 * error into an error-result file.
 */
async function main() {
  const requestPath = process.argv[2];
  const resultPath = process.argv[3];

  if (!requestPath || !resultPath) {
    // Fixed in 2.12.10: the runner now lives under src/scripts (see the
    // runnerPath built in createAsyncJobManager), so the usage hint must
    // reference the new location.
    throw new Error("Usage: node src/scripts/async-job-runner.mjs <request.json> <result.json>");
  }

  const request = JSON.parse(fs.readFileSync(requestPath, "utf8"));
  const syncDir = request.context?.sync_dir;
  if (!syncDir) {
    throw new Error("Missing sync_dir in async job request context.");
  }

  if (request.kind === "import_scrivener_sync") {
    const result = importScrivenerSync({
      scrivenerDir: request.args?.source_dir,
      mcpSyncDir: syncDir,
      projectId: request.args?.project_id,
      // A preflight pass must never mutate, so it always implies dry-run.
      dryRun: Boolean(request.args?.dry_run) || Boolean(request.args?.preflight),
      preflight: Boolean(request.args?.preflight),
      ignorePatterns: request.args?.ignore_patterns ?? [],
    });
    writeResult(resultPath, normalizeImportResult(result));
    return;
  }

  if (request.kind === "merge_scrivener_project_beta") {
    const result = mergeScrivenerProjectMetadata({
      scrivPath: request.args?.source_project_dir,
      mcpSyncDir: syncDir,
      projectId: request.args?.project_id,
      scenesDir: request.args?.scenes_dir,
      dryRun: Boolean(request.args?.dry_run),
      organizeByChapters: Boolean(request.args?.organize_by_chapters),
    });
    writeResult(resultPath, normalizeMergeResult(result));
    return;
  }

  if (request.kind === "enrich_scene_characters_batch") {
    // Cooperative cancellation: the parent sends SIGTERM and the batch
    // polls shouldCancel between units of work.
    let cancellationRequested = false;
    const handleSigterm = () => {
      cancellationRequested = true;
    };
    process.on("SIGTERM", handleSigterm);

    let result;
    try {
      result = await runSceneCharacterBatch({
        syncDir,
        args: {
          project_id: request.args?.project_id,
          dry_run: Boolean(request.args?.dry_run),
          replace_mode: request.args?.replace_mode ?? "merge",
          include_match_details: Boolean(request.args?.include_match_details),
          project_exists: request.args?.project_exists !== false,
          target_scenes: request.args?.target_scenes ?? [],
          character_rows: request.args?.character_rows ?? [],
        },
        onProgress: progress => writeProgress({ kind: request.kind, ...progress }),
        shouldCancel: () => cancellationRequested,
      });
    } finally {
      // Previously the listener leaked if the batch threw; always detach it.
      process.off("SIGTERM", handleSigterm);
    }
    writeResult(resultPath, normalizeSceneCharacterBatchResult(result));
    return;
  }

  throw new Error(`Unsupported async job kind '${request.kind}'.`);
}
158
+
159
// Module-level driver: run the job and, on ANY failure, try to leave a
// machine-readable error result behind before exiting non-zero. Top-level
// await is valid here because this is an ES module (.mjs).
try {
  await main();
} catch (error) {
  const resultPath = process.argv[3];
  const requestPath = process.argv[2];
  // Only write an error file if we were given a destination; otherwise the
  // non-zero exit code is the only failure signal.
  if (resultPath) {
    // Prefer the error's own string `code` when present; otherwise use the
    // generic runner failure code.
    const baseErrorCode = error && typeof error === "object" && typeof error.code === "string"
      ? error.code
      : "ASYNC_JOB_FAILED";
    // Re-read the request to learn which job kind failed. Best-effort: any
    // parse/read problem leaves requestKind null rather than masking the
    // original error.
    let requestKind = null;
    if (requestPath && fs.existsSync(requestPath)) {
      try {
        const request = JSON.parse(fs.readFileSync(requestPath, "utf8"));
        requestKind = request?.kind ?? null;
      } catch {
        requestKind = null;
      }
    }

    // Generic failures of the beta merge path get a beta-specific code so
    // callers can surface the stable fallback guidance below.
    const errorCode = requestKind === "merge_scrivener_project_beta" && baseErrorCode === "ASYNC_JOB_FAILED"
      ? "SCRIVENER_DIRECT_BETA_FAILED"
      : baseErrorCode;

    // Assemble optional detail fields; spread order means `error.details`
    // entries can override a same-named `pattern` key, and the fallback
    // hint is only attached for the beta merge kind.
    const errorDetails = {
      ...(error && typeof error === "object" && error.pattern ? { pattern: error.pattern } : {}),
      ...(error && typeof error === "object" && error.details && typeof error.details === "object" ? error.details : {}),
      ...(requestKind === "merge_scrivener_project_beta"
        ? {
            fallback: "Use import_scrivener_sync with an External Folder Sync export as the stable default path.",
          }
        : {}),
    };
    writeResult(resultPath, {
      ok: false,
      error: {
        code: errorCode,
        message: error instanceof Error ? error.message : String(error),
        // Omit `details` entirely when there is nothing to report.
        ...(Object.keys(errorDetails).length ? { details: errorDetails } : {}),
      },
    });
  }
  // Non-zero exit tells the job manager the run failed.
  process.exit(1);
}