novel-writer-cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +103 -0
  3. package/agents/chapter-writer.md +142 -0
  4. package/agents/character-weaver.md +117 -0
  5. package/agents/consistency-auditor.md +85 -0
  6. package/agents/plot-architect.md +128 -0
  7. package/agents/quality-judge.md +232 -0
  8. package/agents/style-analyzer.md +109 -0
  9. package/agents/style-refiner.md +97 -0
  10. package/agents/summarizer.md +128 -0
  11. package/agents/world-builder.md +161 -0
  12. package/dist/__tests__/character-voice.test.js +445 -0
  13. package/dist/__tests__/commit-prototype-pollution.test.js +45 -0
  14. package/dist/__tests__/engagement.test.js +382 -0
  15. package/dist/__tests__/foreshadow-visibility.test.js +131 -0
  16. package/dist/__tests__/hook-ledger.test.js +1028 -0
  17. package/dist/__tests__/naming-lint.test.js +132 -0
  18. package/dist/__tests__/narrative-health-injection.test.js +359 -0
  19. package/dist/__tests__/next-step-prejudge-guardrails.test.js +325 -0
  20. package/dist/__tests__/next-step-title-fix.test.js +153 -0
  21. package/dist/__tests__/platform-profile.test.js +274 -0
  22. package/dist/__tests__/promise-ledger.test.js +189 -0
  23. package/dist/__tests__/readability-lint.test.js +209 -0
  24. package/dist/__tests__/text-utils.test.js +39 -0
  25. package/dist/__tests__/title-policy.test.js +147 -0
  26. package/dist/advance.js +75 -0
  27. package/dist/character-voice.js +805 -0
  28. package/dist/checkpoint.js +126 -0
  29. package/dist/cli.js +563 -0
  30. package/dist/cliche-lint.js +515 -0
  31. package/dist/commit.js +1460 -0
  32. package/dist/consistency-auditor.js +684 -0
  33. package/dist/engagement.js +687 -0
  34. package/dist/errors.js +7 -0
  35. package/dist/fingerprint.js +16 -0
  36. package/dist/foreshadow-visibility.js +214 -0
  37. package/dist/fs-utils.js +68 -0
  38. package/dist/hook-ledger.js +721 -0
  39. package/dist/hook-policy.js +107 -0
  40. package/dist/instruction-gates.js +51 -0
  41. package/dist/instructions.js +406 -0
  42. package/dist/latest-summary-loader.js +29 -0
  43. package/dist/lock.js +121 -0
  44. package/dist/naming-lint.js +531 -0
  45. package/dist/ner.js +73 -0
  46. package/dist/next-step.js +408 -0
  47. package/dist/novel-ask.js +270 -0
  48. package/dist/output.js +9 -0
  49. package/dist/platform-constraints.js +518 -0
  50. package/dist/platform-profile.js +325 -0
  51. package/dist/prejudge-guardrails.js +370 -0
  52. package/dist/project.js +40 -0
  53. package/dist/promise-ledger.js +723 -0
  54. package/dist/readability-lint.js +555 -0
  55. package/dist/safe-parse.js +36 -0
  56. package/dist/safe-path.js +29 -0
  57. package/dist/scoring-weights.js +290 -0
  58. package/dist/steps.js +60 -0
  59. package/dist/text-utils.js +18 -0
  60. package/dist/title-policy.js +251 -0
  61. package/dist/type-guards.js +6 -0
  62. package/dist/validate.js +131 -0
  63. package/docs/user/README.md +17 -0
  64. package/docs/user/guardrails.md +179 -0
  65. package/docs/user/interactive-gates.md +124 -0
  66. package/docs/user/novel-cli.md +289 -0
  67. package/docs/user/ops.md +123 -0
  68. package/docs/user/quick-start.md +97 -0
  69. package/docs/user/spec-system.md +166 -0
  70. package/docs/user/storylines.md +144 -0
  71. package/package.json +48 -0
  72. package/schemas/README.md +18 -0
  73. package/schemas/character-voice-drift.schema.json +135 -0
  74. package/schemas/character-voice-profiles.schema.json +141 -0
  75. package/schemas/engagement-metrics.schema.json +38 -0
  76. package/schemas/hook-ledger.schema.json +108 -0
  77. package/schemas/platform-profile.schema.json +235 -0
  78. package/schemas/promise-ledger.schema.json +97 -0
  79. package/scripts/calibrate-quality-judge.sh +91 -0
  80. package/scripts/compare-regression-runs.sh +86 -0
  81. package/scripts/lib/_common.py +131 -0
  82. package/scripts/lib/calibrate_quality_judge.py +312 -0
  83. package/scripts/lib/compare_regression_runs.py +142 -0
  84. package/scripts/lib/run_regression.py +621 -0
  85. package/scripts/lint-blacklist.sh +201 -0
  86. package/scripts/lint-cliche.sh +370 -0
  87. package/scripts/lint-readability.sh +404 -0
  88. package/scripts/query-foreshadow.sh +252 -0
  89. package/scripts/run-ner.sh +669 -0
  90. package/scripts/run-regression.sh +122 -0
  91. package/skills/cli-step/SKILL.md +158 -0
  92. package/skills/continue/SKILL.md +348 -0
  93. package/skills/continue/references/context-contracts.md +169 -0
  94. package/skills/continue/references/continuity-checks.md +187 -0
  95. package/skills/continue/references/file-protocols.md +64 -0
  96. package/skills/continue/references/foreshadowing.md +130 -0
  97. package/skills/continue/references/gate-decision.md +53 -0
  98. package/skills/continue/references/periodic-maintenance.md +46 -0
  99. package/skills/novel-writing/SKILL.md +77 -0
  100. package/skills/novel-writing/references/quality-rubric.md +140 -0
  101. package/skills/novel-writing/references/style-guide.md +145 -0
  102. package/skills/start/SKILL.md +458 -0
  103. package/skills/start/references/quality-review.md +86 -0
  104. package/skills/start/references/setting-update.md +44 -0
  105. package/skills/start/references/vol-planning.md +61 -0
  106. package/skills/start/references/vol-review.md +58 -0
  107. package/skills/status/SKILL.md +116 -0
  108. package/skills/status/references/sample-output.md +60 -0
  109. package/templates/ai-blacklist.json +79 -0
  110. package/templates/brief-template.md +46 -0
  111. package/templates/genre-weight-profiles.json +90 -0
  112. package/templates/novel-ask/example.answer.json +12 -0
  113. package/templates/novel-ask/example.question.json +51 -0
  114. package/templates/platform-profile.json +148 -0
  115. package/templates/style-profile-template.json +58 -0
  116. package/templates/web-novel-cliche-lint.json +41 -0
@@ -0,0 +1,518 @@
1
+ import { join } from "node:path";
2
+ import { NovelCliError } from "./errors.js";
3
+ import { fingerprintFile, fingerprintsMatch } from "./fingerprint.js";
4
+ import { ensureDir, pathExists, readJsonFile, writeJsonFile } from "./fs-utils.js";
5
+ import { runNer } from "./ner.js";
6
+ import { pad3 } from "./steps.js";
7
+ import { computeTitlePolicyReport } from "./title-policy.js";
8
+ import { isPlainObject } from "./type-guards.js";
9
// How many preceding chapters are scanned when deciding whether an entity is
// "recently mentioned" for the info-load metrics (see collectRecentEntityTexts).
const INFO_LOAD_WINDOW_CHAPTERS = 10;
10
/**
 * Count the non-whitespace Unicode code points in `text`.
 * Counting code points (not UTF-16 units) keeps CJK text and emoji accurate.
 *
 * @param {string} text - Raw chapter text.
 * @returns {number} Number of non-whitespace code points.
 */
function countNonWhitespaceChars(text) {
    let total = 0;
    for (const ch of text) {
        // `for...of` iterates by code point, so surrogate pairs count once.
        if (!/\s/u.test(ch)) {
            total += 1;
        }
    }
    return total;
}
14
/**
 * Locate literal, non-overlapping occurrences of `phrase` in `text`.
 *
 * @param {string} text - Text to scan.
 * @param {string} phrase - Literal phrase (no regex semantics).
 * @returns {{count: number, lines: number[], snippets: string[]}}
 *   Total non-overlapping hit count, 1-based line numbers of matching lines
 *   (capped at 20), and up to 5 trimmed snippet lines (truncated to 160 chars
 *   with a trailing ellipsis).
 */
function findPhraseHits(text, phrase) {
    if (!phrase) {
        return { count: 0, lines: [], snippets: [] };
    }
    // Total occurrences, advancing past each match so hits never overlap.
    let count = 0;
    for (let at = text.indexOf(phrase); at >= 0; at = text.indexOf(phrase, at + phrase.length)) {
        count += 1;
    }
    const MAX_LINES = 20;
    const MAX_SNIPPETS = 5;
    const MAX_SNIPPET_CHARS = 160;
    const lines = [];
    const snippets = [];
    text.split(/\r?\n/gu).forEach((line, index) => {
        if (!line.includes(phrase)) {
            return;
        }
        lines.push(index + 1);
        if (snippets.length < MAX_SNIPPETS) {
            const trimmed = line.trim();
            snippets.push(trimmed.length > MAX_SNIPPET_CHARS
                ? `${trimmed.slice(0, MAX_SNIPPET_CHARS)}…`
                : trimmed);
        }
    });
    return { count, lines: lines.slice(0, MAX_LINES), snippets };
}
40
// Common [simplified, traditional] Chinese character pairs used as a heuristic
// signal for mixed-script detection (see detectScriptConsistency). Presence of
// characters from both columns in one chapter suggests inconsistent script use.
// NOTE(review): some left-column forms (e.g. 后, 面, 台, 着, 余) are also valid
// traditional characters in certain senses, so matches are signals, not proof.
const SIMPLIFIED_TRADITIONAL_PAIRS = [
    ["后", "後"],
    ["里", "裡"],
    ["发", "發"],
    ["复", "復"],
    ["面", "麵"],
    ["台", "臺"],
    ["万", "萬"],
    ["云", "雲"],
    ["么", "麼"],
    ["为", "為"],
    ["于", "於"],
    ["众", "眾"],
    ["优", "優"],
    ["会", "會"],
    ["体", "體"],
    ["余", "餘"],
    ["伤", "傷"],
    ["传", "傳"],
    ["价", "價"],
    ["儿", "兒"],
    ["写", "寫"],
    ["凤", "鳳"],
    ["刘", "劉"],
    ["别", "別"],
    ["制", "製"],
    ["动", "動"],
    ["历", "歷"],
    ["听", "聽"],
    ["国", "國"],
    ["图", "圖"],
    ["场", "場"],
    ["声", "聲"],
    ["够", "夠"],
    ["妈", "媽"],
    ["学", "學"],
    ["宁", "寧"],
    ["实", "實"],
    ["对", "對"],
    ["将", "將"],
    ["尽", "盡"],
    ["归", "歸"],
    ["当", "當"],
    ["录", "錄"],
    ["开", "開"],
    ["张", "張"],
    ["强", "強"],
    ["忆", "憶"],
    ["态", "態"],
    ["总", "總"],
    ["战", "戰"],
    ["时", "時"],
    ["术", "術"],
    ["来", "來"],
    ["条", "條"],
    ["气", "氣"],
    ["没", "沒"],
    ["灭", "滅"],
    ["灵", "靈"],
    ["点", "點"],
    ["现", "現"],
    ["着", "著"],
    ["离", "離"],
    ["种", "種"],
    ["线", "線"],
    ["绝", "絕"],
    ["续", "續"],
    ["罗", "羅"],
    ["脸", "臉"],
    ["见", "見"],
    ["觉", "覺"],
    ["说", "說"],
    ["语", "語"],
    ["识", "識"],
    ["轻", "輕"],
    ["这", "這"],
    ["进", "進"],
    ["连", "連"],
    ["过", "過"],
    ["还", "還"],
    ["远", "遠"],
    ["门", "門"],
    ["阴", "陰"],
    ["难", "難"],
    ["陈", "陳"],
    ["风", "風"],
    ["飞", "飛"],
    ["马", "馬"]
];
129
/**
 * Best-effort detector for mixed simplified/traditional Chinese text.
 * Tallies occurrences of the watched characters from
 * SIMPLIFIED_TRADITIONAL_PAIRS and reports whether each script produced any
 * signal, plus up to 8 sample characters per script (most frequent first;
 * ties keep pair-table order thanks to stable sort).
 *
 * @param {string} text - Chapter text to inspect.
 * @returns {{has_simplified_signal: boolean, has_traditional_signal: boolean,
 *            samples: {simplified: string[], traditional: string[]}}}
 */
function detectScriptConsistency(text) {
    // Single pass over the text: count only the characters we care about.
    const watched = new Set(SIMPLIFIED_TRADITIONAL_PAIRS.flat());
    const freq = new Map();
    for (const ch of text) {
        if (watched.has(ch)) {
            freq.set(ch, (freq.get(ch) ?? 0) + 1);
        }
    }
    // Split the counts back into per-script maps, preserving pair-table order.
    const simplifiedCounts = new Map();
    const traditionalCounts = new Map();
    for (const [simp, trad] of SIMPLIFIED_TRADITIONAL_PAIRS) {
        const simpCount = freq.get(simp) ?? 0;
        const tradCount = freq.get(trad) ?? 0;
        if (simpCount > 0) {
            simplifiedCounts.set(simp, simpCount);
        }
        if (tradCount > 0) {
            traditionalCounts.set(trad, tradCount);
        }
    }
    // Up to 8 sample characters, highest count first (stable for ties).
    const topChars = (counts) => Array.from(counts.entries())
        .sort((a, b) => b[1] - a[1])
        .slice(0, 8)
        .map(([ch]) => ch);
    return {
        has_simplified_signal: simplifiedCounts.size > 0,
        has_traditional_signal: traditionalCounts.size > 0,
        samples: {
            simplified: topChars(simplifiedCounts),
            traditional: topChars(traditionalCounts)
        }
    };
}
163
/**
 * Collect every known entity name from the project state: entity ids plus
 * non-empty `display_name` values across the four entity categories
 * (characters, items, locations, factions).
 *
 * @param {object} state - Project state object keyed by category.
 * @returns {Set<string>} Trimmed known names.
 */
function collectKnownEntityNames(state) {
    const names = new Set();
    // Adds a trimmed name only when it is a non-empty string.
    const addName = (value) => {
        if (typeof value !== "string") {
            return;
        }
        const trimmed = value.trim();
        if (trimmed.length > 0) {
            names.add(trimmed);
        }
    };
    for (const category of ["characters", "items", "locations", "factions"]) {
        const bucket = state[category];
        if (!isPlainObject(bucket)) {
            continue;
        }
        for (const [id, entry] of Object.entries(bucket)) {
            addName(id);
            if (isPlainObject(entry)) {
                addName(entry.display_name);
            }
        }
    }
    return names;
}
181
/**
 * Find `display_name` values shared by more than one entity across the four
 * entity categories.
 *
 * @param {object} state - Project state object keyed by category.
 * @returns {Array<{display_name: string, entity_paths: string[]}>}
 *   One entry per duplicated name, `entity_paths` as sorted "category.id"
 *   strings; results ordered by display name using zh collation.
 */
function findDuplicateDisplayNames(state) {
    // display_name -> list of "category.id" paths carrying that name.
    const byDisplayName = new Map();
    for (const category of ["characters", "items", "locations", "factions"]) {
        const bucket = state[category];
        if (!isPlainObject(bucket)) {
            continue;
        }
        for (const [id, entry] of Object.entries(bucket)) {
            if (!isPlainObject(entry)) {
                continue;
            }
            const display = entry.display_name;
            if (typeof display !== "string") {
                continue;
            }
            const key = display.trim();
            if (key.length === 0) {
                continue;
            }
            const path = `${category}.${id}`;
            const existing = byDisplayName.get(key);
            if (existing) {
                existing.push(path);
            }
            else {
                byDisplayName.set(key, [path]);
            }
        }
    }
    const duplicates = [];
    for (const [display_name, paths] of byDisplayName) {
        // Paths are "category.id" so real duplicates are distinct entries;
        // dedupe defensively anyway before the threshold check.
        const unique = [...new Set(paths)];
        if (unique.length > 1) {
            duplicates.push({ display_name, entity_paths: unique.sort() });
        }
    }
    duplicates.sort((a, b) => a.display_name.localeCompare(b.display_name, "zh"));
    return duplicates;
}
210
/**
 * Flatten an NER result into a Map keyed by entity text.
 * Categories are visited in a fixed order (character, location, time_marker,
 * event); the first category to claim a text wins — later duplicates are
 * ignored. Evidence is the first mention rendered as "L<line>: <snippet>",
 * or null when the entity has no mentions.
 *
 * @param {{entities: object}} ner - NER output with per-category entity lists.
 * @returns {Map<string, {category: string, evidence: string|null}>}
 */
function buildEntityIndex(ner) {
    const index = new Map();
    const sources = [
        ["character", ner.entities.characters],
        ["location", ner.entities.locations],
        ["time_marker", ner.entities.time_markers],
        ["event", ner.entities.events]
    ];
    for (const [category, entities] of sources) {
        for (const entity of entities) {
            if (index.has(entity.text)) {
                continue; // first category wins
            }
            const firstMention = entity.mentions[0] ?? null;
            index.set(entity.text, {
                category,
                evidence: firstMention ? `L${firstMention.line}: ${firstMention.snippet}` : null
            });
        }
    }
    return index;
}
227
/**
 * Gather the entity texts mentioned in the previous INFO_LOAD_WINDOW_CHAPTERS
 * chapters (best effort — chapters whose NER run fails simply contribute no
 * texts). NER runs with bounded parallelism (at most 4 concurrent).
 *
 * @param {{rootDir: string, chapter: number}} args - Project root and the
 *   current chapter number (its predecessors are scanned).
 * @returns {Promise<Set<string>>} Entity texts seen in the window.
 */
async function collectRecentEntityTexts(args) {
    const firstChapter = Math.max(1, args.chapter - INFO_LOAD_WINDOW_CHAPTERS);
    const lastChapter = args.chapter - 1;
    // Resolve which chapter files actually exist before spawning workers.
    const existingPaths = [];
    for (let chapter = firstChapter; chapter <= lastChapter; chapter += 1) {
        const absPath = join(args.rootDir, `chapters/chapter-${pad3(chapter)}.md`);
        if (await pathExists(absPath)) {
            existingPaths.push(absPath);
        }
    }
    const recentTexts = new Set();
    const MAX_PARALLEL_NER = 4;
    let nextIndex = 0;
    // Worker-pool: each worker synchronously claims the next index (no await
    // between read and increment, so claims cannot race) and runs NER on it.
    const worker = async () => {
        while (nextIndex < existingPaths.length) {
            const absPath = existingPaths[nextIndex];
            nextIndex += 1;
            try {
                const nerResult = await runNer(absPath);
                for (const text of buildEntityIndex(nerResult).keys()) {
                    recentTexts.add(text);
                }
            }
            catch {
                // Best-effort: ignore chapters that fail NER.
            }
        }
    };
    const poolSize = Math.min(MAX_PARALLEL_NER, existingPaths.length);
    await Promise.all(Array.from({ length: poolSize }, worker));
    return recentTexts;
}
260
/**
 * Precompute NER-derived info-load inputs for a chapter ahead of the commit
 * pipeline. The chapter file is fingerprinted before and after the NER run so
 * a concurrent edit invalidates the result instead of yielding stale data.
 * Never throws: any failure degrades to a `status: "skipped"` result.
 *
 * @param {{chapterAbsPath: string, rootDir: string, chapter: number}} args
 * @returns {Promise<object>} `{status: "pass", chapter_fingerprint,
 *   current_index, recent_texts}` on success, or `{status: "skipped", error,
 *   ...nulls}` on failure.
 */
export async function precomputeInfoLoadNer(args) {
    // Shared shape for every failure path.
    const skipped = (error) => ({
        status: "skipped",
        error,
        chapter_fingerprint: null,
        current_index: null,
        recent_texts: null
    });
    try {
        const fingerprintBefore = await fingerprintFile(args.chapterAbsPath);
        const nerResult = await runNer(args.chapterAbsPath);
        const fingerprintAfter = await fingerprintFile(args.chapterAbsPath);
        if (!fingerprintsMatch(fingerprintBefore, fingerprintAfter)) {
            return skipped("Chapter changed while running NER; skipping info-load NER.");
        }
        return {
            status: "pass",
            chapter_fingerprint: fingerprintAfter,
            current_index: buildEntityIndex(nerResult),
            recent_texts: await collectRecentEntityTexts({ rootDir: args.rootDir, chapter: args.chapter })
        };
    }
    catch (err) {
        return skipped(err instanceof Error ? err.message : String(err));
    }
}
283
/**
 * Compute the full platform-constraints report for one chapter:
 * word count vs. platform bounds, compliance (banned words, duplicate entity
 * names, simplified/traditional script consistency), NER-based info-load
 * metrics, and retention (title policy). Every finding is also appended to a
 * flat `issues` list; `has_hard_violations` is true when any issue carries
 * `severity: "hard"`.
 *
 * Info-load metrics prefer the precomputed `args.infoLoadNer` result (from
 * precomputeInfoLoadNer); without it, NER runs inline. NER failure only
 * downgrades the info_load section to "skipped" — the rest of the report is
 * still produced.
 */
export async function computePlatformConstraints(args) {
    const generated_at = new Date().toISOString();
    // "Word count" = non-whitespace code points (CJK-friendly).
    const wordCount = countNonWhitespaceChars(args.chapterText);
    const wc = args.platformProfile.word_count;
    // Hard bounds are violations; target bounds (inside hard) only warn.
    const wcHardViolation = wordCount < wc.hard_min || wordCount > wc.hard_max;
    const wcSoftWarn = !wcHardViolation && (wordCount < wc.target_min || wordCount > wc.target_max);
    const wcStatus = wcHardViolation ? "violation" : wcSoftWarn ? "warn" : "pass";
    const issues = [];
    if (wcHardViolation) {
        issues.push({
            id: "word_count.hard_violation",
            severity: "hard",
            summary: `Word count ${wordCount} is outside hard range ${wc.hard_min}-${wc.hard_max}.`,
            suggestion: "Revise the chapter length to fit platform hard bounds."
        });
    }
    else if (wcSoftWarn) {
        issues.push({
            id: "word_count.target_deviation",
            severity: "soft",
            summary: `Word count ${wordCount} is outside target range ${wc.target_min}-${wc.target_max} (within hard bounds).`,
            suggestion: "Consider adjusting chapter length to better match platform target range."
        });
    }
    // Compliance: banned words.
    const bannedHits = [];
    let bannedTotalHits = 0;
    for (const word of args.platformProfile.compliance.banned_words) {
        const hit = findPhraseHits(args.chapterText, word);
        if (hit.count <= 0)
            continue;
        bannedTotalHits += hit.count;
        bannedHits.push({ word, count: hit.count, lines: hit.lines, snippets: hit.snippets });
    }
    // Most frequent first; ties broken by zh collation of the word itself.
    bannedHits.sort((a, b) => b.count - a.count || a.word.localeCompare(b.word, "zh"));
    const bannedStatus = bannedTotalHits > 0 ? "violation" : "pass";
    if (bannedTotalHits > 0) {
        const top = bannedHits[0];
        issues.push({
            id: "compliance.banned_words",
            severity: "hard",
            summary: `Detected banned words (${bannedTotalHits} hits).`,
            evidence: top ? `${top.word} x${top.count} (${top.snippets[0] ?? "no snippet"})` : undefined,
            suggestion: "Remove or replace banned words."
        });
    }
    // Compliance: duplicate display names (project scope).
    const duplicates = findDuplicateDisplayNames(args.state);
    const dupPolicy = args.platformProfile.compliance.duplicate_name_policy;
    // NOTE(review): dupPolicy is reused directly as the issue severity —
    // presumably "hard" | "soft"; confirm against the platform-profile schema.
    const dupStatus = duplicates.length > 0 ? (dupPolicy === "hard" ? "violation" : "warn") : "pass";
    if (duplicates.length > 0) {
        issues.push({
            id: "compliance.duplicate_names",
            severity: dupPolicy,
            summary: `Duplicate display_name detected (${duplicates.length}).`,
            evidence: duplicates[0] ? `${duplicates[0].display_name}: ${duplicates[0].entity_paths.join(", ")}` : undefined,
            suggestion: "Rename or consolidate duplicate entities to keep display names unique."
        });
    }
    // Compliance: simplified/traditional consistency (best-effort).
    const scriptConsistency = detectScriptConsistency(args.chapterText);
    const mixedScript = scriptConsistency.has_simplified_signal && scriptConsistency.has_traditional_signal;
    const scriptStatus = mixedScript ? "warn" : "pass";
    if (mixedScript) {
        issues.push({
            id: "compliance.script_inconsistency",
            severity: "warn",
            summary: "Mixed simplified/traditional Chinese signals detected in chapter text.",
            evidence: `simplified_samples=${scriptConsistency.samples.simplified.join("")} traditional_samples=${scriptConsistency.samples.traditional.join("")}`,
            suggestion: "Normalize text to a single writing system (simplified or traditional)."
        });
    }
    // Overall compliance status: worst of the three sub-statuses.
    const complianceStatus = [bannedStatus, dupStatus, scriptStatus].includes("violation")
        ? "violation"
        : [bannedStatus, dupStatus, scriptStatus].includes("warn")
            ? "warn"
            : "pass";
    // Info-load metrics.
    let nerStatus = "pass";
    let nerError;
    let unknownEntitiesCount = null;
    let newEntitiesCount = null;
    let newTermsPer1k = null;
    let unknownEntities = null;
    let newEntities = null;
    try {
        const pre = args.infoLoadNer;
        // A skipped precompute propagates its reason via the catch below.
        if (pre && pre.status === "skipped") {
            throw new Error(pre.error ?? "NER precompute skipped.");
        }
        // Inline NER fallback: when called without precompute (e.g. standalone callers outside commit pipeline).
        // In commit.ts, precomputeInfoLoadNer always provides these, so the ?? branches are not reached.
        const currentIndex = pre?.current_index ?? buildEntityIndex(await runNer(args.chapterAbsPath));
        const recentTexts = pre?.recent_texts ?? await collectRecentEntityTexts({ rootDir: args.rootDir, chapter: args.chapter });
        const knownNames = collectKnownEntityNames(args.state);
        const unknown = [];
        const newlyIntroduced = [];
        for (const [text, meta] of currentIndex.entries()) {
            // "Unknown": not registered in project state at all.
            if (!knownNames.has(text))
                unknown.push({ text, category: meta.category, evidence: meta.evidence });
            // "New": not seen in the recent-chapter window; time markers exempt.
            if (!recentTexts.has(text) && meta.category !== "time_marker")
                newlyIntroduced.push({ text, category: meta.category, evidence: meta.evidence });
        }
        unknown.sort((a, b) => a.text.localeCompare(b.text, "zh"));
        newlyIntroduced.sort((a, b) => a.text.localeCompare(b.text, "zh"));
        unknownEntities = unknown;
        newEntities = newlyIntroduced;
        unknownEntitiesCount = unknown.length;
        newEntitiesCount = newlyIntroduced.length;
        // Density of new terms per 1000 chars, rounded to 3 decimal places.
        const denom = wordCount > 0 ? wordCount / 1000.0 : null;
        if (denom && denom > 0) {
            newTermsPer1k = Math.round((newlyIntroduced.length / denom) * 1000) / 1000;
        }
        else {
            newTermsPer1k = 0;
        }
    }
    catch (err) {
        // NER problems only skip the info-load section, never the whole report.
        nerStatus = "skipped";
        const message = err instanceof Error ? err.message : String(err);
        nerError = message;
    }
    const info = args.platformProfile.info_load;
    const infoIssues = [];
    if (unknownEntitiesCount !== null && unknownEntitiesCount > info.max_unknown_entities_per_chapter) {
        infoIssues.push({
            id: "info_load.unknown_entities_exceeded",
            severity: "soft",
            summary: `Unknown entities ${unknownEntitiesCount} exceeds max ${info.max_unknown_entities_per_chapter}.`,
            evidence: unknownEntities?.[0] ? `${unknownEntities[0].text} (${unknownEntities[0].category})` : undefined,
            suggestion: "Reduce new/unknown names or add brief reintroductions/grounding context."
        });
    }
    if (newEntitiesCount !== null && newEntitiesCount > info.max_new_entities_per_chapter) {
        infoIssues.push({
            id: "info_load.new_entities_exceeded",
            severity: "soft",
            summary: `New entities ${newEntitiesCount} exceeds max ${info.max_new_entities_per_chapter}.`,
            evidence: newEntities?.[0] ? `${newEntities[0].text} (${newEntities[0].category})` : undefined,
            suggestion: "Reduce the number of newly introduced entities in a single chapter."
        });
    }
    if (newTermsPer1k !== null && newTermsPer1k > info.max_new_terms_per_1k_words) {
        infoIssues.push({
            id: "info_load.new_terms_density_exceeded",
            severity: "soft",
            summary: `New terms per 1k ${newTermsPer1k} exceeds max ${info.max_new_terms_per_1k_words}.`,
            suggestion: "Spread new terminology across chapters or add clearer context for each new term."
        });
    }
    issues.push(...infoIssues);
    const infoStatus = nerStatus === "skipped"
        ? "skipped"
        : infoIssues.length > 0
            ? "warn"
            : "pass";
    // Retention: title-policy findings merge into the flat issues list too.
    const titleReport = computeTitlePolicyReport({ chapter: args.chapter, chapterText: args.chapterText, platformProfile: args.platformProfile });
    // Shallow-copy each issue so the report and titleReport do not share objects.
    const titleIssues = titleReport.issues.map((i) => ({ ...i }));
    issues.push(...titleIssues);
    const retention = {
        status: titleReport.status,
        title_policy: {
            status: titleReport.status,
            enabled: Boolean(titleReport.policy?.enabled),
            auto_fix: Boolean(titleReport.policy?.auto_fix),
            title: { text: titleReport.title.text, chars: titleReport.title.chars },
            issues: titleIssues,
            has_hard_violations: titleReport.has_hard_violations
        }
    };
    const hasHard = issues.some((i) => i.severity === "hard");
    return {
        schema_version: 1,
        generated_at,
        scope: { chapter: args.chapter },
        platform: args.platformProfile.platform,
        platform_profile: { schema_version: args.platformProfile.schema_version, created_at: args.platformProfile.created_at },
        word_count: {
            chars: wordCount,
            target_min: wc.target_min,
            target_max: wc.target_max,
            hard_min: wc.hard_min,
            hard_max: wc.hard_max,
            status: wcStatus
        },
        compliance: {
            status: complianceStatus,
            banned_words: { status: bannedStatus, total_hits: bannedTotalHits, hits: bannedHits },
            duplicate_names: { status: dupStatus, policy: dupPolicy, duplicates },
            script_consistency: { status: scriptStatus, ...scriptConsistency }
        },
        info_load: {
            status: infoStatus,
            window_chapters: INFO_LOAD_WINDOW_CHAPTERS,
            ner: { status: nerStatus, ...(nerError ? { error: nerError } : {}) },
            unknown_entities_count: unknownEntitiesCount,
            max_unknown_entities_per_chapter: info.max_unknown_entities_per_chapter,
            new_entities_count: newEntitiesCount,
            max_new_entities_per_chapter: info.max_new_entities_per_chapter,
            new_terms_per_1k_words: newTermsPer1k,
            max_new_terms_per_1k_words: info.max_new_terms_per_1k_words,
            unknown_entities: unknownEntities,
            new_entities: newEntities
        },
        retention,
        issues,
        has_hard_violations: hasHard
    };
}
492
/**
 * Persist a platform-constraints report under logs/platform-constraints/:
 * a per-chapter history file plus latest.json (both receive the same report).
 *
 * @param {{rootDir: string, chapter: number, report: object}} args
 * @returns {Promise<{latestRel: string, historyRel: string}>} Paths relative
 *   to the project root.
 */
export async function writePlatformConstraintsLogs(args) {
    const dirRel = "logs/platform-constraints";
    await ensureDir(join(args.rootDir, dirRel));
    const historyRel = `${dirRel}/platform-constraints-chapter-${pad3(args.chapter)}.json`;
    const latestRel = `${dirRel}/latest.json`;
    // History first, then latest, so latest.json never points ahead of history.
    for (const rel of [historyRel, latestRel]) {
        await writeJsonFile(join(args.rootDir, rel), args.report);
    }
    return { latestRel, historyRel };
}
502
/**
 * Merge a platform-constraints summary into an existing eval JSON file.
 * Reads the file, sets `platform` and `platform_constraints`, writes it back.
 *
 * @param {{evalAbsPath: string, evalRelPath: string, platform: *,
 *          reportRelPath: string, report: object}} args
 * @throws {NovelCliError} exit code 2 when the eval file is not a JSON object.
 */
export async function attachPlatformConstraintsToEval(args) {
    const evalDoc = await readJsonFile(args.evalAbsPath);
    if (!isPlainObject(evalDoc)) {
        throw new NovelCliError(`Invalid ${args.evalRelPath}: eval JSON must be an object.`, 2);
    }
    const { report } = args;
    evalDoc.platform = args.platform;
    evalDoc.platform_constraints = {
        report_path: args.reportRelPath,
        word_count: report.word_count,
        compliance: report.compliance,
        info_load: report.info_load,
        retention: report.retention,
        has_hard_violations: report.has_hard_violations,
        issues: report.issues
    };
    await writeJsonFile(args.evalAbsPath, evalDoc);
}