novel-writer-cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +103 -0
- package/agents/chapter-writer.md +142 -0
- package/agents/character-weaver.md +117 -0
- package/agents/consistency-auditor.md +85 -0
- package/agents/plot-architect.md +128 -0
- package/agents/quality-judge.md +232 -0
- package/agents/style-analyzer.md +109 -0
- package/agents/style-refiner.md +97 -0
- package/agents/summarizer.md +128 -0
- package/agents/world-builder.md +161 -0
- package/dist/__tests__/character-voice.test.js +445 -0
- package/dist/__tests__/commit-prototype-pollution.test.js +45 -0
- package/dist/__tests__/engagement.test.js +382 -0
- package/dist/__tests__/foreshadow-visibility.test.js +131 -0
- package/dist/__tests__/hook-ledger.test.js +1028 -0
- package/dist/__tests__/naming-lint.test.js +132 -0
- package/dist/__tests__/narrative-health-injection.test.js +359 -0
- package/dist/__tests__/next-step-prejudge-guardrails.test.js +325 -0
- package/dist/__tests__/next-step-title-fix.test.js +153 -0
- package/dist/__tests__/platform-profile.test.js +274 -0
- package/dist/__tests__/promise-ledger.test.js +189 -0
- package/dist/__tests__/readability-lint.test.js +209 -0
- package/dist/__tests__/text-utils.test.js +39 -0
- package/dist/__tests__/title-policy.test.js +147 -0
- package/dist/advance.js +75 -0
- package/dist/character-voice.js +805 -0
- package/dist/checkpoint.js +126 -0
- package/dist/cli.js +563 -0
- package/dist/cliche-lint.js +515 -0
- package/dist/commit.js +1460 -0
- package/dist/consistency-auditor.js +684 -0
- package/dist/engagement.js +687 -0
- package/dist/errors.js +7 -0
- package/dist/fingerprint.js +16 -0
- package/dist/foreshadow-visibility.js +214 -0
- package/dist/fs-utils.js +68 -0
- package/dist/hook-ledger.js +721 -0
- package/dist/hook-policy.js +107 -0
- package/dist/instruction-gates.js +51 -0
- package/dist/instructions.js +406 -0
- package/dist/latest-summary-loader.js +29 -0
- package/dist/lock.js +121 -0
- package/dist/naming-lint.js +531 -0
- package/dist/ner.js +73 -0
- package/dist/next-step.js +408 -0
- package/dist/novel-ask.js +270 -0
- package/dist/output.js +9 -0
- package/dist/platform-constraints.js +518 -0
- package/dist/platform-profile.js +325 -0
- package/dist/prejudge-guardrails.js +370 -0
- package/dist/project.js +40 -0
- package/dist/promise-ledger.js +723 -0
- package/dist/readability-lint.js +555 -0
- package/dist/safe-parse.js +36 -0
- package/dist/safe-path.js +29 -0
- package/dist/scoring-weights.js +290 -0
- package/dist/steps.js +60 -0
- package/dist/text-utils.js +18 -0
- package/dist/title-policy.js +251 -0
- package/dist/type-guards.js +6 -0
- package/dist/validate.js +131 -0
- package/docs/user/README.md +17 -0
- package/docs/user/guardrails.md +179 -0
- package/docs/user/interactive-gates.md +124 -0
- package/docs/user/novel-cli.md +289 -0
- package/docs/user/ops.md +123 -0
- package/docs/user/quick-start.md +97 -0
- package/docs/user/spec-system.md +166 -0
- package/docs/user/storylines.md +144 -0
- package/package.json +48 -0
- package/schemas/README.md +18 -0
- package/schemas/character-voice-drift.schema.json +135 -0
- package/schemas/character-voice-profiles.schema.json +141 -0
- package/schemas/engagement-metrics.schema.json +38 -0
- package/schemas/hook-ledger.schema.json +108 -0
- package/schemas/platform-profile.schema.json +235 -0
- package/schemas/promise-ledger.schema.json +97 -0
- package/scripts/calibrate-quality-judge.sh +91 -0
- package/scripts/compare-regression-runs.sh +86 -0
- package/scripts/lib/_common.py +131 -0
- package/scripts/lib/calibrate_quality_judge.py +312 -0
- package/scripts/lib/compare_regression_runs.py +142 -0
- package/scripts/lib/run_regression.py +621 -0
- package/scripts/lint-blacklist.sh +201 -0
- package/scripts/lint-cliche.sh +370 -0
- package/scripts/lint-readability.sh +404 -0
- package/scripts/query-foreshadow.sh +252 -0
- package/scripts/run-ner.sh +669 -0
- package/scripts/run-regression.sh +122 -0
- package/skills/cli-step/SKILL.md +158 -0
- package/skills/continue/SKILL.md +348 -0
- package/skills/continue/references/context-contracts.md +169 -0
- package/skills/continue/references/continuity-checks.md +187 -0
- package/skills/continue/references/file-protocols.md +64 -0
- package/skills/continue/references/foreshadowing.md +130 -0
- package/skills/continue/references/gate-decision.md +53 -0
- package/skills/continue/references/periodic-maintenance.md +46 -0
- package/skills/novel-writing/SKILL.md +77 -0
- package/skills/novel-writing/references/quality-rubric.md +140 -0
- package/skills/novel-writing/references/style-guide.md +145 -0
- package/skills/start/SKILL.md +458 -0
- package/skills/start/references/quality-review.md +86 -0
- package/skills/start/references/setting-update.md +44 -0
- package/skills/start/references/vol-planning.md +61 -0
- package/skills/start/references/vol-review.md +58 -0
- package/skills/status/SKILL.md +116 -0
- package/skills/status/references/sample-output.md +60 -0
- package/templates/ai-blacklist.json +79 -0
- package/templates/brief-template.md +46 -0
- package/templates/genre-weight-profiles.json +90 -0
- package/templates/novel-ask/example.answer.json +12 -0
- package/templates/novel-ask/example.question.json +51 -0
- package/templates/platform-profile.json +148 -0
- package/templates/style-profile-template.json +58 -0
- package/templates/web-novel-cliche-lint.json +41 -0
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
import { join } from "node:path";
|
|
2
|
+
import { NovelCliError } from "./errors.js";
|
|
3
|
+
import { pathExists, readJsonFile, writeJsonFile } from "./fs-utils.js";
|
|
4
|
+
import { isPlainObject } from "./type-guards.js";
|
|
5
|
+
// Scoring dimensions that every weight profile must cover.
const CORE_DIMENSIONS = [
    "plot_logic",
    "character",
    "immersion",
    "foreshadowing",
    "pacing",
    "style_naturalness",
    "emotional_impact",
    "storyline_coherence",
];
// Dimensions a profile MAY cover but is not required to; hook_strength only
// participates in effective weights when the hook policy is enabled
// (see computeEffectiveScoringWeights).
const OPTIONAL_DIMENSIONS = ["hook_strength"];
// Every dimension id accepted anywhere in the config files.
const KNOWN_DIMENSIONS = new Set([...CORE_DIMENSIONS, ...OPTIONAL_DIMENSIONS]);
|
|
17
|
+
/**
 * Reads `field` from `obj` and returns it, requiring an integer number.
 * Throws NovelCliError (exit code 2) when the value is missing or not an int.
 */
function requireIntField(obj, field, file) {
    const value = obj[field];
    const isInt = typeof value === "number" && Number.isInteger(value);
    if (!isInt) {
        throw new NovelCliError(`Invalid ${file}: '${field}' must be an int.`, 2);
    }
    return value;
}
|
|
23
|
+
/**
 * Reads `field` from `obj` and returns its trimmed value, requiring a
 * non-empty (after trimming) string. Throws NovelCliError otherwise.
 */
function requireStringField(obj, field, file) {
    const value = obj[field];
    const trimmed = typeof value === "string" ? value.trim() : "";
    if (trimmed.length === 0) {
        throw new NovelCliError(`Invalid ${file}: '${field}' must be a non-empty string.`, 2);
    }
    return trimmed;
}
|
|
29
|
+
/**
 * Validates that `value` is a finite number >= 0 and returns it unchanged.
 * Throws NovelCliError naming `field` in `file` otherwise.
 * Note the parameter order differs from the other require* helpers.
 */
function requireFiniteNonNegativeNumber(value, file, field) {
    const ok = typeof value === "number" && Number.isFinite(value) && value >= 0;
    if (ok) {
        return value;
    }
    throw new NovelCliError(`Invalid ${file}: '${field}' must be a finite number >= 0.`, 2);
}
|
|
35
|
+
/**
 * Reads `field` from `obj`, requiring an array of non-empty strings.
 * Returns the trimmed, de-duplicated values (first-occurrence order).
 * Throws NovelCliError when the value is not such an array or the
 * de-duplicated result is empty.
 */
function requireStringArrayField(obj, field, file) {
    const fail = () => new NovelCliError(`Invalid ${file}: '${field}' must be a non-empty string array.`, 2);
    const value = obj[field];
    if (!Array.isArray(value)) {
        throw fail();
    }
    for (const item of value) {
        if (typeof item !== "string" || item.trim().length === 0) {
            throw fail();
        }
    }
    const deduped = [...new Set(value.map((s) => s.trim()))].filter((s) => s.length > 0);
    if (deduped.length === 0) {
        throw fail();
    }
    return deduped;
}
|
|
45
|
+
/**
 * Validates the 'normalization' section of the config.
 * Only method 'scale_to_sum' is supported; 'sum_to' must be a finite number
 * strictly greater than 0 and 'tolerance' a finite number >= 0.
 * Returns a fresh { method, sum_to, tolerance } object.
 */
function parseNormalizationSpec(raw, file) {
    if (!isPlainObject(raw)) {
        throw new NovelCliError(`Invalid ${file}: 'normalization' must be an object.`, 2);
    }
    const method = requireStringField(raw, "method", file);
    if (method !== "scale_to_sum") {
        throw new NovelCliError(`Invalid ${file}: 'normalization.method' must be 'scale_to_sum'.`, 2);
    }
    const sum_to = requireFiniteNonNegativeNumber(raw.sum_to, file, "normalization.sum_to");
    if (!(sum_to > 0)) {
        throw new NovelCliError(`Invalid ${file}: 'normalization.sum_to' must be > 0.`, 2);
    }
    const tolerance = requireFiniteNonNegativeNumber(raw.tolerance, file, "normalization.tolerance");
    return { method: "scale_to_sum", sum_to, tolerance };
}
|
|
59
|
+
/**
 * Parses and fully validates a genre-weight-profiles JSON object.
 *
 * @param raw  - parsed JSON value (must be a plain object).
 * @param file - file name/path used only in error messages.
 * @returns the validated config: { schema_version, dimensions, normalization,
 *          default_profile_by_drive_type, profiles } plus optional trimmed
 *          `description` / `last_updated`.
 * @throws NovelCliError (exit code 2) on any schema violation.
 */
export function parseGenreWeightProfiles(raw, file) {
    if (!isPlainObject(raw))
        throw new NovelCliError(`Invalid ${file}: expected a JSON object.`, 2);
    const obj = raw;
    // Only schema_version 1 is supported.
    const schema_version = requireIntField(obj, "schema_version", file);
    if (schema_version !== 1)
        throw new NovelCliError(`Invalid ${file}: 'schema_version' must be 1.`, 2);
    // 'dimensions' must be a non-empty string array of known dimension ids...
    const dimensions = requireStringArrayField(obj, "dimensions", file);
    for (const dim of dimensions) {
        if (!KNOWN_DIMENSIONS.has(dim)) {
            throw new NovelCliError(`Invalid ${file}: unknown dimension '${dim}' in dimensions (allowed: ${Array.from(KNOWN_DIMENSIONS).join(", ")}).`, 2);
        }
    }
    // ...and must include every core dimension (optional ones may be absent).
    for (const required of CORE_DIMENSIONS) {
        if (!dimensions.includes(required)) {
            throw new NovelCliError(`Invalid ${file}: dimensions missing required core dimension '${required}'.`, 2);
        }
    }
    const normalization = parseNormalizationSpec(obj.normalization, file);
    // Map of drive type -> default profile id; keys/values are trimmed.
    if (!isPlainObject(obj.default_profile_by_drive_type)) {
        throw new NovelCliError(`Invalid ${file}: 'default_profile_by_drive_type' must be an object.`, 2);
    }
    const dp = obj.default_profile_by_drive_type;
    const default_profile_by_drive_type = {};
    for (const [k, v] of Object.entries(dp)) {
        if (typeof v !== "string" || v.trim().length === 0) {
            throw new NovelCliError(`Invalid ${file}: 'default_profile_by_drive_type.${k}' must be a non-empty string.`, 2);
        }
        default_profile_by_drive_type[k.trim()] = v.trim();
    }
    // Each profile needs a drive_type string and a weights object covering
    // exactly the declared dimensions (extra dimensions are rejected).
    if (!isPlainObject(obj.profiles))
        throw new NovelCliError(`Invalid ${file}: 'profiles' must be an object.`, 2);
    const profilesRaw = obj.profiles;
    const profiles = {};
    const allowedDims = new Set(dimensions);
    for (const [profileId, profileRaw] of Object.entries(profilesRaw)) {
        if (!isPlainObject(profileRaw))
            throw new NovelCliError(`Invalid ${file}: 'profiles.${profileId}' must be an object.`, 2);
        const p = profileRaw;
        const drive_type = requireStringField(p, "drive_type", file);
        if (!isPlainObject(p.weights))
            throw new NovelCliError(`Invalid ${file}: 'profiles.${profileId}.weights' must be an object.`, 2);
        const weightsRaw = p.weights;
        const weights = {};
        for (const [dim, val] of Object.entries(weightsRaw)) {
            if (!allowedDims.has(dim)) {
                throw new NovelCliError(`Invalid ${file}: unknown dimension '${dim}' in profiles.${profileId}.weights (allowed: ${dimensions.join(", ")}).`, 2);
            }
            weights[dim] = requireFiniteNonNegativeNumber(val, file, `profiles.${profileId}.weights.${dim}`);
        }
        // Every declared dimension must be present in the profile's weights.
        for (const dim of dimensions) {
            if (!(dim in weights)) {
                throw new NovelCliError(`Invalid ${file}: profiles.${profileId}.weights missing required dimension '${dim}'.`, 2);
            }
        }
        profiles[profileId] = { drive_type, weights };
    }
    // Validate default profile references.
    for (const [driveType, profileId] of Object.entries(default_profile_by_drive_type)) {
        const p = profiles[profileId];
        if (!p)
            throw new NovelCliError(`Invalid ${file}: default_profile_by_drive_type.${driveType} references missing profile '${profileId}'.`, 2);
        // The referenced profile's drive_type must agree with the map key.
        if (p.drive_type !== driveType) {
            throw new NovelCliError(`Invalid ${file}: default_profile_by_drive_type.${driveType} references profile '${profileId}' with drive_type='${p.drive_type}'.`, 2);
        }
    }
    const out = {
        schema_version: 1,
        dimensions,
        normalization,
        default_profile_by_drive_type,
        profiles
    };
    // Optional free-text metadata is carried through trimmed, never required.
    if (typeof obj.description === "string" && obj.description.trim().length > 0)
        out.description = obj.description.trim();
    if (typeof obj.last_updated === "string" && obj.last_updated.trim().length > 0)
        out.last_updated = obj.last_updated.trim();
    // Validate normalizability for each profile.
    for (const [profileId, p] of Object.entries(profiles)) {
        const sum = dimensions.reduce((acc, dim) => acc + p.weights[dim], 0);
        if (!Number.isFinite(sum) || sum <= 0) {
            throw new NovelCliError(`Invalid ${file}: profiles.${profileId} weights must have sum > 0 (got ${String(sum)}).`, 2);
        }
        // Scaling the weights to sum_to must land within the configured
        // tolerance (guards against floating-point degenerate configs).
        const scale = normalization.sum_to / sum;
        const sumAfter = dimensions.reduce((acc, dim) => acc + p.weights[dim] * scale, 0);
        if (Math.abs(sumAfter - normalization.sum_to) > normalization.tolerance) {
            throw new NovelCliError(`Invalid ${file}: profiles.${profileId} weights are not normalizable within tolerance (sum_after=${sumAfter}, expected ${normalization.sum_to}±${normalization.tolerance}).`, 2);
        }
    }
    return out;
}
|
|
150
|
+
/**
 * Loads genre-weight-profiles.json from `rootDir` if present.
 * Returns null when the file does not exist; otherwise the relative path
 * together with the parsed + validated config.
 */
export async function loadGenreWeightProfiles(rootDir) {
    const relPath = "genre-weight-profiles.json";
    const absPath = join(rootDir, relPath);
    const exists = await pathExists(absPath);
    if (!exists) {
        return null;
    }
    const config = parseGenreWeightProfiles(await readJsonFile(absPath), relPath);
    return { relPath, config };
}
|
|
158
|
+
/**
 * Computes the scaling needed to bring the given weights to the configured
 * target sum. Missing dimensions count as 0.
 * Returns the normalization spec augmented with sum_before, scale_factor and
 * sum_after; throws NovelCliError when the weights cannot be normalized.
 */
function normalizeWeights(args) {
    const { dimensions, normalization, weights, fileHint } = args;
    let sum_before = 0;
    for (const dim of dimensions) {
        sum_before += weights[dim] ?? 0;
    }
    if (!Number.isFinite(sum_before) || sum_before <= 0) {
        throw new NovelCliError(`Invalid scoring weights (${fileHint}): weights must have sum > 0 (got ${String(sum_before)}).`, 2);
    }
    const scale_factor = normalization.sum_to / sum_before;
    let sum_after = 0;
    for (const dim of dimensions) {
        sum_after += (weights[dim] ?? 0) * scale_factor;
    }
    if (Math.abs(sum_after - normalization.sum_to) > normalization.tolerance) {
        throw new NovelCliError(`Invalid scoring weights (${fileHint}): weights are not normalizable within tolerance (sum_after=${sum_after}, expected ${normalization.sum_to}±${normalization.tolerance}).`, 2);
    }
    return { ...normalization, sum_before, scale_factor, sum_after };
}
|
|
170
|
+
/**
 * Resolves the effective per-dimension scoring weights for a platform profile.
 *
 * Selects the profile named by scoring.weight_profile_id, checks its
 * drive_type matches scoring.genre_drive_type, applies any weight_overrides,
 * and normalizes the result to the configured target sum.
 *
 * @param args - { config, scoring, hookPolicy } where `config` is the parsed
 *               genre-weight-profiles file and `hookPolicy` the platform's
 *               hook_policy section (may be undefined).
 * @returns { genre_drive_type, weight_profile_id, weight_overrides,
 *            dimensions, weights, normalization }.
 * @throws NovelCliError on any cross-file inconsistency.
 */
export function computeEffectiveScoringWeights(args) {
    const driveType = args.scoring.genre_drive_type;
    const selectedProfileId = args.scoring.weight_profile_id;
    const profile = args.config.profiles[selectedProfileId];
    if (!profile) {
        // List known profile ids in the error to make the typo easy to spot.
        const known = Object.keys(args.config.profiles).sort();
        const hint = known.length > 0 ? ` Known profiles: ${known.join(", ")}` : "";
        throw new NovelCliError(`Invalid platform-profile.json: scoring.weight_profile_id='${selectedProfileId}' not found in genre-weight-profiles.json.${hint}`, 2);
    }
    if (profile.drive_type !== driveType) {
        throw new NovelCliError(`Invalid platform-profile.json: scoring.genre_drive_type='${driveType}' does not match selected profile '${selectedProfileId}' (drive_type='${profile.drive_type}').`, 2);
    }
    // When hook policy is enabled, hook_strength should be a first-class dimension.
    const configDims = args.config.dimensions;
    if (args.hookPolicy?.required && !configDims.includes("hook_strength")) {
        throw new NovelCliError(`Invalid genre-weight-profiles.json: missing 'hook_strength' in dimensions while platform-profile.json.hook_policy.required=true.`, 2);
    }
    // Effective dimensions: hook_strength only participates when hooks are enabled.
    const dims = args.hookPolicy?.required ? configDims : configDims.filter((d) => d !== "hook_strength");
    // Copy overrides so validation-time coercion never mutates caller state.
    const overridesRaw = args.scoring.weight_overrides ?? null;
    const overrides = overridesRaw ? { ...overridesRaw } : null;
    const allowedDims = new Set(configDims);
    const effectiveDims = new Set(dims);
    if (overrides) {
        for (const [dim, v] of Object.entries(overrides)) {
            if (!allowedDims.has(dim)) {
                throw new NovelCliError(`Invalid platform-profile.json: scoring.weight_overrides has unknown dimension '${dim}' (allowed: ${configDims.join(", ")}).`, 2);
            }
            overrides[dim] = requireFiniteNonNegativeNumber(v, "platform-profile.json", `scoring.weight_overrides.${dim}`);
            // A positive hook_strength override contradicts a disabled hook policy.
            if (dim === "hook_strength" && !args.hookPolicy?.required && overrides[dim] > 0) {
                throw new NovelCliError(`Invalid platform-profile.json: scoring.weight_overrides.hook_strength=${overrides[dim]} but hook_policy.required=false. Set hook_strength override to 0 or enable hook_policy.required.`, 2);
            }
            if (!effectiveDims.has(dim)) {
                // Currently only possible for hook_strength when hook policy is disabled. Allowed as long as it is not positive.
                continue;
            }
        }
    }
    // Overrides win over the profile's weights, per dimension.
    const rawWeights = {};
    for (const dim of dims) {
        rawWeights[dim] = typeof overrides?.[dim] === "number" ? overrides[dim] : profile.weights[dim];
    }
    const normalization = normalizeWeights({
        dimensions: dims,
        normalization: args.config.normalization,
        weights: rawWeights,
        fileHint: "platform-profile.json.scoring + genre-weight-profiles.json"
    });
    // Scale each raw weight so the effective weights sum to normalization.sum_to.
    const weights = {};
    for (const dim of dims) {
        weights[dim] = rawWeights[dim] * normalization.scale_factor;
    }
    return {
        genre_drive_type: driveType,
        weight_profile_id: selectedProfileId,
        weight_overrides: overrides,
        dimensions: dims,
        weights,
        normalization
    };
}
|
|
231
|
+
/**
 * Collects the score objects inside an eval JSON that should receive
 * per-dimension weights. Prefers 'eval_used.scores' (pushed first, so it is
 * the canonical target) and also includes the top-level 'scores' object.
 */
function getScoresTargets(evalObj) {
    const candidates = [
        {
            scores: isPlainObject(evalObj.eval_used) ? evalObj.eval_used.scores : undefined,
            path: "eval_used.scores"
        },
        { scores: evalObj.scores, path: "scores" },
    ];
    return candidates.filter((c) => isPlainObject(c.scores));
}
|
|
243
|
+
/**
 * Rewrites an eval JSON file in place, attaching the effective scoring
 * weights: a top-level 'scoring_weights' summary plus a 'weight' field on
 * every per-dimension score entry.
 *
 * No-op when the platform profile has no 'scoring' section.
 *
 * @param args - { platformProfile, genreWeightProfiles, evalAbsPath,
 *                 evalRelPath } (rel path is used only for error messages).
 * @throws NovelCliError when the eval JSON is not an object, has no scores
 *         object, or the canonical scores object is missing dimensions.
 */
export async function attachScoringWeightsToEval(args) {
    const scoring = args.platformProfile.scoring;
    if (!scoring)
        return;
    const effective = computeEffectiveScoringWeights({
        config: args.genreWeightProfiles.config,
        scoring,
        hookPolicy: args.platformProfile.hook_policy
    });
    const raw = await readJsonFile(args.evalAbsPath);
    if (!isPlainObject(raw))
        throw new NovelCliError(`Invalid ${args.evalRelPath}: eval JSON must be an object.`, 2);
    const obj = raw;
    // Record how the weights were derived so the eval file is self-describing.
    obj.scoring_weights = {
        genre_drive_type: effective.genre_drive_type,
        weight_profile_id: effective.weight_profile_id,
        weight_overrides: effective.weight_overrides,
        dimensions: effective.dimensions,
        weights: effective.weights,
        normalization: effective.normalization,
        source: {
            platform_profile: "platform-profile.json",
            genre_weight_profiles: args.genreWeightProfiles.relPath
        }
    };
    const targets = getScoresTargets(obj);
    if (targets.length === 0) {
        throw new NovelCliError(`Invalid ${args.evalRelPath}: missing scores object (expected 'scores' or 'eval_used.scores'); cannot attach per-dimension weights.`, 2);
    }
    // Only the first (canonical) target must carry every dimension; the
    // secondary target is annotated best-effort below.
    const canonical = targets[0];
    const missing = [];
    for (const dim of effective.dimensions) {
        if (!isPlainObject(canonical.scores[dim]))
            missing.push(dim);
    }
    if (missing.length > 0) {
        throw new NovelCliError(`Invalid ${args.evalRelPath}: missing score dimensions in ${canonical.path}: ${missing.join(", ")}. Re-run QualityJudge with the updated contract.`, 2);
    }
    // Attach the effective weight to each present score entry in all targets.
    for (const target of targets) {
        for (const dim of effective.dimensions) {
            const entry = target.scores[dim];
            if (!isPlainObject(entry))
                continue;
            entry.weight = effective.weights[dim] ?? 0;
        }
    }
    await writeJsonFile(args.evalAbsPath, obj);
}
|
package/dist/steps.js
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
import { NovelCliError } from "./errors.js";
|
|
2
|
+
// Pipeline stages a chapter moves through, in order; parseStepId validates
// stage names against this list.
export const CHAPTER_STAGES = ["draft", "summarize", "refine", "judge", "title-fix", "hook-fix", "review", "commit"];
|
|
3
|
+
/** Zero-pads a number to at least three digits (e.g. 7 -> "007"). */
export function pad3(n) {
    return `${n}`.padStart(3, "0");
}
|
|
6
|
+
/** Zero-pads a number to at least two digits (e.g. 7 -> "07"). */
export function pad2(n) {
    return `${n}`.padStart(2, "0");
}
|
|
9
|
+
/** Relative path of the pre-title-fix snapshot log for a chapter. */
export function titleFixSnapshotRel(chapter) {
    const id = pad3(chapter);
    return `staging/logs/title-fix-chapter-${id}-before.md`;
}
|
|
12
|
+
/**
 * Formats a step object as its canonical id, e.g. "chapter:048:draft".
 * Only 'chapter' steps are supported; anything else throws NovelCliError.
 */
export function formatStepId(step) {
    if (step.kind === "chapter") {
        return `chapter:${pad3(step.chapter)}:${step.stage}`;
    }
    throw new NovelCliError(`Unsupported step kind: ${step.kind}`, 2);
}
|
|
17
|
+
/**
 * Parses a step id string of the form "chapter:<number>:<stage>".
 * The chapter must be a positive integer and the stage one of CHAPTER_STAGES.
 * Throws NovelCliError with a format hint on any malformed input.
 */
export function parseStepId(input) {
    const fail = (detail) => new NovelCliError(`Invalid step id: ${input}. ${detail}`, 2);
    const parts = input.trim().split(":");
    if (parts.length !== 3) {
        throw fail("Expected format: chapter:048:draft");
    }
    const [kind, chapterRaw, stageRaw] = parts;
    if (kind !== "chapter") {
        throw fail("Only 'chapter' steps are supported.");
    }
    if (!/^\d+$/.test(chapterRaw)) {
        throw fail("Chapter must be a number.");
    }
    const chapter = Number.parseInt(chapterRaw, 10);
    if (!Number.isInteger(chapter) || chapter <= 0) {
        throw fail("Chapter must be an int >= 1.");
    }
    if (!CHAPTER_STAGES.includes(stageRaw)) {
        throw fail(`Stage must be one of: ${CHAPTER_STAGES.join(", ")}`);
    }
    return { kind: "chapter", chapter, stage: stageRaw };
}
|
|
37
|
+
/**
 * Builds the relative paths of every artifact associated with a chapter,
 * split into the staging area and the committed (final) area.
 * Storyline memory paths are null when no storylineId is given.
 */
export function chapterRelPaths(chapter, storylineId) {
    const id = pad3(chapter);
    const memoryPath = (prefix) => (storylineId ? `${prefix}storylines/${storylineId}/memory.md` : null);
    const staging = {
        chapterMd: `staging/chapters/chapter-${id}.md`,
        summaryMd: `staging/summaries/chapter-${id}-summary.md`,
        deltaJson: `staging/state/chapter-${id}-delta.json`,
        crossrefJson: `staging/state/chapter-${id}-crossref.json`,
        evalJson: `staging/evaluations/chapter-${id}-eval.json`,
        styleRefinerChangesJson: `staging/logs/style-refiner-chapter-${id}-changes.json`,
        storylineMemoryMd: memoryPath("staging/")
    };
    const final = {
        chapterMd: `chapters/chapter-${id}.md`,
        summaryMd: `summaries/chapter-${id}-summary.md`,
        evalJson: `evaluations/chapter-${id}-eval.json`,
        crossrefJson: `state/chapter-${id}-crossref.json`,
        storylineMemoryMd: memoryPath(""),
        foreshadowGlobalJson: "foreshadowing/global.json",
        stateCurrentJson: "state/current-state.json",
        stateChangelogJsonl: "state/changelog.jsonl"
    };
    return { staging, final };
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/**
 * Truncates `text` to at most `maxLen` UTF-16 units, appending "…" when
 * anything was cut. Never splits a surrogate pair: if the cut would land in
 * the middle of one, the whole pair is dropped.
 * maxLen <= 0 yields "", maxLen === 1 yields just "…".
 */
export function truncateWithEllipsis(text, maxLen) {
    if (text.length <= maxLen) {
        return text;
    }
    if (maxLen <= 0) {
        return "";
    }
    if (maxLen === 1) {
        return "…";
    }
    // Reserve one unit for the ellipsis itself.
    let cut = maxLen - 1;
    const prev = text.charCodeAt(cut - 1);
    const prevIsHighSurrogate = prev >= 0xd800 && prev <= 0xdbff;
    if (prevIsHighSurrogate) {
        const follow = text.charCodeAt(cut);
        if (follow >= 0xdc00 && follow <= 0xdfff) {
            cut -= 1;
        }
    }
    return `${text.slice(0, cut)}…`;
}
|
|
@@ -0,0 +1,251 @@
|
|
|
1
|
+
import { join } from "node:path";
|
|
2
|
+
import { NovelCliError } from "./errors.js";
|
|
3
|
+
import { ensureDir, writeJsonFile } from "./fs-utils.js";
|
|
4
|
+
import { pad3 } from "./steps.js";
|
|
5
|
+
/**
 * Counts non-overlapping occurrences of `needle` in `text`.
 * An empty/falsy needle counts as zero occurrences.
 */
function countOccurrences(text, needle) {
    if (!needle) {
        return 0;
    }
    let total = 0;
    for (let pos = text.indexOf(needle); pos >= 0; pos = text.indexOf(needle, pos + needle.length)) {
        total += 1;
    }
    return total;
}
|
|
19
|
+
/**
 * Finds the first line of `text` with non-whitespace content.
 * Returns { line, line_no } (1-based) or null when every line is blank.
 */
function extractFirstNonEmptyLine(text) {
    let lineNo = 0;
    for (const line of text.split(/\r?\n/gu)) {
        lineNo += 1;
        if (line.trim().length > 0) {
            return { line, line_no: lineNo };
        }
    }
    return null;
}
|
|
28
|
+
/**
 * Extracts the chapter title from markdown text, expecting the first
 * non-empty line to be an ATX H1 ("# ...").
 * Returns { has_h1, line_no, raw_line, title_text } where title_text is null
 * when there is no H1 or its text is empty after trimming.
 */
export function extractChapterTitleFromMarkdown(text) {
    const first = extractFirstNonEmptyLine(text);
    if (first === null) {
        return { has_h1: false, line_no: null, raw_line: null, title_text: null };
    }
    // CommonMark: up to 3 leading spaces allowed; allow optional BOM for robustness.
    const heading = /^(?:\uFEFF)? {0,3}#(?!#)\s*(.*)$/u.exec(first.line);
    if (heading === null) {
        return { has_h1: false, line_no: first.line_no, raw_line: first.line, title_text: null };
    }
    const trimmedTitle = (heading[1] ?? "").trim();
    return {
        has_h1: true,
        line_no: first.line_no,
        raw_line: first.line,
        title_text: trimmedTitle.length > 0 ? trimmedTitle : null
    };
}
|
|
39
|
+
/**
 * Evaluates a chapter's title against the platform's retention title policy.
 *
 * @param args - { platformProfile, chapterText, chapter }.
 * @returns a report { schema_version, generated_at, scope, title, policy,
 *          status, issues, has_hard_violations } where status is "skipped"
 *          (policy absent/disabled), "pass", "warn" (soft issues only), or
 *          "violation" (at least one hard issue).
 */
export function computeTitlePolicyReport(args) {
    const generated_at = new Date().toISOString();
    const titlePolicy = args.platformProfile.retention?.title_policy ?? null;
    // Echo the applied policy into the report so it is self-describing;
    // required_patterns is optional and only included when configured.
    const policy = titlePolicy
        ? {
            enabled: titlePolicy.enabled,
            min_chars: titlePolicy.min_chars,
            max_chars: titlePolicy.max_chars,
            forbidden_patterns: titlePolicy.forbidden_patterns,
            ...(titlePolicy.required_patterns ? { required_patterns: titlePolicy.required_patterns } : {}),
            auto_fix: titlePolicy.auto_fix
        }
        : null;
    const extracted = extractChapterTitleFromMarkdown(args.chapterText);
    const titleText = extracted.title_text;
    // Length is measured in code points (Array.from), not UTF-16 units.
    const chars = titleText ? Array.from(titleText).length : null;
    const issues = [];
    // Policy absent or disabled: report what was found, but run no checks.
    if (!titlePolicy || !titlePolicy.enabled) {
        return {
            schema_version: 1,
            generated_at,
            scope: { chapter: args.chapter },
            title: {
                has_h1: extracted.has_h1,
                line_no: extracted.line_no,
                raw_line: extracted.raw_line,
                text: titleText,
                chars
            },
            policy,
            status: "skipped",
            issues: [],
            has_hard_violations: false
        };
    }
    // Missing or empty H1 are hard violations.
    if (!extracted.has_h1) {
        issues.push({
            id: "retention.title_policy.missing_h1",
            severity: "hard",
            summary: "Missing chapter title: expected the first non-empty line to be a Markdown H1 ('# ...').",
            evidence: extracted.raw_line ?? undefined,
            suggestion: "Add an H1 title as the first non-empty line, and keep it within configured length/pattern rules."
        });
    }
    else if (!titleText) {
        issues.push({
            id: "retention.title_policy.empty_title",
            severity: "hard",
            summary: "Empty chapter title: H1 line exists but title text is missing.",
            evidence: extracted.raw_line ?? undefined,
            suggestion: "Fill in a meaningful title (avoid spoilers) and keep it within configured length/pattern rules."
        });
    }
    // Remaining checks only apply when a title actually exists.
    if (titleText && chars !== null) {
        // Length bounds are soft issues.
        if (chars < titlePolicy.min_chars) {
            issues.push({
                id: "retention.title_policy.too_short",
                severity: "soft",
                summary: `Title is too short (${chars} chars < min ${titlePolicy.min_chars}).`,
                evidence: titleText,
                suggestion: "Expand the title to better signal the chapter's hook without spoilers."
            });
        }
        if (chars > titlePolicy.max_chars) {
            issues.push({
                id: "retention.title_policy.too_long",
                severity: "soft",
                summary: `Title is too long (${chars} chars > max ${titlePolicy.max_chars}).`,
                evidence: titleText,
                suggestion: "Shorten the title; keep the promise clear and avoid padding words."
            });
        }
        // Banned words: reuse platform-profile compliance list.
        const bannedHits = [];
        for (const word of args.platformProfile.compliance.banned_words) {
            const count = countOccurrences(titleText, word);
            if (count <= 0)
                continue;
            bannedHits.push({ word, count });
        }
        if (bannedHits.length > 0) {
            // Sort by count desc, then by word ("zh" collation) so the most
            // frequent hit becomes the single evidence sample.
            bannedHits.sort((a, b) => b.count - a.count || a.word.localeCompare(b.word, "zh"));
            const top = bannedHits[0];
            issues.push({
                id: "retention.title_policy.banned_words",
                severity: "hard",
                summary: `Title contains banned words (${bannedHits.length} distinct).`,
                evidence: top ? `${top.word} x${top.count}` : undefined,
                suggestion: "Remove or replace banned words in the title."
            });
        }
        // Forbidden patterns.
        for (const [i, pattern] of titlePolicy.forbidden_patterns.entries()) {
            let re;
            try {
                re = new RegExp(pattern);
            }
            catch {
                // A malformed configured regex is reported (soft) and skipped
                // rather than crashing the whole report.
                issues.push({
                    id: `retention.title_policy.invalid_forbidden_pattern.${i}`,
                    severity: "soft",
                    summary: `Invalid forbidden_patterns regex: /${pattern}/ — skipped.`,
                    suggestion: "Fix the regex syntax in platform-profile.json."
                });
                continue;
            }
            const m = re.exec(titleText);
            if (!m)
                continue;
            const match = (m[0] ?? "").trim();
            issues.push({
                id: `retention.title_policy.forbidden_pattern.${i}`,
                severity: "soft",
                summary: `Title matches forbidden pattern /${pattern}/.`,
                evidence: match.length > 0 ? match : titleText,
                suggestion: "Adjust the title to avoid forbidden patterns (spoilers/markers/phrases)."
            });
        }
        // Required patterns (OR semantics).
        if (titlePolicy.required_patterns && titlePolicy.required_patterns.length > 0) {
            // Invalid required regexes simply count as non-matching.
            const ok = titlePolicy.required_patterns.some((p) => {
                try {
                    return new RegExp(p).test(titleText);
                }
                catch {
                    return false;
                }
            });
            if (!ok) {
                issues.push({
                    id: "retention.title_policy.required_pattern_missing",
                    severity: "soft",
                    summary: "Title does not match any required pattern configured by the platform policy.",
                    evidence: titleText,
                    suggestion: "Rewrite the title to match one of the required patterns (e.g. '第XX章 …')."
                });
            }
        }
    }
    const hasHard = issues.some((i) => i.severity === "hard");
    const status = issues.length === 0 ? "pass" : hasHard ? "violation" : "warn";
    return {
        schema_version: 1,
        generated_at,
        scope: { chapter: args.chapter },
        title: {
            has_h1: extracted.has_h1,
            line_no: extracted.line_no,
            raw_line: extracted.raw_line,
            text: titleText,
            chars
        },
        policy,
        status,
        issues,
        has_hard_violations: hasHard
    };
}
|
|
197
|
+
// Persist a title-policy report to disk.
// Writes two copies under logs/retention/title-policy/:
//   - a per-chapter history file (title-policy-chapter-NNN.json)
//   - a rolling latest.json snapshot
// Returns the repo-relative paths of both files.
export async function writeTitlePolicyLogs(args) {
  const dirRel = "logs/retention/title-policy";
  await ensureDir(join(args.rootDir, dirRel));
  const historyRel = `${dirRel}/title-policy-chapter-${pad3(args.chapter)}.json`;
  const latestRel = `${dirRel}/latest.json`;
  // Same report payload goes to both destinations, history first.
  for (const rel of [historyRel, latestRel]) {
    await writeJsonFile(join(args.rootDir, rel), args.report);
  }
  return { latestRel, historyRel };
}
|
|
207
|
+
// Remove the first non-empty ATX heading line (H1–H6) from `text`.
// Used by the title-fix body guard: `## ...` (any level, up to 3 leading
// spaces, optional BOM) is deliberately accepted as a title-line candidate so
// title-fix can promote it to `# ...`.
// Returns { stripped, removed_line }; when the first non-empty line is not an
// ATX heading (or the text has no non-empty line), the text is returned
// unchanged with removed_line === null.
export function stripFirstAtxHeadingLine(text) {
  // Accepts an optional BOM, up to 3 leading spaces, and 1–6 hashes that are
  // not followed by another hash (so 7+ hashes is not a heading).
  const headingRe = /^(?:\uFEFF)? {0,3}#{1,6}(?!#)\s*.*$/u;
  // Walk physical lines while tracking exact [start, end) offsets into the
  // original string, so removal preserves every other byte (including EOLs).
  let cursor = 0;
  while (cursor <= text.length) {
    const start = cursor;
    const nl = text.indexOf("\n", cursor);
    let end;
    let line;
    if (nl === -1) {
      // Final line without a trailing newline.
      end = text.length;
      line = text.slice(start);
      cursor = text.length + 1; // past-the-end sentinel: loop terminates next pass
    } else {
      end = nl + 1; // removal span swallows the newline
      line = text.slice(start, nl).replace(/\r$/u, ""); // tolerate CRLF
      cursor = end;
    }
    if (line.trim().length === 0) {
      continue; // skip leading blank lines
    }
    if (!headingRe.test(line)) {
      break; // first non-empty line is not a heading: nothing to strip
    }
    return {
      stripped: text.slice(0, start) + text.slice(end),
      removed_line: line
    };
  }
  return { stripped: text, removed_line: null };
}
|
|
245
|
+
// Guard for the title-fix flow: after removing the first heading line from
// both versions, the remaining chapter body must be byte-identical.
// Throws NovelCliError (exit code 2) when the body was modified.
export function assertTitleFixOnlyChangedTitleLine(args) {
  const beforeBody = stripFirstAtxHeadingLine(args.before).stripped;
  const afterBody = stripFirstAtxHeadingLine(args.after).stripped;
  if (beforeBody === afterBody) {
    return;
  }
  throw new NovelCliError(`Invalid ${args.file}: title-fix must only change the first title line; chapter body changed.`, 2);
}
|