@pknx/waterfall-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +62 -0
- package/bin/waterfall.mjs +14 -0
- package/lib/cli/agent/agent-message.ts +71 -0
- package/lib/cli/agent/agent-translators.ts +145 -0
- package/lib/cli/agent/backend-invoke.ts +133 -0
- package/lib/cli/agent/backends.ts +100 -0
- package/lib/cli/agent/global-prompts.ts +55 -0
- package/lib/cli/commands/bug-start.ts +115 -0
- package/lib/cli/commands/comment-add.ts +47 -0
- package/lib/cli/commands/cr-all.ts +18 -0
- package/lib/cli/commands/cr-finish.ts +176 -0
- package/lib/cli/commands/cr-start.ts +105 -0
- package/lib/cli/commands/cr-to-rq.ts +18 -0
- package/lib/cli/commands/export-pdf.ts +193 -0
- package/lib/cli/commands/horizontal/horizontal.ts +232 -0
- package/lib/cli/commands/horizontal-create.ts +34 -0
- package/lib/cli/commands/horizontal-update.ts +32 -0
- package/lib/cli/commands/join-hint.ts +4 -0
- package/lib/cli/commands/registry.ts +59 -0
- package/lib/cli/commands/resolve-operator-hint.ts +120 -0
- package/lib/cli/commands/rq-all.ts +18 -0
- package/lib/cli/commands/rq-to-uc.ts +18 -0
- package/lib/cli/commands/story-close.ts +124 -0
- package/lib/cli/commands/sync-work-items.ts +59 -0
- package/lib/cli/commands/sys-start.ts +96 -0
- package/lib/cli/commands/test-all.ts +18 -0
- package/lib/cli/commands/test-to-story.ts +18 -0
- package/lib/cli/commands/types.ts +33 -0
- package/lib/cli/commands/uc-all.ts +18 -0
- package/lib/cli/commands/uc-to-story.ts +18 -0
- package/lib/cli/commands/uc-to-test.ts +18 -0
- package/lib/cli/comments/item-comments.ts +285 -0
- package/lib/cli/config/dot-waterfall.ts +404 -0
- package/lib/cli/config/global-cli.ts +21 -0
- package/lib/cli/config/sync-work-item-config.ts +34 -0
- package/lib/cli/core/cli-help-spec.ts +833 -0
- package/lib/cli/core/cli-log.ts +124 -0
- package/lib/cli/core/exec-file.ts +8 -0
- package/lib/cli/core/prompt-map.ts +64 -0
- package/lib/cli/core/slug.ts +44 -0
- package/lib/cli/entry.ts +4 -0
- package/lib/cli/export/collect-md.ts +41 -0
- package/lib/cli/export/export-items.ts +104 -0
- package/lib/cli/export/export-pdf-path.ts +88 -0
- package/lib/cli/export/merge-md.ts +37 -0
- package/lib/cli/export/mermaid-run.ts +104 -0
- package/lib/cli/export/pandoc-pdf.ts +90 -0
- package/lib/cli/export/pdf-bundled-worker.mjs +73 -0
- package/lib/cli/export/pdf-bundled.ts +36 -0
- package/lib/cli/git/cr-agent-context.ts +62 -0
- package/lib/cli/git/git-branch-guards.ts +60 -0
- package/lib/cli/git/git-cli-mock.ts +191 -0
- package/lib/cli/git/git-cli.ts +24 -0
- package/lib/cli/main.ts +434 -0
- package/lib/cli/paths.ts +9 -0
- package/lib/cli/project/pom-json.ts +55 -0
- package/lib/cli/spec/spec-init.ts +216 -0
- package/lib/cli/spec/spec-root.ts +93 -0
- package/lib/cli/sync/apply-remote-comments.ts +87 -0
- package/lib/cli/sync/attachment-category.ts +43 -0
- package/lib/cli/sync/diff-work-items.ts +113 -0
- package/lib/cli/sync/materialize-remote-bugs.ts +66 -0
- package/lib/cli/sync/provider-types.ts +43 -0
- package/lib/cli/sync/providers/direct-provider.ts +27 -0
- package/lib/cli/sync/providers/jira-provider.ts +34 -0
- package/lib/cli/sync/providers/registry.ts +26 -0
- package/lib/cli/sync/run-sync-work-items.ts +202 -0
- package/lib/cli/sync/spec-work-items.ts +226 -0
- package/lib/cli/sync/sync-hint-json.ts +163 -0
- package/lib/cli/sync/work-item-meta.ts +117 -0
- package/lib/cli/work-items/infer-bug-sys.ts +147 -0
- package/lib/cli/work-items/remote-bug-import-scaffold.ts +32 -0
- package/lib/cli/work-items/write-bug-to-spec.ts +158 -0
- package/package.json +54 -0
- package/prompts/commands/bug-start.md +46 -0
- package/prompts/commands/cr-finish.md +44 -0
- package/prompts/commands/cr-start.md +65 -0
- package/prompts/commands/cr-to-rq.md +62 -0
- package/prompts/commands/horizontal-create.md +27 -0
- package/prompts/commands/horizontal-update.md +39 -0
- package/prompts/commands/rq-to-uc.md +62 -0
- package/prompts/commands/story-close-all.md +34 -0
- package/prompts/commands/story-close.md +44 -0
- package/prompts/commands/sync-bugs-refine-imports.md +33 -0
- package/prompts/commands/sys-start.md +63 -0
- package/prompts/commands/test-to-story.md +64 -0
- package/prompts/commands/uc-to-story.md +85 -0
- package/prompts/commands/uc-to-test.md +58 -0
- package/prompts/global/before-changing-spec.md +62 -0
- package/prompts/global/content-requirements-vs-use-cases.md +116 -0
- package/prompts/global/cursor-overview.md +31 -0
- package/prompts/global/git-usage.md +46 -0
- package/prompts/global/horizontal-structure.md +75 -0
- package/prompts/global/workflows-index.md +59 -0
- package/prompts/items/bug-document-structure.md +23 -0
- package/prompts/items/cr-document-structure.md +45 -0
- package/prompts/items/rq-theme-document-structure.md +36 -0
- package/prompts/items/story-document-structure.md +49 -0
- package/prompts/items/sys-document-structure.md +36 -0
- package/prompts/items/tst-document-structure.md +55 -0
- package/prompts/items/uc-document-structure.md +38 -0
- package/spec-template/README.md +11 -0
- package/spec-template/full/doc/spec-structure.md +16 -0
- package/spec-template/full/prompts/before-changing-spec.md +7 -0
- package/spec-template/full/prompts/workflows.md +25 -0
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Log / trace levels for agent invocation (driven by .waterfall `level` and flags).
|
|
3
|
+
*/
|
|
4
|
+
import type { BackendId } from "../agent/backends";
|
|
5
|
+
|
|
6
|
+
export type LogLevel = "silent" | "normal" | "verbose";
|
|
7
|
+
|
|
8
|
+
export function parseLogLevel(raw: string | undefined): LogLevel | undefined {
|
|
9
|
+
if (!raw?.trim()) return undefined;
|
|
10
|
+
const v = raw.trim().toLowerCase();
|
|
11
|
+
if (v === "silent" || v === "quiet" || v === "0") return "silent";
|
|
12
|
+
if (v === "normal" || v === "1") return "normal";
|
|
13
|
+
if (v === "verbose" || v === "debug" || v === "2") return "verbose";
|
|
14
|
+
return undefined;
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
export function defaultLogLevel(): LogLevel {
|
|
18
|
+
return "normal";
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
/** Labels attached to agent-invocation trace output (see traceAgentInvokeStart/Done). */
export type AgentTraceLabels = {
  /** Backend the agent request is routed to. */
  backend: BackendId;
  /** e.g. cr-to-rq.md */
  promptBasename: string;
  /** e.g. prompts/commands/cr-to-rq.md */
  promptRel: string;
  /** Printed as the subprocess `cwd:` line in verbose tracing. */
  specRoot: string;
  /** Operator hint; verbose tracing previews it truncated to 220 chars. */
  hint: string;
  /** Extra context line appended when orchestrating multi-step runs. */
  orchestrationContext?: string;
  /** Cline: WATERFALL_CLINE_MODEL when set */
  model?: string;
  /** Rendered prompt size in bytes, when known; shown next to promptRel in verbose mode. */
  promptByteLength?: number;
};
|
|
34
|
+
|
|
35
|
+
function wf(): string {
|
|
36
|
+
return "[waterfall]";
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/** Full JSON wire string passed to the backend (`-p` payload). Verbose only (non-Cursor backends). */
|
|
40
|
+
export function traceWireMessage(level: LogLevel, wire: string): void {
|
|
41
|
+
if (level !== "verbose") return;
|
|
42
|
+
process.stdout.write(`${wf()} agent wire message:\n${wire}`);
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
/**
|
|
46
|
+
* Echo the translated JSON payload to stdout before spawn.
|
|
47
|
+
* Cursor: printed whenever log level is not silent (so operators see what `agent -p` receives).
|
|
48
|
+
* Other backends: same as verbose-only `traceWireMessage`.
|
|
49
|
+
*/
|
|
50
|
+
export function traceTranslatedPayloadForBackend(
|
|
51
|
+
backend: BackendId,
|
|
52
|
+
level: LogLevel,
|
|
53
|
+
wire: string,
|
|
54
|
+
): void {
|
|
55
|
+
if (level === "silent") return;
|
|
56
|
+
if (backend === "cursor") {
|
|
57
|
+
process.stdout.write(
|
|
58
|
+
`${wf()} cursor translated payload (JSON string passed as agent -p positional prompt):\n${wire}`,
|
|
59
|
+
);
|
|
60
|
+
return;
|
|
61
|
+
}
|
|
62
|
+
traceWireMessage(level, wire);
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
/** Dry-run plan line (describeBackendInvocation). Silent = no stdout. */
|
|
66
|
+
export function traceDryRun(level: LogLevel, planLine: string): void {
|
|
67
|
+
if (level === "silent") return;
|
|
68
|
+
process.stdout.write(`[dry-run] ${planLine}\n`);
|
|
69
|
+
if (level === "verbose") {
|
|
70
|
+
process.stdout.write(`${wf()} log level: verbose (dry-run; no subprocess)\n`);
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
/** Before synchronous agent subprocess. */
|
|
75
|
+
export function traceAgentInvokeStart(level: LogLevel, labels: AgentTraceLabels): void {
|
|
76
|
+
if (level === "silent") return;
|
|
77
|
+
const { backend, promptBasename, specRoot, orchestrationContext, hint } = labels;
|
|
78
|
+
if (level === "normal") {
|
|
79
|
+
const ctx = orchestrationContext ? ` · ${orchestrationContext}` : "";
|
|
80
|
+
process.stdout.write(`${wf()} ◐ agent · ${backend} · ${promptBasename}${ctx}\n`);
|
|
81
|
+
return;
|
|
82
|
+
}
|
|
83
|
+
const modelLine =
|
|
84
|
+
labels.model?.trim() ? ` cline model: ${labels.model.trim()}\n` : "";
|
|
85
|
+
const bytesLine =
|
|
86
|
+
labels.promptByteLength != null
|
|
87
|
+
? ` prompt file: ${labels.promptRel} (${labels.promptByteLength} bytes)\n`
|
|
88
|
+
: ` prompt file: ${labels.promptRel}\n`;
|
|
89
|
+
const hintPreview =
|
|
90
|
+
hint.length > 220 ? `${hint.slice(0, 220)}…` : hint;
|
|
91
|
+
const orch = orchestrationContext ? ` ${orchestrationContext}\n` : "";
|
|
92
|
+
process.stdout.write(
|
|
93
|
+
`${wf()} invoking agent (verbose)\n` +
|
|
94
|
+
` backend: ${backend}\n` +
|
|
95
|
+
bytesLine +
|
|
96
|
+
` cwd: ${specRoot}\n` +
|
|
97
|
+
modelLine +
|
|
98
|
+
` hint: ${JSON.stringify(hintPreview)}\n` +
|
|
99
|
+
orch,
|
|
100
|
+
);
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
/** After agent subprocess returns successfully. */
|
|
104
|
+
export function traceAgentInvokeDone(
|
|
105
|
+
level: LogLevel,
|
|
106
|
+
labels: Pick<AgentTraceLabels, "promptBasename" | "backend">,
|
|
107
|
+
): void {
|
|
108
|
+
if (level === "silent") return;
|
|
109
|
+
if (level === "normal") {
|
|
110
|
+
process.stdout.write(
|
|
111
|
+
`${wf()} ✓ agent · ${labels.backend} · ${labels.promptBasename}\n`,
|
|
112
|
+
);
|
|
113
|
+
return;
|
|
114
|
+
}
|
|
115
|
+
process.stdout.write(
|
|
116
|
+
`${wf()} subprocess finished · ${labels.backend} · ${labels.promptBasename}\n`,
|
|
117
|
+
);
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
/** `export pdf` pipeline tracing (respects silent / normal / verbose). */
|
|
121
|
+
export function traceExportPdfLine(level: LogLevel, message: string): void {
|
|
122
|
+
if (level === "silent") return;
|
|
123
|
+
process.stdout.write(`${wf()} ${message}\n`);
|
|
124
|
+
}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import { execFileSync as nodeExecFileSync } from "node:child_process";
|
|
2
|
+
|
|
3
|
+
/** Indirection so Vitest can mock backend spawns without affecting `git` in cr-*.ts */
|
|
4
|
+
export function execFileSync(
|
|
5
|
+
...args: Parameters<typeof nodeExecFileSync>
|
|
6
|
+
): ReturnType<typeof nodeExecFileSync> {
|
|
7
|
+
return nodeExecFileSync(...args);
|
|
8
|
+
}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
/**
 * Maps CLI argv to prompts/*.md paths (repo-relative to spec root).
 * Align with waterfall-spec CR-005 §2.6 and prompts/workflows.md.
 */
export type LifecycleStep =
  | "cr-to-rq"
  | "rq-to-uc"
  | "uc-to-story"
  | "uc-to-test"
  | "test-to-story";

/** Step order for `cr all`: the full pipeline, CR → RQ → UC → stories/tests → link tests. */
export const ORCHESTRATE_CR_ALL_ORDER: LifecycleStep[] = [
  "cr-to-rq",
  "rq-to-uc",
  "uc-to-story",
  "uc-to-test",
  "test-to-story",
];

/** Step order for `rq all`: CR_ALL minus the leading cr-to-rq step. */
export const ORCHESTRATE_RQ_ALL_ORDER: LifecycleStep[] = [
  "rq-to-uc",
  "uc-to-story",
  "uc-to-test",
  "test-to-story",
];

/** Step order for `uc all`: RQ_ALL minus the leading rq-to-uc step. */
export const ORCHESTRATE_UC_ALL_ORDER: LifecycleStep[] = [
  "uc-to-story",
  "uc-to-test",
  "test-to-story",
];

/**
 * Step order for `test all`.
 * NOTE(review): unlike the other orders, "test-to-story" runs FIRST here and the
 * uc-* steps follow it — confirm this ordering is intentional and not a transposition.
 */
export const ORCHESTRATE_TEST_ALL_ORDER: LifecycleStep[] = [
  "test-to-story",
  "uc-to-story",
  "uc-to-test",
];
|
|
38
|
+
|
|
39
|
+
const STEP_TO_FILE: Record<LifecycleStep, string> = {
|
|
40
|
+
"cr-to-rq": "prompts/commands/cr-to-rq.md",
|
|
41
|
+
"rq-to-uc": "prompts/commands/rq-to-uc.md",
|
|
42
|
+
"uc-to-story": "prompts/commands/uc-to-story.md",
|
|
43
|
+
"uc-to-test": "prompts/commands/uc-to-test.md",
|
|
44
|
+
"test-to-story": "prompts/commands/test-to-story.md",
|
|
45
|
+
};
|
|
46
|
+
|
|
47
|
+
export function promptFileForStep(step: LifecycleStep): string {
|
|
48
|
+
return STEP_TO_FILE[step];
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
export type SingleCommand =
|
|
52
|
+
| { kind: "cr-to-rq"; hint: string }
|
|
53
|
+
| { kind: "rq-to-uc"; hint: string }
|
|
54
|
+
| { kind: "uc-to-story"; hint: string }
|
|
55
|
+
| { kind: "uc-to-test"; hint: string }
|
|
56
|
+
| { kind: "test-to-story"; hint: string };
|
|
57
|
+
|
|
58
|
+
export function singleCommandToStep(cmd: SingleCommand): LifecycleStep {
|
|
59
|
+
return cmd.kind;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
export function promptPathForSingleCommand(cmd: SingleCommand): string {
|
|
63
|
+
return promptFileForStep(singleCommandToStep(cmd));
|
|
64
|
+
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
/** Kebab-case slug for branch / CR folder names (STORY-018). */
|
|
2
|
+
export function slugifyDescription(description: string): string {
|
|
3
|
+
const s = description
|
|
4
|
+
.trim()
|
|
5
|
+
.toLowerCase()
|
|
6
|
+
.replace(/[^a-z0-9]+/g, "-")
|
|
7
|
+
.replace(/^-+|-+$/g, "")
|
|
8
|
+
.slice(0, 48);
|
|
9
|
+
return s || "change";
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
/** Zero-pad CR id to three digits. */
|
|
13
|
+
export function formatCrId(n: number): string {
|
|
14
|
+
return String(n).padStart(3, "0");
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
/** Title case from a kebab slug (branch / folder slug) for scaffold headings — not full operator prose. */
|
|
18
|
+
export function humanizeKebabSlug(slug: string): string {
|
|
19
|
+
if (!slug || slug === "change") return "New change request";
|
|
20
|
+
return slug
|
|
21
|
+
.split("-")
|
|
22
|
+
.filter(Boolean)
|
|
23
|
+
.map((w) => w.charAt(0).toUpperCase() + w.slice(1))
|
|
24
|
+
.join(" ");
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
const SCAFFOLD_HEADING_MAX = 100;
|
|
28
|
+
|
|
29
|
+
/**
|
|
30
|
+
* H1 line for CR/SYS scaffolds: first line of operator text (trimmed), length-capped at a word boundary.
|
|
31
|
+
* Avoids titles derived from {@link slugifyDescription}'s 48-char slug, which often cuts mid-word.
|
|
32
|
+
*/
|
|
33
|
+
export function scaffoldHeadingFromDescription(
|
|
34
|
+
description: string,
|
|
35
|
+
slug: string,
|
|
36
|
+
): string {
|
|
37
|
+
const first = description.trim().split(/\r?\n/u)[0]?.trim() ?? "";
|
|
38
|
+
if (!first) return humanizeKebabSlug(slug);
|
|
39
|
+
if (first.length <= SCAFFOLD_HEADING_MAX) return first;
|
|
40
|
+
const cut = first.slice(0, SCAFFOLD_HEADING_MAX);
|
|
41
|
+
const lastSpace = cut.lastIndexOf(" ");
|
|
42
|
+
const base = lastSpace > 40 ? cut.slice(0, lastSpace) : cut;
|
|
43
|
+
return `${base}…`;
|
|
44
|
+
}
|
package/lib/cli/export/collect-md.ts (header originally mislabeled this chunk as entry.ts; the 41-line body below defines `collectMarkdownFiles`, matching collect-md.ts +41 in the file list, while entry.ts is +4)
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
const SKIP_DIR_NAMES = new Set([
|
|
5
|
+
"node_modules",
|
|
6
|
+
".git",
|
|
7
|
+
"dist",
|
|
8
|
+
"build",
|
|
9
|
+
"coverage",
|
|
10
|
+
]);
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* All `.md` files under `specRoot`, depth-first by sorted relative path.
|
|
14
|
+
* Skips hidden directories, `node_modules`, `.git`, and common build folders.
|
|
15
|
+
*/
|
|
16
|
+
export function collectMarkdownFiles(specRoot: string): string[] {
|
|
17
|
+
const absRoot = path.resolve(specRoot);
|
|
18
|
+
const results: string[] = [];
|
|
19
|
+
|
|
20
|
+
function walk(dir: string) {
|
|
21
|
+
let entries: fs.Dirent[];
|
|
22
|
+
try {
|
|
23
|
+
entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
24
|
+
} catch {
|
|
25
|
+
return;
|
|
26
|
+
}
|
|
27
|
+
entries.sort((a, b) => a.name.localeCompare(b.name));
|
|
28
|
+
for (const e of entries) {
|
|
29
|
+
if (e.name.startsWith(".")) continue;
|
|
30
|
+
if (SKIP_DIR_NAMES.has(e.name)) continue;
|
|
31
|
+
const full = path.join(dir, e.name);
|
|
32
|
+
if (e.isDirectory()) walk(full);
|
|
33
|
+
else if (e.isFile() && e.name.endsWith(".md")) results.push(full);
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
walk(absRoot);
|
|
38
|
+
return results.sort((a, b) =>
|
|
39
|
+
path.relative(absRoot, a).localeCompare(path.relative(absRoot, b)),
|
|
40
|
+
);
|
|
41
|
+
}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
|
|
3
|
+
/** Known spec artifact families for `export pdf --items`. */
|
|
4
|
+
export const EXPORT_ITEM_CODES = [
|
|
5
|
+
"CR",
|
|
6
|
+
"RQ",
|
|
7
|
+
"UC",
|
|
8
|
+
"SYS",
|
|
9
|
+
"STORY",
|
|
10
|
+
"BUG",
|
|
11
|
+
"HOR",
|
|
12
|
+
"TST",
|
|
13
|
+
"CURSOR",
|
|
14
|
+
] as const;
|
|
15
|
+
|
|
16
|
+
export type ExportItemCode = (typeof EXPORT_ITEM_CODES)[number];
|
|
17
|
+
|
|
18
|
+
/** Default: lifecycle + horizontals + tests; excludes CURSOR.md, SYS/STORY/BUG, prompts/doc. */
|
|
19
|
+
export const DEFAULT_EXPORT_ITEMS: readonly ExportItemCode[] = [
|
|
20
|
+
"CR",
|
|
21
|
+
"RQ",
|
|
22
|
+
"UC",
|
|
23
|
+
"HOR",
|
|
24
|
+
"TST",
|
|
25
|
+
];
|
|
26
|
+
|
|
27
|
+
export function parseExportItemsList(raw: string): string[] {
|
|
28
|
+
return raw
|
|
29
|
+
.split(",")
|
|
30
|
+
.map((s) => s.trim().toUpperCase())
|
|
31
|
+
.filter(Boolean);
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export function validateExportItemCodes(codes: string[]): void {
|
|
35
|
+
const allowed = new Set<string>(EXPORT_ITEM_CODES);
|
|
36
|
+
const bad = codes.filter((c) => !allowed.has(c));
|
|
37
|
+
if (bad.length > 0) {
|
|
38
|
+
throw new Error(
|
|
39
|
+
`[waterfall] Unknown --items code(s): ${bad.join(", ")}. Allowed: ${EXPORT_ITEM_CODES.join(", ")}`,
|
|
40
|
+
);
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
/**
|
|
45
|
+
* Map a markdown path relative to spec root (forward slashes) to one export code, or null if excluded
|
|
46
|
+
* (unknown layout, prompts/, doc/, docs/, etc.).
|
|
47
|
+
*/
|
|
48
|
+
export function classifyMarkdownExportItem(relPath: string): string | null {
|
|
49
|
+
const norm = relPath.split(path.sep).join("/");
|
|
50
|
+
|
|
51
|
+
if (/^CURSOR\.md$/i.test(norm)) return "CURSOR";
|
|
52
|
+
|
|
53
|
+
if (
|
|
54
|
+
norm.startsWith("prompts/") ||
|
|
55
|
+
norm.startsWith("doc/") ||
|
|
56
|
+
norm.startsWith("docs/")
|
|
57
|
+
) {
|
|
58
|
+
return null;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
if (norm.startsWith("changerequests/") && norm.toLowerCase().endsWith(".md")) {
|
|
62
|
+
return "CR";
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
if (norm.startsWith("tests/")) {
|
|
66
|
+
const base = path.posix.basename(norm);
|
|
67
|
+
if (/^TST-\d+/i.test(base)) return "TST";
|
|
68
|
+
return null;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
if (norm.startsWith("technical/horizontals/")) {
|
|
72
|
+
const base = path.posix.basename(norm);
|
|
73
|
+
if (/^HOR-\d+/i.test(base)) return "HOR";
|
|
74
|
+
return null;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
if (norm.startsWith("technical/")) {
|
|
78
|
+
if (/\/STORY-\d+[^/]*\/STORY-\d+\.md$/i.test(norm)) return "STORY";
|
|
79
|
+
if (/\/SYS-\d+[^/]*\/SYS-\d+\.md$/i.test(norm)) return "SYS";
|
|
80
|
+
if (/\/BUG-\d+[^/]*\/BUG-\d+\.md$/i.test(norm)) return "BUG";
|
|
81
|
+
return null;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
if (norm.startsWith("requirements/")) {
|
|
85
|
+
if (/\/RQ-\d+[^/]*\/RQ-\d+\.md$/i.test(norm)) return "RQ";
|
|
86
|
+
if (/\/UC-\d+[^/]*\/UC-\d+\.md$/i.test(norm)) return "UC";
|
|
87
|
+
return null;
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
return null;
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
export function filterMarkdownPathsByExportItems(
|
|
94
|
+
absolutePaths: string[],
|
|
95
|
+
specRoot: string,
|
|
96
|
+
itemSet: Set<string>,
|
|
97
|
+
): string[] {
|
|
98
|
+
const root = path.resolve(specRoot);
|
|
99
|
+
return absolutePaths.filter((abs) => {
|
|
100
|
+
const rel = path.relative(root, abs).split(path.sep).join("/");
|
|
101
|
+
const kind = classifyMarkdownExportItem(rel);
|
|
102
|
+
return kind !== null && itemSet.has(kind);
|
|
103
|
+
});
|
|
104
|
+
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { gitExecSync } from "../git/git-cli";
|
|
4
|
+
|
|
5
|
+
function runGitText(specRoot: string, args: string[]): string {
|
|
6
|
+
try {
|
|
7
|
+
return String(
|
|
8
|
+
gitExecSync(specRoot, args, {
|
|
9
|
+
encoding: "utf8",
|
|
10
|
+
stdio: ["ignore", "pipe", "pipe"],
|
|
11
|
+
}),
|
|
12
|
+
).trimEnd();
|
|
13
|
+
} catch (e) {
|
|
14
|
+
throw new Error(
|
|
15
|
+
`[waterfall] export pdf needs a git repo at the spec root: ${(e as Error).message}`,
|
|
16
|
+
);
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/** Safe single path segment for PDF filename (basename, tag, or short hash). */
|
|
21
|
+
export function sanitizePdfSegment(raw: string): string {
|
|
22
|
+
return raw
|
|
23
|
+
.trim()
|
|
24
|
+
.replace(/[/\\?*:|"<>]/g, "-")
|
|
25
|
+
.replace(/\s+/g, "_")
|
|
26
|
+
.replace(/_+/g, "_")
|
|
27
|
+
.replace(/^-+|-+$/g, "");
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Fail if the spec repo has any staged/unstaged/untracked changes.
|
|
32
|
+
*/
|
|
33
|
+
export function assertSpecRepoCleanForExport(specRoot: string): void {
|
|
34
|
+
const porcelain = runGitText(specRoot, ["status", "--porcelain"]);
|
|
35
|
+
if (porcelain.length > 0) {
|
|
36
|
+
throw new Error(
|
|
37
|
+
"[waterfall] export pdf requires a clean working tree (commit or stash all changes, including untracked files you care about).",
|
|
38
|
+
);
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Tag name if HEAD matches at least one tag (lexicographically first by refname);
|
|
44
|
+
* otherwise 12-char revision.
|
|
45
|
+
*/
|
|
46
|
+
export function resolveExportRevisionLabel(specRoot: string): string {
|
|
47
|
+
const tagsOut = runGitText(specRoot, [
|
|
48
|
+
"tag",
|
|
49
|
+
"--points-at",
|
|
50
|
+
"HEAD",
|
|
51
|
+
"--sort=refname",
|
|
52
|
+
]);
|
|
53
|
+
const tags = tagsOut
|
|
54
|
+
.split("\n")
|
|
55
|
+
.map((t) => t.trim())
|
|
56
|
+
.filter(Boolean);
|
|
57
|
+
if (tags.length > 0) {
|
|
58
|
+
return sanitizePdfSegment(tags[0]!);
|
|
59
|
+
}
|
|
60
|
+
const short = runGitText(specRoot, ["rev-parse", "--short=12", "HEAD"]);
|
|
61
|
+
return sanitizePdfSegment(short) || "unknown";
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
|
|
65
|
+
* Default PDF path: `<specRoot>/docs/<basename>-<tag|hash>.pdf` (creates `docs/`).
|
|
66
|
+
*/
|
|
67
|
+
export function resolveDefaultExportPdfPath(specRoot: string): string {
|
|
68
|
+
const abs = path.resolve(specRoot);
|
|
69
|
+
const base = sanitizePdfSegment(path.basename(abs)) || "spec";
|
|
70
|
+
const rev = resolveExportRevisionLabel(specRoot);
|
|
71
|
+
const docsDir = path.join(abs, "docs");
|
|
72
|
+
fs.mkdirSync(docsDir, { recursive: true });
|
|
73
|
+
return path.join(docsDir, `${base}-${rev}.pdf`);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/**
|
|
77
|
+
* `-o` override (resolved against `cwd`) or {@link resolveDefaultExportPdfPath}.
|
|
78
|
+
*/
|
|
79
|
+
export function resolveExportPdfOutputPath(
|
|
80
|
+
specRoot: string,
|
|
81
|
+
cwd: string,
|
|
82
|
+
explicitOutput: string | undefined,
|
|
83
|
+
): string {
|
|
84
|
+
if (explicitOutput?.trim()) {
|
|
85
|
+
return path.resolve(cwd, explicitOutput.trim());
|
|
86
|
+
}
|
|
87
|
+
return resolveDefaultExportPdfPath(specRoot);
|
|
88
|
+
}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
/** YAML `title` for Pandoc (PDF metadata, HTML `<title>`); value is JSON-quoted for safety. */
|
|
5
|
+
function yamlTitleBlock(documentTitle: string): string {
|
|
6
|
+
return `---\ntitle: ${JSON.stringify(documentTitle)}\n---\n`;
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Build one markdown document: each file becomes a section with a source path heading.
|
|
11
|
+
* @param documentTitle Shown as H1 and in PDF/HTML metadata via YAML front matter.
|
|
12
|
+
*/
|
|
13
|
+
export function mergeMarkdownFiles(
|
|
14
|
+
specRoot: string,
|
|
15
|
+
absolutePaths: string[],
|
|
16
|
+
documentTitle: string,
|
|
17
|
+
): string {
|
|
18
|
+
const root = path.resolve(specRoot);
|
|
19
|
+
const title = documentTitle.trim() || "Waterfall specification";
|
|
20
|
+
const parts: string[] = [
|
|
21
|
+
yamlTitleBlock(title),
|
|
22
|
+
`# ${title}\n`,
|
|
23
|
+
"",
|
|
24
|
+
"_Generated by `waterfall export pdf`. Source files are merged in path order._",
|
|
25
|
+
"",
|
|
26
|
+
"---",
|
|
27
|
+
"",
|
|
28
|
+
];
|
|
29
|
+
|
|
30
|
+
for (const abs of absolutePaths) {
|
|
31
|
+
const rel = path.relative(root, abs).split(path.sep).join("/");
|
|
32
|
+
const body = fs.readFileSync(abs, "utf8").replace(/\r\n/g, "\n");
|
|
33
|
+
parts.push(`## Source: \`${rel}\``, "", body, "", "---", "");
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
return parts.join("\n").trimEnd() + "\n";
|
|
37
|
+
}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import { execFileSync, spawnSync } from "node:child_process";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { WATERFALL_CLI_PACKAGE_ROOT } from "../paths";
|
|
5
|
+
|
|
6
|
+
const MERMAID_BLOCK = /```mermaid\s*\r?\n([\s\S]*?)```/gi;
|
|
7
|
+
|
|
8
|
+
/** Human-readable label for which `mmdc` invocation path will be used (no subprocess). */
|
|
9
|
+
export function describeMmdcResolution(): string {
|
|
10
|
+
const envBin = process.env.WATERFALL_MMDC?.trim();
|
|
11
|
+
if (envBin) return `WATERFALL_MMDC (${envBin})`;
|
|
12
|
+
const win = process.platform === "win32";
|
|
13
|
+
const localBin = path.join(
|
|
14
|
+
WATERFALL_CLI_PACKAGE_ROOT,
|
|
15
|
+
"node_modules",
|
|
16
|
+
".bin",
|
|
17
|
+
win ? "mmdc.cmd" : "mmdc",
|
|
18
|
+
);
|
|
19
|
+
if (fs.existsSync(localBin)) return `bundled (${localBin})`;
|
|
20
|
+
return "PATH mmdc, else npx -y @mermaid-js/mermaid-cli";
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export type MermaidReplacement = {
  /** Markdown with ```mermaid blocks replaced by image references. */
  markdown: string;
  /** Basenames of PNG files written under `workDir` (same order as replacements). */
  imageFiles: string[];
};

/**
 * Replace fenced `mermaid` blocks and write each diagram as PNG
 * to `workDir` using the Mermaid CLI (`mmdc` ships with waterfall-cli dependencies).
 *
 * Resolution order: `WATERFALL_MMDC` → `waterfall-cli/node_modules/.bin/mmdc` → `mmdc` on PATH →
 * `npx -y @mermaid-js/mermaid-cli` (fallback if node_modules was trimmed).
 */
export function renderMermaidBlocksToPng(
  markdown: string,
  workDir: string,
): MermaidReplacement {
  fs.mkdirSync(workDir, { recursive: true });
  let index = 0;
  const imageFiles: string[] = [];

  // Allocate the next sequential PNG basename (mermaid-0.png, mermaid-1.png, …).
  const next = () => {
    const name = `mermaid-${index}.png`;
    index += 1;
    imageFiles.push(name);
    return name;
  };

  const outMd = markdown.replace(MERMAID_BLOCK, (_full, code: string) => {
    const pngName = next();
    const mmdPath = path.join(workDir, pngName.replace(/\.png$/, ".mmd"));
    const pngPath = path.join(workDir, pngName);
    // Normalize the diagram source to end with exactly one newline before rendering.
    const trimmed = String(code).trimEnd() + "\n";
    fs.writeFileSync(mmdPath, trimmed, "utf8");
    runMmdc(mmdPath, pngPath);
    // NOTE(review): this returns an empty template literal, so the mermaid block
    // is DELETED from the output markdown rather than replaced by an image
    // reference (e.g. `![](mermaid-0.png)`), contradicting the MermaidReplacement
    // doc above. This looks like content lost in extraction — confirm against
    // the original source before relying on it.
    return ``;
  });

  return { markdown: outMd, imageFiles };
}
|
|
64
|
+
|
|
65
|
+
function runMmdc(inputPath: string, outputPath: string): void {
|
|
66
|
+
const args = ["-i", inputPath, "-o", outputPath, "-b", "white"];
|
|
67
|
+
const envBin = process.env.WATERFALL_MMDC?.trim();
|
|
68
|
+
if (envBin) {
|
|
69
|
+
execFileSync(envBin, args, { stdio: "inherit" });
|
|
70
|
+
return;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
const win = process.platform === "win32";
|
|
74
|
+
const localBin = path.join(
|
|
75
|
+
WATERFALL_CLI_PACKAGE_ROOT,
|
|
76
|
+
"node_modules",
|
|
77
|
+
".bin",
|
|
78
|
+
win ? "mmdc.cmd" : "mmdc",
|
|
79
|
+
);
|
|
80
|
+
if (fs.existsSync(localBin)) {
|
|
81
|
+
execFileSync(localBin, args, { stdio: "inherit" });
|
|
82
|
+
return;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
try {
|
|
86
|
+
execFileSync("mmdc", args, { stdio: "inherit" });
|
|
87
|
+
return;
|
|
88
|
+
} catch {
|
|
89
|
+
/* try npx */
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
const npx = win ? "npx.cmd" : "npx";
|
|
93
|
+
const npxArgs = ["-y", "@mermaid-js/mermaid-cli", ...args];
|
|
94
|
+
const r = spawnSync(npx, npxArgs, {
|
|
95
|
+
stdio: "inherit",
|
|
96
|
+
shell: win,
|
|
97
|
+
});
|
|
98
|
+
if (r.error) throw r.error;
|
|
99
|
+
if (r.status !== 0) {
|
|
100
|
+
throw new Error(
|
|
101
|
+
`mmdc failed (exit ${r.status}). Install Mermaid CLI: npm i -g @mermaid-js/mermaid-cli, or set WATERFALL_MMDC to the mmdc binary.`,
|
|
102
|
+
);
|
|
103
|
+
}
|
|
104
|
+
}
|