akm-cli 0.6.0-rc1 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +33 -0
- package/README.md +9 -9
- package/dist/cli.js +199 -114
- package/dist/{completions.js → commands/completions.js} +1 -1
- package/dist/{config-cli.js → commands/config-cli.js} +109 -11
- package/dist/{curate.js → commands/curate.js} +8 -3
- package/dist/{info.js → commands/info.js} +15 -9
- package/dist/{init.js → commands/init.js} +4 -4
- package/dist/{install-audit.js → commands/install-audit.js} +4 -7
- package/dist/{installed-stashes.js → commands/installed-stashes.js} +77 -31
- package/dist/{migration-help.js → commands/migration-help.js} +2 -2
- package/dist/{registry-search.js → commands/registry-search.js} +8 -6
- package/dist/{remember.js → commands/remember.js} +55 -49
- package/dist/{stash-search.js → commands/search.js} +28 -69
- package/dist/{self-update.js → commands/self-update.js} +69 -3
- package/dist/{stash-show.js → commands/show.js} +104 -84
- package/dist/{stash-add.js → commands/source-add.js} +42 -32
- package/dist/{stash-clone.js → commands/source-clone.js} +12 -10
- package/dist/{stash-source-manage.js → commands/source-manage.js} +24 -24
- package/dist/{vault.js → commands/vault.js} +43 -0
- package/dist/{stash-ref.js → core/asset-ref.js} +4 -4
- package/dist/{asset-registry.js → core/asset-registry.js} +1 -1
- package/dist/{asset-spec.js → core/asset-spec.js} +1 -1
- package/dist/{config.js → core/config.js} +133 -56
- package/dist/core/errors.js +90 -0
- package/dist/{frontmatter.js → core/frontmatter.js} +5 -3
- package/dist/core/write-source.js +280 -0
- package/dist/{db-search.js → indexer/db-search.js} +25 -19
- package/dist/{db.js → indexer/db.js} +79 -47
- package/dist/{file-context.js → indexer/file-context.js} +3 -3
- package/dist/{indexer.js → indexer/indexer.js} +132 -33
- package/dist/{manifest.js → indexer/manifest.js} +10 -10
- package/dist/{matchers.js → indexer/matchers.js} +3 -6
- package/dist/{metadata.js → indexer/metadata.js} +9 -5
- package/dist/{search-source.js → indexer/search-source.js} +52 -41
- package/dist/{semantic-status.js → indexer/semantic-status.js} +2 -2
- package/dist/{walker.js → indexer/walker.js} +1 -1
- package/dist/{lockfile.js → integrations/lockfile.js} +1 -1
- package/dist/{llm-client.js → llm/client.js} +1 -1
- package/dist/{embedders → llm/embedders}/local.js +2 -2
- package/dist/{embedders → llm/embedders}/remote.js +1 -1
- package/dist/{embedders → llm/embedders}/types.js +1 -1
- package/dist/{metadata-enhance.js → llm/metadata-enhance.js} +2 -2
- package/dist/{cli-hints.js → output/cli-hints.js} +3 -0
- package/dist/{output-context.js → output/context.js} +21 -3
- package/dist/{renderers.js → output/renderers.js} +9 -65
- package/dist/{output-shapes.js → output/shapes.js} +18 -4
- package/dist/{output-text.js → output/text.js} +2 -2
- package/dist/{registry-build-index.js → registry/build-index.js} +16 -7
- package/dist/{create-provider-registry.js → registry/create-provider-registry.js} +6 -2
- package/dist/registry/factory.js +33 -0
- package/dist/{origin-resolve.js → registry/origin-resolve.js} +1 -1
- package/dist/{providers → registry/providers}/index.js +1 -1
- package/dist/{providers → registry/providers}/skills-sh.js +59 -3
- package/dist/{providers → registry/providers}/static-index.js +80 -12
- package/dist/registry/providers/types.js +25 -0
- package/dist/{registry-resolve.js → registry/resolve.js} +3 -3
- package/dist/{detect.js → setup/detect.js} +0 -27
- package/dist/{ripgrep-install.js → setup/ripgrep-install.js} +1 -1
- package/dist/{ripgrep-resolve.js → setup/ripgrep-resolve.js} +2 -2
- package/dist/{setup.js → setup/setup.js} +16 -56
- package/dist/{stash-include.js → sources/include.js} +1 -1
- package/dist/sources/provider-factory.js +36 -0
- package/dist/sources/provider.js +21 -0
- package/dist/sources/providers/filesystem.js +35 -0
- package/dist/{stash-providers → sources/providers}/git.js +53 -64
- package/dist/{stash-providers → sources/providers}/index.js +3 -4
- package/dist/sources/providers/install-types.js +14 -0
- package/dist/{stash-providers → sources/providers}/npm.js +42 -41
- package/dist/{stash-providers → sources/providers}/provider-utils.js +3 -3
- package/dist/{stash-providers → sources/providers}/sync-from-ref.js +2 -2
- package/dist/{stash-providers → sources/providers}/tar-utils.js +11 -8
- package/dist/{stash-providers → sources/providers}/website.js +29 -65
- package/dist/{stash-resolve.js → sources/resolve.js} +8 -7
- package/dist/{wiki.js → wiki/wiki.js} +34 -18
- package/dist/{workflow-authoring.js → workflows/authoring.js} +37 -14
- package/dist/{workflow-cli.js → workflows/cli.js} +2 -1
- package/dist/{workflow-db.js → workflows/db.js} +1 -1
- package/dist/workflows/document-cache.js +20 -0
- package/dist/workflows/parser.js +379 -0
- package/dist/workflows/renderer.js +78 -0
- package/dist/{workflow-runs.js → workflows/runs.js} +72 -28
- package/dist/workflows/schema.js +11 -0
- package/dist/workflows/validator.js +48 -0
- package/docs/migration/release-notes/0.6.0.md +91 -23
- package/package.json +1 -1
- package/dist/errors.js +0 -45
- package/dist/llm.js +0 -16
- package/dist/registry-factory.js +0 -19
- package/dist/ripgrep.js +0 -2
- package/dist/stash-provider-factory.js +0 -35
- package/dist/stash-provider.js +0 -3
- package/dist/stash-providers/filesystem.js +0 -71
- package/dist/stash-providers/openviking.js +0 -348
- package/dist/stash-types.js +0 -1
- package/dist/workflow-markdown.js +0 -260
- /package/dist/{common.js → core/common.js} +0 -0
- /package/dist/{markdown.js → core/markdown.js} +0 -0
- /package/dist/{paths.js → core/paths.js} +0 -0
- /package/dist/{warn.js → core/warn.js} +0 -0
- /package/dist/{search-fields.js → indexer/search-fields.js} +0 -0
- /package/dist/{usage-events.js → indexer/usage-events.js} +0 -0
- /package/dist/{github.js → integrations/github.js} +0 -0
- /package/dist/{embedder.js → llm/embedder.js} +0 -0
- /package/dist/{embedders → llm/embedders}/cache.js +0 -0
- /package/dist/{registry-provider.js → registry/types.js} +0 -0
- /package/dist/{setup-steps.js → setup/steps.js} +0 -0
- /package/dist/{registry-types.js → sources/types.js} +0 -0
|
@@ -0,0 +1,379 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Workflow markdown → WorkflowDocument JSON.
|
|
3
|
+
*
|
|
4
|
+
* Composition over invention: frontmatter is parsed with the `yaml` package,
|
|
5
|
+
* heading discovery with `parseMarkdownToc`, and section bodies with
|
|
6
|
+
* `extractLineRange` — all already in the codebase. The parser walks the
|
|
7
|
+
* heading list once to assemble a `WorkflowDocument` with `SourceRef`
|
|
8
|
+
* line spans, accumulating `WorkflowError`s rather than throwing.
|
|
9
|
+
*/
|
|
10
|
+
import { parse as yamlParse } from "yaml";
|
|
11
|
+
import { parseFrontmatterBlock } from "../core/frontmatter";
|
|
12
|
+
import { parseMarkdownToc } from "../core/markdown";
|
|
13
|
+
import { WORKFLOW_SCHEMA_VERSION, } from "./schema";
|
|
14
|
+
import { runSemanticChecks } from "./validator";
|
|
15
|
+
// Structural markers of the workflow markdown dialect.
const WORKFLOW_TITLE_PREFIX = "Workflow:"; // "# Workflow: <title>"
const STEP_PREFIX = "Step:"; // "## Step: <title>"
// "Step ID: <id>" — id captured without surrounding whitespace.
const STEP_ID_LINE = /^Step ID:\s+(.+?)\s*$/;
// "- item" or "* item" bullets inside Completion Criteria.
const BULLET_LINE = /^[-*]\s+(.+)$/;
// Recognized level-3 subsection names under a step.
const SUBSECTION_INSTRUCTIONS = "Instructions";
const SUBSECTION_COMPLETION_CRITERIA = "Completion Criteria";
|
|
21
|
+
/**
 * Cheap structural probe used by the matcher (src/indexer/matchers.ts).
 * Returns true only when the body carries every signature marker of a
 * workflow file, so the matcher and parser cannot drift apart.
 */
export function looksLikeWorkflow(body) {
    const signatures = [
        /^#\s+Workflow:\s+/m,
        /^##\s+Step:\s+/m,
        /^Step ID:\s+/m,
        /^###\s+Instructions\s*$/m,
    ];
    return signatures.every((re) => re.test(body));
}
|
|
32
|
+
/**
 * Parse workflow markdown into a validated WorkflowDocument.
 * Returns { ok: true, document } or { ok: false, errors } with errors
 * sorted by line; problems are accumulated, never thrown.
 */
export function parseWorkflow(markdown, source) {
    const issues = [];
    const { path } = source;
    const allLines = markdown.split(/\r?\n/);
    const lineCount = allLines.length;

    // Frontmatter (optional YAML block) feeds description/tags/params.
    const fmBlock = parseFrontmatterBlock(markdown);
    const fmEnd = fmBlock ? Math.max(1, fmBlock.bodyStartLine - 1) : 1;
    const fmData = readFrontmatter(fmBlock?.frontmatter, issues);
    const description = readDescription(fmData);
    const tags = readTags(fmData, issues, fmEnd);
    const parameters = readParameters(fmData, issues, fmEnd, path);

    // Heading list drives the structural walk.
    const toc = parseMarkdownToc(markdown);
    const { title, titleLine } = extractTitle(toc.headings, issues);

    // Disallow stray level-1 and non-Step level-2 headings.
    toc.headings.forEach((h) => {
        if (h.level === 1 && !h.text.startsWith(WORKFLOW_TITLE_PREFIX)) {
            issues.push({
                line: h.line,
                message: `Unexpected top-level heading "# ${h.text}" on line ${h.line}. A workflow file may only contain one "# Workflow: <title>" heading.`,
            });
        }
        if (h.level === 2 && !h.text.startsWith(STEP_PREFIX)) {
            issues.push({
                line: h.line,
                message: `Unexpected level-2 heading "## ${h.text}" on line ${h.line}. Only "## Step: <title>" sections are allowed.`,
            });
        }
    });

    const steps = extractSteps(toc.headings, allLines, lineCount, path, issues);
    // Only complain about zero steps when a title heading exists; otherwise
    // the missing-title error already covers the file.
    if (steps.length === 0 && titleLine > 0) {
        issues.push({
            line: titleLine,
            message: `Workflow has no "## Step: <title>" sections. Add at least one step.`,
        });
    }

    const draft = {
        schemaVersion: WORKFLOW_SCHEMA_VERSION,
        title,
        ...(description ? { description } : {}),
        ...(tags ? { tags } : {}),
        ...(parameters ? { parameters } : {}),
        steps,
        source: { path, lineCount },
    };
    runSemanticChecks(draft, fmData, fmEnd, issues);

    return issues.length > 0
        ? { ok: false, errors: sortErrors(issues) }
        : { ok: true, document: draft };
}
|
|
82
|
+
// ── Title ───────────────────────────────────────────────────────────────────
|
|
83
|
+
// Locate the single required "# Workflow: <title>" heading.
// Missing, duplicated, or untitled headings are reported as errors;
// always returns { title, titleLine } so downstream checks can proceed
// (titleLine is 0 when no heading was found).
function extractTitle(headings, errors) {
    const candidates = headings.filter(
        (h) => h.level === 1 && h.text.startsWith(WORKFLOW_TITLE_PREFIX)
    );
    if (candidates.length === 0) {
        errors.push({
            line: 1,
            message: `Workflow markdown must start with a "# Workflow: <title>" heading. Add one at the top of the file.`,
        });
        return { title: "", titleLine: 0 };
    }
    // Every heading after the first is a duplicate.
    for (const extra of candidates.slice(1)) {
        errors.push({
            line: extra.line,
            message: `Found a second "# Workflow:" heading on line ${extra.line}. A workflow file must contain exactly one.`,
        });
    }
    const [first] = candidates;
    const title = first.text.slice(WORKFLOW_TITLE_PREFIX.length).trim();
    if (!title) {
        errors.push({
            line: first.line,
            message: `The "# Workflow:" heading on line ${first.line} is missing a title. Use "# Workflow: <title>".`,
        });
    }
    return { title, titleLine: first.line };
}
|
|
110
|
+
// ── Steps ───────────────────────────────────────────────────────────────────
|
|
111
|
+
// Walk the level-2 "Step:" headings and assemble step records.
// Steps with fatal problems (missing title, id, or Instructions) are
// skipped after recording an error; surviving steps receive consecutive
// sequence indexes.
function extractSteps(headings, lines, totalLines, path, errors) {
    const steps = [];
    let nextIndex = 0;
    headings.forEach((heading, i) => {
        if (heading.level !== 2 || !heading.text.startsWith(STEP_PREFIX))
            return;
        const stepTitle = heading.text.slice(STEP_PREFIX.length).trim();
        if (!stepTitle) {
            errors.push({
                line: heading.line,
                message: `The "## Step:" heading on line ${heading.line} is missing a title. Use "## Step: <title>".`,
            });
            return;
        }
        const stepEnd = Math.min(findNextSiblingOrParentLine(headings, i, 2) - 1, totalLines);
        const subsections = collectSubsections(headings, i, stepEnd);
        // "Step ID:" must sit between the heading and the first subsection.
        const idScanEnd = subsections.length > 0 ? subsections[0].headingLine - 1 : stepEnd;
        const stepId = scanStepId(lines, heading.line + 1, idScanEnd, stepTitle, errors);
        const { instructions, completionCriteria } = collectStepBody(subsections, lines, path, stepTitle, errors);
        if (!stepId)
            return; // scanStepId already pushed the missing-id error
        if (!instructions) {
            errors.push({
                line: heading.line,
                message: `Step "${stepTitle}" is missing the required "### Instructions" section. Add one under the step.`,
            });
            return;
        }
        steps.push({
            id: stepId,
            title: stepTitle,
            sequenceIndex: nextIndex++,
            instructions,
            ...(completionCriteria ? { completionCriteria } : {}),
            source: { path, start: heading.line, end: stepEnd },
        });
    });
    return steps;
}
|
|
152
|
+
// Gather the level-3 subsections under the step heading at `stepIndex`.
// Scanning stops at the next heading of level <= 2; deeper headings (h4+)
// are passed over. Each record carries the subsection's body line span,
// clamped to `stepEnd`.
function collectSubsections(headings, stepIndex, stepEnd) {
    const found = [];
    let j = stepIndex + 1;
    while (j < headings.length && headings[j].level > 2) {
        const current = headings[j];
        if (current.level === 3) {
            const following = headings[j + 1];
            const tentativeEnd = following ? following.line - 1 : stepEnd;
            found.push({
                name: current.text,
                headingLine: current.line,
                bodyStart: current.line + 1,
                bodyEnd: Math.min(tentativeEnd, stepEnd),
            });
        }
        j += 1;
    }
    return found;
}
|
|
171
|
+
// Extract the recognized subsections of one step. Duplicate, empty, and
// unknown sections are reported as errors; the first valid occurrence of
// each recognized section wins.
function collectStepBody(subsections, lines, path, stepTitle, errors) {
    let instructions;
    let completionCriteria;
    for (const sub of subsections) {
        switch (sub.name) {
            case SUBSECTION_INSTRUCTIONS: {
                if (instructions) {
                    errors.push({
                        line: sub.headingLine,
                        message: `Step "${stepTitle}" has more than one "### Instructions" section (line ${sub.headingLine}). Keep only one.`,
                    });
                    break;
                }
                const text = sliceLines(lines, sub.bodyStart, sub.bodyEnd).trim();
                if (!text) {
                    errors.push({
                        line: sub.headingLine,
                        message: `Step "${stepTitle}" has an empty "### Instructions" section. Add the instructions text below the heading.`,
                    });
                    break;
                }
                instructions = {
                    text,
                    source: { path, start: sub.bodyStart, end: sub.bodyEnd },
                };
                break;
            }
            case SUBSECTION_COMPLETION_CRITERIA: {
                if (completionCriteria) {
                    errors.push({
                        line: sub.headingLine,
                        message: `Step "${stepTitle}" has more than one "### Completion Criteria" section (line ${sub.headingLine}). Keep only one.`,
                    });
                    break;
                }
                const items = collectBullets(lines, sub.bodyStart, sub.bodyEnd, path);
                if (items.length === 0) {
                    errors.push({
                        line: sub.headingLine,
                        message: `Step "${stepTitle}" has an empty "### Completion Criteria" section. Add at least one "- criterion" bullet.`,
                    });
                    break;
                }
                completionCriteria = items;
                break;
            }
            default:
                errors.push({
                    line: sub.headingLine,
                    message: `Step "${stepTitle}" has an unknown "### ${sub.name}" section. Only "### Instructions" and "### Completion Criteria" are supported.`,
                });
        }
    }
    return {
        ...(instructions ? { instructions } : {}),
        ...(completionCriteria ? { completionCriteria } : {}),
    };
}
|
|
226
|
+
// Find the single "Step ID: <id>" line within [start, end] (1-based,
// inclusive). Reports duplicates (keeping the first) and a missing id;
// returns the id string or undefined when none was found.
function scanStepId(lines, startLineInclusive, endLineInclusive, stepTitle, errors) {
    let id;
    let idLine = -1;
    for (let n = startLineInclusive; n <= endLineInclusive; n++) {
        const text = (lines[n - 1] ?? "").trim();
        const m = text ? text.match(STEP_ID_LINE) : null;
        if (!m)
            continue;
        if (id !== undefined) {
            errors.push({
                line: n,
                message: `Step "${stepTitle}" has more than one "Step ID:" line (first on line ${idLine}). Keep only one.`,
            });
            continue;
        }
        id = m[1].trim();
        idLine = n;
    }
    if (!id) {
        errors.push({
            line: startLineInclusive,
            message: `Step "${stepTitle}" is missing a "Step ID: <id>" line. Add one between the step heading and its subsections.`,
        });
    }
    return id;
}
|
|
254
|
+
// Collect "- item" / "* item" bullet lines within [start, end] (1-based,
// inclusive); each bullet is tagged with its own single-line SourceRef.
function collectBullets(lines, startLineInclusive, endLineInclusive, path) {
    const bullets = [];
    for (let n = startLineInclusive; n <= endLineInclusive; n++) {
        const raw = (lines[n - 1] ?? "").trim();
        if (!raw)
            continue;
        const m = raw.match(BULLET_LINE);
        if (m) {
            bullets.push({
                text: m[1].trim(),
                source: { path, start: n, end: n },
            });
        }
    }
    return bullets;
}
|
|
270
|
+
// Line of the next heading at `level` or shallower after `fromIndex`;
// MAX_SAFE_INTEGER when the section runs to the end of the file.
function findNextSiblingOrParentLine(headings, fromIndex, level) {
    const boundary = headings
        .slice(fromIndex + 1)
        .find((h) => h.level <= level);
    return boundary ? boundary.line : Number.MAX_SAFE_INTEGER;
}
|
|
277
|
+
// Join lines [start, end] (1-based, inclusive) with "\n", clamping the
// span to the array bounds. An inverted span yields "".
function sliceLines(lines, startLineInclusive, endLineInclusive) {
    if (endLineInclusive < startLineInclusive)
        return "";
    const first = Math.max(1, startLineInclusive);
    const last = Math.min(endLineInclusive, lines.length);
    return lines.slice(first - 1, last).join("\n");
}
|
|
284
|
+
// ── Frontmatter ─────────────────────────────────────────────────────────────
|
|
285
|
+
// Parse raw frontmatter text into a plain mapping. YAML failures and
// non-mapping values are reported as errors and yield an empty object,
// so downstream readers always receive something they can index into.
function readFrontmatter(frontmatter, errors) {
    if (!frontmatter)
        return {};
    let data;
    try {
        data = yamlParse(frontmatter);
    }
    catch (err) {
        const detail = err instanceof Error ? err.message : String(err);
        errors.push({
            line: 1,
            message: `Workflow frontmatter is not valid YAML: ${detail}`,
        });
        return {};
    }
    if (data == null)
        return {}; // empty "--- ---" block parses to null
    if (typeof data !== "object" || Array.isArray(data)) {
        errors.push({
            line: 1,
            message: `Workflow frontmatter must be a YAML mapping (key: value pairs). Use "key: value" lines between the --- markers.`,
        });
        return {};
    }
    return data;
}
|
|
311
|
+
// Optional frontmatter "description": a non-empty trimmed string,
// otherwise undefined (non-string values are silently ignored).
function readDescription(data) {
    const raw = data.description;
    if (typeof raw !== "string")
        return undefined;
    const text = raw.trim();
    return text.length > 0 ? text : undefined;
}
|
|
318
|
+
// Optional frontmatter "tags": accepts a single string (wrapped into a
// one-element list) or a list of non-empty strings; anything else is
// reported against the frontmatter block and dropped.
function readTags(data, errors, fmEndLine) {
    const raw = data.tags;
    if (raw === undefined || raw === null)
        return undefined;
    if (typeof raw === "string") {
        const single = raw.trim();
        return single ? [single] : undefined;
    }
    const isValidList = Array.isArray(raw) &&
        raw.every((tag) => typeof tag === "string" && tag.trim().length > 0);
    if (!isValidList) {
        errors.push({
            line: fmEndLine,
            message: `Workflow frontmatter "tags" must be a string or a list of non-empty strings.`,
        });
        return undefined;
    }
    return raw.map((tag) => tag.trim());
}
|
|
335
|
+
// Optional frontmatter "params": a mapping of parameter name -> description.
// Invalid shapes and invalid entries are reported; valid entries become
// parameter records anchored to the frontmatter block's line span.
function readParameters(data, errors, fmEndLine, path) {
    const raw = data.params;
    if (raw === undefined || raw === null)
        return undefined;
    if (typeof raw !== "object" || Array.isArray(raw)) {
        errors.push({
            line: fmEndLine,
            message: `Workflow frontmatter "params" must be a mapping of parameter names to descriptions.`,
        });
        return undefined;
    }
    const entries = Object.entries(raw);
    if (entries.length === 0)
        return undefined;
    const params = [];
    for (const [rawName, rawDesc] of entries) {
        const name = rawName.trim();
        if (!name) {
            errors.push({
                line: fmEndLine,
                message: `Workflow parameter names must be non-empty.`,
            });
            continue;
        }
        if (typeof rawDesc !== "string" || !rawDesc.trim()) {
            errors.push({
                line: fmEndLine,
                message: `Workflow parameter "${name}" must have a non-empty string description in frontmatter "params".`,
            });
            continue;
        }
        params.push({
            name,
            description: rawDesc.trim(),
            // The frontmatter parser doesn't track per-key line numbers; anchor to the
            // frontmatter block end so editors land somewhere sensible.
            source: { path, start: 1, end: fmEndLine },
        });
    }
    return params.length > 0 ? params : undefined;
}
|
|
376
|
+
// ── Error sorting ───────────────────────────────────────────────────────────
|
|
377
|
+
// Return a copy of `errors` ordered by ascending line; the input array
// is left untouched (sort mutates, so we copy first).
function sortErrors(errors) {
    return errors.slice().sort((a, b) => a.line - b.line);
}
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Show + indexing renderer for workflow assets.
|
|
3
|
+
*
|
|
4
|
+
* Reads the markdown via `parseWorkflow` and projects the validated
|
|
5
|
+
* `WorkflowDocument` down to the public `ShowResponse` shape (which still
|
|
6
|
+
* uses the flat `WorkflowStepDefinition` type for backwards compatibility)
|
|
7
|
+
* and into search hints for the indexer.
|
|
8
|
+
*/
|
|
9
|
+
import { makeAssetRef } from "../core/asset-ref";
|
|
10
|
+
import { UsageError } from "../core/errors";
|
|
11
|
+
import { cacheWorkflowDocument } from "./document-cache";
|
|
12
|
+
import { parseWorkflow } from "./parser";
|
|
13
|
+
// Single-quote `value` for POSIX shells; embedded single quotes are
// rendered as the standard '\'' escape sequence.
function shellQuote(value) {
    const escaped = value.replace(/'/g, `'\\''`);
    return `'${escaped}'`;
}
|
|
16
|
+
// One-line CLI hint telling the user how to run the workflow `ref`.
export function buildWorkflowAction(ref) {
    const quoted = shellQuote(ref);
    return `Resume the active run or start a new run with \`akm workflow next ${quoted}\`.`;
}
|
|
19
|
+
// Asset name for the workflow: the indexer-provided meta name when present,
// otherwise the relative path with its file extension stripped.
function deriveName(ctx) {
    const metaName = ctx.matchResult.meta?.name;
    if (typeof metaName === "string" && metaName)
        return metaName;
    // Only strip a dot that belongs to the final path segment — a dot in a
    // directory name (e.g. "v1.2/deploy") is not an extension. Dotfiles
    // (".hidden", "dir/.hidden") keep their full name.
    const lastSep = ctx.relPath.lastIndexOf("/");
    const lastDot = ctx.relPath.lastIndexOf(".");
    return lastDot > lastSep + 1 ? ctx.relPath.slice(0, lastDot) : ctx.relPath;
}
|
|
26
|
+
// Parse the asset's markdown and return the WorkflowDocument, surfacing
// parse failures as one UsageError with a "path:line — message" per error.
function loadDocument(ctx) {
    const result = parseWorkflow(ctx.content(), { path: ctx.relPath });
    if (!result.ok) {
        const summary = result.errors
            .map((e) => `${ctx.relPath}:${e.line} — ${e.message}`)
            .join("\n");
        throw new UsageError(`Workflow has errors:\n${summary}`);
    }
    return result.document;
}
|
|
33
|
+
/**
 * Renderer registration for workflow markdown assets: projects a parsed
 * WorkflowDocument into the public ShowResponse shape (flat step
 * definitions, kept for backwards compatibility) and into indexer search
 * hints, caching the document alongside the index entry.
 */
export const workflowMdRenderer = {
    name: "workflow-md",
    buildShowResponse(ctx) {
        const name = deriveName(ctx);
        const doc = loadDocument(ctx);
        const ref = makeAssetRef("workflow", name, ctx.origin);
        const steps = doc.steps.map((step) => ({
            id: step.id,
            title: step.title,
            instructions: step.instructions.text,
            ...(step.completionCriteria
                ? { completionCriteria: step.completionCriteria.map((c) => c.text) }
                : {}),
            sequenceIndex: step.sequenceIndex,
        }));
        return {
            type: "workflow",
            name,
            path: ctx.absPath,
            action: buildWorkflowAction(ref),
            description: doc.description,
            workflowTitle: doc.title,
            parameters: doc.parameters?.map((p) => p.name),
            workflowParameters: doc.parameters?.map((p) => ({
                name: p.name,
                description: p.description,
            })),
            steps,
        };
    },
    extractMetadata(entry, ctx) {
        const doc = loadDocument(ctx);
        // Fold title, step titles/ids/instructions and criteria into the
        // entry's search hints (deduplicated, falsy values dropped).
        const hints = new Set(entry.searchHints ?? []);
        hints.add(doc.title);
        for (const step of doc.steps) {
            hints.add(step.title);
            hints.add(step.id);
            hints.add(step.instructions.text);
            (step.completionCriteria ?? []).forEach((c) => hints.add(c.text));
        }
        entry.searchHints = [...hints].filter(Boolean);
        if (doc.parameters?.length) {
            entry.parameters = doc.parameters.map((p) => ({
                name: p.name,
                ...(p.description ? { description: p.description } : {}),
            }));
        }
        cacheWorkflowDocument(entry, doc);
    },
};
|
|
@@ -1,15 +1,16 @@
|
|
|
1
1
|
import { randomUUID } from "node:crypto";
|
|
2
2
|
import fs from "node:fs";
|
|
3
|
-
import {
|
|
4
|
-
import {
|
|
5
|
-
import { NotFoundError, UsageError } from "
|
|
6
|
-
import {
|
|
7
|
-
import {
|
|
8
|
-
import {
|
|
9
|
-
import {
|
|
10
|
-
import { resolveAssetPath } from "
|
|
11
|
-
import {
|
|
12
|
-
import {
|
|
3
|
+
import { parseAssetRef } from "../core/asset-ref";
|
|
4
|
+
import { loadConfig } from "../core/config";
|
|
5
|
+
import { NotFoundError, UsageError } from "../core/errors";
|
|
6
|
+
import { getDbPath } from "../core/paths";
|
|
7
|
+
import { closeDatabase, openDatabase } from "../indexer/db";
|
|
8
|
+
import { resolveSourceEntries } from "../indexer/search-source";
|
|
9
|
+
import { resolveSourcesForOrigin } from "../registry/origin-resolve";
|
|
10
|
+
import { resolveAssetPath } from "../sources/resolve";
|
|
11
|
+
import { formatWorkflowErrors } from "./authoring";
|
|
12
|
+
import { closeWorkflowDatabase, openWorkflowDatabase } from "./db";
|
|
13
|
+
import { parseWorkflow } from "./parser";
|
|
13
14
|
export async function startWorkflowRun(ref, params = {}) {
|
|
14
15
|
const asset = await loadWorkflowAsset(ref);
|
|
15
16
|
const workflowDb = openWorkflowDatabase();
|
|
@@ -209,7 +210,7 @@ async function loadWorkflowAsset(ref) {
|
|
|
209
210
|
throw new UsageError(`Expected a workflow ref (workflow:<name>), got "${ref}".`);
|
|
210
211
|
}
|
|
211
212
|
const config = loadConfig();
|
|
212
|
-
const allSources =
|
|
213
|
+
const allSources = resolveSourceEntries(undefined, config);
|
|
213
214
|
const searchSources = resolveSourcesForOrigin(parsed.origin, allSources);
|
|
214
215
|
let assetPath;
|
|
215
216
|
let sourcePath;
|
|
@@ -226,15 +227,69 @@ async function loadWorkflowAsset(ref) {
|
|
|
226
227
|
if (!assetPath) {
|
|
227
228
|
throw new NotFoundError(`Workflow not found for ref: workflow:${parsed.name}`);
|
|
228
229
|
}
|
|
230
|
+
const resolvedSourcePath = sourcePath ?? loadConfig().stashDir ?? assetPath;
|
|
231
|
+
const fullRef = `${parsed.origin ? `${parsed.origin}//` : ""}workflow:${parsed.name}`;
|
|
232
|
+
const cached = readWorkflowDocumentFromIndex(resolvedSourcePath, fullRef);
|
|
233
|
+
const document = cached ?? loadWorkflowDocumentFromDisk(assetPath);
|
|
234
|
+
return projectAsset(document, fullRef, assetPath, resolvedSourcePath);
|
|
235
|
+
}
|
|
236
|
+
// Read and parse the workflow markdown at `assetPath`, throwing a
// UsageError with the formatted error list when the file fails validation.
function loadWorkflowDocumentFromDisk(assetPath) {
    const content = fs.readFileSync(assetPath, "utf8");
    const result = parseWorkflow(content, { path: assetPath });
    if (result.ok)
        return result.document;
    throw new UsageError(formatWorkflowErrors(assetPath, result.errors));
}
|
|
244
|
+
// Best-effort lookup of the cached WorkflowDocument in index.db. Returns
// null when the index is absent, the entry is missing, or the cached JSON
// is corrupt — callers fall back to parsing the markdown from disk.
function readWorkflowDocumentFromIndex(sourcePath, ref) {
    const dbPath = getDbPath();
    if (!fs.existsSync(dbPath))
        return null;
    const db = openDatabase(dbPath);
    try {
        const parsed = parseAssetRef(ref);
        const entryKey = `${sourcePath}:${parsed.type}:${parsed.name}`;
        const row = db
            .prepare(`SELECT wd.document_json AS document_json
              FROM workflow_documents wd
              JOIN entries e ON e.id = wd.entry_id
              WHERE e.entry_type = 'workflow' AND e.entry_key = ?
              LIMIT 1`)
            .get(entryKey);
        if (!row)
            return null;
        try {
            return JSON.parse(row.document_json);
        }
        catch {
            // Corrupt cache entry — treat as a miss rather than failing the run.
            return null;
        }
    }
    finally {
        // Always release the handle, even when the query throws.
        closeDatabase(db);
    }
}
|
|
272
|
+
// Flatten a WorkflowDocument into the run engine's asset shape: optional
// parameter records plus step definitions with plain-text fields.
function projectAsset(doc, ref, assetPath, sourcePath) {
    const steps = doc.steps.map((step) => ({
        id: step.id,
        title: step.title,
        instructions: step.instructions.text,
        ...(step.completionCriteria
            ? { completionCriteria: step.completionCriteria.map((c) => c.text) }
            : {}),
        sequenceIndex: step.sequenceIndex,
    }));
    const parameters = doc.parameters?.map((p) => ({
        name: p.name,
        ...(p.description ? { description: p.description } : {}),
    }));
    return {
        ref,
        path: assetPath,
        sourcePath,
        title: doc.title,
        ...(parameters ? { parameters } : {}),
        steps,
    };
}
|
|
240
295
|
function resolveWorkflowEntryId(sourcePath, ref) {
|
|
@@ -361,14 +416,3 @@ function parseJsonArray(value) {
|
|
|
361
416
|
}
|
|
362
417
|
return undefined;
|
|
363
418
|
}
|
|
364
|
-
function parseWorkflowDocument(content) {
|
|
365
|
-
try {
|
|
366
|
-
return parseWorkflowMarkdown(content);
|
|
367
|
-
}
|
|
368
|
-
catch (error) {
|
|
369
|
-
if (error instanceof WorkflowValidationError) {
|
|
370
|
-
throw new UsageError(error.message);
|
|
371
|
-
}
|
|
372
|
-
throw error;
|
|
373
|
-
}
|
|
374
|
-
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Validated JSON shape for a workflow asset.
|
|
3
|
+
*
|
|
4
|
+
* `parseWorkflow` (parser.ts) converts a workflow markdown file into a
|
|
5
|
+
* `WorkflowDocument` plus a list of `WorkflowError`s. The document is the
|
|
6
|
+
* single source of truth consumed by the renderer, the indexer (cached
|
|
7
|
+
* into `workflow_documents` in `index.db`), and the run engine. Source
|
|
8
|
+
* markdown is referenced by `SourceRef` line spans so editors and agents
|
|
9
|
+
* can rewrite content in place without a full re-parse.
|
|
10
|
+
*/
|
|
11
|
+
export const WORKFLOW_SCHEMA_VERSION = 1;
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Cross-cutting semantic checks over an assembled WorkflowDocument draft.
|
|
3
|
+
*
|
|
4
|
+
* The parser handles per-line shape checks; this module runs rules that need
|
|
5
|
+
* the whole document or the raw frontmatter at once: duplicate step IDs,
|
|
6
|
+
* step-id format, and the frontmatter key whitelist.
|
|
7
|
+
*/
|
|
8
|
+
const STEP_ID_REGEX = /^[A-Za-z0-9][A-Za-z0-9._-]*$/;
|
|
9
|
+
const ALLOWED_FRONTMATTER_KEYS = new Set(["description", "tags", "params"]);
|
|
10
|
+
export function runSemanticChecks(draft, frontmatterData, frontmatterEndLine, errors) {
|
|
11
|
+
checkFrontmatterKeys(frontmatterData, frontmatterEndLine, errors);
|
|
12
|
+
checkStepIdFormat(draft, errors);
|
|
13
|
+
checkDuplicateStepIds(draft, errors);
|
|
14
|
+
}
|
|
15
|
+
function checkFrontmatterKeys(data, fmEndLine, errors) {
|
|
16
|
+
for (const key of Object.keys(data)) {
|
|
17
|
+
if (ALLOWED_FRONTMATTER_KEYS.has(key))
|
|
18
|
+
continue;
|
|
19
|
+
errors.push({
|
|
20
|
+
line: fmEndLine,
|
|
21
|
+
message: `Workflow frontmatter "${key}" is not supported. Use only: description, tags, params.`,
|
|
22
|
+
});
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
function checkStepIdFormat(draft, errors) {
|
|
26
|
+
for (const step of draft.steps) {
|
|
27
|
+
if (STEP_ID_REGEX.test(step.id))
|
|
28
|
+
continue;
|
|
29
|
+
errors.push({
|
|
30
|
+
line: step.source.start,
|
|
31
|
+
message: `Step ID "${step.id}" is invalid. Use letters, numbers, ".", "_" or "-" (e.g. "deploy-job").`,
|
|
32
|
+
});
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
function checkDuplicateStepIds(draft, errors) {
|
|
36
|
+
const firstSeenLine = new Map();
|
|
37
|
+
for (const step of draft.steps) {
|
|
38
|
+
const previous = firstSeenLine.get(step.id);
|
|
39
|
+
if (previous !== undefined) {
|
|
40
|
+
errors.push({
|
|
41
|
+
line: step.source.start,
|
|
42
|
+
message: `Step ID "${step.id}" is already used on line ${previous}. Step IDs must be unique within a workflow.`,
|
|
43
|
+
});
|
|
44
|
+
continue;
|
|
45
|
+
}
|
|
46
|
+
firstSeenLine.set(step.id, step.source.start);
|
|
47
|
+
}
|
|
48
|
+
}
|