akm-cli 0.7.5 → 0.8.0-rc2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/CHANGELOG.md +1 -1
- package/dist/cli/parse-args.js +43 -0
- package/dist/cli.js +853 -479
- package/dist/commands/agent-dispatch.js +102 -0
- package/dist/commands/agent-support.js +62 -0
- package/dist/commands/config-cli.js +68 -84
- package/dist/commands/consolidate.js +823 -0
- package/dist/commands/distill-promotion-policy.js +658 -0
- package/dist/commands/distill.js +244 -52
- package/dist/commands/eval-cases.js +40 -0
- package/dist/commands/events.js +2 -23
- package/dist/commands/graph.js +222 -0
- package/dist/commands/health.js +376 -0
- package/dist/commands/help/help-accept.md +9 -0
- package/dist/commands/help/help-improve.md +53 -0
- package/dist/commands/help/help-proposals.md +15 -0
- package/dist/commands/help/help-propose.md +17 -0
- package/dist/commands/help/help-reject.md +8 -0
- package/dist/commands/history.js +3 -30
- package/dist/commands/improve.js +1170 -0
- package/dist/commands/info.js +2 -2
- package/dist/commands/init.js +2 -2
- package/dist/commands/install-audit.js +5 -1
- package/dist/commands/installed-stashes.js +118 -138
- package/dist/commands/knowledge.js +133 -0
- package/dist/commands/lint/agent-linter.js +46 -0
- package/dist/commands/lint/base-linter.js +285 -0
- package/dist/commands/lint/command-linter.js +46 -0
- package/dist/commands/lint/default-linter.js +13 -0
- package/dist/commands/lint/index.js +107 -0
- package/dist/commands/lint/knowledge-linter.js +13 -0
- package/dist/commands/lint/memory-linter.js +58 -0
- package/dist/commands/lint/registry.js +33 -0
- package/dist/commands/lint/skill-linter.js +42 -0
- package/dist/commands/lint/task-linter.js +47 -0
- package/dist/commands/lint/types.js +1 -0
- package/dist/commands/lint/workflow-linter.js +53 -0
- package/dist/commands/lint.js +1 -0
- package/dist/commands/proposal.js +8 -7
- package/dist/commands/propose.js +78 -28
- package/dist/commands/reflect.js +143 -35
- package/dist/commands/registry-search.js +2 -2
- package/dist/commands/remember.js +54 -0
- package/dist/commands/schema-repair.js +130 -0
- package/dist/commands/search.js +21 -5
- package/dist/commands/show.js +121 -17
- package/dist/commands/source-add.js +10 -10
- package/dist/commands/source-manage.js +11 -19
- package/dist/commands/tasks.js +385 -0
- package/dist/commands/url-checker.js +39 -0
- package/dist/commands/vault.js +8 -26
- package/dist/core/action-contributors.js +25 -0
- package/dist/core/asset-ref.js +4 -0
- package/dist/core/asset-registry.js +4 -16
- package/dist/core/asset-spec.js +10 -0
- package/dist/core/common.js +94 -0
- package/dist/core/concurrent.js +22 -0
- package/dist/core/config.js +222 -128
- package/dist/core/events.js +73 -126
- package/dist/core/frontmatter.js +3 -1
- package/dist/core/markdown.js +17 -0
- package/dist/core/memory-improve.js +678 -0
- package/dist/core/parse.js +155 -0
- package/dist/core/paths.js +101 -3
- package/dist/core/proposal-validators.js +61 -0
- package/dist/core/proposals.js +49 -38
- package/dist/core/state-db.js +775 -0
- package/dist/core/time.js +51 -0
- package/dist/core/warn.js +59 -1
- package/dist/indexer/db-search.js +52 -238
- package/dist/indexer/db.js +378 -1
- package/dist/indexer/ensure-index.js +61 -0
- package/dist/indexer/graph-boost.js +247 -94
- package/dist/indexer/graph-db.js +201 -0
- package/dist/indexer/graph-dedup.js +99 -0
- package/dist/indexer/graph-extraction.js +409 -76
- package/dist/indexer/index-context.js +10 -0
- package/dist/indexer/indexer.js +442 -290
- package/dist/indexer/llm-cache.js +47 -0
- package/dist/indexer/match-contributors.js +141 -0
- package/dist/indexer/matchers.js +24 -190
- package/dist/indexer/memory-inference.js +63 -29
- package/dist/indexer/metadata-contributors.js +26 -0
- package/dist/indexer/metadata.js +194 -175
- package/dist/indexer/path-resolver.js +89 -0
- package/dist/indexer/ranking-contributors.js +204 -0
- package/dist/indexer/ranking.js +74 -0
- package/dist/indexer/search-hit-enrichers.js +22 -0
- package/dist/indexer/search-source.js +24 -9
- package/dist/indexer/semantic-status.js +2 -16
- package/dist/indexer/walker.js +25 -0
- package/dist/integrations/agent/config.js +175 -3
- package/dist/integrations/agent/index.js +3 -1
- package/dist/integrations/agent/pipeline.js +39 -0
- package/dist/integrations/agent/profiles.js +67 -5
- package/dist/integrations/agent/prompts.js +77 -72
- package/dist/integrations/agent/runners.js +31 -0
- package/dist/integrations/agent/sdk-runner.js +120 -0
- package/dist/integrations/agent/spawn.js +71 -16
- package/dist/integrations/lockfile.js +10 -18
- package/dist/integrations/session-logs/index.js +65 -0
- package/dist/integrations/session-logs/providers/claude-code.js +56 -0
- package/dist/integrations/session-logs/providers/opencode.js +52 -0
- package/dist/integrations/session-logs/types.js +1 -0
- package/dist/llm/call-ai.js +74 -0
- package/dist/llm/client.js +61 -122
- package/dist/llm/feature-gate.js +27 -16
- package/dist/llm/graph-extract.js +297 -62
- package/dist/llm/memory-infer.js +49 -71
- package/dist/llm/metadata-enhance.js +39 -22
- package/dist/llm/prompts/graph-extract-user-prompt.md +12 -0
- package/dist/output/cli-hints-full.md +277 -0
- package/dist/output/cli-hints-short.md +65 -0
- package/dist/output/cli-hints.js +2 -318
- package/dist/output/renderers.js +190 -123
- package/dist/output/shapes.js +33 -0
- package/dist/output/text.js +239 -2
- package/dist/registry/providers/skills-sh.js +61 -49
- package/dist/registry/providers/static-index.js +44 -48
- package/dist/setup/setup.js +510 -11
- package/dist/sources/provider-factory.js +2 -1
- package/dist/sources/providers/git.js +2 -2
- package/dist/sources/website-ingest.js +4 -0
- package/dist/tasks/backends/cron.js +200 -0
- package/dist/tasks/backends/exec-utils.js +25 -0
- package/dist/tasks/backends/index.js +32 -0
- package/dist/tasks/backends/launchd-template.xml +19 -0
- package/dist/tasks/backends/launchd.js +184 -0
- package/dist/tasks/backends/schtasks-template.xml +29 -0
- package/dist/tasks/backends/schtasks.js +212 -0
- package/dist/tasks/parser.js +198 -0
- package/dist/tasks/resolveAkmBin.js +84 -0
- package/dist/tasks/runner.js +432 -0
- package/dist/tasks/schedule.js +208 -0
- package/dist/tasks/schema.js +13 -0
- package/dist/tasks/validator.js +59 -0
- package/dist/wiki/index-template.md +12 -0
- package/dist/wiki/ingest-workflow-template.md +54 -0
- package/dist/wiki/log-template.md +8 -0
- package/dist/wiki/schema-template.md +61 -0
- package/dist/wiki/wiki-templates.js +12 -0
- package/dist/wiki/wiki.js +10 -61
- package/dist/workflows/authoring.js +5 -25
- package/dist/workflows/renderer.js +8 -3
- package/dist/workflows/runs.js +59 -91
- package/dist/workflows/validator.js +1 -1
- package/dist/workflows/workflow-template.md +24 -0
- package/docs/README.md +3 -0
- package/docs/migration/release-notes/0.7.0.md +1 -1
- package/docs/migration/release-notes/0.8.0.md +43 -0
- package/package.json +3 -2
- package/dist/templates/wiki-templates.js +0 -100
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Validate a parsed {@link TaskDocument} for runnability.
|
|
3
|
+
*
|
|
4
|
+
* Enforces:
|
|
5
|
+
* • the schedule is parseable and translates to the active backend
|
|
6
|
+
* • the workflow ref resolves (workflow targets)
|
|
7
|
+
* • the asset/file source exists (prompt targets)
|
|
8
|
+
* • the agent profile resolves (prompt targets)
|
|
9
|
+
*
|
|
10
|
+
* Validation is deliberately split from parsing: callers that only want to
|
|
11
|
+
* read frontmatter (e.g. `tasks list`) can skip these checks, while
|
|
12
|
+
* `tasks add` and `tasks run` should always run them.
|
|
13
|
+
*/
|
|
14
|
+
import fs from "node:fs";
|
|
15
|
+
import path from "node:path";
|
|
16
|
+
import { parseAssetRef } from "../core/asset-ref";
|
|
17
|
+
import { resolveStashDir } from "../core/common";
|
|
18
|
+
import { loadConfig } from "../core/config";
|
|
19
|
+
import { NotFoundError } from "../core/errors";
|
|
20
|
+
import { requireAgentProfile } from "../integrations/agent";
|
|
21
|
+
import { resolveAssetPath } from "../sources/resolve";
|
|
22
|
+
import { parseSchedule } from "./schedule";
|
|
23
|
+
/**
 * Validate a parsed task document for runnability.
 *
 * @param {object} task - Parsed task document (reads `id`, `schedule`,
 *   `target`, and `source.path`).
 * @param {object} options - `{ backend, stashDir? }`; `stashDir` defaults to
 *   `resolveStashDir()` when omitted.
 * @returns {Promise<void>} resolves when every check passes.
 * @throws {NotFoundError} when the workflow ref, asset, prompt file, or agent
 *   profile cannot be resolved; schedule errors propagate from parseSchedule.
 */
export async function validateTaskDocument(task, options) {
    // Schedule must parse and translate to the active backend.
    parseSchedule(task.schedule, options.backend);
    if (task.target.kind === "workflow") {
        const stashDir = options.stashDir ?? resolveStashDir();
        const ref = parseAssetRef(task.target.ref);
        if (ref.type !== "workflow") {
            throw new NotFoundError(`Task "${task.id}" workflow target must be a workflow ref (got "${task.target.ref}").`, "WORKFLOW_NOT_FOUND");
        }
        // Throws if the workflow asset does not exist in the stash.
        await resolveAssetPath(stashDir, "workflow", ref.name);
        return;
    }
    if (task.target.kind !== "prompt") {
        return;
    }
    // Prompt target. Resolve the profile unconditionally — when no profile is
    // set on the task, requireAgentProfile falls back to config.agent.default
    // and throws a clear error if neither is configured. Catching this at
    // `tasks add` / `tasks sync` time is much more useful than failing only
    // when the OS scheduler fires.
    const config = loadConfig();
    requireAgentProfile(config.agent, task.target.profile);
    const src = task.target.source;
    if (src.kind === "asset") {
        const stashDir = options.stashDir ?? resolveStashDir();
        const ref = parseAssetRef(src.ref);
        await resolveAssetPath(stashDir, ref.type, ref.name);
    }
    else if (src.kind === "file") {
        // Relative prompt paths are resolved against the task file's directory.
        const taskDir = path.dirname(task.source.path);
        const resolved = path.isAbsolute(src.path) ? src.path : path.resolve(taskDir, src.path);
        // Single statSync with throwIfNoEntry replaces the previous
        // existsSync + statSync pair: it closes the race where the file is
        // removed between the two calls (which surfaced as a raw ENOENT
        // instead of this NotFoundError) and saves a syscall.
        const stat = fs.statSync(resolved, { throwIfNoEntry: false });
        if (!stat?.isFile()) {
            throw new NotFoundError(`Prompt file not found for task "${task.id}": ${src.path} (resolved to ${resolved}).`, "FILE_NOT_FOUND");
        }
    }
    // inline source is always valid post-parse.
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: Catalog of pages in the {{WIKI_NAME}} wiki. Regenerated by `akm index`.
|
|
3
|
+
wikiRole: index
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# {{WIKI_NAME}} — index
|
|
7
|
+
|
|
8
|
+
_This file is regenerated on every `akm index` run. Manual edits will be
lost the next time the index is regenerated._
|
|
10
|
+
|
|
11
|
+
_(no pages yet — create one with your editor, or ingest a source with `akm
|
|
12
|
+
wiki stash {{WIKI_NAME}} <path>`.)_
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
# Ingest workflow for wiki:{{WIKI_NAME}}
|
|
2
|
+
|
|
3
|
+
Wiki location: {{WIKI_DIR}}
|
|
4
|
+
Schema: {{SCHEMA_PATH}}
|
|
5
|
+
|
|
6
|
+
Follow these steps. akm commands handle the invariants; use your native
|
|
7
|
+
Read/Write/Edit tools for page edits.
|
|
8
|
+
|
|
9
|
+
1. **Read the schema.** Open `{{SCHEMA_PATH}}`. It defines the voice, page
|
|
10
|
+
kinds, contradiction policy, and any wiki-specific conventions. Do not
|
|
11
|
+
skip this step even on familiar wikis — the schema may have changed.
|
|
12
|
+
|
|
13
|
+
2. **File the source under `raw/`.**
|
|
14
|
+
```sh
|
|
15
|
+
akm wiki stash {{WIKI_NAME}} <path-or-url-to-source>
|
|
16
|
+
# or: cat <source> | akm wiki stash {{WIKI_NAME}} -
|
|
17
|
+
```
|
|
18
|
+
Returns `{ slug, path, ref }`. The raw copy is immutable — never edit it.
|
|
19
|
+
|
|
20
|
+
3. **Find related existing pages.**
|
|
21
|
+
```sh
|
|
22
|
+
akm wiki search {{WIKI_NAME}} "<key terms from the source>"
|
|
23
|
+
```
|
|
24
|
+
Read the top hits with `akm show wiki:{{WIKI_NAME}}/<page>`. Use
|
|
25
|
+
`akm show wiki:{{WIKI_NAME}}/<page> toc` for large pages.
|
|
26
|
+
|
|
27
|
+
4. **Decide for each candidate.** For each related page:
|
|
28
|
+
- **Append**: add a section or paragraph under the relevant heading.
|
|
29
|
+
Include the raw source in the page's `sources:` frontmatter list.
|
|
30
|
+
- **Contradict**: note the tension explicitly; don't silently overwrite.
|
|
31
|
+
Follow the schema's contradiction policy.
|
|
32
|
+
- **Skip**: source doesn't add to this page — move on.
|
|
33
|
+
|
|
34
|
+
5. **Create new pages for concepts/entities the source introduces.** Each
|
|
35
|
+
new page must have frontmatter with `description`, `pageKind`,
|
|
36
|
+
`xrefs`, and `sources`. Cross-reference with related pages both
|
|
37
|
+
directions.
|
|
38
|
+
|
|
39
|
+
6. **Update xrefs both ways.** If page A now xrefs page B, page B must xref
|
|
40
|
+
page A. `akm wiki lint {{WIKI_NAME}}` will flag violations.
|
|
41
|
+
|
|
42
|
+
7. **Append to `log.md`.** One entry per ingest: date, source slug, one-line
|
|
43
|
+
summary, refs to created/edited pages. Newest at the top.
|
|
44
|
+
|
|
45
|
+
8. **Regenerate the index + verify.**
|
|
46
|
+
```sh
|
|
47
|
+
akm index
|
|
48
|
+
akm wiki lint {{WIKI_NAME}}
|
|
49
|
+
```
|
|
50
|
+
Resolve any lint findings before calling the ingest done.
|
|
51
|
+
|
|
52
|
+
That's it. `akm` never calls an LLM — reasoning is your job; it just owns
|
|
53
|
+
the invariants (raw immutability, unique slugs, ref validation, index
|
|
54
|
+
regeneration, structural lint).
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: Rules that govern this wiki. Read before ingesting, searching, or editing pages.
|
|
3
|
+
wikiRole: schema
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# {{WIKI_NAME}} wiki schema
|
|
7
|
+
|
|
8
|
+
This wiki follows the three-layer pattern:
|
|
9
|
+
|
|
10
|
+
- `raw/` — immutable ingested sources (never edit)
|
|
11
|
+
- `<page>.md` and `<topic>/<page>.md` — agent-authored pages
|
|
12
|
+
- `schema.md` (this file), `index.md`, `log.md` — wiki-level metadata
|
|
13
|
+
|
|
14
|
+
## Page frontmatter
|
|
15
|
+
|
|
16
|
+
Every page should carry frontmatter so akm can index and link it:
|
|
17
|
+
|
|
18
|
+
```yaml
|
|
19
|
+
---
|
|
20
|
+
description: one-sentence summary used in search and lint
|
|
21
|
+
pageKind: entity | concept | question | note | <your-custom-kind>
|
|
22
|
+
xrefs:
|
|
23
|
+
- wiki:{{WIKI_NAME}}/other-page
|
|
24
|
+
sources:
|
|
25
|
+
- raw/<slug>.md
|
|
26
|
+
---
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
`pageKind` accepts any non-empty string. Add new categories freely; they
|
|
30
|
+
will surface in `index.md` as new sections after the next `akm index` run.
|
|
31
|
+
|
|
32
|
+
## Three operations
|
|
33
|
+
|
|
34
|
+
### Ingest
|
|
35
|
+
|
|
36
|
+
1. Copy the new source into `raw/` with `akm wiki stash {{WIKI_NAME}} <path>`.
|
|
37
|
+
2. Find related pages: `akm wiki search {{WIKI_NAME}} "<terms>"`.
|
|
38
|
+
3. For each related page: append a section, note a contradiction, or create a
|
|
39
|
+
new page. Update xrefs on both sides.
|
|
40
|
+
4. Cite the raw source in each touched page's `sources:` frontmatter.
|
|
41
|
+
5. Append one entry to `log.md` describing what was assimilated.
|
|
42
|
+
|
|
43
|
+
### Query
|
|
44
|
+
|
|
45
|
+
1. `akm wiki search {{WIKI_NAME}} "<question>"` — find candidate pages.
|
|
46
|
+
2. `akm show wiki:{{WIKI_NAME}}/<page>` — read the top hits.
|
|
47
|
+
3. Compose the answer from the wiki; cite raw sources only when the wiki
|
|
48
|
+
points at them.
|
|
49
|
+
|
|
50
|
+
### Lint
|
|
51
|
+
|
|
52
|
+
1. `akm wiki lint {{WIKI_NAME}}` — deterministic structural checks.
|
|
53
|
+
2. Resolve each finding: link orphans, fix broken xrefs, add descriptions,
|
|
54
|
+
cite uncited raws, refresh the index.
|
|
55
|
+
|
|
56
|
+
## Hard rules
|
|
57
|
+
|
|
58
|
+
- `raw/` is immutable. Never edit ingested sources.
|
|
59
|
+
- Cross-references must point at pages that actually exist.
|
|
60
|
+
- Prefer appending to an existing page over duplicating one.
|
|
61
|
+
- Cite the raw source id (e.g. `raw/2026-04-foo.md`) when copying claims.
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import indexTemplate from "./index-template.md" with { type: "text" };
|
|
2
|
+
import logTemplate from "./log-template.md" with { type: "text" };
|
|
3
|
+
import schemaTemplate from "./schema-template.md" with { type: "text" };
|
|
4
|
+
// Substitute a wiki name into every {{WIKI_NAME}} placeholder of a template.
const fillWikiName = (template, wikiName) => template.replaceAll("{{WIKI_NAME}}", wikiName);

/** Render the schema.md scaffold for the named wiki. */
export function buildSchemaMd(wikiName) {
    return fillWikiName(schemaTemplate, wikiName);
}
/** Render the index.md scaffold for the named wiki. */
export function buildIndexMd(wikiName) {
    return fillWikiName(indexTemplate, wikiName);
}
/** Render the log.md scaffold for the named wiki. */
export function buildLogMd(wikiName) {
    return fillWikiName(logTemplate, wikiName);
}
|
package/dist/wiki/wiki.js
CHANGED
|
@@ -42,12 +42,13 @@ import fs from "node:fs";
|
|
|
42
42
|
import path from "node:path";
|
|
43
43
|
import { parse as yamlParse } from "yaml";
|
|
44
44
|
import { akmSearch } from "../commands/search";
|
|
45
|
-
import { isWithin } from "../core/common";
|
|
46
|
-
import { loadUserConfig, saveConfig } from "../core/config";
|
|
45
|
+
import { isWithin, todayIso } from "../core/common";
|
|
46
|
+
import { getSources, loadUserConfig, saveConfig } from "../core/config";
|
|
47
47
|
import { NotFoundError, UsageError } from "../core/errors";
|
|
48
48
|
import { parseFrontmatter, parseFrontmatterBlock } from "../core/frontmatter";
|
|
49
49
|
import { resolveSourceEntries } from "../indexer/search-source";
|
|
50
|
-
import
|
|
50
|
+
import ingestWorkflowTemplate from "./ingest-workflow-template.md" with { type: "text" };
|
|
51
|
+
import { buildIndexMd, buildLogMd, buildSchemaMd } from "./wiki-templates";
|
|
51
52
|
// ── Constants ───────────────────────────────────────────────────────────────
|
|
52
53
|
export const WIKIS_SUBDIR = "wikis";
|
|
53
54
|
export const SCHEMA_MD = "schema.md";
|
|
@@ -399,12 +400,11 @@ export function removeWiki(stashDir, name, options = {}) {
|
|
|
399
400
|
const isStashWiki = fs.existsSync(wikiDir) && isRecognizedStashWiki(wikiDir);
|
|
400
401
|
if (!isStashWiki && external) {
|
|
401
402
|
const config = loadUserConfig();
|
|
402
|
-
const filteredSources = (config
|
|
403
|
+
const filteredSources = getSources(config).filter((entry) => entry.wikiName !== name);
|
|
403
404
|
const installed = (config.installed ?? []).filter((entry) => entry.wikiName !== name);
|
|
404
405
|
saveConfig({
|
|
405
406
|
...config,
|
|
406
407
|
sources: filteredSources.length > 0 ? filteredSources : undefined,
|
|
407
|
-
stashes: undefined,
|
|
408
408
|
installed: installed.length > 0 ? installed : undefined,
|
|
409
409
|
});
|
|
410
410
|
return {
|
|
@@ -665,7 +665,7 @@ function withRawFrontmatter(content, slug) {
|
|
|
665
665
|
// tag the wikiRole for the indexer.
|
|
666
666
|
if (content.startsWith("---"))
|
|
667
667
|
return content;
|
|
668
|
-
const date =
|
|
668
|
+
const date = todayIso();
|
|
669
669
|
return `---\nwikiRole: raw\ningestedAt: ${date}\nslug: ${slug}\n---\n\n${content}`;
|
|
670
670
|
}
|
|
671
671
|
function ensureTrailingNewline(value) {
|
|
@@ -960,60 +960,9 @@ export function regenerateAllWikiIndexes(stashDir) {
|
|
|
960
960
|
export function buildIngestWorkflow(stashDir, name) {
|
|
961
961
|
const wikiDir = resolveWikiSource(stashDir, name).path;
|
|
962
962
|
const schemaPath = path.join(wikiDir, SCHEMA_MD);
|
|
963
|
-
const workflow =
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
|
|
968
|
-
Follow these steps. akm commands handle the invariants; use your native
|
|
969
|
-
Read/Write/Edit tools for page edits.
|
|
970
|
-
|
|
971
|
-
1. **Read the schema.** Open \`${schemaPath}\`. It defines the voice, page
|
|
972
|
-
kinds, contradiction policy, and any wiki-specific conventions. Do not
|
|
973
|
-
skip this step even on familiar wikis — the schema may have changed.
|
|
974
|
-
|
|
975
|
-
2. **File the source under \`raw/\`.**
|
|
976
|
-
\`\`\`sh
|
|
977
|
-
akm wiki stash ${name} <path-or-url-to-source>
|
|
978
|
-
# or: cat <source> | akm wiki stash ${name} -
|
|
979
|
-
\`\`\`
|
|
980
|
-
Returns \`{ slug, path, ref }\`. The raw copy is immutable — never edit it.
|
|
981
|
-
|
|
982
|
-
3. **Find related existing pages.**
|
|
983
|
-
\`\`\`sh
|
|
984
|
-
akm wiki search ${name} "<key terms from the source>"
|
|
985
|
-
\`\`\`
|
|
986
|
-
Read the top hits with \`akm show wiki:${name}/<page>\`. Use
|
|
987
|
-
\`akm show wiki:${name}/<page> toc\` for large pages.
|
|
988
|
-
|
|
989
|
-
4. **Decide for each candidate.** For each related page:
|
|
990
|
-
- **Append**: add a section or paragraph under the relevant heading.
|
|
991
|
-
Include the raw source in the page's \`sources:\` frontmatter list.
|
|
992
|
-
- **Contradict**: note the tension explicitly; don't silently overwrite.
|
|
993
|
-
Follow the schema's contradiction policy.
|
|
994
|
-
- **Skip**: source doesn't add to this page — move on.
|
|
995
|
-
|
|
996
|
-
5. **Create new pages for concepts/entities the source introduces.** Each
|
|
997
|
-
new page must have frontmatter with \`description\`, \`pageKind\`,
|
|
998
|
-
\`xrefs\`, and \`sources\`. Cross-reference with related pages both
|
|
999
|
-
directions.
|
|
1000
|
-
|
|
1001
|
-
6. **Update xrefs both ways.** If page A now xrefs page B, page B must xref
|
|
1002
|
-
page A. \`akm wiki lint ${name}\` will flag violations.
|
|
1003
|
-
|
|
1004
|
-
7. **Append to \`log.md\`.** One entry per ingest: date, source slug, one-line
|
|
1005
|
-
summary, refs to created/edited pages. Newest at the top.
|
|
1006
|
-
|
|
1007
|
-
8. **Regenerate the index + verify.**
|
|
1008
|
-
\`\`\`sh
|
|
1009
|
-
akm index
|
|
1010
|
-
akm wiki lint ${name}
|
|
1011
|
-
\`\`\`
|
|
1012
|
-
Resolve any lint findings before calling the ingest done.
|
|
1013
|
-
|
|
1014
|
-
That's it. \`akm\` never calls an LLM — reasoning is your job; it just owns
|
|
1015
|
-
the invariants (raw immutability, unique slugs, ref validation, index
|
|
1016
|
-
regeneration, structural lint).
|
|
1017
|
-
`;
|
|
963
|
+
const workflow = ingestWorkflowTemplate
|
|
964
|
+
.replaceAll("{{WIKI_NAME}}", name)
|
|
965
|
+
.replaceAll("{{WIKI_DIR}}", wikiDir)
|
|
966
|
+
.replaceAll("{{SCHEMA_PATH}}", schemaPath);
|
|
1018
967
|
return { wiki: name, path: wikiDir, schemaPath, workflow };
|
|
1019
968
|
}
|
|
@@ -5,6 +5,7 @@ import { isWithin, resolveStashDir } from "../core/common";
|
|
|
5
5
|
import { UsageError } from "../core/errors";
|
|
6
6
|
import { warn } from "../core/warn";
|
|
7
7
|
import { parseWorkflow } from "./parser";
|
|
8
|
+
import workflowTemplate from "./workflow-template.md" with { type: "text" };
|
|
8
9
|
const DEFAULT_WORKFLOW_TEMPLATE = renderWorkflowTemplate({
|
|
9
10
|
title: "Example Workflow",
|
|
10
11
|
firstStepTitle: "First Step",
|
|
@@ -134,29 +135,8 @@ export function validateWorkflowSource(target) {
|
|
|
134
135
|
return { path: target, parse: parseWorkflow(content, { path: target }) };
|
|
135
136
|
}
|
|
136
137
|
function renderWorkflowTemplate(input) {
|
|
137
|
-
return
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
params:
|
|
142
|
-
example_param: Explain this parameter
|
|
143
|
-
---
|
|
144
|
-
|
|
145
|
-
# Workflow: ${input.title}
|
|
146
|
-
|
|
147
|
-
## Step: ${input.firstStepTitle}
|
|
148
|
-
Step ID: ${input.firstStepId}
|
|
149
|
-
|
|
150
|
-
### Instructions
|
|
151
|
-
Describe what to do in this step.
|
|
152
|
-
|
|
153
|
-
### Completion Criteria
|
|
154
|
-
- Confirm the first step is complete
|
|
155
|
-
|
|
156
|
-
## Step: Second Step
|
|
157
|
-
Step ID: second-step
|
|
158
|
-
|
|
159
|
-
### Instructions
|
|
160
|
-
Describe what happens next.
|
|
161
|
-
`;
|
|
138
|
+
return workflowTemplate
|
|
139
|
+
.replace("{{TITLE}}", input.title)
|
|
140
|
+
.replace("{{FIRST_STEP_TITLE}}", input.firstStepTitle)
|
|
141
|
+
.replace("{{FIRST_STEP_ID}}", input.firstStepId);
|
|
162
142
|
}
|
|
@@ -8,6 +8,7 @@
|
|
|
8
8
|
*/
|
|
9
9
|
import { makeAssetRef } from "../core/asset-ref";
|
|
10
10
|
import { UsageError } from "../core/errors";
|
|
11
|
+
import { registerMetadataContributor } from "../indexer/metadata-contributors";
|
|
11
12
|
import { cacheWorkflowDocument } from "./document-cache";
|
|
12
13
|
import { parseWorkflow } from "./parser";
|
|
13
14
|
function shellQuote(value) {
|
|
@@ -54,8 +55,12 @@ export const workflowMdRenderer = {
|
|
|
54
55
|
})),
|
|
55
56
|
};
|
|
56
57
|
},
|
|
57
|
-
|
|
58
|
-
|
|
58
|
+
};
|
|
59
|
+
registerMetadataContributor({
|
|
60
|
+
name: "workflow-document-metadata",
|
|
61
|
+
appliesTo: ({ rendererName }) => rendererName === "workflow-md",
|
|
62
|
+
contribute(entry, { renderContext }) {
|
|
63
|
+
const doc = loadDocument(renderContext);
|
|
59
64
|
const hints = new Set(entry.searchHints ?? []);
|
|
60
65
|
hints.add(doc.title);
|
|
61
66
|
for (const step of doc.steps) {
|
|
@@ -75,4 +80,4 @@ export const workflowMdRenderer = {
|
|
|
75
80
|
}
|
|
76
81
|
cacheWorkflowDocument(entry, doc);
|
|
77
82
|
},
|
|
78
|
-
};
|
|
83
|
+
});
|
package/dist/workflows/runs.js
CHANGED
|
@@ -13,64 +13,58 @@ import { formatWorkflowErrors } from "./authoring";
|
|
|
13
13
|
import { closeWorkflowDatabase, openWorkflowDatabase } from "./db";
|
|
14
14
|
import { parseWorkflow } from "./parser";
|
|
15
15
|
import { getCurrentWorkflowScopeKey } from "./scope-key";
|
|
16
|
+
async function withWorkflowDb(fn) {
|
|
17
|
+
const db = openWorkflowDatabase();
|
|
18
|
+
try {
|
|
19
|
+
return await Promise.resolve(fn(db));
|
|
20
|
+
}
|
|
21
|
+
finally {
|
|
22
|
+
closeWorkflowDatabase(db);
|
|
23
|
+
}
|
|
24
|
+
}
|
|
16
25
|
export async function startWorkflowRun(ref, params = {}) {
|
|
17
26
|
const asset = await loadWorkflowAsset(ref);
|
|
18
|
-
|
|
19
|
-
try {
|
|
27
|
+
return withWorkflowDb(async (db) => {
|
|
20
28
|
const now = new Date().toISOString();
|
|
21
29
|
const runId = randomUUID();
|
|
22
30
|
const scopeKey = getCurrentWorkflowScopeKey();
|
|
23
31
|
const currentStepId = asset.steps[0]?.id ?? null;
|
|
24
32
|
const workflowEntryId = resolveWorkflowEntryId(asset.sourcePath, asset.ref);
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
.prepare(`INSERT INTO workflow_runs (
|
|
33
|
+
db.transaction(() => {
|
|
34
|
+
db.prepare(`INSERT INTO workflow_runs (
|
|
28
35
|
id, workflow_ref, scope_key, workflow_entry_id, workflow_title, status, params_json, current_step_id, created_at, updated_at
|
|
29
|
-
) VALUES (?, ?, ?, ?, ?, 'active', ?, ?, ?, ?)`)
|
|
30
|
-
|
|
31
|
-
const insertStep = workflowDb.prepare(`INSERT INTO workflow_run_steps (
|
|
36
|
+
) VALUES (?, ?, ?, ?, ?, 'active', ?, ?, ?, ?)`).run(runId, asset.ref, scopeKey, workflowEntryId, asset.title, JSON.stringify(params), currentStepId, now, now);
|
|
37
|
+
const insertStep = db.prepare(`INSERT INTO workflow_run_steps (
|
|
32
38
|
run_id, step_id, step_title, instructions, completion_json, sequence_index, status
|
|
33
39
|
) VALUES (?, ?, ?, ?, ?, ?, 'pending')`);
|
|
34
40
|
for (const step of asset.steps) {
|
|
35
41
|
insertStep.run(runId, step.id, step.title, step.instructions, step.completionCriteria ? JSON.stringify(step.completionCriteria) : null, step.sequenceIndex ?? 0);
|
|
36
42
|
}
|
|
37
43
|
})();
|
|
38
|
-
const result = getWorkflowStatus(runId);
|
|
44
|
+
const result = await getWorkflowStatus(runId);
|
|
39
45
|
appendEvent({
|
|
40
46
|
eventType: "workflow_started",
|
|
41
47
|
ref: ref,
|
|
42
48
|
metadata: { runId: result.run.id, title: result.run.workflowTitle },
|
|
43
49
|
});
|
|
44
50
|
return result;
|
|
45
|
-
}
|
|
46
|
-
finally {
|
|
47
|
-
closeWorkflowDatabase(workflowDb);
|
|
48
|
-
}
|
|
51
|
+
});
|
|
49
52
|
}
|
|
50
|
-
export function getWorkflowStatus(runId) {
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
const
|
|
54
|
-
const steps = readWorkflowRunSteps(workflowDb, run.id);
|
|
53
|
+
export async function getWorkflowStatus(runId) {
|
|
54
|
+
return withWorkflowDb((db) => {
|
|
55
|
+
const run = readWorkflowRun(db, runId);
|
|
56
|
+
const steps = readWorkflowRunSteps(db, run.id);
|
|
55
57
|
return buildWorkflowRunDetail(run, steps);
|
|
56
|
-
}
|
|
57
|
-
finally {
|
|
58
|
-
closeWorkflowDatabase(workflowDb);
|
|
59
|
-
}
|
|
58
|
+
});
|
|
60
59
|
}
|
|
61
|
-
export function hasWorkflowRun(runId) {
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
const row = workflowDb.prepare("SELECT 1 FROM workflow_runs WHERE id = ? LIMIT 1").get(runId);
|
|
60
|
+
export async function hasWorkflowRun(runId) {
|
|
61
|
+
return withWorkflowDb((db) => {
|
|
62
|
+
const row = db.prepare("SELECT 1 FROM workflow_runs WHERE id = ? LIMIT 1").get(runId);
|
|
65
63
|
return !!row;
|
|
66
|
-
}
|
|
67
|
-
finally {
|
|
68
|
-
closeWorkflowDatabase(workflowDb);
|
|
69
|
-
}
|
|
64
|
+
});
|
|
70
65
|
}
|
|
71
|
-
export function listWorkflowRuns(input) {
|
|
72
|
-
|
|
73
|
-
try {
|
|
66
|
+
export async function listWorkflowRuns(input) {
|
|
67
|
+
return withWorkflowDb((db) => {
|
|
74
68
|
const filters = [];
|
|
75
69
|
const params = [];
|
|
76
70
|
const scopeKey = getCurrentWorkflowScopeKey();
|
|
@@ -88,20 +82,16 @@ export function listWorkflowRuns(input) {
|
|
|
88
82
|
filters.push("status IN ('active', 'blocked')");
|
|
89
83
|
}
|
|
90
84
|
const where = filters.length > 0 ? `WHERE ${filters.join(" AND ")}` : "";
|
|
91
|
-
const rows =
|
|
85
|
+
const rows = db
|
|
92
86
|
.prepare(`SELECT * FROM workflow_runs ${where} ORDER BY updated_at DESC, created_at DESC`)
|
|
93
87
|
.all(...params);
|
|
94
88
|
return { runs: rows.map(toWorkflowRunSummary) };
|
|
95
|
-
}
|
|
96
|
-
finally {
|
|
97
|
-
closeWorkflowDatabase(workflowDb);
|
|
98
|
-
}
|
|
89
|
+
});
|
|
99
90
|
}
|
|
100
91
|
export async function getNextWorkflowStep(specifier, params) {
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
const
|
|
104
|
-
const steps = readWorkflowRunSteps(workflowDb, run.id);
|
|
92
|
+
return withWorkflowDb(async (db) => {
|
|
93
|
+
const { run, autoStarted } = await resolveRunSpecifier(db, specifier, params);
|
|
94
|
+
const steps = readWorkflowRunSteps(db, run.id);
|
|
105
95
|
const currentStep = resolveCurrentStep(run, steps);
|
|
106
96
|
const done = run.status === "completed" ? true : undefined;
|
|
107
97
|
return {
|
|
@@ -115,54 +105,44 @@ export async function getNextWorkflowStep(specifier, params) {
|
|
|
115
105
|
...(done ? { done } : {}),
|
|
116
106
|
...(autoStarted ? { autoStarted } : {}),
|
|
117
107
|
};
|
|
118
|
-
}
|
|
119
|
-
finally {
|
|
120
|
-
closeWorkflowDatabase(workflowDb);
|
|
121
|
-
}
|
|
108
|
+
});
|
|
122
109
|
}
|
|
123
|
-
export function resumeWorkflowRun(runId) {
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
const run = readWorkflowRun(workflowDb, runId);
|
|
110
|
+
export async function resumeWorkflowRun(runId) {
|
|
111
|
+
return withWorkflowDb((db) => {
|
|
112
|
+
const run = readWorkflowRun(db, runId);
|
|
127
113
|
if (run.status === "completed") {
|
|
128
114
|
throw new UsageError(`Workflow run ${run.id} is already completed and cannot be resumed.`);
|
|
129
115
|
}
|
|
130
116
|
if (run.status === "active") {
|
|
131
|
-
const steps = readWorkflowRunSteps(
|
|
117
|
+
const steps = readWorkflowRunSteps(db, run.id);
|
|
132
118
|
return buildWorkflowRunDetail(run, steps);
|
|
133
119
|
}
|
|
134
120
|
// blocked or failed → flip back to active and re-open the current step so
|
|
135
121
|
// it can be reclassified (completed, failed, skipped) after resuming.
|
|
136
122
|
const now = new Date().toISOString();
|
|
137
|
-
|
|
123
|
+
db.transaction(() => {
|
|
138
124
|
if (run.current_step_id) {
|
|
139
|
-
|
|
140
|
-
.prepare(`UPDATE workflow_run_steps
|
|
125
|
+
db.prepare(`UPDATE workflow_run_steps
|
|
141
126
|
SET status = 'pending', notes = NULL, evidence_json = NULL, completed_at = NULL
|
|
142
|
-
WHERE run_id = ? AND step_id = ? AND status IN ('blocked', 'failed')`)
|
|
143
|
-
.run(run.id, run.current_step_id);
|
|
127
|
+
WHERE run_id = ? AND step_id = ? AND status IN ('blocked', 'failed')`).run(run.id, run.current_step_id);
|
|
144
128
|
}
|
|
145
|
-
|
|
129
|
+
db.prepare("UPDATE workflow_runs SET status = 'active', updated_at = ? WHERE id = ?").run(now, run.id);
|
|
146
130
|
})();
|
|
147
131
|
const updated = { ...run, status: "active", updated_at: now };
|
|
148
|
-
const steps = readWorkflowRunSteps(
|
|
132
|
+
const steps = readWorkflowRunSteps(db, run.id);
|
|
149
133
|
return buildWorkflowRunDetail(updated, steps);
|
|
150
|
-
}
|
|
151
|
-
finally {
|
|
152
|
-
closeWorkflowDatabase(workflowDb);
|
|
153
|
-
}
|
|
134
|
+
});
|
|
154
135
|
}
|
|
155
|
-
export function completeWorkflowStep(input) {
|
|
156
|
-
|
|
157
|
-
try {
|
|
136
|
+
export async function completeWorkflowStep(input) {
|
|
137
|
+
return withWorkflowDb((db) => {
|
|
158
138
|
let updatedRun;
|
|
159
139
|
let refreshedSteps = [];
|
|
160
|
-
|
|
161
|
-
const run = readWorkflowRun(
|
|
140
|
+
db.transaction(() => {
|
|
141
|
+
const run = readWorkflowRun(db, input.runId);
|
|
162
142
|
if (run.status !== "active") {
|
|
163
143
|
throw new UsageError(`Workflow run ${run.id} is ${run.status} and cannot be updated.`);
|
|
164
144
|
}
|
|
165
|
-
const existing =
|
|
145
|
+
const existing = db
|
|
166
146
|
.prepare("SELECT * FROM workflow_run_steps WHERE run_id = ? AND step_id = ?")
|
|
167
147
|
.get(run.id, input.stepId);
|
|
168
148
|
if (!existing) {
|
|
@@ -175,18 +155,14 @@ export function completeWorkflowStep(input) {
|
|
|
175
155
|
throw new UsageError(`Step "${input.stepId}" is not the current step for workflow run ${run.id}. Complete "${run.current_step_id}" first.`);
|
|
176
156
|
}
|
|
177
157
|
const completedAt = new Date().toISOString();
|
|
178
|
-
|
|
179
|
-
.prepare(`UPDATE workflow_run_steps
|
|
158
|
+
db.prepare(`UPDATE workflow_run_steps
|
|
180
159
|
SET status = ?, notes = ?, evidence_json = ?, completed_at = ?
|
|
181
|
-
WHERE run_id = ? AND step_id = ?`)
|
|
182
|
-
|
|
183
|
-
refreshedSteps = readWorkflowRunSteps(workflowDb, run.id);
|
|
160
|
+
WHERE run_id = ? AND step_id = ?`).run(input.status, input.notes?.trim() || null, input.evidence ? JSON.stringify(input.evidence) : null, completedAt, run.id, input.stepId);
|
|
161
|
+
refreshedSteps = readWorkflowRunSteps(db, run.id);
|
|
184
162
|
const state = deriveRunState(refreshedSteps);
|
|
185
|
-
|
|
186
|
-
.prepare(`UPDATE workflow_runs
|
|
163
|
+
db.prepare(`UPDATE workflow_runs
|
|
187
164
|
SET status = ?, current_step_id = ?, updated_at = ?, completed_at = ?
|
|
188
|
-
WHERE id = ?`)
|
|
189
|
-
.run(state.status, state.currentStepId, completedAt, state.completedAt, run.id);
|
|
165
|
+
WHERE id = ?`).run(state.status, state.currentStepId, completedAt, state.completedAt, run.id);
|
|
190
166
|
updatedRun = {
|
|
191
167
|
...run,
|
|
192
168
|
status: state.status,
|
|
@@ -205,10 +181,7 @@ export function completeWorkflowStep(input) {
|
|
|
205
181
|
appendEvent({ eventType: "workflow_finished", ref: detail.run.workflowRef, metadata: { runId: input.runId } });
|
|
206
182
|
}
|
|
207
183
|
return detail;
|
|
208
|
-
}
|
|
209
|
-
finally {
|
|
210
|
-
closeWorkflowDatabase(workflowDb);
|
|
211
|
-
}
|
|
184
|
+
});
|
|
212
185
|
}
|
|
213
186
|
async function resolveRunSpecifier(db, specifier, params) {
|
|
214
187
|
const explicitRun = db.prepare("SELECT * FROM workflow_runs WHERE id = ?").get(specifier);
|
|
@@ -262,7 +235,7 @@ async function loadWorkflowAsset(ref) {
|
|
|
262
235
|
if (!assetPath) {
|
|
263
236
|
throw new NotFoundError(`Workflow not found for ref: workflow:${parsed.name}`);
|
|
264
237
|
}
|
|
265
|
-
const resolvedSourcePath = sourcePath ??
|
|
238
|
+
const resolvedSourcePath = sourcePath ?? config.stashDir ?? assetPath;
|
|
266
239
|
const fullRef = `${parsed.origin ? `${parsed.origin}//` : ""}workflow:${parsed.name}`;
|
|
267
240
|
const cached = readWorkflowDocumentFromIndex(resolvedSourcePath, fullRef);
|
|
268
241
|
const document = cached ?? loadWorkflowDocumentFromDisk(assetPath);
|
|
@@ -452,18 +425,13 @@ function parseJsonArray(value) {
|
|
|
452
425
|
}
|
|
453
426
|
return undefined;
|
|
454
427
|
}
|
|
455
|
-
export function getActiveWorkflowRun(scopeKey = getCurrentWorkflowScopeKey()) {
|
|
456
|
-
|
|
457
|
-
const
|
|
458
|
-
const row = workflowDb
|
|
428
|
+
export async function getActiveWorkflowRun(scopeKey = getCurrentWorkflowScopeKey()) {
|
|
429
|
+
return withWorkflowDb((db) => {
|
|
430
|
+
const row = db
|
|
459
431
|
.query("SELECT id, current_step_id, workflow_ref FROM workflow_runs WHERE scope_key = ? AND status IN ('active', 'blocked') ORDER BY updated_at DESC LIMIT 1")
|
|
460
432
|
.get(scopeKey);
|
|
461
|
-
closeWorkflowDatabase(workflowDb);
|
|
462
433
|
if (!row)
|
|
463
434
|
return null;
|
|
464
435
|
return { runId: row.id, stepId: row.current_step_id, workflowRef: row.workflow_ref };
|
|
465
|
-
}
|
|
466
|
-
catch {
|
|
467
|
-
return null; // fail-open: never crash show output due to DB error
|
|
468
|
-
}
|
|
436
|
+
}).catch(() => null); // fail-open: never crash show output due to DB error
|
|
469
437
|
}
|
|
@@ -6,7 +6,7 @@
|
|
|
6
6
|
* step-id format, and the frontmatter key whitelist.
|
|
7
7
|
*/
|
|
8
8
|
const STEP_ID_REGEX = /^[A-Za-z0-9][A-Za-z0-9._-]*$/;
|
|
9
|
-
const ALLOWED_FRONTMATTER_KEYS = new Set(["description", "tags", "params"]);
|
|
9
|
+
const ALLOWED_FRONTMATTER_KEYS = new Set(["description", "tags", "params", "name", "updated"]);
|
|
10
10
|
export function runSemanticChecks(draft, frontmatterData, frontmatterEndLine, errors) {
|
|
11
11
|
checkFrontmatterKeys(frontmatterData, frontmatterEndLine, errors);
|
|
12
12
|
checkStepIdFormat(draft, errors);
|