akm-cli 0.7.4 → 0.8.0-rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/{CHANGELOG.md → .github/CHANGELOG.md} +34 -1
- package/.github/LICENSE +374 -0
- package/dist/cli/parse-args.js +43 -0
- package/dist/cli.js +1007 -593
- package/dist/commands/agent-dispatch.js +102 -0
- package/dist/commands/agent-support.js +62 -0
- package/dist/commands/config-cli.js +68 -84
- package/dist/commands/consolidate.js +823 -0
- package/dist/commands/curate.js +1 -0
- package/dist/commands/distill-promotion-policy.js +658 -0
- package/dist/commands/distill.js +250 -48
- package/dist/commands/eval-cases.js +40 -0
- package/dist/commands/events.js +12 -24
- package/dist/commands/graph.js +222 -0
- package/dist/commands/health.js +376 -0
- package/dist/commands/help/help-accept.md +9 -0
- package/dist/commands/help/help-improve.md +53 -0
- package/dist/commands/help/help-proposals.md +15 -0
- package/dist/commands/help/help-propose.md +17 -0
- package/dist/commands/help/help-reject.md +8 -0
- package/dist/commands/history.js +3 -30
- package/dist/commands/improve.js +1170 -0
- package/dist/commands/info.js +2 -2
- package/dist/commands/init.js +2 -2
- package/dist/commands/install-audit.js +5 -1
- package/dist/commands/installed-stashes.js +118 -138
- package/dist/commands/knowledge.js +133 -0
- package/dist/commands/lint/agent-linter.js +46 -0
- package/dist/commands/lint/base-linter.js +251 -0
- package/dist/commands/lint/command-linter.js +46 -0
- package/dist/commands/lint/default-linter.js +13 -0
- package/dist/commands/lint/index.js +107 -0
- package/dist/commands/lint/knowledge-linter.js +13 -0
- package/dist/commands/lint/memory-linter.js +58 -0
- package/dist/commands/lint/registry.js +33 -0
- package/dist/commands/lint/skill-linter.js +42 -0
- package/dist/commands/lint/task-linter.js +47 -0
- package/dist/commands/lint/types.js +1 -0
- package/dist/commands/lint/workflow-linter.js +53 -0
- package/dist/commands/lint.js +1 -0
- package/dist/commands/migration-help.js +2 -2
- package/dist/commands/proposal.js +8 -7
- package/dist/commands/propose.js +113 -43
- package/dist/commands/reflect.js +175 -41
- package/dist/commands/registry-search.js +2 -2
- package/dist/commands/remember.js +55 -1
- package/dist/commands/schema-repair.js +130 -0
- package/dist/commands/search.js +21 -5
- package/dist/commands/show.js +131 -52
- package/dist/commands/source-add.js +10 -10
- package/dist/commands/source-manage.js +11 -19
- package/dist/commands/tasks.js +385 -0
- package/dist/commands/url-checker.js +39 -0
- package/dist/commands/vault.js +7 -33
- package/dist/core/action-contributors.js +25 -0
- package/dist/core/asset-registry.js +5 -17
- package/dist/core/asset-spec.js +11 -1
- package/dist/core/common.js +94 -0
- package/dist/core/concurrent.js +22 -0
- package/dist/core/config.js +229 -122
- package/dist/core/events.js +87 -123
- package/dist/core/frontmatter.js +3 -1
- package/dist/core/markdown.js +17 -0
- package/dist/core/memory-improve.js +678 -0
- package/dist/core/parse.js +155 -0
- package/dist/core/paths.js +101 -3
- package/dist/core/proposal-validators.js +61 -0
- package/dist/core/proposals.js +49 -38
- package/dist/core/state-db.js +775 -0
- package/dist/core/time.js +51 -0
- package/dist/core/warn.js +59 -1
- package/dist/indexer/db-search.js +86 -472
- package/dist/indexer/db.js +392 -6
- package/dist/indexer/ensure-index.js +133 -0
- package/dist/indexer/graph-boost.js +247 -94
- package/dist/indexer/graph-db.js +201 -0
- package/dist/indexer/graph-dedup.js +99 -0
- package/dist/indexer/graph-extraction.js +417 -74
- package/dist/indexer/index-context.js +10 -0
- package/dist/indexer/indexer.js +466 -298
- package/dist/indexer/llm-cache.js +47 -0
- package/dist/indexer/match-contributors.js +141 -0
- package/dist/indexer/matchers.js +24 -190
- package/dist/indexer/memory-inference.js +63 -29
- package/dist/indexer/metadata-contributors.js +26 -0
- package/dist/indexer/metadata.js +188 -175
- package/dist/indexer/path-resolver.js +89 -0
- package/dist/indexer/ranking-contributors.js +204 -0
- package/dist/indexer/ranking.js +74 -0
- package/dist/indexer/search-hit-enrichers.js +22 -0
- package/dist/indexer/search-source.js +24 -9
- package/dist/indexer/semantic-status.js +2 -16
- package/dist/indexer/walker.js +25 -0
- package/dist/integrations/agent/config.js +175 -3
- package/dist/integrations/agent/index.js +3 -1
- package/dist/integrations/agent/pipeline.js +39 -0
- package/dist/integrations/agent/profiles.js +67 -5
- package/dist/integrations/agent/prompts.js +114 -29
- package/dist/integrations/agent/runners.js +31 -0
- package/dist/integrations/agent/sdk-runner.js +120 -0
- package/dist/integrations/agent/spawn.js +136 -28
- package/dist/integrations/lockfile.js +10 -18
- package/dist/integrations/session-logs/index.js +65 -0
- package/dist/integrations/session-logs/providers/claude-code.js +56 -0
- package/dist/integrations/session-logs/providers/opencode.js +52 -0
- package/dist/integrations/session-logs/types.js +1 -0
- package/dist/llm/call-ai.js +74 -0
- package/dist/llm/client.js +63 -86
- package/dist/llm/feature-gate.js +27 -16
- package/dist/llm/graph-extract.js +297 -64
- package/dist/llm/memory-infer.js +52 -71
- package/dist/llm/metadata-enhance.js +39 -22
- package/dist/llm/prompts/graph-extract-user-prompt.md +12 -0
- package/dist/output/cli-hints-full.md +277 -0
- package/dist/output/cli-hints-short.md +65 -0
- package/dist/output/cli-hints.js +2 -309
- package/dist/output/renderers.js +196 -124
- package/dist/output/shapes.js +41 -3
- package/dist/output/text.js +257 -21
- package/dist/registry/providers/skills-sh.js +61 -49
- package/dist/registry/providers/static-index.js +44 -48
- package/dist/setup/setup.js +510 -11
- package/dist/sources/provider-factory.js +2 -1
- package/dist/sources/providers/git.js +44 -2
- package/dist/sources/website-ingest.js +4 -0
- package/dist/tasks/backends/cron.js +200 -0
- package/dist/tasks/backends/exec-utils.js +25 -0
- package/dist/tasks/backends/index.js +32 -0
- package/dist/tasks/backends/launchd-template.xml +19 -0
- package/dist/tasks/backends/launchd.js +184 -0
- package/dist/tasks/backends/schtasks-template.xml +29 -0
- package/dist/tasks/backends/schtasks.js +212 -0
- package/dist/tasks/parser.js +198 -0
- package/dist/tasks/resolveAkmBin.js +84 -0
- package/dist/tasks/runner.js +432 -0
- package/dist/tasks/schedule.js +208 -0
- package/dist/tasks/schema.js +13 -0
- package/dist/tasks/validator.js +59 -0
- package/dist/wiki/index-template.md +12 -0
- package/dist/wiki/ingest-workflow-template.md +54 -0
- package/dist/wiki/log-template.md +8 -0
- package/dist/wiki/schema-template.md +61 -0
- package/dist/wiki/wiki-templates.js +12 -0
- package/dist/wiki/wiki.js +10 -61
- package/dist/workflows/authoring.js +5 -25
- package/dist/workflows/db.js +9 -0
- package/dist/workflows/renderer.js +8 -3
- package/dist/workflows/runs.js +73 -88
- package/dist/workflows/scope-key.js +76 -0
- package/dist/workflows/validator.js +1 -1
- package/dist/workflows/workflow-template.md +24 -0
- package/docs/README.md +3 -0
- package/docs/migration/release-notes/0.7.0.md +1 -1
- package/docs/migration/release-notes/0.7.4.md +1 -1
- package/docs/migration/release-notes/0.7.5.md +20 -0
- package/docs/migration/release-notes/0.8.0.md +43 -0
- package/package.json +4 -3
- package/dist/templates/wiki-templates.js +0 -100
|
@@ -0,0 +1,222 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { parseAssetRef } from "../core/asset-ref";
|
|
4
|
+
import { loadConfig } from "../core/config";
|
|
5
|
+
import { NotFoundError, UsageError } from "../core/errors";
|
|
6
|
+
import { closeDatabase, openExistingDatabase } from "../indexer/db";
|
|
7
|
+
import { listRelatedPathsForFile } from "../indexer/graph-boost";
|
|
8
|
+
import { loadStoredGraphSnapshot } from "../indexer/graph-db";
|
|
9
|
+
import { lookup } from "../indexer/indexer";
|
|
10
|
+
import { resolveAssetPath } from "../indexer/path-resolver";
|
|
11
|
+
import { findSourceForPath, resolveSourceEntries } from "../indexer/search-source";
|
|
12
|
+
/**
 * Map a CLI `--source` value to a configured stash root path.
 * Absent or "primary" selects the first configured source.
 * Throws NotFoundError when no sources exist or the name does not match.
 */
function resolveGraphStashPath(source) {
    const entries = resolveSourceEntries(undefined, loadConfig());
    if (entries.length === 0) {
        throw new NotFoundError("No stash sources are configured.", "STASH_NOT_FOUND");
    }
    if (!source || source === "primary") {
        return entries[0].path;
    }
    // A source may be addressed either by registry id or by its path.
    for (const entry of entries) {
        if (entry.registryId === source || entry.path === source) {
            return entry.path;
        }
    }
    throw new NotFoundError(`Source not found: ${source}`, "SOURCE_NOT_FOUND", "Run `akm list` to see source names.");
}
|
|
25
|
+
/**
 * Load the stored graph snapshot for a source and reshape it for command
 * output. Opens the index database and always closes it before returning.
 * Throws NotFoundError when no snapshot has been stored for the stash.
 */
function loadGraph(source) {
    const stashPath = resolveGraphStashPath(source);
    let db;
    try {
        db = openExistingDatabase();
        const snapshot = loadStoredGraphSnapshot(stashPath, db);
        if (!snapshot) {
            throw new NotFoundError(`Graph data not found for source ${stashPath}.`, "FILE_NOT_FOUND", "Run the improvement flow that refreshes graph extraction data.");
        }
        const graph = {
            schemaVersion: snapshot.schemaVersion,
            generatedAt: snapshot.generatedAt,
            stashRoot: snapshot.stashPath,
            files: snapshot.files,
            entities: snapshot.entities,
            relations: snapshot.relations,
        };
        // Only attach quality data when the snapshot actually recorded it.
        if (snapshot.quality) {
            graph.quality = snapshot.quality;
        }
        return { graph, stashPath, graphPath: snapshot.graphPath };
    }
    finally {
        if (db) {
            closeDatabase(db);
        }
    }
}
|
|
53
|
+
/**
 * Count, for each entity name, how many files mention it.
 * Duplicate mentions within a single file are counted only once.
 * @returns Map of entity name -> number of files containing it.
 */
function countEntitiesByFile(nodes) {
    const fileCounts = new Map();
    for (const node of nodes) {
        // Dedupe within one file so each file contributes at most 1 per entity.
        for (const entity of new Set(node.entities)) {
            fileCounts.set(entity, (fileCounts.get(entity) ?? 0) + 1);
        }
    }
    return fileCounts;
}
|
|
66
|
+
/**
 * Produce the `graph-summary` output shape: file/entity/relation counts plus
 * snapshot metadata for the selected source.
 */
export function akmGraphSummary(options) {
    const { graph, stashPath, graphPath } = loadGraph(options?.source);
    // Flattened graphs carry top-level entity/relation arrays; otherwise the
    // counts are aggregated from the per-file nodes.
    const entityCount = Array.isArray(graph.entities)
        ? graph.entities.length
        : countEntitiesByFile(graph.files).size;
    let relationCount;
    if (Array.isArray(graph.relations)) {
        relationCount = graph.relations.length;
    }
    else {
        relationCount = 0;
        for (const node of graph.files) {
            relationCount += node.relations.length;
        }
    }
    const summary = {
        schemaVersion: 1,
        shape: "graph-summary",
        stashPath,
        graphPath,
        generatedAt: graph.generatedAt,
        fileCount: graph.files.length,
        entityCount,
        relationCount,
    };
    if (graph.quality) {
        summary.quality = graph.quality;
    }
    return summary;
}
|
|
82
|
+
/**
 * Produce the `graph-entities` output shape: entities ranked by the number of
 * files mentioning them (descending), ties broken alphabetically.
 * @param options.source Optional source name/path (defaults to primary).
 * @param options.limit Optional positive integer cap on returned entities.
 */
export function akmGraphEntities(options) {
    const { graph, stashPath, graphPath } = loadGraph(options?.source);
    const limit = options?.limit;
    // Enforce what the error message promises: a positive *integer*. The
    // previous check (Number.isFinite && > 0) accepted fractional values
    // such as 2.5.
    if (limit !== undefined && (!Number.isInteger(limit) || limit <= 0)) {
        throw new UsageError("--limit must be a positive integer.", "INVALID_FLAG_VALUE");
    }
    const counts = countEntitiesByFile(graph.files);
    const entities = [...counts.entries()]
        .map(([name, fileCount]) => ({ name, fileCount }))
        .sort((a, b) => b.fileCount - a.fileCount || a.name.localeCompare(b.name));
    const sliced = typeof limit === "number" ? entities.slice(0, limit) : entities;
    return {
        schemaVersion: 1,
        shape: "graph-entities",
        stashPath,
        graphPath,
        generatedAt: graph.generatedAt,
        total: entities.length,
        entities: sliced,
    };
}
|
|
103
|
+
/**
 * Produce the `graph-relations` output shape: distinct (from, to, type)
 * relation triples aggregated across all files, ranked by occurrence count
 * (descending) with deterministic tie-breaking on `from` then `to`.
 * @param options.source Optional source name/path (defaults to primary).
 * @param options.limit Optional positive integer cap on returned relations.
 */
export function akmGraphRelations(options) {
    const { graph, stashPath, graphPath } = loadGraph(options?.source);
    const limit = options?.limit;
    // Enforce what the error message promises: a positive *integer*. The
    // previous check (Number.isFinite && > 0) accepted fractional values
    // such as 2.5.
    if (limit !== undefined && (!Number.isInteger(limit) || limit <= 0)) {
        throw new UsageError("--limit must be a positive integer.", "INVALID_FLAG_VALUE");
    }
    const counts = new Map();
    for (const node of graph.files) {
        for (const rel of node.relations) {
            // NUL-separated key keeps the (from, to, type) triple unambiguous
            // even when components contain ordinary separator characters.
            const key = `${rel.from}\u0000${rel.to}\u0000${rel.type ?? ""}`;
            const existing = counts.get(key);
            if (existing) {
                existing.count += 1;
            }
            else {
                counts.set(key, { from: rel.from, to: rel.to, ...(rel.type ? { type: rel.type } : {}), count: 1 });
            }
        }
    }
    const relations = [...counts.values()].sort((a, b) => b.count - a.count || a.from.localeCompare(b.from) || a.to.localeCompare(b.to));
    const sliced = typeof limit === "number" ? relations.slice(0, limit) : relations;
    return {
        schemaVersion: 1,
        shape: "graph-relations",
        stashPath,
        graphPath,
        generatedAt: graph.generatedAt,
        total: relations.length,
        relations: sliced,
    };
}
|
|
134
|
+
/**
 * Export the loaded graph to disk as pretty-printed JSON or as JSONL
 * (one tagged record per entity, then one per relation).
 * Requires --out; creates parent directories as needed.
 */
export function akmGraphExport(options) {
    if (!options.out?.trim()) {
        throw new UsageError("`akm graph export` requires --out <path>.", "MISSING_REQUIRED_ARGUMENT");
    }
    const format = options.format ?? "json";
    if (format !== "json" && format !== "jsonl") {
        throw new UsageError("--format must be one of: json, jsonl.", "INVALID_FLAG_VALUE");
    }
    const { graph, stashPath, graphPath } = loadGraph(options.source);
    const outPath = path.resolve(options.out);
    fs.mkdirSync(path.dirname(outPath), { recursive: true });
    let payload;
    if (format === "json") {
        payload = `${JSON.stringify(graph, null, 2)}\n`;
    }
    else {
        // JSONL layout: all entity records (across every file) first, then
        // all relation records, each tagged with its kind and source file.
        const records = [];
        for (const file of graph.files) {
            for (const entity of file.entities) {
                records.push(JSON.stringify({ kind: "entity", entity, file: file.path }));
            }
        }
        for (const file of graph.files) {
            for (const relation of file.relations) {
                records.push(JSON.stringify({ kind: "relation", ...relation, file: file.path }));
            }
        }
        payload = `${records.join("\n")}\n`;
    }
    fs.writeFileSync(outPath, payload, "utf8");
    return {
        schemaVersion: 1,
        shape: "graph-export",
        stashPath,
        graphPath,
        outPath,
        format,
        bytes: Buffer.byteLength(payload, "utf8"),
    };
}
|
|
162
|
+
/**
 * Produce the `graph-related` output shape: graph neighbors for the asset
 * identified by `options.ref`, resolved via the index.
 * @param options.ref Required asset reference.
 * @param options.source Optional source override (defaults to inference from
 *   the resolved asset path).
 * @param options.limit Optional positive integer cap (backend default: 5).
 */
export async function akmGraphRelated(options) {
    // Guard against a missing ref so callers get a UsageError instead of a
    // TypeError from calling .trim() on undefined.
    const ref = (options.ref ?? "").trim();
    if (!ref) {
        throw new UsageError("`akm graph related` requires <ref>.", "MISSING_REQUIRED_ARGUMENT");
    }
    const limit = options.limit;
    // Enforce what the error message promises: a positive *integer*. The
    // previous check (Number.isFinite && > 0) accepted fractional values.
    if (limit !== undefined && (!Number.isInteger(limit) || limit <= 0)) {
        throw new UsageError("--limit must be a positive integer.", "INVALID_FLAG_VALUE");
    }
    const target = await resolveGraphTarget(ref, options.source);
    const { graph, stashPath, graphPath } = loadGraph(target.stashPath);
    let db;
    let related;
    try {
        db = openExistingDatabase();
        related = listRelatedPathsForFile(stashPath, target.filePath, limit ?? 5, db);
    }
    finally {
        if (db)
            closeDatabase(db);
    }
    return {
        schemaVersion: 1,
        shape: "graph-related",
        stashPath,
        graphPath,
        generatedAt: graph.generatedAt,
        ref: target.ref,
        path: target.filePath,
        total: related.length,
        related,
        ...(related.length === 0 ? { tip: "No related graph neighbors were found for this asset." } : {}),
    };
}
|
|
197
|
+
/**
 * Resolve an asset ref to its on-disk path and owning stash source.
 * Uses the index first, falling back to a direct lookup, then verifies the
 * resolved file actually lives inside the chosen stash root.
 * @throws NotFoundError when the ref or its source cannot be resolved.
 * @throws UsageError when the asset lies outside the selected source.
 */
async function resolveGraphTarget(ref, source) {
    const parsedRef = parseAssetRef(ref);
    const filePath = (await resolveAssetPath(parsedRef, {
        mode: "index-first",
        honorOrigin: true,
    })) ?? (await lookup(parsedRef))?.filePath;
    if (!filePath) {
        throw new NotFoundError(`Asset not found for ref: ${ref}`);
    }
    const allSources = resolveSourceEntries(undefined, loadConfig());
    const matchedSource = findSourceForPath(filePath, allSources);
    const inferredStashPath = matchedSource?.path;
    const stashPath = source ? resolveGraphStashPath(source) : inferredStashPath;
    if (!stashPath) {
        throw new NotFoundError(`Could not determine stash source for ref: ${ref}`, "SOURCE_NOT_FOUND");
    }
    // Resolve BOTH sides before the containment test. The previous check
    // compared the raw filePath against a resolved stash root, so a relative
    // or unnormalized asset path could fail the prefix check even when it
    // lives inside the stash.
    const resolvedFile = path.resolve(filePath);
    const resolvedStash = path.resolve(stashPath);
    if (!resolvedFile.startsWith(resolvedStash + path.sep) && resolvedFile !== resolvedStash) {
        throw new UsageError(`Resolved asset ${ref} is not inside source ${source ?? stashPath}.`, "INVALID_SOURCE_VALUE", "Pass --source for the asset's source, or omit it to infer from the resolved asset path.");
    }
    return {
        ref,
        parsedRef,
        filePath,
        stashPath,
    };
}
|
|
@@ -0,0 +1,376 @@
|
|
|
1
|
+
import { spawnSync } from "node:child_process";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import { loadConfig } from "../core/config";
|
|
4
|
+
import { ConfigError, UsageError } from "../core/errors";
|
|
5
|
+
import { appendEvent, readEvents } from "../core/events";
|
|
6
|
+
import { getStateDbPathInDataDir } from "../core/paths";
|
|
7
|
+
import { openStateDatabase, queryTaskHistory } from "../core/state-db";
|
|
8
|
+
import { parseSinceToIso } from "../core/time";
|
|
9
|
+
import { readSemanticStatus } from "../indexer/semantic-status";
|
|
10
|
+
import { detectAgentCliProfiles, requireAgentProfile } from "../integrations/agent";
|
|
11
|
+
import { getExecutionLogCandidates } from "../integrations/session-logs";
|
|
12
|
+
// Default health window when --since is omitted: 24 hours.
const DEFAULT_SINCE_MS = 24 * 60 * 60 * 1000;
// Event-type name for completed improve runs (presumably matched against
// state-db events elsewhere in this module — the usage is outside this view).
const IMPROVE_COMPLETED_EVENT = "improve_completed";
// Event type appended (and read back) by the state-db round-trip probe.
const HEALTH_PROBE_EVENT = "health_probe";
// Active task runs older than this threshold are flagged as stuck: 15 minutes.
const ACTIVE_RUN_WARN_MS = 15 * 60 * 1000;
|
|
16
|
+
/**
 * Normalize a `--since` flag into an ISO-8601 timestamp.
 * Accepts a simple duration ("7d", "12h", "1m", case-insensitive) or any
 * value the shared parseSinceToIso parser understands. A missing or blank
 * value defaults to 24 hours ago.
 */
export function parseHealthSince(since) {
    if (since === undefined || since.trim() === "") {
        return new Date(Date.now() - DEFAULT_SINCE_MS).toISOString();
    }
    const trimmed = since.trim();
    const durationMatch = trimmed.match(/^(\d+)([dhm])$/i);
    if (!durationMatch) {
        // Not a simple duration; defer to the shared timestamp parser.
        return parseSinceToIso(trimmed);
    }
    const amount = Number.parseInt(durationMatch[1] ?? "0", 10);
    const unit = (durationMatch[2] ?? "d").toLowerCase();
    if (!Number.isFinite(amount) || amount < 0) {
        throw new UsageError("--since must be a non-negative duration or timestamp.", "INVALID_FLAG_VALUE");
    }
    // NOTE(review): "m" maps to 30 days (~a month), not minutes — confirm this
    // is intentional; "h" is hours and the fallback unit "d" is days.
    let multiplier;
    if (unit === "h") {
        multiplier = 60 * 60 * 1000;
    }
    else if (unit === "m") {
        multiplier = 30 * 24 * 60 * 60 * 1000;
    }
    else {
        multiplier = 24 * 60 * 60 * 1000;
    }
    return new Date(Date.now() - amount * multiplier).toISOString();
}
|
|
33
|
+
// Round a ratio to 4 decimal places for stable, compact report output.
function roundRate(value) {
    const fixed = value.toFixed(4);
    return Number.parseFloat(fixed);
}
|
|
36
|
+
// Parse a task_history row's metadata_json column.
// Malformed or missing JSON degrades to an empty object rather than throwing.
function parseTaskMetadata(row) {
    let parsed;
    try {
        parsed = JSON.parse(row.metadata_json);
    }
    catch {
        parsed = {};
    }
    return parsed;
}
|
|
44
|
+
/**
 * Build the zero-valued improve-metrics shape used before (or without) any
 * improve events being observed in the window.
 */
function createUnknownImproveMetrics() {
    // Per-action counters, all starting from zero.
    const zeroedActions = {
        reflect: 0,
        distill: 0,
        distillSkipped: 0,
        memoryPrune: 0,
        memoryInference: 0,
        graphExtraction: 0,
        error: 0,
    };
    // Memory-cleanup counters, all starting from zero.
    const zeroedCleanup = {
        pruneCandidates: 0,
        contradictionCandidates: 0,
        beliefStateTransitions: 0,
        consolidationCandidates: 0,
        archived: 0,
        warnings: 0,
    };
    return {
        invoked: 0,
        completed: 0,
        skipped: 0,
        skipReasons: {},
        plannedRefs: 0,
        actions: zeroedActions,
        crossStepErrorsInjected: 0,
        feedbackRatioUsed: false,
        coverageGapCount: 0,
        executionLogCandidateCount: 0,
        evalCasesWritten: 0,
        deadUrlCount: 0,
        memorySummary: { eligible: 0, derived: 0 },
        memoryCleanup: zeroedCleanup,
        consolidation: { ran: false, processed: 0, durationMs: 0 },
        memoryInference: { ran: false, writes: 0, durationMs: 0 },
        graphExtraction: { ran: false, extractedFiles: 0, durationMs: 0 },
    };
}
|
|
80
|
+
// Coerce a metadata value to a finite number.
// Finite numbers pass through; non-blank numeric strings are parsed;
// everything else (NaN, Infinity, blanks, other types) counts as 0.
function toFiniteNumber(value) {
    switch (typeof value) {
        case "number":
            return Number.isFinite(value) ? value : 0;
        case "string": {
            if (!value.trim()) {
                return 0;
            }
            const parsed = Number(value);
            return Number.isFinite(parsed) ? parsed : 0;
        }
        default:
            return 0;
    }
}
|
|
90
|
+
/**
 * Aggregate improve_completed events into the metrics shape: sums every
 * numeric metadata field, flags feedback-ratio usage, and derives the
 * `ran` flags for each sub-step from its accumulated totals.
 */
function summarizeImproveCompleted(events) {
    const metrics = createUnknownImproveMetrics();
    metrics.completed = events.length;
    // Coerce-and-accumulate helper; non-numeric metadata contributes 0.
    const add = (target, key, raw) => {
        target[key] += toFiniteNumber(raw);
    };
    for (const event of events) {
        const meta = event.metadata ?? {};
        add(metrics, "plannedRefs", meta.plannedRefs);
        add(metrics.actions, "reflect", meta.reflectActions);
        add(metrics.actions, "distill", meta.distillActions);
        add(metrics.actions, "distillSkipped", meta.distillSkippedActions);
        add(metrics.actions, "memoryPrune", meta.memoryPruneActions);
        add(metrics.actions, "memoryInference", meta.memoryInferenceActions);
        add(metrics.actions, "graphExtraction", meta.graphExtractionActions);
        add(metrics.actions, "error", meta.errorActions);
        add(metrics, "crossStepErrorsInjected", meta.crossStepErrorsInjected);
        add(metrics, "coverageGapCount", meta.coverageGapCount);
        add(metrics, "executionLogCandidateCount", meta.executionLogCandidateCount);
        add(metrics, "evalCasesWritten", meta.evalCasesWritten);
        add(metrics, "deadUrlCount", meta.deadUrlCount);
        add(metrics.memorySummary, "eligible", meta.memoryEligible);
        add(metrics.memorySummary, "derived", meta.memoryDerived);
        add(metrics.memoryCleanup, "pruneCandidates", meta.memoryCleanupPruneCandidates);
        add(metrics.memoryCleanup, "contradictionCandidates", meta.memoryCleanupContradictionCandidates);
        add(metrics.memoryCleanup, "beliefStateTransitions", meta.memoryCleanupBeliefStateTransitions);
        add(metrics.memoryCleanup, "consolidationCandidates", meta.memoryCleanupConsolidationCandidates);
        add(metrics.memoryCleanup, "archived", meta.memoryCleanupArchived);
        add(metrics.memoryCleanup, "warnings", meta.memoryCleanupWarnings);
        add(metrics.consolidation, "processed", meta.consolidationProcessed);
        add(metrics.consolidation, "durationMs", meta.consolidationDurationMs);
        add(metrics.memoryInference, "writes", meta.memoryInferenceWrites);
        add(metrics.memoryInference, "durationMs", meta.memoryInferenceDurationMs);
        add(metrics.graphExtraction, "extractedFiles", meta.graphExtractionExtractedFiles);
        add(metrics.graphExtraction, "durationMs", meta.graphExtractionDurationMs);
        // Sticky flag: true once any event reports exact boolean true.
        if (meta.feedbackRatioUsed === true) {
            metrics.feedbackRatioUsed = true;
        }
    }
    // A sub-step "ran" when it reported any work or any elapsed time.
    metrics.consolidation.ran = metrics.consolidation.processed > 0 || metrics.consolidation.durationMs > 0;
    metrics.memoryInference.ran = metrics.memoryInference.writes > 0 || metrics.memoryInference.durationMs > 0;
    metrics.graphExtraction.ran = metrics.graphExtraction.extractedFiles > 0 || metrics.graphExtraction.durationMs > 0;
    return metrics;
}
|
|
130
|
+
// Tally improve-skip events by metadata.reason.
// A missing, non-string, or whitespace-only reason is bucketed as "unknown".
function buildImproveSkipSummary(events) {
    const skipReasons = {};
    for (const event of events) {
        const rawReason = event.metadata?.reason;
        const reason = typeof rawReason === "string" && rawReason.trim() ? rawReason : "unknown";
        skipReasons[reason] = (skipReasons[reason] ?? 0) + 1;
    }
    return { skipped: events.length, skipReasons };
}
|
|
138
|
+
/**
 * Verify state.db write/read health: append a probe event, then read it back
 * past the prior offset. Returns { ok, durationMs } with an error description
 * when the appended event is not observable.
 */
function probeStateDbRoundTrip(stateDbPath) {
    const startingOffset = readEvents({}, { dbPath: stateDbPath }).nextOffset;
    const startedAt = Date.now();
    appendEvent({ eventType: HEALTH_PROBE_EVENT, ref: "health:probe", metadata: { source: "akm health" } }, { dbPath: stateDbPath });
    const readBack = readEvents({ sinceOffset: startingOffset, type: HEALTH_PROBE_EVENT, ref: "health:probe" }, { dbPath: stateDbPath });
    const durationMs = Date.now() - startedAt;
    const roundTripped = readBack.events.length > 0 && readBack.nextOffset > startingOffset;
    if (!roundTripped) {
        return { ok: false, durationMs, error: "probe event was not readable after append" };
    }
    return { ok: true, durationMs };
}
|
|
149
|
+
/**
 * Probe the configured agent profile and report a single health check:
 * config present -> profile resolvable -> (SDK mode, or CLI binary on PATH
 * and responsive to `--version`).
 */
function runAgentProbe() {
    // All branches share the same result envelope; evidence is optional.
    const makeResult = (status, confidence, message, evidence) => ({
        name: "agent-profile",
        kind: "deterministic",
        status,
        confidence,
        message,
        ...(evidence ? { evidence } : {}),
    });
    const config = loadConfig();
    if (!config.agent) {
        return makeResult("unknown", "high", "No agent config present.");
    }
    let profile;
    try {
        profile = requireAgentProfile(config.agent);
    }
    catch (error) {
        return makeResult("warn", "high", error instanceof Error ? error.message : String(error));
    }
    if (profile.sdkMode === true) {
        // SDK-mode profiles need no CLI binary; only a missing model is flagged.
        const message = profile.model
            ? `SDK mode profile "${profile.name}" is configured.`
            : `SDK mode profile "${profile.name}" has no explicit model.`;
        return makeResult(profile.model ? "pass" : "warn", "high", message, { profile: profile.name, sdkMode: true, model: profile.model ?? null });
    }
    const detection = detectAgentCliProfiles(config.agent).find((entry) => entry.name === profile.name);
    if (!detection?.available) {
        return makeResult("fail", "high", `Default agent profile "${profile.name}" is not available on PATH.`, { profile: profile.name, bin: profile.bin });
    }
    // Binary exists — confirm it actually answers `--version` within 5s.
    const version = spawnSync(profile.bin, ["--version"], { encoding: "utf8", timeout: 5_000 });
    if ((version.status ?? 1) !== 0) {
        return makeResult("warn", "medium", `Agent binary "${profile.bin}" was found but \`--version\` failed.`, {
            profile: profile.name,
            bin: profile.bin,
            exitCode: version.status ?? null,
            stderr: (version.stderr ?? "").trim(),
        });
    }
    return makeResult("pass", "high", `Agent profile "${profile.name}" is available.`, { profile: profile.name, bin: profile.bin, version: (version.stdout ?? "").trim() });
}
|
|
222
|
+
export function akmHealth(options = {}) {
|
|
223
|
+
const since = parseHealthSince(options.since);
|
|
224
|
+
const stateDbPath = getStateDbPathInDataDir();
|
|
225
|
+
const hardChecks = [];
|
|
226
|
+
const advisories = [];
|
|
227
|
+
const getExecutionLogCandidatesFn = options.getExecutionLogCandidatesFn ?? getExecutionLogCandidates;
|
|
228
|
+
let db;
|
|
229
|
+
try {
|
|
230
|
+
db = openStateDatabase(stateDbPath);
|
|
231
|
+
}
|
|
232
|
+
catch (error) {
|
|
233
|
+
throw new ConfigError(`Unable to open state.db: ${error instanceof Error ? error.message : String(error)}`, "INVALID_CONFIG_FILE");
|
|
234
|
+
}
|
|
235
|
+
try {
|
|
236
|
+
const tables = db
|
|
237
|
+
.prepare("SELECT name FROM sqlite_master WHERE type = 'table' AND name IN ('events', 'task_history', 'proposals', 'schema_migrations') ORDER BY name")
|
|
238
|
+
.all();
|
|
239
|
+
const tableNames = tables.map((row) => row.name).sort();
|
|
240
|
+
const requiredTables = ["events", "proposals", "schema_migrations", "task_history"];
|
|
241
|
+
const missingTables = requiredTables.filter((name) => !tableNames.includes(name));
|
|
242
|
+
hardChecks.push({
|
|
243
|
+
name: "state-db-schema",
|
|
244
|
+
kind: "deterministic",
|
|
245
|
+
status: missingTables.length === 0 ? "pass" : "fail",
|
|
246
|
+
confidence: "high",
|
|
247
|
+
message: missingTables.length === 0
|
|
248
|
+
? "state.db opened and required tables are present."
|
|
249
|
+
: `state.db is missing required tables: ${missingTables.join(", ")}`,
|
|
250
|
+
evidence: { path: stateDbPath, tables: tableNames },
|
|
251
|
+
});
|
|
252
|
+
const probe = probeStateDbRoundTrip(stateDbPath);
|
|
253
|
+
hardChecks.push({
|
|
254
|
+
name: "state-db-round-trip",
|
|
255
|
+
kind: "deterministic",
|
|
256
|
+
status: probe.ok ? "pass" : "fail",
|
|
257
|
+
confidence: "high",
|
|
258
|
+
message: probe.ok ? "state.db append/read round-trip succeeded." : `state.db round-trip failed: ${probe.error}`,
|
|
259
|
+
evidence: { path: stateDbPath, durationMs: probe.durationMs },
|
|
260
|
+
});
|
|
261
|
+
const taskRows = queryTaskHistory(db, { since });
|
|
262
|
+
const taskRowsWithLogs = taskRows.filter((row) => row.log_path !== null);
|
|
263
|
+
const existingLogRows = taskRowsWithLogs.filter((row) => row.log_path && fs.existsSync(row.log_path));
|
|
264
|
+
const failedTaskRows = taskRows.filter((row) => row.status === "failed");
|
|
265
|
+
const activeRows = taskRows.filter((row) => row.status === "active");
|
|
266
|
+
const stuckActiveRuns = activeRows.filter((row) => Date.now() - new Date(row.started_at).getTime() > ACTIVE_RUN_WARN_MS).length;
|
|
267
|
+
const promptRows = taskRows.filter((row) => row.target_kind === "prompt");
|
|
268
|
+
const promptFailures = promptRows.filter((row) => {
|
|
269
|
+
const detail = parseTaskMetadata(row).detail;
|
|
270
|
+
return typeof detail?.reason === "string" && detail.reason.length > 0;
|
|
271
|
+
});
|
|
272
|
+
const logBackingRate = taskRowsWithLogs.length === 0 ? 1 : existingLogRows.length / taskRowsWithLogs.length;
|
|
273
|
+
const taskFailRate = taskRows.length === 0 ? 0 : failedTaskRows.length / taskRows.length;
|
|
274
|
+
const agentFailureRate = promptRows.length === 0 ? 0 : promptFailures.length / promptRows.length;
|
|
275
|
+
hardChecks.push({
|
|
276
|
+
name: "task-history-read",
|
|
277
|
+
kind: "deterministic",
|
|
278
|
+
status: "pass",
|
|
279
|
+
confidence: "high",
|
|
280
|
+
message: `Read ${taskRows.length} task-history row(s) since ${since}.`,
|
|
281
|
+
evidence: { rows: taskRows.length, since },
|
|
282
|
+
});
|
|
283
|
+
hardChecks.push({
|
|
284
|
+
name: "task-log-backing",
|
|
285
|
+
kind: "deterministic",
|
|
286
|
+
status: logBackingRate === 1 ? "pass" : "fail",
|
|
287
|
+
confidence: "high",
|
|
288
|
+
message: logBackingRate === 1
|
|
289
|
+
? "Every task_history log_path resolved on disk."
|
|
290
|
+
: `${taskRowsWithLogs.length - existingLogRows.length} task log(s) referenced in task_history are missing.`,
|
|
291
|
+
evidence: { totalWithLogs: taskRowsWithLogs.length, existingLogs: existingLogRows.length },
|
|
292
|
+
});
|
|
293
|
+
hardChecks.push({
|
|
294
|
+
name: "active-runs",
|
|
295
|
+
kind: "deterministic",
|
|
296
|
+
status: stuckActiveRuns === 0 ? "pass" : "warn",
|
|
297
|
+
confidence: "high",
|
|
298
|
+
message: stuckActiveRuns === 0
|
|
299
|
+
? "No active task runs exceeded the stale threshold."
|
|
300
|
+
: `${stuckActiveRuns} active task run(s) are older than ${Math.round(ACTIVE_RUN_WARN_MS / 60000)} minutes.`,
|
|
301
|
+
evidence: { stuckActiveRuns },
|
|
302
|
+
});
|
|
303
|
+
hardChecks.push(runAgentProbe());
|
|
304
|
+
const semanticStatus = readSemanticStatus();
|
|
305
|
+
advisories.push({
|
|
306
|
+
name: "semantic-search-runtime",
|
|
307
|
+
kind: "deterministic",
|
|
308
|
+
status: !semanticStatus ||
|
|
309
|
+
semanticStatus.status === "pending" ||
|
|
310
|
+
semanticStatus.status === "ready-js" ||
|
|
311
|
+
semanticStatus.status === "ready-vec"
|
|
312
|
+
? "pass"
|
|
313
|
+
: "warn",
|
|
314
|
+
confidence: "medium",
|
|
315
|
+
message: semanticStatus
|
|
316
|
+
? `Semantic search status: ${semanticStatus.status}`
|
|
317
|
+
: "No semantic-search runtime status recorded yet.",
|
|
318
|
+
evidence: semanticStatus ? { ...semanticStatus } : undefined,
|
|
319
|
+
});
|
|
320
|
+
const improveInvoked = readEvents({ since, type: "improve_invoked" }, { dbPath: stateDbPath }).events.length;
|
|
321
|
+
const improveCompletedEvents = readEvents({ since, type: IMPROVE_COMPLETED_EVENT }, { dbPath: stateDbPath }).events;
|
|
322
|
+
const improveSkippedEvents = readEvents({ since, type: "improve_skipped" }, { dbPath: stateDbPath }).events;
|
|
323
|
+
const improveSummary = summarizeImproveCompleted(improveCompletedEvents);
|
|
324
|
+
improveSummary.invoked = improveInvoked;
|
|
325
|
+
const skipSummary = buildImproveSkipSummary(improveSkippedEvents);
|
|
326
|
+
improveSummary.skipped = skipSummary.skipped;
|
|
327
|
+
improveSummary.skipReasons = skipSummary.skipReasons;
|
|
328
|
+
let sessionLogEntries = [];
|
|
329
|
+
try {
|
|
330
|
+
const sinceDays = Math.max(0, Math.ceil((Date.now() - new Date(since).getTime()) / (24 * 60 * 60 * 1000)));
|
|
331
|
+
sessionLogEntries = getExecutionLogCandidatesFn(sinceDays).map((entry) => ({
|
|
332
|
+
topic: entry.topic,
|
|
333
|
+
frequency: entry.frequency,
|
|
334
|
+
source: entry.source,
|
|
335
|
+
isFailurePattern: entry.isFailurePattern,
|
|
336
|
+
}));
|
|
337
|
+
}
|
|
338
|
+
catch {
|
|
339
|
+
sessionLogEntries = [];
|
|
340
|
+
}
|
|
341
|
+
advisories.push({
|
|
342
|
+
name: "session-log-failures",
|
|
343
|
+
kind: "heuristic",
|
|
344
|
+
status: sessionLogEntries.length === 0 ? "pass" : "warn",
|
|
345
|
+
confidence: sessionLogEntries.length === 0 ? "low" : "medium",
|
|
346
|
+
message: sessionLogEntries.length === 0
|
|
347
|
+
? "No repeated external session-log failure patterns were detected."
|
|
348
|
+
: `${sessionLogEntries.length} repeated external session-log failure pattern(s) detected.`,
|
|
349
|
+
evidence: { candidates: sessionLogEntries.slice(0, 5) },
|
|
350
|
+
});
|
|
351
|
+
const metrics = {
|
|
352
|
+
taskFailRate: roundRate(taskFailRate),
|
|
353
|
+
agentFailureRate: roundRate(agentFailureRate),
|
|
354
|
+
stuckActiveRuns,
|
|
355
|
+
logBackingRate: roundRate(logBackingRate),
|
|
356
|
+
probeRoundTripMs: probe.durationMs,
|
|
357
|
+
};
|
|
358
|
+
const hardFailure = hardChecks.some((check) => check.status === "fail");
|
|
359
|
+
const deterministicWarnings = [...hardChecks, ...advisories].some((check) => check.status === "warn" && check.kind === "deterministic");
|
|
360
|
+
const status = hardFailure ? "fail" : deterministicWarnings ? "warn" : "pass";
|
|
361
|
+
return {
|
|
362
|
+
schemaVersion: 1,
|
|
363
|
+
ok: !hardFailure,
|
|
364
|
+
status,
|
|
365
|
+
since,
|
|
366
|
+
hardChecks,
|
|
367
|
+
advisories,
|
|
368
|
+
metrics,
|
|
369
|
+
improve: improveSummary,
|
|
370
|
+
sessionLogAdvisories: sessionLogEntries,
|
|
371
|
+
};
|
|
372
|
+
}
|
|
373
|
+
finally {
|
|
374
|
+
db.close();
|
|
375
|
+
}
|
|
376
|
+
}
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
Usage:
|
|
2
|
+
akm improve
|
|
3
|
+
akm improve <type>
|
|
4
|
+
akm improve <ref>
|
|
5
|
+
|
|
6
|
+
Description:
|
|
7
|
+
Analyze existing AKM assets and generate improvement proposals.
|
|
8
|
+
|
|
9
|
+
Modes:
|
|
10
|
+
akm improve
|
|
11
|
+
Improve all eligible assets in the current scope.
|
|
12
|
+
|
|
13
|
+
akm improve <type>
|
|
14
|
+
Improve all assets of a given type.
|
|
15
|
+
Example: akm improve memory
|
|
16
|
+
|
|
17
|
+
akm improve <ref>
|
|
18
|
+
Improve one specific asset.
|
|
19
|
+
Example: akm improve workflow:release-checklist
|
|
20
|
+
|
|
21
|
+
What it does:
|
|
22
|
+
- reviews feedback and recent history
|
|
23
|
+
- proposes edits to existing assets
|
|
24
|
+
- distills lessons where useful
|
|
25
|
+
- promotes durable skill lessons into skill reference-doc proposals when justified
|
|
26
|
+
- cleans and consolidates memories
|
|
27
|
+
- writes results to the proposal queue
|
|
28
|
+
|
|
29
|
+
Options:
|
|
30
|
+
--task <text> Add extra guidance for this improvement pass
|
|
31
|
+
--dry-run Show planned actions without generating proposals
|
|
32
|
+
--target <source> Override the write target for accepted proposals
|
|
33
|
+
--auto-accept safe Automatically accept low-risk proposals
|
|
34
|
+
--ignore-cooldown Disable reflect/distill/consolidate cooldown checks for this run
|
|
35
|
+
--reflect-cooldown-days <n>
|
|
36
|
+
Override reflect cooldown with a non-negative integer
|
|
37
|
+
--distill-cooldown-days <n>
|
|
38
|
+
Override distill cooldown with a non-negative integer
|
|
39
|
+
--consolidate-cooldown-days <n>
|
|
40
|
+
Override consolidate cooldown with a non-negative integer
|
|
41
|
+
--consolidate-recovery <mode>
|
|
42
|
+
Recovery mode for stale consolidate journals: abort (default) or clean
|
|
43
|
+
--require-feedback-signal
|
|
44
|
+
Only process refs that have recent feedback-signal events
|
|
45
|
+
--min-retrieval-count <n>
|
|
46
|
+
Minimum retrieval count required to process a ref that has no recent feedback (default: 5)
|
|
47
|
+
|
|
48
|
+
Examples:
|
|
49
|
+
akm improve
|
|
50
|
+
akm improve memory
|
|
51
|
+
akm improve skill
|
|
52
|
+
akm improve skill:code-review
|
|
53
|
+
akm improve workflow:incident-response --task "reduce duplication"
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
Usage:
|
|
2
|
+
akm proposals
|
|
3
|
+
|
|
4
|
+
Description:
|
|
5
|
+
List proposal queue entries.
|
|
6
|
+
|
|
7
|
+
Options:
|
|
8
|
+
--status <status> Filter by pending, accepted, or rejected
|
|
9
|
+
--type <type> Filter by asset type
|
|
10
|
+
--ref <ref> Filter by exact asset ref
|
|
11
|
+
|
|
12
|
+
Examples:
|
|
13
|
+
akm proposals
|
|
14
|
+
akm proposals --status pending
|
|
15
|
+
akm proposals --type skill
|