@topogram/cli 0.3.85 → 0.3.87
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/archive/unarchive.js +56 -5
- package/src/catalog/source.js +52 -1
- package/src/cli/commands/emit/snapshot-input.js +111 -0
- package/src/cli/commands/emit.js +11 -1
- package/src/cli/commands/extractor.js +150 -13
- package/src/cli/commands/import/plan.js +101 -4
- package/src/cli/commands/query/workspace.js +2 -67
- package/src/extraction-context.js +79 -0
- package/src/extractor/packages.js +9 -2
- package/src/github-client.js +56 -2
- package/src/import/core/shared/files.js +21 -0
- package/src/remote-payload-limits.js +40 -0
- package/src/template-trust/constants.js +1 -1
- package/src/template-trust/policy.js +3 -4
- package/src/template-trust/status.js +2 -4
- package/src/topogram-config.js +47 -0
- package/src/workflows/reconcile/adoption-plan/outputs.js +31 -4
- package/src/workflows/shared.js +21 -0
package/package.json
CHANGED
package/src/archive/unarchive.js
CHANGED
@@ -12,15 +12,15 @@
 // - pitch: draft
 // - document: draft

-import { existsSync,
+import { existsSync, writeFileSync, mkdirSync } from "node:fs";
 import path from "node:path";
 import {
-  archiveDir,
   listArchiveFiles,
   parseArchiveFile,
   rewriteArchiveFile
 } from "./jsonl.js";
-import {
+import { isArchivableKind } from "./schema.js";
+import { sdlcRootForSdlc } from "../sdlc/paths.js";

 const REOPEN_STATUSES = {
   bug: "open",
@@ -30,6 +30,50 @@ const REOPEN_STATUSES = {
   document: "draft"
 };

+const SAFE_ARCHIVE_ID = /^[A-Za-z][A-Za-z0-9_]*$/;
+
+function recordDirForKind(kind) {
+  if (kind === "acceptance_criterion") return "acceptance_criteria";
+  return `${kind}s`;
+}
+
+function isContainedPath(root, candidate) {
+  const relative = path.relative(root, candidate);
+  return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
+}
+
+function validateArchivedEntry(entry) {
+  if (!entry || typeof entry !== "object") {
+    return "Archived entry is not an object";
+  }
+  if (!isArchivableKind(entry.kind)) {
+    return `Archived entry kind '${entry.kind}' is not supported for unarchive`;
+  }
+  if (typeof entry.id !== "string" || !SAFE_ARCHIVE_ID.test(entry.id)) {
+    return `Archived entry id '${entry.id}' is not a safe Topogram identifier`;
+  }
+  return null;
+}
+
+function resolveTargetFile(workspaceRoot, entry, options = {}) {
+  const recordRoot = path.resolve(sdlcRootForSdlc(workspaceRoot), recordDirForKind(entry.kind));
+  const targetDir = path.resolve(options.targetDir || recordRoot);
+  if (!isContainedPath(recordRoot, targetDir)) {
+    return {
+      ok: false,
+      error: `Target directory '${targetDir}' escapes SDLC ${entry.kind} record root '${recordRoot}'`
+    };
+  }
+  const targetFile = path.resolve(targetDir, `${entry.id}.tg`);
+  if (!isContainedPath(recordRoot, targetFile)) {
+    return {
+      ok: false,
+      error: `Archived entry id '${entry.id}' resolves outside SDLC ${entry.kind} record root '${recordRoot}'`
+    };
+  }
+  return { ok: true, recordRoot, targetDir, targetFile };
+}
+
 function findEntry(workspaceRoot, id) {
   for (const file of listArchiveFiles(workspaceRoot)) {
     const entries = parseArchiveFile(file);
@@ -109,10 +153,17 @@ export function unarchive(workspaceRoot, id, options = {}) {
   }

   const { file, entries, entry } = found;
+  const entryError = validateArchivedEntry(entry);
+  if (entryError) {
+    return { ok: false, error: entryError };
+  }
   const reopenStatus = options.status || REOPEN_STATUSES[entry.kind] || "draft";
-  const
+  const target = resolveTargetFile(workspaceRoot, entry, options);
+  if (!target.ok) {
+    return target;
+  }
+  const { targetDir, targetFile } = target;
   if (!existsSync(targetDir)) mkdirSync(targetDir, { recursive: true });
-  const targetFile = path.join(targetDir, `${entry.id}.tg`);

   if (existsSync(targetFile)) {
     return { ok: false, error: `Target file '${targetFile}' already exists; refuse to overwrite` };
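
The unarchive change above only restores a record after validating the archived entry's id and confirming that both the target directory and the resolved target file stay inside the SDLC record root. A minimal standalone sketch of that containment check, taken from the helper shown in the diff (the example paths are hypothetical and POSIX-style separators are assumed):

import path from "node:path";

// A candidate is contained when path.relative() yields neither an absolute
// path nor a path that climbs out of the root with "..".
function isContainedPath(root, candidate) {
  const relative = path.relative(root, candidate);
  return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
}

console.log(isContainedPath("/repo/.sdlc/bugs", "/repo/.sdlc/bugs/BUG_12.tg"));      // true
console.log(isContainedPath("/repo/.sdlc/bugs", "/repo/.sdlc/bugs/../../etc/x.tg")); // false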
package/src/catalog/source.js
CHANGED
@@ -5,22 +5,66 @@ import fs from "node:fs";
 import path from "node:path";

 import { readGithubCatalogSourceText } from "../github-client.js";
+import { remotePayloadMaxBytes } from "../remote-payload-limits.js";
 import { defaultCatalogSource } from "../topogram-config.js";
 import { GITHUB_TOKEN_HOSTS } from "./constants.js";
 import { validateCatalog } from "./validation.js";

 const FETCH_URL_SCRIPT = `
 const source = process.argv[1];
+const maxBytes = Number.parseInt(process.env.TOPOGRAM_FETCH_MAX_BYTES || "", 10) || 5242880;
 const token = process.env.TOPOGRAM_FETCH_TOKEN || "";
 const tokenHosts = new Set(["github.com", "api.github.com", "raw.githubusercontent.com"]);
 function tokenAllowed(url) {
   const hostname = new URL(url).hostname.toLowerCase();
   return tokenHosts.has(hostname) || hostname.endsWith(".github.com");
 }
+async function readResponseText(response, url) {
+  const declaredLength = Number.parseInt(response.headers.get("content-length") || "", 10);
+  if (Number.isFinite(declaredLength) && declaredLength > maxBytes) {
+    throw new Error("Response from " + url + " exceeded " + maxBytes + " byte limit.");
+  }
+  if (!response.body) {
+    const text = await response.text();
+    if (Buffer.byteLength(text, "utf8") > maxBytes) {
+      throw new Error("Response from " + url + " exceeded " + maxBytes + " byte limit.");
+    }
+    return text;
+  }
+  const reader = response.body.getReader();
+  const decoder = new TextDecoder();
+  const chunks = [];
+  let total = 0;
+  while (true) {
+    const { value, done } = await reader.read();
+    if (done) {
+      break;
+    }
+    total += value.byteLength;
+    if (total > maxBytes) {
+      try {
+        await reader.cancel();
+      } catch {}
+      throw new Error("Response from " + url + " exceeded " + maxBytes + " byte limit.");
+    }
+    chunks.push(decoder.decode(value, { stream: true }));
+  }
+  chunks.push(decoder.decode());
+  return chunks.join("");
+}
 async function readUrl(url, redirects = 0) {
   if (redirects > 5) {
     throw new Error("Too many redirects.");
   }
+  if (process.env.TOPOGRAM_CATALOG_URL_FIXTURE_PATH) {
+    const fs = await import("node:fs");
+    const fixturePath = process.env.TOPOGRAM_CATALOG_URL_FIXTURE_PATH;
+    const fixtureSize = fs.statSync(fixturePath).size;
+    if (fixtureSize > maxBytes) {
+      throw new Error("Response from " + url + " exceeded " + maxBytes + " byte limit.");
+    }
+    return fs.readFileSync(fixturePath, "utf8");
+  }
   const headers = {};
   if (token && tokenAllowed(url)) {
     headers.authorization = "Bearer " + token;
@@ -30,7 +74,7 @@ async function readUrl(url, redirects = 0) {
     const next = new URL(response.headers.get("location"), url).toString();
     return readUrl(next, redirects + 1);
   }
-  const text = await response
+  const text = await readResponseText(response, url);
   if (!response.ok) {
     const preview = text.trim().slice(0, 400);
     throw new Error(String(response.status) + " " + response.statusText + (preview ? "\\n" + preview : ""));
@@ -118,14 +162,21 @@ function readCatalogText(source) {
  */
 function readUrlText(source) {
   const token = process.env.GITHUB_TOKEN || process.env.GH_TOKEN || "";
+  const maxBytes = remotePayloadMaxBytes(
+    ["TOPOGRAM_CATALOG_FETCH_MAX_BYTES", "TOPOGRAM_REMOTE_FETCH_MAX_BYTES"],
+    undefined,
+    ["catalogFetchMaxBytes", "remoteFetchMaxBytes"]
+  );
   const tokenEnv = token && githubTokenAllowedForCatalogUrl(source)
     ? { TOPOGRAM_FETCH_TOKEN: token }
     : {};
   const result = childProcess.spawnSync(process.execPath, ["--input-type=module", "-e", FETCH_URL_SCRIPT, source], {
     encoding: "utf8",
+    maxBuffer: maxBytes + 4096,
     env: {
       ...process.env,
       ...tokenEnv,
+      TOPOGRAM_FETCH_MAX_BYTES: String(maxBytes),
       PATH: process.env.PATH || ""
     }
   });
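
The fetch script embedded in FETCH_URL_SCRIPT now refuses oversized remote catalogs both up front (via Content-Length) and while streaming the body. A standalone sketch of the streaming half, assuming the global fetch and web-stream APIs of Node 18+; the 5242880-byte cap mirrors the script's fallback default and the URL is purely illustrative:

const maxBytes = 5242880; // matches the script's fallback default

async function readCapped(url) {
  const response = await fetch(url);
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  const chunks = [];
  let total = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    // Count raw bytes as they arrive so the limit holds even without Content-Length.
    total += value.byteLength;
    if (total > maxBytes) {
      await reader.cancel();
      throw new Error(`Response from ${url} exceeded ${maxBytes} byte limit.`);
    }
    chunks.push(decoder.decode(value, { stream: true }));
  }
  chunks.push(decoder.decode());
  return chunks.join("");
}

readCapped("https://example.com/catalog.json").then((text) => console.log(text.length));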
package/src/cli/commands/emit/snapshot-input.js
ADDED
@@ -0,0 +1,111 @@
+// @ts-check
+
+import fs from "node:fs";
+
+const FORBIDDEN_SNAPSHOT_KEYS = new Set(["__proto__", "constructor", "prototype"]);
+
+/**
+ * @param {unknown} value
+ * @returns {value is Record<string, unknown>}
+ */
+function isRecord(value) {
+  return Boolean(value) && typeof value === "object" && !Array.isArray(value);
+}
+
+/**
+ * @param {unknown} value
+ * @returns {string}
+ */
+function errorMessage(value) {
+  return value instanceof Error ? value.message : String(value);
+}
+
+/**
+ * @param {unknown} value
+ * @returns {string[]}
+ */
+function validateDbSchemaSnapshot(value) {
+  const errors = [];
+  if (!isRecord(value)) {
+    return ["snapshot must be a JSON object"];
+  }
+  if (value.type !== "db_schema_snapshot") {
+    errors.push("snapshot.type must be 'db_schema_snapshot'");
+  }
+  if (!Array.isArray(value.tables)) {
+    errors.push("snapshot.tables must be an array");
+  }
+  if (!Array.isArray(value.enums)) {
+    errors.push("snapshot.enums must be an array");
+  }
+  if (Array.isArray(value.tables)) {
+    for (const [index, table] of value.tables.entries()) {
+      if (!isRecord(table)) {
+        errors.push(`snapshot.tables[${index}] must be an object`);
+        continue;
+      }
+      if (typeof table.table !== "string" || table.table.length === 0) {
+        errors.push(`snapshot.tables[${index}].table must be a non-empty string`);
+      }
+      if (!Array.isArray(table.columns)) {
+        errors.push(`snapshot.tables[${index}].columns must be an array`);
+      }
+    }
+  }
+  if (Array.isArray(value.enums)) {
+    for (const [index, enumEntry] of value.enums.entries()) {
+      if (!isRecord(enumEntry)) {
+        errors.push(`snapshot.enums[${index}] must be an object`);
+        continue;
+      }
+      if (typeof enumEntry.id !== "string" || enumEntry.id.length === 0) {
+        errors.push(`snapshot.enums[${index}].id must be a non-empty string`);
+      }
+      if (!Array.isArray(enumEntry.values)) {
+        errors.push(`snapshot.enums[${index}].values must be an array`);
+      }
+    }
+  }
+  return errors;
+}
+
+/**
+ * @param {string} snapshotPath
+ * @returns {{ ok: true, snapshot: Record<string, unknown> } | { ok: false, message: string }}
+ */
+export function readFromSnapshot(snapshotPath) {
+  let raw;
+  try {
+    raw = fs.readFileSync(snapshotPath, "utf8");
+  } catch (error) {
+    return {
+      ok: false,
+      message: `Unable to read --from-snapshot '${snapshotPath}': ${errorMessage(error)}`
+    };
+  }
+
+  let parsed;
+  try {
+    parsed = JSON.parse(raw, (key, value) => {
+      if (FORBIDDEN_SNAPSHOT_KEYS.has(key)) {
+        throw new Error(`unsafe key '${key}' is not allowed in snapshot JSON`);
+      }
+      return value;
+    });
+  } catch (error) {
+    return {
+      ok: false,
+      message: `Invalid --from-snapshot JSON at '${snapshotPath}': ${errorMessage(error)}`
+    };
+  }
+
+  const errors = validateDbSchemaSnapshot(parsed);
+  if (errors.length > 0) {
+    return {
+      ok: false,
+      message: `Invalid --from-snapshot DB schema snapshot at '${snapshotPath}': ${errors.join("; ")}`
+    };
+  }
+
+  return { ok: true, snapshot: parsed };
+}
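
readFromSnapshot parses the snapshot with a JSON.parse reviver so that prototype-pollution keys are rejected before any schema validation runs. A small sketch of that guard in isolation (the inline JSON strings are illustrative only):

const FORBIDDEN_SNAPSHOT_KEYS = new Set(["__proto__", "constructor", "prototype"]);

// JSON.parse calls the reviver for every key, so a forbidden key anywhere in the
// document aborts parsing instead of silently producing a polluted object.
function parseSnapshotJson(raw) {
  return JSON.parse(raw, (key, value) => {
    if (FORBIDDEN_SNAPSHOT_KEYS.has(key)) {
      throw new Error(`unsafe key '${key}' is not allowed in snapshot JSON`);
    }
    return value;
  });
}

parseSnapshotJson('{"type":"db_schema_snapshot","tables":[],"enums":[]}'); // returns the object
parseSnapshotJson('{"__proto__":{"polluted":true}}');                      // throws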
package/src/cli/commands/emit.js
CHANGED
@@ -22,6 +22,7 @@ import {
   generatedOutputSentinel,
   topogramInputPathForGeneration
 } from "../output-safety.js";
+import { readFromSnapshot } from "./emit/snapshot-input.js";

 const IMPLEMENTATION_PROVIDER_TARGETS = new Set([
   "persistence-scaffold",
@@ -78,6 +79,15 @@ function targetRequiresImplementationProvider(target) {
  */
 export async function runEmitCommand(options) {
   const ast = parsePath(options.inputPath);
+  let fromSnapshot = null;
+  if (options.fromSnapshotPath) {
+    const parsedSnapshot = readFromSnapshot(options.fromSnapshotPath);
+    if (!parsedSnapshot.ok) {
+      console.error(parsedSnapshot.message);
+      return 1;
+    }
+    fromSnapshot = parsedSnapshot.snapshot;
+  }
   const explicitProjectConfig = loadProjectConfig(options.projectRoot) || loadProjectConfig(options.inputPath);
   const shouldLoadImplementation = targetRequiresImplementationProvider(options.target) &&
     (!IMPLEMENTATION_OPTIONAL_TARGETS.has(options.target) || Boolean(explicitProjectConfig?.config?.implementation));
@@ -106,7 +116,7 @@ export async function runEmitCommand(options) {
     target: options.target,
     ...(options.selectors || {}),
     profileId: options.profileId,
-    fromSnapshot
+    fromSnapshot,
     fromSnapshotPath: options.fromSnapshotPath,
     fromTopogramPath: options.fromTopogramPath,
     topogramInputPath: topogramInputPathForGeneration(options.inputPath),
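
With this change emit.js validates the snapshot before it reaches generation options, so a malformed file fails the command with a readable message instead of surfacing later. A hypothetical calling sketch (the snapshot path is illustrative; only the readFromSnapshot contract shown in the diff is assumed):

import { readFromSnapshot } from "./emit/snapshot-input.js";

const parsed = readFromSnapshot("./fixtures/db-schema.snapshot.json"); // hypothetical path
if (!parsed.ok) {
  // Same failure path runEmitCommand takes: print the message and exit non-zero.
  console.error(parsed.message);
  process.exit(1);
}
// parsed.snapshot is a validated { type: "db_schema_snapshot", tables, enums } object
// that can be passed along as fromSnapshot.
console.log(parsed.snapshot.type);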
package/src/cli/commands/extractor.js
CHANGED
@@ -44,6 +44,7 @@ export function printExtractorHelp() {
   console.log(" - extractor packages execute only during `topogram extract` or `topogram extractor check`.");
   console.log(" - extractor packages emit review-only candidates; core owns persistence, reconcile, and adoption.");
   console.log(` - package-backed extractors are governed by ${EXTRACTOR_POLICY_FILE}; bundled topogram/* extractors are allowed.`);
+  console.log(" - safe loop: list/show -> install -> policy pin -> check -> extract -> plan/list -> adopt --dry-run -> adopt --write.");
   console.log("");
   console.log("Examples:");
   console.log("  topogram extractor list");
@@ -121,17 +122,110 @@ function extractorPolicyPinCommand(packageName, version) {
 }

 /**
- * @param {string|null|undefined}
+ * @param {string|null|undefined} extractorRef
  * @param {string[]} tracks
  * @param {string|null|undefined} exampleSource
  * @returns {string|null}
  */
-function extractorRunCommand(
-  if (!
+function extractorRunCommand(extractorRef, tracks, exampleSource) {
+  if (!extractorRef) {
     return null;
   }
   const trackList = tracks.length > 0 ? tracks.join(",") : "db,api,ui,cli";
-  return `topogram extract ${exampleSource || "./existing-app"} --out ./imported-topogram --from ${trackList} --extractor ${
+  return `topogram extract ${exampleSource || "./existing-app"} --out ./imported-topogram --from ${trackList} --extractor ${extractorRef}`;
+}
+
+/**
+ * @param {Record<string, any>|null|undefined} extractor
+ * @returns {Record<string, any>}
+ */
+function buildExtractorReviewWorkflow(extractor = null) {
+  const packageName = extractor?.package || extractor?.packageName || null;
+  const extractorRef = packageName || extractor?.id || "<extractor>";
+  const tracks = Array.isArray(extractor?.tracks) ? extractor.tracks : [];
+  const version = extractor?.version || "1";
+  const bundledExtractor = extractor?.source === "bundled" && !packageName;
+  const installCommand = extractor?.installCommand || (packageName ? packageExtractorInstallCommand(packageName) : null);
+  const policyPinCommand = extractor?.policyPinCommand || (packageName ? extractorPolicyPinCommand(packageName, version) : null);
+  const extractCommand = extractor?.extractCommand || extractorRunCommand(extractorRef, tracks, extractor?.exampleSource);
+  return {
+    type: "extractor_review_workflow",
+    packageCodeExecution: {
+      list: false,
+      show: false,
+      policy: false,
+      check: true,
+      extract: true
+    },
+    steps: [
+      {
+        id: "discover",
+        command: "topogram extractor list",
+        packageCodeExecution: false,
+        purpose: "Find bundled and first-party package-backed extractors by track."
+      },
+      {
+        id: "inspect",
+        command: `topogram extractor show ${extractorRef}`,
+        packageCodeExecution: false,
+        purpose: "Read manifest purpose, tracks, install command, policy pin command, and extract command."
+      },
+      ...(installCommand ? [{
+        id: "install",
+        command: installCommand,
+        packageCodeExecution: false,
+        purpose: "Install the extractor package explicitly; Topogram does not install it during extraction."
+      }] : []),
+      ...(policyPinCommand ? [{
+        id: "pin_policy",
+        command: policyPinCommand,
+        packageCodeExecution: false,
+        purpose: "Allow and pin the extractor manifest version before execution."
+      }] : []),
+      ...(!bundledExtractor ? [{
+        id: "check",
+        command: `topogram extractor check ${extractorRef}`,
+        packageCodeExecution: true,
+        purpose: "Load the adapter and run a minimal smoke extraction against a synthetic fixture."
+      }] : []),
+      ...(extractCommand ? [{
+        id: "extract",
+        command: extractCommand,
+        packageCodeExecution: true,
+        purpose: "Read brownfield source and write review-only candidates plus extraction provenance."
+      }] : []),
+      {
+        id: "review_plan",
+        command: "topogram extract plan ./imported-topogram",
+        packageCodeExecution: false,
+        purpose: "Review bundles, extractor provenance, candidate counts, and safety notes."
+      },
+      {
+        id: "list_selectors",
+        command: "topogram adopt --list ./imported-topogram",
+        packageCodeExecution: false,
+        purpose: "Choose an explicit adoption selector."
+      },
+      {
+        id: "dry_run_adoption",
+        command: "topogram adopt <selector> ./imported-topogram --dry-run",
+        packageCodeExecution: false,
+        purpose: "Preview canonical topo writes before changing project-owned records."
+      },
+      {
+        id: "write_reviewed_adoption",
+        command: "topogram adopt <selector> ./imported-topogram --write",
+        packageCodeExecution: false,
+        purpose: "Write only reviewed canonical records; extractor packages never own adoption semantics."
+      }
+    ],
+    safetyNotes: [
+      "topogram extractor list/show/policy do not load package adapter code.",
+      "topogram extractor check and topogram extract load package adapter code.",
+      "Extractor packages emit review-only candidates; core owns persistence, reconcile, adoption, and canonical topo writes.",
+      "Run dry-run adoption before --write."
+    ]
+  };
 }

 /**
@@ -188,8 +282,8 @@ function extractorManifestSummary(manifest, metadata = {}) {
   const version = manifest.version || firstParty?.version || "1";
   const installCommand = packageName ? packageExtractorInstallCommand(packageName) : null;
   const policyPinCommand = extractorPolicyPinCommand(packageName, version);
-  const extractCommand = extractorRunCommand(packageName, tracks, firstParty?.exampleSource);
-
+  const extractCommand = extractorRunCommand(packageName || manifest.id, tracks, firstParty?.exampleSource);
+  const summary = {
     id: manifest.id,
     version,
     label: firstParty?.label || null,
@@ -215,6 +309,10 @@ function extractorManifestSummary(manifest, metadata = {}) {
     packageRoot: metadata.packageRoot || null,
     errors: metadata.errors || []
   };
+  return {
+    ...summary,
+    reviewWorkflow: buildExtractorReviewWorkflow(summary)
+  };
 }

 /**
@@ -222,7 +320,7 @@ function extractorManifestSummary(manifest, metadata = {}) {
  * @returns {Record<string, any>}
  */
 function firstPartyExtractorPlaceholder(info) {
-
+  const summary = {
     id: info.id,
     version: info.version,
     label: info.label,
@@ -248,11 +346,15 @@ function firstPartyExtractorPlaceholder(info) {
     packageRoot: null,
     errors: []
   };
+  return {
+    ...summary,
+    reviewWorkflow: buildExtractorReviewWorkflow(summary)
+  };
 }

 /**
  * @param {string} cwd
- * @returns {{ ok: boolean, cwd: string, extractors: Record<string, any>[], groups: Record<string, ReturnType<typeof groupExtractorEntry>[]>, summary: Record<string, number> }}
+ * @returns {{ ok: boolean, cwd: string, extractors: Record<string, any>[], groups: Record<string, ReturnType<typeof groupExtractorEntry>[]>, reviewWorkflow: Record<string, any>, summary: Record<string, number> }}
  */
 export function buildExtractorListPayload(cwd) {
   const extractors = EXTRACTOR_MANIFESTS
@@ -314,6 +416,7 @@ export function buildExtractorListPayload(cwd) {
     cwd,
     extractors,
     groups,
+    reviewWorkflow: buildExtractorReviewWorkflow(),
     summary: {
       total: extractors.length,
       bundled: extractors.filter((extractor) => extractor.source === "bundled").length,
@@ -367,6 +470,7 @@ export function printExtractorList(payload) {
   console.log("Topogram extractors");
   console.log(`Bundled: ${payload.summary.bundled}; package-backed: ${payload.summary.package}; installed: ${payload.summary.installed}; first-party missing: ${payload.summary.missingFirstParty || 0}`);
   console.log("Package-backed extractors are listed for discovery even before they are installed.");
+  console.log("Selection loop: list/show (no package code) -> install -> policy pin -> extractor check (loads adapter) -> extract -> extract plan/adopt --list -> adopt --dry-run -> adopt --write.");
   console.log("");
   for (const track of EXTRACTOR_TRACK_ORDER) {
     const entries = (payload.groups || {})[track] || [];
@@ -426,10 +530,18 @@ export function printExtractorShow(payload) {
   console.log(`Extractors: ${extractor.extractors.join(", ") || "none"}`);
   console.log(`Candidate kinds: ${extractor.candidateKinds.join(", ") || "none"}`);
   console.log(`Evidence types: ${extractor.evidenceTypes.join(", ") || "none"}`);
+  if (extractor.reviewWorkflow?.steps?.length) {
+    console.log("");
+    console.log("Review loop:");
+    for (const step of extractor.reviewWorkflow.steps) {
+      console.log(`- ${step.id}: ${step.command}`);
+      console.log(`  ${step.purpose}`);
+    }
+  }
 }

 /**
- * @param {ReturnType<typeof checkExtractorPack>} payload
+ * @param {ReturnType<typeof checkExtractorPack> & { reviewWorkflow?: Record<string, any> }} payload
  * @returns {void}
  */
 export function printExtractorCheck(payload) {
@@ -453,12 +565,19 @@ export function printExtractorCheck(payload) {
     console.log("");
     console.log(`Smoke output: ${payload.smoke.extractors} extractor(s), ${payload.smoke.findings} finding(s), ${payload.smoke.candidateKeys} candidate bucket(s), ${payload.smoke.diagnostics} diagnostic(s)`);
   }
+  if (payload.reviewWorkflow?.steps?.length) {
+    console.log("");
+    console.log("Next review loop:");
+    for (const step of payload.reviewWorkflow.steps.filter((/** @type {Record<string, any>} */ step) => ["extract", "review_plan", "list_selectors", "dry_run_adoption", "write_reviewed_adoption"].includes(step.id))) {
+      console.log(`- ${step.command}`);
+    }
+  }
   for (const error of payload.errors || []) console.log(`Error: ${error}`);
 }

 /**
  * @param {string} projectPath
- * @returns {{ ok: boolean, path: string, exists: boolean, policy: any, defaulted: boolean, packages: any[], diagnostics: any[], errors: string[], summary: Record<string, number> }}
+ * @returns {{ ok: boolean, path: string, exists: boolean, policy: any, defaulted: boolean, packages: any[], diagnostics: any[], errors: string[], reviewWorkflow: Record<string, any>, summary: Record<string, number> }}
  */
 export function buildExtractorPolicyStatusPayload(projectPath) {
   const root = path.resolve(projectPath || ".");
@@ -504,6 +623,7 @@ export function buildExtractorPolicyStatusPayload(projectPath) {
     packages,
     diagnostics,
     errors,
+    reviewWorkflow: buildExtractorReviewWorkflow(packages[0] || null),
     summary: {
       enabledPackages: policy.enabledPackages.length,
       installed: packages.filter((item) => item.installed).length,
@@ -567,6 +687,7 @@ export function printExtractorPolicyStatus(payload) {
   console.log(`Enabled packages: ${payload.summary.enabledPackages}`);
   console.log("Default allowlist: bundled topogram/* extractors and first-party @topogram/extractor-* packages.");
   console.log("Install behavior: Topogram does not install extractor packages automatically.");
+  console.log("Review loop: install package -> pin policy -> extractor check -> extract -> extract plan/adopt --list -> adopt --dry-run -> adopt --write.");
   for (const item of payload.packages) {
     console.log(`- ${item.packageName}@${item.version}: ${item.installed ? "installed" : "missing"}, ${item.allowed ? "allowed" : "denied"}`);
     if (!item.installed && item.installCommand) console.log(`  Install: ${item.installCommand}`);
@@ -611,9 +732,25 @@ export function runExtractorCommand(context) {
   const { commandArgs, inputPath, json, cwd } = context;
   if (commandArgs.extractorCommand === "check") {
     const payload = checkExtractorPack(inputPath || "", { cwd });
-
-
-
+    const summary = payload.manifest
+      ? extractorManifestSummary(payload.manifest, {
+        installed: Boolean(payload.manifest),
+        manifestPath: payload.manifestPath,
+        packageRoot: payload.packageRoot,
+        errors: payload.errors
+      })
+      : null;
+    const augmentedPayload = /** @type {ReturnType<typeof checkExtractorPack> & { reviewWorkflow?: Record<string, any> }} */ (payload);
+    augmentedPayload.reviewWorkflow = buildExtractorReviewWorkflow(summary || {
+      id: inputPath || "<extractor>",
+      package: payload.packageName || null,
+      packageName: payload.packageName || null,
+      tracks: payload.manifest?.tracks || [],
+      version: payload.manifest?.version || "1"
+    });
+    if (json) console.log(stableStringify(augmentedPayload));
+    else printExtractorCheck(augmentedPayload);
+    return augmentedPayload.ok ? 0 : 1;
   }
   if (commandArgs.extractorCommand === "scaffold") {
     const payload = scaffoldExtractorPack(inputPath || "", {
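
The reviewWorkflow block added to the list, show, policy, and check payloads is plain data, so tooling can read the safe command loop without loading any extractor adapter code. A sketch of consuming it from the JSON output (the --json flag name and a locally available topogram binary are assumptions):

import { execFileSync } from "node:child_process";

const payload = JSON.parse(
  execFileSync("topogram", ["extractor", "list", "--json"], { encoding: "utf8" })
);
// Each step carries the command to run and whether it executes package adapter code.
for (const step of payload.reviewWorkflow?.steps || []) {
  const marker = step.packageCodeExecution ? "[runs package code]" : "[core only]";
  console.log(`${marker} ${step.id}: ${step.command}`);
}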