@topogram/cli 0.3.83 → 0.3.85
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/cli/command-parsers/extractor.js +20 -1
- package/src/cli/commands/extractor.js +38 -0
- package/src/cli/help.js +2 -0
- package/src/extractor/check.js +3 -36
- package/src/extractor/output.js +220 -0
- package/src/extractor/scaffold.js +487 -0
- package/src/import/core/runner/tracks.js +7 -19
package/package.json
CHANGED
|
@@ -2,6 +2,17 @@
|
|
|
2
2
|
|
|
3
3
|
import { commandPath } from "./shared.js";
|
|
4
4
|
|
|
5
|
+
/**
|
|
6
|
+
* @param {string[]} args
|
|
7
|
+
* @param {string} flag
|
|
8
|
+
* @returns {string|null}
|
|
9
|
+
*/
|
|
10
|
+
function optionValue(args, flag) {
|
|
11
|
+
const index = args.indexOf(flag);
|
|
12
|
+
const value = index >= 0 ? args[index + 1] : null;
|
|
13
|
+
return value && !value.startsWith("-") ? value : null;
|
|
14
|
+
}
|
|
15
|
+
|
|
5
16
|
/**
|
|
6
17
|
* @param {string[]} args
|
|
7
18
|
* @returns {import("./shared.js").SplitCommandArgs|null}
|
|
@@ -16,6 +27,15 @@ export function parseExtractorCommandArgs(args) {
|
|
|
16
27
|
if (args[0] === "extractor" && args[1] === "check") {
|
|
17
28
|
return { extractorCommand: "check", inputPath: args[2] };
|
|
18
29
|
}
|
|
30
|
+
if (args[0] === "extractor" && args[1] === "scaffold") {
|
|
31
|
+
return {
|
|
32
|
+
extractorCommand: "scaffold",
|
|
33
|
+
inputPath: args[2],
|
|
34
|
+
extractorScaffoldTrack: optionValue(args, "--track"),
|
|
35
|
+
extractorScaffoldPackage: optionValue(args, "--package"),
|
|
36
|
+
extractorScaffoldId: optionValue(args, "--id")
|
|
37
|
+
};
|
|
38
|
+
}
|
|
19
39
|
if (args[0] === "extractor" && args[1] === "policy" && args[2] === "init") {
|
|
20
40
|
return { extractorPolicyCommand: "init", inputPath: commandPath(args, 3, ".") };
|
|
21
41
|
}
|
|
@@ -37,4 +57,3 @@ export function parseExtractorCommandArgs(args) {
|
|
|
37
57
|
}
|
|
38
58
|
return null;
|
|
39
59
|
}
|
|
40
|
-
|
|
@@ -6,6 +6,7 @@ import path from "node:path";
|
|
|
6
6
|
import { stableStringify } from "../../format.js";
|
|
7
7
|
import { checkExtractorPack } from "../../extractor/check.js";
|
|
8
8
|
import { FIRST_PARTY_EXTRACTOR_PACKAGES, firstPartyExtractorInfo } from "../../extractor/first-party.js";
|
|
9
|
+
import { scaffoldExtractorPack } from "../../extractor/scaffold.js";
|
|
9
10
|
import {
|
|
10
11
|
EXTRACTOR_MANIFESTS,
|
|
11
12
|
getExtractorManifest,
|
|
@@ -30,6 +31,7 @@ export function printExtractorHelp() {
|
|
|
30
31
|
console.log("Usage: topogram extractor list [--json]");
|
|
31
32
|
console.log(" or: topogram extractor show <id-or-package> [--json]");
|
|
32
33
|
console.log(" or: topogram extractor check <path-or-package> [--json]");
|
|
34
|
+
console.log(" or: topogram extractor scaffold <target> [--track <track>] [--package <name>] [--id <manifest-id>] [--json]");
|
|
33
35
|
console.log(" or: topogram extractor policy init [path] [--json]");
|
|
34
36
|
console.log(" or: topogram extractor policy status [path] [--json]");
|
|
35
37
|
console.log(" or: topogram extractor policy check [path] [--json]");
|
|
@@ -48,11 +50,37 @@ export function printExtractorHelp() {
|
|
|
48
50
|
console.log(" topogram extractor show topogram/api-extractors");
|
|
49
51
|
console.log(" topogram extractor show @topogram/extractor-prisma-db");
|
|
50
52
|
console.log(" topogram extractor check ./extractor-package");
|
|
53
|
+
console.log(" topogram extractor scaffold ./topogram-extractor-node-cli --track cli --package @scope/topogram-extractor-node-cli");
|
|
51
54
|
console.log(" topogram extractor policy init");
|
|
52
55
|
console.log(" topogram extractor policy pin @topogram/extractor-node-cli@1");
|
|
53
56
|
console.log(" topogram extract ./express-api --out ./imported-topogram --from api --extractor @topogram/extractor-express-api");
|
|
54
57
|
}
|
|
55
58
|
|
|
59
|
+
/**
|
|
60
|
+
* @param {ReturnType<typeof scaffoldExtractorPack>} payload
|
|
61
|
+
* @returns {void}
|
|
62
|
+
*/
|
|
63
|
+
export function printExtractorScaffold(payload) {
|
|
64
|
+
console.log(payload.ok ? "Extractor scaffold created." : "Extractor scaffold failed.");
|
|
65
|
+
console.log(`Target: ${payload.target}`);
|
|
66
|
+
if (payload.packageName) console.log(`Package: ${payload.packageName}`);
|
|
67
|
+
if (payload.manifestId) console.log(`Manifest id: ${payload.manifestId}`);
|
|
68
|
+
if (payload.track) console.log(`Track: ${payload.track}`);
|
|
69
|
+
if (payload.files.length > 0) {
|
|
70
|
+
console.log("Files:");
|
|
71
|
+
for (const file of payload.files) {
|
|
72
|
+
console.log(`- ${file}`);
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
if (payload.nextCommands.length > 0) {
|
|
76
|
+
console.log("Next commands:");
|
|
77
|
+
for (const command of payload.nextCommands) {
|
|
78
|
+
console.log(`- ${command}`);
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
for (const error of payload.errors || []) console.log(`Error: ${error}`);
|
|
82
|
+
}
|
|
83
|
+
|
|
56
84
|
/**
|
|
57
85
|
* @param {string} cwd
|
|
58
86
|
* @returns {string[]}
|
|
@@ -587,6 +615,16 @@ export function runExtractorCommand(context) {
|
|
|
587
615
|
else printExtractorCheck(payload);
|
|
588
616
|
return payload.ok ? 0 : 1;
|
|
589
617
|
}
|
|
618
|
+
if (commandArgs.extractorCommand === "scaffold") {
|
|
619
|
+
const payload = scaffoldExtractorPack(inputPath || "", {
|
|
620
|
+
track: commandArgs.extractorScaffoldTrack,
|
|
621
|
+
packageName: commandArgs.extractorScaffoldPackage,
|
|
622
|
+
manifestId: commandArgs.extractorScaffoldId
|
|
623
|
+
});
|
|
624
|
+
if (json) console.log(stableStringify(payload));
|
|
625
|
+
else printExtractorScaffold(payload);
|
|
626
|
+
return payload.ok ? 0 : 1;
|
|
627
|
+
}
|
|
590
628
|
if (commandArgs.extractorCommand === "list") {
|
|
591
629
|
const payload = buildExtractorListPayload(cwd);
|
|
592
630
|
if (json) console.log(stableStringify(payload));
|
package/src/cli/help.js
CHANGED
|
@@ -71,6 +71,7 @@ export function printUsage(options = {}) {
|
|
|
71
71
|
console.log(" or: topogram extractor list [--json]");
|
|
72
72
|
console.log(" or: topogram extractor show <id-or-package> [--json]");
|
|
73
73
|
console.log(" or: topogram extractor check <path-or-package> [--json]");
|
|
74
|
+
console.log(" or: topogram extractor scaffold <target> [--track <track>] [--package <name>] [--id <manifest-id>] [--json]");
|
|
74
75
|
console.log(" or: topogram extractor policy init [path] [--json]");
|
|
75
76
|
console.log(" or: topogram extractor policy status [path] [--json]");
|
|
76
77
|
console.log(" or: topogram extractor policy check [path] [--json]");
|
|
@@ -111,6 +112,7 @@ export function printUsage(options = {}) {
|
|
|
111
112
|
console.log(" topogram extractor list");
|
|
112
113
|
console.log(" topogram extractor show @topogram/extractor-prisma-db");
|
|
113
114
|
console.log(" topogram extractor check ./extractor-package");
|
|
115
|
+
console.log(" topogram extractor scaffold ./topogram-extractor-node-cli --track cli --package @scope/topogram-extractor-node-cli");
|
|
114
116
|
console.log(" topogram extractor policy check");
|
|
115
117
|
console.log(" topogram extract ./express-api --out ./extracted-topogram --from api --extractor @topogram/extractor-express-api");
|
|
116
118
|
console.log(" topogram generate");
|
package/src/extractor/check.js
CHANGED
|
@@ -5,6 +5,7 @@ import {
|
|
|
5
5
|
loadExtractorPackageAdapterForSpec,
|
|
6
6
|
validateExtractorAdapter
|
|
7
7
|
} from "./packages.js";
|
|
8
|
+
import { validateExtractorResult } from "./output.js";
|
|
8
9
|
|
|
9
10
|
/**
|
|
10
11
|
* @typedef {import("./registry.js").ExtractorManifest} ExtractorManifest
|
|
@@ -25,39 +26,6 @@ import {
|
|
|
25
26
|
* @property {boolean} executesPackageCode
|
|
26
27
|
*/
|
|
27
28
|
|
|
28
|
-
/**
|
|
29
|
-
* @param {any} result
|
|
30
|
-
* @returns {{ ok: boolean, message: string, smoke: { findings: number, candidateKeys: number, diagnostics: number }|null }}
|
|
31
|
-
*/
|
|
32
|
-
function validateExtractResult(result) {
|
|
33
|
-
if (!result || typeof result !== "object" || Array.isArray(result)) {
|
|
34
|
-
return { ok: false, message: "extract(context) must return an object", smoke: null };
|
|
35
|
-
}
|
|
36
|
-
if (result.findings != null && !Array.isArray(result.findings)) {
|
|
37
|
-
return { ok: false, message: "extract(context) findings must be an array when present", smoke: null };
|
|
38
|
-
}
|
|
39
|
-
if (result.diagnostics != null && !Array.isArray(result.diagnostics)) {
|
|
40
|
-
return { ok: false, message: "extract(context) diagnostics must be an array when present", smoke: null };
|
|
41
|
-
}
|
|
42
|
-
if (!result.candidates || typeof result.candidates !== "object" || Array.isArray(result.candidates)) {
|
|
43
|
-
return { ok: false, message: "extract(context) result must include a candidates object", smoke: null };
|
|
44
|
-
}
|
|
45
|
-
for (const [key, value] of Object.entries(result.candidates)) {
|
|
46
|
-
if (!Array.isArray(value)) {
|
|
47
|
-
return { ok: false, message: `extract(context) candidates.${key} must be an array`, smoke: null };
|
|
48
|
-
}
|
|
49
|
-
}
|
|
50
|
-
return {
|
|
51
|
-
ok: true,
|
|
52
|
-
message: `extract(context) returned ${Object.keys(result.candidates).length} candidate bucket(s)`,
|
|
53
|
-
smoke: {
|
|
54
|
-
findings: Array.isArray(result.findings) ? result.findings.length : 0,
|
|
55
|
-
candidateKeys: Object.keys(result.candidates).length,
|
|
56
|
-
diagnostics: Array.isArray(result.diagnostics) ? result.diagnostics.length : 0
|
|
57
|
-
}
|
|
58
|
-
};
|
|
59
|
-
}
|
|
60
|
-
|
|
61
29
|
/**
|
|
62
30
|
* @param {string} sourceSpec
|
|
63
31
|
* @param {{ cwd?: string }} [options]
|
|
@@ -126,9 +94,9 @@ export function checkExtractorPack(sourceSpec, options = {}) {
|
|
|
126
94
|
continue;
|
|
127
95
|
}
|
|
128
96
|
const result = extractor.extract(context) || { findings: [], candidates: {} };
|
|
129
|
-
const validation =
|
|
97
|
+
const validation = validateExtractorResult(result, { track: extractor.track, strictCandidates: true });
|
|
130
98
|
if (!validation.ok || !validation.smoke) {
|
|
131
|
-
payload.errors.push(`Extractor '${extractor.id}' ${
|
|
99
|
+
payload.errors.push(...validation.errors.map((message) => `Extractor '${extractor.id}' ${message}.`));
|
|
132
100
|
continue;
|
|
133
101
|
}
|
|
134
102
|
totalFindings += validation.smoke.findings;
|
|
@@ -152,4 +120,3 @@ export function checkExtractorPack(sourceSpec, options = {}) {
|
|
|
152
120
|
payload.ok = payload.errors.length === 0;
|
|
153
121
|
return payload;
|
|
154
122
|
}
|
|
155
|
-
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
// @ts-check
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* @typedef {Object} ExtractorResultValidationOptions
|
|
5
|
+
* @property {string} [track]
|
|
6
|
+
* @property {boolean} [strictCandidates]
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* @typedef {Object} ExtractorResultValidation
|
|
11
|
+
* @property {boolean} ok
|
|
12
|
+
* @property {string[]} errors
|
|
13
|
+
* @property {{ findings: number, candidateKeys: number, diagnostics: number }|null} smoke
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
/** @type {Record<string, Set<string>>} */
|
|
17
|
+
const TRACK_CANDIDATE_BUCKETS = {
|
|
18
|
+
db: new Set(["entities", "enums", "relations", "indexes", "maintained_seams"]),
|
|
19
|
+
api: new Set(["capabilities", "routes", "stacks"]),
|
|
20
|
+
ui: new Set(["screens", "routes", "actions", "flows", "widgets", "shapes", "stacks"]),
|
|
21
|
+
cli: new Set(["commands", "capabilities", "surfaces"]),
|
|
22
|
+
workflows: new Set(["workflows", "workflow_states", "workflow_transitions"]),
|
|
23
|
+
verification: new Set(["verifications", "scenarios", "frameworks", "scripts"])
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
const DISALLOWED_BUCKETS = new Set([
|
|
27
|
+
"adoption",
|
|
28
|
+
"adoption_plan",
|
|
29
|
+
"adoptionPlan",
|
|
30
|
+
"canonical",
|
|
31
|
+
"canonical_files",
|
|
32
|
+
"canonicalFiles",
|
|
33
|
+
"files",
|
|
34
|
+
"patches",
|
|
35
|
+
"project_config",
|
|
36
|
+
"projectConfig",
|
|
37
|
+
"topo",
|
|
38
|
+
"topogram",
|
|
39
|
+
"topogram_project",
|
|
40
|
+
"topogramProject",
|
|
41
|
+
"writeFiles",
|
|
42
|
+
"writes",
|
|
43
|
+
"writtenFiles"
|
|
44
|
+
]);
|
|
45
|
+
|
|
46
|
+
const DISALLOWED_RECORD_KEYS = new Set([
|
|
47
|
+
"adoption",
|
|
48
|
+
"adoptionPlan",
|
|
49
|
+
"canonical",
|
|
50
|
+
"canonicalFiles",
|
|
51
|
+
"files",
|
|
52
|
+
"patches",
|
|
53
|
+
"receipt",
|
|
54
|
+
"topo",
|
|
55
|
+
"topogram",
|
|
56
|
+
"write",
|
|
57
|
+
"writeFiles",
|
|
58
|
+
"writes",
|
|
59
|
+
"writtenFiles"
|
|
60
|
+
]);
|
|
61
|
+
|
|
62
|
+
/**
|
|
63
|
+
* Keys that carry local source/package file references. Deliberately excludes
|
|
64
|
+
* command/route `path` and config target dotted `path` values.
|
|
65
|
+
*/
|
|
66
|
+
const PATH_KEYS = new Set([
|
|
67
|
+
"configFile",
|
|
68
|
+
"configPath",
|
|
69
|
+
"file",
|
|
70
|
+
"filePath",
|
|
71
|
+
"migrationPath",
|
|
72
|
+
"migrationsPath",
|
|
73
|
+
"schemaPath",
|
|
74
|
+
"snapshotPath",
|
|
75
|
+
"sourceFile",
|
|
76
|
+
"sourcePath",
|
|
77
|
+
"source_path",
|
|
78
|
+
"targetFile",
|
|
79
|
+
"targetPath"
|
|
80
|
+
]);
|
|
81
|
+
|
|
82
|
+
/**
|
|
83
|
+
* @param {unknown} value
|
|
84
|
+
* @returns {value is Record<string, unknown>}
|
|
85
|
+
*/
|
|
86
|
+
function isPlainObject(value) {
|
|
87
|
+
return Boolean(value) && typeof value === "object" && !Array.isArray(value);
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
/**
|
|
91
|
+
* @param {string} candidatePath
|
|
92
|
+
* @returns {boolean}
|
|
93
|
+
*/
|
|
94
|
+
function isUnsafeRelativePath(candidatePath) {
|
|
95
|
+
return candidatePath.startsWith("/") || candidatePath === ".." || candidatePath.startsWith("../") || candidatePath.includes("/../");
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
/**
|
|
99
|
+
* @param {string} bucket
|
|
100
|
+
* @param {Record<string, unknown>} candidate
|
|
101
|
+
* @returns {string[]}
|
|
102
|
+
*/
|
|
103
|
+
function identityFieldsForBucket(bucket, candidate) {
|
|
104
|
+
if (bucket === "commands") return ["command_id", "id_hint"];
|
|
105
|
+
if (bucket === "routes") {
|
|
106
|
+
if (typeof candidate.method === "string" && typeof candidate.path === "string") {
|
|
107
|
+
return [];
|
|
108
|
+
}
|
|
109
|
+
return ["id_hint", "id"];
|
|
110
|
+
}
|
|
111
|
+
return ["id_hint", "id", "name"];
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
/**
|
|
115
|
+
* @param {string} bucket
|
|
116
|
+
* @param {Record<string, unknown>} candidate
|
|
117
|
+
* @param {string} pathLabel
|
|
118
|
+
* @returns {string[]}
|
|
119
|
+
*/
|
|
120
|
+
function validateCandidateIdentity(bucket, candidate, pathLabel) {
|
|
121
|
+
const fields = identityFieldsForBucket(bucket, candidate);
|
|
122
|
+
if (fields.length === 0) return [];
|
|
123
|
+
if (fields.some((field) => typeof candidate[field] === "string" && String(candidate[field]).trim().length > 0)) {
|
|
124
|
+
return [];
|
|
125
|
+
}
|
|
126
|
+
return [`${pathLabel} must include an identity field: ${fields.join(" or ")}`];
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
/**
|
|
130
|
+
* @param {unknown} value
|
|
131
|
+
* @param {string} pathLabel
|
|
132
|
+
* @param {string[]} errors
|
|
133
|
+
* @returns {void}
|
|
134
|
+
*/
|
|
135
|
+
function validateNoUnsafeRecords(value, pathLabel, errors) {
|
|
136
|
+
if (Array.isArray(value)) {
|
|
137
|
+
for (let index = 0; index < value.length; index += 1) {
|
|
138
|
+
validateNoUnsafeRecords(value[index], `${pathLabel}[${index}]`, errors);
|
|
139
|
+
}
|
|
140
|
+
return;
|
|
141
|
+
}
|
|
142
|
+
if (!isPlainObject(value)) return;
|
|
143
|
+
for (const [key, child] of Object.entries(value)) {
|
|
144
|
+
const childPath = `${pathLabel}.${key}`;
|
|
145
|
+
if (DISALLOWED_RECORD_KEYS.has(key)) {
|
|
146
|
+
errors.push(`${childPath} is not allowed in extractor candidate output; extractors emit review candidates, not adoption plans or files.`);
|
|
147
|
+
continue;
|
|
148
|
+
}
|
|
149
|
+
if (PATH_KEYS.has(key) && typeof child === "string" && isUnsafeRelativePath(child)) {
|
|
150
|
+
errors.push(`${childPath} must be a safe project-relative path.`);
|
|
151
|
+
continue;
|
|
152
|
+
}
|
|
153
|
+
validateNoUnsafeRecords(child, childPath, errors);
|
|
154
|
+
}
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
/**
|
|
158
|
+
* @param {unknown} result
|
|
159
|
+
* @param {ExtractorResultValidationOptions} [options]
|
|
160
|
+
* @returns {ExtractorResultValidation}
|
|
161
|
+
*/
|
|
162
|
+
export function validateExtractorResult(result, options = {}) {
|
|
163
|
+
const errors = [];
|
|
164
|
+
if (!isPlainObject(result)) {
|
|
165
|
+
return { ok: false, errors: ["extract(context) must return an object"], smoke: null };
|
|
166
|
+
}
|
|
167
|
+
if (result.findings != null && !Array.isArray(result.findings)) {
|
|
168
|
+
errors.push("extract(context) findings must be an array when present");
|
|
169
|
+
}
|
|
170
|
+
if (result.diagnostics != null && !Array.isArray(result.diagnostics)) {
|
|
171
|
+
errors.push("extract(context) diagnostics must be an array when present");
|
|
172
|
+
}
|
|
173
|
+
if (!isPlainObject(result.candidates)) {
|
|
174
|
+
errors.push("extract(context) result must include a candidates object");
|
|
175
|
+
return { ok: false, errors, smoke: null };
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
const allowedBuckets = options.track ? TRACK_CANDIDATE_BUCKETS[options.track] : null;
|
|
179
|
+
const candidateKeys = Object.keys(result.candidates);
|
|
180
|
+
for (const [bucket, value] of Object.entries(result.candidates)) {
|
|
181
|
+
const bucketLabel = `extract(context) candidates.${bucket}`;
|
|
182
|
+
if (DISALLOWED_BUCKETS.has(bucket)) {
|
|
183
|
+
errors.push(`${bucketLabel} is not allowed; extractors must not return adoption plans, canonical files, patches, or topo writes.`);
|
|
184
|
+
continue;
|
|
185
|
+
}
|
|
186
|
+
if (options.strictCandidates && allowedBuckets && !allowedBuckets.has(bucket)) {
|
|
187
|
+
errors.push(`${bucketLabel} is not allowed for track '${options.track}'.`);
|
|
188
|
+
continue;
|
|
189
|
+
}
|
|
190
|
+
if (!Array.isArray(value)) {
|
|
191
|
+
errors.push(`${bucketLabel} must be an array`);
|
|
192
|
+
continue;
|
|
193
|
+
}
|
|
194
|
+
if (!options.strictCandidates) continue;
|
|
195
|
+
for (let index = 0; index < value.length; index += 1) {
|
|
196
|
+
const candidate = value[index];
|
|
197
|
+
const candidateLabel = `${bucketLabel}[${index}]`;
|
|
198
|
+
if (!isPlainObject(candidate)) {
|
|
199
|
+
errors.push(`${candidateLabel} must be an object.`);
|
|
200
|
+
continue;
|
|
201
|
+
}
|
|
202
|
+
errors.push(...validateCandidateIdentity(bucket, candidate, candidateLabel));
|
|
203
|
+
validateNoUnsafeRecords(candidate, candidateLabel, errors);
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
return {
|
|
208
|
+
ok: errors.length === 0,
|
|
209
|
+
errors,
|
|
210
|
+
smoke: errors.length === 0
|
|
211
|
+
? {
|
|
212
|
+
findings: Array.isArray(result.findings) ? result.findings.length : 0,
|
|
213
|
+
candidateKeys: candidateKeys.length,
|
|
214
|
+
diagnostics: Array.isArray(result.diagnostics) ? result.diagnostics.length : 0
|
|
215
|
+
}
|
|
216
|
+
: null
|
|
217
|
+
};
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
export { TRACK_CANDIDATE_BUCKETS };
|
|
@@ -0,0 +1,487 @@
|
|
|
1
|
+
// @ts-check
|
|
2
|
+
|
|
3
|
+
import fs from "node:fs";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
|
|
6
|
+
import { stableStringify } from "../format.js";
|
|
7
|
+
import { EXTRACTOR_TRACKS } from "./registry.js";
|
|
8
|
+
|
|
9
|
+
const DEFAULT_TRACK = "cli";
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* @typedef {Object} ExtractorScaffoldOptions
|
|
13
|
+
* @property {string|null|undefined} [packageName]
|
|
14
|
+
* @property {string|null|undefined} [manifestId]
|
|
15
|
+
* @property {string|null|undefined} [track]
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
/**
|
|
19
|
+
* @param {string} value
|
|
20
|
+
* @returns {string}
|
|
21
|
+
*/
|
|
22
|
+
function slugify(value) {
|
|
23
|
+
return String(value || "extractor")
|
|
24
|
+
.trim()
|
|
25
|
+
.toLowerCase()
|
|
26
|
+
.replace(/[^a-z0-9]+/g, "-")
|
|
27
|
+
.replace(/^-+|-+$/g, "") || "extractor";
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* @param {string} target
|
|
32
|
+
* @returns {string}
|
|
33
|
+
*/
|
|
34
|
+
function defaultPackageName(target) {
|
|
35
|
+
const basename = slugify(path.basename(path.resolve(target)));
|
|
36
|
+
return basename.startsWith("topogram-extractor-") ? basename : `topogram-extractor-${basename}`;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* @param {string} value
|
|
41
|
+
* @returns {boolean}
|
|
42
|
+
*/
|
|
43
|
+
function isValidPackageName(value) {
|
|
44
|
+
return /^(?:@[a-z0-9][a-z0-9._-]*\/)?[a-z0-9][a-z0-9._-]*$/.test(value);
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* @returns {string}
|
|
49
|
+
*/
|
|
50
|
+
function currentCliVersion() {
|
|
51
|
+
try {
|
|
52
|
+
const packageJson = JSON.parse(fs.readFileSync(new URL("../../package.json", import.meta.url), "utf8"));
|
|
53
|
+
return typeof packageJson.version === "string" && packageJson.version ? packageJson.version : "latest";
|
|
54
|
+
} catch {
|
|
55
|
+
return "latest";
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* @param {string} packageName
|
|
61
|
+
* @param {string} track
|
|
62
|
+
* @returns {string}
|
|
63
|
+
*/
|
|
64
|
+
function extractorId(packageName, track) {
|
|
65
|
+
const bareName = packageName.split("/").pop() || packageName;
|
|
66
|
+
return `${track}.${slugify(bareName.replace(/^topogram-extractor-/, "").replace(/^extractor-/, ""))}`;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
/**
|
|
70
|
+
* @param {string} track
|
|
71
|
+
* @returns {{ stack: Record<string, string>, capabilities: Record<string, boolean>, candidateKinds: string[], fixtureFiles: Record<string, string> }}
|
|
72
|
+
*/
|
|
73
|
+
function trackDefaults(track) {
|
|
74
|
+
if (track === "db") {
|
|
75
|
+
return {
|
|
76
|
+
stack: { domain: "database", framework: "scaffold" },
|
|
77
|
+
capabilities: { schema: true },
|
|
78
|
+
candidateKinds: ["entity"],
|
|
79
|
+
fixtureFiles: {
|
|
80
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-db", private: true })}\n`,
|
|
81
|
+
"src/schema.sql": "create table scaffold_records (id text primary key, name text not null);\n"
|
|
82
|
+
}
|
|
83
|
+
};
|
|
84
|
+
}
|
|
85
|
+
if (track === "api") {
|
|
86
|
+
return {
|
|
87
|
+
stack: { runtime: "node", framework: "scaffold" },
|
|
88
|
+
capabilities: { routes: true },
|
|
89
|
+
candidateKinds: ["capability", "route", "stack"],
|
|
90
|
+
fixtureFiles: {
|
|
91
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-api", private: true })}\n`,
|
|
92
|
+
"src/server.js": "app.get('/scaffold-records', listScaffoldRecords);\n"
|
|
93
|
+
}
|
|
94
|
+
};
|
|
95
|
+
}
|
|
96
|
+
if (track === "ui") {
|
|
97
|
+
return {
|
|
98
|
+
stack: { framework: "scaffold-ui" },
|
|
99
|
+
capabilities: { screens: true, flows: true },
|
|
100
|
+
candidateKinds: ["screen", "route", "flow"],
|
|
101
|
+
fixtureFiles: {
|
|
102
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-ui", private: true })}\n`,
|
|
103
|
+
"src/routes/scaffold-records.jsx": "export default function ScaffoldRecords() { return <main>Scaffold records</main>; }\n"
|
|
104
|
+
}
|
|
105
|
+
};
|
|
106
|
+
}
|
|
107
|
+
if (track === "workflows") {
|
|
108
|
+
return {
|
|
109
|
+
stack: { domain: "workflow" },
|
|
110
|
+
capabilities: { workflows: true },
|
|
111
|
+
candidateKinds: ["workflow"],
|
|
112
|
+
fixtureFiles: {
|
|
113
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-workflows", private: true })}\n`,
|
|
114
|
+
"docs/workflows.md": "# Scaffold workflow\n\n- draft\n- review\n- complete\n"
|
|
115
|
+
}
|
|
116
|
+
};
|
|
117
|
+
}
|
|
118
|
+
if (track === "verification") {
|
|
119
|
+
return {
|
|
120
|
+
stack: { domain: "verification" },
|
|
121
|
+
capabilities: { verifications: true },
|
|
122
|
+
candidateKinds: ["verification"],
|
|
123
|
+
fixtureFiles: {
|
|
124
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-verification", private: true, scripts: { test: "node test.js" } })}\n`,
|
|
125
|
+
"test.js": "console.log('scaffold verification fixture');\n"
|
|
126
|
+
}
|
|
127
|
+
};
|
|
128
|
+
}
|
|
129
|
+
return {
|
|
130
|
+
stack: { runtime: "node", framework: "generic-cli" },
|
|
131
|
+
capabilities: { commands: true, options: true, effects: true },
|
|
132
|
+
candidateKinds: ["command", "capability", "cli_surface"],
|
|
133
|
+
fixtureFiles: {
|
|
134
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-cli", private: true, bin: { scaffold: "./bin/scaffold.js" } })}\n`,
|
|
135
|
+
"bin/scaffold.js": "#!/usr/bin/env node\nconsole.log('Usage: scaffold check --json');\n"
|
|
136
|
+
}
|
|
137
|
+
};
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
/**
|
|
141
|
+
* @param {string} track
|
|
142
|
+
* @returns {string}
|
|
143
|
+
*/
|
|
144
|
+
function candidateSourceForTrack(track) {
|
|
145
|
+
if (track === "db") {
|
|
146
|
+
return `{
|
|
147
|
+
entities: [{
|
|
148
|
+
id_hint: "entity_scaffold_record",
|
|
149
|
+
label: "Scaffold Record",
|
|
150
|
+
confidence: "low",
|
|
151
|
+
provenance: ["package-extractor-scaffold"],
|
|
152
|
+
fields: [{ name: "id", field_type: "string", required: true }]
|
|
153
|
+
}]
|
|
154
|
+
}`;
|
|
155
|
+
}
|
|
156
|
+
if (track === "api") {
|
|
157
|
+
return `{
|
|
158
|
+
capabilities: [{
|
|
159
|
+
id_hint: "cap_list_scaffold_records",
|
|
160
|
+
label: "List scaffold records",
|
|
161
|
+
confidence: "low",
|
|
162
|
+
provenance: ["package-extractor-scaffold"]
|
|
163
|
+
}],
|
|
164
|
+
routes: [{
|
|
165
|
+
method: "GET",
|
|
166
|
+
path: "/scaffold-records",
|
|
167
|
+
capability_hint: "cap_list_scaffold_records",
|
|
168
|
+
confidence: "low",
|
|
169
|
+
provenance: ["package-extractor-scaffold"]
|
|
170
|
+
}],
|
|
171
|
+
stacks: ["scaffold-api"]
|
|
172
|
+
}`;
|
|
173
|
+
}
|
|
174
|
+
if (track === "ui") {
|
|
175
|
+
return `{
|
|
176
|
+
screens: [{
|
|
177
|
+
id_hint: "screen_scaffold_records",
|
|
178
|
+
label: "Scaffold Records",
|
|
179
|
+
screen_kind: "list",
|
|
180
|
+
route_path: "/scaffold-records",
|
|
181
|
+
confidence: "low",
|
|
182
|
+
provenance: ["package-extractor-scaffold"]
|
|
183
|
+
}],
|
|
184
|
+
routes: [{
|
|
185
|
+
id_hint: "route_scaffold_records",
|
|
186
|
+
path: "/scaffold-records",
|
|
187
|
+
screen_id: "screen_scaffold_records",
|
|
188
|
+
confidence: "low",
|
|
189
|
+
provenance: ["package-extractor-scaffold"]
|
|
190
|
+
}],
|
|
191
|
+
flows: [],
|
|
192
|
+
stacks: ["scaffold-ui"]
|
|
193
|
+
}`;
|
|
194
|
+
}
|
|
195
|
+
if (track === "workflows") {
|
|
196
|
+
return `{
|
|
197
|
+
workflows: [{
|
|
198
|
+
id_hint: "workflow_scaffold_review",
|
|
199
|
+
label: "Scaffold Review",
|
|
200
|
+
confidence: "low",
|
|
201
|
+
provenance: ["package-extractor-scaffold"]
|
|
202
|
+
}]
|
|
203
|
+
}`;
|
|
204
|
+
}
|
|
205
|
+
if (track === "verification") {
|
|
206
|
+
return `{
|
|
207
|
+
verifications: [{
|
|
208
|
+
id_hint: "verification_scaffold_check",
|
|
209
|
+
label: "Scaffold Check",
|
|
210
|
+
confidence: "low",
|
|
211
|
+
provenance: ["package-extractor-scaffold"]
|
|
212
|
+
}],
|
|
213
|
+
frameworks: ["scaffold"],
|
|
214
|
+
scripts: [{
|
|
215
|
+
id_hint: "script_scaffold_check",
|
|
216
|
+
command: "npm test",
|
|
217
|
+
confidence: "low",
|
|
218
|
+
provenance: ["package-extractor-scaffold"]
|
|
219
|
+
}]
|
|
220
|
+
}`;
|
|
221
|
+
}
|
|
222
|
+
return `{
|
|
223
|
+
commands: [{
|
|
224
|
+
command_id: "scaffold_check",
|
|
225
|
+
label: "Scaffold Check",
|
|
226
|
+
usage: "scaffold check --json",
|
|
227
|
+
provenance: ["package-extractor-scaffold"]
|
|
228
|
+
}],
|
|
229
|
+
capabilities: [{
|
|
230
|
+
id_hint: "cap_scaffold_check",
|
|
231
|
+
label: "Run scaffold check",
|
|
232
|
+
command_id: "scaffold_check",
|
|
233
|
+
provenance: ["package-extractor-scaffold"]
|
|
234
|
+
}],
|
|
235
|
+
surfaces: [{
|
|
236
|
+
id_hint: "proj_cli_surface",
|
|
237
|
+
commands: ["scaffold_check"],
|
|
238
|
+
options: [{ command_id: "scaffold_check", name: "json", flag: "--json", type: "boolean", required: false }],
|
|
239
|
+
effects: [{ command_id: "scaffold_check", effect: "read_only" }],
|
|
240
|
+
provenance: ["package-extractor-scaffold"]
|
|
241
|
+
}]
|
|
242
|
+
}`;
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
/**
|
|
246
|
+
* @param {string} track
|
|
247
|
+
* @param {string} extractor
|
|
248
|
+
* @returns {string}
|
|
249
|
+
*/
|
|
250
|
+
function adapterSource(track, extractor) {
|
|
251
|
+
return `const manifest = require("./topogram-extractor.json");
|
|
252
|
+
|
|
253
|
+
exports.manifest = manifest;
|
|
254
|
+
exports.extractors = [{
|
|
255
|
+
id: ${JSON.stringify(extractor)},
|
|
256
|
+
track: ${JSON.stringify(track)},
|
|
257
|
+
detect(context) {
|
|
258
|
+
return { score: 1, reasons: ["Scaffold extractor runs against the included fixture."] };
|
|
259
|
+
},
|
|
260
|
+
extract(context) {
|
|
261
|
+
return {
|
|
262
|
+
findings: [{
|
|
263
|
+
kind: "scaffold_finding",
|
|
264
|
+
message: "Replace this scaffold extractor with precise framework evidence.",
|
|
265
|
+
evidence: ["fixtures/basic-source"]
|
|
266
|
+
}],
|
|
267
|
+
candidates: ${candidateSourceForTrack(track)},
|
|
268
|
+
diagnostics: []
|
|
269
|
+
};
|
|
270
|
+
}
|
|
271
|
+
}];
|
|
272
|
+
`;
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
/**
|
|
276
|
+
* @param {string} packageName
|
|
277
|
+
* @param {string} track
|
|
278
|
+
* @returns {string}
|
|
279
|
+
*/
|
|
280
|
+
function checkScriptSource(packageName, track) {
  // Emits the source of scripts/check-extractor.mjs for a scaffolded pack.
  // The generated script is self-contained: it reads the pack's own
  // package.json for its name (falling back to the name baked in here via
  // JSON.stringify), runs the topogram CLI (binary overridable through the
  // TOPOGRAM_BIN env var) through the full smoke loop -- `extractor check`,
  // `extract`, `extract plan`, `query extract-plan`, `adopt --list` -- against
  // fixtures/basic-source, and fails if any command exits non-zero or if the
  // fixture tree was mutated (extractors must be read-only over source files).
  // Note: `\`` / `\${` below are escapes for the *generated* script's own
  // template literals; unescaped `${...}` is interpolated right here.
  return `import childProcess from "node:child_process";
import fs from "node:fs";
import os from "node:os";
import path from "node:path";

const packageJson = JSON.parse(fs.readFileSync(new URL("../package.json", import.meta.url), "utf8"));
const packageName = packageJson.name || ${JSON.stringify(packageName)};
const track = ${JSON.stringify(track)};
const topogramBin = process.env.TOPOGRAM_BIN || "topogram";
const root = process.cwd();

function run(args, options = {}) {
  const result = childProcess.spawnSync(topogramBin, args, {
    cwd: options.cwd || root,
    encoding: "utf8",
    env: { ...process.env, FORCE_COLOR: "0" },
    maxBuffer: 1024 * 1024 * 10
  });
  if (result.status !== 0) {
    process.stderr.write(result.stdout || "");
    process.stderr.write(result.stderr || "");
    throw new Error(\`Command failed: \${topogramBin} \${args.join(" ")}\`);
  }
  return result.stdout;
}

function snapshotFixture() {
  const fixtureRoot = path.join(root, "fixtures", "basic-source");
  const files = [];
  function visit(dir) {
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      const absolute = path.join(dir, entry.name);
      if (entry.isDirectory()) visit(absolute);
      else if (entry.isFile()) files.push([path.relative(fixtureRoot, absolute), fs.readFileSync(absolute, "utf8")]);
    }
  }
  visit(fixtureRoot);
  return JSON.stringify(files.sort());
}

run(["extractor", "check", "."]);

const before = snapshotFixture();
const tmp = fs.mkdtempSync(path.join(os.tmpdir(), "topogram-extractor-smoke."));
const policyPath = path.join(tmp, "topogram.extractor-policy.json");
fs.writeFileSync(policyPath, JSON.stringify({
  version: "0.1",
  allowedPackageScopes: [],
  allowedPackages: [packageName],
  pinnedVersions: { [packageName]: "1" },
  enabledPackages: []
}, null, 2) + "\\n", "utf8");

const extracted = path.join(tmp, "extracted");
run([
  "extract",
  path.join(root, "fixtures", "basic-source"),
  "--out",
  extracted,
  "--from",
  track,
  "--extractor",
  ".",
  "--extractor-policy",
  policyPath,
  "--json"
]);
run(["extract", "plan", extracted, "--json"]);
run(["query", "extract-plan", path.join(extracted, "topo"), "--json"]);
run(["adopt", "--list", extracted, "--json"]);

const after = snapshotFixture();
if (after !== before) {
  throw new Error("Extractor smoke mutated fixture source files.");
}

console.log(\`Extractor package smoke passed for \${packageName}.\`);
`;
}
|
|
360
|
+
|
|
361
|
+
/**
 * Writes every entry of the map to disk, creating parent directories as
 * needed, and reports which paths were written.
 *
 * @param {Record<string, string>} files - absolute path -> file contents
 * @returns {string[]} the written paths, sorted
 */
function writeFiles(files) {
  const targets = Object.keys(files);
  for (const target of targets) {
    // Ensure the parent directory chain exists before writing.
    fs.mkdirSync(path.dirname(target), { recursive: true });
    fs.writeFileSync(target, files[target], "utf8");
  }
  return targets.sort();
}
|
|
374
|
+
|
|
375
|
+
/**
 * Scaffolds a new Topogram extractor pack at `target`: package.json, the
 * extractor manifest, an adapter stub, the smoke-check script, a README, and
 * track-specific fixture sources. All failures are reported as a structured
 * result (`ok: false` plus `errors`), never thrown.
 *
 * @param {string} target
 * @param {ExtractorScaffoldOptions} [options]
 * @returns {{ ok: boolean, target: string, packageName: string|null, manifestId: string|null, track: string|null, files: string[], nextCommands: string[], errors: string[] }}
 */
export function scaffoldExtractorPack(target, options = {}) {
  const absoluteTarget = path.resolve(target || "");
  if (!target || target.startsWith("-")) {
    return { ok: false, target: absoluteTarget, packageName: null, manifestId: null, track: null, files: [], nextCommands: [], errors: ["Usage: topogram extractor scaffold <target> [--track <track>] [--package <name>] [--id <manifest-id>]"] };
  }
  // Refuse to scaffold over anything that already has content. Check
  // isDirectory() before readdirSync: previously an existing *file* at the
  // target path made readdirSync throw ENOTDIR instead of returning the
  // structured error result like every other failure path.
  if (fs.existsSync(absoluteTarget) && (!fs.statSync(absoluteTarget).isDirectory() || fs.readdirSync(absoluteTarget).length > 0)) {
    return { ok: false, target: absoluteTarget, packageName: null, manifestId: null, track: null, files: [], nextCommands: [], errors: [`Extractor scaffold target '${absoluteTarget}' already exists and is not empty.`] };
  }

  const track = options.track || DEFAULT_TRACK;
  if (!EXTRACTOR_TRACKS.includes(track)) {
    return { ok: false, target: absoluteTarget, packageName: null, manifestId: null, track, files: [], nextCommands: [], errors: [`Extractor track '${track}' is not supported. Expected one of: ${EXTRACTOR_TRACKS.join(", ")}.`] };
  }

  // Package name defaults to one derived from the target directory name.
  const packageName = options.packageName || defaultPackageName(absoluteTarget);
  if (!isValidPackageName(packageName)) {
    return { ok: false, target: absoluteTarget, packageName, manifestId: null, track, files: [], nextCommands: [], errors: [`Extractor package name '${packageName}' is invalid. Use a lowercase npm package name such as @scope/topogram-extractor-example.`] };
  }
  const manifestId = options.manifestId || packageName;
  const extractor = extractorId(packageName, track);
  const defaults = trackDefaults(track);
  // topogram-extractor.json: declares what this pack extracts for the track.
  const manifest = {
    id: manifestId,
    version: "1",
    tracks: [track],
    source: "package",
    package: packageName,
    stack: defaults.stack,
    capabilities: defaults.capabilities,
    candidateKinds: defaults.candidateKinds,
    evidenceTypes: ["runtime_source"],
    extractors: [extractor]
  };
  // package.json: private by default; `npm run check` drives the smoke script,
  // and the CLI is pinned as a caret dev-dependency on the current version.
  const packageJson = {
    name: packageName,
    version: "0.1.0",
    private: true,
    description: "Topogram extractor pack scaffold.",
    main: "index.cjs",
    files: [
      "index.cjs",
      "topogram-extractor.json",
      "README.md",
      "scripts",
      "fixtures"
    ],
    scripts: {
      check: "node ./scripts/check-extractor.mjs"
    },
    devDependencies: {
      "@topogram/cli": `^${currentCliVersion()}`
    }
  };
  // Map of absolute path -> file contents for everything the scaffold writes.
  const files = {
    [path.join(absoluteTarget, "package.json")]: `${stableStringify(packageJson)}\n`,
    [path.join(absoluteTarget, "topogram-extractor.json")]: `${stableStringify(manifest)}\n`,
    [path.join(absoluteTarget, "index.cjs")]: adapterSource(track, extractor),
    [path.join(absoluteTarget, "scripts", "check-extractor.mjs")]: checkScriptSource(packageName, track),
    [path.join(absoluteTarget, "README.md")]: `# ${packageName}

This is a Topogram extractor pack scaffold for the \`${track}\` track.

## Author Loop

\`\`\`bash
npm install
npm run check
\`\`\`

\`npm run check\` runs:

- \`topogram extractor check .\`
- \`topogram extract ./fixtures/basic-source --out <tmp> --from ${track} --extractor .\`
- \`topogram extract plan <tmp>\`
- \`topogram query extract-plan <tmp>/topo\`
- \`topogram adopt --list <tmp>\`

Replace the scaffold adapter in \`index.cjs\` with precise, read-only source evidence.
Extractor packages must not mutate source files, write canonical \`topo/**\`, install
packages, perform network access, or define adoption semantics.

Candidate output is validated by track. Return only review candidate buckets for
the declared track, give each candidate a stable identity, keep file evidence
project-relative, and never return files, patches, adoption plans, or write
instructions.
`
  };
  // Fixture sources are track-specific and live under fixtures/basic-source.
  for (const [relative, contents] of Object.entries(defaults.fixtureFiles)) {
    files[path.join(absoluteTarget, "fixtures", "basic-source", relative)] = contents;
  }

  const written = writeFiles(files);
  return {
    ok: true,
    target: absoluteTarget,
    packageName,
    manifestId,
    track,
    // Report target-relative, forward-slash paths for stable cross-platform output.
    files: written.map((filePath) => path.relative(absoluteTarget, filePath).replace(/\\/g, "/")),
    nextCommands: [
      `cd ${absoluteTarget}`,
      "npm install",
      "npm run check",
      "topogram extractor check ."
    ],
    errors: []
  };
}
|
|
@@ -3,6 +3,7 @@
|
|
|
3
3
|
import { getEnrichersForTrack, getExtractorsForTrack } from "../registry.js";
|
|
4
4
|
import { normalizeCandidatesForTrack } from "./candidates.js";
|
|
5
5
|
import { packageExtractorsForContext } from "../../../extractor/packages.js";
|
|
6
|
+
import { validateExtractorResult } from "../../../extractor/output.js";
|
|
6
7
|
|
|
7
8
|
/**
|
|
8
9
|
* @param {any} context
|
|
@@ -140,25 +141,12 @@ function initialCandidatesForTrack(track) {
|
|
|
140
141
|
*/
|
|
141
142
|
// Validates an extractor's extract(context) result via the shared validator;
// package-sourced extractors get strict candidate checking. Throws a single
// Error aggregating every validation message, prefixed with the extractor id.
function assertExtractorResultShape(extractor, result) {
  const label = extractor?.id || "unknown";
  const { ok, errors } = validateExtractorResult(result, {
    track: extractor?.track,
    strictCandidates: extractor?.source === "package"
  });
  if (ok) return;
  const combined = errors.map((message) => `Extractor '${label}' ${message}.`).join("\n");
  throw new Error(combined);
}
|
|
164
152
|
|