@topogram/cli 0.3.82 → 0.3.84
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/agent-ops/query-builders/change-risk/extract-plan.js +2 -1
- package/src/agent-ops/query-builders/multi-agent.js +2 -0
- package/src/agent-ops/query-builders/work-packets.js +2 -0
- package/src/agent-ops/query-builders/workflow-context.js +4 -0
- package/src/cli/command-parsers/extractor.js +20 -1
- package/src/cli/commands/extractor.js +38 -0
- package/src/cli/commands/query/definitions.js +45 -0
- package/src/cli/commands/query/extract-adopt.js +9 -2
- package/src/cli/commands/query/runner/workflow.js +12 -3
- package/src/cli/commands/query/workspace.js +68 -0
- package/src/cli/help.js +2 -0
- package/src/extractor/scaffold.js +482 -0
package/package.json
CHANGED
|
@@ -2,7 +2,7 @@ import { buildImportMaintainedRisk } from "../maintained-risk.js";
|
|
|
2
2
|
import { buildImportPlanNextAction } from "../workflow-presets-core.js";
|
|
3
3
|
import { buildPresetGuidanceSummary } from "./risk.js";
|
|
4
4
|
|
|
5
|
-
export function buildImportPlanPayload(adoptionPlan, taskModeArtifact, maintainedBoundaryArtifact = null, workflowPresetState = null) {
|
|
5
|
+
export function buildImportPlanPayload(adoptionPlan, taskModeArtifact, maintainedBoundaryArtifact = null, workflowPresetState = null, extractionContext = null) {
|
|
6
6
|
const importMaintained = buildImportMaintainedRisk(adoptionPlan.imported_proposal_surfaces || [], maintainedBoundaryArtifact);
|
|
7
7
|
const importNextAction = buildImportPlanNextAction(taskModeArtifact.next_action || null, workflowPresetState);
|
|
8
8
|
const presetGuidanceSummary = buildPresetGuidanceSummary(workflowPresetState, null);
|
|
@@ -13,6 +13,7 @@ export function buildImportPlanPayload(adoptionPlan, taskModeArtifact, maintaine
|
|
|
13
13
|
next_action: importNextAction,
|
|
14
14
|
write_scope: taskModeArtifact.write_scope || null,
|
|
15
15
|
verification_targets: taskModeArtifact.verification_targets || null,
|
|
16
|
+
extraction_context: extractionContext,
|
|
16
17
|
review_groups: adoptionPlan.approved_review_groups || [],
|
|
17
18
|
staged_items: adoptionPlan.staged_items || [],
|
|
18
19
|
accepted_items: adoptionPlan.accepted_items || [],
|
|
@@ -496,6 +496,7 @@ export function buildMultiAgentPlanPayload({
|
|
|
496
496
|
importPlan,
|
|
497
497
|
report,
|
|
498
498
|
adoptionStatus,
|
|
499
|
+
extractionContext = null,
|
|
499
500
|
resolvedWorkflowContext = null
|
|
500
501
|
}) {
|
|
501
502
|
const presetGuidanceSummary = buildPresetGuidanceSummary(importPlan?.workflow_presets || null, resolvedWorkflowContext || singleAgentPlan?.resolved_workflow_context || null);
|
|
@@ -530,6 +531,7 @@ export function buildMultiAgentPlanPayload({
|
|
|
530
531
|
active_preset_ids: presetGuidanceSummary.active_preset_ids,
|
|
531
532
|
preset_blockers: presetGuidanceSummary.preset_blockers,
|
|
532
533
|
recommended_preset_action: presetGuidanceSummary.recommended_preset_action,
|
|
534
|
+
extraction_context: extractionContext || singleAgentPlan?.extraction_context || resolvedWorkflowContext?.extraction_context || null,
|
|
533
535
|
resolved_workflow_context: resolvedWorkflowContext || singleAgentPlan?.resolved_workflow_context || null,
|
|
534
536
|
lanes,
|
|
535
537
|
parallel_workstreams: parallelWorkstreams,
|
|
@@ -147,6 +147,7 @@ export function buildWorkPacketPayload({
|
|
|
147
147
|
const publishedHandoffPacket = (multiAgentPlan?.handoff_packets || []).find((packet) => packet.from_lane === laneId)
|
|
148
148
|
|| handoffTemplateFromLane(lane, multiAgentPlan?.mode || null);
|
|
149
149
|
const resolvedWorkflowContext = multiAgentPlan?.resolved_workflow_context || null;
|
|
150
|
+
const extractionContext = multiAgentPlan?.extraction_context || resolvedWorkflowContext?.extraction_context || null;
|
|
150
151
|
const presetGuidanceSummary = multiAgentPlan?.preset_guidance_summary || buildPresetGuidanceSummary(null, resolvedWorkflowContext);
|
|
151
152
|
const effectiveWriteScope = lane.workflow_context_overrides?.effective_write_scope || lane.write_scope || resolvedWorkflowContext?.effective_write_scope || null;
|
|
152
153
|
const effectiveVerificationPolicy = lane.workflow_context_overrides?.effective_verification_policy || {
|
|
@@ -172,6 +173,7 @@ export function buildWorkPacketPayload({
|
|
|
172
173
|
preset_guidance_summary: presetGuidanceSummary,
|
|
173
174
|
active_preset_ids: presetGuidanceSummary.active_preset_ids,
|
|
174
175
|
recommended_preset_action: presetGuidanceSummary.recommended_preset_action,
|
|
176
|
+
extraction_context: extractionContext,
|
|
175
177
|
write_scope: lane.write_scope || null,
|
|
176
178
|
effective_write_scope: effectiveWriteScope,
|
|
177
179
|
owned_targets: lane.owned_targets || null,
|
|
@@ -154,6 +154,7 @@ export function buildResolvedWorkflowContextPayload({
|
|
|
154
154
|
reviewBoundary = null,
|
|
155
155
|
maintainedBoundary = null,
|
|
156
156
|
generatorTargets = [],
|
|
157
|
+
extractionContext = null,
|
|
157
158
|
providerPresets = null,
|
|
158
159
|
teamPresets = null,
|
|
159
160
|
providerManifests = null,
|
|
@@ -326,6 +327,7 @@ export function buildResolvedWorkflowContextPayload({
|
|
|
326
327
|
preferred_queries: preferredQueries,
|
|
327
328
|
artifact_load_order: artifactLoadOrder,
|
|
328
329
|
recommended_artifact_queries: recommendedArtifactQueries,
|
|
330
|
+
extraction_context: extractionContext,
|
|
329
331
|
effective_write_scope: taskModeArtifact?.write_scope || null,
|
|
330
332
|
effective_review_policy: {
|
|
331
333
|
block_on: reviewBlockers,
|
|
@@ -362,6 +364,7 @@ export function buildSingleAgentPlanPayload({
|
|
|
362
364
|
workspace,
|
|
363
365
|
taskModeArtifact,
|
|
364
366
|
importPlan = null,
|
|
367
|
+
extractionContext = null,
|
|
365
368
|
resolvedWorkflowContext = null
|
|
366
369
|
}) {
|
|
367
370
|
const primaryArtifacts = stableOrderedUnion([
|
|
@@ -377,6 +380,7 @@ export function buildSingleAgentPlanPayload({
|
|
|
377
380
|
current_focus: currentFocusFromTaskMode(taskModeArtifact),
|
|
378
381
|
next_action: taskModeArtifact?.next_action || null,
|
|
379
382
|
write_scope: taskModeArtifact?.write_scope || null,
|
|
383
|
+
extraction_context: extractionContext || resolvedWorkflowContext?.extraction_context || null,
|
|
380
384
|
review_boundaries: buildReviewBoundaries(taskModeArtifact, importPlan),
|
|
381
385
|
proof_targets: taskModeArtifact?.verification_targets || null,
|
|
382
386
|
operator_loop: buildOperatorLoopSummary({
|
|
@@ -2,6 +2,17 @@
|
|
|
2
2
|
|
|
3
3
|
import { commandPath } from "./shared.js";
|
|
4
4
|
|
|
5
|
+
/**
|
|
6
|
+
* @param {string[]} args
|
|
7
|
+
* @param {string} flag
|
|
8
|
+
* @returns {string|null}
|
|
9
|
+
*/
|
|
10
|
+
function optionValue(args, flag) {
|
|
11
|
+
const index = args.indexOf(flag);
|
|
12
|
+
const value = index >= 0 ? args[index + 1] : null;
|
|
13
|
+
return value && !value.startsWith("-") ? value : null;
|
|
14
|
+
}
|
|
15
|
+
|
|
5
16
|
/**
|
|
6
17
|
* @param {string[]} args
|
|
7
18
|
* @returns {import("./shared.js").SplitCommandArgs|null}
|
|
@@ -16,6 +27,15 @@ export function parseExtractorCommandArgs(args) {
|
|
|
16
27
|
if (args[0] === "extractor" && args[1] === "check") {
|
|
17
28
|
return { extractorCommand: "check", inputPath: args[2] };
|
|
18
29
|
}
|
|
30
|
+
if (args[0] === "extractor" && args[1] === "scaffold") {
|
|
31
|
+
return {
|
|
32
|
+
extractorCommand: "scaffold",
|
|
33
|
+
inputPath: args[2],
|
|
34
|
+
extractorScaffoldTrack: optionValue(args, "--track"),
|
|
35
|
+
extractorScaffoldPackage: optionValue(args, "--package"),
|
|
36
|
+
extractorScaffoldId: optionValue(args, "--id")
|
|
37
|
+
};
|
|
38
|
+
}
|
|
19
39
|
if (args[0] === "extractor" && args[1] === "policy" && args[2] === "init") {
|
|
20
40
|
return { extractorPolicyCommand: "init", inputPath: commandPath(args, 3, ".") };
|
|
21
41
|
}
|
|
@@ -37,4 +57,3 @@ export function parseExtractorCommandArgs(args) {
|
|
|
37
57
|
}
|
|
38
58
|
return null;
|
|
39
59
|
}
|
|
40
|
-
|
|
@@ -6,6 +6,7 @@ import path from "node:path";
|
|
|
6
6
|
import { stableStringify } from "../../format.js";
|
|
7
7
|
import { checkExtractorPack } from "../../extractor/check.js";
|
|
8
8
|
import { FIRST_PARTY_EXTRACTOR_PACKAGES, firstPartyExtractorInfo } from "../../extractor/first-party.js";
|
|
9
|
+
import { scaffoldExtractorPack } from "../../extractor/scaffold.js";
|
|
9
10
|
import {
|
|
10
11
|
EXTRACTOR_MANIFESTS,
|
|
11
12
|
getExtractorManifest,
|
|
@@ -30,6 +31,7 @@ export function printExtractorHelp() {
|
|
|
30
31
|
console.log("Usage: topogram extractor list [--json]");
|
|
31
32
|
console.log(" or: topogram extractor show <id-or-package> [--json]");
|
|
32
33
|
console.log(" or: topogram extractor check <path-or-package> [--json]");
|
|
34
|
+
console.log(" or: topogram extractor scaffold <target> [--track <track>] [--package <name>] [--id <manifest-id>] [--json]");
|
|
33
35
|
console.log(" or: topogram extractor policy init [path] [--json]");
|
|
34
36
|
console.log(" or: topogram extractor policy status [path] [--json]");
|
|
35
37
|
console.log(" or: topogram extractor policy check [path] [--json]");
|
|
@@ -48,11 +50,37 @@ export function printExtractorHelp() {
|
|
|
48
50
|
console.log(" topogram extractor show topogram/api-extractors");
|
|
49
51
|
console.log(" topogram extractor show @topogram/extractor-prisma-db");
|
|
50
52
|
console.log(" topogram extractor check ./extractor-package");
|
|
53
|
+
console.log(" topogram extractor scaffold ./topogram-extractor-node-cli --track cli --package @scope/topogram-extractor-node-cli");
|
|
51
54
|
console.log(" topogram extractor policy init");
|
|
52
55
|
console.log(" topogram extractor policy pin @topogram/extractor-node-cli@1");
|
|
53
56
|
console.log(" topogram extract ./express-api --out ./imported-topogram --from api --extractor @topogram/extractor-express-api");
|
|
54
57
|
}
|
|
55
58
|
|
|
59
|
+
/**
|
|
60
|
+
* @param {ReturnType<typeof scaffoldExtractorPack>} payload
|
|
61
|
+
* @returns {void}
|
|
62
|
+
*/
|
|
63
|
+
export function printExtractorScaffold(payload) {
|
|
64
|
+
console.log(payload.ok ? "Extractor scaffold created." : "Extractor scaffold failed.");
|
|
65
|
+
console.log(`Target: ${payload.target}`);
|
|
66
|
+
if (payload.packageName) console.log(`Package: ${payload.packageName}`);
|
|
67
|
+
if (payload.manifestId) console.log(`Manifest id: ${payload.manifestId}`);
|
|
68
|
+
if (payload.track) console.log(`Track: ${payload.track}`);
|
|
69
|
+
if (payload.files.length > 0) {
|
|
70
|
+
console.log("Files:");
|
|
71
|
+
for (const file of payload.files) {
|
|
72
|
+
console.log(`- ${file}`);
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
if (payload.nextCommands.length > 0) {
|
|
76
|
+
console.log("Next commands:");
|
|
77
|
+
for (const command of payload.nextCommands) {
|
|
78
|
+
console.log(`- ${command}`);
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
for (const error of payload.errors || []) console.log(`Error: ${error}`);
|
|
82
|
+
}
|
|
83
|
+
|
|
56
84
|
/**
|
|
57
85
|
* @param {string} cwd
|
|
58
86
|
* @returns {string[]}
|
|
@@ -587,6 +615,16 @@ export function runExtractorCommand(context) {
|
|
|
587
615
|
else printExtractorCheck(payload);
|
|
588
616
|
return payload.ok ? 0 : 1;
|
|
589
617
|
}
|
|
618
|
+
if (commandArgs.extractorCommand === "scaffold") {
|
|
619
|
+
const payload = scaffoldExtractorPack(inputPath || "", {
|
|
620
|
+
track: commandArgs.extractorScaffoldTrack,
|
|
621
|
+
packageName: commandArgs.extractorScaffoldPackage,
|
|
622
|
+
manifestId: commandArgs.extractorScaffoldId
|
|
623
|
+
});
|
|
624
|
+
if (json) console.log(stableStringify(payload));
|
|
625
|
+
else printExtractorScaffold(payload);
|
|
626
|
+
return payload.ok ? 0 : 1;
|
|
627
|
+
}
|
|
590
628
|
if (commandArgs.extractorCommand === "list") {
|
|
591
629
|
const payload = buildExtractorListPayload(cwd);
|
|
592
630
|
if (json) console.log(stableStringify(payload));
|
|
@@ -97,6 +97,51 @@ export function queryDefinitions() {
|
|
|
97
97
|
output: "single_agent_plan_query",
|
|
98
98
|
example: "topogram query single-agent-plan ./topo --mode modeling --widget widget_data_grid --json"
|
|
99
99
|
},
|
|
100
|
+
{
|
|
101
|
+
name: "extract-plan",
|
|
102
|
+
purpose: "Summarize brownfield extraction candidates, package extractor context, and adoption review state.",
|
|
103
|
+
description: "Return the extract/adopt plan for an extracted workspace, including trusted extraction provenance and next review commands.",
|
|
104
|
+
selectors: ["provider", "preset"],
|
|
105
|
+
args: ["[path]", "[--json]"],
|
|
106
|
+
output: "extract_plan_query",
|
|
107
|
+
example: "topogram query extract-plan ./extracted-topogram --json"
|
|
108
|
+
},
|
|
109
|
+
{
|
|
110
|
+
name: "multi-agent-plan",
|
|
111
|
+
purpose: "Split extract/adopt review into serialized and parallel agent lanes.",
|
|
112
|
+
description: "Return lane ownership, handoff packets, overlap rules, and package extractor context for extract/adopt mode.",
|
|
113
|
+
selectors: ["mode", "provider", "preset"],
|
|
114
|
+
args: ["[path]", "--mode extract-adopt", "[--json]"],
|
|
115
|
+
output: "multi_agent_plan",
|
|
116
|
+
example: "topogram query multi-agent-plan ./extracted-topogram --mode extract-adopt --json"
|
|
117
|
+
},
|
|
118
|
+
{
|
|
119
|
+
name: "work-packet",
|
|
120
|
+
purpose: "Give one extract/adopt lane its allowed inputs, write scope, blockers, and handoff packet.",
|
|
121
|
+
description: "Return a lane-scoped work packet for extract/adopt mode.",
|
|
122
|
+
selectors: ["mode", "lane"],
|
|
123
|
+
args: ["[path]", "--mode extract-adopt", "--lane <id>", "[--json]"],
|
|
124
|
+
output: "work_packet",
|
|
125
|
+
example: "topogram query work-packet ./extracted-topogram --mode extract-adopt --lane adoption_operator --json"
|
|
126
|
+
},
|
|
127
|
+
{
|
|
128
|
+
name: "lane-status",
|
|
129
|
+
purpose: "Show which extract/adopt lanes are ready, blocked, or complete.",
|
|
130
|
+
description: "Return artifact-derived lane status for extract/adopt mode.",
|
|
131
|
+
selectors: ["mode"],
|
|
132
|
+
args: ["[path]", "--mode extract-adopt", "[--json]"],
|
|
133
|
+
output: "lane_status_query",
|
|
134
|
+
example: "topogram query lane-status ./extracted-topogram --mode extract-adopt --json"
|
|
135
|
+
},
|
|
136
|
+
{
|
|
137
|
+
name: "handoff-status",
|
|
138
|
+
purpose: "Show extract/adopt handoff packet status across lanes.",
|
|
139
|
+
description: "Return handoff readiness and blockers for extract/adopt mode.",
|
|
140
|
+
selectors: ["mode"],
|
|
141
|
+
args: ["[path]", "--mode extract-adopt", "[--json]"],
|
|
142
|
+
output: "handoff_status_query",
|
|
143
|
+
example: "topogram query handoff-status ./extracted-topogram --mode extract-adopt --json"
|
|
144
|
+
},
|
|
100
145
|
{
|
|
101
146
|
name: "risk-summary",
|
|
102
147
|
purpose: "Surface behavioral, ownership, and verification risks for a selected change.",
|
|
@@ -16,6 +16,7 @@ import {
|
|
|
16
16
|
buildTaskMode,
|
|
17
17
|
normalizeTopogramPath,
|
|
18
18
|
readJson,
|
|
19
|
+
readExtractionContext,
|
|
19
20
|
requireReconcileArtifacts,
|
|
20
21
|
resultOk,
|
|
21
22
|
workflowPresetSelectors
|
|
@@ -50,6 +51,7 @@ export function buildImportPlanForContext(context, queryFamily) {
|
|
|
50
51
|
workspace: topogramRoot,
|
|
51
52
|
selectors: workflowPresetSelectors(taskModeResult.artifact, context.providerId, context.presetId, queryFamily)
|
|
52
53
|
});
|
|
54
|
+
const extractionContext = readExtractionContext(topogramRoot);
|
|
53
55
|
return {
|
|
54
56
|
ok: true,
|
|
55
57
|
topogramRoot,
|
|
@@ -60,7 +62,8 @@ export function buildImportPlanForContext(context, queryFamily) {
|
|
|
60
62
|
adoptionPlan,
|
|
61
63
|
taskModeResult.artifact,
|
|
62
64
|
maintainedBundleResult.artifact.maintained_boundary || null,
|
|
63
|
-
workflowPresets
|
|
65
|
+
workflowPresets,
|
|
66
|
+
extractionContext
|
|
64
67
|
)
|
|
65
68
|
};
|
|
66
69
|
}
|
|
@@ -85,17 +88,20 @@ export function buildImportAdoptAgentContext(context, queryFamily) {
|
|
|
85
88
|
workspace: topogramRoot,
|
|
86
89
|
selectors: workflowPresetSelectors(taskModeResult.artifact, context.providerId, context.presetId, queryFamily)
|
|
87
90
|
});
|
|
88
|
-
const
|
|
91
|
+
const extractionContext = readExtractionContext(topogramRoot);
|
|
92
|
+
const importPlan = buildImportPlanPayload(adoptionPlanArtifact, taskModeResult.artifact, null, workflowPresets, extractionContext);
|
|
89
93
|
const resolvedWorkflowContext = buildResolvedWorkflowContextPayload({
|
|
90
94
|
workspace: topogramRoot,
|
|
91
95
|
taskModeArtifact: taskModeResult.artifact,
|
|
92
96
|
importPlan,
|
|
97
|
+
extractionContext,
|
|
93
98
|
selectors: workflowPresetSelectors(taskModeResult.artifact, context.providerId, context.presetId, queryFamily)
|
|
94
99
|
});
|
|
95
100
|
const singleAgentPlan = buildSingleAgentPlanPayload({
|
|
96
101
|
workspace: topogramRoot,
|
|
97
102
|
taskModeArtifact: taskModeResult.artifact,
|
|
98
103
|
importPlan,
|
|
104
|
+
extractionContext,
|
|
99
105
|
resolvedWorkflowContext
|
|
100
106
|
});
|
|
101
107
|
const multiAgentPlan = buildMultiAgentPlanPayload({
|
|
@@ -104,6 +110,7 @@ export function buildImportAdoptAgentContext(context, queryFamily) {
|
|
|
104
110
|
importPlan,
|
|
105
111
|
report: reconcileReport,
|
|
106
112
|
adoptionStatus,
|
|
113
|
+
extractionContext,
|
|
107
114
|
resolvedWorkflowContext
|
|
108
115
|
});
|
|
109
116
|
return {
|
|
@@ -26,6 +26,7 @@ import {
|
|
|
26
26
|
normalizeTopogramPath,
|
|
27
27
|
printValidationFailure,
|
|
28
28
|
readJson,
|
|
29
|
+
readExtractionContext,
|
|
29
30
|
resultOk,
|
|
30
31
|
selectorOptions,
|
|
31
32
|
workflowPresetSelectors
|
|
@@ -83,7 +84,7 @@ export function runWorkflowQuery(context) {
|
|
|
83
84
|
workspace: topogramRoot,
|
|
84
85
|
selectors: workflowPresetSelectors(taskModeResult.artifact, context.providerId, context.presetId, "workflow-preset-activation")
|
|
85
86
|
});
|
|
86
|
-
importPlan = buildImportPlanPayload(readJson(adoptionPlanPath(topogramRoot)), taskModeResult.artifact, null, workflowPresets);
|
|
87
|
+
importPlan = buildImportPlanPayload(readJson(adoptionPlanPath(topogramRoot)), taskModeResult.artifact, null, workflowPresets, readExtractionContext(topogramRoot));
|
|
87
88
|
}
|
|
88
89
|
return printJson(buildWorkflowPresetActivationPayload({
|
|
89
90
|
workspace: topogramRoot,
|
|
@@ -134,24 +135,28 @@ function runSingleAgentPlan(context, selectors) {
|
|
|
134
135
|
});
|
|
135
136
|
const topogramRoot = normalizeTopogramPath(context.inputPath);
|
|
136
137
|
let importPlan = null;
|
|
138
|
+
let extractionContext = null;
|
|
137
139
|
if (context.modeId === "extract-adopt" && fs.existsSync(adoptionPlanPath(topogramRoot))) {
|
|
140
|
+
extractionContext = readExtractionContext(topogramRoot);
|
|
138
141
|
const workflowPresets = buildWorkflowPresetState({
|
|
139
142
|
workspace: topogramRoot,
|
|
140
143
|
selectors: workflowPresetSelectors(result.artifact, context.providerId, context.presetId, "single-agent-plan")
|
|
141
144
|
});
|
|
142
|
-
importPlan = buildImportPlanPayload(readJson(adoptionPlanPath(topogramRoot)), result.artifact, null, workflowPresets);
|
|
145
|
+
importPlan = buildImportPlanPayload(readJson(adoptionPlanPath(topogramRoot)), result.artifact, null, workflowPresets, extractionContext);
|
|
143
146
|
}
|
|
144
147
|
const resolvedWorkflowContext = buildResolvedWorkflowContextPayload({
|
|
145
148
|
workspace: topogramRoot,
|
|
146
149
|
taskModeArtifact: result.artifact,
|
|
147
150
|
generatorTargets,
|
|
148
151
|
selectors: workflowPresetSelectors(result.artifact, context.providerId, context.presetId, "single-agent-plan"),
|
|
152
|
+
extractionContext,
|
|
149
153
|
importPlan
|
|
150
154
|
});
|
|
151
155
|
return printJson(buildSingleAgentPlanPayload({
|
|
152
156
|
workspace: topogramRoot,
|
|
153
157
|
taskModeArtifact: result.artifact,
|
|
154
158
|
importPlan,
|
|
159
|
+
extractionContext,
|
|
155
160
|
resolvedWorkflowContext
|
|
156
161
|
}));
|
|
157
162
|
}
|
|
@@ -186,7 +191,9 @@ function runResolvedWorkflowContext(context, selectors) {
|
|
|
186
191
|
maintainedBoundaryArtifact: maintainedBundleResult?.artifact?.maintained_boundary || null
|
|
187
192
|
});
|
|
188
193
|
let importPlan = null;
|
|
194
|
+
let extractionContext = null;
|
|
189
195
|
if (context.modeId === "extract-adopt" && fs.existsSync(adoptionPlanPath(topogramRoot))) {
|
|
196
|
+
extractionContext = readExtractionContext(topogramRoot);
|
|
190
197
|
const workflowPresets = buildWorkflowPresetState({
|
|
191
198
|
workspace: topogramRoot,
|
|
192
199
|
selectors: workflowPresetSelectors(taskModeResult.artifact, context.providerId, context.presetId, "resolved-workflow-context")
|
|
@@ -195,7 +202,8 @@ function runResolvedWorkflowContext(context, selectors) {
|
|
|
195
202
|
readJson(adoptionPlanPath(topogramRoot)),
|
|
196
203
|
taskModeResult.artifact,
|
|
197
204
|
maintainedBundleResult?.artifact?.maintained_boundary || null,
|
|
198
|
-
workflowPresets
|
|
205
|
+
workflowPresets,
|
|
206
|
+
extractionContext
|
|
199
207
|
);
|
|
200
208
|
}
|
|
201
209
|
return printJson(buildResolvedWorkflowContextPayload({
|
|
@@ -205,6 +213,7 @@ function runResolvedWorkflowContext(context, selectors) {
|
|
|
205
213
|
reviewBoundary: sliceResult?.artifact?.review_boundary || null,
|
|
206
214
|
maintainedBoundary: maintainedBundleResult?.artifact?.maintained_boundary || null,
|
|
207
215
|
generatorTargets,
|
|
216
|
+
extractionContext,
|
|
208
217
|
selectors: workflowPresetSelectors(taskModeResult.artifact, context.providerId, context.presetId, "resolved-workflow-context")
|
|
209
218
|
}));
|
|
210
219
|
}
|
|
@@ -4,6 +4,7 @@ import fs from "node:fs";
|
|
|
4
4
|
import path from "node:path";
|
|
5
5
|
|
|
6
6
|
import { generateWorkspace } from "../../../generator.js";
|
|
7
|
+
import { TOPOGRAM_IMPORT_FILE } from "../../../import/provenance.js";
|
|
7
8
|
import { formatValidationErrors } from "../../../validator.js";
|
|
8
9
|
import { buildChangePlanPayload } from "../../../agent-ops/query-builders.js";
|
|
9
10
|
import { resolveTopoRoot } from "../../../workspace-paths.js";
|
|
@@ -196,6 +197,73 @@ export function readJson(filePath) {
|
|
|
196
197
|
return JSON.parse(fs.readFileSync(filePath, "utf8"));
|
|
197
198
|
}
|
|
198
199
|
|
|
200
|
+
/**
|
|
201
|
+
* @param {AnyRecord} record
|
|
202
|
+
* @param {string} provenancePath
|
|
203
|
+
* @returns {AnyRecord}
|
|
204
|
+
*/
|
|
205
|
+
export function buildExtractionContext(record, provenancePath) {
|
|
206
|
+
const extractorPackages = /** @type {AnyRecord[]} */ (Array.isArray(record.extract?.extractorPackages)
|
|
207
|
+
? record.extract.extractorPackages
|
|
208
|
+
: []);
|
|
209
|
+
const packageBackedExtractors = extractorPackages
|
|
210
|
+
.filter((entry) => entry?.source === "package")
|
|
211
|
+
.map((entry) => ({
|
|
212
|
+
id: entry.id || null,
|
|
213
|
+
version: entry.version || null,
|
|
214
|
+
packageName: entry.packageName || null,
|
|
215
|
+
extractors: Array.isArray(entry.extractors) ? entry.extractors : [],
|
|
216
|
+
manifestPath: entry.manifestPath || null
|
|
217
|
+
}));
|
|
218
|
+
const bundledExtractors = extractorPackages
|
|
219
|
+
.filter((entry) => entry?.source === "bundled")
|
|
220
|
+
.map((entry) => ({
|
|
221
|
+
id: entry.id || null,
|
|
222
|
+
version: entry.version || null,
|
|
223
|
+
extractors: Array.isArray(entry.extractors) ? entry.extractors : []
|
|
224
|
+
}));
|
|
225
|
+
return {
|
|
226
|
+
type: "extraction_context",
|
|
227
|
+
provenance_path: provenancePath,
|
|
228
|
+
kind: record.kind || null,
|
|
229
|
+
extracted_at: record.extractedAt || null,
|
|
230
|
+
refreshed_at: record.refreshedAt || null,
|
|
231
|
+
source_path: record.source?.path || null,
|
|
232
|
+
tracks: Array.isArray(record.extract?.tracks) ? record.extract.tracks : [],
|
|
233
|
+
findings_count: record.extract?.findingsCount || 0,
|
|
234
|
+
candidate_counts: record.extract?.candidateCounts || {},
|
|
235
|
+
package_backed_extractors: packageBackedExtractors,
|
|
236
|
+
bundled_extractors: bundledExtractors,
|
|
237
|
+
summary: {
|
|
238
|
+
package_backed_extractor_count: packageBackedExtractors.length,
|
|
239
|
+
bundled_extractor_count: bundledExtractors.length,
|
|
240
|
+
source_file_count: Array.isArray(record.files) ? record.files.length : 0
|
|
241
|
+
},
|
|
242
|
+
next_commands: [
|
|
243
|
+
"topogram extract check",
|
|
244
|
+
"topogram extract plan",
|
|
245
|
+
"topogram adopt --list",
|
|
246
|
+
"topogram adopt <selector> --dry-run"
|
|
247
|
+
],
|
|
248
|
+
safety_notes: [
|
|
249
|
+
"Extractor packages are evidence producers only; review candidates before canonical adoption.",
|
|
250
|
+
"Use dry-run adoption before --write, especially when package-backed extractors contributed candidates."
|
|
251
|
+
]
|
|
252
|
+
};
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
/**
|
|
256
|
+
* @param {string} topogramRoot
|
|
257
|
+
* @returns {AnyRecord|null}
|
|
258
|
+
*/
|
|
259
|
+
export function readExtractionContext(topogramRoot) {
|
|
260
|
+
const provenancePath = path.join(path.dirname(topogramRoot), TOPOGRAM_IMPORT_FILE);
|
|
261
|
+
if (!fs.existsSync(provenancePath)) {
|
|
262
|
+
return null;
|
|
263
|
+
}
|
|
264
|
+
return buildExtractionContext(readJson(provenancePath), provenancePath);
|
|
265
|
+
}
|
|
266
|
+
|
|
199
267
|
/**
|
|
200
268
|
* @param {AnyRecord} options
|
|
201
269
|
* @returns {boolean}
|
package/src/cli/help.js
CHANGED
|
@@ -71,6 +71,7 @@ export function printUsage(options = {}) {
|
|
|
71
71
|
console.log(" or: topogram extractor list [--json]");
|
|
72
72
|
console.log(" or: topogram extractor show <id-or-package> [--json]");
|
|
73
73
|
console.log(" or: topogram extractor check <path-or-package> [--json]");
|
|
74
|
+
console.log(" or: topogram extractor scaffold <target> [--track <track>] [--package <name>] [--id <manifest-id>] [--json]");
|
|
74
75
|
console.log(" or: topogram extractor policy init [path] [--json]");
|
|
75
76
|
console.log(" or: topogram extractor policy status [path] [--json]");
|
|
76
77
|
console.log(" or: topogram extractor policy check [path] [--json]");
|
|
@@ -111,6 +112,7 @@ export function printUsage(options = {}) {
|
|
|
111
112
|
console.log(" topogram extractor list");
|
|
112
113
|
console.log(" topogram extractor show @topogram/extractor-prisma-db");
|
|
113
114
|
console.log(" topogram extractor check ./extractor-package");
|
|
115
|
+
console.log(" topogram extractor scaffold ./topogram-extractor-node-cli --track cli --package @scope/topogram-extractor-node-cli");
|
|
114
116
|
console.log(" topogram extractor policy check");
|
|
115
117
|
console.log(" topogram extract ./express-api --out ./extracted-topogram --from api --extractor @topogram/extractor-express-api");
|
|
116
118
|
console.log(" topogram generate");
|
|
@@ -0,0 +1,482 @@
|
|
|
1
|
+
// @ts-check
|
|
2
|
+
|
|
3
|
+
import fs from "node:fs";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
|
|
6
|
+
import { stableStringify } from "../format.js";
|
|
7
|
+
import { EXTRACTOR_TRACKS } from "./registry.js";
|
|
8
|
+
|
|
9
|
+
const DEFAULT_TRACK = "cli";
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* @typedef {Object} ExtractorScaffoldOptions
|
|
13
|
+
* @property {string|null|undefined} [packageName]
|
|
14
|
+
* @property {string|null|undefined} [manifestId]
|
|
15
|
+
* @property {string|null|undefined} [track]
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
/**
|
|
19
|
+
* @param {string} value
|
|
20
|
+
* @returns {string}
|
|
21
|
+
*/
|
|
22
|
+
function slugify(value) {
|
|
23
|
+
return String(value || "extractor")
|
|
24
|
+
.trim()
|
|
25
|
+
.toLowerCase()
|
|
26
|
+
.replace(/[^a-z0-9]+/g, "-")
|
|
27
|
+
.replace(/^-+|-+$/g, "") || "extractor";
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* @param {string} target
|
|
32
|
+
* @returns {string}
|
|
33
|
+
*/
|
|
34
|
+
function defaultPackageName(target) {
|
|
35
|
+
const basename = slugify(path.basename(path.resolve(target)));
|
|
36
|
+
return basename.startsWith("topogram-extractor-") ? basename : `topogram-extractor-${basename}`;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* @param {string} value
|
|
41
|
+
* @returns {boolean}
|
|
42
|
+
*/
|
|
43
|
+
function isValidPackageName(value) {
|
|
44
|
+
return /^(?:@[a-z0-9][a-z0-9._-]*\/)?[a-z0-9][a-z0-9._-]*$/.test(value);
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* @returns {string}
|
|
49
|
+
*/
|
|
50
|
+
function currentCliVersion() {
|
|
51
|
+
try {
|
|
52
|
+
const packageJson = JSON.parse(fs.readFileSync(new URL("../../package.json", import.meta.url), "utf8"));
|
|
53
|
+
return typeof packageJson.version === "string" && packageJson.version ? packageJson.version : "latest";
|
|
54
|
+
} catch {
|
|
55
|
+
return "latest";
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* @param {string} packageName
|
|
61
|
+
* @param {string} track
|
|
62
|
+
* @returns {string}
|
|
63
|
+
*/
|
|
64
|
+
function extractorId(packageName, track) {
|
|
65
|
+
const bareName = packageName.split("/").pop() || packageName;
|
|
66
|
+
return `${track}.${slugify(bareName.replace(/^topogram-extractor-/, "").replace(/^extractor-/, ""))}`;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
/**
|
|
70
|
+
* @param {string} track
|
|
71
|
+
* @returns {{ stack: Record<string, string>, capabilities: Record<string, boolean>, candidateKinds: string[], fixtureFiles: Record<string, string> }}
|
|
72
|
+
*/
|
|
73
|
+
function trackDefaults(track) {
|
|
74
|
+
if (track === "db") {
|
|
75
|
+
return {
|
|
76
|
+
stack: { domain: "database", framework: "scaffold" },
|
|
77
|
+
capabilities: { schema: true },
|
|
78
|
+
candidateKinds: ["entity"],
|
|
79
|
+
fixtureFiles: {
|
|
80
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-db", private: true })}\n`,
|
|
81
|
+
"src/schema.sql": "create table scaffold_records (id text primary key, name text not null);\n"
|
|
82
|
+
}
|
|
83
|
+
};
|
|
84
|
+
}
|
|
85
|
+
if (track === "api") {
|
|
86
|
+
return {
|
|
87
|
+
stack: { runtime: "node", framework: "scaffold" },
|
|
88
|
+
capabilities: { routes: true },
|
|
89
|
+
candidateKinds: ["capability", "route", "stack"],
|
|
90
|
+
fixtureFiles: {
|
|
91
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-api", private: true })}\n`,
|
|
92
|
+
"src/server.js": "app.get('/scaffold-records', listScaffoldRecords);\n"
|
|
93
|
+
}
|
|
94
|
+
};
|
|
95
|
+
}
|
|
96
|
+
if (track === "ui") {
|
|
97
|
+
return {
|
|
98
|
+
stack: { framework: "scaffold-ui" },
|
|
99
|
+
capabilities: { screens: true, flows: true },
|
|
100
|
+
candidateKinds: ["screen", "route", "flow"],
|
|
101
|
+
fixtureFiles: {
|
|
102
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-ui", private: true })}\n`,
|
|
103
|
+
"src/routes/scaffold-records.jsx": "export default function ScaffoldRecords() { return <main>Scaffold records</main>; }\n"
|
|
104
|
+
}
|
|
105
|
+
};
|
|
106
|
+
}
|
|
107
|
+
if (track === "workflows") {
|
|
108
|
+
return {
|
|
109
|
+
stack: { domain: "workflow" },
|
|
110
|
+
capabilities: { workflows: true },
|
|
111
|
+
candidateKinds: ["workflow"],
|
|
112
|
+
fixtureFiles: {
|
|
113
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-workflows", private: true })}\n`,
|
|
114
|
+
"docs/workflows.md": "# Scaffold workflow\n\n- draft\n- review\n- complete\n"
|
|
115
|
+
}
|
|
116
|
+
};
|
|
117
|
+
}
|
|
118
|
+
if (track === "verification") {
|
|
119
|
+
return {
|
|
120
|
+
stack: { domain: "verification" },
|
|
121
|
+
capabilities: { verifications: true },
|
|
122
|
+
candidateKinds: ["verification"],
|
|
123
|
+
fixtureFiles: {
|
|
124
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-verification", private: true, scripts: { test: "node test.js" } })}\n`,
|
|
125
|
+
"test.js": "console.log('scaffold verification fixture');\n"
|
|
126
|
+
}
|
|
127
|
+
};
|
|
128
|
+
}
|
|
129
|
+
return {
|
|
130
|
+
stack: { runtime: "node", framework: "generic-cli" },
|
|
131
|
+
capabilities: { commands: true, options: true, effects: true },
|
|
132
|
+
candidateKinds: ["command", "capability", "cli_surface"],
|
|
133
|
+
fixtureFiles: {
|
|
134
|
+
"package.json": `${stableStringify({ name: "topogram-extractor-fixture-cli", private: true, bin: { scaffold: "./bin/scaffold.js" } })}\n`,
|
|
135
|
+
"bin/scaffold.js": "#!/usr/bin/env node\nconsole.log('Usage: scaffold check --json');\n"
|
|
136
|
+
}
|
|
137
|
+
};
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
/**
 * Build the JavaScript source text of the `candidates` object that the
 * scaffolded adapter (see `adapterSource`) embeds in its `extract()` result.
 * The returned string is JS object-literal source (not JSON) and is spliced
 * verbatim into the generated `index.cjs`.
 *
 * @param {string} track - Extractor track ("db", "api", "ui", "workflows",
 *   "verification"); any other value falls through to the CLI-shaped default.
 * @returns {string} Source text of a track-shaped candidates object literal.
 */
function candidateSourceForTrack(track) {
  // Every branch emits low-confidence placeholder candidates tagged with the
  // "package-extractor-scaffold" provenance marker so adopters can tell
  // scaffold output apart from real extraction evidence. Do not add comments
  // inside the template literals: they would be emitted into generated code.
  if (track === "db") {
    return `{
  entities: [{
    id_hint: "entity_scaffold_record",
    label: "Scaffold Record",
    confidence: "low",
    provenance: ["package-extractor-scaffold"],
    fields: [{ name: "id", field_type: "string", required: true }]
  }]
}`;
  }
  if (track === "api") {
    return `{
  capabilities: [{
    id_hint: "cap_list_scaffold_records",
    label: "List scaffold records",
    confidence: "low",
    provenance: ["package-extractor-scaffold"]
  }],
  routes: [{
    method: "GET",
    path: "/scaffold-records",
    capability_hint: "cap_list_scaffold_records",
    confidence: "low",
    provenance: ["package-extractor-scaffold"]
  }],
  stacks: ["scaffold-api"]
}`;
  }
  if (track === "ui") {
    return `{
  screens: [{
    id_hint: "screen_scaffold_records",
    label: "Scaffold Records",
    screen_kind: "list",
    route_path: "/scaffold-records",
    confidence: "low",
    provenance: ["package-extractor-scaffold"]
  }],
  routes: [{
    id_hint: "route_scaffold_records",
    path: "/scaffold-records",
    screen_id: "screen_scaffold_records",
    confidence: "low",
    provenance: ["package-extractor-scaffold"]
  }],
  flows: [],
  stacks: ["scaffold-ui"]
}`;
  }
  if (track === "workflows") {
    return `{
  workflows: [{
    id_hint: "workflow_scaffold_review",
    label: "Scaffold Review",
    confidence: "low",
    provenance: ["package-extractor-scaffold"]
  }]
}`;
  }
  if (track === "verification") {
    return `{
  verifications: [{
    id_hint: "verification_scaffold_check",
    label: "Scaffold Check",
    confidence: "low",
    provenance: ["package-extractor-scaffold"]
  }],
  frameworks: ["scaffold"],
  scripts: [{
    id_hint: "script_scaffold_check",
    command: "npm test",
    confidence: "low",
    provenance: ["package-extractor-scaffold"]
  }]
}`;
  }
  // Default: CLI track — a single read-only command surface.
  return `{
  commands: [{
    command_id: "scaffold_check",
    label: "Scaffold Check",
    usage: "scaffold check --json",
    provenance: ["package-extractor-scaffold"]
  }],
  capabilities: [{
    id_hint: "cap_scaffold_check",
    label: "Run scaffold check",
    command_id: "scaffold_check",
    provenance: ["package-extractor-scaffold"]
  }],
  surfaces: [{
    id_hint: "proj_cli_surface",
    commands: ["scaffold_check"],
    options: [{ command_id: "scaffold_check", name: "json", flag: "--json", type: "boolean", required: false }],
    effects: [{ command_id: "scaffold_check", effect: "read_only" }],
    provenance: ["package-extractor-scaffold"]
  }]
}`;
}
|
|
244
|
+
|
|
245
|
+
/**
 * Build the CommonJS adapter source (`index.cjs`) for a scaffolded extractor
 * pack. The generated module requires the sibling `topogram-extractor.json`
 * manifest and exposes one extractor whose `detect()` always matches (score 1)
 * and whose `extract()` returns a single placeholder finding plus the
 * track-shaped candidates from `candidateSourceForTrack`.
 *
 * @param {string} track - Extractor track used to pick the candidates shape.
 * @param {string} extractor - Extractor id written into the generated adapter.
 * @returns {string} CommonJS source text for the generated `index.cjs`.
 */
function adapterSource(track, extractor) {
  // Everything inside the template below is emitted verbatim into the
  // generated file; the interpolations inline the id/track as JS string
  // literals and splice in the candidates object-literal source.
  return `const manifest = require("./topogram-extractor.json");

exports.manifest = manifest;
exports.extractors = [{
  id: ${JSON.stringify(extractor)},
  track: ${JSON.stringify(track)},
  detect(context) {
    return { score: 1, reasons: ["Scaffold extractor runs against the included fixture."] };
  },
  extract(context) {
    return {
      findings: [{
        kind: "scaffold_finding",
        message: "Replace this scaffold extractor with precise framework evidence.",
        evidence: ["fixtures/basic-source"]
      }],
      candidates: ${candidateSourceForTrack(track)},
      diagnostics: []
    };
  }
}];
`;
}
|
274
|
+
|
|
275
|
+
/**
 * Build the source of the generated `scripts/check-extractor.mjs` smoke
 * script. At runtime the generated script shells out to the Topogram CLI
 * (`$TOPOGRAM_BIN`, defaulting to `topogram`) to run
 * extractor-check / extract / extract-plan / query / adopt against the
 * bundled fixture, and throws if any step fails or if the fixture files were
 * mutated (verified by comparing before/after content snapshots).
 *
 * @param {string} packageName - Fallback package name baked into the script;
 *   the script prefers the `name` from its own package.json at runtime.
 * @param {string} track - Extractor track passed to `topogram extract --from`.
 * @returns {string} ESM source text for the generated check script.
 */
function checkScriptSource(packageName, track) {
  // NOTE: everything inside the template below is emitted verbatim into the
  // generated script. Escaped sequences (\`, \${, \\n) are deliberate — they
  // must survive into the generated file — so do not "fix" them, and do not
  // add comments inside the template.
  return `import childProcess from "node:child_process";
import fs from "node:fs";
import os from "node:os";
import path from "node:path";

const packageJson = JSON.parse(fs.readFileSync(new URL("../package.json", import.meta.url), "utf8"));
const packageName = packageJson.name || ${JSON.stringify(packageName)};
const track = ${JSON.stringify(track)};
const topogramBin = process.env.TOPOGRAM_BIN || "topogram";
const root = process.cwd();

function run(args, options = {}) {
  const result = childProcess.spawnSync(topogramBin, args, {
    cwd: options.cwd || root,
    encoding: "utf8",
    env: { ...process.env, FORCE_COLOR: "0" },
    maxBuffer: 1024 * 1024 * 10
  });
  if (result.status !== 0) {
    process.stderr.write(result.stdout || "");
    process.stderr.write(result.stderr || "");
    throw new Error(\`Command failed: \${topogramBin} \${args.join(" ")}\`);
  }
  return result.stdout;
}

function snapshotFixture() {
  const fixtureRoot = path.join(root, "fixtures", "basic-source");
  const files = [];
  function visit(dir) {
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      const absolute = path.join(dir, entry.name);
      if (entry.isDirectory()) visit(absolute);
      else if (entry.isFile()) files.push([path.relative(fixtureRoot, absolute), fs.readFileSync(absolute, "utf8")]);
    }
  }
  visit(fixtureRoot);
  return JSON.stringify(files.sort());
}

run(["extractor", "check", "."]);

const before = snapshotFixture();
const tmp = fs.mkdtempSync(path.join(os.tmpdir(), "topogram-extractor-smoke."));
const policyPath = path.join(tmp, "topogram.extractor-policy.json");
fs.writeFileSync(policyPath, JSON.stringify({
  version: "0.1",
  allowedPackageScopes: [],
  allowedPackages: [packageName],
  pinnedVersions: { [packageName]: "1" },
  enabledPackages: []
}, null, 2) + "\\n", "utf8");

const extracted = path.join(tmp, "extracted");
run([
  "extract",
  path.join(root, "fixtures", "basic-source"),
  "--out",
  extracted,
  "--from",
  track,
  "--extractor",
  ".",
  "--extractor-policy",
  policyPath,
  "--json"
]);
run(["extract", "plan", extracted, "--json"]);
run(["query", "extract-plan", path.join(extracted, "topo"), "--json"]);
run(["adopt", "--list", extracted, "--json"]);

const after = snapshotFixture();
if (after !== before) {
  throw new Error("Extractor smoke mutated fixture source files.");
}

console.log(\`Extractor package smoke passed for \${packageName}.\`);
`;
}
|
|
360
|
+
|
|
361
|
+
/**
 * Write every entry of `files` to disk as UTF-8, creating parent directory
 * chains as needed, and report which paths were written.
 *
 * @param {Record<string, string>} files - Map of file path to file contents.
 * @returns {string[]} The written file paths, sorted lexicographically.
 */
function writeFiles(files) {
  const targets = Object.keys(files);
  for (const target of targets) {
    // Ensure the parent directory exists before the write.
    fs.mkdirSync(path.dirname(target), { recursive: true });
    fs.writeFileSync(target, files[target], "utf8");
  }
  return [...targets].sort();
}
|
|
374
|
+
|
|
375
|
+
/**
 * Scaffold a new Topogram extractor pack on disk.
 *
 * Generates a self-contained npm package — manifest, CommonJS adapter,
 * smoke-check script, README, and a track-specific fixture project — that an
 * author then replaces with real extraction logic.
 *
 * @param {string} target - Directory to create the pack in; must be missing
 *   or an existing empty directory.
 * @param {ExtractorScaffoldOptions} [options] - Optional `track`,
 *   `packageName`, and `manifestId` overrides.
 * @returns {{ ok: boolean, target: string, packageName: string|null, manifestId: string|null, track: string|null, files: string[], nextCommands: string[], errors: string[] }}
 *   Result payload; on failure `ok` is false and `errors` names the problem.
 */
export function scaffoldExtractorPack(target, options = {}) {
  const absoluteTarget = path.resolve(target || "");
  if (!target || target.startsWith("-")) {
    return { ok: false, target: absoluteTarget, packageName: null, manifestId: null, track: null, files: [], nextCommands: [], errors: ["Usage: topogram extractor scaffold <target> [--track <track>] [--package <name>] [--id <manifest-id>]"] };
  }
  if (fs.existsSync(absoluteTarget)) {
    // Guard explicitly against a non-directory target: readdirSync on a plain
    // file would throw ENOTDIR instead of producing the error payload callers
    // expect from every other failure path.
    if (!fs.statSync(absoluteTarget).isDirectory()) {
      return { ok: false, target: absoluteTarget, packageName: null, manifestId: null, track: null, files: [], nextCommands: [], errors: [`Extractor scaffold target '${absoluteTarget}' already exists and is not a directory.`] };
    }
    if (fs.readdirSync(absoluteTarget).length > 0) {
      return { ok: false, target: absoluteTarget, packageName: null, manifestId: null, track: null, files: [], nextCommands: [], errors: [`Extractor scaffold target '${absoluteTarget}' already exists and is not empty.`] };
    }
  }

  const track = options.track || DEFAULT_TRACK;
  if (!EXTRACTOR_TRACKS.includes(track)) {
    return { ok: false, target: absoluteTarget, packageName: null, manifestId: null, track, files: [], nextCommands: [], errors: [`Extractor track '${track}' is not supported. Expected one of: ${EXTRACTOR_TRACKS.join(", ")}.`] };
  }

  const packageName = options.packageName || defaultPackageName(absoluteTarget);
  if (!isValidPackageName(packageName)) {
    return { ok: false, target: absoluteTarget, packageName, manifestId: null, track, files: [], nextCommands: [], errors: [`Extractor package name '${packageName}' is invalid. Use a lowercase npm package name such as @scope/topogram-extractor-example.`] };
  }
  const manifestId = options.manifestId || packageName;
  const extractor = extractorId(packageName, track);
  const defaults = trackDefaults(track);
  // Manifest declares the pack's track, capabilities, and extractor ids for
  // `topogram extractor check` / policy enforcement.
  const manifest = {
    id: manifestId,
    version: "1",
    tracks: [track],
    source: "package",
    package: packageName,
    stack: defaults.stack,
    capabilities: defaults.capabilities,
    candidateKinds: defaults.candidateKinds,
    evidenceTypes: ["runtime_source"],
    extractors: [extractor]
  };
  // package.json for the generated pack; `npm run check` drives the smoke
  // script, and the current CLI version is pinned as a dev dependency.
  const packageJson = {
    name: packageName,
    version: "0.1.0",
    private: true,
    description: "Topogram extractor pack scaffold.",
    main: "index.cjs",
    files: [
      "index.cjs",
      "topogram-extractor.json",
      "README.md",
      "scripts",
      "fixtures"
    ],
    scripts: {
      check: "node ./scripts/check-extractor.mjs"
    },
    devDependencies: {
      "@topogram/cli": `^${currentCliVersion()}`
    }
  };
  // Map of absolute path -> file contents for everything the scaffold writes.
  const files = {
    [path.join(absoluteTarget, "package.json")]: `${stableStringify(packageJson)}\n`,
    [path.join(absoluteTarget, "topogram-extractor.json")]: `${stableStringify(manifest)}\n`,
    [path.join(absoluteTarget, "index.cjs")]: adapterSource(track, extractor),
    [path.join(absoluteTarget, "scripts", "check-extractor.mjs")]: checkScriptSource(packageName, track),
    [path.join(absoluteTarget, "README.md")]: `# ${packageName}

This is a Topogram extractor pack scaffold for the \`${track}\` track.

## Author Loop

\`\`\`bash
npm install
npm run check
\`\`\`

\`npm run check\` runs:

- \`topogram extractor check .\`
- \`topogram extract ./fixtures/basic-source --out <tmp> --from ${track} --extractor .\`
- \`topogram extract plan <tmp>\`
- \`topogram query extract-plan <tmp>/topo\`
- \`topogram adopt --list <tmp>\`

Replace the scaffold adapter in \`index.cjs\` with precise, read-only source evidence.
Extractor packages must not mutate source files, write canonical \`topo/**\`, install
packages, perform network access, or define adoption semantics.
`
  };
  // Track-specific fixture project that the smoke check extracts against.
  for (const [relative, contents] of Object.entries(defaults.fixtureFiles)) {
    files[path.join(absoluteTarget, "fixtures", "basic-source", relative)] = contents;
  }

  const written = writeFiles(files);
  return {
    ok: true,
    target: absoluteTarget,
    packageName,
    manifestId,
    track,
    // Report target-relative, forward-slash paths regardless of platform.
    files: written.map((filePath) => path.relative(absoluteTarget, filePath).replace(/\\/g, "/")),
    nextCommands: [
      `cd ${absoluteTarget}`,
      "npm install",
      "npm run check",
      "topogram extractor check ."
    ],
    errors: []
  };
}
|