@topogram/cli 0.3.84 → 0.3.86

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@topogram/cli",
3
- "version": "0.3.84",
3
+ "version": "0.3.86",
4
4
  "description": "Topogram CLI for checking Topogram workspaces and generating app bundles.",
5
5
  "license": "Apache-2.0",
6
6
  "repository": {
@@ -44,6 +44,7 @@ export function printExtractorHelp() {
44
44
  console.log(" - extractor packages execute only during `topogram extract` or `topogram extractor check`.");
45
45
  console.log(" - extractor packages emit review-only candidates; core owns persistence, reconcile, and adoption.");
46
46
  console.log(` - package-backed extractors are governed by ${EXTRACTOR_POLICY_FILE}; bundled topogram/* extractors are allowed.`);
47
+ console.log(" - safe loop: list/show -> install -> policy pin -> check -> extract -> plan/list -> adopt --dry-run -> adopt --write.");
47
48
  console.log("");
48
49
  console.log("Examples:");
49
50
  console.log(" topogram extractor list");
@@ -121,17 +122,110 @@ function extractorPolicyPinCommand(packageName, version) {
121
122
  }
122
123
 
123
124
  /**
124
- * @param {string|null|undefined} packageName
125
+ * @param {string|null|undefined} extractorRef
125
126
  * @param {string[]} tracks
126
127
  * @param {string|null|undefined} exampleSource
127
128
  * @returns {string|null}
128
129
  */
129
- function extractorRunCommand(packageName, tracks, exampleSource) {
130
- if (!packageName) {
130
+ function extractorRunCommand(extractorRef, tracks, exampleSource) {
131
+ if (!extractorRef) {
131
132
  return null;
132
133
  }
133
134
  const trackList = tracks.length > 0 ? tracks.join(",") : "db,api,ui,cli";
134
- return `topogram extract ${exampleSource || "./existing-app"} --out ./imported-topogram --from ${trackList} --extractor ${packageName}`;
135
+ return `topogram extract ${exampleSource || "./existing-app"} --out ./imported-topogram --from ${trackList} --extractor ${extractorRef}`;
136
+ }
137
+
138
+ /**
139
+ * @param {Record<string, any>|null|undefined} extractor
140
+ * @returns {Record<string, any>}
141
+ */
142
+ function buildExtractorReviewWorkflow(extractor = null) {
143
+ const packageName = extractor?.package || extractor?.packageName || null;
144
+ const extractorRef = packageName || extractor?.id || "<extractor>";
145
+ const tracks = Array.isArray(extractor?.tracks) ? extractor.tracks : [];
146
+ const version = extractor?.version || "1";
147
+ const bundledExtractor = extractor?.source === "bundled" && !packageName;
148
+ const installCommand = extractor?.installCommand || (packageName ? packageExtractorInstallCommand(packageName) : null);
149
+ const policyPinCommand = extractor?.policyPinCommand || (packageName ? extractorPolicyPinCommand(packageName, version) : null);
150
+ const extractCommand = extractor?.extractCommand || extractorRunCommand(extractorRef, tracks, extractor?.exampleSource);
151
+ return {
152
+ type: "extractor_review_workflow",
153
+ packageCodeExecution: {
154
+ list: false,
155
+ show: false,
156
+ policy: false,
157
+ check: true,
158
+ extract: true
159
+ },
160
+ steps: [
161
+ {
162
+ id: "discover",
163
+ command: "topogram extractor list",
164
+ packageCodeExecution: false,
165
+ purpose: "Find bundled and first-party package-backed extractors by track."
166
+ },
167
+ {
168
+ id: "inspect",
169
+ command: `topogram extractor show ${extractorRef}`,
170
+ packageCodeExecution: false,
171
+ purpose: "Read manifest purpose, tracks, install command, policy pin command, and extract command."
172
+ },
173
+ ...(installCommand ? [{
174
+ id: "install",
175
+ command: installCommand,
176
+ packageCodeExecution: false,
177
+ purpose: "Install the extractor package explicitly; Topogram does not install it during extraction."
178
+ }] : []),
179
+ ...(policyPinCommand ? [{
180
+ id: "pin_policy",
181
+ command: policyPinCommand,
182
+ packageCodeExecution: false,
183
+ purpose: "Allow and pin the extractor manifest version before execution."
184
+ }] : []),
185
+ ...(!bundledExtractor ? [{
186
+ id: "check",
187
+ command: `topogram extractor check ${extractorRef}`,
188
+ packageCodeExecution: true,
189
+ purpose: "Load the adapter and run a minimal smoke extraction against a synthetic fixture."
190
+ }] : []),
191
+ ...(extractCommand ? [{
192
+ id: "extract",
193
+ command: extractCommand,
194
+ packageCodeExecution: true,
195
+ purpose: "Read brownfield source and write review-only candidates plus extraction provenance."
196
+ }] : []),
197
+ {
198
+ id: "review_plan",
199
+ command: "topogram extract plan ./imported-topogram",
200
+ packageCodeExecution: false,
201
+ purpose: "Review bundles, extractor provenance, candidate counts, and safety notes."
202
+ },
203
+ {
204
+ id: "list_selectors",
205
+ command: "topogram adopt --list ./imported-topogram",
206
+ packageCodeExecution: false,
207
+ purpose: "Choose an explicit adoption selector."
208
+ },
209
+ {
210
+ id: "dry_run_adoption",
211
+ command: "topogram adopt <selector> ./imported-topogram --dry-run",
212
+ packageCodeExecution: false,
213
+ purpose: "Preview canonical topo writes before changing project-owned records."
214
+ },
215
+ {
216
+ id: "write_reviewed_adoption",
217
+ command: "topogram adopt <selector> ./imported-topogram --write",
218
+ packageCodeExecution: false,
219
+ purpose: "Write only reviewed canonical records; extractor packages never own adoption semantics."
220
+ }
221
+ ],
222
+ safetyNotes: [
223
+ "topogram extractor list/show/policy do not load package adapter code.",
224
+ "topogram extractor check and topogram extract load package adapter code.",
225
+ "Extractor packages emit review-only candidates; core owns persistence, reconcile, adoption, and canonical topo writes.",
226
+ "Run dry-run adoption before --write."
227
+ ]
228
+ };
135
229
  }
136
230
 
137
231
  /**
@@ -188,8 +282,8 @@ function extractorManifestSummary(manifest, metadata = {}) {
188
282
  const version = manifest.version || firstParty?.version || "1";
189
283
  const installCommand = packageName ? packageExtractorInstallCommand(packageName) : null;
190
284
  const policyPinCommand = extractorPolicyPinCommand(packageName, version);
191
- const extractCommand = extractorRunCommand(packageName, tracks, firstParty?.exampleSource);
192
- return {
285
+ const extractCommand = extractorRunCommand(packageName || manifest.id, tracks, firstParty?.exampleSource);
286
+ const summary = {
193
287
  id: manifest.id,
194
288
  version,
195
289
  label: firstParty?.label || null,
@@ -215,6 +309,10 @@ function extractorManifestSummary(manifest, metadata = {}) {
215
309
  packageRoot: metadata.packageRoot || null,
216
310
  errors: metadata.errors || []
217
311
  };
312
+ return {
313
+ ...summary,
314
+ reviewWorkflow: buildExtractorReviewWorkflow(summary)
315
+ };
218
316
  }
219
317
 
220
318
  /**
@@ -222,7 +320,7 @@ function extractorManifestSummary(manifest, metadata = {}) {
222
320
  * @returns {Record<string, any>}
223
321
  */
224
322
  function firstPartyExtractorPlaceholder(info) {
225
- return {
323
+ const summary = {
226
324
  id: info.id,
227
325
  version: info.version,
228
326
  label: info.label,
@@ -248,11 +346,15 @@ function firstPartyExtractorPlaceholder(info) {
248
346
  packageRoot: null,
249
347
  errors: []
250
348
  };
349
+ return {
350
+ ...summary,
351
+ reviewWorkflow: buildExtractorReviewWorkflow(summary)
352
+ };
251
353
  }
252
354
 
253
355
  /**
254
356
  * @param {string} cwd
255
- * @returns {{ ok: boolean, cwd: string, extractors: Record<string, any>[], groups: Record<string, ReturnType<typeof groupExtractorEntry>[]>, summary: Record<string, number> }}
357
+ * @returns {{ ok: boolean, cwd: string, extractors: Record<string, any>[], groups: Record<string, ReturnType<typeof groupExtractorEntry>[]>, reviewWorkflow: Record<string, any>, summary: Record<string, number> }}
256
358
  */
257
359
  export function buildExtractorListPayload(cwd) {
258
360
  const extractors = EXTRACTOR_MANIFESTS
@@ -314,6 +416,7 @@ export function buildExtractorListPayload(cwd) {
314
416
  cwd,
315
417
  extractors,
316
418
  groups,
419
+ reviewWorkflow: buildExtractorReviewWorkflow(),
317
420
  summary: {
318
421
  total: extractors.length,
319
422
  bundled: extractors.filter((extractor) => extractor.source === "bundled").length,
@@ -367,6 +470,7 @@ export function printExtractorList(payload) {
367
470
  console.log("Topogram extractors");
368
471
  console.log(`Bundled: ${payload.summary.bundled}; package-backed: ${payload.summary.package}; installed: ${payload.summary.installed}; first-party missing: ${payload.summary.missingFirstParty || 0}`);
369
472
  console.log("Package-backed extractors are listed for discovery even before they are installed.");
473
+ console.log("Selection loop: list/show (no package code) -> install -> policy pin -> extractor check (loads adapter) -> extract -> extract plan/adopt --list -> adopt --dry-run -> adopt --write.");
370
474
  console.log("");
371
475
  for (const track of EXTRACTOR_TRACK_ORDER) {
372
476
  const entries = (payload.groups || {})[track] || [];
@@ -426,10 +530,18 @@ export function printExtractorShow(payload) {
426
530
  console.log(`Extractors: ${extractor.extractors.join(", ") || "none"}`);
427
531
  console.log(`Candidate kinds: ${extractor.candidateKinds.join(", ") || "none"}`);
428
532
  console.log(`Evidence types: ${extractor.evidenceTypes.join(", ") || "none"}`);
533
+ if (extractor.reviewWorkflow?.steps?.length) {
534
+ console.log("");
535
+ console.log("Review loop:");
536
+ for (const step of extractor.reviewWorkflow.steps) {
537
+ console.log(`- ${step.id}: ${step.command}`);
538
+ console.log(` ${step.purpose}`);
539
+ }
540
+ }
429
541
  }
430
542
 
431
543
  /**
432
- * @param {ReturnType<typeof checkExtractorPack>} payload
544
+ * @param {ReturnType<typeof checkExtractorPack> & { reviewWorkflow?: Record<string, any> }} payload
433
545
  * @returns {void}
434
546
  */
435
547
  export function printExtractorCheck(payload) {
@@ -453,12 +565,19 @@ export function printExtractorCheck(payload) {
453
565
  console.log("");
454
566
  console.log(`Smoke output: ${payload.smoke.extractors} extractor(s), ${payload.smoke.findings} finding(s), ${payload.smoke.candidateKeys} candidate bucket(s), ${payload.smoke.diagnostics} diagnostic(s)`);
455
567
  }
568
+ if (payload.reviewWorkflow?.steps?.length) {
569
+ console.log("");
570
+ console.log("Next review loop:");
571
+ for (const step of payload.reviewWorkflow.steps.filter((/** @type {Record<string, any>} */ step) => ["extract", "review_plan", "list_selectors", "dry_run_adoption", "write_reviewed_adoption"].includes(step.id))) {
572
+ console.log(`- ${step.command}`);
573
+ }
574
+ }
456
575
  for (const error of payload.errors || []) console.log(`Error: ${error}`);
457
576
  }
458
577
 
459
578
  /**
460
579
  * @param {string} projectPath
461
- * @returns {{ ok: boolean, path: string, exists: boolean, policy: any, defaulted: boolean, packages: any[], diagnostics: any[], errors: string[], summary: Record<string, number> }}
580
+ * @returns {{ ok: boolean, path: string, exists: boolean, policy: any, defaulted: boolean, packages: any[], diagnostics: any[], errors: string[], reviewWorkflow: Record<string, any>, summary: Record<string, number> }}
462
581
  */
463
582
  export function buildExtractorPolicyStatusPayload(projectPath) {
464
583
  const root = path.resolve(projectPath || ".");
@@ -504,6 +623,7 @@ export function buildExtractorPolicyStatusPayload(projectPath) {
504
623
  packages,
505
624
  diagnostics,
506
625
  errors,
626
+ reviewWorkflow: buildExtractorReviewWorkflow(packages[0] || null),
507
627
  summary: {
508
628
  enabledPackages: policy.enabledPackages.length,
509
629
  installed: packages.filter((item) => item.installed).length,
@@ -567,6 +687,7 @@ export function printExtractorPolicyStatus(payload) {
567
687
  console.log(`Enabled packages: ${payload.summary.enabledPackages}`);
568
688
  console.log("Default allowlist: bundled topogram/* extractors and first-party @topogram/extractor-* packages.");
569
689
  console.log("Install behavior: Topogram does not install extractor packages automatically.");
690
+ console.log("Review loop: install package -> pin policy -> extractor check -> extract -> extract plan/adopt --list -> adopt --dry-run -> adopt --write.");
570
691
  for (const item of payload.packages) {
571
692
  console.log(`- ${item.packageName}@${item.version}: ${item.installed ? "installed" : "missing"}, ${item.allowed ? "allowed" : "denied"}`);
572
693
  if (!item.installed && item.installCommand) console.log(` Install: ${item.installCommand}`);
@@ -611,9 +732,25 @@ export function runExtractorCommand(context) {
611
732
  const { commandArgs, inputPath, json, cwd } = context;
612
733
  if (commandArgs.extractorCommand === "check") {
613
734
  const payload = checkExtractorPack(inputPath || "", { cwd });
614
- if (json) console.log(stableStringify(payload));
615
- else printExtractorCheck(payload);
616
- return payload.ok ? 0 : 1;
735
+ const summary = payload.manifest
736
+ ? extractorManifestSummary(payload.manifest, {
737
+ installed: Boolean(payload.manifest),
738
+ manifestPath: payload.manifestPath,
739
+ packageRoot: payload.packageRoot,
740
+ errors: payload.errors
741
+ })
742
+ : null;
743
+ const augmentedPayload = /** @type {ReturnType<typeof checkExtractorPack> & { reviewWorkflow?: Record<string, any> }} */ (payload);
744
+ augmentedPayload.reviewWorkflow = buildExtractorReviewWorkflow(summary || {
745
+ id: inputPath || "<extractor>",
746
+ package: payload.packageName || null,
747
+ packageName: payload.packageName || null,
748
+ tracks: payload.manifest?.tracks || [],
749
+ version: payload.manifest?.version || "1"
750
+ });
751
+ if (json) console.log(stableStringify(augmentedPayload));
752
+ else printExtractorCheck(augmentedPayload);
753
+ return augmentedPayload.ok ? 0 : 1;
617
754
  }
618
755
  if (commandArgs.extractorCommand === "scaffold") {
619
756
  const payload = scaffoldExtractorPack(inputPath || "", {
@@ -3,6 +3,7 @@
3
3
  import fs from "node:fs";
4
4
  import path from "node:path";
5
5
 
6
+ import { readExtractionContext } from "../../../extraction-context.js";
6
7
  import { runWorkflow } from "../../../workflows.js";
7
8
  import {
8
9
  countByField,
@@ -60,6 +61,73 @@ export const BROWNFIELD_BROAD_ADOPT_SELECTORS = [
60
61
  }
61
62
  ];
62
63
 
64
+ /**
65
+ * @param {AnyRecord|null|undefined} extractionContext
66
+ * @param {AnyRecord[]} bundleSurfaces
67
+ * @param {string} bundleSlug
68
+ * @returns {string[]}
69
+ */
70
+ function tracksForBundle(extractionContext, bundleSurfaces, bundleSlug) {
71
+ const tracks = new Set(bundleSurfaces.map((surface) => surface.track).filter(Boolean));
72
+ if (bundleSlug === "database" || bundleSlug.includes("db")) tracks.add("db");
73
+ if (bundleSlug === "cli") tracks.add("cli");
74
+ if (bundleSlug === "ui") tracks.add("ui");
75
+ if (bundleSlug.includes("api")) tracks.add("api");
76
+ const knownTracks = new Set(Array.isArray(extractionContext?.tracks) ? extractionContext.tracks : []);
77
+ return [...tracks].filter((track) => knownTracks.size === 0 || knownTracks.has(track)).sort((left, right) => left.localeCompare(right));
78
+ }
79
+
80
+ /**
81
+ * @param {AnyRecord} extractor
82
+ * @param {Set<string>} tracks
83
+ * @returns {boolean}
84
+ */
85
+ function extractorMatchesTracks(extractor, tracks) {
86
+ const extractorTracks = Array.isArray(extractor.tracks) ? extractor.tracks : [];
87
+ return tracks.size === 0 || extractorTracks.length === 0 || extractorTracks.some((track) => tracks.has(track));
88
+ }
89
+
90
+ /**
91
+ * @param {AnyRecord|null|undefined} extractionContext
92
+ * @param {AnyRecord[]} bundleSurfaces
93
+ * @param {string} bundleSlug
94
+ * @returns {AnyRecord|null}
95
+ */
96
+ function extractorContextForBundle(extractionContext, bundleSurfaces, bundleSlug) {
97
+ if (!extractionContext) {
98
+ return null;
99
+ }
100
+ const tracks = tracksForBundle(extractionContext, bundleSurfaces, bundleSlug);
101
+ const trackSet = new Set(tracks);
102
+ const packageBackedExtractors = (extractionContext.package_backed_extractors || [])
103
+ .filter((/** @type {AnyRecord} */ extractor) => extractorMatchesTracks(extractor, trackSet))
104
+ .map((/** @type {AnyRecord} */ extractor) => ({
105
+ id: extractor.id || null,
106
+ version: extractor.version || null,
107
+ packageName: extractor.packageName || null,
108
+ extractors: Array.isArray(extractor.extractors) ? extractor.extractors : [],
109
+ tracks: Array.isArray(extractor.tracks) ? extractor.tracks : []
110
+ }));
111
+ const bundledExtractors = (extractionContext.bundled_extractors || [])
112
+ .filter((/** @type {AnyRecord} */ extractor) => extractorMatchesTracks(extractor, trackSet))
113
+ .map((/** @type {AnyRecord} */ extractor) => ({
114
+ id: extractor.id || null,
115
+ version: extractor.version || null,
116
+ extractors: Array.isArray(extractor.extractors) ? extractor.extractors : [],
117
+ tracks: Array.isArray(extractor.tracks) ? extractor.tracks : []
118
+ }));
119
+ if (packageBackedExtractors.length === 0 && bundledExtractors.length === 0) {
120
+ return null;
121
+ }
122
+ return {
123
+ tracks,
124
+ packageBackedExtractors,
125
+ bundledExtractors,
126
+ candidateCounts: extractionContext.candidate_counts || {},
127
+ safetyNotes: extractionContext.safety_notes || []
128
+ };
129
+ }
130
+
63
131
  /**
64
132
  * @param {string} inputPath
65
133
  * @returns {AnyRecord}
@@ -84,7 +152,8 @@ export function readImportAdoptionArtifacts(inputPath) {
84
152
  paths,
85
153
  adoptionPlan: JSON.parse(fs.readFileSync(paths.adoptionPlanAgent, "utf8")),
86
154
  adoptionStatus: readJsonIfExists(paths.adoptionStatus),
87
- reconcileReport: readJsonIfExists(paths.reconcileReport)
155
+ reconcileReport: readJsonIfExists(paths.reconcileReport),
156
+ extractionContext: readExtractionContext(topogramRoot)
88
157
  };
89
158
  }
90
159
 
@@ -118,9 +187,10 @@ export function buildBrownfieldBroadAdoptSelectors(projectRoot, adoptionPlan) {
118
187
  * @param {AnyRecord} adoptionPlan
119
188
  * @param {AnyRecord} adoptionStatus
120
189
  * @param {string} projectRoot
190
+ * @param {AnyRecord|null|undefined} extractionContext
121
191
  * @returns {AnyRecord}
122
192
  */
123
- export function summarizeImportAdoption(adoptionPlan, adoptionStatus, projectRoot) {
193
+ export function summarizeImportAdoption(adoptionPlan, adoptionStatus, projectRoot, extractionContext = null) {
124
194
  const surfaces = adoptionPlan.imported_proposal_surfaces || [];
125
195
  /** @type {string[]} */
126
196
  const slugs = [];
@@ -162,7 +232,8 @@ export function summarizeImportAdoption(adoptionPlan, adoptionStatus, projectRoo
162
232
  complete: Boolean(priority?.is_complete) || (pendingItems.length === 0 && blockedItems.length === 0 && appliedItems.length > 0),
163
233
  evidenceScore: priority?.evidence_score || 0,
164
234
  why: priority?.operator_summary?.whyThisBundle || null,
165
- nextCommand: importAdoptCommand(projectRoot, `bundle:${slug}`, false)
235
+ nextCommand: importAdoptCommand(projectRoot, `bundle:${slug}`, false),
236
+ extractorContext: extractorContextForBundle(extractionContext, bundleSurfaces, slug)
166
237
  };
167
238
  });
168
239
  const nextBundle = bundles.find((bundle) => !bundle.complete && bundle.pendingItemCount > 0) || bundles.find((bundle) => !bundle.complete) || bundles[0] || null;
@@ -196,7 +267,7 @@ export function summarizeImportAdoption(adoptionPlan, adoptionStatus, projectRoo
196
267
  export function buildBrownfieldImportPlanPayload(inputPath) {
197
268
  const artifacts = readImportAdoptionArtifacts(inputPath);
198
269
  const adoptionStatus = runWorkflow("adoption-status", artifacts.projectRoot).summary || artifacts.adoptionStatus || {};
199
- const adoption = summarizeImportAdoption(artifacts.adoptionPlan, adoptionStatus, artifacts.projectRoot);
270
+ const adoption = summarizeImportAdoption(artifacts.adoptionPlan, adoptionStatus, artifacts.projectRoot, artifacts.extractionContext);
200
271
  return {
201
272
  ok: true,
202
273
  projectRoot: artifacts.projectRoot,
@@ -207,6 +278,14 @@ export function buildBrownfieldImportPlanPayload(inputPath) {
207
278
  adoptionStatus: artifacts.paths.adoptionStatus,
208
279
  reconcileReport: artifacts.paths.reconcileReport
209
280
  },
281
+ extractorContext: artifacts.extractionContext ? {
282
+ provenancePath: artifacts.extractionContext.provenance_path,
283
+ packageBackedExtractors: artifacts.extractionContext.package_backed_extractors,
284
+ bundledExtractors: artifacts.extractionContext.bundled_extractors,
285
+ candidateCounts: artifacts.extractionContext.candidate_counts,
286
+ safetyNotes: artifacts.extractionContext.safety_notes,
287
+ summary: artifacts.extractionContext.summary
288
+ } : null,
210
289
  ...adoption,
211
290
  commands: {
212
291
  check: `topogram extract check ${importProjectCommandPath(artifacts.projectRoot)}`,
@@ -229,6 +308,14 @@ export function printBrownfieldImportPlan(payload) {
229
308
  if (bundle.why) {
230
309
  console.log(` ${bundle.why}`);
231
310
  }
311
+ if (bundle.extractorContext?.packageBackedExtractors?.length > 0) {
312
+ const names = bundle.extractorContext.packageBackedExtractors
313
+ .map((/** @type {AnyRecord} */ extractor) => extractor.packageName || extractor.id)
314
+ .filter(Boolean)
315
+ .join(", ");
316
+ console.log(` Extractors: ${names}`);
317
+ console.log(" Safety: package-backed extractor candidates are review-only; run dry-run adoption before --write.");
318
+ }
232
319
  console.log(` Preview: ${bundle.nextCommand}`);
233
320
  }
234
321
  if (payload.risks.length > 0) {
@@ -257,6 +344,7 @@ export function buildBrownfieldImportAdoptListPayload(inputPath) {
257
344
  appliedItemCount: bundle.appliedItemCount,
258
345
  blockedItemCount: bundle.blockedItemCount,
259
346
  complete: bundle.complete,
347
+ extractorContext: bundle.extractorContext || null,
260
348
  previewCommand: importAdoptCommand(plan.projectRoot, `bundle:${bundle.bundle}`, false),
261
349
  writeCommand: importAdoptCommand(plan.projectRoot, `bundle:${bundle.bundle}`, true)
262
350
  }));
@@ -270,6 +358,7 @@ export function buildBrownfieldImportAdoptListPayload(inputPath) {
270
358
  selectors,
271
359
  broadSelectorCount: broadSelectors.length,
272
360
  broadSelectors,
361
+ extractorContext: plan.extractorContext,
273
362
  nextCommand: selectors.find((/** @type {AnyRecord} */ selector) => !selector.complete)?.previewCommand || plan.commands.status
274
363
  };
275
364
  }
@@ -286,6 +375,14 @@ export function printBrownfieldImportAdoptList(payload) {
286
375
  }
287
376
  for (const selector of payload.selectors) {
288
377
  console.log(`- ${selector.selector}: ${selector.itemCount} item(s), ${selector.pendingItemCount} pending, ${selector.appliedItemCount} applied`);
378
+ if (selector.extractorContext?.packageBackedExtractors?.length > 0) {
379
+ const names = selector.extractorContext.packageBackedExtractors
380
+ .map((/** @type {AnyRecord} */ extractor) => extractor.packageName || extractor.id)
381
+ .filter(Boolean)
382
+ .join(", ");
383
+ console.log(` Extractors: ${names}`);
384
+ console.log(" Safety: package-backed extractor candidates are review-only; run dry-run adoption before --write.");
385
+ }
289
386
  console.log(` Preview: ${selector.previewCommand}`);
290
387
  console.log(` Write: ${selector.writeCommand}`);
291
388
  }
@@ -4,9 +4,9 @@ import fs from "node:fs";
4
4
  import path from "node:path";
5
5
 
6
6
  import { generateWorkspace } from "../../../generator.js";
7
- import { TOPOGRAM_IMPORT_FILE } from "../../../import/provenance.js";
8
7
  import { formatValidationErrors } from "../../../validator.js";
9
8
  import { buildChangePlanPayload } from "../../../agent-ops/query-builders.js";
9
+ import { buildExtractionContext, readExtractionContext } from "../../../extraction-context.js";
10
10
  import { resolveTopoRoot } from "../../../workspace-paths.js";
11
11
 
12
12
  /**
@@ -197,72 +197,7 @@ export function readJson(filePath) {
197
197
  return JSON.parse(fs.readFileSync(filePath, "utf8"));
198
198
  }
199
199
 
200
- /**
201
- * @param {AnyRecord} record
202
- * @param {string} provenancePath
203
- * @returns {AnyRecord}
204
- */
205
- export function buildExtractionContext(record, provenancePath) {
206
- const extractorPackages = /** @type {AnyRecord[]} */ (Array.isArray(record.extract?.extractorPackages)
207
- ? record.extract.extractorPackages
208
- : []);
209
- const packageBackedExtractors = extractorPackages
210
- .filter((entry) => entry?.source === "package")
211
- .map((entry) => ({
212
- id: entry.id || null,
213
- version: entry.version || null,
214
- packageName: entry.packageName || null,
215
- extractors: Array.isArray(entry.extractors) ? entry.extractors : [],
216
- manifestPath: entry.manifestPath || null
217
- }));
218
- const bundledExtractors = extractorPackages
219
- .filter((entry) => entry?.source === "bundled")
220
- .map((entry) => ({
221
- id: entry.id || null,
222
- version: entry.version || null,
223
- extractors: Array.isArray(entry.extractors) ? entry.extractors : []
224
- }));
225
- return {
226
- type: "extraction_context",
227
- provenance_path: provenancePath,
228
- kind: record.kind || null,
229
- extracted_at: record.extractedAt || null,
230
- refreshed_at: record.refreshedAt || null,
231
- source_path: record.source?.path || null,
232
- tracks: Array.isArray(record.extract?.tracks) ? record.extract.tracks : [],
233
- findings_count: record.extract?.findingsCount || 0,
234
- candidate_counts: record.extract?.candidateCounts || {},
235
- package_backed_extractors: packageBackedExtractors,
236
- bundled_extractors: bundledExtractors,
237
- summary: {
238
- package_backed_extractor_count: packageBackedExtractors.length,
239
- bundled_extractor_count: bundledExtractors.length,
240
- source_file_count: Array.isArray(record.files) ? record.files.length : 0
241
- },
242
- next_commands: [
243
- "topogram extract check",
244
- "topogram extract plan",
245
- "topogram adopt --list",
246
- "topogram adopt <selector> --dry-run"
247
- ],
248
- safety_notes: [
249
- "Extractor packages are evidence producers only; review candidates before canonical adoption.",
250
- "Use dry-run adoption before --write, especially when package-backed extractors contributed candidates."
251
- ]
252
- };
253
- }
254
-
255
- /**
256
- * @param {string} topogramRoot
257
- * @returns {AnyRecord|null}
258
- */
259
- export function readExtractionContext(topogramRoot) {
260
- const provenancePath = path.join(path.dirname(topogramRoot), TOPOGRAM_IMPORT_FILE);
261
- if (!fs.existsSync(provenancePath)) {
262
- return null;
263
- }
264
- return buildExtractionContext(readJson(provenancePath), provenancePath);
265
- }
200
+ export { buildExtractionContext, readExtractionContext };
266
201
 
267
202
  /**
268
203
  * @param {AnyRecord} options
@@ -0,0 +1,79 @@
1
+ // @ts-check
2
+
3
+ import fs from "node:fs";
4
+ import path from "node:path";
5
+
6
+ import { TOPOGRAM_IMPORT_FILE } from "./import/provenance.js";
7
+
8
+ /**
9
+ * @typedef {Record<string, any>} AnyRecord
10
+ */
11
+
12
+ /**
13
+ * @param {AnyRecord} record
14
+ * @param {string} provenancePath
15
+ * @returns {AnyRecord}
16
+ */
17
+ export function buildExtractionContext(record, provenancePath) {
18
+ const extractorPackages = /** @type {AnyRecord[]} */ (Array.isArray(record.extract?.extractorPackages)
19
+ ? record.extract.extractorPackages
20
+ : []);
21
+ const packageBackedExtractors = extractorPackages
22
+ .filter((entry) => entry?.source === "package")
23
+ .map((entry) => ({
24
+ id: entry.id || null,
25
+ version: entry.version || null,
26
+ packageName: entry.packageName || null,
27
+ extractors: Array.isArray(entry.extractors) ? entry.extractors : [],
28
+ tracks: Array.isArray(entry.tracks) ? entry.tracks : [],
29
+ manifestPath: entry.manifestPath || null
30
+ }));
31
+ const bundledExtractors = extractorPackages
32
+ .filter((entry) => entry?.source === "bundled")
33
+ .map((entry) => ({
34
+ id: entry.id || null,
35
+ version: entry.version || null,
36
+ extractors: Array.isArray(entry.extractors) ? entry.extractors : [],
37
+ tracks: Array.isArray(entry.tracks) ? entry.tracks : []
38
+ }));
39
+ return {
40
+ type: "extraction_context",
41
+ provenance_path: provenancePath,
42
+ kind: record.kind || null,
43
+ extracted_at: record.extractedAt || null,
44
+ refreshed_at: record.refreshedAt || null,
45
+ source_path: record.source?.path || null,
46
+ tracks: Array.isArray(record.extract?.tracks) ? record.extract.tracks : [],
47
+ findings_count: record.extract?.findingsCount || 0,
48
+ candidate_counts: record.extract?.candidateCounts || {},
49
+ package_backed_extractors: packageBackedExtractors,
50
+ bundled_extractors: bundledExtractors,
51
+ summary: {
52
+ package_backed_extractor_count: packageBackedExtractors.length,
53
+ bundled_extractor_count: bundledExtractors.length,
54
+ source_file_count: Array.isArray(record.files) ? record.files.length : 0
55
+ },
56
+ next_commands: [
57
+ "topogram extract check",
58
+ "topogram extract plan",
59
+ "topogram adopt --list",
60
+ "topogram adopt <selector> --dry-run"
61
+ ],
62
+ safety_notes: [
63
+ "Extractor packages are evidence producers only; review candidates before canonical adoption.",
64
+ "Use dry-run adoption before --write, especially when package-backed extractors contributed candidates."
65
+ ]
66
+ };
67
+ }
68
+
69
+ /**
70
+ * @param {string} topogramRoot
71
+ * @returns {AnyRecord|null}
72
+ */
73
+ export function readExtractionContext(topogramRoot) {
74
+ const provenancePath = path.join(path.dirname(topogramRoot), TOPOGRAM_IMPORT_FILE);
75
+ if (!fs.existsSync(provenancePath)) {
76
+ return null;
77
+ }
78
+ return buildExtractionContext(JSON.parse(fs.readFileSync(provenancePath, "utf8")), provenancePath);
79
+ }
@@ -5,6 +5,7 @@ import {
5
5
  loadExtractorPackageAdapterForSpec,
6
6
  validateExtractorAdapter
7
7
  } from "./packages.js";
8
+ import { validateExtractorResult } from "./output.js";
8
9
 
9
10
  /**
10
11
  * @typedef {import("./registry.js").ExtractorManifest} ExtractorManifest
@@ -25,39 +26,6 @@ import {
25
26
  * @property {boolean} executesPackageCode
26
27
  */
27
28
 
28
- /**
29
- * @param {any} result
30
- * @returns {{ ok: boolean, message: string, smoke: { findings: number, candidateKeys: number, diagnostics: number }|null }}
31
- */
32
- function validateExtractResult(result) {
33
- if (!result || typeof result !== "object" || Array.isArray(result)) {
34
- return { ok: false, message: "extract(context) must return an object", smoke: null };
35
- }
36
- if (result.findings != null && !Array.isArray(result.findings)) {
37
- return { ok: false, message: "extract(context) findings must be an array when present", smoke: null };
38
- }
39
- if (result.diagnostics != null && !Array.isArray(result.diagnostics)) {
40
- return { ok: false, message: "extract(context) diagnostics must be an array when present", smoke: null };
41
- }
42
- if (!result.candidates || typeof result.candidates !== "object" || Array.isArray(result.candidates)) {
43
- return { ok: false, message: "extract(context) result must include a candidates object", smoke: null };
44
- }
45
- for (const [key, value] of Object.entries(result.candidates)) {
46
- if (!Array.isArray(value)) {
47
- return { ok: false, message: `extract(context) candidates.${key} must be an array`, smoke: null };
48
- }
49
- }
50
- return {
51
- ok: true,
52
- message: `extract(context) returned ${Object.keys(result.candidates).length} candidate bucket(s)`,
53
- smoke: {
54
- findings: Array.isArray(result.findings) ? result.findings.length : 0,
55
- candidateKeys: Object.keys(result.candidates).length,
56
- diagnostics: Array.isArray(result.diagnostics) ? result.diagnostics.length : 0
57
- }
58
- };
59
- }
60
-
61
29
  /**
62
30
  * @param {string} sourceSpec
63
31
  * @param {{ cwd?: string }} [options]
@@ -126,9 +94,9 @@ export function checkExtractorPack(sourceSpec, options = {}) {
126
94
  continue;
127
95
  }
128
96
  const result = extractor.extract(context) || { findings: [], candidates: {} };
129
- const validation = validateExtractResult(result);
97
+ const validation = validateExtractorResult(result, { track: extractor.track, strictCandidates: true });
130
98
  if (!validation.ok || !validation.smoke) {
131
- payload.errors.push(`Extractor '${extractor.id}' ${validation.message}.`);
99
+ payload.errors.push(...validation.errors.map((message) => `Extractor '${extractor.id}' ${message}.`));
132
100
  continue;
133
101
  }
134
102
  totalFindings += validation.smoke.findings;
@@ -152,4 +120,3 @@ export function checkExtractorPack(sourceSpec, options = {}) {
152
120
  payload.ok = payload.errors.length === 0;
153
121
  return payload;
154
122
  }
155
-
@@ -0,0 +1,220 @@
1
// @ts-check

/**
 * @typedef {Object} ExtractorResultValidationOptions
 * @property {string} [track]
 * @property {boolean} [strictCandidates]
 */

/**
 * @typedef {Object} ExtractorResultValidation
 * @property {boolean} ok
 * @property {string[]} errors
 * @property {{ findings: number, candidateKeys: number, diagnostics: number }|null} smoke
 */

/**
 * Review-candidate buckets each extractor track is allowed to emit.
 * @type {Record<string, Set<string>>}
 */
const TRACK_CANDIDATE_BUCKETS = {
  db: new Set(["entities", "enums", "relations", "indexes", "maintained_seams"]),
  api: new Set(["capabilities", "routes", "stacks"]),
  ui: new Set(["screens", "routes", "actions", "flows", "widgets", "shapes", "stacks"]),
  cli: new Set(["commands", "capabilities", "surfaces"]),
  workflows: new Set(["workflows", "workflow_states", "workflow_transitions"]),
  verification: new Set(["verifications", "scenarios", "frameworks", "scripts"])
};

// Top-level candidate bucket names extractors must never emit: adoption,
// canonical-file, and write concerns are owned by core, not extractors.
const DISALLOWED_BUCKETS = new Set([
  "adoption", "adoption_plan", "adoptionPlan",
  "canonical", "canonical_files", "canonicalFiles",
  "files", "patches",
  "project_config", "projectConfig",
  "topo", "topogram", "topogram_project", "topogramProject",
  "writeFiles", "writes", "writtenFiles"
]);

// Keys rejected anywhere inside an individual candidate record.
const DISALLOWED_RECORD_KEYS = new Set([
  "adoption", "adoptionPlan",
  "canonical", "canonicalFiles",
  "files", "patches", "receipt",
  "topo", "topogram",
  "write", "writeFiles", "writes", "writtenFiles"
]);

/**
 * Keys that carry local source/package file references. Deliberately excludes
 * command/route `path` and config target dotted `path` values.
 */
const PATH_KEYS = new Set([
  "configFile", "configPath",
  "file", "filePath",
  "migrationPath", "migrationsPath",
  "schemaPath", "snapshotPath",
  "sourceFile", "sourcePath", "source_path",
  "targetFile", "targetPath"
]);
81
+
82
/**
 * True when `value` is a non-null object that is not an array.
 * @param {unknown} value
 * @returns {value is Record<string, unknown>}
 */
function isPlainObject(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  return !Array.isArray(value);
}
89
+
90
/**
 * True when a candidate file reference is not a safe project-relative path:
 * absolute paths (POSIX leading `/`, Windows drive letter) or any path
 * containing a `..` segment. Backslashes are treated as separators so
 * Windows-style traversal (`..\x`) is caught as well — the original
 * substring checks missed backslash traversal, drive-letter absolutes,
 * and `..` segments such as `a/..`.
 * @param {string} candidatePath
 * @returns {boolean}
 */
function isUnsafeRelativePath(candidatePath) {
  // Normalize Windows separators so one segment check covers both styles.
  const normalized = candidatePath.replace(/\\/g, "/");
  if (normalized.startsWith("/") || /^[A-Za-z]:/.test(normalized)) {
    return true;
  }
  return normalized.split("/").includes("..");
}
97
+
98
/**
 * Fields that can establish a stable identity for a candidate in `bucket`.
 * An empty list means the candidate is already self-identifying
 * (a route with both `method` and `path` needs no extra id field).
 * @param {string} bucket
 * @param {Record<string, unknown>} candidate
 * @returns {string[]}
 */
function identityFieldsForBucket(bucket, candidate) {
  switch (bucket) {
    case "commands":
      return ["command_id", "id_hint"];
    case "routes": {
      const selfIdentifying = typeof candidate.method === "string" && typeof candidate.path === "string";
      return selfIdentifying ? [] : ["id_hint", "id"];
    }
    default:
      return ["id_hint", "id", "name"];
  }
}

/**
 * Checks that `candidate` carries at least one non-empty string identity
 * field for its bucket; returns a single-error list when none is present.
 * @param {string} bucket
 * @param {Record<string, unknown>} candidate
 * @param {string} pathLabel
 * @returns {string[]}
 */
function validateCandidateIdentity(bucket, candidate, pathLabel) {
  const fields = identityFieldsForBucket(bucket, candidate);
  if (fields.length === 0) {
    return [];
  }
  const hasIdentity = fields.some((field) => {
    const value = candidate[field];
    return typeof value === "string" && value.trim().length > 0;
  });
  return hasIdentity ? [] : [`${pathLabel} must include an identity field: ${fields.join(" or ")}`];
}
128
+
129
/**
 * Recursively scans a candidate value, appending to `errors` for any
 * disallowed record key (adoption/file-write concerns) or unsafe file
 * reference path. Flagged keys are not descended into. Mutates `errors`.
 * @param {unknown} value
 * @param {string} pathLabel
 * @param {string[]} errors
 * @returns {void}
 */
function validateNoUnsafeRecords(value, pathLabel, errors) {
  if (Array.isArray(value)) {
    value.forEach((entry, index) => {
      validateNoUnsafeRecords(entry, `${pathLabel}[${index}]`, errors);
    });
    return;
  }
  if (!isPlainObject(value)) {
    return;
  }
  for (const [key, child] of Object.entries(value)) {
    const childPath = `${pathLabel}.${key}`;
    if (DISALLOWED_RECORD_KEYS.has(key)) {
      errors.push(`${childPath} is not allowed in extractor candidate output; extractors emit review candidates, not adoption plans or files.`);
    } else if (PATH_KEYS.has(key) && typeof child === "string" && isUnsafeRelativePath(child)) {
      errors.push(`${childPath} must be a safe project-relative path.`);
    } else {
      validateNoUnsafeRecords(child, childPath, errors);
    }
  }
}
156
+
157
+ /**
158
+ * @param {unknown} result
159
+ * @param {ExtractorResultValidationOptions} [options]
160
+ * @returns {ExtractorResultValidation}
161
+ */
162
+ export function validateExtractorResult(result, options = {}) {
163
+ const errors = [];
164
+ if (!isPlainObject(result)) {
165
+ return { ok: false, errors: ["extract(context) must return an object"], smoke: null };
166
+ }
167
+ if (result.findings != null && !Array.isArray(result.findings)) {
168
+ errors.push("extract(context) findings must be an array when present");
169
+ }
170
+ if (result.diagnostics != null && !Array.isArray(result.diagnostics)) {
171
+ errors.push("extract(context) diagnostics must be an array when present");
172
+ }
173
+ if (!isPlainObject(result.candidates)) {
174
+ errors.push("extract(context) result must include a candidates object");
175
+ return { ok: false, errors, smoke: null };
176
+ }
177
+
178
+ const allowedBuckets = options.track ? TRACK_CANDIDATE_BUCKETS[options.track] : null;
179
+ const candidateKeys = Object.keys(result.candidates);
180
+ for (const [bucket, value] of Object.entries(result.candidates)) {
181
+ const bucketLabel = `extract(context) candidates.${bucket}`;
182
+ if (DISALLOWED_BUCKETS.has(bucket)) {
183
+ errors.push(`${bucketLabel} is not allowed; extractors must not return adoption plans, canonical files, patches, or topo writes.`);
184
+ continue;
185
+ }
186
+ if (options.strictCandidates && allowedBuckets && !allowedBuckets.has(bucket)) {
187
+ errors.push(`${bucketLabel} is not allowed for track '${options.track}'.`);
188
+ continue;
189
+ }
190
+ if (!Array.isArray(value)) {
191
+ errors.push(`${bucketLabel} must be an array`);
192
+ continue;
193
+ }
194
+ if (!options.strictCandidates) continue;
195
+ for (let index = 0; index < value.length; index += 1) {
196
+ const candidate = value[index];
197
+ const candidateLabel = `${bucketLabel}[${index}]`;
198
+ if (!isPlainObject(candidate)) {
199
+ errors.push(`${candidateLabel} must be an object.`);
200
+ continue;
201
+ }
202
+ errors.push(...validateCandidateIdentity(bucket, candidate, candidateLabel));
203
+ validateNoUnsafeRecords(candidate, candidateLabel, errors);
204
+ }
205
+ }
206
+
207
+ return {
208
+ ok: errors.length === 0,
209
+ errors,
210
+ smoke: errors.length === 0
211
+ ? {
212
+ findings: Array.isArray(result.findings) ? result.findings.length : 0,
213
+ candidateKeys: candidateKeys.length,
214
+ diagnostics: Array.isArray(result.diagnostics) ? result.diagnostics.length : 0
215
+ }
216
+ : null
217
+ };
218
+ }
219
+
220
+ export { TRACK_CANDIDATE_BUCKETS };
@@ -228,13 +228,19 @@ export function packageExtractorsForContext(context) {
228
228
  const bundledPack = getBundledExtractorPack(spec);
229
229
  if (bundledPack) {
230
230
  extractors.push(...bundledPack.extractors);
231
- provenance.push({ source: "bundled", id: bundledPack.manifest.id, version: bundledPack.manifest.version, extractors: bundledPack.manifest.extractors });
231
+ provenance.push({
232
+ source: "bundled",
233
+ id: bundledPack.manifest.id,
234
+ version: bundledPack.manifest.version,
235
+ tracks: bundledPack.manifest.tracks || [],
236
+ extractors: bundledPack.manifest.extractors
237
+ });
232
238
  continue;
233
239
  }
234
240
  const bundledExtractor = getBundledExtractorById(spec);
235
241
  if (bundledExtractor) {
236
242
  extractors.push(bundledExtractor);
237
- provenance.push({ source: "bundled", id: bundledExtractor.id, version: "1", extractors: [bundledExtractor.id] });
243
+ provenance.push({ source: "bundled", id: bundledExtractor.id, version: "1", tracks: bundledExtractor.track ? [bundledExtractor.track] : [], extractors: [bundledExtractor.id] });
238
244
  continue;
239
245
  }
240
246
  const packageManifest = loadExtractorPackageManifestForSpec(spec, { cwd });
@@ -273,6 +279,7 @@ export function packageExtractorsForContext(context) {
273
279
  id: packageManifest.manifest.id,
274
280
  version: packageManifest.manifest.version,
275
281
  packageName,
282
+ tracks: packageManifest.manifest.tracks || [],
276
283
  manifestPath: packageManifest.manifestPath,
277
284
  packageRoot: packageManifest.packageRoot,
278
285
  extractors: packageManifest.manifest.extractors
@@ -457,6 +457,11 @@ npm run check
457
457
  Replace the scaffold adapter in \`index.cjs\` with precise, read-only source evidence.
458
458
  Extractor packages must not mutate source files, write canonical \`topo/**\`, install
459
459
  packages, perform network access, or define adoption semantics.
460
+
461
+ Candidate output is validated by track. Return only review candidate buckets for
462
+ the declared track, give each candidate a stable identity, keep file evidence
463
+ project-relative, and never return files, patches, adoption plans, or write
464
+ instructions.
460
465
  `
461
466
  };
462
467
  for (const [relative, contents] of Object.entries(defaults.fixtureFiles)) {
@@ -3,6 +3,7 @@
3
3
  import { getEnrichersForTrack, getExtractorsForTrack } from "../registry.js";
4
4
  import { normalizeCandidatesForTrack } from "./candidates.js";
5
5
  import { packageExtractorsForContext } from "../../../extractor/packages.js";
6
+ import { validateExtractorResult } from "../../../extractor/output.js";
6
7
 
7
8
  /**
8
9
  * @param {any} context
@@ -140,25 +141,12 @@ function initialCandidatesForTrack(track) {
140
141
  */
141
142
function assertExtractorResultShape(extractor, result) {
  const label = extractor?.id || "unknown";
  // Package-sourced extractors are held to the strict per-candidate rules;
  // bundled extractors get only the structural checks.
  const validation = validateExtractorResult(result, {
    track: extractor?.track,
    strictCandidates: extractor?.source === "package"
  });
  if (validation.ok) {
    return;
  }
  // NOTE(review): unlike the previous inline checks, a result without a
  // `candidates` object now fails validation instead of being defaulted to
  // {} — confirm every bundled extractor returns one.
  const lines = validation.errors.map((message) => `Extractor '${label}' ${message}.`);
  throw new Error(lines.join("\n"));
}
164
152