@topogram/cli 0.3.74 → 0.3.76

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,7 @@
  // @ts-check

+ import path from "node:path";
+
  import {
    findImportFiles,
    idHintify,
@@ -12,6 +14,7 @@ import {

  const CLI_SOURCE_PATTERN = /(^|\/)(bin|cli|command|commands|parser|help)(\/|[-_.A-Za-z0-9]*\.(?:js|mjs|cjs|ts))$/i;
  const JS_SOURCE_PATTERN = /\.(?:js|mjs|cjs|ts)$/i;
+ const NON_AUTHORITATIVE_CLI_PATH_PATTERN = /(^|\/)(test|tests|__tests__|fixtures|fixture|expected|snapshots|snapshot|mock|mocks|candidates|docs-generated)(\/|$)|\.(?:test|spec)\.(?:js|mjs|cjs|ts)$/i;

  /**
   * @param {string} value
@@ -21,6 +24,18 @@ function normalizePath(value) {
    return value.replaceAll("\\", "/");
  }

+ /**
+  * CLI import should prefer public command surfaces, not tests or fixture strings
+  * that happen to look like command help.
+  * @param {any} paths
+  * @param {string} filePath
+  * @returns {boolean}
+  */
+ function isAuthoritativeCliSource(paths, filePath) {
+   const normalized = normalizePath(normalizeImportRelativePath(paths, filePath));
+   return !NON_AUTHORITATIVE_CLI_PATH_PATTERN.test(normalized);
+ }
+
  /**
   * @param {string} commandId
   * @returns {string}
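
Note: the new isAuthoritativeCliSource guard keeps test and fixture paths out of CLI
extraction. A minimal sketch of its effect, assuming normalizeImportRelativePath returns
the relative paths shown (the file names are illustrative, not from the package):

    isAuthoritativeCliSource(paths, "src/cli/run.js");             // true: real command surface
    isAuthoritativeCliSource(paths, "tests/cli/help.test.js");     // false: "tests" path segment
    isAuthoritativeCliSource(paths, "src/commands/list.spec.ts");  // false: ".spec.ts" suffix
    isAuthoritativeCliSource(paths, "fixtures/expected/usage.js"); // false: fixture directory
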
@@ -29,24 +44,44 @@ function capabilityIdForCommand(commandId) {
    return `cap_${idHintify(commandId)}`;
  }

+ /**
+  * @param {string} rawLine
+  * @returns {{ text: string, terminalOutput: boolean }}
+  */
+ function normalizePotentialHelpLine(rawLine) {
+   const trimmed = rawLine.trim();
+   const terminalOutput = /^(?:console\.(?:log|error|warn)|print)\(\s*["'`]/.test(trimmed) || /^echo\s+["'`]/.test(trimmed);
+   return {
+     terminalOutput,
+     text: trimmed
+       .replace(/^\s*(?:console\.(?:log|error|warn)\(|print\(|echo\s+)?["'`]*/, "")
+       .replace(/["'`),;]*\s*$/, "")
+       .trim()
+   };
+ }
+
  /**
   * @param {string} text
+  * @param {Set<string>} binNames
   * @returns {string[]}
   */
- function extractUsageLines(text) {
+ function extractUsageLines(text, binNames) {
    const lines = [];
    for (const rawLine of text.split(/\r?\n/)) {
-     const line = rawLine
-       .replace(/^\s*(?:console\.log\(|print\(|echo\s+)?["'`]*/, "")
-       .replace(/["'`),;]*\s*$/, "")
-       .trim();
+     const { text: line, terminalOutput } = normalizePotentialHelpLine(rawLine);
      if (!line) continue;
      const usageMatch = line.match(/(?:^|\b)Usage:\s*(.+)$/i);
      if (usageMatch?.[1]) {
        lines.push(usageMatch[1].trim());
        continue;
      }
-     if (/^[a-zA-Z][\w.-]+(?:\s+[a-zA-Z][\w:-]+)+(?:\s|$)/.test(line) && /(?:--[a-zA-Z][\w:-]*|<[^>]+>|\[[^\]]+\])/.test(line)) {
+     const firstToken = line.split(/\s+/)[0];
+     if (
+       terminalOutput &&
+       (binNames.size === 0 || binNames.has(firstToken)) &&
+       /^[a-zA-Z][\w.-]+(?:\s+[a-zA-Z][\w:-]+)+(?:\s|$)/.test(line) &&
+       /(?:--[a-zA-Z][\w:-]*|<[^>]+>|\[[^\]]+\])/.test(line)
+     ) {
        lines.push(line);
      }
    }
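
Note: extractUsageLines now accepts a non-"Usage:" candidate line only when it is actually
printed to a terminal and its first token matches a declared bin name (or no bin names are
known). A hedged sketch, with "mytool" as a hypothetical bin name:

    const binNames = new Set(["mytool"]);
    extractUsageLines('console.log("Usage: mytool sync [--dry-run]");', binNames);
    // -> ["mytool sync [--dry-run]"]   (an explicit Usage: prefix is always harvested)
    extractUsageLines('console.log("mytool deploy <target> --force");', binNames);
    // -> ["mytool deploy <target> --force"]   (printed, bin-prefixed, has flags/placeholders)
    extractUsageLines('const s = "othertool run <id>";', binNames);
    // -> []   (not terminal output, and the first token is not a declared bin)
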
@@ -167,19 +202,79 @@ function dedupeRecords(records, keyFn) {

  /**
   * @param {any} context
-  * @returns {{ packageFiles: string[], sourceFiles: string[] }}
+  * @param {string[]} packageFiles
+  * @returns {{ binNames: Set<string>, binTargets: Set<string>, findings: any[], provenance: string[] }}
+  */
+ function inspectPackageCliMetadata(context, packageFiles) {
+   const binNames = new Set();
+   const binTargets = new Set();
+   const findings = [];
+   const provenance = [];
+
+   for (const packagePath of packageFiles) {
+     const pkg = readJsonIfExists(packagePath);
+     if (!pkg) continue;
+     const relPath = normalizeImportRelativePath(context.paths, packagePath);
+     const bin = pkg.bin;
+     if (typeof bin === "string") {
+       const binName = pkg.name ? String(pkg.name).split("/").pop() : "cli";
+       binNames.add(binName);
+       binTargets.add(path.resolve(path.dirname(packagePath), bin));
+       provenance.push(`${relPath}#bin`);
+     } else if (bin && typeof bin === "object") {
+       for (const [name, target] of Object.entries(bin)) {
+         binNames.add(name);
+         if (typeof target === "string") {
+           binTargets.add(path.resolve(path.dirname(packagePath), target));
+         }
+       }
+       provenance.push(`${relPath}#bin`);
+     }
+     for (const [name, command] of Object.entries(pkg.scripts || {})) {
+       if (/^(cli|bin|start|check|test|verify)(:|$)/.test(name) || /\b(node|tsx|ts-node)\b.+\b(cli|bin)\b/i.test(String(command))) {
+         findings.push({
+           kind: "cli_script",
+           name,
+           command,
+           source: relPath
+         });
+       }
+     }
+   }
+
+   return { binNames, binTargets, findings, provenance };
+ }
+
+ /**
+  * @param {any} context
+  * @returns {{ packageFiles: string[], sourceFiles: string[], binNames: Set<string>, findings: any[], provenance: string[] }}
   */
  function discoverCliSources(context) {
    const packageFiles = findImportFiles(context.paths, (/** @type {string} */ filePath) => /package\.json$/i.test(filePath));
+   const packageMetadata = inspectPackageCliMetadata(context, packageFiles);
    const sourceFiles = findImportFiles(context.paths, (/** @type {string} */ filePath) => {
      const normalized = normalizePath(filePath);
+     if (!isAuthoritativeCliSource(context.paths, filePath)) {
+       return false;
+     }
      return JS_SOURCE_PATTERN.test(normalized) && (
        CLI_SOURCE_PATTERN.test(normalized) ||
        normalized.includes("/src/cli/") ||
        normalized.includes("/commands/")
      );
    });
-   return { packageFiles, sourceFiles };
+   for (const binTarget of packageMetadata.binTargets) {
+     if (JS_SOURCE_PATTERN.test(binTarget)) {
+       sourceFiles.push(binTarget);
+     }
+   }
+   return {
+     packageFiles,
+     sourceFiles: [...new Set(sourceFiles)].sort(),
+     binNames: packageMetadata.binNames,
+     findings: packageMetadata.findings,
+     provenance: packageMetadata.provenance
+   };
  }

  export const genericCliExtractor = {
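
Note: discoverCliSources now folds package.json bin metadata into discovery, so a bin
entry point is scanned even when it lives outside the cli/commands conventions. A sketch
under assumed inputs (the package name and paths are hypothetical):

    // /repo/pkg/package.json contains { "name": "@acme/mytool", "bin": "./bin/run.js" }
    const meta = inspectPackageCliMetadata(context, ["/repo/pkg/package.json"]);
    // meta.binNames   -> Set { "mytool" }               (unscoped tail of pkg.name for string bins)
    // meta.binTargets -> Set { "/repo/pkg/bin/run.js" } (resolved against the package.json directory)
    // discoverCliSources then appends each .js/.mjs/.cjs/.ts bin target to sourceFiles,
    // dedupes, and sorts.
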
@@ -187,7 +282,7 @@ export const genericCliExtractor = {
    track: "cli",
    /** @param {any} context */
    detect(context) {
-     const { packageFiles, sourceFiles } = discoverCliSources(context);
+     const { packageFiles, sourceFiles, binNames } = discoverCliSources(context);
      const hasBin = packageFiles.some((filePath) => {
        const pkg = readJsonIfExists(filePath);
        return Boolean(pkg?.bin);
@@ -196,53 +291,25 @@ export const genericCliExtractor = {
      return {
        score,
        reasons: [
-         hasBin ? "package.json declares a CLI bin" : null,
+         hasBin ? `package.json declares ${binNames.size || 1} CLI bin${binNames.size === 1 ? "" : "s"}` : null,
          sourceFiles.length ? `${sourceFiles.length} CLI-like source files found` : null
        ].filter(Boolean)
      };
    },
    /** @param {any} context */
    extract(context) {
-     const { packageFiles, sourceFiles } = discoverCliSources(context);
-     const findings = [];
+     const { sourceFiles, binNames, findings, provenance } = discoverCliSources(context);
      const commands = [];
      const options = [];
      const outputs = [];
      const effects = [];
      const examples = [];
      const capabilities = [];
-     const binNames = new Set();
-     const provenance = [];
-
-     for (const packagePath of packageFiles) {
-       const pkg = readJsonIfExists(packagePath);
-       if (!pkg) continue;
-       const relPath = normalizeImportRelativePath(context.paths, packagePath);
-       const bin = pkg.bin;
-       if (typeof bin === "string") {
-         binNames.add(pkg.name ? String(pkg.name).split("/").pop() : "cli");
-       } else if (bin && typeof bin === "object") {
-         for (const name of Object.keys(bin)) {
-           binNames.add(name);
-         }
-       }
-       for (const [name, command] of Object.entries(pkg.scripts || {})) {
-         if (/^(cli|bin|start|check|test|verify)(:|$)/.test(name) || /\b(node|tsx|ts-node)\b.+\b(cli|bin)\b/i.test(String(command))) {
-           findings.push({
-             kind: "cli_script",
-             name,
-             command,
-             source: relPath
-           });
-         }
-       }
-       provenance.push(`${relPath}#bin`);
-     }

      for (const sourcePath of sourceFiles) {
        const sourceText = readTextIfExists(sourcePath) || "";
        const relPath = normalizeImportRelativePath(context.paths, sourcePath);
-       const usageLines = extractUsageLines(sourceText);
+       const usageLines = extractUsageLines(sourceText, binNames);
        if (usageLines.length === 0) {
          continue;
        }
@@ -10,6 +10,7 @@ import {
    slugify,
    titleCase
  } from "../../core/shared.js";
+ import { inferDrizzleMaintainedDbSeams } from "./maintained-seams.js";

  function splitTopLevelEntries(block) {
    const entries = [];
@@ -165,21 +166,50 @@ function parseDrizzleTables(schemaText) {
    };
  }

+ function drizzleConfigFiles(context) {
+   return findImportFiles(context.paths, (filePath) => /drizzle\.config\.(ts|js|mjs|cjs)$/i.test(path.basename(filePath)));
+ }
+
+ function configuredSchemaFiles(context, configFiles) {
+   const files = [];
+   for (const configFile of configFiles) {
+     const configText = context.helpers.readTextIfExists(configFile) || "";
+     for (const match of configText.matchAll(/\bschema\s*:\s*["'`]([^"'`*]+)["'`]/g)) {
+       const absoluteSchemaPath = path.resolve(path.dirname(configFile), match[1]);
+       if (context.helpers.readTextIfExists(absoluteSchemaPath) !== null) {
+         files.push(absoluteSchemaPath);
+       }
+     }
+   }
+   return files;
+ }
+
+ function findDrizzleSchemaFiles(context) {
+   const configFiles = drizzleConfigFiles(context);
+   const conventionalSchemaFiles = findImportFiles(context.paths, (filePath) =>
+     /(?:^|\/)(?:src\/db\/schema|src\/schema|db\/schema|schema)\.(ts|js|mjs|cjs)$/i.test(relativeTo(context.paths.workspaceRoot, filePath).replaceAll(path.sep, "/"))
+   );
+   return [...new Set([
+     ...configuredSchemaFiles(context, configFiles),
+     ...conventionalSchemaFiles
+   ])].sort();
+ }
+
  export const drizzleExtractor = {
    id: "db.drizzle",
    track: "db",
    detect(context) {
-     const hasConfig = findImportFiles(context.paths, (filePath) => /drizzle\.config\.(ts|js|mjs|cjs)$/i.test(path.basename(filePath))).length > 0;
-     const hasSchema = findImportFiles(context.paths, (filePath) => /src\/schema\.(ts|js|mjs|cjs)$/i.test(filePath)).length > 0;
+     const hasConfig = drizzleConfigFiles(context).length > 0;
+     const hasSchema = findDrizzleSchemaFiles(context).length > 0;
      return {
        score: hasConfig || hasSchema ? 95 : 0,
        reasons: hasConfig || hasSchema ? ["Found Drizzle config/schema source"] : []
      };
    },
    extract(context) {
-     const schemaFiles = findImportFiles(context.paths, (filePath) => /src\/schema\.(ts|js|mjs|cjs)$/i.test(filePath));
+     const schemaFiles = findDrizzleSchemaFiles(context);
      const findings = [];
-     const candidates = { entities: [], enums: [], relations: [], indexes: [] };
+     const candidates = { entities: [], enums: [], relations: [], indexes: [], maintained_seams: [] };
      for (const filePath of schemaFiles) {
        const parsed = parseDrizzleTables(context.helpers.readTextIfExists(filePath) || "");
        const provenance = relativeTo(context.paths.repoRoot, filePath);
@@ -237,6 +267,7 @@ export const drizzleExtractor = {
      candidates.enums = dedupeCandidateRecords(candidates.enums, (record) => record.id_hint);
      candidates.relations = dedupeCandidateRecords(candidates.relations, (record) => record.id_hint);
      candidates.indexes = dedupeCandidateRecords(candidates.indexes, (record) => record.id_hint);
+     candidates.maintained_seams = inferDrizzleMaintainedDbSeams(context, schemaFiles);
      return { findings, candidates };
    }
  };
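
Note: Drizzle schema discovery now reads drizzle.config.* instead of hardcoding
src/schema.*. An illustrative config showing the two keys the extractors look for
(paths are hypothetical):

    // drizzle.config.ts
    export default {
      schema: "./src/db/schema.ts", // matched by configuredSchemaFiles, resolved against the config file
      out: "./drizzle"              // read by drizzleOutPathFromConfig (maintained-seams.js) for migrationsPath
    };
    // A glob such as schema: "./src/schema/*.ts" is skipped: the capture group [^"'`*]+ excludes "*".
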
@@ -0,0 +1,208 @@
+ // @ts-check
+
+ import path from "node:path";
+
+ import { findImportFiles, makeCandidateRecord, relativeTo } from "../../core/shared.js";
+
+ /** @param {string} value @returns {string} */
+ function toPosix(value) {
+   return String(value || "").replaceAll(path.sep, "/");
+ }
+
+ /** @param {any} context @param {string} filePath @returns {string} */
+ function appRelativePath(context, filePath) {
+   return toPosix(relativeTo(context.paths.workspaceRoot, filePath));
+ }
+
+ /** @param {any} context @param {string} filePath @returns {string} */
+ function evidencePath(context, filePath) {
+   return toPosix(relativeTo(context.paths.repoRoot, filePath));
+ }
+
+ /** @param {string} relativePath @param {string[]} segments @returns {string|null} */
+ function prefixThroughSegments(relativePath, segments) {
+   const parts = toPosix(relativePath).split("/");
+   for (let index = 0; index <= parts.length - segments.length; index += 1) {
+     if (segments.every((segment, offset) => parts[index + offset] === segment)) {
+       return parts.slice(0, index + segments.length).join("/");
+     }
+   }
+   return null;
+ }
+
+ /** @param {string} relativePath @returns {string|null} */
+ function migrationDirectoryFromRelativePath(relativePath) {
+   return prefixThroughSegments(relativePath, ["migrations"]) ||
+     prefixThroughSegments(relativePath, ["migration"]);
+ }
+
+ /** @param {any} context @param {string[]} files @param {string[][]} markers @returns {string|null} */
+ function firstMarkedDirectory(context, files, markers) {
+   const directories = new Set();
+   for (const filePath of files) {
+     const relativePath = appRelativePath(context, filePath);
+     for (const marker of markers) {
+       const directory = prefixThroughSegments(relativePath, marker);
+       if (directory) {
+         directories.add(directory);
+       }
+     }
+   }
+   return [...directories].sort()[0] || null;
+ }
+
+ /** @param {any} context @param {string[]} configFiles @returns {string|null} */
+ function drizzleOutPathFromConfig(context, configFiles) {
+   for (const configFile of configFiles.sort()) {
+     const configText = context.helpers.readTextIfExists(configFile) || "";
+     const outMatch = configText.match(/\bout\s*:\s*["'`]([^"'`]+)["'`]/);
+     if (!outMatch) {
+       continue;
+     }
+     const absoluteOut = path.resolve(path.dirname(configFile), outMatch[1]);
+     const relativeOut = appRelativePath(context, absoluteOut);
+     if (relativeOut && !relativeOut.startsWith("..")) {
+       return relativeOut;
+     }
+   }
+   return null;
+ }
+
+ /**
+  * @param {any} context
+  * @param {{ tool: "sql"|"prisma"|"drizzle", schemaPath?: string|null, migrationsPath?: string|null, evidence: string[], matchReasons: string[], missingDecisions: string[] }} options
+  * @returns {any}
+  */
+ function maintainedDbSeamCandidate(context, options) {
+   const runtimeId = "app_db";
+   const projectionId = "proj_db";
+   const snapshotPath = `topo/state/db/${runtimeId}/current.snapshot.json`;
+   const proposedRuntimeMigration = {
+     ownership: "maintained",
+     tool: options.tool,
+     apply: "never",
+     snapshotPath,
+     ...(options.schemaPath ? { schemaPath: options.schemaPath } : {}),
+     ...(options.migrationsPath ? { migrationsPath: options.migrationsPath } : {})
+   };
+   const idHint = `seam_${options.tool}_db_migrations`;
+
+   return makeCandidateRecord({
+     kind: "maintained_db_migration_seam",
+     idHint,
+     label: `${options.tool.toUpperCase()} maintained database migrations`,
+     confidence: options.missingDecisions.length === 0 ? "high" : "medium",
+     sourceKind: "migration_strategy_inference",
+     sourceOfTruth: "candidate",
+     provenance: options.evidence,
+     track: "db",
+     seam_id: idHint,
+     output_id: "maintained_app",
+     ownership_class: "human_owned",
+     status: "review_required",
+     tool: options.tool,
+     ownership: "maintained",
+     apply: "never",
+     schemaPath: options.schemaPath || null,
+     migrationsPath: options.migrationsPath || null,
+     snapshotPath,
+     runtime_id_hint: runtimeId,
+     projection_id_hint: projectionId,
+     evidence: options.evidence,
+     match_reasons: options.matchReasons,
+     missing_decisions: options.missingDecisions,
+     proposed_runtime_migration: proposedRuntimeMigration,
+     maintained_modules: [options.schemaPath, options.migrationsPath].filter(Boolean),
+     emitted_dependencies: [snapshotPath, projectionId],
+     allowed_change_classes: ["proposal_only"],
+     drift_signals: ["schema_or_migration_changed", "migration_directory_changed"]
+   });
+ }
+
+ /** @param {any} context @param {string[]} prismaFiles @returns {any[]} */
+ export function inferPrismaMaintainedDbSeams(context, prismaFiles) {
+   if (!prismaFiles.length) {
+     return [];
+   }
+   const schemaPath = appRelativePath(context, prismaFiles[0]);
+   const migrationFiles = /** @type {string[]} */ (findImportFiles(context.paths, /** @param {string} filePath */ (filePath) => toPosix(filePath).includes("/prisma/migrations/")));
+   const migrationsPath = firstMarkedDirectory(context, migrationFiles, [["prisma", "migrations"]]);
+   return [
+     maintainedDbSeamCandidate(context, {
+       tool: "prisma",
+       schemaPath,
+       migrationsPath,
+       evidence: [
+         ...prismaFiles.map((filePath) => evidencePath(context, filePath)),
+         ...migrationFiles.slice(0, 3).map(/** @param {string} filePath */ (filePath) => evidencePath(context, filePath))
+       ],
+       matchReasons: [
+         "found Prisma schema",
+         ...(migrationsPath ? ["found Prisma migrations directory"] : [])
+       ],
+       missingDecisions: migrationsPath ? [] : ["confirm Prisma migrationsPath before adding this strategy to topogram.project.json"]
+     })
+   ];
+ }
+
+ /** @param {any} context @param {string[]} schemaFiles @returns {any[]} */
+ export function inferDrizzleMaintainedDbSeams(context, schemaFiles) {
+   if (!schemaFiles.length) {
+     return [];
+   }
+   const configFiles = /** @type {string[]} */ (findImportFiles(context.paths, /** @param {string} filePath */ (filePath) => /drizzle\.config\.(ts|js|mjs|cjs)$/i.test(path.basename(filePath))));
+   const drizzleFiles = /** @type {string[]} */ (findImportFiles(context.paths, /** @param {string} filePath */ (filePath) => appRelativePath(context, filePath).startsWith("drizzle/")));
+   const configuredOutPath = drizzleOutPathFromConfig(context, configFiles);
+   const migrationsPath = configuredOutPath ||
+     firstMarkedDirectory(context, drizzleFiles, [["drizzle"]]);
+   return [
+     maintainedDbSeamCandidate(context, {
+       tool: "drizzle",
+       schemaPath: appRelativePath(context, schemaFiles[0]),
+       migrationsPath,
+       evidence: [
+         ...schemaFiles.map((filePath) => evidencePath(context, filePath)),
+         ...configFiles.map(/** @param {string} filePath */ (filePath) => evidencePath(context, filePath)),
+         ...drizzleFiles.slice(0, 3).map(/** @param {string} filePath */ (filePath) => evidencePath(context, filePath))
+       ],
+       matchReasons: [
+         "found Drizzle schema source",
+         ...(configFiles.length ? ["found Drizzle config"] : []),
+         ...(migrationsPath ? ["found Drizzle migrations output"] : [])
+       ],
+       missingDecisions: migrationsPath ? [] : ["confirm Drizzle migrationsPath before adding this strategy to topogram.project.json"]
+     })
+   ];
+ }
+
+ /** @param {any} context @param {string[]} allSqlFiles @param {string[]} selectedSqlFiles @returns {any[]} */
+ export function inferSqlMaintainedDbSeams(context, allSqlFiles, selectedSqlFiles) {
+   if (!allSqlFiles.length) {
+     return [];
+   }
+   const schemaFile = selectedSqlFiles.find((filePath) => !/migration/i.test(path.basename(filePath))) ||
+     allSqlFiles.find((filePath) => /schema/i.test(path.basename(filePath))) ||
+     null;
+   const migrationFiles = allSqlFiles.filter((filePath) => {
+     const relativePath = appRelativePath(context, filePath);
+     return Boolean(migrationDirectoryFromRelativePath(relativePath)) || /migration/i.test(path.basename(filePath));
+   });
+   const migrationsPath = firstMarkedDirectory(context, migrationFiles, [["migrations"], ["migration"]]) ||
+     (migrationFiles.length ? toPosix(path.dirname(appRelativePath(context, migrationFiles[0]))) : null);
+   return [
+     maintainedDbSeamCandidate(context, {
+       tool: "sql",
+       schemaPath: schemaFile ? appRelativePath(context, schemaFile) : null,
+       migrationsPath,
+       evidence: [
+         ...(schemaFile ? [evidencePath(context, schemaFile)] : []),
+         ...migrationFiles.slice(0, 3).map((filePath) => evidencePath(context, filePath))
+       ],
+       matchReasons: [
+         ...(schemaFile ? ["found SQL schema"] : []),
+         ...(migrationsPath ? ["found SQL migrations directory or migration file"] : [])
+       ],
+       missingDecisions: migrationsPath ? [] : ["confirm SQL migrationsPath before adding this strategy to topogram.project.json"]
+     })
+   ];
+ }
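
Note: every candidate emitted by maintainedDbSeamCandidate carries apply: "never" and
status: "review_required", so these seams are proposals for human review rather than
applied strategies. The path matching rests on prefixThroughSegments; a worked example
with hypothetical paths:

    prefixThroughSegments("apps/api/prisma/migrations/0001_init/migration.sql", ["prisma", "migrations"]);
    // -> "apps/api/prisma/migrations"   (prefix up to and including the first run of marker segments)
    prefixThroughSegments("src/db/schema.ts", ["migrations"]);
    // -> null   (no marker segment; callers fall back to other heuristics or record a missing decision)
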
@@ -8,6 +8,7 @@ import {
    titleCase,
    idHintify
  } from "../../core/shared.js";
+ import { inferPrismaMaintainedDbSeams } from "./maintained-seams.js";

  function parsePrismaSchema(schemaText) {
    const enums = [];
@@ -125,7 +126,7 @@ export const prismaExtractor = {
        "prisma"
      );
      const findings = [];
-     const candidates = { entities: [], enums: [], relations: [], indexes: [] };
+     const candidates = { entities: [], enums: [], relations: [], indexes: [], maintained_seams: [] };
      for (const filePath of prismaFiles) {
        const parsed = parsePrismaSchema(context.helpers.readTextIfExists(filePath) || "");
        const provenance = relativeTo(context.paths.repoRoot, filePath);
@@ -179,7 +180,7 @@ export const prismaExtractor = {
          track: "db"
        })));
      }
+     candidates.maintained_seams = inferPrismaMaintainedDbSeams(context, prismaFiles);
      return { findings, candidates };
    }
  };
-
@@ -1,4 +1,5 @@
  import { canonicalCandidateTerm, findImportFiles, makeCandidateRecord, relativeTo, selectPreferredImportFiles, slugify, titleCase, idHintify } from "../../core/shared.js";
+ import { inferSqlMaintainedDbSeams } from "./maintained-seams.js";

  function parseTableConstraint(line, tableName) {
    const normalized = line.replace(/,$/, "").trim();
@@ -119,7 +120,7 @@ export const sqlExtractor = {
        ? selectPreferredImportFiles(context.paths, schemaSqlFiles, "sql")
        : selectPreferredImportFiles(context.paths, migrationSqlFiles, "sql");
      const findings = [];
-     const candidates = { entities: [], enums: [], relations: [], indexes: [] };
+     const candidates = { entities: [], enums: [], relations: [], indexes: [], maintained_seams: [] };
      for (const filePath of sqlFiles) {
        const parsed = parseSqlSchema(context.helpers.readTextIfExists(filePath) || "");
        const provenance = relativeTo(context.paths.repoRoot, filePath);
@@ -175,6 +176,7 @@ export const sqlExtractor = {
          track: "db"
        })));
      }
+     candidates.maintained_seams = inferSqlMaintainedDbSeams(context, allSqlFiles, sqlFiles);
      return { findings, candidates };
    }
  };
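
Note: the SQL seam inference splits allSqlFiles heuristically: a file counts as a
migration when it sits under a migrations/migration directory or has "migration" in its
basename; the schema is the first selected file whose basename lacks "migration", else
any file named like a schema. A hypothetical layout:

    // db/schema.sql                -> schema candidate (basename lacks "migration")
    // db/migrations/0001_users.sql -> migration ("migrations" path segment)
    // db/2024_migration_fix.sql    -> migration (basename matches /migration/i)
    // migrationsPath -> "db/migrations" via firstMarkedDirectory; with no directory
    // marker anywhere, it falls back to the directory of the first migration file.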