wogiflow 2.4.3 → 2.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/wogi-audit.md +26 -0
- package/.claude/commands/wogi-review.md +29 -0
- package/.claude/commands/wogi-start.md +124 -0
- package/.claude/docs/claude-code-compatibility.md +24 -0
- package/.claude/docs/explore-agents.md +19 -2
- package/.claude/settings.json +11 -0
- package/bin/flow +11 -1
- package/lib/workspace-channel-server.js +364 -0
- package/lib/workspace-contracts.js +599 -0
- package/lib/workspace-intelligence.js +600 -0
- package/lib/workspace-messages.js +441 -0
- package/lib/workspace-routing.js +782 -0
- package/lib/workspace-sync.js +339 -0
- package/lib/workspace.js +1349 -0
- package/package.json +1 -1
- package/scripts/flow-config-defaults.js +28 -0
- package/scripts/flow-eval-calibration.js +257 -0
- package/scripts/flow-eval-judge.js +10 -1
- package/scripts/flow-eval.js +9 -0
- package/scripts/flow-schema-drift.js +837 -0
- package/scripts/hooks/adapters/claude-code.js +29 -0
- package/scripts/hooks/core/task-created.js +83 -0
- package/scripts/hooks/entry/claude-code/task-created.js +15 -0
- package/scripts/postinstall.js +2 -0
|
@@ -0,0 +1,837 @@
|
|
|
1
|
+
#!/usr/bin/env node

/**
 * Wogi Flow — Schema Drift Quality Gate
 *
 * Detects when schema fields are removed/renamed but consumers still reference
 * the old field names. Agnostic core + specific parsers for known ORMs.
 *
 * Triggers on ALL tasks that touch schema files (not just refactors).
 * Auto-fixes consumers when the change was explicitly requested in the task spec;
 * flags for user decision when the change is a side-effect.
 *
 * Cross-repo aware: can scan workspace member repos for drift.
 *
 * Usage:
 *   node scripts/flow-schema-drift.js [changed-files...]
 *   node scripts/flow-schema-drift.js --task wf-XXXXXXXX
 */

'use strict';

const fs = require('node:fs');
const path = require('node:path');
const { execFileSync } = require('node:child_process');

// ============================================================
// Constants
// ============================================================

// Resolve the repository root via git; fall back to the current working
// directory when this script runs outside a git checkout.
let PROJECT_ROOT;
try {
  PROJECT_ROOT = execFileSync('git', ['rev-parse', '--show-toplevel'], {
    encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe']
  }).trim();
} catch (_err) {
  PROJECT_ROOT = process.cwd();
}

// Wogi Flow state files: an optional registry of known schema files
// (structured JSON preferred, markdown table as fallback) and the
// enforcement config that can disable this gate.
const SCHEMA_MAP_PATH = path.join(PROJECT_ROOT, '.workflow', 'state', 'schema-map.md');
const SCHEMA_INDEX_PATH = path.join(PROJECT_ROOT, '.workflow', 'state', 'schema-index.json');
const CONFIG_PATH = path.join(PROJECT_ROOT, '.workflow', 'config.json');

// Convention-based schema file patterns (when no schema-map exists)
const SCHEMA_CONVENTIONS = [
  /\.prisma$/,
  /\.entity\.(ts|js)$/,
  /\.model\.(ts|js)$/,
  /\.schema\.(ts|js)$/,
  /models\/[^/]+\.(ts|js)$/,
  /entities\/[^/]+\.(ts|js)$/,
  /schemas\/[^/]+\.(ts|js)$/,
];

// Directories to exclude from consumer scanning
const EXCLUDE_DIRS = ['node_modules', 'dist', 'build', '.next', '.workflow', '.git', 'coverage'];
|
|
56
|
+
|
|
57
|
+
// ============================================================
|
|
58
|
+
// Schema File Detection (C1 — Layer 1)
|
|
59
|
+
// ============================================================
|
|
60
|
+
|
|
61
|
+
/**
 * Collect registered schema files from schema-index.json (preferred) or,
 * when that yields nothing, from backticked paths in schema-map.md.
 * Both sources are optional; failures are non-fatal and simply narrow
 * detection down to convention-based matching.
 *
 * @returns {string[]} absolute paths to known schema files (deduplicated)
 */
function getRegisteredSchemaFiles() {
  const files = [];
  const addUnique = (relOrAbs) => {
    const abs = path.resolve(PROJECT_ROOT, relOrAbs);
    if (!files.includes(abs)) files.push(abs);
  };

  // Preferred source: structured schema-index.json.
  try {
    if (fs.existsSync(SCHEMA_INDEX_PATH)) {
      const index = JSON.parse(fs.readFileSync(SCHEMA_INDEX_PATH, 'utf-8'));
      for (const model of index.models ?? []) {
        if (model.file) addUnique(model.file);
      }
    }
  } catch (_err) {
    // Malformed or unreadable index — fall through to schema-map.md.
  }

  // Fallback: pull backticked schema file paths out of schema-map.md tables.
  try {
    if (files.length === 0 && fs.existsSync(SCHEMA_MAP_PATH)) {
      const content = fs.readFileSync(SCHEMA_MAP_PATH, 'utf-8');
      const filePattern = /`([^`]+\.(prisma|entity\.\w+|model\.\w+|schema\.\w+))`/g;
      for (const match of content.matchAll(filePattern)) {
        addUnique(match[1]);
      }
    }
  } catch (_err) {
    // Non-critical — an empty registry is an acceptable answer.
  }

  return files;
}
|
|
102
|
+
|
|
103
|
+
/**
 * Detect schema files among changed files.
 * Registry matches (schema-index / schema-map) win first; otherwise the
 * path is tested against the filename conventions in SCHEMA_CONVENTIONS.
 *
 * @param {string[]} changedFiles — paths (relative or absolute)
 * @returns {string[]} schema file paths found in the changed set
 */
function detectSchemaFiles(changedFiles) {
  const registered = new Set(
    getRegisteredSchemaFiles().map((f) => path.resolve(PROJECT_ROOT, f))
  );

  return changedFiles.filter((file) => {
    const abs = path.resolve(PROJECT_ROOT, file);

    // Registry match takes precedence over naming conventions.
    if (registered.has(abs)) return true;

    // Convention match on the project-relative path.
    const rel = path.relative(PROJECT_ROOT, abs);
    return SCHEMA_CONVENTIONS.some((pattern) => pattern.test(rel));
  });
}
|
|
136
|
+
|
|
137
|
+
// ============================================================
|
|
138
|
+
// Field Change Parsing (C1 — Layer 2)
|
|
139
|
+
// ============================================================
|
|
140
|
+
|
|
141
|
+
/**
 * Parse removed/added lines from the working-tree AND staged git diffs
 * for a file (zero-context so only genuinely changed lines appear).
 *
 * FIX: lines used to be pushed with `.trim()`, which stripped leading
 * indentation. Every field parser downstream (parsePrismaField,
 * parseEntityField, parseMongooseField, parseTsInterfaceField,
 * parseSqlColumn, parseAgnosticField) anchors on leading whitespace
 * (`^\s+` / `^\s{2,}`) to tell indented field lines apart from top-level
 * declarations — so fully-trimmed lines never matched and the gate
 * detected nothing. Only trailing whitespace is stripped now.
 *
 * @param {string} filePath — path relative to PROJECT_ROOT (or absolute)
 * @returns {{ removed: string[], added: string[] }} raw changed lines,
 *   diff marker removed, indentation preserved
 */
function getDiffLines(filePath) {
  const removed = [];
  const added = [];

  try {
    const diff = execFileSync('git', [
      'diff', '--unified=0', '--', filePath
    ], { cwd: PROJECT_ROOT, encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] });

    let stagedDiff = '';
    try {
      stagedDiff = execFileSync('git', [
        'diff', '--cached', '--unified=0', '--', filePath
      ], { cwd: PROJECT_ROOT, encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] });
    } catch (_err) { /* no staged changes */ }

    const combined = diff + '\n' + stagedDiff;
    for (const line of combined.split('\n')) {
      if (line.startsWith('-') && !line.startsWith('---')) {
        // substring(1) drops the diff marker; trimEnd() keeps the
        // indentation the field parsers rely on.
        removed.push(line.substring(1).trimEnd());
      } else if (line.startsWith('+') && !line.startsWith('+++')) {
        added.push(line.substring(1).trimEnd());
      }
    }
  } catch (_err) {
    // `git diff` exits non-zero when the file has no changes or git is
    // unavailable; an empty result is the correct answer in both cases.
  }

  return { removed, added };
}
|
|
176
|
+
|
|
177
|
+
// ---- Specific Parsers ----
|
|
178
|
+
|
|
179
|
+
/**
 * Extract the field name from a single Prisma schema line.
 * Prisma format: `  fieldName Type @decorators`
 * @param {string} line
 * @returns {string|null} field name, or null for non-field lines
 */
function parsePrismaField(line) {
  // Block declarations, block attributes (`@@`), comments, closing braces
  // and blank lines are never field definitions.
  const isDeclaration = /^\s*(model|enum|type|generator|datasource)\s/.test(line);
  const isNonField = /^\s*(\/\/|@@|})/.test(line);
  if (isDeclaration || isNonField || line.trim() === '') return null;

  // A field line is an indented identifier followed by a type token.
  const fieldMatch = /^\s+(\w+)\s+\w/.exec(line);
  if (fieldMatch === null) return null;
  return fieldMatch[1];
}
|
|
195
|
+
|
|
196
|
+
/**
 * Extract the field name from a TypeORM/NestJS entity line.
 * Format: `@Column() fieldName: Type` spread over lines — the property
 * itself looks like `fieldName: Type`, `fieldName?: Type` or `fieldName!: Type`.
 * @param {string} line
 * @returns {string|null}
 */
function parseEntityField(line) {
  // Decorators, module statements, class headers, comments and closing
  // braces cannot be property declarations.
  if (/^\s*(@|import |export |class |\/\/|\/\*|\*|})/.test(line)) {
    return null;
  }

  const propertyMatch = /^\s+(\w+)[?!]?\s*:\s*\w/.exec(line);
  return propertyMatch === null ? null : propertyMatch[1];
}
|
|
210
|
+
|
|
211
|
+
/**
 * Extract the field name from a Mongoose schema line.
 * Format: `fieldName: { type: ... }`, `fieldName: [Type]` or `fieldName: Type`
 * (the value must start with `{`, `[` or an uppercase constructor name).
 * @param {string} line
 * @returns {string|null}
 */
function parseMongooseField(line) {
  // Comments, module statements, variable declarations and closing braces
  // are not schema fields.
  const skip = /^\s*(\/\/|\/\*|\*|import |export |const |let |var |})/;
  if (skip.test(line)) return null;

  const fieldMatch = /^\s+(\w+)\s*:\s*[{\[A-Z]/.exec(line);
  return fieldMatch ? fieldMatch[1] : null;
}
|
|
224
|
+
|
|
225
|
+
/**
 * Extract the field name from a TypeScript interface/type member line.
 * Format: `fieldName: Type;` or `fieldName?: Type;`
 * @param {string} line
 * @returns {string|null}
 */
function parseTsInterfaceField(line) {
  // Comments, module statements, interface/type headers and closing braces
  // are not members.
  if (/^\s*(\/\/|\/\*|\*|import |export |interface |type |})/.test(line)) {
    return null;
  }

  const memberMatch = /^\s+(\w+)[?!]?\s*:\s*.+[;,]?\s*$/.exec(line);
  if (memberMatch === null) return null;
  return memberMatch[1];
}
|
|
237
|
+
|
|
238
|
+
/**
|
|
239
|
+
* Extract field name from a SQL column definition.
|
|
240
|
+
* Format: `column_name TYPE ...`
|
|
241
|
+
* @param {string} line
|
|
242
|
+
* @returns {string|null}
|
|
243
|
+
*/
|
|
244
|
+
function parseSqlColumn(line) {
|
|
245
|
+
if (/^\s*(CREATE|ALTER|DROP|INSERT|SELECT|UPDATE|DELETE|--|\/\*|\*|CONSTRAINT|INDEX|PRIMARY|FOREIGN|UNIQUE|CHECK|\))/i.test(line)) return null;
|
|
246
|
+
|
|
247
|
+
const match = line.match(/^\s+"?(\w+)"?\s+(VARCHAR|INT|TEXT|BOOLEAN|TIMESTAMP|DATE|FLOAT|DECIMAL|BIGINT|SERIAL|UUID|JSONB?|BYTEA|SMALLINT|NUMERIC)/i);
|
|
248
|
+
return match ? match[1] : null;
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
/**
 * Agnostic field extraction — catches property-like patterns in any file.
 * Requires at least 2 spaces of indentation and a 2+ char identifier,
 * and filters out common schema/config keywords that are not field names.
 * @param {string} line
 * @returns {string|null}
 */
function parseAgnosticField(line) {
  const SKIP =
    /^\s*(\/\/|\/\*|\*|import |export |class |function |const |let |var |return |if |})/;
  if (SKIP.test(line) || !line.trim()) return null;

  // Property-ish shape: `  name: value`, `  name?: Type`, `  name!: Type`.
  const propertyMatch = /^\s{2,}(\w{2,})[?!]?\s*:\s*\S/.exec(line);
  if (propertyMatch === null) return null;

  const candidate = propertyMatch[1];

  // Common option/keyword names that appear in schema definitions but are
  // not themselves fields.
  const NOISE = [
    'type', 'default', 'required', 'unique', 'index', 'ref', 'enum',
    'validate', 'get', 'set', 'value', 'key', 'label', 'description',
    'constructor', 'prototype'
  ];
  return NOISE.includes(candidate.toLowerCase()) ? null : candidate;
}
|
|
271
|
+
|
|
272
|
+
/**
 * Select the best field parser for a file based on extension and filename.
 * Falls back to the agnostic parser for unrecognized files.
 * @param {string} filePath
 * @returns {Function} parser function (line => string|null)
 */
function selectParser(filePath) {
  const ext = path.extname(filePath);
  const basename = path.basename(filePath);

  // Ordered dispatch table: first matching rule wins.
  const rules = [
    [() => ext === '.prisma', parsePrismaField],
    [() => /\.entity\.(ts|js)$/.test(basename), parseEntityField],
    [() => /\.schema\.(ts|js)$/.test(basename), parseMongooseField],
    [() => /\.model\.(ts|js)$/.test(basename), parseEntityField], // TypeORM-style
    [() => /\.sql$/.test(ext), parseSqlColumn],
    [() => ext === '.ts' || ext === '.tsx', parseTsInterfaceField],
    [() => ext === '.js' || ext === '.jsx', parseMongooseField], // Mongoose-style fallback
  ];

  for (const [applies, parser] of rules) {
    if (applies()) return parser;
  }
  return parseAgnosticField;
}
|
|
291
|
+
|
|
292
|
+
/**
 * Parse field changes from a schema file's git diff.
 *
 * Fields present on both the removed and added side are treated as merely
 * modified; remaining removed fields are classified as 'renamed' (when a
 * similar field was added) or 'removed'.
 *
 * @param {string} filePath — schema file path
 * @returns {Array<{ file: string, field: string, action: 'removed'|'renamed', oldName: string, newName?: string }>}
 */
function parseFieldChanges(filePath) {
  const { removed, added } = getDiffLines(filePath);
  const parser = selectParser(filePath);

  // Run the specific parser first; fall back to the agnostic one per line.
  const extractFields = (lines) => {
    const fields = new Set();
    for (const line of lines) {
      const field = parser(line) ?? parseAgnosticField(line);
      if (field) fields.add(field);
    }
    return fields;
  };

  const removedFields = extractFields(removed);
  const addedFields = extractFields(added);
  const entries = [];

  for (const field of removedFields) {
    // Present on both sides → the field line was only modified, not removed.
    if (addedFields.has(field)) continue;

    const newName = detectRename(field, addedFields);
    if (newName) {
      entries.push({ file: filePath, field, action: 'renamed', oldName: field, newName });
    } else {
      entries.push({ file: filePath, field, action: 'removed', oldName: field });
    }
  }

  return entries;
}
|
|
347
|
+
|
|
348
|
+
/**
 * Heuristic rename detection: decide whether a removed field re-appears
 * among the added fields under a new name.
 *
 * Three signals, checked per candidate in order:
 *   1. pure case change (identical when lowercased),
 *   2. containment (one name embeds the other, both ≥ 4 chars),
 *   3. near-identical spelling (edit distance ≤ 3 AND ≥ 80% similarity,
 *      so e.g. createdAt vs updatedAt does NOT count as a rename).
 *
 * @param {string} removedField
 * @param {Set<string>} addedFields
 * @returns {string|null} new name if a rename is detected
 */
function detectRename(removedField, addedFields) {
  const removedLower = removedField.toLowerCase();

  for (const candidate of addedFields) {
    const candidateLower = candidate.toLowerCase();

    // Signal 1: case-only change (e.g. EmailVerified → emailVerified).
    if (removedLower === candidateLower && removedField !== candidate) {
      return candidate;
    }

    // Signal 2: prefix/suffix relationship (e.g. emailVerified → isEmailVerified).
    if (removedLower.length >= 4 && candidateLower.length >= 4) {
      if (candidateLower.includes(removedLower) || removedLower.includes(candidateLower)) {
        return candidate;
      }
    }

    // Signal 3: small edit distance with high relative similarity.
    if (removedField.length >= 4 && candidate.length >= 4) {
      const lengthGap = Math.abs(removedField.length - candidate.length);
      if (lengthGap <= 3) {
        const distance = levenshtein(removedLower, candidateLower);
        const longest = Math.max(removedLower.length, candidateLower.length);
        if (distance <= 3 && 1 - distance / longest >= 0.8) return candidate;
      }
    }
  }

  return null;
}
|
|
383
|
+
|
|
384
|
+
/**
 * Levenshtein edit distance between two strings, computed with a single
 * rolling DP row (O(min memory), O(m*n) time).
 * @param {string} a
 * @param {string} b
 * @returns {number}
 */
function levenshtein(a, b) {
  if (a.length === 0) return b.length;
  if (b.length === 0) return a.length;

  // row[i] = distance between a[0..i) and the prefix of b processed so far.
  const row = Array.from({ length: a.length + 1 }, (_, i) => i);

  for (let j = 1; j <= b.length; j += 1) {
    let diagonal = row[0]; // value of row[i-1] from the previous iteration of j
    row[0] = j;
    for (let i = 1; i <= a.length; i += 1) {
      const saved = row[i];
      row[i] = a[i - 1] === b[j - 1]
        ? diagonal
        : 1 + Math.min(diagonal, row[i], row[i - 1]);
      diagonal = saved;
    }
  }

  return row[a.length];
}
|
|
409
|
+
|
|
410
|
+
// ============================================================
|
|
411
|
+
// Consumer Scanning (C2)
|
|
412
|
+
// ============================================================
|
|
413
|
+
|
|
414
|
+
/**
 * Find all files under `searchRoot` that reference a field name, by
 * shelling out to `grep -rnw` over common frontend/backend source
 * extensions. The schema file itself and other schema-convention files
 * are excluded from the results.
 *
 * NOTE(review): relies on a `grep` binary with GNU-style `--exclude-dir`
 * being on PATH — presumably unavailable on stock Windows shells, where
 * this silently returns no references; confirm whether that should be
 * surfaced.
 *
 * @param {string} fieldName — the field to search for (word-boundary match)
 * @param {string} excludeFile — schema file to exclude from results
 * @param {string} [searchRoot] — directory to search (default: PROJECT_ROOT)
 * @returns {Array<{ file: string, line: number, context: string, matchType: string }>}
 */
function findFieldReferences(fieldName, excludeFile, searchRoot) {
  const root = searchRoot ?? PROJECT_ROOT;
  const refs = [];

  // Skip very short field names that would produce too many false positives
  if (fieldName.length < 3) return refs;

  // Validate fieldName is a valid identifier (prevent grep flag injection)
  if (!/^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(fieldName)) return refs;

  try {
    const excludeArgs = EXCLUDE_DIRS.flatMap(d => ['--exclude-dir', d]);
    const result = execFileSync('grep', [
      '-rn',
      '--include=*.ts', '--include=*.tsx',
      '--include=*.js', '--include=*.jsx',
      '--include=*.vue', '--include=*.svelte',
      ...excludeArgs,
      '-w',
      '--', // End of flags — prevents fieldName from being parsed as a flag
      fieldName,
      '.'
    ], {
      cwd: root,
      encoding: 'utf-8',
      maxBuffer: 10 * 1024 * 1024,
      stdio: ['pipe', 'pipe', 'pipe']
    });

    // grep -rn output shape: `./path/to/file:lineNo:matched line content`
    const lines = result.trim().split('\n').filter(Boolean);
    for (const line of lines) {
      const colonIdx = line.indexOf(':');
      if (colonIdx === -1) continue;
      const filePath = line.substring(0, colonIdx).replace(/^\.\//, '');

      // Skip the schema file itself
      const absFile = path.resolve(root, filePath);
      const absExclude = path.resolve(root, excludeFile);
      if (absFile === absExclude) continue;

      // Skip schema-adjacent files (other model/entity files)
      if (SCHEMA_CONVENTIONS.some(p => p.test(filePath))) continue;

      // Split off the line number; anything after the second colon is the
      // matched source text.
      const secondColon = line.indexOf(':', colonIdx + 1);
      const lineNum = secondColon > colonIdx ? parseInt(line.substring(colonIdx + 1, secondColon), 10) : 0;
      const context = secondColon > colonIdx ? line.substring(secondColon + 1).trim() : '';

      // Classify match type (first matching pattern wins)
      let matchType = 'reference';
      if (context.includes(`.${fieldName}`)) matchType = 'property-access';
      else if (context.includes(`{ ${fieldName}`) || context.includes(`{${fieldName}`)) matchType = 'destructuring';
      else if (context.includes(`${fieldName}:`)) matchType = 'object-key';
      else if (context.includes(`'${fieldName}'`) || context.includes(`"${fieldName}"`)) matchType = 'string-literal';

      // Context is truncated to keep report lines readable.
      refs.push({ file: filePath, line: lineNum, context: context.substring(0, 120), matchType });
    }
  } catch (_err) {
    // grep returns exit code 1 when no matches — that's fine
  }

  return refs;
}
|
|
484
|
+
|
|
485
|
+
/**
 * Scan all consumers for drift entries, keeping only the entries that
 * actually have at least one consumer still referencing the old name.
 *
 * @param {Array} driftEntries — from parseFieldChanges()
 * @param {string} [searchRoot] — search root (default: PROJECT_ROOT)
 * @returns {Array<{ field: string, action: string, oldName: string, newName?: string, consumers: Array }>}
 */
function scanConsumers(driftEntries, searchRoot) {
  return driftEntries
    .map((entry) => ({
      ...entry,
      consumers: findFieldReferences(entry.oldName, entry.file, searchRoot)
    }))
    .filter((entry) => entry.consumers.length > 0);
}
|
|
504
|
+
|
|
505
|
+
// ============================================================
|
|
506
|
+
// Intent Classification (C4)
|
|
507
|
+
// ============================================================
|
|
508
|
+
|
|
509
|
+
/**
 * Classify whether a field change was explicitly requested by the user
 * (intent 'auto-fix') or is a side-effect of the task (intent 'flag').
 *
 * A change is "explicit" when the spec mentions a rename/removal of the
 * field by name, or mentions both the old and new names.
 *
 * @param {Array} driftWithConsumers — from scanConsumers()
 * @param {string|null} specContent — task spec content (or null)
 * @returns {Array} same entries with `intent: 'auto-fix'|'flag'` added
 */
function classifyIntent(driftWithConsumers, specContent) {
  const spec = (specContent ?? '').toLowerCase();

  return driftWithConsumers.map((entry) => {
    const oldName = entry.oldName.toLowerCase();
    const newName = (entry.newName ?? '').toLowerCase();

    const specMentionsOld = spec.includes(oldName);
    const specMentionsNew = newName !== '' && spec.includes(newName);

    const requestedRename = spec.includes('rename') && (specMentionsOld || specMentionsNew);
    const requestedRemoval = (spec.includes('remove') || spec.includes('delete')) && specMentionsOld;
    const explicit = requestedRename || requestedRemoval || (specMentionsOld && specMentionsNew);

    return {
      ...entry,
      intent: explicit ? 'auto-fix' : 'flag'
    };
  });
}
|
|
538
|
+
|
|
539
|
+
// ============================================================
|
|
540
|
+
// Quality Gate (C3)
|
|
541
|
+
// ============================================================
|
|
542
|
+
|
|
543
|
+
/**
 * Run the schema drift quality gate on changed files.
 *
 * Pipeline: config check → schema-file detection → per-file diff field
 * parsing → consumer scan → intent classification → violation build.
 * Each early-exit returns a "passed" result so callers can always read
 * the same result shape.
 *
 * @param {string[]} changedFiles — files changed in this task
 * @param {Object} [opts]
 * @param {string} [opts.specContent] — task spec for intent classification
 * @param {string} [opts.searchRoot] — search root override
 * @returns {{ passed: boolean, blocked: boolean, schemaFiles: string[], driftEntries: Array, consumers: Array, violations: Array }}
 */
function runSchemaDriftGate(changedFiles, opts = {}) {
  // Check config — the gate can be switched off under either
  // `enforcement.schemaDrift.enabled` or the legacy `schemaDrift.enabled`.
  let enabled = true;
  try {
    if (fs.existsSync(CONFIG_PATH)) {
      const config = JSON.parse(fs.readFileSync(CONFIG_PATH, 'utf-8'));
      enabled = config.enforcement?.schemaDrift?.enabled ?? config.schemaDrift?.enabled ?? true;
    }
  } catch (_err) {
    // Default to enabled
  }

  if (!enabled) {
    return { passed: true, blocked: false, schemaFiles: [], driftEntries: [], consumers: [], violations: [], skipped: true };
  }

  // Step 1: Detect schema files in changed set
  const schemaFiles = detectSchemaFiles(changedFiles);
  if (schemaFiles.length === 0) {
    return { passed: true, blocked: false, schemaFiles: [], driftEntries: [], consumers: [], violations: [] };
  }

  // Step 2: Parse field changes
  const allDriftEntries = [];
  for (const file of schemaFiles) {
    const entries = parseFieldChanges(file);
    allDriftEntries.push(...entries);
  }

  if (allDriftEntries.length === 0) {
    return { passed: true, blocked: false, schemaFiles, driftEntries: [], consumers: [], violations: [] };
  }

  // Step 3: Scan consumers
  const consumersWithDrift = scanConsumers(allDriftEntries, opts.searchRoot);

  if (consumersWithDrift.length === 0) {
    return { passed: true, blocked: false, schemaFiles, driftEntries: allDriftEntries, consumers: [], violations: [] };
  }

  // Step 4: Classify intent
  const classified = classifyIntent(consumersWithDrift, opts.specContent ?? null);

  // Build violations in standard format (compatible with standards gate).
  // One violation per (drift entry × consumer reference) pair.
  const violations = [];
  for (const entry of classified) {
    for (const consumer of entry.consumers) {
      violations.push({
        type: 'schema-drift',
        // Removals break consumers outright; renames get a softer severity.
        severity: entry.action === 'removed' ? 'must-fix' : 'warning',
        file: consumer.file,
        line: consumer.line,
        rule: `Schema field '${entry.oldName}' was ${entry.action}${entry.newName ? ` to '${entry.newName}'` : ''} in ${entry.file}`,
        message: `Consumer still references '${entry.oldName}' (${consumer.matchType})`,
        context: consumer.context,
        intent: entry.intent,
        autoFixable: entry.intent === 'auto-fix'
      });
    }
  }

  // Only non-auto-fixable must-fix violations block the task.
  const hasBlockers = violations.some(v => v.severity === 'must-fix' && v.intent !== 'auto-fix');

  return {
    passed: !hasBlockers,
    blocked: hasBlockers,
    schemaFiles,
    driftEntries: allDriftEntries,
    consumers: classified,
    violations
  };
}
|
|
624
|
+
|
|
625
|
+
// ============================================================
|
|
626
|
+
// Cross-Repo Scanning (C7)
|
|
627
|
+
// ============================================================
|
|
628
|
+
|
|
629
|
+
/**
 * Scan workspace member repos (from `wogi-workspace.json`) for consumer
 * code that still references removed/renamed fields. The current repo is
 * skipped (it is covered by the in-repo scan), and member paths are
 * validated to stay inside the workspace root.
 *
 * @param {Array} driftEntries — from parseFieldChanges()
 * @param {string} workspaceRoot — workspace root path
 * @returns {Array<{ repo: string, field: string, consumers: Array }>}
 */
function scanCrossRepoConsumers(driftEntries, workspaceRoot) {
  const results = [];

  // Read workspace config; missing or malformed config means nothing to scan.
  let config;
  try {
    const configPath = path.join(workspaceRoot, 'wogi-workspace.json');
    if (!fs.existsSync(configPath)) return results;
    config = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
  } catch (_err) {
    return results;
  }

  if (!config.members) return results;

  // Scan each member repo (excluding the repo that made the change).
  // NOTE(review): the current repo is identified by directory basename —
  // presumably member keys match directory names; confirm against the
  // workspace config format.
  const currentRepoName = path.basename(PROJECT_ROOT);
  for (const [name, member] of Object.entries(config.members)) {
    if (name === currentRepoName) continue;

    const memberPath = path.resolve(workspaceRoot, member.path ?? `./${name}`);

    // Path safety: ensure member is inside workspace
    if (!memberPath.startsWith(workspaceRoot + path.sep) && memberPath !== workspaceRoot) continue;

    if (!fs.existsSync(memberPath)) continue;

    for (const entry of driftEntries) {
      const consumers = findFieldReferences(entry.oldName, entry.file, memberPath);
      if (consumers.length > 0) {
        results.push({
          repo: name,
          field: entry.oldName,
          action: entry.action,
          newName: entry.newName,
          // Prefix consumer paths with the repo name so reports are unambiguous.
          consumers: consumers.map(c => ({ ...c, file: `${name}/${c.file}` }))
        });
      }
    }
  }

  return results;
}
|
|
679
|
+
|
|
680
|
+
// ============================================================
|
|
681
|
+
// Formatting
|
|
682
|
+
// ============================================================
|
|
683
|
+
|
|
684
|
+
/**
 * Format drift gate results for human-readable display.
 * Early-exit one-liners cover the clean outcomes; otherwise a grouped
 * report is built (one section per drift entry, capped at 10 consumers).
 * @param {Object} result — from runSchemaDriftGate()
 * @returns {string}
 */
function formatResult(result) {
  if (result.skipped) return '⊘ Schema drift gate: disabled';
  if (result.schemaFiles.length === 0) return '✓ Schema drift gate: no schema files changed';
  if (result.driftEntries.length === 0) return '✓ Schema drift gate: no fields removed/renamed';
  if (result.violations.length === 0) return '✓ Schema drift gate: no consumer drift detected';

  const out = ['', '━━━ SCHEMA DRIFT DETECTED ━━━', ''];

  // One section per drift entry, listing its consumers.
  for (const entry of result.consumers) {
    const actionText = entry.action === 'renamed'
      ? `renamed to '${entry.newName}'`
      : 'removed';
    const intentTag = entry.intent === 'auto-fix' ? ' [AUTO-FIX]' : ' [NEEDS REVIEW]';
    const marker = entry.action === 'removed' ? '🔴' : '🟡';

    out.push(` ${marker} ${entry.file}: field '${entry.oldName}' ${actionText}${intentTag}`);
    out.push(` Consumers (${entry.consumers.length}):`);

    const shown = entry.consumers.slice(0, 10);
    for (const c of shown) {
      out.push(` → ${c.file}:${c.line} (${c.matchType})`);
    }
    const hidden = entry.consumers.length - shown.length;
    if (hidden > 0) {
      out.push(` ... and ${hidden} more`);
    }
    out.push('');
  }

  const autoFixCount = result.violations.filter((v) => v.intent === 'auto-fix').length;
  const reviewCount = result.violations.filter((v) => v.intent === 'flag').length;

  out.push(` Total: ${result.violations.length} drift references (${autoFixCount} auto-fixable, ${reviewCount} need review)`);
  if (result.blocked) {
    out.push('');
    out.push(' ⚠️ Task BLOCKED — fix consumer references before completing');
  }
  out.push('');

  return out.join('\n');
}
|
|
727
|
+
|
|
728
|
+
// ============================================================
|
|
729
|
+
// CLI
|
|
730
|
+
// ============================================================
|
|
731
|
+
|
|
732
|
+
if (require.main === module) {
  const args = process.argv.slice(2);

  /**
   * Collect changed files from git: unstaged (vs HEAD) plus staged, de-duplicated.
   * Exits the process with code 1 when git cannot be read.
   * NOTE: previously this logic was duplicated verbatim in two branches below;
   * extracted here so the two call sites cannot drift apart.
   * @returns {string[]} changed file paths, relative to the repo root
   */
  const getChangedFilesFromGit = () => {
    try {
      const runGit = (gitArgs) => execFileSync('git', gitArgs, {
        cwd: PROJECT_ROOT, encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe']
      });
      const unstaged = runGit(['diff', '--name-only', 'HEAD']);
      const staged = runGit(['diff', '--name-only', '--staged']);
      return [...new Set(
        [...unstaged.trim().split('\n'), ...staged.trim().split('\n')].filter(Boolean)
      )];
    } catch (_err) {
      console.error('Cannot read git diff');
      process.exit(1);
    }
  };

  let changedFiles = [];
  let specContent = null;

  if (args.includes('--task')) {
    const taskIdx = args.indexOf('--task');
    const taskId = args[taskIdx + 1];
    if (taskId) {
      // Read the task spec (if present) for intent classification.
      const specPaths = [
        path.join(PROJECT_ROOT, '.workflow', 'specs', `${taskId}.md`),
        path.join(PROJECT_ROOT, '.workflow', 'changes', `${taskId}.md`)
      ];
      for (const sp of specPaths) {
        try {
          if (fs.existsSync(sp)) {
            specContent = fs.readFileSync(sp, 'utf-8');
            break;
          }
        } catch (_err) { /* continue */ }
      }

      changedFiles = getChangedFilesFromGit();
    }
  } else {
    // Positional args (everything that is not a flag) are treated as file paths.
    changedFiles = args.filter(a => !a.startsWith('--'));
  }

  if (changedFiles.length === 0) {
    // Default: fall back to everything git reports as changed.
    changedFiles = getChangedFilesFromGit();
  }

  const result = runSchemaDriftGate(changedFiles, { specContent });
  console.log(formatResult(result));

  if (result.blocked) {
    // Machine-readable summary for callers that parse stdout, then fail the gate.
    console.log(JSON.stringify({ blocked: true, violations: result.violations.length }, null, 2));
    process.exit(1);
  }
}
|
|
798
|
+
|
|
799
|
+
// ============================================================
|
|
800
|
+
// Exports
|
|
801
|
+
// ============================================================
|
|
802
|
+
|
|
803
|
+
/**
 * Public API of the schema drift gate, grouped by concern.
 * Assigned incrementally onto the default export object; the resulting
 * export surface (names and order) is identical to a single object literal.
 */

// --- Detection ---
module.exports.detectSchemaFiles = detectSchemaFiles;
module.exports.getRegisteredSchemaFiles = getRegisteredSchemaFiles;
module.exports.parseFieldChanges = parseFieldChanges;
module.exports.getDiffLines = getDiffLines;

// --- Parsers (per-ORM + agnostic fallback) ---
module.exports.parsePrismaField = parsePrismaField;
module.exports.parseEntityField = parseEntityField;
module.exports.parseMongooseField = parseMongooseField;
module.exports.parseTsInterfaceField = parseTsInterfaceField;
module.exports.parseSqlColumn = parseSqlColumn;
module.exports.parseAgnosticField = parseAgnosticField;
module.exports.selectParser = selectParser;
module.exports.detectRename = detectRename;

// --- Consumer scanning ---
module.exports.findFieldReferences = findFieldReferences;
module.exports.scanConsumers = scanConsumers;
module.exports.scanCrossRepoConsumers = scanCrossRepoConsumers;

// --- Intent classification ---
module.exports.classifyIntent = classifyIntent;

// --- Gate entry point ---
module.exports.runSchemaDriftGate = runSchemaDriftGate;

// --- Display ---
module.exports.formatResult = formatResult;

// --- Constants ---
module.exports.SCHEMA_CONVENTIONS = SCHEMA_CONVENTIONS;
module.exports.EXCLUDE_DIRS = EXCLUDE_DIRS;
|