@schemashift/core 0.9.0 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1796 -95
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +456 -2
- package/dist/index.d.ts +456 -2
- package/dist/index.js +1773 -103
- package/dist/index.js.map +1 -1
- package/package.json +6 -1
package/dist/index.js
CHANGED
|
@@ -10,6 +10,9 @@ var LIBRARY_PATTERNS = {
|
|
|
10
10
|
joi: [/^joi$/, /^@hapi\/joi$/],
|
|
11
11
|
"io-ts": [/^io-ts$/, /^io-ts\//],
|
|
12
12
|
valibot: [/^valibot$/],
|
|
13
|
+
arktype: [/^arktype$/],
|
|
14
|
+
superstruct: [/^superstruct$/],
|
|
15
|
+
effect: [/^@effect\/schema$/],
|
|
13
16
|
v4: [],
|
|
14
17
|
// Target version, not detectable from imports
|
|
15
18
|
unknown: []
|
|
@@ -130,6 +133,110 @@ var SchemaAnalyzer = class {
|
|
|
130
133
|
}
|
|
131
134
|
};
|
|
132
135
|
|
|
136
|
+
// src/approval.ts
|
|
137
|
+
import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from "fs";
|
|
138
|
+
import { join } from "path";
|
|
139
|
+
var ApprovalManager = class {
|
|
140
|
+
pendingDir;
|
|
141
|
+
constructor(projectPath) {
|
|
142
|
+
this.pendingDir = join(projectPath, ".schemashift", "pending");
|
|
143
|
+
}
|
|
144
|
+
/**
|
|
145
|
+
* Create a new migration request for review.
|
|
146
|
+
*/
|
|
147
|
+
createRequest(from, to, files, requestedBy, metadata) {
|
|
148
|
+
const id = `mig-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
|
|
149
|
+
const request = {
|
|
150
|
+
id,
|
|
151
|
+
from,
|
|
152
|
+
to,
|
|
153
|
+
files,
|
|
154
|
+
requestedBy,
|
|
155
|
+
requestedAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
156
|
+
status: "pending",
|
|
157
|
+
metadata
|
|
158
|
+
};
|
|
159
|
+
this.ensureDir();
|
|
160
|
+
const filePath = join(this.pendingDir, `${id}.json`);
|
|
161
|
+
writeFileSync(filePath, JSON.stringify(request, null, 2), "utf-8");
|
|
162
|
+
return request;
|
|
163
|
+
}
|
|
164
|
+
/**
|
|
165
|
+
* Review (approve or reject) a pending migration request.
|
|
166
|
+
*/
|
|
167
|
+
review(decision) {
|
|
168
|
+
const request = this.getRequest(decision.requestId);
|
|
169
|
+
if (!request) {
|
|
170
|
+
throw new Error(`Migration request ${decision.requestId} not found`);
|
|
171
|
+
}
|
|
172
|
+
if (request.status !== "pending") {
|
|
173
|
+
throw new Error(`Migration request ${decision.requestId} is already ${request.status}`);
|
|
174
|
+
}
|
|
175
|
+
request.status = decision.status;
|
|
176
|
+
request.reviewedBy = decision.reviewedBy;
|
|
177
|
+
request.reviewedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
178
|
+
request.reason = decision.reason;
|
|
179
|
+
const filePath = join(this.pendingDir, `${decision.requestId}.json`);
|
|
180
|
+
writeFileSync(filePath, JSON.stringify(request, null, 2), "utf-8");
|
|
181
|
+
return request;
|
|
182
|
+
}
|
|
183
|
+
/**
|
|
184
|
+
* Get a specific migration request by ID.
|
|
185
|
+
*/
|
|
186
|
+
getRequest(id) {
|
|
187
|
+
const filePath = join(this.pendingDir, `${id}.json`);
|
|
188
|
+
if (!existsSync(filePath)) {
|
|
189
|
+
return null;
|
|
190
|
+
}
|
|
191
|
+
const content = readFileSync(filePath, "utf-8");
|
|
192
|
+
return JSON.parse(content);
|
|
193
|
+
}
|
|
194
|
+
/**
|
|
195
|
+
* List all migration requests, optionally filtered by status.
|
|
196
|
+
*/
|
|
197
|
+
listRequests(status) {
|
|
198
|
+
if (!existsSync(this.pendingDir)) {
|
|
199
|
+
return [];
|
|
200
|
+
}
|
|
201
|
+
const files = readdirSync(this.pendingDir).filter((f) => f.endsWith(".json"));
|
|
202
|
+
const requests = [];
|
|
203
|
+
for (const file of files) {
|
|
204
|
+
const content = readFileSync(join(this.pendingDir, file), "utf-8");
|
|
205
|
+
const request = JSON.parse(content);
|
|
206
|
+
if (!status || request.status === status) {
|
|
207
|
+
requests.push(request);
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
return requests.sort(
|
|
211
|
+
(a, b) => new Date(b.requestedAt).getTime() - new Date(a.requestedAt).getTime()
|
|
212
|
+
);
|
|
213
|
+
}
|
|
214
|
+
/**
|
|
215
|
+
* Get summary counts of all requests.
|
|
216
|
+
*/
|
|
217
|
+
getSummary() {
|
|
218
|
+
const all = this.listRequests();
|
|
219
|
+
return {
|
|
220
|
+
pending: all.filter((r) => r.status === "pending").length,
|
|
221
|
+
approved: all.filter((r) => r.status === "approved").length,
|
|
222
|
+
rejected: all.filter((r) => r.status === "rejected").length,
|
|
223
|
+
total: all.length
|
|
224
|
+
};
|
|
225
|
+
}
|
|
226
|
+
/**
|
|
227
|
+
* Check if a migration has been approved.
|
|
228
|
+
*/
|
|
229
|
+
isApproved(requestId) {
|
|
230
|
+
const request = this.getRequest(requestId);
|
|
231
|
+
return request?.status === "approved";
|
|
232
|
+
}
|
|
233
|
+
ensureDir() {
|
|
234
|
+
if (!existsSync(this.pendingDir)) {
|
|
235
|
+
mkdirSync(this.pendingDir, { recursive: true });
|
|
236
|
+
}
|
|
237
|
+
}
|
|
238
|
+
};
|
|
239
|
+
|
|
133
240
|
// src/ast-utils.ts
|
|
134
241
|
import { Node as NodeUtils } from "ts-morph";
|
|
135
242
|
function parseCallChain(node) {
|
|
@@ -266,8 +373,8 @@ function transformMethodChain(chain, newBase, factoryMapper, methodMapper) {
|
|
|
266
373
|
|
|
267
374
|
// src/audit-log.ts
|
|
268
375
|
import { createHash } from "crypto";
|
|
269
|
-
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
|
|
270
|
-
import { join } from "path";
|
|
376
|
+
import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "fs";
|
|
377
|
+
import { join as join2 } from "path";
|
|
271
378
|
var AUDIT_DIR = ".schemashift";
|
|
272
379
|
var AUDIT_FILE = "audit-log.json";
|
|
273
380
|
var AUDIT_VERSION = 1;
|
|
@@ -275,8 +382,8 @@ var MigrationAuditLog = class {
|
|
|
275
382
|
logDir;
|
|
276
383
|
logPath;
|
|
277
384
|
constructor(projectPath) {
|
|
278
|
-
this.logDir =
|
|
279
|
-
this.logPath =
|
|
385
|
+
this.logDir = join2(projectPath, AUDIT_DIR);
|
|
386
|
+
this.logPath = join2(this.logDir, AUDIT_FILE);
|
|
280
387
|
}
|
|
281
388
|
/**
|
|
282
389
|
* Append a new entry to the audit log.
|
|
@@ -304,18 +411,19 @@ var MigrationAuditLog = class {
|
|
|
304
411
|
errorCount: params.errorCount,
|
|
305
412
|
riskScore: params.riskScore,
|
|
306
413
|
duration: params.duration,
|
|
307
|
-
user: this.getCurrentUser()
|
|
414
|
+
user: this.getCurrentUser(),
|
|
415
|
+
metadata: params.metadata || this.collectMetadata()
|
|
308
416
|
};
|
|
309
417
|
}
|
|
310
418
|
/**
|
|
311
419
|
* Read the current audit log.
|
|
312
420
|
*/
|
|
313
421
|
read() {
|
|
314
|
-
if (!
|
|
422
|
+
if (!existsSync2(this.logPath)) {
|
|
315
423
|
return { version: AUDIT_VERSION, entries: [] };
|
|
316
424
|
}
|
|
317
425
|
try {
|
|
318
|
-
const content =
|
|
426
|
+
const content = readFileSync2(this.logPath, "utf-8");
|
|
319
427
|
if (!content.trim()) {
|
|
320
428
|
return { version: AUDIT_VERSION, entries: [] };
|
|
321
429
|
}
|
|
@@ -346,17 +454,72 @@ var MigrationAuditLog = class {
|
|
|
346
454
|
migrationPaths
|
|
347
455
|
};
|
|
348
456
|
}
|
|
457
|
+
/**
|
|
458
|
+
* Export audit log as JSON string.
|
|
459
|
+
*/
|
|
460
|
+
exportJson() {
|
|
461
|
+
const log = this.read();
|
|
462
|
+
return JSON.stringify(log, null, 2);
|
|
463
|
+
}
|
|
464
|
+
/**
|
|
465
|
+
* Export audit log as CSV string.
|
|
466
|
+
*/
|
|
467
|
+
exportCsv() {
|
|
468
|
+
const log = this.read();
|
|
469
|
+
const headers = [
|
|
470
|
+
"timestamp",
|
|
471
|
+
"migrationId",
|
|
472
|
+
"filePath",
|
|
473
|
+
"action",
|
|
474
|
+
"from",
|
|
475
|
+
"to",
|
|
476
|
+
"success",
|
|
477
|
+
"warningCount",
|
|
478
|
+
"errorCount",
|
|
479
|
+
"riskScore",
|
|
480
|
+
"user",
|
|
481
|
+
"duration"
|
|
482
|
+
];
|
|
483
|
+
const rows = log.entries.map(
|
|
484
|
+
(e) => headers.map((h) => {
|
|
485
|
+
const val = e[h];
|
|
486
|
+
if (val === void 0 || val === null) return "";
|
|
487
|
+
return String(val).includes(",") ? `"${String(val)}"` : String(val);
|
|
488
|
+
}).join(",")
|
|
489
|
+
);
|
|
490
|
+
return [headers.join(","), ...rows].join("\n");
|
|
491
|
+
}
|
|
492
|
+
/**
|
|
493
|
+
* Get entries filtered by date range.
|
|
494
|
+
*/
|
|
495
|
+
getByDateRange(start, end) {
|
|
496
|
+
const log = this.read();
|
|
497
|
+
return log.entries.filter((e) => {
|
|
498
|
+
const ts = new Date(e.timestamp);
|
|
499
|
+
return ts >= start && ts <= end;
|
|
500
|
+
});
|
|
501
|
+
}
|
|
349
502
|
/**
|
|
350
503
|
* Clear the audit log.
|
|
351
504
|
*/
|
|
352
505
|
clear() {
|
|
353
506
|
this.write({ version: AUDIT_VERSION, entries: [] });
|
|
354
507
|
}
|
|
508
|
+
collectMetadata() {
|
|
509
|
+
return {
|
|
510
|
+
hostname: process.env.HOSTNAME || void 0,
|
|
511
|
+
nodeVersion: process.version,
|
|
512
|
+
ciJobId: process.env.CI_JOB_ID || process.env.GITHUB_RUN_ID || void 0,
|
|
513
|
+
ciProvider: process.env.GITHUB_ACTIONS ? "github" : process.env.GITLAB_CI ? "gitlab" : process.env.CIRCLECI ? "circleci" : process.env.JENKINS_URL ? "jenkins" : void 0,
|
|
514
|
+
gitBranch: process.env.GITHUB_REF_NAME || process.env.CI_COMMIT_BRANCH || void 0,
|
|
515
|
+
gitCommit: process.env.GITHUB_SHA || process.env.CI_COMMIT_SHA || void 0
|
|
516
|
+
};
|
|
517
|
+
}
|
|
355
518
|
write(log) {
|
|
356
|
-
if (!
|
|
357
|
-
|
|
519
|
+
if (!existsSync2(this.logDir)) {
|
|
520
|
+
mkdirSync2(this.logDir, { recursive: true });
|
|
358
521
|
}
|
|
359
|
-
|
|
522
|
+
writeFileSync2(this.logPath, JSON.stringify(log, null, 2));
|
|
360
523
|
}
|
|
361
524
|
hashContent(content) {
|
|
362
525
|
return createHash("sha256").update(content).digest("hex").substring(0, 16);
|
|
@@ -789,12 +952,12 @@ var MigrationChain = class {
|
|
|
789
952
|
};
|
|
790
953
|
|
|
791
954
|
// src/compatibility.ts
|
|
792
|
-
import { existsSync as
|
|
793
|
-
import { join as
|
|
955
|
+
import { existsSync as existsSync4, readFileSync as readFileSync4 } from "fs";
|
|
956
|
+
import { join as join4 } from "path";
|
|
794
957
|
|
|
795
958
|
// src/ecosystem.ts
|
|
796
|
-
import { existsSync as
|
|
797
|
-
import { join as
|
|
959
|
+
import { existsSync as existsSync3, readFileSync as readFileSync3 } from "fs";
|
|
960
|
+
import { join as join3 } from "path";
|
|
798
961
|
var ECOSYSTEM_RULES = [
|
|
799
962
|
// ORM integrations
|
|
800
963
|
{
|
|
@@ -970,6 +1133,191 @@ var ECOSYSTEM_RULES = [
|
|
|
970
1133
|
severity: "error"
|
|
971
1134
|
})
|
|
972
1135
|
},
|
|
1136
|
+
// Zod-based HTTP/API clients
|
|
1137
|
+
{
|
|
1138
|
+
package: "zodios",
|
|
1139
|
+
category: "api",
|
|
1140
|
+
migrations: ["zod-v3->v4"],
|
|
1141
|
+
check: () => ({
|
|
1142
|
+
issue: "Zodios uses Zod schemas for API contract definitions. Zod v4 type changes may break contracts.",
|
|
1143
|
+
suggestion: "Upgrade Zodios to a Zod v4-compatible version and verify all API contracts.",
|
|
1144
|
+
severity: "warning",
|
|
1145
|
+
upgradeCommand: "npm install @zodios/core@latest"
|
|
1146
|
+
})
|
|
1147
|
+
},
|
|
1148
|
+
{
|
|
1149
|
+
package: "@zodios/core",
|
|
1150
|
+
category: "api",
|
|
1151
|
+
migrations: ["zod-v3->v4"],
|
|
1152
|
+
check: () => ({
|
|
1153
|
+
issue: "@zodios/core uses Zod schemas for API contract definitions. Zod v4 type changes may break contracts.",
|
|
1154
|
+
suggestion: "Upgrade @zodios/core to a Zod v4-compatible version and verify all API contracts.",
|
|
1155
|
+
severity: "warning",
|
|
1156
|
+
upgradeCommand: "npm install @zodios/core@latest"
|
|
1157
|
+
})
|
|
1158
|
+
},
|
|
1159
|
+
{
|
|
1160
|
+
package: "@ts-rest/core",
|
|
1161
|
+
category: "api",
|
|
1162
|
+
migrations: ["zod-v3->v4"],
|
|
1163
|
+
check: () => ({
|
|
1164
|
+
issue: "@ts-rest/core uses Zod for contract definitions. Zod v4 type incompatibilities may break runtime validation.",
|
|
1165
|
+
suggestion: "Upgrade @ts-rest/core to a version with Zod v4 support.",
|
|
1166
|
+
severity: "warning",
|
|
1167
|
+
upgradeCommand: "npm install @ts-rest/core@latest"
|
|
1168
|
+
})
|
|
1169
|
+
},
|
|
1170
|
+
{
|
|
1171
|
+
package: "trpc-openapi",
|
|
1172
|
+
category: "openapi",
|
|
1173
|
+
migrations: ["zod-v3->v4"],
|
|
1174
|
+
check: () => ({
|
|
1175
|
+
issue: "trpc-openapi needs a v4-compatible version for Zod v4.",
|
|
1176
|
+
suggestion: "Check for a Zod v4-compatible version of trpc-openapi before upgrading.",
|
|
1177
|
+
severity: "warning",
|
|
1178
|
+
upgradeCommand: "npm install trpc-openapi@latest"
|
|
1179
|
+
})
|
|
1180
|
+
},
|
|
1181
|
+
// Form data and URL state libraries
|
|
1182
|
+
{
|
|
1183
|
+
package: "zod-form-data",
|
|
1184
|
+
category: "form",
|
|
1185
|
+
migrations: ["zod-v3->v4"],
|
|
1186
|
+
check: () => ({
|
|
1187
|
+
issue: "zod-form-data relies on Zod v3 internals (_def) which moved to _zod.def in v4.",
|
|
1188
|
+
suggestion: "Upgrade zod-form-data to a Zod v4-compatible version.",
|
|
1189
|
+
severity: "error",
|
|
1190
|
+
upgradeCommand: "npm install zod-form-data@latest"
|
|
1191
|
+
})
|
|
1192
|
+
},
|
|
1193
|
+
{
|
|
1194
|
+
package: "@conform-to/zod",
|
|
1195
|
+
category: "form",
|
|
1196
|
+
migrations: ["zod-v3->v4"],
|
|
1197
|
+
check: () => ({
|
|
1198
|
+
issue: "@conform-to/zod may have Zod v4 compatibility issues.",
|
|
1199
|
+
suggestion: "Upgrade @conform-to/zod to the latest version with Zod v4 support.",
|
|
1200
|
+
severity: "warning",
|
|
1201
|
+
upgradeCommand: "npm install @conform-to/zod@latest"
|
|
1202
|
+
})
|
|
1203
|
+
},
|
|
1204
|
+
{
|
|
1205
|
+
package: "nuqs",
|
|
1206
|
+
category: "validation-util",
|
|
1207
|
+
migrations: ["zod-v3->v4"],
|
|
1208
|
+
check: () => ({
|
|
1209
|
+
issue: "nuqs uses Zod for URL state parsing. Zod v4 changes may affect URL parameter validation.",
|
|
1210
|
+
suggestion: "Upgrade nuqs to a version with Zod v4 support.",
|
|
1211
|
+
severity: "warning",
|
|
1212
|
+
upgradeCommand: "npm install nuqs@latest"
|
|
1213
|
+
})
|
|
1214
|
+
},
|
|
1215
|
+
// Server action / routing integrations
|
|
1216
|
+
{
|
|
1217
|
+
package: "next-safe-action",
|
|
1218
|
+
category: "api",
|
|
1219
|
+
migrations: ["zod-v3->v4"],
|
|
1220
|
+
check: () => ({
|
|
1221
|
+
issue: "next-safe-action uses Zod for input validation. Zod v4 type changes may break action definitions.",
|
|
1222
|
+
suggestion: "Upgrade next-safe-action to the latest version with Zod v4 support.",
|
|
1223
|
+
severity: "warning",
|
|
1224
|
+
upgradeCommand: "npm install next-safe-action@latest"
|
|
1225
|
+
})
|
|
1226
|
+
},
|
|
1227
|
+
{
|
|
1228
|
+
package: "@tanstack/router",
|
|
1229
|
+
category: "api",
|
|
1230
|
+
migrations: ["zod-v3->v4"],
|
|
1231
|
+
check: () => ({
|
|
1232
|
+
issue: "@tanstack/router uses Zod for route parameter validation. Zod v4 changes may affect type inference.",
|
|
1233
|
+
suggestion: "Upgrade @tanstack/router to a version with Zod v4 support.",
|
|
1234
|
+
severity: "warning",
|
|
1235
|
+
upgradeCommand: "npm install @tanstack/router@latest"
|
|
1236
|
+
})
|
|
1237
|
+
},
|
|
1238
|
+
{
|
|
1239
|
+
package: "@tanstack/react-query",
|
|
1240
|
+
category: "api",
|
|
1241
|
+
migrations: ["zod-v3->v4"],
|
|
1242
|
+
check: () => ({
|
|
1243
|
+
issue: "@tanstack/react-query may use Zod for query key/param validation via integrations.",
|
|
1244
|
+
suggestion: "Verify any Zod-based query validation still works after the Zod v4 upgrade.",
|
|
1245
|
+
severity: "info"
|
|
1246
|
+
})
|
|
1247
|
+
},
|
|
1248
|
+
{
|
|
1249
|
+
package: "fastify-type-provider-zod",
|
|
1250
|
+
category: "api",
|
|
1251
|
+
migrations: ["zod-v3->v4"],
|
|
1252
|
+
check: () => ({
|
|
1253
|
+
issue: "fastify-type-provider-zod needs a Zod v4-compatible version.",
|
|
1254
|
+
suggestion: "Upgrade fastify-type-provider-zod to a version supporting Zod v4.",
|
|
1255
|
+
severity: "warning",
|
|
1256
|
+
upgradeCommand: "npm install fastify-type-provider-zod@latest"
|
|
1257
|
+
})
|
|
1258
|
+
},
|
|
1259
|
+
{
|
|
1260
|
+
package: "zod-i18n-map",
|
|
1261
|
+
category: "validation-util",
|
|
1262
|
+
migrations: ["zod-v3->v4"],
|
|
1263
|
+
check: () => ({
|
|
1264
|
+
issue: 'zod-i18n-map uses Zod v3 error map format. Error messages changed in v4 (e.g., "Required" is now descriptive).',
|
|
1265
|
+
suggestion: "Check for a Zod v4-compatible version of zod-i18n-map or update custom error maps.",
|
|
1266
|
+
severity: "warning",
|
|
1267
|
+
upgradeCommand: "npm install zod-i18n-map@latest"
|
|
1268
|
+
})
|
|
1269
|
+
},
|
|
1270
|
+
{
|
|
1271
|
+
package: "openapi-zod-client",
|
|
1272
|
+
category: "openapi",
|
|
1273
|
+
migrations: ["zod-v3->v4"],
|
|
1274
|
+
check: () => ({
|
|
1275
|
+
issue: "openapi-zod-client generates Zod v3 schemas from OpenAPI specs. Generated code may need regeneration.",
|
|
1276
|
+
suggestion: "Upgrade openapi-zod-client and regenerate schemas for Zod v4 compatibility.",
|
|
1277
|
+
severity: "warning",
|
|
1278
|
+
upgradeCommand: "npm install openapi-zod-client@latest"
|
|
1279
|
+
})
|
|
1280
|
+
},
|
|
1281
|
+
// Schema library detection for cross-library migrations
|
|
1282
|
+
{
|
|
1283
|
+
package: "@effect/schema",
|
|
1284
|
+
category: "validation-util",
|
|
1285
|
+
migrations: ["io-ts->zod"],
|
|
1286
|
+
check: () => ({
|
|
1287
|
+
issue: "@effect/schema detected \u2014 this is the successor to io-ts/fp-ts. Consider migrating to Effect Schema instead of Zod if you prefer FP patterns.",
|
|
1288
|
+
suggestion: "If using fp-ts patterns heavily, consider Effect Schema as the migration target instead of Zod.",
|
|
1289
|
+
severity: "info"
|
|
1290
|
+
})
|
|
1291
|
+
},
|
|
1292
|
+
{
|
|
1293
|
+
package: "arktype",
|
|
1294
|
+
category: "validation-util",
|
|
1295
|
+
migrations: ["zod->valibot", "zod-v3->v4"],
|
|
1296
|
+
check: (_version, migration) => {
|
|
1297
|
+
if (migration === "zod->valibot") {
|
|
1298
|
+
return {
|
|
1299
|
+
issue: "ArkType detected alongside Zod. Consider ArkType as a migration target \u2014 it offers 100x faster validation and Standard Schema support.",
|
|
1300
|
+
suggestion: "Consider migrating to ArkType for performance-critical paths, or keep Zod for ecosystem compatibility.",
|
|
1301
|
+
severity: "info"
|
|
1302
|
+
};
|
|
1303
|
+
}
|
|
1304
|
+
return {
|
|
1305
|
+
issue: "ArkType detected alongside Zod. ArkType supports Standard Schema, making it interoperable with Zod v4.",
|
|
1306
|
+
suggestion: "No action needed \u2014 ArkType and Zod v4 can coexist via Standard Schema.",
|
|
1307
|
+
severity: "info"
|
|
1308
|
+
};
|
|
1309
|
+
}
|
|
1310
|
+
},
|
|
1311
|
+
{
|
|
1312
|
+
package: "superstruct",
|
|
1313
|
+
category: "validation-util",
|
|
1314
|
+
migrations: ["yup->zod", "joi->zod"],
|
|
1315
|
+
check: () => ({
|
|
1316
|
+
issue: "Superstruct detected in the project. Consider migrating Superstruct schemas to Zod as well for a unified validation approach.",
|
|
1317
|
+
suggestion: "Use SchemaShift to migrate Superstruct schemas alongside Yup/Joi schemas.",
|
|
1318
|
+
severity: "info"
|
|
1319
|
+
})
|
|
1320
|
+
},
|
|
973
1321
|
// Additional validation utilities
|
|
974
1322
|
{
|
|
975
1323
|
package: "zod-to-json-schema",
|
|
@@ -1007,13 +1355,13 @@ var EcosystemAnalyzer = class {
|
|
|
1007
1355
|
const dependencies = [];
|
|
1008
1356
|
const warnings = [];
|
|
1009
1357
|
const blockers = [];
|
|
1010
|
-
const pkgPath =
|
|
1011
|
-
if (!
|
|
1358
|
+
const pkgPath = join3(projectPath, "package.json");
|
|
1359
|
+
if (!existsSync3(pkgPath)) {
|
|
1012
1360
|
return { dependencies, warnings, blockers };
|
|
1013
1361
|
}
|
|
1014
1362
|
let allDeps = {};
|
|
1015
1363
|
try {
|
|
1016
|
-
const pkg = JSON.parse(
|
|
1364
|
+
const pkg = JSON.parse(readFileSync3(pkgPath, "utf-8"));
|
|
1017
1365
|
allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
|
|
1018
1366
|
} catch {
|
|
1019
1367
|
return { dependencies, warnings, blockers };
|
|
@@ -1134,10 +1482,10 @@ var CompatibilityAnalyzer = class {
|
|
|
1134
1482
|
ecosystemAnalyzer = new EcosystemAnalyzer();
|
|
1135
1483
|
detectVersions(projectPath) {
|
|
1136
1484
|
const versions = [];
|
|
1137
|
-
const pkgPath =
|
|
1138
|
-
if (!
|
|
1485
|
+
const pkgPath = join4(projectPath, "package.json");
|
|
1486
|
+
if (!existsSync4(pkgPath)) return versions;
|
|
1139
1487
|
try {
|
|
1140
|
-
const pkg = JSON.parse(
|
|
1488
|
+
const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
|
|
1141
1489
|
const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
|
|
1142
1490
|
const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
|
|
1143
1491
|
for (const lib of knownLibs) {
|
|
@@ -1358,9 +1706,115 @@ async function loadConfig(configPath) {
|
|
|
1358
1706
|
};
|
|
1359
1707
|
}
|
|
1360
1708
|
|
|
1709
|
+
// src/cross-field-patterns.ts
|
|
1710
|
+
function requireIf(conditionField, requiredField) {
|
|
1711
|
+
return {
|
|
1712
|
+
name: `requireIf(${conditionField}, ${requiredField})`,
|
|
1713
|
+
description: `${requiredField} is required when ${conditionField} is truthy`,
|
|
1714
|
+
zodCode: [
|
|
1715
|
+
".superRefine((data, ctx) => {",
|
|
1716
|
+
` if (data.${conditionField} && !data.${requiredField}) {`,
|
|
1717
|
+
" ctx.addIssue({",
|
|
1718
|
+
" code: z.ZodIssueCode.custom,",
|
|
1719
|
+
` message: '${requiredField} is required when ${conditionField} is set',`,
|
|
1720
|
+
` path: ['${requiredField}'],`,
|
|
1721
|
+
" });",
|
|
1722
|
+
" }",
|
|
1723
|
+
"})"
|
|
1724
|
+
].join("\n")
|
|
1725
|
+
};
|
|
1726
|
+
}
|
|
1727
|
+
function requireOneOf(fields) {
|
|
1728
|
+
const fieldList = fields.map((f) => `'${f}'`).join(", ");
|
|
1729
|
+
const conditions = fields.map((f) => `data.${f}`).join(" || ");
|
|
1730
|
+
return {
|
|
1731
|
+
name: `requireOneOf(${fields.join(", ")})`,
|
|
1732
|
+
description: `At least one of [${fields.join(", ")}] must be provided`,
|
|
1733
|
+
zodCode: [
|
|
1734
|
+
".superRefine((data, ctx) => {",
|
|
1735
|
+
` if (!(${conditions})) {`,
|
|
1736
|
+
" ctx.addIssue({",
|
|
1737
|
+
" code: z.ZodIssueCode.custom,",
|
|
1738
|
+
` message: 'At least one of [${fields.join(", ")}] is required',`,
|
|
1739
|
+
` path: [${fieldList}],`,
|
|
1740
|
+
" });",
|
|
1741
|
+
" }",
|
|
1742
|
+
"})"
|
|
1743
|
+
].join("\n")
|
|
1744
|
+
};
|
|
1745
|
+
}
|
|
1746
|
+
function mutuallyExclusive(fields) {
|
|
1747
|
+
const checks = fields.map((f) => `(data.${f} ? 1 : 0)`).join(" + ");
|
|
1748
|
+
return {
|
|
1749
|
+
name: `mutuallyExclusive(${fields.join(", ")})`,
|
|
1750
|
+
description: `Only one of [${fields.join(", ")}] can be set at a time`,
|
|
1751
|
+
zodCode: [
|
|
1752
|
+
".superRefine((data, ctx) => {",
|
|
1753
|
+
` const count = ${checks};`,
|
|
1754
|
+
" if (count > 1) {",
|
|
1755
|
+
" ctx.addIssue({",
|
|
1756
|
+
" code: z.ZodIssueCode.custom,",
|
|
1757
|
+
` message: 'Only one of [${fields.join(", ")}] can be set at a time',`,
|
|
1758
|
+
" });",
|
|
1759
|
+
" }",
|
|
1760
|
+
"})"
|
|
1761
|
+
].join("\n")
|
|
1762
|
+
};
|
|
1763
|
+
}
|
|
1764
|
+
function dependentFields(primaryField, dependents) {
|
|
1765
|
+
const checks = dependents.map(
|
|
1766
|
+
(f) => ` if (!data.${f}) {
|
|
1767
|
+
ctx.addIssue({ code: z.ZodIssueCode.custom, message: '${f} is required when ${primaryField} is set', path: ['${f}'] });
|
|
1768
|
+
}`
|
|
1769
|
+
).join("\n");
|
|
1770
|
+
return {
|
|
1771
|
+
name: `dependentFields(${primaryField} -> ${dependents.join(", ")})`,
|
|
1772
|
+
description: `When ${primaryField} is set, [${dependents.join(", ")}] are required`,
|
|
1773
|
+
zodCode: [
|
|
1774
|
+
".superRefine((data, ctx) => {",
|
|
1775
|
+
` if (data.${primaryField}) {`,
|
|
1776
|
+
checks,
|
|
1777
|
+
" }",
|
|
1778
|
+
"})"
|
|
1779
|
+
].join("\n")
|
|
1780
|
+
};
|
|
1781
|
+
}
|
|
1782
|
+
function conditionalValidation(conditionField, conditionValue, targetField, validationMessage) {
|
|
1783
|
+
return {
|
|
1784
|
+
name: `conditionalValidation(${conditionField}=${conditionValue} -> ${targetField})`,
|
|
1785
|
+
description: `Validate ${targetField} when ${conditionField} equals ${conditionValue}`,
|
|
1786
|
+
zodCode: [
|
|
1787
|
+
".superRefine((data, ctx) => {",
|
|
1788
|
+
` if (data.${conditionField} === ${conditionValue} && !data.${targetField}) {`,
|
|
1789
|
+
" ctx.addIssue({",
|
|
1790
|
+
" code: z.ZodIssueCode.custom,",
|
|
1791
|
+
` message: '${validationMessage}',`,
|
|
1792
|
+
` path: ['${targetField}'],`,
|
|
1793
|
+
" });",
|
|
1794
|
+
" }",
|
|
1795
|
+
"})"
|
|
1796
|
+
].join("\n")
|
|
1797
|
+
};
|
|
1798
|
+
}
|
|
1799
|
+
function suggestCrossFieldPattern(whenCode) {
|
|
1800
|
+
const booleanMatch = whenCode.match(/\.when\(['"](\w+)['"]\s*,\s*\{[^}]*is:\s*true/);
|
|
1801
|
+
if (booleanMatch?.[1]) {
|
|
1802
|
+
const field = booleanMatch[1];
|
|
1803
|
+
return requireIf(field, "targetField");
|
|
1804
|
+
}
|
|
1805
|
+
const multiFieldMatch = whenCode.match(/\.when\(\[([^\]]+)\]/);
|
|
1806
|
+
if (multiFieldMatch?.[1]) {
|
|
1807
|
+
const fields = multiFieldMatch[1].split(",").map((f) => f.trim().replace(/['"]/g, "")).filter(Boolean);
|
|
1808
|
+
if (fields.length > 1) {
|
|
1809
|
+
return dependentFields(fields[0] ?? "primary", fields.slice(1));
|
|
1810
|
+
}
|
|
1811
|
+
}
|
|
1812
|
+
return null;
|
|
1813
|
+
}
|
|
1814
|
+
|
|
1361
1815
|
// src/dependency-graph.ts
|
|
1362
|
-
import { existsSync as
|
|
1363
|
-
import { join as
|
|
1816
|
+
import { existsSync as existsSync5, readdirSync as readdirSync2, readFileSync as readFileSync5 } from "fs";
|
|
1817
|
+
import { join as join5, resolve } from "path";
|
|
1364
1818
|
var SchemaDependencyResolver = class {
|
|
1365
1819
|
resolve(project, filePaths) {
|
|
1366
1820
|
const fileSet = new Set(filePaths);
|
|
@@ -1487,38 +1941,38 @@ function computeParallelBatches(packages, suggestedOrder) {
|
|
|
1487
1941
|
}
|
|
1488
1942
|
var MonorepoResolver = class {
|
|
1489
1943
|
detect(projectPath) {
|
|
1490
|
-
const pkgPath =
|
|
1491
|
-
if (
|
|
1944
|
+
const pkgPath = join5(projectPath, "package.json");
|
|
1945
|
+
if (existsSync5(pkgPath)) {
|
|
1492
1946
|
try {
|
|
1493
|
-
const pkg = JSON.parse(
|
|
1947
|
+
const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
|
|
1494
1948
|
if (pkg.workspaces) return true;
|
|
1495
1949
|
} catch {
|
|
1496
1950
|
}
|
|
1497
1951
|
}
|
|
1498
|
-
if (
|
|
1952
|
+
if (existsSync5(join5(projectPath, "pnpm-workspace.yaml"))) return true;
|
|
1499
1953
|
return false;
|
|
1500
1954
|
}
|
|
1501
1955
|
/**
|
|
1502
1956
|
* Detect which workspace manager is being used.
|
|
1503
1957
|
*/
|
|
1504
1958
|
detectManager(projectPath) {
|
|
1505
|
-
if (
|
|
1506
|
-
const pkgPath =
|
|
1507
|
-
if (
|
|
1959
|
+
if (existsSync5(join5(projectPath, "pnpm-workspace.yaml"))) return "pnpm";
|
|
1960
|
+
const pkgPath = join5(projectPath, "package.json");
|
|
1961
|
+
if (existsSync5(pkgPath)) {
|
|
1508
1962
|
try {
|
|
1509
|
-
const pkg = JSON.parse(
|
|
1963
|
+
const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
|
|
1510
1964
|
if (pkg.packageManager?.startsWith("yarn")) return "yarn";
|
|
1511
1965
|
if (pkg.packageManager?.startsWith("pnpm")) return "pnpm";
|
|
1512
1966
|
} catch {
|
|
1513
1967
|
}
|
|
1514
1968
|
}
|
|
1515
|
-
if (
|
|
1516
|
-
if (
|
|
1969
|
+
if (existsSync5(join5(projectPath, "pnpm-lock.yaml"))) return "pnpm";
|
|
1970
|
+
if (existsSync5(join5(projectPath, "yarn.lock"))) return "yarn";
|
|
1517
1971
|
return "npm";
|
|
1518
1972
|
}
|
|
1519
1973
|
analyze(projectPath) {
|
|
1520
|
-
const pkgPath =
|
|
1521
|
-
if (!
|
|
1974
|
+
const pkgPath = join5(projectPath, "package.json");
|
|
1975
|
+
if (!existsSync5(pkgPath)) {
|
|
1522
1976
|
return { isMonorepo: false, packages: [], suggestedOrder: [] };
|
|
1523
1977
|
}
|
|
1524
1978
|
let workspaceGlobs;
|
|
@@ -1533,10 +1987,10 @@ var MonorepoResolver = class {
|
|
|
1533
1987
|
const packages = [];
|
|
1534
1988
|
const resolvedDirs = this.resolveWorkspaceDirs(projectPath, workspaceGlobs);
|
|
1535
1989
|
for (const dir of resolvedDirs) {
|
|
1536
|
-
const wsPkgPath =
|
|
1537
|
-
if (!
|
|
1990
|
+
const wsPkgPath = join5(dir, "package.json");
|
|
1991
|
+
if (!existsSync5(wsPkgPath)) continue;
|
|
1538
1992
|
try {
|
|
1539
|
-
const wsPkg = JSON.parse(
|
|
1993
|
+
const wsPkg = JSON.parse(readFileSync5(wsPkgPath, "utf-8"));
|
|
1540
1994
|
if (!wsPkg.name) continue;
|
|
1541
1995
|
const allDeps = { ...wsPkg.dependencies, ...wsPkg.devDependencies };
|
|
1542
1996
|
const depNames = Object.keys(allDeps);
|
|
@@ -1580,14 +2034,14 @@ var MonorepoResolver = class {
|
|
|
1580
2034
|
* Supports: npm/yarn workspaces (package.json), pnpm-workspace.yaml
|
|
1581
2035
|
*/
|
|
1582
2036
|
resolveWorkspaceGlobs(projectPath) {
|
|
1583
|
-
const pnpmPath =
|
|
1584
|
-
if (
|
|
2037
|
+
const pnpmPath = join5(projectPath, "pnpm-workspace.yaml");
|
|
2038
|
+
if (existsSync5(pnpmPath)) {
|
|
1585
2039
|
return this.parsePnpmWorkspace(pnpmPath);
|
|
1586
2040
|
}
|
|
1587
|
-
const pkgPath =
|
|
1588
|
-
if (
|
|
2041
|
+
const pkgPath = join5(projectPath, "package.json");
|
|
2042
|
+
if (existsSync5(pkgPath)) {
|
|
1589
2043
|
try {
|
|
1590
|
-
const pkg = JSON.parse(
|
|
2044
|
+
const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
|
|
1591
2045
|
if (pkg.workspaces) {
|
|
1592
2046
|
return Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
|
|
1593
2047
|
}
|
|
@@ -1606,7 +2060,7 @@ var MonorepoResolver = class {
|
|
|
1606
2060
|
* ```
|
|
1607
2061
|
*/
|
|
1608
2062
|
parsePnpmWorkspace(filePath) {
|
|
1609
|
-
const content =
|
|
2063
|
+
const content = readFileSync5(filePath, "utf-8");
|
|
1610
2064
|
const globs = [];
|
|
1611
2065
|
let inPackages = false;
|
|
1612
2066
|
for (const line of content.split("\n")) {
|
|
@@ -1632,13 +2086,13 @@ var MonorepoResolver = class {
|
|
|
1632
2086
|
for (const glob of globs) {
|
|
1633
2087
|
const clean = glob.replace(/\/?\*$/, "");
|
|
1634
2088
|
const base = resolve(projectPath, clean);
|
|
1635
|
-
if (!
|
|
2089
|
+
if (!existsSync5(base)) continue;
|
|
1636
2090
|
if (glob.endsWith("*")) {
|
|
1637
2091
|
try {
|
|
1638
|
-
const entries =
|
|
2092
|
+
const entries = readdirSync2(base, { withFileTypes: true });
|
|
1639
2093
|
for (const entry of entries) {
|
|
1640
2094
|
if (entry.isDirectory()) {
|
|
1641
|
-
dirs.push(
|
|
2095
|
+
dirs.push(join5(base, entry.name));
|
|
1642
2096
|
}
|
|
1643
2097
|
}
|
|
1644
2098
|
} catch {
|
|
@@ -1652,8 +2106,8 @@ var MonorepoResolver = class {
|
|
|
1652
2106
|
};
|
|
1653
2107
|
|
|
1654
2108
|
// src/detailed-analyzer.ts
|
|
1655
|
-
import { existsSync as
|
|
1656
|
-
import { join as
|
|
2109
|
+
import { existsSync as existsSync6, readFileSync as readFileSync6 } from "fs";
|
|
2110
|
+
import { join as join6 } from "path";
|
|
1657
2111
|
var COMPLEXITY_CHAIN_WEIGHT = 2;
|
|
1658
2112
|
var COMPLEXITY_DEPTH_WEIGHT = 3;
|
|
1659
2113
|
var COMPLEXITY_VALIDATION_WEIGHT = 1;
|
|
@@ -1718,10 +2172,10 @@ var DetailedAnalyzer = class {
|
|
|
1718
2172
|
}
|
|
1719
2173
|
detectLibraryVersions(projectPath) {
|
|
1720
2174
|
const versions = [];
|
|
1721
|
-
const pkgPath =
|
|
1722
|
-
if (!
|
|
2175
|
+
const pkgPath = join6(projectPath, "package.json");
|
|
2176
|
+
if (!existsSync6(pkgPath)) return versions;
|
|
1723
2177
|
try {
|
|
1724
|
-
const pkg = JSON.parse(
|
|
2178
|
+
const pkg = JSON.parse(readFileSync6(pkgPath, "utf-8"));
|
|
1725
2179
|
const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
|
|
1726
2180
|
const allDeps = {
|
|
1727
2181
|
...pkg.dependencies,
|
|
@@ -1894,36 +2348,195 @@ var DetailedAnalyzer = class {
|
|
|
1894
2348
|
}
|
|
1895
2349
|
};
|
|
1896
2350
|
|
|
1897
|
-
// src/
|
|
1898
|
-
|
|
1899
|
-
|
|
1900
|
-
|
|
1901
|
-
|
|
1902
|
-
|
|
1903
|
-
|
|
1904
|
-
|
|
2351
|
+
// src/drift-detector.ts
|
|
2352
|
+
import { createHash as createHash2 } from "crypto";
|
|
2353
|
+
import { existsSync as existsSync7, mkdirSync as mkdirSync3, readFileSync as readFileSync7, writeFileSync as writeFileSync3 } from "fs";
|
|
2354
|
+
import { join as join7, relative } from "path";
|
|
2355
|
+
var SNAPSHOT_DIR = ".schemashift";
|
|
2356
|
+
var SNAPSHOT_FILE = "schema-snapshot.json";
|
|
2357
|
+
var SNAPSHOT_VERSION = 1;
|
|
2358
|
+
var DriftDetector = class {
|
|
2359
|
+
snapshotDir;
|
|
2360
|
+
snapshotPath;
|
|
2361
|
+
constructor(projectPath) {
|
|
2362
|
+
this.snapshotDir = join7(projectPath, SNAPSHOT_DIR);
|
|
2363
|
+
this.snapshotPath = join7(this.snapshotDir, SNAPSHOT_FILE);
|
|
2364
|
+
}
|
|
2365
|
+
/**
|
|
2366
|
+
* Take a snapshot of the current schema state
|
|
2367
|
+
*/
|
|
2368
|
+
snapshot(files, projectPath) {
|
|
2369
|
+
const schemas = [];
|
|
2370
|
+
for (const filePath of files) {
|
|
2371
|
+
if (!existsSync7(filePath)) continue;
|
|
2372
|
+
const content = readFileSync7(filePath, "utf-8");
|
|
2373
|
+
const library = this.detectLibraryFromContent(content);
|
|
2374
|
+
if (library === "unknown") continue;
|
|
2375
|
+
const schemaNames = this.extractSchemaNames(content);
|
|
2376
|
+
schemas.push({
|
|
2377
|
+
filePath: relative(projectPath, filePath),
|
|
2378
|
+
library,
|
|
2379
|
+
contentHash: this.hashContent(content),
|
|
2380
|
+
schemaCount: schemaNames.length,
|
|
2381
|
+
schemaNames
|
|
2382
|
+
});
|
|
1905
2383
|
}
|
|
1906
|
-
|
|
1907
|
-
|
|
1908
|
-
|
|
1909
|
-
|
|
1910
|
-
|
|
1911
|
-
|
|
1912
|
-
|
|
2384
|
+
const snapshot = {
|
|
2385
|
+
version: SNAPSHOT_VERSION,
|
|
2386
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString(),
|
|
2387
|
+
projectPath,
|
|
2388
|
+
schemas
|
|
2389
|
+
};
|
|
2390
|
+
return snapshot;
|
|
2391
|
+
}
|
|
2392
|
+
/**
|
|
2393
|
+
* Save a snapshot to disk
|
|
2394
|
+
*/
|
|
2395
|
+
saveSnapshot(snapshot) {
|
|
2396
|
+
if (!existsSync7(this.snapshotDir)) {
|
|
2397
|
+
mkdirSync3(this.snapshotDir, { recursive: true });
|
|
1913
2398
|
}
|
|
1914
|
-
|
|
1915
|
-
|
|
1916
|
-
|
|
1917
|
-
|
|
1918
|
-
|
|
1919
|
-
|
|
1920
|
-
|
|
2399
|
+
writeFileSync3(this.snapshotPath, JSON.stringify(snapshot, null, 2));
|
|
2400
|
+
}
|
|
2401
|
+
/**
|
|
2402
|
+
* Load saved snapshot from disk
|
|
2403
|
+
*/
|
|
2404
|
+
loadSnapshot() {
|
|
2405
|
+
if (!existsSync7(this.snapshotPath)) {
|
|
2406
|
+
return null;
|
|
1921
2407
|
}
|
|
1922
|
-
|
|
1923
|
-
|
|
1924
|
-
|
|
1925
|
-
|
|
1926
|
-
|
|
2408
|
+
try {
|
|
2409
|
+
const content = readFileSync7(this.snapshotPath, "utf-8");
|
|
2410
|
+
return JSON.parse(content);
|
|
2411
|
+
} catch {
|
|
2412
|
+
return null;
|
|
2413
|
+
}
|
|
2414
|
+
}
|
|
2415
|
+
/**
|
|
2416
|
+
* Compare current state against saved snapshot
|
|
2417
|
+
*/
|
|
2418
|
+
detect(currentFiles, projectPath) {
|
|
2419
|
+
const saved = this.loadSnapshot();
|
|
2420
|
+
if (!saved) {
|
|
2421
|
+
return {
|
|
2422
|
+
hasDrift: false,
|
|
2423
|
+
added: [],
|
|
2424
|
+
removed: [],
|
|
2425
|
+
modified: [],
|
|
2426
|
+
unchanged: 0,
|
|
2427
|
+
totalFiles: 0,
|
|
2428
|
+
snapshotTimestamp: ""
|
|
2429
|
+
};
|
|
2430
|
+
}
|
|
2431
|
+
const current = this.snapshot(currentFiles, projectPath);
|
|
2432
|
+
return this.compareSnapshots(saved, current);
|
|
2433
|
+
}
|
|
2434
|
+
/**
|
|
2435
|
+
* Compare two snapshots and return drift results
|
|
2436
|
+
*/
|
|
2437
|
+
compareSnapshots(baseline, current) {
|
|
2438
|
+
const baselineMap = new Map(baseline.schemas.map((s) => [s.filePath, s]));
|
|
2439
|
+
const currentMap = new Map(current.schemas.map((s) => [s.filePath, s]));
|
|
2440
|
+
const added = [];
|
|
2441
|
+
const removed = [];
|
|
2442
|
+
const modified = [];
|
|
2443
|
+
let unchanged = 0;
|
|
2444
|
+
for (const [path, currentFile] of currentMap) {
|
|
2445
|
+
const baselineFile = baselineMap.get(path);
|
|
2446
|
+
if (!baselineFile) {
|
|
2447
|
+
added.push(currentFile);
|
|
2448
|
+
} else if (currentFile.contentHash !== baselineFile.contentHash) {
|
|
2449
|
+
const addedSchemas = currentFile.schemaNames.filter(
|
|
2450
|
+
(n) => !baselineFile.schemaNames.includes(n)
|
|
2451
|
+
);
|
|
2452
|
+
const removedSchemas = baselineFile.schemaNames.filter(
|
|
2453
|
+
(n) => !currentFile.schemaNames.includes(n)
|
|
2454
|
+
);
|
|
2455
|
+
modified.push({
|
|
2456
|
+
filePath: path,
|
|
2457
|
+
library: currentFile.library,
|
|
2458
|
+
previousHash: baselineFile.contentHash,
|
|
2459
|
+
currentHash: currentFile.contentHash,
|
|
2460
|
+
previousSchemaCount: baselineFile.schemaCount,
|
|
2461
|
+
currentSchemaCount: currentFile.schemaCount,
|
|
2462
|
+
addedSchemas,
|
|
2463
|
+
removedSchemas
|
|
2464
|
+
});
|
|
2465
|
+
} else {
|
|
2466
|
+
unchanged++;
|
|
2467
|
+
}
|
|
2468
|
+
}
|
|
2469
|
+
for (const [path, baselineFile] of baselineMap) {
|
|
2470
|
+
if (!currentMap.has(path)) {
|
|
2471
|
+
removed.push(baselineFile);
|
|
2472
|
+
}
|
|
2473
|
+
}
|
|
2474
|
+
return {
|
|
2475
|
+
hasDrift: added.length > 0 || removed.length > 0 || modified.length > 0,
|
|
2476
|
+
added,
|
|
2477
|
+
removed,
|
|
2478
|
+
modified,
|
|
2479
|
+
unchanged,
|
|
2480
|
+
totalFiles: currentMap.size,
|
|
2481
|
+
snapshotTimestamp: baseline.timestamp
|
|
2482
|
+
};
|
|
2483
|
+
}
|
|
2484
|
+
extractSchemaNames(content) {
|
|
2485
|
+
const names = [];
|
|
2486
|
+
const pattern = /(?:const|let|var)\s+(\w+)\s*=\s*(?:z\.|yup\.|Joi\.|t\.|v\.|type\(|object\(|string\(|S\.)/g;
|
|
2487
|
+
for (const match of content.matchAll(pattern)) {
|
|
2488
|
+
if (match[1]) names.push(match[1]);
|
|
2489
|
+
}
|
|
2490
|
+
return names;
|
|
2491
|
+
}
|
|
2492
|
+
detectLibraryFromContent(content) {
|
|
2493
|
+
if (/from\s*['"]zod['"]/.test(content) || /\bz\./.test(content)) return "zod";
|
|
2494
|
+
if (/from\s*['"]yup['"]/.test(content) || /\byup\./.test(content)) return "yup";
|
|
2495
|
+
if (/from\s*['"]joi['"]/.test(content) || /\bJoi\./.test(content)) return "joi";
|
|
2496
|
+
if (/from\s*['"]io-ts['"]/.test(content) || /\bt\./.test(content) && /from\s*['"]io-ts/.test(content))
|
|
2497
|
+
return "io-ts";
|
|
2498
|
+
if (/from\s*['"]valibot['"]/.test(content) || /\bv\./.test(content) && /from\s*['"]valibot/.test(content))
|
|
2499
|
+
return "valibot";
|
|
2500
|
+
if (/from\s*['"]arktype['"]/.test(content)) return "arktype";
|
|
2501
|
+
if (/from\s*['"]superstruct['"]/.test(content)) return "superstruct";
|
|
2502
|
+
if (/from\s*['"]@effect\/schema['"]/.test(content)) return "effect";
|
|
2503
|
+
return "unknown";
|
|
2504
|
+
}
|
|
2505
|
+
hashContent(content) {
|
|
2506
|
+
return createHash2("sha256").update(content).digest("hex").substring(0, 16);
|
|
2507
|
+
}
|
|
2508
|
+
};
|
|
2509
|
+
|
|
2510
|
+
// src/form-resolver-migrator.ts
|
|
2511
|
+
var RESOLVER_MAPPINGS = {
|
|
2512
|
+
"yup->zod": [
|
|
2513
|
+
{
|
|
2514
|
+
fromImport: "@hookform/resolvers/yup",
|
|
2515
|
+
toImport: "@hookform/resolvers/zod",
|
|
2516
|
+
fromResolver: "yupResolver",
|
|
2517
|
+
toResolver: "zodResolver"
|
|
2518
|
+
}
|
|
2519
|
+
],
|
|
2520
|
+
"joi->zod": [
|
|
2521
|
+
{
|
|
2522
|
+
fromImport: "@hookform/resolvers/joi",
|
|
2523
|
+
toImport: "@hookform/resolvers/zod",
|
|
2524
|
+
fromResolver: "joiResolver",
|
|
2525
|
+
toResolver: "zodResolver"
|
|
2526
|
+
}
|
|
2527
|
+
],
|
|
2528
|
+
"zod->valibot": [
|
|
2529
|
+
{
|
|
2530
|
+
fromImport: "@hookform/resolvers/zod",
|
|
2531
|
+
toImport: "@hookform/resolvers/valibot",
|
|
2532
|
+
fromResolver: "zodResolver",
|
|
2533
|
+
toResolver: "valibotResolver"
|
|
2534
|
+
}
|
|
2535
|
+
]
|
|
2536
|
+
};
|
|
2537
|
+
var TODO_PATTERNS = [
|
|
2538
|
+
{
|
|
2539
|
+
pattern: /from\s+['"]formik['"]/,
|
|
1927
2540
|
comment: "/* TODO(schemashift): Formik is unmaintained. Consider migrating to React Hook Form with zodResolver */"
|
|
1928
2541
|
},
|
|
1929
2542
|
{
|
|
@@ -2255,17 +2868,676 @@ var GovernanceEngine = class {
|
|
|
2255
2868
|
}
|
|
2256
2869
|
};
|
|
2257
2870
|
|
|
2871
|
+
// src/governance-fixer.ts
|
|
2872
|
+
var GovernanceFixer = class {
|
|
2873
|
+
defaultMaxLength = 1e4;
|
|
2874
|
+
/**
|
|
2875
|
+
* Set the default max length appended by the require-max-length fix.
|
|
2876
|
+
*/
|
|
2877
|
+
setDefaultMaxLength(length) {
|
|
2878
|
+
this.defaultMaxLength = length;
|
|
2879
|
+
}
|
|
2880
|
+
/**
|
|
2881
|
+
* Check if a violation is auto-fixable.
|
|
2882
|
+
*/
|
|
2883
|
+
canFix(violation) {
|
|
2884
|
+
return [
|
|
2885
|
+
"no-any-schemas",
|
|
2886
|
+
"require-descriptions",
|
|
2887
|
+
"require-max-length",
|
|
2888
|
+
"naming-convention",
|
|
2889
|
+
"no-any",
|
|
2890
|
+
"require-description",
|
|
2891
|
+
"required-validations",
|
|
2892
|
+
"require-safeParse"
|
|
2893
|
+
].includes(violation.rule);
|
|
2894
|
+
}
|
|
2895
|
+
/**
|
|
2896
|
+
* Fix a single violation in a source file.
|
|
2897
|
+
* Returns the fixed code for the entire file.
|
|
2898
|
+
*/
|
|
2899
|
+
fix(violation, sourceCode) {
|
|
2900
|
+
switch (violation.rule) {
|
|
2901
|
+
case "no-any-schemas":
|
|
2902
|
+
case "no-any":
|
|
2903
|
+
return this.fixNoAny(violation, sourceCode);
|
|
2904
|
+
case "require-descriptions":
|
|
2905
|
+
case "require-description":
|
|
2906
|
+
return this.fixRequireDescription(violation, sourceCode);
|
|
2907
|
+
case "require-max-length":
|
|
2908
|
+
case "required-validations":
|
|
2909
|
+
return this.fixRequireMaxLength(violation, sourceCode);
|
|
2910
|
+
case "naming-convention":
|
|
2911
|
+
return this.fixNamingConvention(violation, sourceCode);
|
|
2912
|
+
case "require-safeParse":
|
|
2913
|
+
return this.fixRequireSafeParse(violation, sourceCode);
|
|
2914
|
+
default:
|
|
2915
|
+
return {
|
|
2916
|
+
success: false,
|
|
2917
|
+
explanation: `No auto-fix available for rule: ${violation.rule}`,
|
|
2918
|
+
rule: violation.rule,
|
|
2919
|
+
lineNumber: violation.lineNumber
|
|
2920
|
+
};
|
|
2921
|
+
}
|
|
2922
|
+
}
|
|
2923
|
+
/**
|
|
2924
|
+
* Fix all fixable violations in a source file.
|
|
2925
|
+
* Applies fixes from bottom to top to preserve line numbers.
|
|
2926
|
+
*/
|
|
2927
|
+
fixAll(violations, sourceCode) {
|
|
2928
|
+
const fixable = violations.filter((v) => this.canFix(v));
|
|
2929
|
+
const results = [];
|
|
2930
|
+
let currentCode = sourceCode;
|
|
2931
|
+
let fixed = 0;
|
|
2932
|
+
const sorted = [...fixable].sort((a, b) => b.lineNumber - a.lineNumber);
|
|
2933
|
+
for (const violation of sorted) {
|
|
2934
|
+
const result = this.fix(violation, currentCode);
|
|
2935
|
+
results.push(result);
|
|
2936
|
+
if (result.success && result.fixedCode) {
|
|
2937
|
+
currentCode = result.fixedCode;
|
|
2938
|
+
fixed++;
|
|
2939
|
+
}
|
|
2940
|
+
}
|
|
2941
|
+
return {
|
|
2942
|
+
totalViolations: violations.length,
|
|
2943
|
+
fixed,
|
|
2944
|
+
skipped: violations.length - fixed,
|
|
2945
|
+
results
|
|
2946
|
+
};
|
|
2947
|
+
}
|
|
2948
|
+
fixNoAny(violation, sourceCode) {
|
|
2949
|
+
const lines = sourceCode.split("\n");
|
|
2950
|
+
const lineIndex = violation.lineNumber - 1;
|
|
2951
|
+
const line = lines[lineIndex];
|
|
2952
|
+
if (!line) {
|
|
2953
|
+
return {
|
|
2954
|
+
success: false,
|
|
2955
|
+
explanation: `Line ${violation.lineNumber} not found`,
|
|
2956
|
+
rule: violation.rule,
|
|
2957
|
+
lineNumber: violation.lineNumber
|
|
2958
|
+
};
|
|
2959
|
+
}
|
|
2960
|
+
let fixedLine = line;
|
|
2961
|
+
let explanation = "";
|
|
2962
|
+
if (/\bz\.any\(\)/.test(line)) {
|
|
2963
|
+
fixedLine = line.replace(/\bz\.any\(\)/, "z.unknown()");
|
|
2964
|
+
explanation = "Replaced z.any() with z.unknown() for type safety";
|
|
2965
|
+
} else if (/\byup\.mixed\(\)/.test(line)) {
|
|
2966
|
+
fixedLine = line.replace(/\byup\.mixed\(\)/, "yup.mixed().required()");
|
|
2967
|
+
explanation = "Added .required() constraint to yup.mixed()";
|
|
2968
|
+
} else if (/\bt\.any\b/.test(line)) {
|
|
2969
|
+
fixedLine = line.replace(/\bt\.any\b/, "t.unknown");
|
|
2970
|
+
explanation = "Replaced t.any with t.unknown for type safety";
|
|
2971
|
+
} else if (/\bv\.any\(\)/.test(line)) {
|
|
2972
|
+
fixedLine = line.replace(/\bv\.any\(\)/, "v.unknown()");
|
|
2973
|
+
explanation = "Replaced v.any() with v.unknown() for type safety";
|
|
2974
|
+
} else {
|
|
2975
|
+
return {
|
|
2976
|
+
success: false,
|
|
2977
|
+
explanation: "Could not identify any-type pattern to fix",
|
|
2978
|
+
rule: violation.rule,
|
|
2979
|
+
lineNumber: violation.lineNumber
|
|
2980
|
+
};
|
|
2981
|
+
}
|
|
2982
|
+
lines[lineIndex] = fixedLine;
|
|
2983
|
+
return {
|
|
2984
|
+
success: true,
|
|
2985
|
+
fixedCode: lines.join("\n"),
|
|
2986
|
+
explanation,
|
|
2987
|
+
rule: violation.rule,
|
|
2988
|
+
lineNumber: violation.lineNumber
|
|
2989
|
+
};
|
|
2990
|
+
}
|
|
2991
|
+
fixRequireDescription(violation, sourceCode) {
|
|
2992
|
+
const lines = sourceCode.split("\n");
|
|
2993
|
+
const lineIndex = violation.lineNumber - 1;
|
|
2994
|
+
const line = lines[lineIndex];
|
|
2995
|
+
if (!line) {
|
|
2996
|
+
return {
|
|
2997
|
+
success: false,
|
|
2998
|
+
explanation: `Line ${violation.lineNumber} not found`,
|
|
2999
|
+
rule: violation.rule,
|
|
3000
|
+
lineNumber: violation.lineNumber
|
|
3001
|
+
};
|
|
3002
|
+
}
|
|
3003
|
+
let endLineIndex = lineIndex;
|
|
3004
|
+
for (let i = lineIndex; i < lines.length && i < lineIndex + 20; i++) {
|
|
3005
|
+
if (lines[i]?.includes(";")) {
|
|
3006
|
+
endLineIndex = i;
|
|
3007
|
+
break;
|
|
3008
|
+
}
|
|
3009
|
+
}
|
|
3010
|
+
const endLine = lines[endLineIndex] ?? "";
|
|
3011
|
+
const schemaName = violation.schemaName || "schema";
|
|
3012
|
+
const description = `${schemaName} schema`;
|
|
3013
|
+
const semicolonIndex = endLine.lastIndexOf(";");
|
|
3014
|
+
if (semicolonIndex >= 0) {
|
|
3015
|
+
lines[endLineIndex] = `${endLine.slice(0, semicolonIndex)}.describe('${description}')${endLine.slice(semicolonIndex)}`;
|
|
3016
|
+
} else {
|
|
3017
|
+
lines[endLineIndex] = `${endLine}.describe('${description}')`;
|
|
3018
|
+
}
|
|
3019
|
+
return {
|
|
3020
|
+
success: true,
|
|
3021
|
+
fixedCode: lines.join("\n"),
|
|
3022
|
+
explanation: `Added .describe('${description}') to ${schemaName}`,
|
|
3023
|
+
rule: violation.rule,
|
|
3024
|
+
lineNumber: violation.lineNumber
|
|
3025
|
+
};
|
|
3026
|
+
}
|
|
3027
|
+
fixRequireMaxLength(violation, sourceCode) {
|
|
3028
|
+
const lines = sourceCode.split("\n");
|
|
3029
|
+
const lineIndex = violation.lineNumber - 1;
|
|
3030
|
+
const line = lines[lineIndex];
|
|
3031
|
+
if (!line) {
|
|
3032
|
+
return {
|
|
3033
|
+
success: false,
|
|
3034
|
+
explanation: `Line ${violation.lineNumber} not found`,
|
|
3035
|
+
rule: violation.rule,
|
|
3036
|
+
lineNumber: violation.lineNumber
|
|
3037
|
+
};
|
|
3038
|
+
}
|
|
3039
|
+
if (/z\.string\(\)/.test(line)) {
|
|
3040
|
+
lines[lineIndex] = line.replace(/z\.string\(\)/, `z.string().max(${this.defaultMaxLength})`);
|
|
3041
|
+
return {
|
|
3042
|
+
success: true,
|
|
3043
|
+
fixedCode: lines.join("\n"),
|
|
3044
|
+
explanation: `Added .max(${this.defaultMaxLength}) to string schema`,
|
|
3045
|
+
rule: violation.rule,
|
|
3046
|
+
lineNumber: violation.lineNumber
|
|
3047
|
+
};
|
|
3048
|
+
}
|
|
3049
|
+
return {
|
|
3050
|
+
success: false,
|
|
3051
|
+
explanation: "Could not find z.string() pattern to fix on this line",
|
|
3052
|
+
rule: violation.rule,
|
|
3053
|
+
lineNumber: violation.lineNumber
|
|
3054
|
+
};
|
|
3055
|
+
}
|
|
3056
|
+
fixNamingConvention(violation, sourceCode) {
|
|
3057
|
+
const schemaName = violation.schemaName;
|
|
3058
|
+
if (!schemaName) {
|
|
3059
|
+
return {
|
|
3060
|
+
success: false,
|
|
3061
|
+
explanation: "No schema name available for renaming",
|
|
3062
|
+
rule: violation.rule,
|
|
3063
|
+
lineNumber: violation.lineNumber
|
|
3064
|
+
};
|
|
3065
|
+
}
|
|
3066
|
+
const newName = schemaName.endsWith("Schema") ? schemaName : `${schemaName}Schema`;
|
|
3067
|
+
if (newName === schemaName) {
|
|
3068
|
+
return {
|
|
3069
|
+
success: false,
|
|
3070
|
+
explanation: "Schema already matches naming convention",
|
|
3071
|
+
rule: violation.rule,
|
|
3072
|
+
lineNumber: violation.lineNumber
|
|
3073
|
+
};
|
|
3074
|
+
}
|
|
3075
|
+
const fixedCode = sourceCode.replace(new RegExp(`\\b${schemaName}\\b`, "g"), newName);
|
|
3076
|
+
return {
|
|
3077
|
+
success: true,
|
|
3078
|
+
fixedCode,
|
|
3079
|
+
explanation: `Renamed "${schemaName}" to "${newName}"`,
|
|
3080
|
+
rule: violation.rule,
|
|
3081
|
+
lineNumber: violation.lineNumber
|
|
3082
|
+
};
|
|
3083
|
+
}
|
|
3084
|
+
fixRequireSafeParse(violation, sourceCode) {
|
|
3085
|
+
const lines = sourceCode.split("\n");
|
|
3086
|
+
const lineIndex = violation.lineNumber - 1;
|
|
3087
|
+
const line = lines[lineIndex];
|
|
3088
|
+
if (!line) {
|
|
3089
|
+
return {
|
|
3090
|
+
success: false,
|
|
3091
|
+
explanation: `Line ${violation.lineNumber} not found`,
|
|
3092
|
+
rule: violation.rule,
|
|
3093
|
+
lineNumber: violation.lineNumber
|
|
3094
|
+
};
|
|
3095
|
+
}
|
|
3096
|
+
if (line.includes(".parse(") && !line.includes(".safeParse(")) {
|
|
3097
|
+
lines[lineIndex] = line.replace(".parse(", ".safeParse(");
|
|
3098
|
+
return {
|
|
3099
|
+
success: true,
|
|
3100
|
+
fixedCode: lines.join("\n"),
|
|
3101
|
+
explanation: "Replaced .parse() with .safeParse() for safer error handling",
|
|
3102
|
+
rule: violation.rule,
|
|
3103
|
+
lineNumber: violation.lineNumber
|
|
3104
|
+
};
|
|
3105
|
+
}
|
|
3106
|
+
return {
|
|
3107
|
+
success: false,
|
|
3108
|
+
explanation: "Could not find .parse() pattern to fix",
|
|
3109
|
+
rule: violation.rule,
|
|
3110
|
+
lineNumber: violation.lineNumber
|
|
3111
|
+
};
|
|
3112
|
+
}
|
|
3113
|
+
};
|
|
3114
|
+
|
|
3115
|
+
// src/governance-templates.ts
|
|
3116
|
+
var GOVERNANCE_TEMPLATES = [
|
|
3117
|
+
{
|
|
3118
|
+
name: "no-any-schemas",
|
|
3119
|
+
description: "Disallow z.any(), yup.mixed() without constraints, and similar unrestricted types",
|
|
3120
|
+
category: "security",
|
|
3121
|
+
rule: (sourceFile, _config) => {
|
|
3122
|
+
const violations = [];
|
|
3123
|
+
const text = sourceFile.getFullText();
|
|
3124
|
+
const filePath = sourceFile.getFilePath();
|
|
3125
|
+
const lines = text.split("\n");
|
|
3126
|
+
const anyPatterns = [
|
|
3127
|
+
/\bz\.any\(\)/,
|
|
3128
|
+
/\byup\.mixed\(\)/,
|
|
3129
|
+
/\bt\.any\b/,
|
|
3130
|
+
/\bv\.any\(\)/,
|
|
3131
|
+
/\bunknown\(\)/
|
|
3132
|
+
];
|
|
3133
|
+
for (let i = 0; i < lines.length; i++) {
|
|
3134
|
+
const line = lines[i] ?? "";
|
|
3135
|
+
for (const pattern of anyPatterns) {
|
|
3136
|
+
if (pattern.test(line)) {
|
|
3137
|
+
violations.push({
|
|
3138
|
+
rule: "no-any-schemas",
|
|
3139
|
+
message: "Unrestricted type (any/mixed/unknown) found. Use a specific type with constraints.",
|
|
3140
|
+
filePath,
|
|
3141
|
+
lineNumber: i + 1,
|
|
3142
|
+
schemaName: "",
|
|
3143
|
+
severity: "error",
|
|
3144
|
+
fixable: false
|
|
3145
|
+
});
|
|
3146
|
+
}
|
|
3147
|
+
}
|
|
3148
|
+
}
|
|
3149
|
+
return violations;
|
|
3150
|
+
}
|
|
3151
|
+
},
|
|
3152
|
+
{
|
|
3153
|
+
name: "require-descriptions",
|
|
3154
|
+
description: "All exported schemas must have .describe() for documentation",
|
|
3155
|
+
category: "quality",
|
|
3156
|
+
rule: (sourceFile, _config) => {
|
|
3157
|
+
const violations = [];
|
|
3158
|
+
const text = sourceFile.getFullText();
|
|
3159
|
+
const filePath = sourceFile.getFilePath();
|
|
3160
|
+
const lines = text.split("\n");
|
|
3161
|
+
for (let i = 0; i < lines.length; i++) {
|
|
3162
|
+
const line = lines[i] ?? "";
|
|
3163
|
+
if (/export\s+(const|let)\s+\w+.*=\s*(z\.|yup\.)/.test(line)) {
|
|
3164
|
+
let fullStatement = line;
|
|
3165
|
+
let j = i + 1;
|
|
3166
|
+
while (j < lines.length && !lines[j]?.includes(";") && j < i + 10) {
|
|
3167
|
+
fullStatement += lines[j] ?? "";
|
|
3168
|
+
j++;
|
|
3169
|
+
}
|
|
3170
|
+
if (j < lines.length) fullStatement += lines[j] ?? "";
|
|
3171
|
+
if (!fullStatement.includes(".describe(")) {
|
|
3172
|
+
const nameMatch = line.match(/(?:const|let)\s+(\w+)/);
|
|
3173
|
+
violations.push({
|
|
3174
|
+
rule: "require-descriptions",
|
|
3175
|
+
message: `Exported schema ${nameMatch?.[1] || "unknown"} should include .describe() for documentation.`,
|
|
3176
|
+
filePath,
|
|
3177
|
+
lineNumber: i + 1,
|
|
3178
|
+
schemaName: nameMatch?.[1] || "",
|
|
3179
|
+
severity: "warning",
|
|
3180
|
+
fixable: true
|
|
3181
|
+
});
|
|
3182
|
+
}
|
|
3183
|
+
}
|
|
3184
|
+
}
|
|
3185
|
+
return violations;
|
|
3186
|
+
}
|
|
3187
|
+
},
|
|
3188
|
+
{
|
|
3189
|
+
name: "max-nesting-depth",
|
|
3190
|
+
description: "Limit schema nesting depth to prevent TypeScript performance issues",
|
|
3191
|
+
category: "performance",
|
|
3192
|
+
rule: (sourceFile, config) => {
|
|
3193
|
+
const violations = [];
|
|
3194
|
+
const text = sourceFile.getFullText();
|
|
3195
|
+
const filePath = sourceFile.getFilePath();
|
|
3196
|
+
const maxDepth = config.threshold || 5;
|
|
3197
|
+
const lines = text.split("\n");
|
|
3198
|
+
let currentDepth = 0;
|
|
3199
|
+
let maxFoundDepth = 0;
|
|
3200
|
+
let deepestLine = 0;
|
|
3201
|
+
for (let i = 0; i < lines.length; i++) {
|
|
3202
|
+
const line = lines[i] ?? "";
|
|
3203
|
+
for (const char of line) {
|
|
3204
|
+
if (char === "(" || char === "{" || char === "[") {
|
|
3205
|
+
currentDepth++;
|
|
3206
|
+
if (currentDepth > maxFoundDepth) {
|
|
3207
|
+
maxFoundDepth = currentDepth;
|
|
3208
|
+
deepestLine = i + 1;
|
|
3209
|
+
}
|
|
3210
|
+
}
|
|
3211
|
+
if (char === ")" || char === "}" || char === "]") {
|
|
3212
|
+
currentDepth = Math.max(0, currentDepth - 1);
|
|
3213
|
+
}
|
|
3214
|
+
}
|
|
3215
|
+
}
|
|
3216
|
+
if (maxFoundDepth > maxDepth) {
|
|
3217
|
+
violations.push({
|
|
3218
|
+
rule: "max-nesting-depth",
|
|
3219
|
+
message: `Schema nesting depth ${maxFoundDepth} exceeds maximum of ${maxDepth}. Consider breaking into smaller schemas.`,
|
|
3220
|
+
filePath,
|
|
3221
|
+
lineNumber: deepestLine,
|
|
3222
|
+
schemaName: "",
|
|
3223
|
+
severity: "warning",
|
|
3224
|
+
fixable: false
|
|
3225
|
+
});
|
|
3226
|
+
}
|
|
3227
|
+
return violations;
|
|
3228
|
+
}
|
|
3229
|
+
},
|
|
3230
|
+
{
|
|
3231
|
+
name: "no-deprecated-methods",
|
|
3232
|
+
description: "Flag usage of deprecated schema methods",
|
|
3233
|
+
category: "quality",
|
|
3234
|
+
rule: (sourceFile, _config) => {
|
|
3235
|
+
const violations = [];
|
|
3236
|
+
const text = sourceFile.getFullText();
|
|
3237
|
+
const filePath = sourceFile.getFilePath();
|
|
3238
|
+
const lines = text.split("\n");
|
|
3239
|
+
const deprecatedPatterns = [
|
|
3240
|
+
{
|
|
3241
|
+
pattern: /\.deepPartial\(\)/,
|
|
3242
|
+
message: ".deepPartial() is removed in Zod v4. Use recursive .partial() instead."
|
|
3243
|
+
},
|
|
3244
|
+
{
|
|
3245
|
+
pattern: /\.strip\(\)/,
|
|
3246
|
+
message: ".strip() is deprecated. Use z.strictObject() or explicit stripping."
|
|
3247
|
+
},
|
|
3248
|
+
{
|
|
3249
|
+
pattern: /z\.promise\(/,
|
|
3250
|
+
message: "z.promise() is deprecated in Zod v4. Use native Promise types."
|
|
3251
|
+
},
|
|
3252
|
+
{
|
|
3253
|
+
pattern: /z\.ostring\(\)/,
|
|
3254
|
+
message: "z.ostring() is removed in Zod v4. Use z.string().optional()."
|
|
3255
|
+
},
|
|
3256
|
+
{
|
|
3257
|
+
pattern: /z\.onumber\(\)/,
|
|
3258
|
+
message: "z.onumber() is removed in Zod v4. Use z.number().optional()."
|
|
3259
|
+
},
|
|
3260
|
+
{
|
|
3261
|
+
pattern: /z\.oboolean\(\)/,
|
|
3262
|
+
message: "z.oboolean() is removed in Zod v4. Use z.boolean().optional()."
|
|
3263
|
+
},
|
|
3264
|
+
{
|
|
3265
|
+
pattern: /z\.preprocess\(/,
|
|
3266
|
+
message: "z.preprocess() is removed in Zod v4. Use z.coerce.* instead."
|
|
3267
|
+
}
|
|
3268
|
+
];
|
|
3269
|
+
for (let i = 0; i < lines.length; i++) {
|
|
3270
|
+
const line = lines[i] ?? "";
|
|
3271
|
+
for (const { pattern, message } of deprecatedPatterns) {
|
|
3272
|
+
if (pattern.test(line)) {
|
|
3273
|
+
violations.push({
|
|
3274
|
+
rule: "no-deprecated-methods",
|
|
3275
|
+
message,
|
|
3276
|
+
filePath,
|
|
3277
|
+
lineNumber: i + 1,
|
|
3278
|
+
schemaName: "",
|
|
3279
|
+
severity: "warning",
|
|
3280
|
+
fixable: false
|
|
3281
|
+
});
|
|
3282
|
+
}
|
|
3283
|
+
}
|
|
3284
|
+
}
|
|
3285
|
+
return violations;
|
|
3286
|
+
}
|
|
3287
|
+
},
|
|
3288
|
+
{
|
|
3289
|
+
name: "naming-convention",
|
|
3290
|
+
description: "Enforce schema naming pattern (e.g., must end with Schema)",
|
|
3291
|
+
category: "quality",
|
|
3292
|
+
rule: (sourceFile, config) => {
|
|
3293
|
+
const violations = [];
|
|
3294
|
+
const text = sourceFile.getFullText();
|
|
3295
|
+
const filePath = sourceFile.getFilePath();
|
|
3296
|
+
const lines = text.split("\n");
|
|
3297
|
+
const pattern = new RegExp(config.pattern || ".*Schema$");
|
|
3298
|
+
for (let i = 0; i < lines.length; i++) {
|
|
3299
|
+
const line = lines[i] ?? "";
|
|
3300
|
+
const match = line.match(
|
|
3301
|
+
/(?:const|let)\s+(\w+)\s*=\s*(?:z\.|yup\.|Joi\.|t\.|v\.|type\(|object\(|string\()/
|
|
3302
|
+
);
|
|
3303
|
+
if (match?.[1] && !pattern.test(match[1])) {
|
|
3304
|
+
violations.push({
|
|
3305
|
+
rule: "naming-convention",
|
|
3306
|
+
message: `Schema "${match[1]}" does not match naming pattern ${pattern.source}.`,
|
|
3307
|
+
filePath,
|
|
3308
|
+
lineNumber: i + 1,
|
|
3309
|
+
schemaName: match[1],
|
|
3310
|
+
severity: "warning",
|
|
3311
|
+
fixable: false
|
|
3312
|
+
});
|
|
3313
|
+
}
|
|
3314
|
+
}
|
|
3315
|
+
return violations;
|
|
3316
|
+
}
|
|
3317
|
+
},
|
|
3318
|
+
{
|
|
3319
|
+
name: "require-max-length",
|
|
3320
|
+
description: "String schemas must have .max() to prevent DoS via unbounded input",
|
|
3321
|
+
category: "security",
|
|
3322
|
+
rule: (sourceFile, _config) => {
|
|
3323
|
+
const violations = [];
|
|
3324
|
+
const text = sourceFile.getFullText();
|
|
3325
|
+
const filePath = sourceFile.getFilePath();
|
|
3326
|
+
const lines = text.split("\n");
|
|
3327
|
+
for (let i = 0; i < lines.length; i++) {
|
|
3328
|
+
const line = lines[i] ?? "";
|
|
3329
|
+
if (/z\.string\(\)/.test(line) && !line.includes(".max(") && !line.includes(".length(")) {
|
|
3330
|
+
let fullChain = line;
|
|
3331
|
+
let j = i + 1;
|
|
3332
|
+
while (j < lines.length && j < i + 5 && /^\s*\./.test(lines[j] ?? "")) {
|
|
3333
|
+
fullChain += lines[j] ?? "";
|
|
3334
|
+
j++;
|
|
3335
|
+
}
|
|
3336
|
+
if (!fullChain.includes(".max(") && !fullChain.includes(".length(")) {
|
|
3337
|
+
violations.push({
|
|
3338
|
+
rule: "require-max-length",
|
|
3339
|
+
message: "String schema should have .max() to prevent unbounded input (DoS protection).",
|
|
3340
|
+
filePath,
|
|
3341
|
+
lineNumber: i + 1,
|
|
3342
|
+
schemaName: "",
|
|
3343
|
+
severity: "warning",
|
|
3344
|
+
fixable: true
|
|
3345
|
+
});
|
|
3346
|
+
}
|
|
3347
|
+
}
|
|
3348
|
+
}
|
|
3349
|
+
return violations;
|
|
3350
|
+
}
|
|
3351
|
+
}
|
|
3352
|
+
];
|
|
3353
|
+
function getGovernanceTemplate(name) {
|
|
3354
|
+
return GOVERNANCE_TEMPLATES.find((t) => t.name === name);
|
|
3355
|
+
}
|
|
3356
|
+
function getGovernanceTemplatesByCategory(category) {
|
|
3357
|
+
return GOVERNANCE_TEMPLATES.filter((t) => t.category === category);
|
|
3358
|
+
}
|
|
3359
|
+
function getGovernanceTemplateNames() {
|
|
3360
|
+
return GOVERNANCE_TEMPLATES.map((t) => t.name);
|
|
3361
|
+
}
|
|
3362
|
+
|
|
3363
|
+
// src/graph-exporter.ts
|
|
3364
|
+
var LIBRARY_COLORS = {
|
|
3365
|
+
zod: "#3068B7",
|
|
3366
|
+
yup: "#32CD32",
|
|
3367
|
+
joi: "#FF6347",
|
|
3368
|
+
"io-ts": "#9370DB",
|
|
3369
|
+
valibot: "#FF8C00",
|
|
3370
|
+
arktype: "#20B2AA",
|
|
3371
|
+
superstruct: "#DAA520",
|
|
3372
|
+
effect: "#6A5ACD"
|
|
3373
|
+
};
|
|
3374
|
+
var LIBRARY_MERMAID_STYLES = {
|
|
3375
|
+
zod: "fill:#3068B7,color:#fff",
|
|
3376
|
+
yup: "fill:#32CD32,color:#000",
|
|
3377
|
+
joi: "fill:#FF6347,color:#fff",
|
|
3378
|
+
"io-ts": "fill:#9370DB,color:#fff",
|
|
3379
|
+
valibot: "fill:#FF8C00,color:#000",
|
|
3380
|
+
arktype: "fill:#20B2AA,color:#fff",
|
|
3381
|
+
superstruct: "fill:#DAA520,color:#000",
|
|
3382
|
+
effect: "fill:#6A5ACD,color:#fff"
|
|
3383
|
+
};
|
|
3384
|
+
var GraphExporter = class {
|
|
3385
|
+
/**
|
|
3386
|
+
* Export dependency graph as DOT format for Graphviz.
|
|
3387
|
+
*/
|
|
3388
|
+
exportDot(graph, options = {}) {
|
|
3389
|
+
const lines = [];
|
|
3390
|
+
lines.push("digraph SchemaShiftDependencies {");
|
|
3391
|
+
lines.push(" rankdir=LR;");
|
|
3392
|
+
lines.push(' node [shape=box, style=filled, fontname="monospace"];');
|
|
3393
|
+
lines.push(' edge [color="#666666"];');
|
|
3394
|
+
lines.push("");
|
|
3395
|
+
const circularFiles = /* @__PURE__ */ new Set();
|
|
3396
|
+
if (options.highlightCircular && graph.circularWarnings.length > 0) {
|
|
3397
|
+
for (const warning of graph.circularWarnings) {
|
|
3398
|
+
const match = warning.match(/Circular dependency: (.+)/);
|
|
3399
|
+
if (match?.[1]) {
|
|
3400
|
+
for (const part of match[1].split(" -> ")) {
|
|
3401
|
+
for (const file of graph.sortedFiles) {
|
|
3402
|
+
if (file.endsWith(part.trim()) || this.shortenPath(file) === part.trim()) {
|
|
3403
|
+
circularFiles.add(file);
|
|
3404
|
+
}
|
|
3405
|
+
}
|
|
3406
|
+
}
|
|
3407
|
+
}
|
|
3408
|
+
}
|
|
3409
|
+
}
|
|
3410
|
+
for (const filePath of graph.sortedFiles) {
|
|
3411
|
+
const meta = options.nodeMetadata?.get(filePath);
|
|
3412
|
+
const library = meta?.library;
|
|
3413
|
+
if (options.filterLibrary && library !== options.filterLibrary) continue;
|
|
3414
|
+
const shortPath = this.shortenPath(filePath);
|
|
3415
|
+
const nodeId = this.toNodeId(filePath);
|
|
3416
|
+
const attrs = [];
|
|
3417
|
+
attrs.push(`label="${shortPath}"`);
|
|
3418
|
+
if (circularFiles.has(filePath)) {
|
|
3419
|
+
attrs.push('color="#FF0000"');
|
|
3420
|
+
attrs.push("penwidth=2");
|
|
3421
|
+
}
|
|
3422
|
+
if (options.colorByLibrary && library && LIBRARY_COLORS[library]) {
|
|
3423
|
+
attrs.push(`fillcolor="${LIBRARY_COLORS[library]}"`);
|
|
3424
|
+
attrs.push('fontcolor="white"');
|
|
3425
|
+
} else {
|
|
3426
|
+
attrs.push('fillcolor="#E8E8E8"');
|
|
3427
|
+
}
|
|
3428
|
+
if (meta?.schemaCount) {
|
|
3429
|
+
attrs.push(`tooltip="${meta.schemaCount} schema(s)"`);
|
|
3430
|
+
}
|
|
3431
|
+
lines.push(` ${nodeId} [${attrs.join(", ")}];`);
|
|
3432
|
+
}
|
|
3433
|
+
lines.push("");
|
|
3434
|
+
const filterSet = options.filterLibrary ? new Set(
|
|
3435
|
+
graph.sortedFiles.filter((f) => {
|
|
3436
|
+
const meta = options.nodeMetadata?.get(f);
|
|
3437
|
+
return meta?.library === options.filterLibrary;
|
|
3438
|
+
})
|
|
3439
|
+
) : void 0;
|
|
3440
|
+
for (const [file, deps] of graph.dependencies) {
|
|
3441
|
+
if (filterSet && !filterSet.has(file)) continue;
|
|
3442
|
+
const fromId = this.toNodeId(file);
|
|
3443
|
+
for (const dep of deps) {
|
|
3444
|
+
if (filterSet && !filterSet.has(dep)) continue;
|
|
3445
|
+
const toId = this.toNodeId(dep);
|
|
3446
|
+
const edgeAttrs = [];
|
|
3447
|
+
if (options.highlightCircular && circularFiles.has(file) && circularFiles.has(dep)) {
|
|
3448
|
+
edgeAttrs.push('color="#FF0000"');
|
|
3449
|
+
edgeAttrs.push("penwidth=2");
|
|
3450
|
+
}
|
|
3451
|
+
lines.push(
|
|
3452
|
+
` ${fromId} -> ${toId}${edgeAttrs.length > 0 ? ` [${edgeAttrs.join(", ")}]` : ""};`
|
|
3453
|
+
);
|
|
3454
|
+
}
|
|
3455
|
+
}
|
|
3456
|
+
lines.push("}");
|
|
3457
|
+
return lines.join("\n");
|
|
3458
|
+
}
|
|
3459
|
+
/**
|
|
3460
|
+
* Export dependency graph as Mermaid diagram syntax.
|
|
3461
|
+
*/
|
|
3462
|
+
exportMermaid(graph, options = {}) {
|
|
3463
|
+
const lines = [];
|
|
3464
|
+
lines.push("graph LR");
|
|
3465
|
+
const styledNodes = /* @__PURE__ */ new Map();
|
|
3466
|
+
for (const [file, deps] of graph.dependencies) {
|
|
3467
|
+
const meta = options.nodeMetadata?.get(file);
|
|
3468
|
+
if (options.filterLibrary && meta?.library !== options.filterLibrary) continue;
|
|
3469
|
+
const fromId = this.toMermaidId(file);
|
|
3470
|
+
const fromLabel = this.shortenPath(file);
|
|
3471
|
+
if (meta?.library) {
|
|
3472
|
+
styledNodes.set(fromId, meta.library);
|
|
3473
|
+
}
|
|
3474
|
+
if (deps.length === 0) {
|
|
3475
|
+
lines.push(` ${fromId}["${fromLabel}"]`);
|
|
3476
|
+
}
|
|
3477
|
+
for (const dep of deps) {
|
|
3478
|
+
const depMeta = options.nodeMetadata?.get(dep);
|
|
3479
|
+
if (options.filterLibrary && depMeta?.library !== options.filterLibrary) continue;
|
|
3480
|
+
const toId = this.toMermaidId(dep);
|
|
3481
|
+
const toLabel = this.shortenPath(dep);
|
|
3482
|
+
if (depMeta?.library) {
|
|
3483
|
+
styledNodes.set(toId, depMeta.library);
|
|
3484
|
+
}
|
|
3485
|
+
lines.push(` ${fromId}["${fromLabel}"] --> ${toId}["${toLabel}"]`);
|
|
3486
|
+
}
|
|
3487
|
+
}
|
|
3488
|
+
for (const file of graph.sortedFiles) {
|
|
3489
|
+
const meta = options.nodeMetadata?.get(file);
|
|
3490
|
+
if (options.filterLibrary && meta?.library !== options.filterLibrary) continue;
|
|
3491
|
+
const id = this.toMermaidId(file);
|
|
3492
|
+
if (!lines.some((l) => l.includes(id))) {
|
|
3493
|
+
lines.push(` ${id}["${this.shortenPath(file)}"]`);
|
|
3494
|
+
if (meta?.library) {
|
|
3495
|
+
styledNodes.set(id, meta.library);
|
|
3496
|
+
}
|
|
3497
|
+
}
|
|
3498
|
+
}
|
|
3499
|
+
if (options.colorByLibrary && styledNodes.size > 0) {
|
|
3500
|
+
lines.push("");
|
|
3501
|
+
const libraryGroups = /* @__PURE__ */ new Map();
|
|
3502
|
+
for (const [nodeId, library] of styledNodes) {
|
|
3503
|
+
const group = libraryGroups.get(library) ?? [];
|
|
3504
|
+
group.push(nodeId);
|
|
3505
|
+
libraryGroups.set(library, group);
|
|
3506
|
+
}
|
|
3507
|
+
for (const [library, nodeIds] of libraryGroups) {
|
|
3508
|
+
const style = LIBRARY_MERMAID_STYLES[library];
|
|
3509
|
+
if (style) {
|
|
3510
|
+
for (const nodeId of nodeIds) {
|
|
3511
|
+
lines.push(` style ${nodeId} ${style}`);
|
|
3512
|
+
}
|
|
3513
|
+
}
|
|
3514
|
+
}
|
|
3515
|
+
}
|
|
3516
|
+
return lines.join("\n");
|
|
3517
|
+
}
|
|
3518
|
+
shortenPath(filePath) {
|
|
3519
|
+
const parts = filePath.split("/");
|
|
3520
|
+
return parts.slice(-2).join("/");
|
|
3521
|
+
}
|
|
3522
|
+
toNodeId(filePath) {
|
|
3523
|
+
return filePath.replace(/[^a-zA-Z0-9]/g, "_").replace(/^_+/, "").replace(/_+$/, "");
|
|
3524
|
+
}
|
|
3525
|
+
toMermaidId(filePath) {
|
|
3526
|
+
return filePath.replace(/[^a-zA-Z0-9]/g, "_").replace(/^_+/, "n_").replace(/_+$/, "");
|
|
3527
|
+
}
|
|
3528
|
+
};
|
|
3529
|
+
|
|
2258
3530
|
// src/incremental.ts
|
|
2259
|
-
import { existsSync as
|
|
2260
|
-
import { join as
|
|
3531
|
+
import { existsSync as existsSync8, mkdirSync as mkdirSync4, readFileSync as readFileSync8, unlinkSync, writeFileSync as writeFileSync4 } from "fs";
|
|
3532
|
+
import { join as join8 } from "path";
|
|
2261
3533
|
var STATE_DIR = ".schemashift";
|
|
2262
3534
|
var STATE_FILE = "incremental.json";
|
|
2263
3535
|
var IncrementalTracker = class {
|
|
2264
3536
|
stateDir;
|
|
2265
3537
|
statePath;
|
|
2266
3538
|
/**
 * @param {string} projectPath - Project root; incremental migration
 *   state lives at `<projectPath>/.schemashift/incremental.json`
 *   (STATE_DIR / STATE_FILE declared above).
 */
constructor(projectPath) {
  this.stateDir = join8(projectPath, STATE_DIR);
  this.statePath = join8(this.stateDir, STATE_FILE);
}
|
|
2270
3542
|
start(files, from, to) {
|
|
2271
3543
|
const state = {
|
|
@@ -2300,9 +3572,9 @@ var IncrementalTracker = class {
|
|
|
2300
3572
|
this.saveState(state);
|
|
2301
3573
|
}
|
|
2302
3574
|
getState() {
|
|
2303
|
-
if (!
|
|
3575
|
+
if (!existsSync8(this.statePath)) return null;
|
|
2304
3576
|
try {
|
|
2305
|
-
return JSON.parse(
|
|
3577
|
+
return JSON.parse(readFileSync8(this.statePath, "utf-8"));
|
|
2306
3578
|
} catch {
|
|
2307
3579
|
return null;
|
|
2308
3580
|
}
|
|
@@ -2329,21 +3601,299 @@ var IncrementalTracker = class {
|
|
|
2329
3601
|
};
|
|
2330
3602
|
}
|
|
2331
3603
|
clear() {
|
|
2332
|
-
if (
|
|
3604
|
+
if (existsSync8(this.statePath)) {
|
|
2333
3605
|
unlinkSync(this.statePath);
|
|
2334
3606
|
}
|
|
2335
3607
|
}
|
|
2336
3608
|
saveState(state) {
|
|
2337
|
-
if (!
|
|
2338
|
-
|
|
3609
|
+
if (!existsSync8(this.stateDir)) {
|
|
3610
|
+
mkdirSync4(this.stateDir, { recursive: true });
|
|
2339
3611
|
}
|
|
2340
|
-
|
|
3612
|
+
writeFileSync4(this.statePath, JSON.stringify(state, null, 2));
|
|
3613
|
+
}
|
|
3614
|
+
};
|
|
3615
|
+
|
|
3616
|
+
// src/migration-templates.ts
|
|
3617
|
+
// Curated, ready-made migration recipes served by the template API below.
// Each entry bundles the schema-library conversion step(s) with the manual
// pre-checks, follow-up steps/commands, package.json changes, and
// recommended CLI flags for a project of that shape.
var BUILT_IN_TEMPLATES = [
  // React Hook Form projects: swap Yup schemas for Zod and move the
  // resolver from yupResolver to zodResolver.
  {
    name: "react-hook-form-yup-to-zod",
    description: "Migrate React Hook Form project from Yup to Zod validation",
    category: "form-migration",
    migrationSteps: [{ from: "yup", to: "zod", description: "Convert Yup schemas to Zod schemas" }],
    preChecks: [
      { description: "Ensure @hookform/resolvers is installed" },
      { description: "Check for .when() conditional validations that need manual review" }
    ],
    postSteps: [
      {
        description: "Update resolver imports: yupResolver \u2192 zodResolver",
        // Explicitly no runnable command: this step is a manual edit.
        command: void 0
      },
      {
        description: "Run tests to verify form validation behavior",
        command: "npm test"
      },
      {
        description: "Remove Yup dependency if no longer used",
        command: "npm uninstall yup"
      }
    ],
    packageChanges: [
      { action: "install", package: "zod", version: "^3.24.0" },
      { action: "upgrade", package: "@hookform/resolvers", version: "latest" }
    ],
    recommendedFlags: ["--cross-file", "--scaffold-tests", "--verbose"],
    estimatedEffort: "moderate"
  },
  // tRPC projects: upgrade Zod v3 schemas to the v4 syntax.
  {
    name: "trpc-zod-v3-to-v4",
    description: "Upgrade tRPC project from Zod v3 to Zod v4",
    category: "framework-upgrade",
    migrationSteps: [
      { from: "zod-v3", to: "v4", description: "Upgrade Zod v3 schemas to v4 syntax" }
    ],
    preChecks: [
      { description: "Check tRPC version \u2014 v11+ required for Zod v4 compatibility" },
      { description: "Check zod-validation-error version \u2014 v5.0.0+ required" },
      { description: "Run existing test suite to establish baseline", command: "npm test" }
    ],
    postSteps: [
      {
        description: "Update tRPC to v11 if not already",
        command: "npm install @trpc/server@latest @trpc/client@latest"
      },
      {
        description: "Update zod-validation-error if used",
        command: "npm install zod-validation-error@^5.0.0"
      },
      { description: "Review TODO(schemashift) comments for manual fixes" },
      { description: "Run tests to verify tRPC router behavior", command: "npm test" }
    ],
    // NOTE(review): the template targets Zod v4 syntax but pins "^3.25.0";
    // presumably this is the zod@3.25+ line that ships the v4 API \u2014
    // confirm against what the zod-v3 -> v4 transform expects.
    packageChanges: [
      { action: "upgrade", package: "zod", version: "^3.25.0" },
      { action: "upgrade", package: "@trpc/server", version: "^11.0.0" }
    ],
    recommendedFlags: ["--compat-check", "--scaffold-tests", "--verbose"],
    estimatedEffort: "high"
  },
  // Express APIs: replace Joi request validators with Zod.
  {
    name: "express-joi-to-zod",
    description: "Migrate Express.js API validators from Joi to Zod",
    category: "library-switch",
    migrationSteps: [{ from: "joi", to: "zod", description: "Convert Joi schemas to Zod schemas" }],
    preChecks: [
      { description: "Identify middleware using Joi validation" },
      { description: "Check for Joi.extend() custom validators that need manual migration" }
    ],
    postSteps: [
      { description: "Update Express middleware to use Zod schemas" },
      { description: "Replace celebrate/express-validation with custom Zod middleware" },
      { description: "Run API integration tests", command: "npm test" },
      { description: "Remove Joi dependency", command: "npm uninstall joi" }
    ],
    packageChanges: [
      { action: "install", package: "zod", version: "^3.24.0" },
      { action: "remove", package: "celebrate" }
    ],
    recommendedFlags: ["--cross-file", "--verbose"],
    estimatedEffort: "moderate"
  },
  // Next.js apps: move Formik/Yup forms to React Hook Form + Zod, covering
  // both client components and server actions.
  {
    name: "nextjs-form-migration",
    description: "Migrate Next.js form validation from Yup/Formik to Zod/React Hook Form",
    category: "form-migration",
    migrationSteps: [{ from: "yup", to: "zod", description: "Convert Yup schemas to Zod schemas" }],
    preChecks: [
      { description: "Identify all Formik form components" },
      { description: "Check for server-side validation using Yup" },
      { description: "Run existing tests to establish baseline", command: "npm test" }
    ],
    postSteps: [
      { description: "Replace Formik with React Hook Form + zodResolver" },
      { description: "Update server actions to use Zod for validation" },
      {
        description: "Install next-safe-action if using server actions",
        command: "npm install next-safe-action"
      },
      { description: "Run full test suite", command: "npm test" }
    ],
    packageChanges: [
      { action: "install", package: "zod", version: "^3.24.0" },
      { action: "install", package: "react-hook-form", version: "^7.0.0" },
      { action: "install", package: "@hookform/resolvers", version: "latest" }
    ],
    recommendedFlags: ["--cross-file", "--scaffold-tests"],
    estimatedEffort: "high"
  },
  // Monorepos: batch-by-batch migration driven by the incremental tracker;
  // shared packages are converted before the apps that depend on them.
  {
    name: "monorepo-staged-migration",
    description: "Phased monorepo migration with incremental tracking",
    category: "monorepo",
    migrationSteps: [
      { from: "yup", to: "zod", description: "Convert shared packages first, then applications" }
    ],
    preChecks: [
      { description: "Analyze monorepo workspace structure" },
      { description: "Identify shared schema packages used by multiple apps" },
      { description: "Ensure all packages build successfully", command: "npm run build" }
    ],
    postSteps: [
      { description: "Run incremental migration starting with leaf packages" },
      { description: "Build all packages after each batch", command: "npm run build" },
      { description: "Run full test suite", command: "npm test" },
      { description: "Review cross-package type compatibility" }
    ],
    // No top-level dependency changes: each workspace manages its own.
    packageChanges: [],
    recommendedFlags: ["--cross-file", "--incremental", "--compat-check", "--audit"],
    estimatedEffort: "high"
  }
];
|
|
3751
|
+
/**
 * Look up a built-in migration template by its unique name.
 * @returns the template, or undefined when no template matches.
 */
function getMigrationTemplate(name) {
  for (const template of BUILT_IN_TEMPLATES) {
    if (template.name === name) return template;
  }
  return void 0;
}
|
|
3754
|
+
/**
 * List the names of all built-in migration templates, in registry order.
 */
function getMigrationTemplateNames() {
  const names = [];
  for (const { name } of BUILT_IN_TEMPLATES) {
    names.push(name);
  }
  return names;
}
|
|
3757
|
+
/**
 * Return every built-in template belonging to the given category.
 */
function getMigrationTemplatesByCategory(category) {
  const matches = [];
  for (const template of BUILT_IN_TEMPLATES) {
    if (template.category === category) {
      matches.push(template);
    }
  }
  return matches;
}
|
|
3760
|
+
/**
 * Return a shallow copy of the full template registry so callers cannot
 * mutate the registry's ordering or membership.
 */
function getAllMigrationTemplates() {
  return BUILT_IN_TEMPLATES.slice();
}
|
|
3763
|
+
/**
 * Validate a (possibly user-supplied) migration template object.
 *
 * Collects every problem instead of failing fast so callers can report
 * all issues at once: name and description must be non-blank strings,
 * at least one migration step is required, and each step needs both a
 * `from` and a `to` library.
 *
 * @returns {{ valid: boolean, errors: string[] }}
 */
function validateMigrationTemplate(template) {
  const errors = [];
  const isBlank = (text) => !text || text.trim().length === 0;
  if (isBlank(template.name)) {
    errors.push("Template name is required");
  }
  if (isBlank(template.description)) {
    errors.push("Template description is required");
  }
  const steps = template.migrationSteps ?? [];
  if (steps.length === 0) {
    errors.push("At least one migration step is required");
  }
  for (const step of steps) {
    if (!step.from || !step.to) {
      errors.push(`Migration step must have from and to: ${JSON.stringify(step)}`);
    }
  }
  return { valid: errors.length === 0, errors };
}
|
|
3781
|
+
|
|
3782
|
+
// src/notifications.ts
|
|
3783
|
+
/**
 * Compute the hex-encoded HMAC-SHA256 signature of a webhook payload.
 * node:crypto is imported lazily so loading this module stays cheap for
 * consumers that never send signed webhooks.
 */
async function computeSignature(payload, secret) {
  const cryptoModule = await import("crypto");
  const hmac = cryptoModule.createHmac("sha256", secret);
  hmac.update(payload);
  return hmac.digest("hex");
}
|
|
3787
|
+
var WebhookNotifier = class {
  webhooks;
  /**
   * @param webhooks - Endpoint configurations; each may carry a `secret`
   *   (HMAC signing), extra `headers`, and an `events` allow-list.
   */
  constructor(webhooks) {
    this.webhooks = webhooks;
  }
  /**
   * Create a migration event with current timestamp.
   */
  createEvent(type, details, project) {
    const timestamp = new Date().toISOString();
    return { type, timestamp, project, details };
  }
  /**
   * Send an event to all matching webhooks, sequentially and in
   * configuration order; webhooks with an `events` list that does not
   * include the event type are skipped.
   */
  async send(event) {
    const outcomes = [];
    for (const webhook of this.webhooks) {
      const subscribed = !webhook.events || webhook.events.includes(event.type);
      if (!subscribed) continue;
      outcomes.push(await this.sendToWebhook(webhook, event));
    }
    return outcomes;
  }
  /**
   * Send event to a single webhook endpoint. The JSON payload is POSTed
   * with default headers (overridable per webhook); when a secret is
   * configured an HMAC-SHA256 signature header is attached. Network and
   * HTTP failures are reported in the result, never thrown.
   */
  async sendToWebhook(webhook, event) {
    const payload = JSON.stringify(event);
    const headers = {
      "Content-Type": "application/json",
      "User-Agent": "SchemaShift-Webhook/1.0",
      ...webhook.headers
    };
    if (webhook.secret) {
      headers["X-SchemaShift-Signature"] = `sha256=${await computeSignature(payload, webhook.secret)}`;
    }
    try {
      const response = await fetch(webhook.url, {
        method: "POST",
        headers,
        body: payload
      });
      if (response.ok) {
        return { success: true, statusCode: response.status, error: void 0 };
      }
      return {
        success: false,
        statusCode: response.status,
        error: `HTTP ${response.status}: ${response.statusText}`
      };
    } catch (err) {
      return {
        success: false,
        error: err instanceof Error ? err.message : String(err)
      };
    }
  }
  /**
   * Convenience: send a migration_started event.
   */
  async notifyMigrationStarted(from, to, fileCount, project) {
    return this.send(this.createEvent("migration_started", { from, to, fileCount }, project));
  }
  /**
   * Convenience: send a migration_completed event.
   */
  async notifyMigrationCompleted(from, to, fileCount, warningCount, project) {
    return this.send(
      this.createEvent("migration_completed", { from, to, fileCount, warningCount }, project)
    );
  }
  /**
   * Convenience: send a migration_failed event.
   */
  async notifyMigrationFailed(from, to, error, project) {
    return this.send(this.createEvent("migration_failed", { from, to, error }, project));
  }
  /**
   * Convenience: send a governance_violation event.
   */
  async notifyGovernanceViolation(violationCount, rules, project) {
    return this.send(this.createEvent("governance_violation", { violationCount, rules }, project));
  }
  /**
   * Convenience: send a drift_detected event.
   */
  async notifyDriftDetected(modifiedFiles, addedFiles, removedFiles, project) {
    return this.send(
      this.createEvent("drift_detected", { modifiedFiles, addedFiles, removedFiles }, project)
    );
  }
};
|
|
2343
3893
|
|
|
2344
3894
|
// src/package-updater.ts
|
|
2345
|
-
import { existsSync as
|
|
2346
|
-
import { join as
|
|
3895
|
+
import { existsSync as existsSync9, readFileSync as readFileSync9, writeFileSync as writeFileSync5 } from "fs";
|
|
3896
|
+
import { join as join9 } from "path";
|
|
2347
3897
|
var TARGET_VERSIONS = {
|
|
2348
3898
|
"yup->zod": { zod: "^3.24.0" },
|
|
2349
3899
|
"joi->zod": { zod: "^3.24.0" },
|
|
@@ -2364,14 +3914,14 @@ var PackageUpdater = class {
|
|
|
2364
3914
|
const add = {};
|
|
2365
3915
|
const remove = [];
|
|
2366
3916
|
const warnings = [];
|
|
2367
|
-
const pkgPath =
|
|
2368
|
-
if (!
|
|
3917
|
+
const pkgPath = join9(projectPath, "package.json");
|
|
3918
|
+
if (!existsSync9(pkgPath)) {
|
|
2369
3919
|
warnings.push("No package.json found. Cannot plan dependency updates.");
|
|
2370
3920
|
return { add, remove, warnings };
|
|
2371
3921
|
}
|
|
2372
3922
|
let pkg;
|
|
2373
3923
|
try {
|
|
2374
|
-
pkg = JSON.parse(
|
|
3924
|
+
pkg = JSON.parse(readFileSync9(pkgPath, "utf-8"));
|
|
2375
3925
|
} catch {
|
|
2376
3926
|
warnings.push("Could not parse package.json.");
|
|
2377
3927
|
return { add, remove, warnings };
|
|
@@ -2401,9 +3951,9 @@ var PackageUpdater = class {
|
|
|
2401
3951
|
return { add, remove, warnings };
|
|
2402
3952
|
}
|
|
2403
3953
|
apply(projectPath, plan) {
|
|
2404
|
-
const pkgPath =
|
|
2405
|
-
if (!
|
|
2406
|
-
const pkgText =
|
|
3954
|
+
const pkgPath = join9(projectPath, "package.json");
|
|
3955
|
+
if (!existsSync9(pkgPath)) return;
|
|
3956
|
+
const pkgText = readFileSync9(pkgPath, "utf-8");
|
|
2407
3957
|
const pkg = JSON.parse(pkgText);
|
|
2408
3958
|
if (!pkg.dependencies) pkg.dependencies = {};
|
|
2409
3959
|
for (const [name, version] of Object.entries(plan.add)) {
|
|
@@ -2413,7 +3963,7 @@ var PackageUpdater = class {
|
|
|
2413
3963
|
pkg.dependencies[name] = version;
|
|
2414
3964
|
}
|
|
2415
3965
|
}
|
|
2416
|
-
|
|
3966
|
+
writeFileSync5(pkgPath, `${JSON.stringify(pkg, null, 2)}
|
|
2417
3967
|
`);
|
|
2418
3968
|
}
|
|
2419
3969
|
};
|
|
@@ -2585,8 +4135,8 @@ var PluginLoader = class {
|
|
|
2585
4135
|
};
|
|
2586
4136
|
|
|
2587
4137
|
// src/standard-schema.ts
|
|
2588
|
-
import { existsSync as
|
|
2589
|
-
import { join as
|
|
4138
|
+
import { existsSync as existsSync10, readFileSync as readFileSync10 } from "fs";
|
|
4139
|
+
import { join as join10 } from "path";
|
|
2590
4140
|
var STANDARD_SCHEMA_LIBRARIES = {
|
|
2591
4141
|
zod: { minMajor: 3, minMinor: 23 },
|
|
2592
4142
|
// Zod v3.23+ and v4+
|
|
@@ -2615,13 +4165,13 @@ function isVersionCompatible(version, minMajor, minMinor) {
|
|
|
2615
4165
|
return false;
|
|
2616
4166
|
}
|
|
2617
4167
|
function detectStandardSchema(projectPath) {
|
|
2618
|
-
const pkgPath =
|
|
2619
|
-
if (!
|
|
4168
|
+
const pkgPath = join10(projectPath, "package.json");
|
|
4169
|
+
if (!existsSync10(pkgPath)) {
|
|
2620
4170
|
return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
|
|
2621
4171
|
}
|
|
2622
4172
|
let allDeps = {};
|
|
2623
4173
|
try {
|
|
2624
|
-
const pkg = JSON.parse(
|
|
4174
|
+
const pkg = JSON.parse(readFileSync10(pkgPath, "utf-8"));
|
|
2625
4175
|
allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
|
|
2626
4176
|
} catch {
|
|
2627
4177
|
return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
|
|
@@ -2660,6 +4210,105 @@ function detectStandardSchema(projectPath) {
|
|
|
2660
4210
|
return { detected, compatibleLibraries, recommendation, adoptionPath, interopTools };
|
|
2661
4211
|
}
|
|
2662
4212
|
|
|
4213
|
+
// src/standard-schema-advisor.ts
// Libraries this advisor treats as implementing the Standard Schema spec.
// NOTE(review): the library detector elsewhere in this bundle also
// recognizes "effect" (@effect/schema), which is absent here \u2014 confirm
// whether it should be listed as Standard Schema-capable too.
var STANDARD_SCHEMA_LIBS = /* @__PURE__ */ new Set(["zod", "valibot", "arktype"]);
|
4215
|
+
// Advises whether a migration between two schema libraries could instead be
// handled via Standard Schema adapters (keeping the existing schemas) or
// should be a full code migration.
var StandardSchemaAdvisor = class {
  /**
   * Check if a schema library supports Standard Schema.
   * Membership is decided solely by the STANDARD_SCHEMA_LIBS set above.
   */
  supportsStandardSchema(library) {
    return STANDARD_SCHEMA_LIBS.has(library);
  }
  /**
   * Generate advisory for a given migration path.
   *
   * Four cases, keyed on which side supports Standard Schema:
   *  - neither:     full migration ("migrate")
   *  - both:        adapters are viable ("either"; includes adapterExample)
   *  - target only: migrate to gain interoperability ("migrate")
   *  - source only: still "migrate", but the reason flags the interop loss
   */
  advise(from, to) {
    const fromSupports = this.supportsStandardSchema(from);
    const toSupports = this.supportsStandardSchema(to);
    if (!fromSupports && !toSupports) {
      return {
        shouldConsiderAdapter: false,
        reason: `Neither ${from} nor ${to} supports Standard Schema. Full migration is recommended.`,
        migrationAdvantages: [
          "Complete type safety with target library",
          "Access to target library ecosystem",
          "No runtime adapter overhead"
        ],
        adapterAdvantages: [],
        recommendation: "migrate"
      };
    }
    if (fromSupports && toSupports) {
      // Only this branch supplies an adapterExample, since adapters need
      // Standard Schema support on both sides.
      return {
        shouldConsiderAdapter: true,
        reason: `Both ${from} and ${to} support Standard Schema 1.0. You may be able to use adapters for ecosystem tools (tRPC, TanStack Form, etc.) instead of migrating all schemas.`,
        adapterExample: this.generateAdapterExample(from, to),
        migrationAdvantages: [
          "Full target library API and ergonomics",
          "Consistent codebase (single library)",
          "Better IDE support for one library",
          "Smaller bundle (avoid loading two libraries)"
        ],
        adapterAdvantages: [
          "No code changes needed for existing schemas",
          "Gradual migration possible",
          "Ecosystem tools work with both libraries via Standard Schema",
          "Lower risk \u2014 existing validation behavior preserved"
        ],
        recommendation: "either"
      };
    }
    if (toSupports && !fromSupports) {
      return {
        shouldConsiderAdapter: false,
        reason: `${from} does not support Standard Schema, but ${to} does. Migrating to ${to} gives you Standard Schema interoperability.`,
        migrationAdvantages: [
          "Standard Schema interoperability with ecosystem tools",
          "Future-proof validation layer",
          `Access to ${to} API and type inference`
        ],
        adapterAdvantages: [],
        recommendation: "migrate"
      };
    }
    // Remaining case: source supports Standard Schema, target does not.
    // NOTE(review): recommendation stays "migrate" even though the reason
    // text questions the trade-off \u2014 confirm this is intentional.
    return {
      shouldConsiderAdapter: false,
      reason: `${from} supports Standard Schema but ${to} does not. Consider if you need the specific features of ${to} that justify losing Standard Schema interoperability.`,
      migrationAdvantages: [`Access to ${to}-specific features`],
      adapterAdvantages: [`Keeping ${from} preserves Standard Schema interoperability`],
      recommendation: "migrate"
    };
  }
  /**
   * Analyze a project and provide advisory based on detected libraries.
   * Combines the path advisory from advise() with the project scan from
   * detectStandardSchema() (reads the project's package.json).
   */
  adviseFromProject(projectPath, from, to) {
    const projectInfo = detectStandardSchema(projectPath);
    const advisory = this.advise(from, to);
    return { ...advisory, projectInfo };
  }
  /**
   * Build a commented code snippet (as a single string) showing how a
   * tRPC v11+ router can accept both libraries' schemas via Standard
   * Schema without any conversion.
   */
  generateAdapterExample(from, to) {
    return [
      `// Instead of migrating all ${from} schemas to ${to},`,
      `// you can use Standard Schema adapters for ecosystem tools:`,
      `//`,
      `// Example with tRPC (v11+):`,
      `// tRPC accepts any Standard Schema-compatible schema.`,
      `// Both ${from} and ${to} schemas work without conversion:`,
      `//`,
      `// import { ${from}Schema } from './existing-${from}-schemas';`,
      `// import { ${to}Schema } from './new-${to}-schemas';`,
      `//`,
      `// const router = t.router({`,
      `// // Works with ${from} schema (Standard Schema compatible)`,
      `// getUser: t.procedure.input(${from}Schema).query(...)`,
      `// // Also works with ${to} schema`,
      `// createUser: t.procedure.input(${to}Schema).mutation(...)`,
      `// });`
    ].join("\n");
  }
};
|
|
4311
|
+
|
|
2663
4312
|
// src/test-scaffolder.ts
|
|
2664
4313
|
var TestScaffolder = class {
|
|
2665
4314
|
scaffold(sourceFiles, from, to) {
|
|
@@ -2953,14 +4602,19 @@ var TypeDedupDetector = class {
|
|
|
2953
4602
|
}
|
|
2954
4603
|
};
|
|
2955
4604
|
export {
|
|
4605
|
+
ApprovalManager,
|
|
2956
4606
|
BehavioralWarningAnalyzer,
|
|
2957
4607
|
BundleEstimator,
|
|
2958
4608
|
CompatibilityAnalyzer,
|
|
2959
4609
|
ComplexityEstimator,
|
|
2960
4610
|
DetailedAnalyzer,
|
|
4611
|
+
DriftDetector,
|
|
2961
4612
|
EcosystemAnalyzer,
|
|
2962
4613
|
FormResolverMigrator,
|
|
4614
|
+
GOVERNANCE_TEMPLATES,
|
|
2963
4615
|
GovernanceEngine,
|
|
4616
|
+
GovernanceFixer,
|
|
4617
|
+
GraphExporter,
|
|
2964
4618
|
IncrementalTracker,
|
|
2965
4619
|
MigrationAuditLog,
|
|
2966
4620
|
MigrationChain,
|
|
@@ -2970,21 +4624,37 @@ export {
|
|
|
2970
4624
|
PluginLoader,
|
|
2971
4625
|
SchemaAnalyzer,
|
|
2972
4626
|
SchemaDependencyResolver,
|
|
4627
|
+
StandardSchemaAdvisor,
|
|
2973
4628
|
TestScaffolder,
|
|
2974
4629
|
TransformEngine,
|
|
2975
4630
|
TypeDedupDetector,
|
|
4631
|
+
WebhookNotifier,
|
|
2976
4632
|
buildCallChain,
|
|
2977
4633
|
computeParallelBatches,
|
|
4634
|
+
conditionalValidation,
|
|
4635
|
+
dependentFields,
|
|
2978
4636
|
detectFormLibraries,
|
|
2979
4637
|
detectSchemaLibrary,
|
|
2980
4638
|
detectStandardSchema,
|
|
4639
|
+
getAllMigrationTemplates,
|
|
4640
|
+
getGovernanceTemplate,
|
|
4641
|
+
getGovernanceTemplateNames,
|
|
4642
|
+
getGovernanceTemplatesByCategory,
|
|
4643
|
+
getMigrationTemplate,
|
|
4644
|
+
getMigrationTemplateNames,
|
|
4645
|
+
getMigrationTemplatesByCategory,
|
|
2981
4646
|
isInsideComment,
|
|
2982
4647
|
isInsideStringLiteral,
|
|
2983
4648
|
loadConfig,
|
|
4649
|
+
mutuallyExclusive,
|
|
2984
4650
|
parseCallChain,
|
|
4651
|
+
requireIf,
|
|
4652
|
+
requireOneOf,
|
|
2985
4653
|
shouldSuppressWarning,
|
|
2986
4654
|
startsWithBase,
|
|
4655
|
+
suggestCrossFieldPattern,
|
|
2987
4656
|
transformMethodChain,
|
|
2988
|
-
validateConfig
|
|
4657
|
+
validateConfig,
|
|
4658
|
+
validateMigrationTemplate
|
|
2989
4659
|
};
|
|
2990
4660
|
//# sourceMappingURL=index.js.map
|