@schemashift/core 0.10.0 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/dist/index.cjs +1196 -90
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +335 -1
- package/dist/index.d.ts +335 -1
- package/dist/index.js +1166 -86
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -133,6 +133,110 @@ var SchemaAnalyzer = class {
 }
 };
 
+// src/approval.ts
+import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from "fs";
+import { join } from "path";
+var ApprovalManager = class {
+pendingDir;
+constructor(projectPath) {
+this.pendingDir = join(projectPath, ".schemashift", "pending");
+}
+/**
+* Create a new migration request for review.
+*/
+createRequest(from, to, files, requestedBy, metadata) {
+const id = `mig-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+const request = {
+id,
+from,
+to,
+files,
+requestedBy,
+requestedAt: (/* @__PURE__ */ new Date()).toISOString(),
+status: "pending",
+metadata
+};
+this.ensureDir();
+const filePath = join(this.pendingDir, `${id}.json`);
+writeFileSync(filePath, JSON.stringify(request, null, 2), "utf-8");
+return request;
+}
+/**
+* Review (approve or reject) a pending migration request.
+*/
+review(decision) {
+const request = this.getRequest(decision.requestId);
+if (!request) {
+throw new Error(`Migration request ${decision.requestId} not found`);
+}
+if (request.status !== "pending") {
+throw new Error(`Migration request ${decision.requestId} is already ${request.status}`);
+}
+request.status = decision.status;
+request.reviewedBy = decision.reviewedBy;
+request.reviewedAt = (/* @__PURE__ */ new Date()).toISOString();
+request.reason = decision.reason;
+const filePath = join(this.pendingDir, `${decision.requestId}.json`);
+writeFileSync(filePath, JSON.stringify(request, null, 2), "utf-8");
+return request;
+}
+/**
+* Get a specific migration request by ID.
+*/
+getRequest(id) {
+const filePath = join(this.pendingDir, `${id}.json`);
+if (!existsSync(filePath)) {
+return null;
+}
+const content = readFileSync(filePath, "utf-8");
+return JSON.parse(content);
+}
+/**
+* List all migration requests, optionally filtered by status.
+*/
+listRequests(status) {
+if (!existsSync(this.pendingDir)) {
+return [];
+}
+const files = readdirSync(this.pendingDir).filter((f) => f.endsWith(".json"));
+const requests = [];
+for (const file of files) {
+const content = readFileSync(join(this.pendingDir, file), "utf-8");
+const request = JSON.parse(content);
+if (!status || request.status === status) {
+requests.push(request);
+}
+}
+return requests.sort(
+(a, b) => new Date(b.requestedAt).getTime() - new Date(a.requestedAt).getTime()
+);
+}
+/**
+* Get summary counts of all requests.
+*/
+getSummary() {
+const all = this.listRequests();
+return {
+pending: all.filter((r) => r.status === "pending").length,
+approved: all.filter((r) => r.status === "approved").length,
+rejected: all.filter((r) => r.status === "rejected").length,
+total: all.length
+};
+}
+/**
+* Check if a migration has been approved.
+*/
+isApproved(requestId) {
+const request = this.getRequest(requestId);
+return request?.status === "approved";
+}
+ensureDir() {
+if (!existsSync(this.pendingDir)) {
+mkdirSync(this.pendingDir, { recursive: true });
+}
+}
+};
+
 // src/ast-utils.ts
 import { Node as NodeUtils } from "ts-morph";
 function parseCallChain(node) {
@@ -269,8 +373,8 @@ function transformMethodChain(chain, newBase, factoryMapper, methodMapper) {
 
 // src/audit-log.ts
 import { createHash } from "crypto";
-import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
-import { join } from "path";
+import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "fs";
+import { join as join2 } from "path";
 var AUDIT_DIR = ".schemashift";
 var AUDIT_FILE = "audit-log.json";
 var AUDIT_VERSION = 1;
@@ -278,8 +382,8 @@ var MigrationAuditLog = class {
 logDir;
 logPath;
 constructor(projectPath) {
-this.logDir =
-this.logPath =
+this.logDir = join2(projectPath, AUDIT_DIR);
+this.logPath = join2(this.logDir, AUDIT_FILE);
 }
 /**
 * Append a new entry to the audit log.
@@ -315,11 +419,11 @@ var MigrationAuditLog = class {
 * Read the current audit log.
 */
 read() {
-if (!
+if (!existsSync2(this.logPath)) {
 return { version: AUDIT_VERSION, entries: [] };
 }
 try {
-const content =
+const content = readFileSync2(this.logPath, "utf-8");
 if (!content.trim()) {
 return { version: AUDIT_VERSION, entries: [] };
 }
@@ -412,10 +516,10 @@ var MigrationAuditLog = class {
 };
 }
 write(log) {
-if (!
-
+if (!existsSync2(this.logDir)) {
+mkdirSync2(this.logDir, { recursive: true });
 }
-
+writeFileSync2(this.logPath, JSON.stringify(log, null, 2));
 }
 hashContent(content) {
 return createHash("sha256").update(content).digest("hex").substring(0, 16);
@@ -848,12 +952,12 @@ var MigrationChain = class {
 };
 
 // src/compatibility.ts
-import { existsSync as
-import { join as
+import { existsSync as existsSync4, readFileSync as readFileSync4 } from "fs";
+import { join as join4 } from "path";
 
 // src/ecosystem.ts
-import { existsSync as
-import { join as
+import { existsSync as existsSync3, readFileSync as readFileSync3 } from "fs";
+import { join as join3 } from "path";
 var ECOSYSTEM_RULES = [
 // ORM integrations
 {
@@ -1108,6 +1212,72 @@ var ECOSYSTEM_RULES = [
 upgradeCommand: "npm install nuqs@latest"
 })
 },
+// Server action / routing integrations
+{
+package: "next-safe-action",
+category: "api",
+migrations: ["zod-v3->v4"],
+check: () => ({
+issue: "next-safe-action uses Zod for input validation. Zod v4 type changes may break action definitions.",
+suggestion: "Upgrade next-safe-action to the latest version with Zod v4 support.",
+severity: "warning",
+upgradeCommand: "npm install next-safe-action@latest"
+})
+},
+{
+package: "@tanstack/router",
+category: "api",
+migrations: ["zod-v3->v4"],
+check: () => ({
+issue: "@tanstack/router uses Zod for route parameter validation. Zod v4 changes may affect type inference.",
+suggestion: "Upgrade @tanstack/router to a version with Zod v4 support.",
+severity: "warning",
+upgradeCommand: "npm install @tanstack/router@latest"
+})
+},
+{
+package: "@tanstack/react-query",
+category: "api",
+migrations: ["zod-v3->v4"],
+check: () => ({
+issue: "@tanstack/react-query may use Zod for query key/param validation via integrations.",
+suggestion: "Verify any Zod-based query validation still works after the Zod v4 upgrade.",
+severity: "info"
+})
+},
+{
+package: "fastify-type-provider-zod",
+category: "api",
+migrations: ["zod-v3->v4"],
+check: () => ({
+issue: "fastify-type-provider-zod needs a Zod v4-compatible version.",
+suggestion: "Upgrade fastify-type-provider-zod to a version supporting Zod v4.",
+severity: "warning",
+upgradeCommand: "npm install fastify-type-provider-zod@latest"
+})
+},
+{
+package: "zod-i18n-map",
+category: "validation-util",
+migrations: ["zod-v3->v4"],
+check: () => ({
+issue: 'zod-i18n-map uses Zod v3 error map format. Error messages changed in v4 (e.g., "Required" is now descriptive).',
+suggestion: "Check for a Zod v4-compatible version of zod-i18n-map or update custom error maps.",
+severity: "warning",
+upgradeCommand: "npm install zod-i18n-map@latest"
+})
+},
+{
+package: "openapi-zod-client",
+category: "openapi",
+migrations: ["zod-v3->v4"],
+check: () => ({
+issue: "openapi-zod-client generates Zod v3 schemas from OpenAPI specs. Generated code may need regeneration.",
+suggestion: "Upgrade openapi-zod-client and regenerate schemas for Zod v4 compatibility.",
+severity: "warning",
+upgradeCommand: "npm install openapi-zod-client@latest"
+})
+},
 // Schema library detection for cross-library migrations
 {
 package: "@effect/schema",
@@ -1185,13 +1355,13 @@ var EcosystemAnalyzer = class {
 const dependencies = [];
 const warnings = [];
 const blockers = [];
-const pkgPath =
-if (!
+const pkgPath = join3(projectPath, "package.json");
+if (!existsSync3(pkgPath)) {
 return { dependencies, warnings, blockers };
 }
 let allDeps = {};
 try {
-const pkg = JSON.parse(
+const pkg = JSON.parse(readFileSync3(pkgPath, "utf-8"));
 allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
 } catch {
 return { dependencies, warnings, blockers };
@@ -1312,10 +1482,10 @@ var CompatibilityAnalyzer = class {
 ecosystemAnalyzer = new EcosystemAnalyzer();
 detectVersions(projectPath) {
 const versions = [];
-const pkgPath =
-if (!
+const pkgPath = join4(projectPath, "package.json");
+if (!existsSync4(pkgPath)) return versions;
 try {
-const pkg = JSON.parse(
+const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
 const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
 const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
 for (const lib of knownLibs) {
@@ -1536,9 +1706,115 @@ async function loadConfig(configPath) {
 };
 }
 
+// src/cross-field-patterns.ts
+function requireIf(conditionField, requiredField) {
+return {
+name: `requireIf(${conditionField}, ${requiredField})`,
+description: `${requiredField} is required when ${conditionField} is truthy`,
+zodCode: [
+".superRefine((data, ctx) => {",
+` if (data.${conditionField} && !data.${requiredField}) {`,
+" ctx.addIssue({",
+" code: z.ZodIssueCode.custom,",
+` message: '${requiredField} is required when ${conditionField} is set',`,
+` path: ['${requiredField}'],`,
+" });",
+" }",
+"})"
+].join("\n")
+};
+}
+function requireOneOf(fields) {
+const fieldList = fields.map((f) => `'${f}'`).join(", ");
+const conditions = fields.map((f) => `data.${f}`).join(" || ");
+return {
+name: `requireOneOf(${fields.join(", ")})`,
+description: `At least one of [${fields.join(", ")}] must be provided`,
+zodCode: [
+".superRefine((data, ctx) => {",
+` if (!(${conditions})) {`,
+" ctx.addIssue({",
+" code: z.ZodIssueCode.custom,",
+` message: 'At least one of [${fields.join(", ")}] is required',`,
+` path: [${fieldList}],`,
+" });",
+" }",
+"})"
+].join("\n")
+};
+}
+function mutuallyExclusive(fields) {
+const checks = fields.map((f) => `(data.${f} ? 1 : 0)`).join(" + ");
+return {
+name: `mutuallyExclusive(${fields.join(", ")})`,
+description: `Only one of [${fields.join(", ")}] can be set at a time`,
+zodCode: [
+".superRefine((data, ctx) => {",
+` const count = ${checks};`,
+" if (count > 1) {",
+" ctx.addIssue({",
+" code: z.ZodIssueCode.custom,",
+` message: 'Only one of [${fields.join(", ")}] can be set at a time',`,
+" });",
+" }",
+"})"
+].join("\n")
+};
+}
+function dependentFields(primaryField, dependents) {
+const checks = dependents.map(
+(f) => ` if (!data.${f}) {
+ctx.addIssue({ code: z.ZodIssueCode.custom, message: '${f} is required when ${primaryField} is set', path: ['${f}'] });
+}`
+).join("\n");
+return {
+name: `dependentFields(${primaryField} -> ${dependents.join(", ")})`,
+description: `When ${primaryField} is set, [${dependents.join(", ")}] are required`,
+zodCode: [
+".superRefine((data, ctx) => {",
+` if (data.${primaryField}) {`,
+checks,
+" }",
+"})"
+].join("\n")
+};
+}
+function conditionalValidation(conditionField, conditionValue, targetField, validationMessage) {
+return {
+name: `conditionalValidation(${conditionField}=${conditionValue} -> ${targetField})`,
+description: `Validate ${targetField} when ${conditionField} equals ${conditionValue}`,
+zodCode: [
+".superRefine((data, ctx) => {",
+` if (data.${conditionField} === ${conditionValue} && !data.${targetField}) {`,
+" ctx.addIssue({",
+" code: z.ZodIssueCode.custom,",
+` message: '${validationMessage}',`,
+` path: ['${targetField}'],`,
+" });",
+" }",
+"})"
+].join("\n")
+};
+}
+function suggestCrossFieldPattern(whenCode) {
+const booleanMatch = whenCode.match(/\.when\(['"](\w+)['"]\s*,\s*\{[^}]*is:\s*true/);
+if (booleanMatch?.[1]) {
+const field = booleanMatch[1];
+return requireIf(field, "targetField");
+}
+const multiFieldMatch = whenCode.match(/\.when\(\[([^\]]+)\]/);
+if (multiFieldMatch?.[1]) {
+const fields = multiFieldMatch[1].split(",").map((f) => f.trim().replace(/['"]/g, "")).filter(Boolean);
+if (fields.length > 1) {
+return dependentFields(fields[0] ?? "primary", fields.slice(1));
+}
+}
+return null;
+}
+
 // src/dependency-graph.ts
-import { existsSync as
-import { join as
+import { existsSync as existsSync5, readdirSync as readdirSync2, readFileSync as readFileSync5 } from "fs";
+import { join as join5, resolve } from "path";
 var SchemaDependencyResolver = class {
 resolve(project, filePaths) {
 const fileSet = new Set(filePaths);
@@ -1665,38 +1941,38 @@ function computeParallelBatches(packages, suggestedOrder) {
 }
 var MonorepoResolver = class {
 detect(projectPath) {
-const pkgPath =
-if (
+const pkgPath = join5(projectPath, "package.json");
+if (existsSync5(pkgPath)) {
 try {
-const pkg = JSON.parse(
+const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
 if (pkg.workspaces) return true;
 } catch {
 }
 }
-if (
+if (existsSync5(join5(projectPath, "pnpm-workspace.yaml"))) return true;
 return false;
 }
 /**
 * Detect which workspace manager is being used.
 */
 detectManager(projectPath) {
-if (
-const pkgPath =
-if (
+if (existsSync5(join5(projectPath, "pnpm-workspace.yaml"))) return "pnpm";
+const pkgPath = join5(projectPath, "package.json");
+if (existsSync5(pkgPath)) {
 try {
-const pkg = JSON.parse(
+const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
 if (pkg.packageManager?.startsWith("yarn")) return "yarn";
 if (pkg.packageManager?.startsWith("pnpm")) return "pnpm";
 } catch {
 }
 }
-if (
-if (
+if (existsSync5(join5(projectPath, "pnpm-lock.yaml"))) return "pnpm";
+if (existsSync5(join5(projectPath, "yarn.lock"))) return "yarn";
 return "npm";
 }
 analyze(projectPath) {
-const pkgPath =
-if (!
+const pkgPath = join5(projectPath, "package.json");
+if (!existsSync5(pkgPath)) {
 return { isMonorepo: false, packages: [], suggestedOrder: [] };
 }
 let workspaceGlobs;
@@ -1711,10 +1987,10 @@ var MonorepoResolver = class {
 const packages = [];
 const resolvedDirs = this.resolveWorkspaceDirs(projectPath, workspaceGlobs);
 for (const dir of resolvedDirs) {
-const wsPkgPath =
-if (!
+const wsPkgPath = join5(dir, "package.json");
+if (!existsSync5(wsPkgPath)) continue;
 try {
-const wsPkg = JSON.parse(
+const wsPkg = JSON.parse(readFileSync5(wsPkgPath, "utf-8"));
 if (!wsPkg.name) continue;
 const allDeps = { ...wsPkg.dependencies, ...wsPkg.devDependencies };
 const depNames = Object.keys(allDeps);
@@ -1758,14 +2034,14 @@ var MonorepoResolver = class {
 * Supports: npm/yarn workspaces (package.json), pnpm-workspace.yaml
 */
 resolveWorkspaceGlobs(projectPath) {
-const pnpmPath =
-if (
+const pnpmPath = join5(projectPath, "pnpm-workspace.yaml");
+if (existsSync5(pnpmPath)) {
 return this.parsePnpmWorkspace(pnpmPath);
 }
-const pkgPath =
-if (
+const pkgPath = join5(projectPath, "package.json");
+if (existsSync5(pkgPath)) {
 try {
-const pkg = JSON.parse(
+const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
 if (pkg.workspaces) {
 return Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
 }
@@ -1784,7 +2060,7 @@ var MonorepoResolver = class {
 * ```
 */
 parsePnpmWorkspace(filePath) {
-const content =
+const content = readFileSync5(filePath, "utf-8");
 const globs = [];
 let inPackages = false;
 for (const line of content.split("\n")) {
@@ -1810,13 +2086,13 @@ var MonorepoResolver = class {
 for (const glob of globs) {
 const clean = glob.replace(/\/?\*$/, "");
 const base = resolve(projectPath, clean);
-if (!
+if (!existsSync5(base)) continue;
 if (glob.endsWith("*")) {
 try {
-const entries =
+const entries = readdirSync2(base, { withFileTypes: true });
 for (const entry of entries) {
 if (entry.isDirectory()) {
-dirs.push(
+dirs.push(join5(base, entry.name));
 }
 }
 } catch {
@@ -1830,8 +2106,8 @@ var MonorepoResolver = class {
 };
 
 // src/detailed-analyzer.ts
-import { existsSync as
-import { join as
+import { existsSync as existsSync6, readFileSync as readFileSync6 } from "fs";
+import { join as join6 } from "path";
 var COMPLEXITY_CHAIN_WEIGHT = 2;
 var COMPLEXITY_DEPTH_WEIGHT = 3;
 var COMPLEXITY_VALIDATION_WEIGHT = 1;
@@ -1896,10 +2172,10 @@ var DetailedAnalyzer = class {
 }
 detectLibraryVersions(projectPath) {
 const versions = [];
-const pkgPath =
-if (!
+const pkgPath = join6(projectPath, "package.json");
+if (!existsSync6(pkgPath)) return versions;
 try {
-const pkg = JSON.parse(
+const pkg = JSON.parse(readFileSync6(pkgPath, "utf-8"));
 const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
 const allDeps = {
 ...pkg.dependencies,
@@ -2074,8 +2350,8 @@ var DetailedAnalyzer = class {
 
 // src/drift-detector.ts
 import { createHash as createHash2 } from "crypto";
-import { existsSync as
-import { join as
+import { existsSync as existsSync7, mkdirSync as mkdirSync3, readFileSync as readFileSync7, writeFileSync as writeFileSync3 } from "fs";
+import { join as join7, relative } from "path";
 var SNAPSHOT_DIR = ".schemashift";
 var SNAPSHOT_FILE = "schema-snapshot.json";
 var SNAPSHOT_VERSION = 1;
@@ -2083,8 +2359,8 @@ var DriftDetector = class {
 snapshotDir;
 snapshotPath;
 constructor(projectPath) {
-this.snapshotDir =
-this.snapshotPath =
+this.snapshotDir = join7(projectPath, SNAPSHOT_DIR);
+this.snapshotPath = join7(this.snapshotDir, SNAPSHOT_FILE);
 }
 /**
 * Take a snapshot of the current schema state
@@ -2092,8 +2368,8 @@ var DriftDetector = class {
 snapshot(files, projectPath) {
 const schemas = [];
 for (const filePath of files) {
-if (!
-const content =
+if (!existsSync7(filePath)) continue;
+const content = readFileSync7(filePath, "utf-8");
 const library = this.detectLibraryFromContent(content);
 if (library === "unknown") continue;
 const schemaNames = this.extractSchemaNames(content);
@@ -2117,20 +2393,20 @@ var DriftDetector = class {
 * Save a snapshot to disk
 */
 saveSnapshot(snapshot) {
-if (!
-
+if (!existsSync7(this.snapshotDir)) {
+mkdirSync3(this.snapshotDir, { recursive: true });
 }
-
+writeFileSync3(this.snapshotPath, JSON.stringify(snapshot, null, 2));
 }
 /**
 * Load saved snapshot from disk
 */
 loadSnapshot() {
-if (!
+if (!existsSync7(this.snapshotPath)) {
 return null;
 }
 try {
-const content =
+const content = readFileSync7(this.snapshotPath, "utf-8");
 return JSON.parse(content);
 } catch {
 return null;
@@ -2592,6 +2868,250 @@ var GovernanceEngine = class {
 }
 };
 
+// src/governance-fixer.ts
+var GovernanceFixer = class {
+defaultMaxLength = 1e4;
+/**
+* Set the default max length appended by the require-max-length fix.
+*/
+setDefaultMaxLength(length) {
+this.defaultMaxLength = length;
+}
+/**
+* Check if a violation is auto-fixable.
+*/
+canFix(violation) {
+return [
+"no-any-schemas",
+"require-descriptions",
+"require-max-length",
+"naming-convention",
+"no-any",
+"require-description",
+"required-validations",
+"require-safeParse"
+].includes(violation.rule);
+}
+/**
+* Fix a single violation in a source file.
+* Returns the fixed code for the entire file.
+*/
+fix(violation, sourceCode) {
+switch (violation.rule) {
+case "no-any-schemas":
+case "no-any":
+return this.fixNoAny(violation, sourceCode);
+case "require-descriptions":
+case "require-description":
+return this.fixRequireDescription(violation, sourceCode);
+case "require-max-length":
+case "required-validations":
+return this.fixRequireMaxLength(violation, sourceCode);
+case "naming-convention":
+return this.fixNamingConvention(violation, sourceCode);
+case "require-safeParse":
+return this.fixRequireSafeParse(violation, sourceCode);
+default:
+return {
+success: false,
+explanation: `No auto-fix available for rule: ${violation.rule}`,
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+}
+/**
+* Fix all fixable violations in a source file.
+* Applies fixes from bottom to top to preserve line numbers.
+*/
+fixAll(violations, sourceCode) {
+const fixable = violations.filter((v) => this.canFix(v));
+const results = [];
+let currentCode = sourceCode;
+let fixed = 0;
+const sorted = [...fixable].sort((a, b) => b.lineNumber - a.lineNumber);
+for (const violation of sorted) {
+const result = this.fix(violation, currentCode);
+results.push(result);
+if (result.success && result.fixedCode) {
+currentCode = result.fixedCode;
+fixed++;
+}
+}
+return {
+totalViolations: violations.length,
+fixed,
+skipped: violations.length - fixed,
+results
+};
+}
+fixNoAny(violation, sourceCode) {
+const lines = sourceCode.split("\n");
+const lineIndex = violation.lineNumber - 1;
+const line = lines[lineIndex];
+if (!line) {
+return {
+success: false,
+explanation: `Line ${violation.lineNumber} not found`,
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+let fixedLine = line;
+let explanation = "";
+if (/\bz\.any\(\)/.test(line)) {
+fixedLine = line.replace(/\bz\.any\(\)/, "z.unknown()");
+explanation = "Replaced z.any() with z.unknown() for type safety";
+} else if (/\byup\.mixed\(\)/.test(line)) {
+fixedLine = line.replace(/\byup\.mixed\(\)/, "yup.mixed().required()");
+explanation = "Added .required() constraint to yup.mixed()";
+} else if (/\bt\.any\b/.test(line)) {
+fixedLine = line.replace(/\bt\.any\b/, "t.unknown");
+explanation = "Replaced t.any with t.unknown for type safety";
+} else if (/\bv\.any\(\)/.test(line)) {
+fixedLine = line.replace(/\bv\.any\(\)/, "v.unknown()");
+explanation = "Replaced v.any() with v.unknown() for type safety";
+} else {
+return {
+success: false,
+explanation: "Could not identify any-type pattern to fix",
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+lines[lineIndex] = fixedLine;
+return {
+success: true,
+fixedCode: lines.join("\n"),
+explanation,
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+fixRequireDescription(violation, sourceCode) {
+const lines = sourceCode.split("\n");
+const lineIndex = violation.lineNumber - 1;
+const line = lines[lineIndex];
+if (!line) {
+return {
+success: false,
+explanation: `Line ${violation.lineNumber} not found`,
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+let endLineIndex = lineIndex;
+for (let i = lineIndex; i < lines.length && i < lineIndex + 20; i++) {
+if (lines[i]?.includes(";")) {
+endLineIndex = i;
+break;
+}
+}
+const endLine = lines[endLineIndex] ?? "";
+const schemaName = violation.schemaName || "schema";
+const description = `${schemaName} schema`;
+const semicolonIndex = endLine.lastIndexOf(";");
+if (semicolonIndex >= 0) {
+lines[endLineIndex] = `${endLine.slice(0, semicolonIndex)}.describe('${description}')${endLine.slice(semicolonIndex)}`;
+} else {
+lines[endLineIndex] = `${endLine}.describe('${description}')`;
+}
+return {
+success: true,
+fixedCode: lines.join("\n"),
+explanation: `Added .describe('${description}') to ${schemaName}`,
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+fixRequireMaxLength(violation, sourceCode) {
+const lines = sourceCode.split("\n");
+const lineIndex = violation.lineNumber - 1;
+const line = lines[lineIndex];
+if (!line) {
+return {
+success: false,
+explanation: `Line ${violation.lineNumber} not found`,
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+if (/z\.string\(\)/.test(line)) {
+lines[lineIndex] = line.replace(/z\.string\(\)/, `z.string().max(${this.defaultMaxLength})`);
+return {
+success: true,
+fixedCode: lines.join("\n"),
+explanation: `Added .max(${this.defaultMaxLength}) to string schema`,
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+return {
+success: false,
+explanation: "Could not find z.string() pattern to fix on this line",
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+fixNamingConvention(violation, sourceCode) {
+const schemaName = violation.schemaName;
+if (!schemaName) {
+return {
+success: false,
+explanation: "No schema name available for renaming",
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+const newName = schemaName.endsWith("Schema") ? schemaName : `${schemaName}Schema`;
+if (newName === schemaName) {
+return {
+success: false,
+explanation: "Schema already matches naming convention",
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+const fixedCode = sourceCode.replace(new RegExp(`\\b${schemaName}\\b`, "g"), newName);
+return {
+success: true,
+fixedCode,
+explanation: `Renamed "${schemaName}" to "${newName}"`,
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+fixRequireSafeParse(violation, sourceCode) {
+const lines = sourceCode.split("\n");
+const lineIndex = violation.lineNumber - 1;
+const line = lines[lineIndex];
+if (!line) {
+return {
+success: false,
+explanation: `Line ${violation.lineNumber} not found`,
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+if (line.includes(".parse(") && !line.includes(".safeParse(")) {
+lines[lineIndex] = line.replace(".parse(", ".safeParse(");
+return {
+success: true,
+fixedCode: lines.join("\n"),
+explanation: "Replaced .parse() with .safeParse() for safer error handling",
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+return {
+success: false,
+explanation: "Could not find .parse() pattern to fix",
+rule: violation.rule,
+lineNumber: violation.lineNumber
+};
+}
+};
+
 // src/governance-templates.ts
 var GOVERNANCE_TEMPLATES = [
 {
@@ -2840,17 +3360,184 @@ function getGovernanceTemplateNames() {
 return GOVERNANCE_TEMPLATES.map((t) => t.name);
 }
 
+// src/graph-exporter.ts
+var LIBRARY_COLORS = {
+zod: "#3068B7",
+yup: "#32CD32",
+joi: "#FF6347",
+"io-ts": "#9370DB",
+valibot: "#FF8C00",
+arktype: "#20B2AA",
+superstruct: "#DAA520",
+effect: "#6A5ACD"
+};
+var LIBRARY_MERMAID_STYLES = {
+zod: "fill:#3068B7,color:#fff",
+yup: "fill:#32CD32,color:#000",
+joi: "fill:#FF6347,color:#fff",
+"io-ts": "fill:#9370DB,color:#fff",
+valibot: "fill:#FF8C00,color:#000",
+arktype: "fill:#20B2AA,color:#fff",
+superstruct: "fill:#DAA520,color:#000",
+effect: "fill:#6A5ACD,color:#fff"
+};
+var GraphExporter = class {
+/**
+* Export dependency graph as DOT format for Graphviz.
+*/
+exportDot(graph, options = {}) {
+const lines = [];
+lines.push("digraph SchemaShiftDependencies {");
+lines.push(" rankdir=LR;");
+lines.push(' node [shape=box, style=filled, fontname="monospace"];');
+lines.push(' edge [color="#666666"];');
+lines.push("");
+const circularFiles = /* @__PURE__ */ new Set();
+if (options.highlightCircular && graph.circularWarnings.length > 0) {
+for (const warning of graph.circularWarnings) {
+const match = warning.match(/Circular dependency: (.+)/);
+if (match?.[1]) {
+for (const part of match[1].split(" -> ")) {
+for (const file of graph.sortedFiles) {
+if (file.endsWith(part.trim()) || this.shortenPath(file) === part.trim()) {
+circularFiles.add(file);
+}
+}
+}
+}
+}
+}
+for (const filePath of graph.sortedFiles) {
+const meta = options.nodeMetadata?.get(filePath);
+const library = meta?.library;
+if (options.filterLibrary && library !== options.filterLibrary) continue;
+const shortPath = this.shortenPath(filePath);
+const nodeId = this.toNodeId(filePath);
+const attrs = [];
+attrs.push(`label="${shortPath}"`);
+if (circularFiles.has(filePath)) {
+attrs.push('color="#FF0000"');
+attrs.push("penwidth=2");
+}
+if (options.colorByLibrary && library && LIBRARY_COLORS[library]) {
+attrs.push(`fillcolor="${LIBRARY_COLORS[library]}"`);
+attrs.push('fontcolor="white"');
+} else {
+attrs.push('fillcolor="#E8E8E8"');
+}
+if (meta?.schemaCount) {
+attrs.push(`tooltip="${meta.schemaCount} schema(s)"`);
+}
+lines.push(` ${nodeId} [${attrs.join(", ")}];`);
+}
+lines.push("");
+const filterSet = options.filterLibrary ? new Set(
+graph.sortedFiles.filter((f) => {
+const meta = options.nodeMetadata?.get(f);
+return meta?.library === options.filterLibrary;
+})
+) : void 0;
+for (const [file, deps] of graph.dependencies) {
+if (filterSet && !filterSet.has(file)) continue;
+const fromId = this.toNodeId(file);
+for (const dep of deps) {
+if (filterSet && !filterSet.has(dep)) continue;
+const toId = this.toNodeId(dep);
+const edgeAttrs = [];
+if (options.highlightCircular && circularFiles.has(file) && circularFiles.has(dep)) {
+edgeAttrs.push('color="#FF0000"');
+edgeAttrs.push("penwidth=2");
+}
+lines.push(
+` ${fromId} -> ${toId}${edgeAttrs.length > 0 ? ` [${edgeAttrs.join(", ")}]` : ""};`
+);
+}
+}
+lines.push("}");
+return lines.join("\n");
+}
+/**
+* Export dependency graph as Mermaid diagram syntax.
+*/
+exportMermaid(graph, options = {}) {
+const lines = [];
+lines.push("graph LR");
+const styledNodes = /* @__PURE__ */ new Map();
+for (const [file, deps] of graph.dependencies) {
+const meta = options.nodeMetadata?.get(file);
+if (options.filterLibrary && meta?.library !== options.filterLibrary) continue;
+const fromId = this.toMermaidId(file);
+const fromLabel = this.shortenPath(file);
+if (meta?.library) {
+styledNodes.set(fromId, meta.library);
+}
+if (deps.length === 0) {
+lines.push(` ${fromId}["${fromLabel}"]`);
+}
+for (const dep of deps) {
+const depMeta = options.nodeMetadata?.get(dep);
+if (options.filterLibrary && depMeta?.library !== options.filterLibrary) continue;
+const toId = this.toMermaidId(dep);
+const toLabel = this.shortenPath(dep);
+if (depMeta?.library) {
+styledNodes.set(toId, depMeta.library);
+}
+lines.push(` ${fromId}["${fromLabel}"] --> ${toId}["${toLabel}"]`);
+}
+}
+for (const file of graph.sortedFiles) {
+const meta = options.nodeMetadata?.get(file);
+if (options.filterLibrary && meta?.library !== options.filterLibrary) continue;
+const id = this.toMermaidId(file);
+if (!lines.some((l) => l.includes(id))) {
+lines.push(` ${id}["${this.shortenPath(file)}"]`);
+if (meta?.library) {
+styledNodes.set(id, meta.library);
+}
+}
+}
+if (options.colorByLibrary && styledNodes.size > 0) {
+lines.push("");
+const libraryGroups = /* @__PURE__ */ new Map();
+for (const [nodeId, library] of styledNodes) {
+const group = libraryGroups.get(library) ?? [];
+group.push(nodeId);
+libraryGroups.set(library, group);
+}
+for (const [library, nodeIds] of libraryGroups) {
+const style = LIBRARY_MERMAID_STYLES[library];
+if (style) {
+for (const nodeId of nodeIds) {
+lines.push(` style ${nodeId} ${style}`);
+}
+}
+}
+}
+return lines.join("\n");
+}
+shortenPath(filePath) {
+const parts = filePath.split("/");
+return parts.slice(-2).join("/");
+}
+toNodeId(filePath) {
+return filePath.replace(/[^a-zA-Z0-9]/g, "_").replace(/^_+/, "").replace(/_+$/, "");
+}
+toMermaidId(filePath) {
+return filePath.replace(/[^a-zA-Z0-9]/g, "_").replace(/^_+/, "n_").replace(/_+$/, "");
+}
+};
+
 // src/incremental.ts
-import { existsSync as
-import { join as
+import { existsSync as existsSync8, mkdirSync as mkdirSync4, readFileSync as readFileSync8, unlinkSync, writeFileSync as writeFileSync4 } from "fs";
+import { join as join8 } from "path";
 var STATE_DIR = ".schemashift";
 var STATE_FILE = "incremental.json";
 var IncrementalTracker = class {
 stateDir;
 statePath;
 constructor(projectPath) {
-this.stateDir =
-this.statePath =
+this.stateDir = join8(projectPath, STATE_DIR);
+this.statePath = join8(this.stateDir, STATE_FILE);
 }
 start(files, from, to) {
 const state = {
@@ -2885,9 +3572,9 @@ var IncrementalTracker = class {
 this.saveState(state);
 }
 getState() {
-if (!
+if (!existsSync8(this.statePath)) return null;
 try {
-return JSON.parse(
+return JSON.parse(readFileSync8(this.statePath, "utf-8"));
 } catch {
 return null;
 }
@@ -2914,21 +3601,299 @@ var IncrementalTracker = class {
|
|
|
2914
3601
|
};
|
|
2915
3602
|
}
|
|
2916
3603
|
clear() {
|
|
2917
|
-
if (
|
|
3604
|
+
if (existsSync8(this.statePath)) {
|
|
2918
3605
|
unlinkSync(this.statePath);
|
|
2919
3606
|
}
|
|
2920
3607
|
}
|
|
2921
3608
|
saveState(state) {
|
|
2922
|
-
if (!
|
|
2923
|
-
|
|
3609
|
+
if (!existsSync8(this.stateDir)) {
|
|
3610
|
+
mkdirSync4(this.stateDir, { recursive: true });
|
|
2924
3611
|
}
|
|
2925
|
-
|
|
3612
|
+
writeFileSync4(this.statePath, JSON.stringify(state, null, 2));
|
|
3613
|
+
}
|
|
3614
|
+
};
|
|
3615
|
+
|
|
3616
|
+
// src/migration-templates.ts
|
|
3617
|
+
var BUILT_IN_TEMPLATES = [
|
|
3618
|
+
{
|
|
3619
|
+
name: "react-hook-form-yup-to-zod",
|
|
3620
|
+
description: "Migrate React Hook Form project from Yup to Zod validation",
|
|
3621
|
+
category: "form-migration",
|
|
3622
|
+
migrationSteps: [{ from: "yup", to: "zod", description: "Convert Yup schemas to Zod schemas" }],
|
|
3623
|
+
preChecks: [
|
|
3624
|
+
{ description: "Ensure @hookform/resolvers is installed" },
|
|
3625
|
+
{ description: "Check for .when() conditional validations that need manual review" }
|
|
3626
|
+
],
|
|
3627
|
+
postSteps: [
|
|
3628
|
+
{
|
|
3629
|
+
description: "Update resolver imports: yupResolver \u2192 zodResolver",
|
|
3630
|
+
command: void 0
|
|
3631
|
+
},
|
|
3632
|
+
{
|
|
3633
|
+
description: "Run tests to verify form validation behavior",
|
|
3634
|
+
command: "npm test"
|
|
3635
|
+
},
|
|
3636
|
+
{
|
|
3637
|
+
description: "Remove Yup dependency if no longer used",
|
|
3638
|
+
command: "npm uninstall yup"
|
|
3639
|
+
}
|
|
3640
|
+
],
|
|
3641
|
+
packageChanges: [
|
|
3642
|
+
{ action: "install", package: "zod", version: "^3.24.0" },
|
|
3643
|
+
{ action: "upgrade", package: "@hookform/resolvers", version: "latest" }
|
|
3644
|
+
],
|
|
3645
|
+
recommendedFlags: ["--cross-file", "--scaffold-tests", "--verbose"],
|
|
3646
|
+
estimatedEffort: "moderate"
|
|
3647
|
+
},
|
|
3648
|
+
{
|
|
3649
|
+
name: "trpc-zod-v3-to-v4",
|
|
3650
|
+
description: "Upgrade tRPC project from Zod v3 to Zod v4",
|
|
3651
|
+
category: "framework-upgrade",
|
|
3652
|
+
migrationSteps: [
|
|
3653
|
+
{ from: "zod-v3", to: "v4", description: "Upgrade Zod v3 schemas to v4 syntax" }
|
|
3654
|
+
],
|
|
3655
|
+
preChecks: [
|
|
3656
|
+
{ description: "Check tRPC version \u2014 v11+ required for Zod v4 compatibility" },
|
|
3657
|
+
{ description: "Check zod-validation-error version \u2014 v5.0.0+ required" },
|
|
3658
|
+
{ description: "Run existing test suite to establish baseline", command: "npm test" }
|
|
3659
|
+
],
|
|
3660
|
+
postSteps: [
|
|
3661
|
+
{
|
|
3662
|
+
description: "Update tRPC to v11 if not already",
|
|
3663
|
+
command: "npm install @trpc/server@latest @trpc/client@latest"
|
|
3664
|
+
},
|
|
3665
|
+
{
|
|
3666
|
+
description: "Update zod-validation-error if used",
|
|
3667
|
+
command: "npm install zod-validation-error@^5.0.0"
|
|
3668
|
+
},
|
|
3669
|
+
{ description: "Review TODO(schemashift) comments for manual fixes" },
|
|
3670
|
+
{ description: "Run tests to verify tRPC router behavior", command: "npm test" }
|
|
3671
|
+
],
|
|
3672
|
+
packageChanges: [
|
|
3673
|
+
{ action: "upgrade", package: "zod", version: "^3.25.0" },
|
|
3674
|
+
{ action: "upgrade", package: "@trpc/server", version: "^11.0.0" }
|
|
3675
|
+
],
|
|
3676
|
+
recommendedFlags: ["--compat-check", "--scaffold-tests", "--verbose"],
|
|
3677
|
+
estimatedEffort: "high"
|
|
3678
|
+
},
|
|
3679
|
+
{
|
|
3680
|
+
name: "express-joi-to-zod",
|
|
3681
|
+
description: "Migrate Express.js API validators from Joi to Zod",
|
|
3682
|
+
category: "library-switch",
|
|
3683
|
+
migrationSteps: [{ from: "joi", to: "zod", description: "Convert Joi schemas to Zod schemas" }],
|
|
3684
|
+
preChecks: [
|
|
3685
|
+
{ description: "Identify middleware using Joi validation" },
|
|
3686
|
+
{ description: "Check for Joi.extend() custom validators that need manual migration" }
|
|
3687
|
+
],
|
|
3688
|
+
postSteps: [
|
|
3689
|
+
{ description: "Update Express middleware to use Zod schemas" },
|
|
3690
|
+
{ description: "Replace celebrate/express-validation with custom Zod middleware" },
|
|
3691
|
+
{ description: "Run API integration tests", command: "npm test" },
|
|
3692
|
+
{ description: "Remove Joi dependency", command: "npm uninstall joi" }
|
|
3693
|
+
],
|
|
3694
|
+
packageChanges: [
|
|
3695
|
+
{ action: "install", package: "zod", version: "^3.24.0" },
|
|
3696
|
+
{ action: "remove", package: "celebrate" }
|
|
3697
|
+
],
|
|
3698
|
+
recommendedFlags: ["--cross-file", "--verbose"],
|
|
3699
|
+
estimatedEffort: "moderate"
|
|
3700
|
+
},
|
|
3701
|
+
{
|
|
3702
|
+
name: "nextjs-form-migration",
|
|
3703
|
+
description: "Migrate Next.js form validation from Yup/Formik to Zod/React Hook Form",
|
|
3704
|
+
category: "form-migration",
|
|
3705
|
+
migrationSteps: [{ from: "yup", to: "zod", description: "Convert Yup schemas to Zod schemas" }],
|
|
3706
|
+
preChecks: [
|
|
3707
|
+
{ description: "Identify all Formik form components" },
|
|
3708
|
+
{ description: "Check for server-side validation using Yup" },
|
|
3709
|
+
{ description: "Run existing tests to establish baseline", command: "npm test" }
|
|
3710
|
+
],
|
|
3711
|
+
postSteps: [
|
|
3712
|
+
{ description: "Replace Formik with React Hook Form + zodResolver" },
|
|
3713
|
+
{ description: "Update server actions to use Zod for validation" },
|
|
3714
|
+
{
|
|
3715
|
+
description: "Install next-safe-action if using server actions",
|
|
3716
|
+
command: "npm install next-safe-action"
|
|
3717
|
+
},
|
|
3718
|
+
{ description: "Run full test suite", command: "npm test" }
|
|
3719
|
+
],
|
|
3720
|
+
packageChanges: [
|
|
3721
|
+
{ action: "install", package: "zod", version: "^3.24.0" },
|
|
3722
|
+
{ action: "install", package: "react-hook-form", version: "^7.0.0" },
|
|
3723
|
+
{ action: "install", package: "@hookform/resolvers", version: "latest" }
|
|
3724
|
+
],
|
|
3725
|
+
recommendedFlags: ["--cross-file", "--scaffold-tests"],
|
|
3726
|
+
estimatedEffort: "high"
|
|
3727
|
+
},
|
|
3728
|
+
{
|
|
3729
|
+
name: "monorepo-staged-migration",
|
|
3730
|
+
description: "Phased monorepo migration with incremental tracking",
|
|
3731
|
+
category: "monorepo",
|
|
3732
|
+
migrationSteps: [
|
|
3733
|
+
{ from: "yup", to: "zod", description: "Convert shared packages first, then applications" }
|
|
3734
|
+
],
|
|
3735
|
+
preChecks: [
|
|
3736
|
+
{ description: "Analyze monorepo workspace structure" },
|
|
3737
|
+
{ description: "Identify shared schema packages used by multiple apps" },
|
|
3738
|
+
{ description: "Ensure all packages build successfully", command: "npm run build" }
|
|
3739
|
+
],
|
|
3740
|
+
postSteps: [
|
|
3741
|
+
{ description: "Run incremental migration starting with leaf packages" },
|
|
3742
|
+
{ description: "Build all packages after each batch", command: "npm run build" },
|
|
3743
|
+
{ description: "Run full test suite", command: "npm test" },
|
|
3744
|
+
{ description: "Review cross-package type compatibility" }
|
|
3745
|
+
],
|
|
3746
|
+
packageChanges: [],
|
|
3747
|
+
recommendedFlags: ["--cross-file", "--incremental", "--compat-check", "--audit"],
|
|
3748
|
+
estimatedEffort: "high"
|
|
3749
|
+
}
|
|
3750
|
+
];
|
|
3751
|
+
function getMigrationTemplate(name) {
|
|
3752
|
+
return BUILT_IN_TEMPLATES.find((t) => t.name === name);
|
|
3753
|
+
}
|
|
3754
|
+
function getMigrationTemplateNames() {
|
|
3755
|
+
return BUILT_IN_TEMPLATES.map((t) => t.name);
|
|
3756
|
+
}
|
|
3757
|
+
function getMigrationTemplatesByCategory(category) {
|
|
3758
|
+
return BUILT_IN_TEMPLATES.filter((t) => t.category === category);
|
|
3759
|
+
}
|
|
3760
|
+
function getAllMigrationTemplates() {
|
|
3761
|
+
return [...BUILT_IN_TEMPLATES];
|
|
3762
|
+
}
|
|
3763
|
+
function validateMigrationTemplate(template) {
|
|
3764
|
+
const errors = [];
|
|
3765
|
+
if (!template.name || template.name.trim().length === 0) {
|
|
3766
|
+
errors.push("Template name is required");
|
|
3767
|
+
}
|
|
3768
|
+
if (!template.description || template.description.trim().length === 0) {
|
|
3769
|
+
errors.push("Template description is required");
|
|
3770
|
+
}
|
|
3771
|
+
if (!template.migrationSteps || template.migrationSteps.length === 0) {
|
|
3772
|
+
errors.push("At least one migration step is required");
|
|
3773
|
+
}
|
|
3774
|
+
for (const step of template.migrationSteps ?? []) {
|
|
3775
|
+
if (!step.from || !step.to) {
|
|
3776
|
+
errors.push(`Migration step must have from and to: ${JSON.stringify(step)}`);
|
|
3777
|
+
}
|
|
3778
|
+
}
|
|
3779
|
+
return { valid: errors.length === 0, errors };
|
|
3780
|
+
}
|
|
3781
|
+
|
|
3782
|
+
// src/notifications.ts
|
|
3783
|
+
async function computeSignature(payload, secret) {
|
|
3784
|
+
const { createHmac } = await import("crypto");
|
|
3785
|
+
+  return createHmac("sha256", secret).update(payload).digest("hex");
+}
+var WebhookNotifier = class {
+  webhooks;
+  constructor(webhooks) {
+    this.webhooks = webhooks;
+  }
+  /**
+   * Create a migration event with current timestamp.
+   */
+  createEvent(type, details, project) {
+    return {
+      type,
+      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+      project,
+      details
+    };
+  }
+  /**
+   * Send an event to all matching webhooks.
+   */
+  async send(event) {
+    const results = [];
+    for (const webhook of this.webhooks) {
+      if (webhook.events && !webhook.events.includes(event.type)) {
+        continue;
+      }
+      const result = await this.sendToWebhook(webhook, event);
+      results.push(result);
+    }
+    return results;
+  }
+  /**
+   * Send event to a single webhook endpoint.
+   */
+  async sendToWebhook(webhook, event) {
+    const payload = JSON.stringify(event);
+    const headers = {
+      "Content-Type": "application/json",
+      "User-Agent": "SchemaShift-Webhook/1.0",
+      ...webhook.headers
+    };
+    if (webhook.secret) {
+      const signature = await computeSignature(payload, webhook.secret);
+      headers["X-SchemaShift-Signature"] = `sha256=${signature}`;
+    }
+    try {
+      const response = await fetch(webhook.url, {
+        method: "POST",
+        headers,
+        body: payload
+      });
+      return {
+        success: response.ok,
+        statusCode: response.status,
+        error: response.ok ? void 0 : `HTTP ${response.status}: ${response.statusText}`
+      };
+    } catch (err) {
+      return {
+        success: false,
+        error: err instanceof Error ? err.message : String(err)
+      };
+    }
+  }
+  /**
+   * Convenience: send a migration_started event.
+   */
+  async notifyMigrationStarted(from, to, fileCount, project) {
+    const event = this.createEvent("migration_started", { from, to, fileCount }, project);
+    return this.send(event);
+  }
+  /**
+   * Convenience: send a migration_completed event.
+   */
+  async notifyMigrationCompleted(from, to, fileCount, warningCount, project) {
+    const event = this.createEvent(
+      "migration_completed",
+      { from, to, fileCount, warningCount },
+      project
+    );
+    return this.send(event);
+  }
+  /**
+   * Convenience: send a migration_failed event.
+   */
+  async notifyMigrationFailed(from, to, error, project) {
+    const event = this.createEvent("migration_failed", { from, to, error }, project);
+    return this.send(event);
+  }
+  /**
+   * Convenience: send a governance_violation event.
+   */
+  async notifyGovernanceViolation(violationCount, rules, project) {
+    const event = this.createEvent("governance_violation", { violationCount, rules }, project);
+    return this.send(event);
+  }
+  /**
+   * Convenience: send a drift_detected event.
+   */
+  async notifyDriftDetected(modifiedFiles, addedFiles, removedFiles, project) {
+    const event = this.createEvent(
+      "drift_detected",
+      { modifiedFiles, addedFiles, removedFiles },
+      project
+    );
+    return this.send(event);
   }
 };
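Note: when a secret is configured, WebhookNotifier signs each delivery with an HMAC-SHA256 hex digest of the raw JSON body, sent as X-SchemaShift-Signature: sha256=<digest>. The sketch below shows one way a receiver could verify that header and how the notifier itself might be driven; the webhook config shape ({ url, secret, events, headers }) is inferred from the code above, and the endpoint URL and environment variable are placeholders, not part of this package.

// Receiver-side verification sketch (Node); assumes access to the raw request body.
import { createHmac, timingSafeEqual } from "crypto";

function verifySchemaShiftSignature(rawBody, signatureHeader, secret) {
  // The notifier sends "sha256=" followed by a hex HMAC-SHA256 of the payload.
  const expected = `sha256=${createHmac("sha256", secret).update(rawBody).digest("hex")}`;
  const a = Buffer.from(expected);
  const b = Buffer.from(signatureHeader ?? "");
  return a.length === b.length && timingSafeEqual(a, b);
}

// Sender-side sketch, using the constructor and convenience methods shown above.
const notifier = new WebhookNotifier([
  {
    url: "https://hooks.example.com/schemashift", // placeholder endpoint
    secret: process.env.SCHEMASHIFT_WEBHOOK_SECRET,
    events: ["migration_completed", "migration_failed"]
  }
]);
await notifier.notifyMigrationCompleted("yup", "zod", 42, 3, "my-app");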
 
 // src/package-updater.ts
-import { existsSync as
-import { join as
+import { existsSync as existsSync9, readFileSync as readFileSync9, writeFileSync as writeFileSync5 } from "fs";
+import { join as join9 } from "path";
 var TARGET_VERSIONS = {
   "yup->zod": { zod: "^3.24.0" },
   "joi->zod": { zod: "^3.24.0" },
@@ -2949,14 +3914,14 @@ var PackageUpdater = class {
     const add = {};
     const remove = [];
     const warnings = [];
-    const pkgPath =
-    if (!
+    const pkgPath = join9(projectPath, "package.json");
+    if (!existsSync9(pkgPath)) {
       warnings.push("No package.json found. Cannot plan dependency updates.");
       return { add, remove, warnings };
     }
     let pkg;
     try {
-      pkg = JSON.parse(
+      pkg = JSON.parse(readFileSync9(pkgPath, "utf-8"));
     } catch {
       warnings.push("Could not parse package.json.");
       return { add, remove, warnings };
@@ -2986,9 +3951,9 @@ var PackageUpdater = class {
     return { add, remove, warnings };
   }
   apply(projectPath, plan) {
-    const pkgPath =
-    if (!
-    const pkgText =
+    const pkgPath = join9(projectPath, "package.json");
+    if (!existsSync9(pkgPath)) return;
+    const pkgText = readFileSync9(pkgPath, "utf-8");
     const pkg = JSON.parse(pkgText);
     if (!pkg.dependencies) pkg.dependencies = {};
     for (const [name, version] of Object.entries(plan.add)) {
@@ -2998,7 +3963,7 @@ var PackageUpdater = class {
         pkg.dependencies[name] = version;
       }
     }
-
+    writeFileSync5(pkgPath, `${JSON.stringify(pkg, null, 2)}
 `);
   }
 };
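The PackageUpdater hunks above only re-alias the fs/path imports (existsSync9, readFileSync9, writeFileSync5, join9); the plan/apply behavior is unchanged. A rough sketch of apply() based on what is visible here; the argument-less constructor and the handling of plan.remove are assumptions, since neither appears in these hunks.

// The plan shape ({ add, remove, warnings }) and apply(projectPath, plan) are taken from the diff.
const updater = new PackageUpdater();
const plan = {
  add: { zod: "^3.24.0" }, // version matches TARGET_VERSIONS["yup->zod"] above
  remove: [],              // removal handling is not shown in these hunks
  warnings: []
};
updater.apply("/path/to/project", plan);
// apply() merges plan.add into pkg.dependencies and rewrites package.json
// with two-space indentation and a trailing newline (see writeFileSync5 above).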
@@ -3170,8 +4135,8 @@ var PluginLoader = class {
 };
 
 // src/standard-schema.ts
-import { existsSync as
-import { join as
+import { existsSync as existsSync10, readFileSync as readFileSync10 } from "fs";
+import { join as join10 } from "path";
 var STANDARD_SCHEMA_LIBRARIES = {
   zod: { minMajor: 3, minMinor: 23 },
   // Zod v3.23+ and v4+
@@ -3200,13 +4165,13 @@ function isVersionCompatible(version, minMajor, minMinor) {
   return false;
 }
 function detectStandardSchema(projectPath) {
-  const pkgPath =
-  if (!
+  const pkgPath = join10(projectPath, "package.json");
+  if (!existsSync10(pkgPath)) {
     return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
   }
   let allDeps = {};
   try {
-    const pkg = JSON.parse(
+    const pkg = JSON.parse(readFileSync10(pkgPath, "utf-8"));
     allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
   } catch {
     return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
@@ -3245,6 +4210,105 @@ function detectStandardSchema(projectPath) {
   return { detected, compatibleLibraries, recommendation, adoptionPath, interopTools };
 }
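detectStandardSchema keeps the same return shape; only the import aliases changed. A minimal call sketch (the path is a placeholder, and the exact contents of compatibleLibraries are not shown in these hunks):

const info = detectStandardSchema("/path/to/project"); // placeholder path
if (info.detected) {
  console.log(info.compatibleLibraries); // libraries meeting the minimums above, e.g. zod >= 3.23
  console.log(info.recommendation);
  console.log(info.interopTools);
}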
 
+// src/standard-schema-advisor.ts
+var STANDARD_SCHEMA_LIBS = /* @__PURE__ */ new Set(["zod", "valibot", "arktype"]);
+var StandardSchemaAdvisor = class {
+  /**
+   * Check if a schema library supports Standard Schema.
+   */
+  supportsStandardSchema(library) {
+    return STANDARD_SCHEMA_LIBS.has(library);
+  }
+  /**
+   * Generate advisory for a given migration path.
+   */
+  advise(from, to) {
+    const fromSupports = this.supportsStandardSchema(from);
+    const toSupports = this.supportsStandardSchema(to);
+    if (!fromSupports && !toSupports) {
+      return {
+        shouldConsiderAdapter: false,
+        reason: `Neither ${from} nor ${to} supports Standard Schema. Full migration is recommended.`,
+        migrationAdvantages: [
+          "Complete type safety with target library",
+          "Access to target library ecosystem",
+          "No runtime adapter overhead"
+        ],
+        adapterAdvantages: [],
+        recommendation: "migrate"
+      };
+    }
+    if (fromSupports && toSupports) {
+      return {
+        shouldConsiderAdapter: true,
+        reason: `Both ${from} and ${to} support Standard Schema 1.0. You may be able to use adapters for ecosystem tools (tRPC, TanStack Form, etc.) instead of migrating all schemas.`,
+        adapterExample: this.generateAdapterExample(from, to),
+        migrationAdvantages: [
+          "Full target library API and ergonomics",
+          "Consistent codebase (single library)",
+          "Better IDE support for one library",
+          "Smaller bundle (avoid loading two libraries)"
+        ],
+        adapterAdvantages: [
+          "No code changes needed for existing schemas",
+          "Gradual migration possible",
+          "Ecosystem tools work with both libraries via Standard Schema",
+          "Lower risk \u2014 existing validation behavior preserved"
+        ],
+        recommendation: "either"
+      };
+    }
+    if (toSupports && !fromSupports) {
+      return {
+        shouldConsiderAdapter: false,
+        reason: `${from} does not support Standard Schema, but ${to} does. Migrating to ${to} gives you Standard Schema interoperability.`,
+        migrationAdvantages: [
+          "Standard Schema interoperability with ecosystem tools",
+          "Future-proof validation layer",
+          `Access to ${to} API and type inference`
+        ],
+        adapterAdvantages: [],
+        recommendation: "migrate"
+      };
+    }
+    return {
+      shouldConsiderAdapter: false,
+      reason: `${from} supports Standard Schema but ${to} does not. Consider if you need the specific features of ${to} that justify losing Standard Schema interoperability.`,
+      migrationAdvantages: [`Access to ${to}-specific features`],
+      adapterAdvantages: [`Keeping ${from} preserves Standard Schema interoperability`],
+      recommendation: "migrate"
+    };
+  }
+  /**
+   * Analyze a project and provide advisory based on detected libraries.
+   */
+  adviseFromProject(projectPath, from, to) {
+    const projectInfo = detectStandardSchema(projectPath);
+    const advisory = this.advise(from, to);
+    return { ...advisory, projectInfo };
+  }
+  generateAdapterExample(from, to) {
+    return [
+      `// Instead of migrating all ${from} schemas to ${to},`,
+      `// you can use Standard Schema adapters for ecosystem tools:`,
+      `//`,
+      `// Example with tRPC (v11+):`,
+      `// tRPC accepts any Standard Schema-compatible schema.`,
+      `// Both ${from} and ${to} schemas work without conversion:`,
+      `//`,
+      `// import { ${from}Schema } from './existing-${from}-schemas';`,
+      `// import { ${to}Schema } from './new-${to}-schemas';`,
+      `//`,
+      `// const router = t.router({`,
+      `// // Works with ${from} schema (Standard Schema compatible)`,
+      `// getUser: t.procedure.input(${from}Schema).query(...)`,
+      `// // Also works with ${to} schema`,
+      `// createUser: t.procedure.input(${to}Schema).mutation(...)`,
+      `// });`
    ].join("\n");
+  }
+};
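The advisor is pure logic over library names, so it can be exercised directly against the branches above; the expected values in the comments follow from STANDARD_SCHEMA_LIBS and the advise() return objects, and the project path is a placeholder.

const advisor = new StandardSchemaAdvisor();

// Both zod and valibot are in STANDARD_SCHEMA_LIBS, so adapters are an option.
const both = advisor.advise("zod", "valibot");
console.log(both.recommendation);        // "either"
console.log(both.shouldConsiderAdapter); // true
console.log(both.adapterExample);        // tRPC-oriented comment block from generateAdapterExample()

// yup is not in the set, zod is: plain migration advice.
const oneWay = advisor.advise("yup", "zod");
console.log(oneWay.recommendation);      // "migrate"

// adviseFromProject() additionally attaches detectStandardSchema() output as projectInfo.
const withProject = advisor.adviseFromProject("/path/to/project", "yup", "zod");
console.log(withProject.projectInfo.detected);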
+
 // src/test-scaffolder.ts
 var TestScaffolder = class {
   scaffold(sourceFiles, from, to) {
@@ -3538,6 +4602,7 @@ var TypeDedupDetector = class {
   }
 };
 export {
+  ApprovalManager,
   BehavioralWarningAnalyzer,
   BundleEstimator,
   CompatibilityAnalyzer,
@@ -3548,6 +4613,8 @@ export {
   FormResolverMigrator,
   GOVERNANCE_TEMPLATES,
   GovernanceEngine,
+  GovernanceFixer,
+  GraphExporter,
   IncrementalTracker,
   MigrationAuditLog,
   MigrationChain,
@@ -3557,24 +4624,37 @@ export {
   PluginLoader,
   SchemaAnalyzer,
   SchemaDependencyResolver,
+  StandardSchemaAdvisor,
   TestScaffolder,
   TransformEngine,
   TypeDedupDetector,
+  WebhookNotifier,
   buildCallChain,
   computeParallelBatches,
+  conditionalValidation,
+  dependentFields,
   detectFormLibraries,
   detectSchemaLibrary,
   detectStandardSchema,
+  getAllMigrationTemplates,
   getGovernanceTemplate,
   getGovernanceTemplateNames,
   getGovernanceTemplatesByCategory,
+  getMigrationTemplate,
+  getMigrationTemplateNames,
+  getMigrationTemplatesByCategory,
   isInsideComment,
   isInsideStringLiteral,
   loadConfig,
+  mutuallyExclusive,
   parseCallChain,
+  requireIf,
+  requireOneOf,
   shouldSuppressWarning,
   startsWithBase,
+  suggestCrossFieldPattern,
   transformMethodChain,
-  validateConfig
+  validateConfig,
+  validateMigrationTemplate
 };
 //# sourceMappingURL=index.js.map
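For reference, the names added to the export block in 0.11.0 can be imported straight from the package root; the list below simply mirrors the + lines above and does not demonstrate their usage.

import {
  ApprovalManager,
  GovernanceFixer,
  GraphExporter,
  StandardSchemaAdvisor,
  WebhookNotifier,
  conditionalValidation,
  dependentFields,
  getAllMigrationTemplates,
  getMigrationTemplate,
  getMigrationTemplateNames,
  getMigrationTemplatesByCategory,
  mutuallyExclusive,
  requireIf,
  requireOneOf,
  suggestCrossFieldPattern,
  validateMigrationTemplate
} from "@schemashift/core";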