@prisma-next/sql-contract 0.3.0-dev.4 → 0.3.0-dev.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -10
- package/dist/factories.d.mts +49 -0
- package/dist/factories.d.mts.map +1 -0
- package/dist/factories.mjs +82 -0
- package/dist/factories.mjs.map +1 -0
- package/dist/pack-types.d.mts +13 -0
- package/dist/pack-types.d.mts.map +1 -0
- package/dist/pack-types.mjs +1 -0
- package/dist/types-DTFobApb.d.mts +137 -0
- package/dist/types-DTFobApb.d.mts.map +1 -0
- package/dist/types-kacOgEya.mjs +17 -0
- package/dist/types-kacOgEya.mjs.map +1 -0
- package/dist/types.d.mts +2 -0
- package/dist/types.mjs +3 -0
- package/dist/validate.d.mts +11 -0
- package/dist/validate.d.mts.map +1 -0
- package/dist/validate.mjs +242 -0
- package/dist/validate.mjs.map +1 -0
- package/dist/validators-DG6QQnb9.mjs +162 -0
- package/dist/validators-DG6QQnb9.mjs.map +1 -0
- package/dist/{exports/validators.d.ts → validators.d.mts} +17 -4
- package/dist/validators.d.mts.map +1 -0
- package/dist/validators.mjs +3 -0
- package/package.json +27 -29
- package/src/exports/factories.ts +11 -0
- package/src/exports/pack-types.ts +1 -0
- package/src/exports/types.ts +20 -0
- package/src/exports/validate.ts +6 -0
- package/src/exports/validators.ts +1 -0
- package/src/factories.ts +162 -0
- package/src/index.ts +4 -0
- package/src/pack-types.ts +9 -0
- package/src/types.ts +163 -0
- package/src/validate.ts +443 -0
- package/src/validators.ts +227 -0
- package/dist/exports/factories.d.ts +0 -41
- package/dist/exports/factories.js +0 -83
- package/dist/exports/factories.js.map +0 -1
- package/dist/exports/pack-types.d.ts +0 -11
- package/dist/exports/pack-types.js +0 -1
- package/dist/exports/pack-types.js.map +0 -1
- package/dist/exports/types.d.ts +0 -70
- package/dist/exports/types.js +0 -1
- package/dist/exports/types.js.map +0 -1
- package/dist/exports/validators.js +0 -96
- package/dist/exports/validators.js.map +0 -1
package/src/validate.ts
ADDED
|
@@ -0,0 +1,443 @@
|
|
|
1
|
+
import type { ColumnDefaultLiteralInputValue } from '@prisma-next/contract/types';
|
|
2
|
+
import { isTaggedBigInt, isTaggedRaw } from '@prisma-next/contract/types';
|
|
3
|
+
import type {
|
|
4
|
+
ModelDefinition,
|
|
5
|
+
SqlContract,
|
|
6
|
+
SqlMappings,
|
|
7
|
+
SqlStorage,
|
|
8
|
+
StorageColumn,
|
|
9
|
+
StorageTable,
|
|
10
|
+
} from './types';
|
|
11
|
+
import { applyFkDefaults } from './types';
|
|
12
|
+
import { validateSqlContract } from './validators';
|
|
13
|
+
|
|
14
|
+
type ResolvedMappings = {
|
|
15
|
+
modelToTable: Record<string, string>;
|
|
16
|
+
tableToModel: Record<string, string>;
|
|
17
|
+
fieldToColumn: Record<string, Record<string, string>>;
|
|
18
|
+
columnToField: Record<string, Record<string, string>>;
|
|
19
|
+
codecTypes: Record<string, { readonly output: unknown }>;
|
|
20
|
+
operationTypes: Record<string, Record<string, unknown>>;
|
|
21
|
+
};
|
|
22
|
+
|
|
23
|
+
function computeDefaultMappings(models: Record<string, ModelDefinition>): ResolvedMappings {
|
|
24
|
+
const modelToTable: Record<string, string> = {};
|
|
25
|
+
const tableToModel: Record<string, string> = {};
|
|
26
|
+
const fieldToColumn: Record<string, Record<string, string>> = {};
|
|
27
|
+
const columnToField: Record<string, Record<string, string>> = {};
|
|
28
|
+
|
|
29
|
+
for (const [modelName, model] of Object.entries(models)) {
|
|
30
|
+
const tableName = model.storage.table;
|
|
31
|
+
modelToTable[modelName] = tableName;
|
|
32
|
+
tableToModel[tableName] = modelName;
|
|
33
|
+
|
|
34
|
+
const modelFieldToColumn: Record<string, string> = {};
|
|
35
|
+
for (const [fieldName, field] of Object.entries(model.fields)) {
|
|
36
|
+
const columnName = field.column;
|
|
37
|
+
modelFieldToColumn[fieldName] = columnName;
|
|
38
|
+
if (!columnToField[tableName]) {
|
|
39
|
+
columnToField[tableName] = {};
|
|
40
|
+
}
|
|
41
|
+
columnToField[tableName][columnName] = fieldName;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
fieldToColumn[modelName] = modelFieldToColumn;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
return {
|
|
48
|
+
modelToTable,
|
|
49
|
+
tableToModel,
|
|
50
|
+
fieldToColumn,
|
|
51
|
+
columnToField,
|
|
52
|
+
codecTypes: {},
|
|
53
|
+
operationTypes: {},
|
|
54
|
+
};
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
function assertInverseModelMappings(
|
|
58
|
+
modelToTable: Record<string, string>,
|
|
59
|
+
tableToModel: Record<string, string>,
|
|
60
|
+
) {
|
|
61
|
+
for (const [model, table] of Object.entries(modelToTable)) {
|
|
62
|
+
if (tableToModel[table] !== model) {
|
|
63
|
+
throw new Error(
|
|
64
|
+
`Mappings override mismatch: modelToTable.${model}="${table}" is not mirrored in tableToModel`,
|
|
65
|
+
);
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
for (const [table, model] of Object.entries(tableToModel)) {
|
|
69
|
+
if (modelToTable[model] !== table) {
|
|
70
|
+
throw new Error(
|
|
71
|
+
`Mappings override mismatch: tableToModel.${table}="${model}" is not mirrored in modelToTable`,
|
|
72
|
+
);
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
function assertInverseFieldMappings(
|
|
78
|
+
fieldToColumn: Record<string, Record<string, string>>,
|
|
79
|
+
columnToField: Record<string, Record<string, string>>,
|
|
80
|
+
modelToTable: Record<string, string>,
|
|
81
|
+
tableToModel: Record<string, string>,
|
|
82
|
+
) {
|
|
83
|
+
for (const [model, fields] of Object.entries(fieldToColumn)) {
|
|
84
|
+
const table = modelToTable[model];
|
|
85
|
+
if (!table) {
|
|
86
|
+
throw new Error(
|
|
87
|
+
`Mappings override mismatch: fieldToColumn references unknown model "${model}"`,
|
|
88
|
+
);
|
|
89
|
+
}
|
|
90
|
+
const reverseFields = columnToField[table];
|
|
91
|
+
if (!reverseFields) {
|
|
92
|
+
throw new Error(
|
|
93
|
+
`Mappings override mismatch: columnToField is missing table "${table}" for model "${model}"`,
|
|
94
|
+
);
|
|
95
|
+
}
|
|
96
|
+
for (const [field, column] of Object.entries(fields)) {
|
|
97
|
+
if (reverseFields[column] !== field) {
|
|
98
|
+
throw new Error(
|
|
99
|
+
`Mappings override mismatch: fieldToColumn.${model}.${field}="${column}" is not mirrored in columnToField.${table}`,
|
|
100
|
+
);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
for (const [table, columns] of Object.entries(columnToField)) {
|
|
106
|
+
const model = tableToModel[table];
|
|
107
|
+
if (!model) {
|
|
108
|
+
throw new Error(
|
|
109
|
+
`Mappings override mismatch: columnToField references unknown table "${table}"`,
|
|
110
|
+
);
|
|
111
|
+
}
|
|
112
|
+
const forwardFields = fieldToColumn[model];
|
|
113
|
+
if (!forwardFields) {
|
|
114
|
+
throw new Error(
|
|
115
|
+
`Mappings override mismatch: fieldToColumn is missing model "${model}" for table "${table}"`,
|
|
116
|
+
);
|
|
117
|
+
}
|
|
118
|
+
for (const [column, field] of Object.entries(columns)) {
|
|
119
|
+
if (forwardFields[field] !== column) {
|
|
120
|
+
throw new Error(
|
|
121
|
+
`Mappings override mismatch: columnToField.${table}.${column}="${field}" is not mirrored in fieldToColumn.${model}`,
|
|
122
|
+
);
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
function mergeMappings(
|
|
129
|
+
defaults: ResolvedMappings,
|
|
130
|
+
existingMappings?: Partial<SqlMappings>,
|
|
131
|
+
): ResolvedMappings {
|
|
132
|
+
const hasModelToTable = existingMappings?.modelToTable !== undefined;
|
|
133
|
+
const hasTableToModel = existingMappings?.tableToModel !== undefined;
|
|
134
|
+
if (hasModelToTable !== hasTableToModel) {
|
|
135
|
+
throw new Error(
|
|
136
|
+
'Mappings override mismatch: modelToTable and tableToModel must be provided together',
|
|
137
|
+
);
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
const hasFieldToColumn = existingMappings?.fieldToColumn !== undefined;
|
|
141
|
+
const hasColumnToField = existingMappings?.columnToField !== undefined;
|
|
142
|
+
if (hasFieldToColumn !== hasColumnToField) {
|
|
143
|
+
throw new Error(
|
|
144
|
+
'Mappings override mismatch: fieldToColumn and columnToField must be provided together',
|
|
145
|
+
);
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
const modelToTable: Record<string, string> = hasModelToTable
|
|
149
|
+
? (existingMappings?.modelToTable ?? {})
|
|
150
|
+
: defaults.modelToTable;
|
|
151
|
+
const tableToModel: Record<string, string> = hasTableToModel
|
|
152
|
+
? (existingMappings?.tableToModel ?? {})
|
|
153
|
+
: defaults.tableToModel;
|
|
154
|
+
assertInverseModelMappings(modelToTable, tableToModel);
|
|
155
|
+
|
|
156
|
+
const fieldToColumn: Record<string, Record<string, string>> = hasFieldToColumn
|
|
157
|
+
? (existingMappings?.fieldToColumn ?? {})
|
|
158
|
+
: defaults.fieldToColumn;
|
|
159
|
+
const columnToField: Record<string, Record<string, string>> = hasColumnToField
|
|
160
|
+
? (existingMappings?.columnToField ?? {})
|
|
161
|
+
: defaults.columnToField;
|
|
162
|
+
assertInverseFieldMappings(fieldToColumn, columnToField, modelToTable, tableToModel);
|
|
163
|
+
|
|
164
|
+
return {
|
|
165
|
+
modelToTable,
|
|
166
|
+
tableToModel,
|
|
167
|
+
fieldToColumn,
|
|
168
|
+
columnToField,
|
|
169
|
+
codecTypes: { ...defaults.codecTypes, ...(existingMappings?.codecTypes ?? {}) },
|
|
170
|
+
operationTypes: { ...defaults.operationTypes, ...(existingMappings?.operationTypes ?? {}) },
|
|
171
|
+
};
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
function validateContractLogic(contract: SqlContract<SqlStorage>): void {
|
|
175
|
+
const tableNames = new Set(Object.keys(contract.storage.tables));
|
|
176
|
+
|
|
177
|
+
for (const [tableName, table] of Object.entries(contract.storage.tables)) {
|
|
178
|
+
const columnNames = new Set(Object.keys(table.columns));
|
|
179
|
+
|
|
180
|
+
if (table.primaryKey) {
|
|
181
|
+
for (const colName of table.primaryKey.columns) {
|
|
182
|
+
if (!columnNames.has(colName)) {
|
|
183
|
+
throw new Error(
|
|
184
|
+
`Table "${tableName}" primaryKey references non-existent column "${colName}"`,
|
|
185
|
+
);
|
|
186
|
+
}
|
|
187
|
+
}
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
for (const unique of table.uniques) {
|
|
191
|
+
for (const colName of unique.columns) {
|
|
192
|
+
if (!columnNames.has(colName)) {
|
|
193
|
+
throw new Error(
|
|
194
|
+
`Table "${tableName}" unique constraint references non-existent column "${colName}"`,
|
|
195
|
+
);
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
for (const index of table.indexes) {
|
|
201
|
+
for (const colName of index.columns) {
|
|
202
|
+
if (!columnNames.has(colName)) {
|
|
203
|
+
throw new Error(`Table "${tableName}" index references non-existent column "${colName}"`);
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
for (const [colName, column] of Object.entries(table.columns)) {
|
|
209
|
+
if (!column.nullable && column.default?.kind === 'literal' && column.default.value === null) {
|
|
210
|
+
throw new Error(
|
|
211
|
+
`Table "${tableName}" column "${colName}" is NOT NULL but has a literal null default`,
|
|
212
|
+
);
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
for (const fk of table.foreignKeys) {
|
|
217
|
+
for (const colName of fk.columns) {
|
|
218
|
+
if (!columnNames.has(colName)) {
|
|
219
|
+
throw new Error(
|
|
220
|
+
`Table "${tableName}" foreignKey references non-existent column "${colName}"`,
|
|
221
|
+
);
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
if (!tableNames.has(fk.references.table)) {
|
|
226
|
+
throw new Error(
|
|
227
|
+
`Table "${tableName}" foreignKey references non-existent table "${fk.references.table}"`,
|
|
228
|
+
);
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
const referencedTable = contract.storage.tables[
|
|
232
|
+
fk.references.table
|
|
233
|
+
] as (typeof contract.storage.tables)[string];
|
|
234
|
+
const referencedColumnNames = new Set(Object.keys(referencedTable.columns));
|
|
235
|
+
for (const colName of fk.references.columns) {
|
|
236
|
+
if (!referencedColumnNames.has(colName)) {
|
|
237
|
+
throw new Error(
|
|
238
|
+
`Table "${tableName}" foreignKey references non-existent column "${colName}" in table "${fk.references.table}"`,
|
|
239
|
+
);
|
|
240
|
+
}
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
if (fk.columns.length !== fk.references.columns.length) {
|
|
244
|
+
throw new Error(
|
|
245
|
+
`Table "${tableName}" foreignKey column count (${fk.columns.length}) does not match referenced column count (${fk.references.columns.length})`,
|
|
246
|
+
);
|
|
247
|
+
}
|
|
248
|
+
}
|
|
249
|
+
}
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
const BIGINT_NATIVE_TYPES = new Set(['bigint', 'int8']);
|
|
253
|
+
|
|
254
|
+
export function isBigIntColumn(column: StorageColumn): boolean {
|
|
255
|
+
const nativeType = column.nativeType?.toLowerCase() ?? '';
|
|
256
|
+
if (BIGINT_NATIVE_TYPES.has(nativeType)) return true;
|
|
257
|
+
const codecId = column.codecId?.toLowerCase() ?? '';
|
|
258
|
+
return codecId.includes('int8') || codecId.includes('bigint');
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
export function decodeDefaultLiteralValue(
|
|
262
|
+
value: ColumnDefaultLiteralInputValue,
|
|
263
|
+
column: StorageColumn,
|
|
264
|
+
tableName: string,
|
|
265
|
+
columnName: string,
|
|
266
|
+
): ColumnDefaultLiteralInputValue {
|
|
267
|
+
if (value instanceof Date) {
|
|
268
|
+
return value;
|
|
269
|
+
}
|
|
270
|
+
if (isTaggedRaw(value)) {
|
|
271
|
+
return value.value;
|
|
272
|
+
}
|
|
273
|
+
if (isTaggedBigInt(value)) {
|
|
274
|
+
if (!isBigIntColumn(column)) {
|
|
275
|
+
return value;
|
|
276
|
+
}
|
|
277
|
+
try {
|
|
278
|
+
return BigInt(value.value);
|
|
279
|
+
} catch {
|
|
280
|
+
throw new Error(
|
|
281
|
+
`Invalid tagged bigint for default value on "${tableName}.${columnName}": "${value.value}" is not a valid integer`,
|
|
282
|
+
);
|
|
283
|
+
}
|
|
284
|
+
}
|
|
285
|
+
return value;
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
export function decodeContractDefaults<T extends SqlContract<SqlStorage>>(contract: T): T {
|
|
289
|
+
const tables = contract.storage.tables;
|
|
290
|
+
let tablesChanged = false;
|
|
291
|
+
const decodedTables: Record<string, StorageTable> = {};
|
|
292
|
+
|
|
293
|
+
for (const [tableName, table] of Object.entries(tables)) {
|
|
294
|
+
let columnsChanged = false;
|
|
295
|
+
const decodedColumns: Record<string, StorageColumn> = {};
|
|
296
|
+
|
|
297
|
+
for (const [columnName, column] of Object.entries(table.columns)) {
|
|
298
|
+
if (column.default?.kind === 'literal') {
|
|
299
|
+
const decodedValue = decodeDefaultLiteralValue(
|
|
300
|
+
column.default.value,
|
|
301
|
+
column,
|
|
302
|
+
tableName,
|
|
303
|
+
columnName,
|
|
304
|
+
);
|
|
305
|
+
if (decodedValue !== column.default.value) {
|
|
306
|
+
columnsChanged = true;
|
|
307
|
+
decodedColumns[columnName] = {
|
|
308
|
+
...column,
|
|
309
|
+
default: { kind: 'literal', value: decodedValue },
|
|
310
|
+
};
|
|
311
|
+
continue;
|
|
312
|
+
}
|
|
313
|
+
}
|
|
314
|
+
decodedColumns[columnName] = column;
|
|
315
|
+
}
|
|
316
|
+
|
|
317
|
+
if (columnsChanged) {
|
|
318
|
+
tablesChanged = true;
|
|
319
|
+
decodedTables[tableName] = { ...table, columns: decodedColumns };
|
|
320
|
+
} else {
|
|
321
|
+
decodedTables[tableName] = table;
|
|
322
|
+
}
|
|
323
|
+
}
|
|
324
|
+
|
|
325
|
+
if (!tablesChanged) {
|
|
326
|
+
return contract;
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
// The spread widens to SqlContract<SqlStorage>, but this transformation only
|
|
330
|
+
// decodes tagged bigint defaults for bigint-like columns and preserves all
|
|
331
|
+
// other properties of T.
|
|
332
|
+
return {
|
|
333
|
+
...contract,
|
|
334
|
+
storage: {
|
|
335
|
+
...contract.storage,
|
|
336
|
+
tables: decodedTables,
|
|
337
|
+
},
|
|
338
|
+
} as T;
|
|
339
|
+
}
|
|
340
|
+
|
|
341
|
+
/**
 * Best-effort normalization of a raw (typically JSON-parsed) contract into the
 * shape expected by structural validation: defaults `nullable` to false on
 * columns, fills missing collections (uniques, indexes, relations, meta,
 * capabilities, ...) with empty defaults, and applies foreign-key
 * constraint/index defaults via applyFkDefaults.
 *
 * Unknown extra properties are preserved via spreads throughout.
 *
 * @param contract - The raw contract value (untrusted shape)
 * @returns The contract with structural defaults filled in
 */
export function normalizeContract(contract: unknown): SqlContract<SqlStorage> {
  if (typeof contract !== 'object' || contract === null) {
    // Not an object: return as-is and let validateSqlContract reject it.
    return contract as SqlContract<SqlStorage>;
  }

  const contractObj = contract as Record<string, unknown>;

  // Normalize storage.tables.*.columns and foreign keys, if present.
  let normalizedStorage = contractObj['storage'];
  if (normalizedStorage && typeof normalizedStorage === 'object' && normalizedStorage !== null) {
    const storage = normalizedStorage as Record<string, unknown>;
    const tables = storage['tables'] as Record<string, unknown> | undefined;

    if (tables) {
      const normalizedTables: Record<string, unknown> = {};
      for (const [tableName, table] of Object.entries(tables)) {
        const tableObj = table as Record<string, unknown>;
        const columns = tableObj['columns'] as Record<string, unknown> | undefined;

        if (columns) {
          // Default nullable to false on every column that omits it.
          const normalizedColumns: Record<string, unknown> = {};
          for (const [columnName, column] of Object.entries(columns)) {
            const columnObj = column as Record<string, unknown>;
            normalizedColumns[columnName] = {
              ...columnObj,
              nullable: columnObj['nullable'] ?? false,
            };
          }

          // Normalize foreign keys: add constraint/index defaults if missing
          const rawForeignKeys = (tableObj['foreignKeys'] ?? []) as Array<Record<string, unknown>>;
          const normalizedForeignKeys = rawForeignKeys.map((fk) => ({
            ...fk,
            // Non-boolean constraint/index values are treated as absent and
            // replaced by applyFkDefaults' defaults.
            ...applyFkDefaults({
              constraint: typeof fk['constraint'] === 'boolean' ? fk['constraint'] : undefined,
              index: typeof fk['index'] === 'boolean' ? fk['index'] : undefined,
            }),
          }));

          normalizedTables[tableName] = {
            ...tableObj,
            columns: normalizedColumns,
            uniques: tableObj['uniques'] ?? [],
            indexes: tableObj['indexes'] ?? [],
            foreignKeys: normalizedForeignKeys,
          };
        } else {
          // Tables without a columns object are passed through unchanged;
          // structural validation will flag them.
          normalizedTables[tableName] = tableObj;
        }
      }

      normalizedStorage = {
        ...storage,
        tables: normalizedTables,
      };
    }
  }

  // Normalize models: ensure every model has a relations object.
  let normalizedModels = contractObj['models'];
  if (normalizedModels && typeof normalizedModels === 'object' && normalizedModels !== null) {
    const models = normalizedModels as Record<string, unknown>;
    const normalizedModelsObj: Record<string, unknown> = {};
    for (const [modelName, model] of Object.entries(models)) {
      const modelObj = model as Record<string, unknown>;
      normalizedModelsObj[modelName] = {
        ...modelObj,
        relations: modelObj['relations'] ?? {},
      };
    }
    normalizedModels = normalizedModelsObj;
  }

  // Top-level defaults: missing optional collections become empty objects.
  return {
    ...contractObj,
    models: normalizedModels,
    relations: contractObj['relations'] ?? {},
    storage: normalizedStorage,
    extensionPacks: contractObj['extensionPacks'] ?? {},
    capabilities: contractObj['capabilities'] ?? {},
    meta: contractObj['meta'] ?? {},
    sources: contractObj['sources'] ?? {},
  } as SqlContract<SqlStorage>;
}
|
|
423
|
+
|
|
424
|
+
export function validateContract<TContract extends SqlContract<SqlStorage>>(
|
|
425
|
+
value: unknown,
|
|
426
|
+
): TContract {
|
|
427
|
+
const normalized = normalizeContract(value);
|
|
428
|
+
const structurallyValid = validateSqlContract<SqlContract<SqlStorage>>(normalized);
|
|
429
|
+
validateContractLogic(structurallyValid);
|
|
430
|
+
|
|
431
|
+
const existingMappings = (structurallyValid as { mappings?: Partial<SqlMappings> }).mappings;
|
|
432
|
+
const defaultMappings = computeDefaultMappings(
|
|
433
|
+
structurallyValid.models as Record<string, ModelDefinition>,
|
|
434
|
+
);
|
|
435
|
+
const mappings = mergeMappings(defaultMappings, existingMappings);
|
|
436
|
+
|
|
437
|
+
const contractWithMappings = {
|
|
438
|
+
...structurallyValid,
|
|
439
|
+
mappings,
|
|
440
|
+
};
|
|
441
|
+
|
|
442
|
+
return decodeContractDefaults(contractWithMappings) as TContract;
|
|
443
|
+
}
|
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
import { type } from 'arktype';
|
|
2
|
+
import type {
|
|
3
|
+
ForeignKey,
|
|
4
|
+
ForeignKeyReferences,
|
|
5
|
+
Index,
|
|
6
|
+
ModelDefinition,
|
|
7
|
+
ModelField,
|
|
8
|
+
ModelStorage,
|
|
9
|
+
PrimaryKey,
|
|
10
|
+
SqlContract,
|
|
11
|
+
SqlStorage,
|
|
12
|
+
StorageTypeInstance,
|
|
13
|
+
UniqueConstraint,
|
|
14
|
+
} from './types';
|
|
15
|
+
|
|
16
|
+
// JSON-safe shapes for column defaults; the runtime contract may additionally
// carry bigint/Date after decoding (see validate.ts), which these schemas
// deliberately do not model.
type ColumnDefaultLiteral = {
  readonly kind: 'literal';
  readonly value: string | number | boolean | Record<string, unknown> | unknown[] | null;
};
type ColumnDefaultFunction = { readonly kind: 'function'; readonly expression: string };

// Literal tag schemas for the discriminated default/generator unions.
const literalKindSchema = type("'literal'");
const functionKindSchema = type("'function'");
const generatorKindSchema = type("'generator'");
// Allowed client-side id-generator identifiers.
const generatorIdSchema = type("'ulid' | 'nanoid' | 'uuidv7' | 'uuidv4' | 'cuid2' | 'ksuid'");

// A constant SQL default value (e.g. 0, '', false, a JSON object/array, null).
export const ColumnDefaultLiteralSchema = type.declare<ColumnDefaultLiteral>().type({
  kind: literalKindSchema,
  value: 'string | number | boolean | null | unknown[] | Record<string, unknown>',
});

// A database-evaluated default expression (e.g. now()).
export const ColumnDefaultFunctionSchema = type.declare<ColumnDefaultFunction>().type({
  kind: functionKindSchema,
  expression: 'string',
});

// A column default is either a literal value or a function expression.
export const ColumnDefaultSchema = ColumnDefaultLiteralSchema.or(ColumnDefaultFunctionSchema);

// A generator-based value applied by the client at mutation time.
const ExecutionMutationDefaultValueSchema = type({
  kind: generatorKindSchema,
  id: generatorIdSchema,
  'params?': 'Record<string, unknown>',
});

// Binds onCreate/onUpdate generators to a specific table.column.
const ExecutionMutationDefaultSchema = type({
  ref: {
    table: 'string',
    column: 'string',
  },
  'onCreate?': ExecutionMutationDefaultValueSchema,
  'onUpdate?': ExecutionMutationDefaultValueSchema,
});

// Optional execution section: client-side mutation defaults.
const ExecutionSchema = type({
  mutations: {
    defaults: ExecutionMutationDefaultSchema.array().readonly(),
  },
});

// Column shape; narrow() enforces that typeParams and typeRef are mutually
// exclusive (a column either inlines its type params or references a shared
// storage type, not both).
const StorageColumnSchema = type({
  nativeType: 'string',
  codecId: 'string',
  nullable: 'boolean',
  'typeParams?': 'Record<string, unknown>',
  'typeRef?': 'string',
  'default?': ColumnDefaultSchema,
}).narrow((col, ctx) => {
  if (col.typeParams !== undefined && col.typeRef !== undefined) {
    return ctx.mustBe('a column with either typeParams or typeRef, not both');
  }
  return true;
});

// A shared, reusable storage type instance referenced via typeRef.
const StorageTypeInstanceSchema = type.declare<StorageTypeInstance>().type({
  codecId: 'string',
  nativeType: 'string',
  typeParams: 'Record<string, unknown>',
});

const PrimaryKeySchema = type.declare<PrimaryKey>().type({
  columns: type.string.array().readonly(),
  'name?': 'string',
});

const UniqueConstraintSchema = type.declare<UniqueConstraint>().type({
  columns: type.string.array().readonly(),
  'name?': 'string',
});

const IndexSchema = type.declare<Index>().type({
  columns: type.string.array().readonly(),
  'name?': 'string',
});

const ForeignKeyReferencesSchema = type.declare<ForeignKeyReferences>().type({
  table: 'string',
  columns: type.string.array().readonly(),
});

// constraint/index are required booleans here: normalization (see
// normalizeContract) applies defaults before this schema runs.
const ForeignKeySchema = type.declare<ForeignKey>().type({
  columns: type.string.array().readonly(),
  references: ForeignKeyReferencesSchema,
  'name?': 'string',
  constraint: 'boolean',
  index: 'boolean',
});

// uniques/indexes/foreignKeys are required arrays for the same reason:
// normalization fills them with [] when absent.
const StorageTableSchema = type({
  columns: type({ '[string]': StorageColumnSchema }),
  'primaryKey?': PrimaryKeySchema,
  uniques: UniqueConstraintSchema.array().readonly(),
  indexes: IndexSchema.array().readonly(),
  foreignKeys: ForeignKeySchema.array().readonly(),
});

const StorageSchema = type({
  tables: type({ '[string]': StorageTableSchema }),
  'types?': type({ '[string]': StorageTypeInstanceSchema }),
});

const ModelFieldSchema = type.declare<ModelField>().type({
  column: 'string',
});

const ModelStorageSchema = type.declare<ModelStorage>().type({
  table: 'string',
});

// relations values are intentionally unvalidated ('unknown') at this layer.
const ModelSchema = type.declare<ModelDefinition>().type({
  storage: ModelStorageSchema,
  fields: type({ '[string]': ModelFieldSchema }),
  relations: type({ '[string]': 'unknown' }),
});

// Top-level contract shape; targetFamily is pinned to 'sql'.
const SqlContractSchema = type({
  'schemaVersion?': "'1'",
  target: 'string',
  targetFamily: "'sql'",
  storageHash: 'string',
  'executionHash?': 'string',
  'profileHash?': 'string',
  'capabilities?': 'Record<string, Record<string, boolean>>',
  'extensionPacks?': 'Record<string, unknown>',
  'meta?': 'Record<string, unknown>',
  'sources?': 'Record<string, unknown>',
  models: type({ '[string]': ModelSchema }),
  storage: StorageSchema,
  'execution?': ExecutionSchema,
});
|
|
149
|
+
|
|
150
|
+
// NOTE: StorageColumnSchema, StorageTableSchema, and StorageSchema use bare type()
|
|
151
|
+
// instead of type.declare<T>().type() because the ColumnDefault union's value field
|
|
152
|
+
// includes bigint | Date (runtime-only types after decoding) which cannot be expressed
|
|
153
|
+
// in Arktype's JSON validation DSL. The `as SqlStorage` cast in validateStorage() bridges
|
|
154
|
+
// the gap between the JSON-safe Arktype output and the runtime TypeScript type.
|
|
155
|
+
// See decodeContractDefaults() in validate.ts for the decoding step.
|
|
156
|
+
|
|
157
|
+
/**
|
|
158
|
+
* Validates the structural shape of SqlStorage using Arktype.
|
|
159
|
+
*
|
|
160
|
+
* @param value - The storage value to validate
|
|
161
|
+
* @returns The validated storage if structure is valid
|
|
162
|
+
* @throws Error if the storage structure is invalid
|
|
163
|
+
*/
|
|
164
|
+
export function validateStorage(value: unknown): SqlStorage {
|
|
165
|
+
const result = StorageSchema(value);
|
|
166
|
+
if (result instanceof type.errors) {
|
|
167
|
+
const messages = result.map((p: { message: string }) => p.message).join('; ');
|
|
168
|
+
throw new Error(`Storage validation failed: ${messages}`);
|
|
169
|
+
}
|
|
170
|
+
return result as SqlStorage;
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
/**
|
|
174
|
+
* Validates the structural shape of ModelDefinition using Arktype.
|
|
175
|
+
*
|
|
176
|
+
* @param value - The model value to validate
|
|
177
|
+
* @returns The validated model if structure is valid
|
|
178
|
+
* @throws Error if the model structure is invalid
|
|
179
|
+
*/
|
|
180
|
+
export function validateModel(value: unknown): ModelDefinition {
|
|
181
|
+
const result = ModelSchema(value);
|
|
182
|
+
if (result instanceof type.errors) {
|
|
183
|
+
const messages = result.map((p: { message: string }) => p.message).join('; ');
|
|
184
|
+
throw new Error(`Model validation failed: ${messages}`);
|
|
185
|
+
}
|
|
186
|
+
return result;
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
/**
|
|
190
|
+
* Validates the structural shape of a SqlContract using Arktype.
|
|
191
|
+
*
|
|
192
|
+
* **Responsibility: Validation Only**
|
|
193
|
+
* This function validates that the contract has the correct structure and types.
|
|
194
|
+
* It does NOT normalize the contract - normalization must happen in the contract builder.
|
|
195
|
+
*
|
|
196
|
+
* The contract passed to this function must already be normalized (all required fields present).
|
|
197
|
+
* If normalization is needed, it should be done by the contract builder before calling this function.
|
|
198
|
+
*
|
|
199
|
+
* This ensures all required fields are present and have the correct types.
|
|
200
|
+
*
|
|
201
|
+
* @param value - The contract value to validate (typically from a JSON import)
|
|
202
|
+
* @returns The validated contract if structure is valid
|
|
203
|
+
* @throws Error if the contract structure is invalid
|
|
204
|
+
*/
|
|
205
|
+
export function validateSqlContract<T extends SqlContract<SqlStorage>>(value: unknown): T {
|
|
206
|
+
if (typeof value !== 'object' || value === null) {
|
|
207
|
+
throw new Error('Contract structural validation failed: value must be an object');
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
// Check targetFamily first to provide a clear error message for unsupported target families
|
|
211
|
+
const rawValue = value as { targetFamily?: string };
|
|
212
|
+
if (rawValue.targetFamily !== undefined && rawValue.targetFamily !== 'sql') {
|
|
213
|
+
throw new Error(`Unsupported target family: ${rawValue.targetFamily}`);
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
const contractResult = SqlContractSchema(value);
|
|
217
|
+
|
|
218
|
+
if (contractResult instanceof type.errors) {
|
|
219
|
+
const messages = contractResult.map((p: { message: string }) => p.message).join('; ');
|
|
220
|
+
throw new Error(`Contract structural validation failed: ${messages}`);
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
// After validation, contractResult matches the schema and preserves the input structure
|
|
224
|
+
// TypeScript needs an assertion here due to exactOptionalPropertyTypes differences
|
|
225
|
+
// between Arktype's inferred type and the generic T, but runtime-wise they're compatible
|
|
226
|
+
return contractResult as T;
|
|
227
|
+
}
|