@prisma-next/sql-contract 0.3.0-dev.13 → 0.3.0-dev.130
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +84 -10
- package/dist/factories.d.mts +48 -0
- package/dist/factories.d.mts.map +1 -0
- package/dist/factories.mjs +84 -0
- package/dist/factories.mjs.map +1 -0
- package/dist/pack-types.d.mts +13 -0
- package/dist/pack-types.d.mts.map +1 -0
- package/dist/pack-types.mjs +1 -0
- package/dist/types-CB821Pqa.d.mts +197 -0
- package/dist/types-CB821Pqa.d.mts.map +1 -0
- package/dist/types-DRR5stkj.mjs +13 -0
- package/dist/types-DRR5stkj.mjs.map +1 -0
- package/dist/types.d.mts +2 -0
- package/dist/types.mjs +3 -0
- package/dist/validate.d.mts +11 -0
- package/dist/validate.d.mts.map +1 -0
- package/dist/validate.mjs +437 -0
- package/dist/validate.mjs.map +1 -0
- package/dist/validators-CNxeypbZ.mjs +234 -0
- package/dist/validators-CNxeypbZ.mjs.map +1 -0
- package/dist/validators.d.mts +71 -0
- package/dist/validators.d.mts.map +1 -0
- package/dist/validators.mjs +3 -0
- package/package.json +21 -25
- package/src/construct.ts +181 -0
- package/src/exports/types.ts +21 -0
- package/src/exports/validate.ts +6 -0
- package/src/exports/validators.ts +1 -1
- package/src/factories.ts +41 -8
- package/src/index.ts +1 -0
- package/src/types.ts +176 -9
- package/src/validate.ts +560 -0
- package/src/validators.ts +184 -18
- package/dist/exports/factories.d.ts +0 -2
- package/dist/exports/factories.d.ts.map +0 -1
- package/dist/exports/factories.js +0 -83
- package/dist/exports/factories.js.map +0 -1
- package/dist/exports/pack-types.d.ts +0 -2
- package/dist/exports/pack-types.d.ts.map +0 -1
- package/dist/exports/pack-types.js +0 -1
- package/dist/exports/pack-types.js.map +0 -1
- package/dist/exports/types.d.ts +0 -2
- package/dist/exports/types.d.ts.map +0 -1
- package/dist/exports/types.js +0 -1
- package/dist/exports/types.js.map +0 -1
- package/dist/exports/validators.d.ts +0 -2
- package/dist/exports/validators.d.ts.map +0 -1
- package/dist/exports/validators.js +0 -96
- package/dist/exports/validators.js.map +0 -1
- package/dist/factories.d.ts +0 -38
- package/dist/factories.d.ts.map +0 -1
- package/dist/index.d.ts +0 -4
- package/dist/index.d.ts.map +0 -1
- package/dist/pack-types.d.ts +0 -10
- package/dist/pack-types.d.ts.map +0 -1
- package/dist/types.d.ts +0 -68
- package/dist/types.d.ts.map +0 -1
- package/dist/validators.d.ts +0 -35
- package/dist/validators.d.ts.map +0 -1
package/src/validate.ts
ADDED
|
@@ -0,0 +1,560 @@
|
|
|
1
|
+
import type { ColumnDefaultLiteralInputValue } from '@prisma-next/contract/types';
|
|
2
|
+
import { isTaggedBigInt, isTaggedRaw } from '@prisma-next/contract/types';
|
|
3
|
+
import type { DomainContractShape, DomainModelShape } from '@prisma-next/contract/validate-domain';
|
|
4
|
+
import { validateContractDomain } from '@prisma-next/contract/validate-domain';
|
|
5
|
+
import { constructContract } from './construct';
|
|
6
|
+
import type { SqlContract, SqlStorage, StorageColumn, StorageTable } from './types';
|
|
7
|
+
import { applyFkDefaults } from './types';
|
|
8
|
+
import { validateSqlContract, validateStorageSemantics } from './validators';
|
|
9
|
+
|
|
10
|
+
function extractDomainShape(contract: SqlContract<SqlStorage>): DomainContractShape {
|
|
11
|
+
return {
|
|
12
|
+
roots: contract.roots,
|
|
13
|
+
models: contract.models as Record<string, DomainModelShape>,
|
|
14
|
+
};
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
function validateContractLogic(contract: SqlContract<SqlStorage>): void {
|
|
18
|
+
const tableNames = new Set(Object.keys(contract.storage.tables));
|
|
19
|
+
|
|
20
|
+
for (const [tableName, table] of Object.entries(contract.storage.tables)) {
|
|
21
|
+
const columnNames = new Set(Object.keys(table.columns));
|
|
22
|
+
|
|
23
|
+
if (table.primaryKey) {
|
|
24
|
+
for (const colName of table.primaryKey.columns) {
|
|
25
|
+
if (!columnNames.has(colName)) {
|
|
26
|
+
throw new Error(
|
|
27
|
+
`Table "${tableName}" primaryKey references non-existent column "${colName}"`,
|
|
28
|
+
);
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
for (const unique of table.uniques) {
|
|
34
|
+
for (const colName of unique.columns) {
|
|
35
|
+
if (!columnNames.has(colName)) {
|
|
36
|
+
throw new Error(
|
|
37
|
+
`Table "${tableName}" unique constraint references non-existent column "${colName}"`,
|
|
38
|
+
);
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
for (const index of table.indexes) {
|
|
44
|
+
for (const colName of index.columns) {
|
|
45
|
+
if (!columnNames.has(colName)) {
|
|
46
|
+
throw new Error(`Table "${tableName}" index references non-existent column "${colName}"`);
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
for (const [colName, column] of Object.entries(table.columns)) {
|
|
52
|
+
if (!column.nullable && column.default?.kind === 'literal' && column.default.value === null) {
|
|
53
|
+
throw new Error(
|
|
54
|
+
`Table "${tableName}" column "${colName}" is NOT NULL but has a literal null default`,
|
|
55
|
+
);
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
for (const fk of table.foreignKeys) {
|
|
60
|
+
for (const colName of fk.columns) {
|
|
61
|
+
if (!columnNames.has(colName)) {
|
|
62
|
+
throw new Error(
|
|
63
|
+
`Table "${tableName}" foreignKey references non-existent column "${colName}"`,
|
|
64
|
+
);
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
if (!tableNames.has(fk.references.table)) {
|
|
69
|
+
throw new Error(
|
|
70
|
+
`Table "${tableName}" foreignKey references non-existent table "${fk.references.table}"`,
|
|
71
|
+
);
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
const referencedTable = contract.storage.tables[
|
|
75
|
+
fk.references.table
|
|
76
|
+
] as (typeof contract.storage.tables)[string];
|
|
77
|
+
const referencedColumnNames = new Set(Object.keys(referencedTable.columns));
|
|
78
|
+
for (const colName of fk.references.columns) {
|
|
79
|
+
if (!referencedColumnNames.has(colName)) {
|
|
80
|
+
throw new Error(
|
|
81
|
+
`Table "${tableName}" foreignKey references non-existent column "${colName}" in table "${fk.references.table}"`,
|
|
82
|
+
);
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
if (fk.columns.length !== fk.references.columns.length) {
|
|
87
|
+
throw new Error(
|
|
88
|
+
`Table "${tableName}" foreignKey column count (${fk.columns.length}) does not match referenced column count (${fk.references.columns.length})`,
|
|
89
|
+
);
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
const BIGINT_NATIVE_TYPES = new Set(['bigint', 'int8']);
|
|
96
|
+
|
|
97
|
+
export function isBigIntColumn(column: StorageColumn): boolean {
|
|
98
|
+
const nativeType = column.nativeType?.toLowerCase() ?? '';
|
|
99
|
+
if (BIGINT_NATIVE_TYPES.has(nativeType)) return true;
|
|
100
|
+
const codecId = column.codecId?.toLowerCase() ?? '';
|
|
101
|
+
return codecId.includes('int8') || codecId.includes('bigint');
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
export function decodeDefaultLiteralValue(
|
|
105
|
+
value: ColumnDefaultLiteralInputValue,
|
|
106
|
+
column: StorageColumn,
|
|
107
|
+
tableName: string,
|
|
108
|
+
columnName: string,
|
|
109
|
+
): ColumnDefaultLiteralInputValue {
|
|
110
|
+
if (value instanceof Date) {
|
|
111
|
+
return value;
|
|
112
|
+
}
|
|
113
|
+
if (isTaggedRaw(value)) {
|
|
114
|
+
return value.value;
|
|
115
|
+
}
|
|
116
|
+
if (isTaggedBigInt(value)) {
|
|
117
|
+
if (!isBigIntColumn(column)) {
|
|
118
|
+
return value;
|
|
119
|
+
}
|
|
120
|
+
try {
|
|
121
|
+
return BigInt(value.value);
|
|
122
|
+
} catch {
|
|
123
|
+
throw new Error(
|
|
124
|
+
`Invalid tagged bigint for default value on "${tableName}.${columnName}": "${value.value}" is not a valid integer`,
|
|
125
|
+
);
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
return value;
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
export function decodeContractDefaults<T extends SqlContract<SqlStorage>>(contract: T): T {
|
|
132
|
+
const tables = contract.storage.tables;
|
|
133
|
+
let tablesChanged = false;
|
|
134
|
+
const decodedTables: Record<string, StorageTable> = {};
|
|
135
|
+
|
|
136
|
+
for (const [tableName, table] of Object.entries(tables)) {
|
|
137
|
+
let columnsChanged = false;
|
|
138
|
+
const decodedColumns: Record<string, StorageColumn> = {};
|
|
139
|
+
|
|
140
|
+
for (const [columnName, column] of Object.entries(table.columns)) {
|
|
141
|
+
if (column.default?.kind === 'literal') {
|
|
142
|
+
const decodedValue = decodeDefaultLiteralValue(
|
|
143
|
+
column.default.value,
|
|
144
|
+
column,
|
|
145
|
+
tableName,
|
|
146
|
+
columnName,
|
|
147
|
+
);
|
|
148
|
+
if (decodedValue !== column.default.value) {
|
|
149
|
+
columnsChanged = true;
|
|
150
|
+
decodedColumns[columnName] = {
|
|
151
|
+
...column,
|
|
152
|
+
default: { kind: 'literal', value: decodedValue },
|
|
153
|
+
};
|
|
154
|
+
continue;
|
|
155
|
+
}
|
|
156
|
+
}
|
|
157
|
+
decodedColumns[columnName] = column;
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
if (columnsChanged) {
|
|
161
|
+
tablesChanged = true;
|
|
162
|
+
decodedTables[tableName] = { ...table, columns: decodedColumns };
|
|
163
|
+
} else {
|
|
164
|
+
decodedTables[tableName] = table;
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
if (!tablesChanged) {
|
|
169
|
+
return contract;
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
// The spread widens to SqlContract<SqlStorage>, but this transformation only
|
|
173
|
+
// decodes tagged bigint defaults for bigint-like columns and preserves all
|
|
174
|
+
// other properties of T.
|
|
175
|
+
return {
|
|
176
|
+
...contract,
|
|
177
|
+
storage: {
|
|
178
|
+
...contract.storage,
|
|
179
|
+
tables: decodedTables,
|
|
180
|
+
},
|
|
181
|
+
} as T;
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
function normalizeStorage(contractObj: Record<string, unknown>): Record<string, unknown> {
|
|
185
|
+
const normalizedStorage = contractObj['storage'];
|
|
186
|
+
if (!normalizedStorage || typeof normalizedStorage !== 'object')
|
|
187
|
+
return normalizedStorage as Record<string, unknown>;
|
|
188
|
+
|
|
189
|
+
const storage = normalizedStorage as Record<string, unknown>;
|
|
190
|
+
const tables = storage['tables'] as Record<string, unknown> | undefined;
|
|
191
|
+
if (!tables) return storage;
|
|
192
|
+
|
|
193
|
+
const normalizedTables: Record<string, unknown> = {};
|
|
194
|
+
for (const [tableName, table] of Object.entries(tables)) {
|
|
195
|
+
const tableObj = table as Record<string, unknown>;
|
|
196
|
+
const columns = tableObj['columns'] as Record<string, unknown> | undefined;
|
|
197
|
+
|
|
198
|
+
if (columns) {
|
|
199
|
+
const normalizedColumns: Record<string, unknown> = {};
|
|
200
|
+
for (const [columnName, column] of Object.entries(columns)) {
|
|
201
|
+
const columnObj = column as Record<string, unknown>;
|
|
202
|
+
normalizedColumns[columnName] = {
|
|
203
|
+
...columnObj,
|
|
204
|
+
nullable: columnObj['nullable'] ?? false,
|
|
205
|
+
};
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
const rawForeignKeys = (tableObj['foreignKeys'] ?? []) as Array<Record<string, unknown>>;
|
|
209
|
+
const normalizedForeignKeys = rawForeignKeys.map((fk) => ({
|
|
210
|
+
...fk,
|
|
211
|
+
...applyFkDefaults({
|
|
212
|
+
constraint: typeof fk['constraint'] === 'boolean' ? fk['constraint'] : undefined,
|
|
213
|
+
index: typeof fk['index'] === 'boolean' ? fk['index'] : undefined,
|
|
214
|
+
}),
|
|
215
|
+
}));
|
|
216
|
+
|
|
217
|
+
normalizedTables[tableName] = {
|
|
218
|
+
...tableObj,
|
|
219
|
+
columns: normalizedColumns,
|
|
220
|
+
uniques: tableObj['uniques'] ?? [],
|
|
221
|
+
indexes: tableObj['indexes'] ?? [],
|
|
222
|
+
foreignKeys: normalizedForeignKeys,
|
|
223
|
+
};
|
|
224
|
+
} else {
|
|
225
|
+
normalizedTables[tableName] = tableObj;
|
|
226
|
+
}
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
return { ...storage, tables: normalizedTables };
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
// Loosely-typed views over the raw (pre-validation) contract JSON; the
// normalization helpers below read these with string-index access only.
type RawModel = Record<string, unknown>;
type RawField = Record<string, unknown>;
type RawRelation = Record<string, unknown>;
// Minimal storage shape needed while enriching old-format models.
type RawStorageObj = { tables: Record<string, Record<string, unknown>> };
|
|
236
|
+
|
|
237
|
+
function detectFormat(models: Record<string, RawModel>): 'old' | 'new' {
|
|
238
|
+
for (const model of Object.values(models)) {
|
|
239
|
+
const fields = model['fields'] as Record<string, RawField> | undefined;
|
|
240
|
+
if (!fields) continue;
|
|
241
|
+
for (const field of Object.values(fields)) {
|
|
242
|
+
if ('column' in field) return 'old';
|
|
243
|
+
if ('codecId' in field) return 'new';
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
return 'old';
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
function buildColumnToFieldMap(
|
|
250
|
+
fields: Record<string, RawField>,
|
|
251
|
+
modelName: string,
|
|
252
|
+
): Record<string, string> {
|
|
253
|
+
const map: Record<string, string> = {};
|
|
254
|
+
for (const [fieldName, field] of Object.entries(fields)) {
|
|
255
|
+
const col = field['column'] as string | undefined;
|
|
256
|
+
if (!col) continue;
|
|
257
|
+
if (Object.hasOwn(map, col)) {
|
|
258
|
+
throw new Error(
|
|
259
|
+
`Model "${modelName}" has duplicate column mapping: fields "${map[col]}" and "${fieldName}" both map to column "${col}"`,
|
|
260
|
+
);
|
|
261
|
+
}
|
|
262
|
+
map[col] = fieldName;
|
|
263
|
+
}
|
|
264
|
+
return map;
|
|
265
|
+
}
|
|
266
|
+
|
|
267
|
+
/**
 * Enriches legacy-format models (fields carry a `column` property) with data
 * pulled from the storage tables, and rewrites table-keyed top-level
 * relations into model-level relations expressed in field names.
 *
 * Pass 1: collect roots (table-mapped models without an `owner`) and a
 * table→model index. Pass 2: per model, copy `nullable`/`codecId` from the
 * matching storage column onto each field and record a field→column map under
 * `storage.fields`. Pass 3: for each table-keyed relation, translate its
 * `parentCols`/`childCols` into `localFields`/`targetFields` via the
 * column→field maps of the source and target models.
 *
 * @throws Error when a field names a column absent from a non-empty storage table.
 */
function enrichOldFormatModels(
  models: Record<string, RawModel>,
  storageObj: RawStorageObj,
  topRelations: Record<string, Record<string, RawRelation>>,
): { enrichedModels: Record<string, RawModel>; roots: Record<string, string> } {
  const roots: Record<string, string> = {};
  const tableToModel: Record<string, string> = {};

  // Pass 1: roots and the table→model index used to attach relations later.
  for (const [modelName, model] of Object.entries(models)) {
    const modelStorage = model['storage'] as Record<string, unknown> | undefined;
    const tableName = modelStorage?.['table'] as string | undefined;
    if (tableName) {
      // Table-mapped models without an `owner` become roots (self-keyed).
      if (!model['owner']) {
        roots[modelName] = modelName;
      }
      tableToModel[tableName] = modelName;
    }
  }

  const enrichedModels: Record<string, RawModel> = {};

  // Pass 2: copy storage-column metadata onto each field.
  for (const [modelName, model] of Object.entries(models)) {
    const fields = (model['fields'] ?? {}) as Record<string, RawField>;
    const modelStorage = model['storage'] as Record<string, unknown> | undefined;
    const tableName = modelStorage?.['table'] as string | undefined;
    const storageTable = tableName
      ? (storageObj.tables[tableName] as Record<string, unknown> | undefined)
      : undefined;
    const storageColumns = (storageTable?.['columns'] ?? {}) as Record<
      string,
      Record<string, unknown>
    >;

    const enrichedFields: Record<string, RawField> = {};
    const modelStorageFields: Record<string, { column: string }> = {};

    // Only flag a dangling column reference when the table declares columns
    // at all; an empty/absent table skips the check (best-effort enrichment).
    const hasStorageColumns = Object.keys(storageColumns).length > 0;
    for (const [fieldName, field] of Object.entries(fields)) {
      const colName = field['column'] as string;
      const storageCol = storageColumns[colName];
      if (!storageCol && hasStorageColumns && colName) {
        throw new Error(
          `Model "${modelName}" field "${fieldName}" references non-existent column "${colName}" in table "${tableName}"`,
        );
      }
      enrichedFields[fieldName] = {
        ...field,
        nullable: storageCol?.['nullable'] ?? false,
        codecId: storageCol?.['codecId'] ?? '',
      };
      modelStorageFields[fieldName] = { column: colName };
    }

    const enrichedStorage = {
      ...(modelStorage ?? {}),
      fields: modelStorageFields,
    };

    enrichedModels[modelName] = {
      ...model,
      fields: enrichedFields,
      storage: enrichedStorage,
      relations: model['relations'] ?? {},
    };
  }

  // Pass 3: translate table-keyed relations into field-name terms and merge
  // them onto the owning model (existing model relations are kept; same-name
  // entries are overwritten by the translated relation).
  for (const [tableName, tableRels] of Object.entries(topRelations)) {
    const modelName = tableToModel[tableName];
    if (!modelName) continue;
    const existingModel = enrichedModels[modelName];
    if (!existingModel) continue;

    const existingRels = (existingModel['relations'] ?? {}) as Record<string, unknown>;
    // Per-target cache of column→field maps, built lazily below.
    const targetColumnToField: Record<string, Record<string, string>> = {};

    const modelRelations: Record<string, unknown> = { ...existingRels };
    for (const [relName, rel] of Object.entries(tableRels)) {
      const on = rel['on'] as { childCols?: string[]; parentCols?: string[] } | undefined;
      const parentCols = on?.['parentCols'] ?? [];
      const childCols = on?.['childCols'] ?? [];

      const toModel = rel['to'] as string;
      const sourceFields = (existingModel['fields'] ?? {}) as Record<string, RawField>;
      const sourceColToField = buildColumnToFieldMap(sourceFields, modelName);

      if (!targetColumnToField[toModel]) {
        const targetModelObj = enrichedModels[toModel];
        if (targetModelObj) {
          targetColumnToField[toModel] = buildColumnToFieldMap(
            (targetModelObj['fields'] ?? {}) as Record<string, RawField>,
            toModel,
          );
        } else {
          // Unknown target model: fall through to identity mapping below.
          targetColumnToField[toModel] = {};
        }
      }
      const targetColToField = targetColumnToField[toModel] ?? {};

      // Old format: parentCols = columns on FK-holding table (local), childCols = columns on referenced table (target)
      // Columns with no owning field map through unchanged (identity).
      const localFields = parentCols.map((c: string) => sourceColToField[c] ?? c);
      const targetFields = childCols.map((c: string) => targetColToField[c] ?? c);

      modelRelations[relName] = {
        to: toModel,
        cardinality: rel['cardinality'],
        on: { localFields, targetFields },
      };
    }

    enrichedModels[modelName] = {
      ...existingModel,
      relations: modelRelations,
    };
  }

  return { enrichedModels, roots };
}
|
|
384
|
+
|
|
385
|
+
/**
 * Enriches new-format models (field→column mapping lives under
 * `storage.fields`) and derives table-keyed top-level relations from each
 * model's field-level relations.
 *
 * Pass 1: build a model→table index from each model's `storage.table`.
 * Pass 2: copy each field's column name (when mapped) onto the field itself,
 * then translate every relation's `localFields`/`targetFields` into
 * `parentCols`/`childCols` using the storage field maps — falling back to a
 * `column` property on the field, then to the raw field name. Relations whose
 * source or target model has no table are skipped.
 */
function enrichNewFormatModels(models: Record<string, RawModel>): {
  enrichedModels: Record<string, RawModel>;
  topRelations: Record<string, Record<string, unknown>>;
} {
  const enrichedModels: Record<string, RawModel> = {};
  const topRelations: Record<string, Record<string, unknown>> = {};
  const modelToTable: Record<string, string> = {};

  // Pass 1: model→table index (only table-mapped models participate).
  for (const [modelName, model] of Object.entries(models)) {
    const modelStorage = model['storage'] as Record<string, unknown> | undefined;
    const tableName = modelStorage?.['table'] as string | undefined;
    if (tableName) modelToTable[modelName] = tableName;
  }

  // Pass 2: enrich fields and derive table-keyed relations.
  for (const [modelName, model] of Object.entries(models)) {
    const fields = (model['fields'] ?? {}) as Record<string, RawField>;
    const modelStorage = model['storage'] as Record<string, unknown> | undefined;
    const storageFields = (modelStorage?.['fields'] ?? {}) as Record<
      string,
      Record<string, unknown>
    >;

    // Copy the mapped column name onto the field; unmapped fields are cloned.
    const enrichedFields: Record<string, RawField> = {};
    for (const [fieldName, field] of Object.entries(fields)) {
      const sfEntry = storageFields[fieldName];
      const column = sfEntry?.['column'] as string | undefined;
      enrichedFields[fieldName] = column ? { ...field, column } : { ...field };
    }

    enrichedModels[modelName] = {
      ...model,
      fields: enrichedFields,
      relations: model['relations'] ?? {},
    };

    const modelRels = (model['relations'] ?? {}) as Record<string, RawRelation>;
    const tableName = modelToTable[modelName];
    if (!tableName) continue;

    for (const [relName, rel] of Object.entries(modelRels)) {
      const on = rel['on'] as { localFields?: string[]; targetFields?: string[] } | undefined;
      if (!on) continue;
      const toModel = rel['to'] as string;
      const toTable = modelToTable[toModel];
      if (!toTable) continue;

      const sourceFields = enrichedFields;
      // Target is read from the RAW models map: target enrichment may not
      // have run yet at this point in the iteration.
      const targetModelObj = models[toModel];
      const targetFields = (targetModelObj?.['fields'] ?? {}) as Record<string, RawField>;
      const targetStorageObj = targetModelObj?.['storage'] as Record<string, unknown> | undefined;
      const targetStorageFields = (targetStorageObj?.['fields'] ?? {}) as Record<
        string,
        Record<string, unknown>
      >;

      // Resolution order: storage field map → field's own column → field name.
      const parentCols = (on.localFields ?? []).map((f: string) => {
        const sf = storageFields[f];
        return (
          (sf?.['column'] as string | undefined) ??
          (sourceFields[f]?.['column'] as string | undefined) ??
          f
        );
      });

      const childCols = (on.targetFields ?? []).map((f: string) => {
        const tsf = targetStorageFields[f];
        return (
          (tsf?.['column'] as string | undefined) ??
          (targetFields[f]?.['column'] as string | undefined) ??
          f
        );
      });

      if (!topRelations[tableName]) topRelations[tableName] = {};
      topRelations[tableName][relName] = {
        to: toModel,
        cardinality: rel['cardinality'],
        on: { parentCols, childCols },
      };
    }
  }

  return { enrichedModels, topRelations };
}
|
|
469
|
+
|
|
470
|
+
/**
 * Brings an arbitrary raw contract value up to the canonical
 * SqlContract<SqlStorage> shape: storage defaults are filled in via
 * `normalizeStorage`, the model format is sniffed with `detectFormat`, and
 * models/relations/roots are enriched via the matching enrich* helper. All
 * optional top-level sections default to `{}`. Non-object input is returned
 * unchanged (cast only).
 */
export function normalizeContract(contract: unknown): SqlContract<SqlStorage> {
  if (typeof contract !== 'object' || contract === null) {
    // Nothing to normalize; downstream structural validation will reject it.
    return contract as SqlContract<SqlStorage>;
  }

  const contractObj = contract as Record<string, unknown>;
  const normalizedStorage = normalizeStorage(contractObj);

  // Without a models map, only storage normalization and defaulting apply.
  const rawModels = contractObj['models'];
  if (!rawModels || typeof rawModels !== 'object' || rawModels === null) {
    return {
      ...contractObj,
      roots: contractObj['roots'] ?? {},
      models: rawModels ?? {},
      relations: contractObj['relations'] ?? {},
      storage: normalizedStorage,
      extensionPacks: contractObj['extensionPacks'] ?? {},
      capabilities: contractObj['capabilities'] ?? {},
      meta: contractObj['meta'] ?? {},
      sources: contractObj['sources'] ?? {},
    } as SqlContract<SqlStorage>;
  }

  const modelsObj = rawModels as Record<string, RawModel>;
  const format = detectFormat(modelsObj);

  let normalizedModels: Record<string, RawModel>;
  let roots: Record<string, string>;
  let topRelations: Record<string, Record<string, unknown>>;

  if (format === 'new') {
    // New format derives table-keyed relations from the models; derived
    // entries win over any same-named pre-existing top-level relation.
    const result = enrichNewFormatModels(modelsObj);
    normalizedModels = result.enrichedModels;
    topRelations = {
      ...((contractObj['relations'] ?? {}) as Record<string, Record<string, unknown>>),
      ...result.topRelations,
    };
    roots = (contractObj['roots'] as Record<string, string>) ?? {};
  } else {
    // Old format needs the (already normalized) storage tables to enrich
    // fields, and computes roots itself; top-level relations pass through.
    const rawStorageObj =
      normalizedStorage && typeof normalizedStorage === 'object'
        ? (normalizedStorage as Record<string, unknown>)
        : {};
    const storageObj = {
      tables: ((rawStorageObj as Record<string, unknown>)['tables'] ?? {}) as Record<
        string,
        Record<string, unknown>
      >,
    };
    const existingRelations = (contractObj['relations'] ?? {}) as Record<
      string,
      Record<string, RawRelation>
    >;
    const result = enrichOldFormatModels(modelsObj, storageObj, existingRelations);
    normalizedModels = result.enrichedModels;
    roots = result.roots;
    topRelations = existingRelations;
  }

  return {
    ...contractObj,
    roots,
    models: normalizedModels,
    relations: topRelations,
    storage: normalizedStorage,
    extensionPacks: contractObj['extensionPacks'] ?? {},
    capabilities: contractObj['capabilities'] ?? {},
    meta: contractObj['meta'] ?? {},
    sources: contractObj['sources'] ?? {},
  } as SqlContract<SqlStorage>;
}
|
|
541
|
+
|
|
542
|
+
export function validateContract<TContract extends SqlContract<SqlStorage>>(
|
|
543
|
+
value: unknown,
|
|
544
|
+
): TContract {
|
|
545
|
+
const normalized = normalizeContract(value);
|
|
546
|
+
|
|
547
|
+
const structurallyValid = validateSqlContract<SqlContract<SqlStorage>>(normalized);
|
|
548
|
+
|
|
549
|
+
validateContractDomain(extractDomainShape(structurallyValid));
|
|
550
|
+
|
|
551
|
+
validateContractLogic(structurallyValid);
|
|
552
|
+
|
|
553
|
+
const semanticErrors = validateStorageSemantics(structurallyValid.storage);
|
|
554
|
+
if (semanticErrors.length > 0) {
|
|
555
|
+
throw new Error(`Contract semantic validation failed: ${semanticErrors.join('; ')}`);
|
|
556
|
+
}
|
|
557
|
+
|
|
558
|
+
const constructed = constructContract<TContract>(structurallyValid);
|
|
559
|
+
return decodeContractDefaults(constructed) as TContract;
|
|
560
|
+
}
|