@prisma-next/target-postgres 0.3.0-dev.4 → 0.3.0-dev.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -1
- package/dist/control.d.mts +16 -0
- package/dist/control.d.mts.map +1 -0
- package/dist/control.mjs +2494 -0
- package/dist/control.mjs.map +1 -0
- package/dist/descriptor-meta-DxB8oZzB.mjs +13 -0
- package/dist/descriptor-meta-DxB8oZzB.mjs.map +1 -0
- package/dist/pack.d.mts +7 -0
- package/dist/pack.d.mts.map +1 -0
- package/dist/pack.mjs +9 -0
- package/dist/pack.mjs.map +1 -0
- package/dist/runtime.d.mts +9 -0
- package/dist/runtime.d.mts.map +1 -0
- package/dist/runtime.mjs +21 -0
- package/dist/runtime.mjs.map +1 -0
- package/package.json +34 -33
- package/src/core/descriptor-meta.ts +8 -0
- package/src/core/migrations/planner.ts +1090 -0
- package/src/core/migrations/runner.ts +603 -0
- package/src/core/migrations/statement-builders.ts +144 -0
- package/src/core/types.ts +5 -0
- package/src/exports/control.ts +54 -0
- package/src/exports/pack.ts +6 -0
- package/src/exports/runtime.ts +24 -0
- package/dist/exports/chunk-2EJEPMD3.js +0 -14
- package/dist/exports/chunk-2EJEPMD3.js.map +0 -1
- package/dist/exports/control.d.ts +0 -16
- package/dist/exports/control.js +0 -1255
- package/dist/exports/control.js.map +0 -1
- package/dist/exports/pack.d.ts +0 -5
- package/dist/exports/pack.js +0 -11
- package/dist/exports/pack.js.map +0 -1
- package/dist/exports/runtime.d.ts +0 -13
- package/dist/exports/runtime.js +0 -19
- package/dist/exports/runtime.js.map +0 -1
|
@@ -0,0 +1,1090 @@
|
|
|
1
|
+
import {
|
|
2
|
+
escapeLiteral,
|
|
3
|
+
expandParameterizedNativeType,
|
|
4
|
+
normalizeSchemaNativeType,
|
|
5
|
+
parsePostgresDefault,
|
|
6
|
+
quoteIdentifier,
|
|
7
|
+
} from '@prisma-next/adapter-postgres/control';
|
|
8
|
+
import type { SchemaIssue } from '@prisma-next/core-control-plane/types';
|
|
9
|
+
import type {
|
|
10
|
+
CodecControlHooks,
|
|
11
|
+
MigrationOperationPolicy,
|
|
12
|
+
SqlMigrationPlanner,
|
|
13
|
+
SqlMigrationPlannerPlanOptions,
|
|
14
|
+
SqlMigrationPlanOperation,
|
|
15
|
+
SqlPlannerConflict,
|
|
16
|
+
} from '@prisma-next/family-sql/control';
|
|
17
|
+
import {
|
|
18
|
+
createMigrationPlan,
|
|
19
|
+
extractCodecControlHooks,
|
|
20
|
+
plannerFailure,
|
|
21
|
+
plannerSuccess,
|
|
22
|
+
} from '@prisma-next/family-sql/control';
|
|
23
|
+
import {
|
|
24
|
+
arraysEqual,
|
|
25
|
+
isIndexSatisfied,
|
|
26
|
+
isUniqueConstraintSatisfied,
|
|
27
|
+
verifySqlSchema,
|
|
28
|
+
} from '@prisma-next/family-sql/schema-verify';
|
|
29
|
+
import type {
|
|
30
|
+
ForeignKey,
|
|
31
|
+
SqlContract,
|
|
32
|
+
SqlStorage,
|
|
33
|
+
StorageColumn,
|
|
34
|
+
StorageTable,
|
|
35
|
+
} from '@prisma-next/sql-contract/types';
|
|
36
|
+
import type { SqlSchemaIR } from '@prisma-next/sql-schema-ir/types';
|
|
37
|
+
import { ifDefined } from '@prisma-next/utils/defined';
|
|
38
|
+
import type { PostgresColumnDefault } from '../types';
|
|
39
|
+
|
|
40
|
+
/**
 * Kind of database object a plan operation targets. Despite the name this is
 * an object-type discriminator, not an operation class ('additive' etc.).
 */
type OperationClass = 'extension' | 'type' | 'table' | 'unique' | 'index' | 'foreignKey';

// Extracts the `frameworkComponents` element type from the shared planner
// options; falls back to an opaque readonly array when the base type does not
// declare it.
type PlannerFrameworkComponents = SqlMigrationPlannerPlanOptions extends {
  readonly frameworkComponents: infer T;
}
  ? T
  : ReadonlyArray<unknown>;

// Planner options with `frameworkComponents` guaranteed present.
type PlannerOptionsWithComponents = SqlMigrationPlannerPlanOptions & {
  readonly frameworkComponents: PlannerFrameworkComponents;
};

// Options accepted by `verifySqlSchema`, widened with framework components so
// component-owned verification hooks can participate.
type VerifySqlSchemaOptionsWithComponents = Parameters<typeof verifySqlSchema>[0] & {
  readonly frameworkComponents: PlannerFrameworkComponents;
};

/**
 * A component-declared database-side dependency (e.g. an extension) together
 * with the operations that install it and a check for whether it is already
 * present in the introspected schema.
 */
type PlannerDatabaseDependency = {
  // Stable identifier used for de-duplication across components.
  readonly id: string;
  readonly label: string;
  // Operations to run when the dependency is not yet installed.
  readonly install: readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[];
  // Returns issues when the dependency is missing; an empty array means "installed".
  readonly verifyDatabaseDependencyInstalled: (schema: SqlSchemaIR) => readonly SchemaIssue[];
};

/**
 * Target metadata attached to every Postgres plan operation: which schema,
 * object kind and object name the operation touches (plus the owning table
 * for table-scoped objects such as indexes and constraints).
 */
export interface PostgresPlanTargetDetails {
  readonly schema: string;
  readonly objectType: OperationClass;
  readonly name: string;
  readonly table?: string;
}

// Planner construction options; all fields have defaults (see below).
interface PlannerConfig {
  readonly defaultSchema: string;
}

// Used when callers do not override the schema; matches Postgres' default.
const DEFAULT_PLANNER_CONFIG: PlannerConfig = {
  defaultSchema: 'public',
};
|
|
77
|
+
|
|
78
|
+
export function createPostgresMigrationPlanner(
|
|
79
|
+
config: Partial<PlannerConfig> = {},
|
|
80
|
+
): SqlMigrationPlanner<PostgresPlanTargetDetails> {
|
|
81
|
+
return new PostgresMigrationPlanner({
|
|
82
|
+
...DEFAULT_PLANNER_CONFIG,
|
|
83
|
+
...config,
|
|
84
|
+
});
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
/**
 * Init-style migration planner for Postgres. Emits only additive operations
 * (create table/column/constraint/index); any non-additive drift between the
 * contract and the introspected schema is reported as a planner conflict.
 *
 * NOTE(review): helpers such as `sortedEntries`, `qualifyTableName`,
 * `toRegclassLiteral`, `columnExistsCheck`, `tableIsEmptyCheck`,
 * `constraintExistsCheck`, `tableHasPrimaryKeyCheck`, `hasUniqueConstraint`,
 * `hasIndex`, `hasForeignKey`, `buildAddColumnSql`, `buildCreateTableSql`,
 * `isAdditiveIssue`, `buildConflictLocation` and `conflictComparator` are
 * defined elsewhere in this file — behavior documented here assumes their
 * names are accurate; confirm against their definitions.
 */
class PostgresMigrationPlanner implements SqlMigrationPlanner<PostgresPlanTargetDetails> {
  constructor(private readonly config: PlannerConfig) {}

  /**
   * Produces a migration plan taking the introspected schema to the contract.
   * Fails fast on a non-additive policy or on schema conflicts; otherwise
   * returns a plan whose operations are ordered so dependencies come first
   * (dependencies → types → tables → columns → PKs → uniques → indexes → FKs).
   */
  plan(options: SqlMigrationPlannerPlanOptions) {
    const schemaName = options.schemaName ?? this.config.defaultSchema;
    const policyResult = this.ensureAdditivePolicy(options.policy);
    if (policyResult) {
      return policyResult;
    }

    const classification = this.classifySchema(options);
    if (classification.kind === 'conflict') {
      return plannerFailure(classification.conflicts);
    }

    // Extract codec control hooks once at entry point for reuse across all operations.
    // This avoids repeated iteration over frameworkComponents for each method that needs hooks.
    const codecHooks = extractCodecControlHooks(options.frameworkComponents);

    const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];

    // Storage-type operations can themselves surface policy conflicts, so they
    // are built (and checked) before anything is appended to the plan.
    const storageTypePlan = this.buildStorageTypeOperations(options, schemaName, codecHooks);
    if (storageTypePlan.conflicts.length > 0) {
      return plannerFailure(storageTypePlan.conflicts);
    }

    // Build extension operations from component-owned database dependencies
    operations.push(
      ...this.buildDatabaseDependencyOperations(options),
      ...storageTypePlan.operations,
      ...this.buildTableOperations(options.contract.storage.tables, options.schema, schemaName),
      ...this.buildColumnOperations(options.contract.storage.tables, options.schema, schemaName),
      ...this.buildPrimaryKeyOperations(
        options.contract.storage.tables,
        options.schema,
        schemaName,
      ),
      ...this.buildUniqueOperations(options.contract.storage.tables, options.schema, schemaName),
      ...this.buildIndexOperations(options.contract.storage.tables, options.schema, schemaName),
      ...this.buildFkBackingIndexOperations(
        options.contract.storage.tables,
        options.schema,
        schemaName,
      ),
      ...this.buildForeignKeyOperations(
        options.contract.storage.tables,
        options.schema,
        schemaName,
      ),
    );

    // origin: null marks an init plan (no prior contract state to migrate from).
    const plan = createMigrationPlan<PostgresPlanTargetDetails>({
      targetId: 'postgres',
      origin: null,
      destination: {
        storageHash: options.contract.storageHash,
        ...ifDefined('profileHash', options.contract.profileHash),
      },
      operations,
    });

    return plannerSuccess(plan);
  }

  /**
   * Returns a planner failure when the policy does not allow additive
   * operations; returns null when the policy is acceptable.
   */
  private ensureAdditivePolicy(policy: MigrationOperationPolicy) {
    if (!policy.allowedOperationClasses.includes('additive')) {
      return plannerFailure([
        {
          kind: 'unsupportedOperation',
          summary: 'Init planner requires additive operations be allowed',
          why: 'The init planner only emits additive operations. Update the policy to include "additive".',
        },
      ]);
    }
    return null;
  }

  /**
   * Builds migration operations from component-owned database dependencies.
   * These operations install database-side persistence structures declared by components.
   */
  private buildDatabaseDependencyOperations(
    options: PlannerOptionsWithComponents,
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
    const dependencies = this.collectDependencies(options);
    const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
    // Two components may declare the same dependency (or share operation ids);
    // both levels are de-duplicated.
    const seenDependencyIds = new Set<string>();
    const seenOperationIds = new Set<string>();

    for (const dependency of dependencies) {
      if (seenDependencyIds.has(dependency.id)) {
        continue;
      }
      seenDependencyIds.add(dependency.id);

      // No issues means the dependency is already installed — nothing to plan.
      const issues = dependency.verifyDatabaseDependencyInstalled(options.schema);
      if (issues.length === 0) {
        continue;
      }

      for (const installOp of dependency.install) {
        if (seenOperationIds.has(installOp.id)) {
          continue;
        }
        seenOperationIds.add(installOp.id);
        // SQL family components are expected to provide compatible target details. This would be better if
        // the type system could enforce it but it's not likely to occur in practice.
        operations.push(installOp as SqlMigrationPlanOperation<PostgresPlanTargetDetails>);
      }
    }

    return operations;
  }

  /**
   * Delegates planning of contract storage types to codec-provided
   * `planTypeOperations` hooks. Operations whose class the policy disallows
   * are converted into conflicts instead of being added to the plan.
   */
  private buildStorageTypeOperations(
    options: PlannerOptionsWithComponents,
    schemaName: string,
    codecHooks: Map<string, CodecControlHooks>,
  ): {
    readonly operations: readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[];
    readonly conflicts: readonly SqlPlannerConflict[];
  } {
    const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
    const conflicts: SqlPlannerConflict[] = [];
    const storageTypes = options.contract.storage.types ?? {};

    for (const [typeName, typeInstance] of sortedEntries(storageTypes)) {
      const hook = codecHooks.get(typeInstance.codecId);
      // Codecs without a planning hook contribute no type operations.
      const planResult = hook?.planTypeOperations?.({
        typeName,
        typeInstance,
        contract: options.contract,
        schema: options.schema,
        schemaName,
        policy: options.policy,
      });
      if (!planResult) {
        continue;
      }
      for (const operation of planResult.operations) {
        if (!options.policy.allowedOperationClasses.includes(operation.operationClass)) {
          conflicts.push({
            kind: 'missingButNonAdditive',
            summary: `Storage type "${typeName}" requires "${operation.operationClass}" operation "${operation.id}"`,
            location: {
              type: typeName,
            },
          });
          continue;
        }
        // Re-target the codec-produced operation with planner-owned details so
        // all operations carry consistent PostgresPlanTargetDetails.
        operations.push({
          ...operation,
          target: {
            id: operation.target.id,
            details: this.buildTargetDetails('type', typeName, schemaName),
          },
        });
      }
    }

    return { operations, conflicts };
  }

  /**
   * Gathers `databaseDependencies.init` entries from all SQL-family framework
   * components and returns them in a deterministic (id-sorted) order.
   */
  private collectDependencies(
    options: PlannerOptionsWithComponents,
  ): ReadonlyArray<PlannerDatabaseDependency> {
    const components = options.frameworkComponents;
    if (components.length === 0) {
      return [];
    }
    const deps: PlannerDatabaseDependency[] = [];
    for (const component of components) {
      if (!isSqlDependencyProvider(component)) {
        continue;
      }
      const initDeps = component.databaseDependencies?.init;
      if (initDeps && initDeps.length > 0) {
        deps.push(...initDeps);
      }
    }
    return sortDependencies(deps);
  }

  /**
   * Emits CREATE TABLE operations for contract tables that are absent from
   * the introspected schema. Existence pre/postchecks use `to_regclass`.
   */
  private buildTableOperations(
    tables: SqlContract<SqlStorage>['storage']['tables'],
    schema: SqlSchemaIR,
    schemaName: string,
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
    const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
    for (const [tableName, table] of sortedEntries(tables)) {
      // Table already present — columns/constraints are handled by later passes.
      if (schema.tables[tableName]) {
        continue;
      }
      const qualified = qualifyTableName(schemaName, tableName);
      operations.push({
        id: `table.${tableName}`,
        label: `Create table ${tableName}`,
        summary: `Creates table ${tableName} with required columns`,
        operationClass: 'additive',
        target: {
          id: 'postgres',
          details: this.buildTargetDetails('table', tableName, schemaName),
        },
        precheck: [
          {
            description: `ensure table "${tableName}" does not exist`,
            sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, tableName)}) IS NULL`,
          },
        ],
        execute: [
          {
            description: `create table "${tableName}"`,
            sql: buildCreateTableSql(qualified, table),
          },
        ],
        postcheck: [
          {
            description: `verify table "${tableName}" exists`,
            sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, tableName)}) IS NOT NULL`,
          },
        ],
      });
    }
    return operations;
  }

  /**
   * Emits ADD COLUMN operations for contract columns missing from tables that
   * already exist in the schema (new tables get their columns via CREATE TABLE).
   */
  private buildColumnOperations(
    tables: SqlContract<SqlStorage>['storage']['tables'],
    schema: SqlSchemaIR,
    schemaName: string,
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
    const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
    for (const [tableName, table] of sortedEntries(tables)) {
      const schemaTable = schema.tables[tableName];
      if (!schemaTable) {
        continue;
      }
      for (const [columnName, column] of sortedEntries(table.columns)) {
        if (schemaTable.columns[columnName]) {
          continue;
        }
        operations.push(this.buildAddColumnOperation(schemaName, tableName, columnName, column));
      }
    }
    return operations;
  }

  /**
   * Builds a single ADD COLUMN operation. NOT NULL columns without defaults
   * additionally require the table to be empty (precheck), since Postgres
   * cannot backfill existing rows in that case.
   */
  private buildAddColumnOperation(
    schema: string,
    tableName: string,
    columnName: string,
    column: StorageColumn,
  ): SqlMigrationPlanOperation<PostgresPlanTargetDetails> {
    const qualified = qualifyTableName(schema, tableName);
    const notNull = column.nullable === false;
    const hasDefault = column.default !== undefined;
    // Only require empty table for NOT NULL columns WITHOUT defaults.
    // PostgreSQL allows adding NOT NULL columns with defaults to non-empty tables
    // because the default value is applied to existing rows.
    const requiresEmptyTable = notNull && !hasDefault;
    const precheck = [
      {
        description: `ensure column "${columnName}" is missing`,
        sql: columnExistsCheck({ schema, table: tableName, column: columnName, exists: false }),
      },
      ...(requiresEmptyTable
        ? [
            {
              description: `ensure table "${tableName}" is empty before adding NOT NULL column without default`,
              sql: tableIsEmptyCheck(qualified),
            },
          ]
        : []),
    ];
    const execute = [
      {
        description: `add column "${columnName}"`,
        sql: buildAddColumnSql(qualified, columnName, column),
      },
    ];
    const postcheck = [
      {
        description: `verify column "${columnName}" exists`,
        sql: columnExistsCheck({ schema, table: tableName, column: columnName }),
      },
      ...(notNull
        ? [
            {
              description: `verify column "${columnName}" is NOT NULL`,
              sql: columnIsNotNullCheck({ schema, table: tableName, column: columnName }),
            },
          ]
        : []),
    ];

    return {
      id: `column.${tableName}.${columnName}`,
      label: `Add column ${columnName} to ${tableName}`,
      summary: `Adds column ${columnName} to table ${tableName}`,
      operationClass: 'additive',
      target: {
        id: 'postgres',
        details: this.buildTargetDetails('table', tableName, schema),
      },
      precheck,
      execute,
      postcheck,
    };
  }

  /**
   * Emits ADD CONSTRAINT ... PRIMARY KEY operations for tables that declare a
   * primary key in the contract but have none in the introspected schema.
   * Tables absent from the schema are skipped (CREATE TABLE covers them).
   */
  private buildPrimaryKeyOperations(
    tables: SqlContract<SqlStorage>['storage']['tables'],
    schema: SqlSchemaIR,
    schemaName: string,
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
    const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
    for (const [tableName, table] of sortedEntries(tables)) {
      if (!table.primaryKey) {
        continue;
      }
      const schemaTable = schema.tables[tableName];
      if (!schemaTable || schemaTable.primaryKey) {
        continue;
      }
      // Default name follows Postgres' own convention for implicit PK names.
      const constraintName = table.primaryKey.name ?? `${tableName}_pkey`;
      operations.push({
        id: `primaryKey.${tableName}.${constraintName}`,
        label: `Add primary key ${constraintName} on ${tableName}`,
        summary: `Adds primary key ${constraintName} on ${tableName}`,
        operationClass: 'additive',
        target: {
          id: 'postgres',
          details: this.buildTargetDetails('table', tableName, schemaName),
        },
        precheck: [
          {
            description: `ensure primary key does not exist on "${tableName}"`,
            sql: tableHasPrimaryKeyCheck(schemaName, tableName, false),
          },
        ],
        execute: [
          {
            description: `add primary key "${constraintName}"`,
            sql: `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
              ADD CONSTRAINT ${quoteIdentifier(constraintName)}
              PRIMARY KEY (${table.primaryKey.columns.map(quoteIdentifier).join(', ')})`,
          },
        ],
        postcheck: [
          {
            description: `verify primary key "${constraintName}" exists`,
            sql: tableHasPrimaryKeyCheck(schemaName, tableName, true, constraintName),
          },
        ],
      });
    }
    return operations;
  }

  /**
   * Emits ADD CONSTRAINT ... UNIQUE operations for contract unique constraints
   * not already satisfied by the introspected table.
   */
  private buildUniqueOperations(
    tables: SqlContract<SqlStorage>['storage']['tables'],
    schema: SqlSchemaIR,
    schemaName: string,
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
    const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
    for (const [tableName, table] of sortedEntries(tables)) {
      const schemaTable = schema.tables[tableName];
      for (const unique of table.uniques) {
        if (schemaTable && hasUniqueConstraint(schemaTable, unique.columns)) {
          continue;
        }
        // Default name mirrors Postgres' "<table>_<cols>_key" convention.
        const constraintName = unique.name ?? `${tableName}_${unique.columns.join('_')}_key`;
        operations.push({
          id: `unique.${tableName}.${constraintName}`,
          label: `Add unique constraint ${constraintName} on ${tableName}`,
          summary: `Adds unique constraint ${constraintName} on ${tableName}`,
          operationClass: 'additive',
          target: {
            id: 'postgres',
            details: this.buildTargetDetails('unique', constraintName, schemaName, tableName),
          },
          precheck: [
            {
              description: `ensure unique constraint "${constraintName}" is missing`,
              sql: constraintExistsCheck({ constraintName, schema: schemaName, exists: false }),
            },
          ],
          execute: [
            {
              description: `add unique constraint "${constraintName}"`,
              sql: `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
                ADD CONSTRAINT ${quoteIdentifier(constraintName)}
                UNIQUE (${unique.columns.map(quoteIdentifier).join(', ')})`,
            },
          ],
          postcheck: [
            {
              description: `verify unique constraint "${constraintName}" exists`,
              sql: constraintExistsCheck({ constraintName, schema: schemaName }),
            },
          ],
        });
      }
    }
    return operations;
  }

  /**
   * Emits CREATE INDEX operations for contract indexes not already satisfied
   * by the introspected table.
   */
  private buildIndexOperations(
    tables: SqlContract<SqlStorage>['storage']['tables'],
    schema: SqlSchemaIR,
    schemaName: string,
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
    const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
    for (const [tableName, table] of sortedEntries(tables)) {
      const schemaTable = schema.tables[tableName];
      for (const index of table.indexes) {
        if (schemaTable && hasIndex(schemaTable, index.columns)) {
          continue;
        }
        const indexName = index.name ?? `${tableName}_${index.columns.join('_')}_idx`;
        operations.push({
          id: `index.${tableName}.${indexName}`,
          label: `Create index ${indexName} on ${tableName}`,
          summary: `Creates index ${indexName} on ${tableName}`,
          operationClass: 'additive',
          target: {
            id: 'postgres',
            details: this.buildTargetDetails('index', indexName, schemaName, tableName),
          },
          precheck: [
            {
              description: `ensure index "${indexName}" is missing`,
              sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, indexName)}) IS NULL`,
            },
          ],
          execute: [
            {
              description: `create index "${indexName}"`,
              sql: `CREATE INDEX ${quoteIdentifier(indexName)} ON ${qualifyTableName(
                schemaName,
                tableName,
              )} (${index.columns.map(quoteIdentifier).join(', ')})`,
            },
          ],
          postcheck: [
            {
              description: `verify index "${indexName}" exists`,
              sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, indexName)}) IS NOT NULL`,
            },
          ],
        });
      }
    }
    return operations;
  }

  /**
   * Generates FK-backing index operations for FKs with `index: true`,
   * but only when no matching user-declared index exists in `contractTable.indexes`.
   */
  private buildFkBackingIndexOperations(
    tables: SqlContract<SqlStorage>['storage']['tables'],
    schema: SqlSchemaIR,
    schemaName: string,
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
    const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
    for (const [tableName, table] of sortedEntries(tables)) {
      const schemaTable = schema.tables[tableName];
      // Collect column sets of user-declared indexes to avoid duplicates
      const declaredIndexColumns = new Set(table.indexes.map((idx) => idx.columns.join(',')));

      for (const fk of table.foreignKeys) {
        // Note: only an explicit `index: false` opts out; undefined means "index it".
        if (fk.index === false) continue;
        // Skip if user already declared an index with these columns
        if (declaredIndexColumns.has(fk.columns.join(','))) continue;
        // Skip if the index already exists in the database
        if (schemaTable && hasIndex(schemaTable, fk.columns)) continue;

        const indexName = `${tableName}_${fk.columns.join('_')}_idx`;
        operations.push({
          id: `index.${tableName}.${indexName}`,
          label: `Create FK-backing index ${indexName} on ${tableName}`,
          summary: `Creates FK-backing index ${indexName} on ${tableName}`,
          operationClass: 'additive',
          target: {
            id: 'postgres',
            details: this.buildTargetDetails('index', indexName, schemaName, tableName),
          },
          precheck: [
            {
              description: `ensure index "${indexName}" is missing`,
              sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, indexName)}) IS NULL`,
            },
          ],
          execute: [
            {
              description: `create FK-backing index "${indexName}"`,
              sql: `CREATE INDEX ${quoteIdentifier(indexName)} ON ${qualifyTableName(
                schemaName,
                tableName,
              )} (${fk.columns.map(quoteIdentifier).join(', ')})`,
            },
          ],
          postcheck: [
            {
              description: `verify index "${indexName}" exists`,
              sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, indexName)}) IS NOT NULL`,
            },
          ],
        });
      }
    }
    return operations;
  }

  /**
   * Emits ADD CONSTRAINT ... FOREIGN KEY operations for contract FKs not yet
   * present in the introspected schema. FKs with `constraint: false` are
   * skipped (index-only FKs are handled by buildFkBackingIndexOperations).
   * NOTE(review): referenced tables are qualified with the same schemaName —
   * cross-schema references appear unsupported; confirm.
   */
  private buildForeignKeyOperations(
    tables: SqlContract<SqlStorage>['storage']['tables'],
    schema: SqlSchemaIR,
    schemaName: string,
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
    const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
    for (const [tableName, table] of sortedEntries(tables)) {
      const schemaTable = schema.tables[tableName];
      for (const foreignKey of table.foreignKeys) {
        if (foreignKey.constraint === false) continue;
        if (schemaTable && hasForeignKey(schemaTable, foreignKey)) {
          continue;
        }
        // Default name mirrors Postgres' "<table>_<cols>_fkey" convention.
        const fkName = foreignKey.name ?? `${tableName}_${foreignKey.columns.join('_')}_fkey`;
        operations.push({
          id: `foreignKey.${tableName}.${fkName}`,
          label: `Add foreign key ${fkName} on ${tableName}`,
          summary: `Adds foreign key ${fkName} referencing ${foreignKey.references.table}`,
          operationClass: 'additive',
          target: {
            id: 'postgres',
            details: this.buildTargetDetails('foreignKey', fkName, schemaName, tableName),
          },
          precheck: [
            {
              description: `ensure foreign key "${fkName}" is missing`,
              sql: constraintExistsCheck({
                constraintName: fkName,
                schema: schemaName,
                exists: false,
              }),
            },
          ],
          execute: [
            {
              description: `add foreign key "${fkName}"`,
              sql: `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
                ADD CONSTRAINT ${quoteIdentifier(fkName)}
                FOREIGN KEY (${foreignKey.columns.map(quoteIdentifier).join(', ')})
                REFERENCES ${qualifyTableName(schemaName, foreignKey.references.table)} (${foreignKey.references.columns
                  .map(quoteIdentifier)
                  .join(', ')})`,
            },
          ],
          postcheck: [
            {
              description: `verify foreign key "${fkName}" exists`,
              sql: constraintExistsCheck({ constraintName: fkName, schema: schemaName }),
            },
          ],
        });
      }
    }
    return operations;
  }

  /**
   * Assembles the PostgresPlanTargetDetails for an operation; `table` is only
   * included when defined (via ifDefined) so the key is absent, not undefined.
   */
  private buildTargetDetails(
    objectType: OperationClass,
    name: string,
    schema: string,
    table?: string,
  ): PostgresPlanTargetDetails {
    return {
      schema,
      objectType,
      name,
      ...ifDefined('table', table),
    };
  }

  /**
   * Runs non-strict schema verification and converts non-additive issues into
   * planner conflicts. Additive issues (things the planner can create) are
   * filtered out by extractConflicts.
   */
  private classifySchema(options: PlannerOptionsWithComponents):
    | { kind: 'ok' }
    | {
        kind: 'conflict';
        conflicts: SqlPlannerConflict[];
      } {
    const verifyOptions: VerifySqlSchemaOptionsWithComponents = {
      contract: options.contract,
      schema: options.schema,
      strict: false,
      typeMetadataRegistry: new Map(),
      frameworkComponents: options.frameworkComponents,
      normalizeDefault: parsePostgresDefault,
      normalizeNativeType: normalizeSchemaNativeType,
    };
    const verifyResult = verifySqlSchema(verifyOptions);

    const conflicts = this.extractConflicts(verifyResult.schema.issues);
    if (conflicts.length > 0) {
      return { kind: 'conflict', conflicts };
    }
    return { kind: 'ok' };
  }

  /**
   * Filters out additive (plannable) issues, converts the rest to conflicts,
   * and returns them in a deterministic order (conflictComparator).
   */
  private extractConflicts(issues: readonly SchemaIssue[]): SqlPlannerConflict[] {
    const conflicts: SqlPlannerConflict[] = [];
    for (const issue of issues) {
      if (isAdditiveIssue(issue)) {
        continue;
      }
      const conflict = this.convertIssueToConflict(issue);
      if (conflict) {
        conflicts.push(conflict);
      }
    }
    return conflicts.sort(conflictComparator);
  }

  /**
   * Maps verifier issue kinds to planner conflict kinds. Unknown kinds map to
   * null and are silently dropped by extractConflicts.
   */
  private convertIssueToConflict(issue: SchemaIssue): SqlPlannerConflict | null {
    switch (issue.kind) {
      case 'type_mismatch':
        return this.buildConflict('typeMismatch', issue);
      case 'nullability_mismatch':
        return this.buildConflict('nullabilityConflict', issue);
      case 'primary_key_mismatch':
        return this.buildConflict('indexIncompatible', issue);
      case 'unique_constraint_mismatch':
        return this.buildConflict('indexIncompatible', issue);
      case 'index_mismatch':
        return this.buildConflict('indexIncompatible', issue);
      case 'foreign_key_mismatch':
        return this.buildConflict('foreignKeyConflict', issue);
      default:
        return null;
    }
  }

  /**
   * Builds a conflict from a verifier issue, attaching a frozen
   * expected/actual meta object when either is present.
   * Note: `||` here treats falsy expected/actual (e.g. empty string) as absent.
   */
  private buildConflict(kind: SqlPlannerConflict['kind'], issue: SchemaIssue): SqlPlannerConflict {
    const location = buildConflictLocation(issue);
    const meta =
      issue.expected || issue.actual
        ? Object.freeze({
            ...ifDefined('expected', issue.expected),
            ...ifDefined('actual', issue.actual),
          })
        : undefined;

    return {
      kind,
      summary: issue.message,
      ...ifDefined('location', location),
      ...ifDefined('meta', meta),
    };
  }
}
|
|
746
|
+
|
|
747
|
+
function isSqlDependencyProvider(component: unknown): component is {
|
|
748
|
+
readonly databaseDependencies?: {
|
|
749
|
+
readonly init?: readonly PlannerDatabaseDependency[];
|
|
750
|
+
};
|
|
751
|
+
} {
|
|
752
|
+
if (typeof component !== 'object' || component === null) {
|
|
753
|
+
return false;
|
|
754
|
+
}
|
|
755
|
+
const record = component as Record<string, unknown>;
|
|
756
|
+
|
|
757
|
+
// If present, enforce familyId match to avoid mixing families at runtime.
|
|
758
|
+
if (Object.hasOwn(record, 'familyId') && record['familyId'] !== 'sql') {
|
|
759
|
+
return false;
|
|
760
|
+
}
|
|
761
|
+
|
|
762
|
+
if (!Object.hasOwn(record, 'databaseDependencies')) {
|
|
763
|
+
return false;
|
|
764
|
+
}
|
|
765
|
+
const deps = record['databaseDependencies'];
|
|
766
|
+
return deps === undefined || (typeof deps === 'object' && deps !== null);
|
|
767
|
+
}
|
|
768
|
+
|
|
769
|
+
function sortDependencies(
|
|
770
|
+
dependencies: ReadonlyArray<PlannerDatabaseDependency>,
|
|
771
|
+
): ReadonlyArray<PlannerDatabaseDependency> {
|
|
772
|
+
if (dependencies.length <= 1) {
|
|
773
|
+
return dependencies;
|
|
774
|
+
}
|
|
775
|
+
return [...dependencies].sort((a, b) => a.id.localeCompare(b.id));
|
|
776
|
+
}
|
|
777
|
+
|
|
778
|
+
function buildCreateTableSql(qualifiedTableName: string, table: StorageTable): string {
|
|
779
|
+
const columnDefinitions = Object.entries(table.columns).map(
|
|
780
|
+
([columnName, column]: [string, StorageColumn]) => {
|
|
781
|
+
const parts = [
|
|
782
|
+
quoteIdentifier(columnName),
|
|
783
|
+
buildColumnTypeSql(column),
|
|
784
|
+
buildColumnDefaultSql(column.default),
|
|
785
|
+
column.nullable ? '' : 'NOT NULL',
|
|
786
|
+
].filter(Boolean);
|
|
787
|
+
return parts.join(' ');
|
|
788
|
+
},
|
|
789
|
+
);
|
|
790
|
+
|
|
791
|
+
const constraintDefinitions: string[] = [];
|
|
792
|
+
if (table.primaryKey) {
|
|
793
|
+
constraintDefinitions.push(
|
|
794
|
+
`PRIMARY KEY (${table.primaryKey.columns.map(quoteIdentifier).join(', ')})`,
|
|
795
|
+
);
|
|
796
|
+
}
|
|
797
|
+
|
|
798
|
+
const allDefinitions = [...columnDefinitions, ...constraintDefinitions];
|
|
799
|
+
return `CREATE TABLE ${qualifiedTableName} (\n ${allDefinitions.join(',\n ')}\n)`;
|
|
800
|
+
}
|
|
801
|
+
|
|
802
|
+
/**
|
|
803
|
+
* Builds the column type SQL, handling autoincrement as a special case.
|
|
804
|
+
* For autoincrement on int4/int8, we use SERIAL/BIGSERIAL types.
|
|
805
|
+
*/
|
|
806
|
+
function buildColumnTypeSql(column: StorageColumn): string {
|
|
807
|
+
const columnDefault = column.default;
|
|
808
|
+
|
|
809
|
+
// For autoincrement, use SERIAL/BIGSERIAL types instead of int4/int8
|
|
810
|
+
if (columnDefault?.kind === 'function' && columnDefault.expression === 'autoincrement()') {
|
|
811
|
+
if (column.nativeType === 'int4' || column.nativeType === 'integer') {
|
|
812
|
+
return 'SERIAL';
|
|
813
|
+
}
|
|
814
|
+
if (column.nativeType === 'int8' || column.nativeType === 'bigint') {
|
|
815
|
+
return 'BIGSERIAL';
|
|
816
|
+
}
|
|
817
|
+
if (column.nativeType === 'int2' || column.nativeType === 'smallint') {
|
|
818
|
+
return 'SMALLSERIAL';
|
|
819
|
+
}
|
|
820
|
+
}
|
|
821
|
+
|
|
822
|
+
if (column.typeRef) {
|
|
823
|
+
return quoteIdentifier(column.nativeType);
|
|
824
|
+
}
|
|
825
|
+
|
|
826
|
+
return renderParameterizedTypeSql(column) ?? column.nativeType;
|
|
827
|
+
}
|
|
828
|
+
|
|
829
|
+
/**
|
|
830
|
+
* Renders parameterized type SQL for a column, returning null if no expansion is needed.
|
|
831
|
+
*
|
|
832
|
+
* Uses the shared expandParameterizedNativeType utility from the postgres adapter.
|
|
833
|
+
* Returns null when the column has no typeParams, allowing the caller to fall back
|
|
834
|
+
* to the base nativeType.
|
|
835
|
+
*/
|
|
836
|
+
function renderParameterizedTypeSql(column: StorageColumn): string | null {
|
|
837
|
+
if (!column.typeParams) {
|
|
838
|
+
return null;
|
|
839
|
+
}
|
|
840
|
+
|
|
841
|
+
const expanded = expandParameterizedNativeType({
|
|
842
|
+
nativeType: column.nativeType,
|
|
843
|
+
codecId: column.codecId,
|
|
844
|
+
typeParams: column.typeParams,
|
|
845
|
+
});
|
|
846
|
+
|
|
847
|
+
// If no expansion happened (returned the same base type), return null
|
|
848
|
+
// so caller can decide whether to use nativeType directly
|
|
849
|
+
return expanded !== column.nativeType ? expanded : null;
|
|
850
|
+
}
|
|
851
|
+
|
|
852
|
+
/**
|
|
853
|
+
* Builds the DEFAULT clause for a column definition.
|
|
854
|
+
* Returns empty string if no default is defined.
|
|
855
|
+
*
|
|
856
|
+
* Note: autoincrement is handled specially via SERIAL types, so we skip it here.
|
|
857
|
+
*/
|
|
858
|
+
function buildColumnDefaultSql(columnDefault: PostgresColumnDefault | undefined): string {
|
|
859
|
+
if (!columnDefault) {
|
|
860
|
+
return '';
|
|
861
|
+
}
|
|
862
|
+
|
|
863
|
+
switch (columnDefault.kind) {
|
|
864
|
+
case 'literal':
|
|
865
|
+
return `DEFAULT ${columnDefault.expression}`;
|
|
866
|
+
case 'function': {
|
|
867
|
+
// autoincrement is handled by SERIAL type, no explicit DEFAULT needed
|
|
868
|
+
if (columnDefault.expression === 'autoincrement()') {
|
|
869
|
+
return '';
|
|
870
|
+
}
|
|
871
|
+
return `DEFAULT ${columnDefault.expression}`;
|
|
872
|
+
}
|
|
873
|
+
case 'sequence':
|
|
874
|
+
// Sequence names use quoteIdentifier for safe identifier handling
|
|
875
|
+
return `DEFAULT nextval(${quoteIdentifier(columnDefault.name)}::regclass)`;
|
|
876
|
+
}
|
|
877
|
+
}
|
|
878
|
+
|
|
879
|
+
function qualifyTableName(schema: string, table: string): string {
|
|
880
|
+
return `${quoteIdentifier(schema)}.${quoteIdentifier(table)}`;
|
|
881
|
+
}
|
|
882
|
+
|
|
883
|
+
function toRegclassLiteral(schema: string, name: string): string {
|
|
884
|
+
const regclass = `${quoteIdentifier(schema)}.${quoteIdentifier(name)}`;
|
|
885
|
+
return `'${escapeLiteral(regclass)}'`;
|
|
886
|
+
}
|
|
887
|
+
|
|
888
|
+
function sortedEntries<V>(record: Readonly<Record<string, V>>): Array<[string, V]> {
|
|
889
|
+
return Object.entries(record).sort(([a], [b]) => a.localeCompare(b)) as Array<[string, V]>;
|
|
890
|
+
}
|
|
891
|
+
|
|
892
|
+
// SQL probe over pg_constraint: true when a constraint named `constraintName`
// lives in schema `schema`. Pass exists=false to assert absence instead.
// Interpolated values go through escapeLiteral, so the statement is safe to run verbatim.
function constraintExistsCheck({
  constraintName,
  schema,
  exists = true,
}: {
  constraintName: string;
  schema: string;
  exists?: boolean;
}): string {
  // exists=false flips the whole probe to NOT EXISTS.
  const existsClause = exists ? 'EXISTS' : 'NOT EXISTS';
  return `SELECT ${existsClause} (
SELECT 1 FROM pg_constraint c
JOIN pg_namespace n ON c.connamespace = n.oid
WHERE c.conname = '${escapeLiteral(constraintName)}'
AND n.nspname = '${escapeLiteral(schema)}'
)`;
}
|
|
909
|
+
|
|
910
|
+
// SQL probe against information_schema.columns: true when the column exists
// in the given schema/table (or, with exists=false, when it does not).
function columnExistsCheck({
  schema,
  table,
  column,
  exists = true,
}: {
  schema: string;
  table: string;
  column: string;
  exists?: boolean;
}): string {
  // Negation is spelled by prefixing EXISTS with NOT (contrast constraintExistsCheck).
  const existsClause = exists ? '' : 'NOT ';
  return `SELECT ${existsClause}EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(schema)}'
AND table_name = '${escapeLiteral(table)}'
AND column_name = '${escapeLiteral(column)}'
)`;
}
|
|
930
|
+
|
|
931
|
+
// SQL probe: true when the column exists AND is declared NOT NULL
// (information_schema reports is_nullable = 'NO' for such columns).
function columnIsNotNullCheck({
  schema,
  table,
  column,
}: {
  schema: string;
  table: string;
  column: string;
}): string {
  return `SELECT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(schema)}'
AND table_name = '${escapeLiteral(table)}'
AND column_name = '${escapeLiteral(column)}'
AND is_nullable = 'NO'
)`;
}
|
|
949
|
+
|
|
950
|
+
function tableIsEmptyCheck(qualifiedTableName: string): string {
|
|
951
|
+
return `SELECT NOT EXISTS (SELECT 1 FROM ${qualifiedTableName} LIMIT 1)`;
|
|
952
|
+
}
|
|
953
|
+
|
|
954
|
+
function buildAddColumnSql(
|
|
955
|
+
qualifiedTableName: string,
|
|
956
|
+
columnName: string,
|
|
957
|
+
column: StorageColumn,
|
|
958
|
+
): string {
|
|
959
|
+
const typeSql = buildColumnTypeSql(column);
|
|
960
|
+
const defaultSql = buildColumnDefaultSql(column.default);
|
|
961
|
+
const parts = [
|
|
962
|
+
`ALTER TABLE ${qualifiedTableName}`,
|
|
963
|
+
`ADD COLUMN ${quoteIdentifier(columnName)} ${typeSql}`,
|
|
964
|
+
defaultSql,
|
|
965
|
+
column.nullable ? '' : 'NOT NULL',
|
|
966
|
+
].filter(Boolean);
|
|
967
|
+
return parts.join(' ');
|
|
968
|
+
}
|
|
969
|
+
|
|
970
|
+
// SQL probe: true when the table has a primary key (pg_index.indisprimary),
// optionally restricted to a PK whose index relation carries `constraintName`.
// Pass exists=false to assert absence.
function tableHasPrimaryKeyCheck(
  schema: string,
  table: string,
  exists: boolean,
  constraintName?: string,
): string {
  const comparison = exists ? '' : 'NOT ';
  // The PK's index relation name (c2.relname) doubles as the constraint-name filter.
  const constraintFilter = constraintName
    ? `AND c2.relname = '${escapeLiteral(constraintName)}'`
    : '';
  return `SELECT ${comparison}EXISTS (
SELECT 1
FROM pg_index i
JOIN pg_class c ON c.oid = i.indrelid
JOIN pg_namespace n ON n.oid = c.relnamespace
LEFT JOIN pg_class c2 ON c2.oid = i.indexrelid
WHERE n.nspname = '${escapeLiteral(schema)}'
AND c.relname = '${escapeLiteral(table)}'
AND i.indisprimary
${constraintFilter}
)`;
}
|
|
992
|
+
|
|
993
|
+
/**
|
|
994
|
+
* Checks if table has a unique constraint satisfied by the given columns.
|
|
995
|
+
* Uses shared semantic satisfaction predicate from verify-helpers.
|
|
996
|
+
*/
|
|
997
|
+
function hasUniqueConstraint(
|
|
998
|
+
table: SqlSchemaIR['tables'][string],
|
|
999
|
+
columns: readonly string[],
|
|
1000
|
+
): boolean {
|
|
1001
|
+
return isUniqueConstraintSatisfied(table.uniques, table.indexes, columns);
|
|
1002
|
+
}
|
|
1003
|
+
|
|
1004
|
+
/**
|
|
1005
|
+
* Checks if table has an index satisfied by the given columns.
|
|
1006
|
+
* Uses shared semantic satisfaction predicate from verify-helpers.
|
|
1007
|
+
*/
|
|
1008
|
+
function hasIndex(table: SqlSchemaIR['tables'][string], columns: readonly string[]): boolean {
|
|
1009
|
+
return isIndexSatisfied(table.indexes, table.uniques, columns);
|
|
1010
|
+
}
|
|
1011
|
+
|
|
1012
|
+
function hasForeignKey(table: SqlSchemaIR['tables'][string], fk: ForeignKey): boolean {
|
|
1013
|
+
return table.foreignKeys.some(
|
|
1014
|
+
(candidate) =>
|
|
1015
|
+
arraysEqual(candidate.columns, fk.columns) &&
|
|
1016
|
+
candidate.referencedTable === fk.references.table &&
|
|
1017
|
+
arraysEqual(candidate.referencedColumns, fk.references.columns),
|
|
1018
|
+
);
|
|
1019
|
+
}
|
|
1020
|
+
|
|
1021
|
+
function isAdditiveIssue(issue: SchemaIssue): boolean {
|
|
1022
|
+
switch (issue.kind) {
|
|
1023
|
+
case 'type_missing':
|
|
1024
|
+
case 'type_values_mismatch':
|
|
1025
|
+
case 'missing_table':
|
|
1026
|
+
case 'missing_column':
|
|
1027
|
+
case 'extension_missing':
|
|
1028
|
+
return true;
|
|
1029
|
+
case 'primary_key_mismatch':
|
|
1030
|
+
return issue.actual === undefined;
|
|
1031
|
+
case 'unique_constraint_mismatch':
|
|
1032
|
+
case 'index_mismatch':
|
|
1033
|
+
case 'foreign_key_mismatch':
|
|
1034
|
+
return issue.indexOrConstraint === undefined;
|
|
1035
|
+
default:
|
|
1036
|
+
return false;
|
|
1037
|
+
}
|
|
1038
|
+
}
|
|
1039
|
+
|
|
1040
|
+
function buildConflictLocation(issue: SchemaIssue) {
|
|
1041
|
+
const location: {
|
|
1042
|
+
table?: string;
|
|
1043
|
+
column?: string;
|
|
1044
|
+
constraint?: string;
|
|
1045
|
+
} = {};
|
|
1046
|
+
if (issue.table) {
|
|
1047
|
+
location.table = issue.table;
|
|
1048
|
+
}
|
|
1049
|
+
if (issue.column) {
|
|
1050
|
+
location.column = issue.column;
|
|
1051
|
+
}
|
|
1052
|
+
if (issue.indexOrConstraint) {
|
|
1053
|
+
location.constraint = issue.indexOrConstraint;
|
|
1054
|
+
}
|
|
1055
|
+
return Object.keys(location).length > 0 ? location : undefined;
|
|
1056
|
+
}
|
|
1057
|
+
|
|
1058
|
+
function conflictComparator(a: SqlPlannerConflict, b: SqlPlannerConflict): number {
|
|
1059
|
+
if (a.kind !== b.kind) {
|
|
1060
|
+
return a.kind < b.kind ? -1 : 1;
|
|
1061
|
+
}
|
|
1062
|
+
const aLocation = a.location ?? {};
|
|
1063
|
+
const bLocation = b.location ?? {};
|
|
1064
|
+
const tableCompare = compareStrings(aLocation.table, bLocation.table);
|
|
1065
|
+
if (tableCompare !== 0) {
|
|
1066
|
+
return tableCompare;
|
|
1067
|
+
}
|
|
1068
|
+
const columnCompare = compareStrings(aLocation.column, bLocation.column);
|
|
1069
|
+
if (columnCompare !== 0) {
|
|
1070
|
+
return columnCompare;
|
|
1071
|
+
}
|
|
1072
|
+
const constraintCompare = compareStrings(aLocation.constraint, bLocation.constraint);
|
|
1073
|
+
if (constraintCompare !== 0) {
|
|
1074
|
+
return constraintCompare;
|
|
1075
|
+
}
|
|
1076
|
+
return compareStrings(a.summary, b.summary);
|
|
1077
|
+
}
|
|
1078
|
+
|
|
1079
|
+
function compareStrings(a?: string, b?: string): number {
|
|
1080
|
+
if (a === b) {
|
|
1081
|
+
return 0;
|
|
1082
|
+
}
|
|
1083
|
+
if (a === undefined) {
|
|
1084
|
+
return -1;
|
|
1085
|
+
}
|
|
1086
|
+
if (b === undefined) {
|
|
1087
|
+
return 1;
|
|
1088
|
+
}
|
|
1089
|
+
return a < b ? -1 : 1;
|
|
1090
|
+
}
|