@prisma-next/target-postgres 0.4.0-dev.9 → 0.5.0-dev.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/codec-ids-CojIXVf9.mjs +29 -0
- package/dist/codec-ids-CojIXVf9.mjs.map +1 -0
- package/dist/codec-ids.d.mts +28 -0
- package/dist/codec-ids.d.mts.map +1 -0
- package/dist/codec-ids.mjs +3 -0
- package/dist/codec-types.d.mts +42 -0
- package/dist/codec-types.d.mts.map +1 -0
- package/dist/codec-types.mjs +3 -0
- package/dist/codecs-BoahtY_Q.mjs +385 -0
- package/dist/codecs-BoahtY_Q.mjs.map +1 -0
- package/dist/codecs-D-F2KJqt.d.mts +299 -0
- package/dist/codecs-D-F2KJqt.d.mts.map +1 -0
- package/dist/codecs.d.mts +2 -0
- package/dist/codecs.mjs +3 -0
- package/dist/control.d.mts +1 -9
- package/dist/control.d.mts.map +1 -1
- package/dist/control.mjs +23 -5101
- package/dist/control.mjs.map +1 -1
- package/dist/data-transform-CxFRBIUp.d.mts +32 -0
- package/dist/data-transform-CxFRBIUp.d.mts.map +1 -0
- package/dist/data-transform-VfEGzXWt.mjs +39 -0
- package/dist/data-transform-VfEGzXWt.mjs.map +1 -0
- package/dist/data-transform.d.mts +2 -0
- package/dist/data-transform.mjs +3 -0
- package/dist/default-normalizer-DNOpRoOF.mjs +131 -0
- package/dist/default-normalizer-DNOpRoOF.mjs.map +1 -0
- package/dist/default-normalizer.d.mts +19 -0
- package/dist/default-normalizer.d.mts.map +1 -0
- package/dist/default-normalizer.mjs +3 -0
- package/dist/{descriptor-meta-DkvCmY98.mjs → descriptor-meta-BVoVtyp-.mjs} +1 -1
- package/dist/{descriptor-meta-DkvCmY98.mjs.map → descriptor-meta-BVoVtyp-.mjs.map} +1 -1
- package/dist/errors-AFvEPZ1R.mjs +34 -0
- package/dist/errors-AFvEPZ1R.mjs.map +1 -0
- package/dist/errors.d.mts +27 -0
- package/dist/errors.d.mts.map +1 -0
- package/dist/errors.mjs +3 -0
- package/dist/issue-planner-CFjB0_oO.mjs +879 -0
- package/dist/issue-planner-CFjB0_oO.mjs.map +1 -0
- package/dist/issue-planner.d.mts +85 -0
- package/dist/issue-planner.d.mts.map +1 -0
- package/dist/issue-planner.mjs +3 -0
- package/dist/migration.d.mts +90 -0
- package/dist/migration.d.mts.map +1 -0
- package/dist/migration.mjs +24 -0
- package/dist/migration.mjs.map +1 -0
- package/dist/native-type-normalizer-CInai_oY.mjs +38 -0
- package/dist/native-type-normalizer-CInai_oY.mjs.map +1 -0
- package/dist/native-type-normalizer.d.mts +18 -0
- package/dist/native-type-normalizer.d.mts.map +1 -0
- package/dist/native-type-normalizer.mjs +3 -0
- package/dist/op-factory-call-BKlruaiC.mjs +605 -0
- package/dist/op-factory-call-BKlruaiC.mjs.map +1 -0
- package/dist/op-factory-call-C3bWXKSP.d.mts +304 -0
- package/dist/op-factory-call-C3bWXKSP.d.mts.map +1 -0
- package/dist/op-factory-call.d.mts +3 -0
- package/dist/op-factory-call.mjs +3 -0
- package/dist/pack.d.mts +1 -1
- package/dist/pack.mjs +1 -1
- package/dist/planner-CLUvVhUN.mjs +98 -0
- package/dist/planner-CLUvVhUN.mjs.map +1 -0
- package/dist/planner-ddl-builders-Dxvw1LHw.mjs +132 -0
- package/dist/planner-ddl-builders-Dxvw1LHw.mjs.map +1 -0
- package/dist/planner-ddl-builders.d.mts +22 -0
- package/dist/planner-ddl-builders.d.mts.map +1 -0
- package/dist/planner-ddl-builders.mjs +3 -0
- package/dist/planner-identity-values-Dju-o5GF.mjs +91 -0
- package/dist/planner-identity-values-Dju-o5GF.mjs.map +1 -0
- package/dist/planner-identity-values.d.mts +20 -0
- package/dist/planner-identity-values.d.mts.map +1 -0
- package/dist/planner-identity-values.mjs +3 -0
- package/dist/planner-produced-postgres-migration-CRRTno6Z.d.mts +20 -0
- package/dist/planner-produced-postgres-migration-CRRTno6Z.d.mts.map +1 -0
- package/dist/planner-produced-postgres-migration-DSSPq8QS.mjs +33 -0
- package/dist/planner-produced-postgres-migration-DSSPq8QS.mjs.map +1 -0
- package/dist/planner-produced-postgres-migration.d.mts +5 -0
- package/dist/planner-produced-postgres-migration.mjs +3 -0
- package/dist/planner-schema-lookup-B7lkypwn.mjs +29 -0
- package/dist/planner-schema-lookup-B7lkypwn.mjs.map +1 -0
- package/dist/planner-schema-lookup.d.mts +22 -0
- package/dist/planner-schema-lookup.d.mts.map +1 -0
- package/dist/planner-schema-lookup.mjs +3 -0
- package/dist/planner-sql-checks-7jkgm9TX.mjs +241 -0
- package/dist/planner-sql-checks-7jkgm9TX.mjs.map +1 -0
- package/dist/planner-sql-checks.d.mts +55 -0
- package/dist/planner-sql-checks.d.mts.map +1 -0
- package/dist/planner-sql-checks.mjs +3 -0
- package/dist/planner-target-details-DH-azLu-.d.mts +11 -0
- package/dist/planner-target-details-DH-azLu-.d.mts.map +1 -0
- package/dist/planner-target-details.d.mts +2 -0
- package/dist/planner-target-details.mjs +1 -0
- package/dist/planner.d.mts +68 -0
- package/dist/planner.d.mts.map +1 -0
- package/dist/planner.mjs +4 -0
- package/dist/postgres-migration-BjA3Zmts.d.mts +50 -0
- package/dist/postgres-migration-BjA3Zmts.d.mts.map +1 -0
- package/dist/postgres-migration-qtmtbONe.mjs +52 -0
- package/dist/postgres-migration-qtmtbONe.mjs.map +1 -0
- package/dist/render-ops-D6_DHdOK.mjs +8 -0
- package/dist/render-ops-D6_DHdOK.mjs.map +1 -0
- package/dist/render-ops.d.mts +11 -0
- package/dist/render-ops.d.mts.map +1 -0
- package/dist/render-ops.mjs +3 -0
- package/dist/render-typescript-1rF_SB4g.mjs +85 -0
- package/dist/render-typescript-1rF_SB4g.mjs.map +1 -0
- package/dist/render-typescript.d.mts +15 -0
- package/dist/render-typescript.d.mts.map +1 -0
- package/dist/render-typescript.mjs +3 -0
- package/dist/runtime.d.mts +15 -3
- package/dist/runtime.d.mts.map +1 -1
- package/dist/runtime.mjs +10 -1
- package/dist/runtime.mjs.map +1 -1
- package/dist/shared-Bxkt8pNO.d.mts +41 -0
- package/dist/shared-Bxkt8pNO.d.mts.map +1 -0
- package/dist/sql-utils-r-Lw535w.mjs +76 -0
- package/dist/sql-utils-r-Lw535w.mjs.map +1 -0
- package/dist/sql-utils.d.mts +59 -0
- package/dist/sql-utils.d.mts.map +1 -0
- package/dist/sql-utils.mjs +3 -0
- package/dist/statement-builders-BPnmt6wx.mjs +116 -0
- package/dist/statement-builders-BPnmt6wx.mjs.map +1 -0
- package/dist/statement-builders.d.mts +23 -0
- package/dist/statement-builders.d.mts.map +1 -0
- package/dist/statement-builders.mjs +3 -0
- package/dist/tables-BmdW_FWO.mjs +477 -0
- package/dist/tables-BmdW_FWO.mjs.map +1 -0
- package/dist/types-ClK03Ojd.d.mts +10 -0
- package/dist/types-ClK03Ojd.d.mts.map +1 -0
- package/dist/types.d.mts +2 -0
- package/dist/types.mjs +1 -0
- package/package.json +38 -19
- package/src/core/codec-ids.ts +30 -0
- package/src/core/codecs.ts +645 -0
- package/src/core/default-normalizer.ts +131 -0
- package/src/core/descriptor-meta.ts +1 -1
- package/src/core/errors.ts +33 -0
- package/src/core/json-schema-type-expression.ts +131 -0
- package/src/core/migrations/issue-planner.ts +832 -0
- package/src/core/migrations/op-factory-call.ts +858 -0
- package/src/core/migrations/operations/columns.ts +285 -0
- package/src/core/migrations/operations/constraints.ts +191 -0
- package/src/core/migrations/operations/data-transform.ts +119 -0
- package/src/core/migrations/operations/dependencies.ts +36 -0
- package/src/core/migrations/operations/enums.ts +113 -0
- package/src/core/migrations/operations/indexes.ts +61 -0
- package/src/core/migrations/operations/raw.ts +15 -0
- package/src/core/migrations/operations/shared.ts +67 -0
- package/src/core/migrations/operations/tables.ts +63 -0
- package/src/core/migrations/planner-ddl-builders.ts +1 -1
- package/src/core/migrations/planner-produced-postgres-migration.ts +67 -0
- package/src/core/migrations/planner-recipes.ts +1 -1
- package/src/core/migrations/planner-sql-checks.ts +1 -1
- package/src/core/migrations/planner-strategies.ts +592 -151
- package/src/core/migrations/planner-target-details.ts +0 -6
- package/src/core/migrations/planner.ts +65 -785
- package/src/core/migrations/postgres-migration.ts +73 -0
- package/src/core/migrations/render-ops.ts +9 -0
- package/src/core/migrations/render-typescript.ts +105 -0
- package/src/core/migrations/runner.ts +2 -4
- package/src/core/native-type-normalizer.ts +49 -0
- package/src/core/sql-utils.ts +104 -0
- package/src/exports/codec-ids.ts +1 -0
- package/src/exports/codec-types.ts +51 -0
- package/src/exports/codecs.ts +2 -0
- package/src/exports/control.ts +9 -142
- package/src/exports/data-transform.ts +1 -0
- package/src/exports/default-normalizer.ts +1 -0
- package/src/exports/errors.ts +1 -0
- package/src/exports/issue-planner.ts +1 -0
- package/src/exports/migration.ts +46 -0
- package/src/exports/native-type-normalizer.ts +1 -0
- package/src/exports/op-factory-call.ts +25 -0
- package/src/exports/planner-ddl-builders.ts +8 -0
- package/src/exports/planner-identity-values.ts +1 -0
- package/src/exports/planner-produced-postgres-migration.ts +1 -0
- package/src/exports/planner-schema-lookup.ts +6 -0
- package/src/exports/planner-sql-checks.ts +11 -0
- package/src/exports/planner-target-details.ts +1 -0
- package/src/exports/planner.ts +1 -0
- package/src/exports/render-ops.ts +1 -0
- package/src/exports/render-typescript.ts +1 -0
- package/src/exports/runtime.ts +19 -4
- package/src/exports/sql-utils.ts +7 -0
- package/src/exports/statement-builders.ts +7 -0
- package/src/exports/types.ts +1 -0
- package/dist/migration-builders.d.mts +0 -88
- package/dist/migration-builders.d.mts.map +0 -1
- package/dist/migration-builders.mjs +0 -3
- package/dist/operation-descriptors-CxymFSgK.mjs +0 -52
- package/dist/operation-descriptors-CxymFSgK.mjs.map +0 -1
- package/src/core/migrations/descriptor-planner.ts +0 -464
- package/src/core/migrations/operation-descriptors.ts +0 -166
- package/src/core/migrations/operation-resolver.ts +0 -929
- package/src/core/migrations/planner-reconciliation.ts +0 -798
- package/src/core/migrations/scaffolding.ts +0 -140
- package/src/exports/migration-builders.ts +0 -56
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
//#region src/core/migrations/statement-builders.ts
|
|
2
|
+
const ensurePrismaContractSchemaStatement = {
|
|
3
|
+
sql: "create schema if not exists prisma_contract",
|
|
4
|
+
params: []
|
|
5
|
+
};
|
|
6
|
+
const ensureMarkerTableStatement = {
|
|
7
|
+
sql: `create table if not exists prisma_contract.marker (
|
|
8
|
+
id smallint primary key default 1,
|
|
9
|
+
core_hash text not null,
|
|
10
|
+
profile_hash text not null,
|
|
11
|
+
contract_json jsonb,
|
|
12
|
+
canonical_version int,
|
|
13
|
+
updated_at timestamptz not null default now(),
|
|
14
|
+
app_tag text,
|
|
15
|
+
meta jsonb not null default '{}'
|
|
16
|
+
)`,
|
|
17
|
+
params: []
|
|
18
|
+
};
|
|
19
|
+
const ensureLedgerTableStatement = {
|
|
20
|
+
sql: `create table if not exists prisma_contract.ledger (
|
|
21
|
+
id bigserial primary key,
|
|
22
|
+
created_at timestamptz not null default now(),
|
|
23
|
+
origin_core_hash text,
|
|
24
|
+
origin_profile_hash text,
|
|
25
|
+
destination_core_hash text not null,
|
|
26
|
+
destination_profile_hash text,
|
|
27
|
+
contract_json_before jsonb,
|
|
28
|
+
contract_json_after jsonb,
|
|
29
|
+
operations jsonb not null
|
|
30
|
+
)`,
|
|
31
|
+
params: []
|
|
32
|
+
};
|
|
33
|
+
function buildWriteMarkerStatements(input) {
|
|
34
|
+
const params = [
|
|
35
|
+
1,
|
|
36
|
+
input.storageHash,
|
|
37
|
+
input.profileHash,
|
|
38
|
+
jsonParam(input.contractJson),
|
|
39
|
+
input.canonicalVersion ?? null,
|
|
40
|
+
input.appTag ?? null,
|
|
41
|
+
jsonParam(input.meta ?? {})
|
|
42
|
+
];
|
|
43
|
+
return {
|
|
44
|
+
insert: {
|
|
45
|
+
sql: `insert into prisma_contract.marker (
|
|
46
|
+
id,
|
|
47
|
+
core_hash,
|
|
48
|
+
profile_hash,
|
|
49
|
+
contract_json,
|
|
50
|
+
canonical_version,
|
|
51
|
+
updated_at,
|
|
52
|
+
app_tag,
|
|
53
|
+
meta
|
|
54
|
+
) values (
|
|
55
|
+
$1,
|
|
56
|
+
$2,
|
|
57
|
+
$3,
|
|
58
|
+
$4::jsonb,
|
|
59
|
+
$5,
|
|
60
|
+
now(),
|
|
61
|
+
$6,
|
|
62
|
+
$7::jsonb
|
|
63
|
+
)`,
|
|
64
|
+
params
|
|
65
|
+
},
|
|
66
|
+
update: {
|
|
67
|
+
sql: `update prisma_contract.marker set
|
|
68
|
+
core_hash = $2,
|
|
69
|
+
profile_hash = $3,
|
|
70
|
+
contract_json = $4::jsonb,
|
|
71
|
+
canonical_version = $5,
|
|
72
|
+
updated_at = now(),
|
|
73
|
+
app_tag = $6,
|
|
74
|
+
meta = $7::jsonb
|
|
75
|
+
where id = $1`,
|
|
76
|
+
params
|
|
77
|
+
}
|
|
78
|
+
};
|
|
79
|
+
}
|
|
80
|
+
function buildLedgerInsertStatement(input) {
|
|
81
|
+
return {
|
|
82
|
+
sql: `insert into prisma_contract.ledger (
|
|
83
|
+
origin_core_hash,
|
|
84
|
+
origin_profile_hash,
|
|
85
|
+
destination_core_hash,
|
|
86
|
+
destination_profile_hash,
|
|
87
|
+
contract_json_before,
|
|
88
|
+
contract_json_after,
|
|
89
|
+
operations
|
|
90
|
+
) values (
|
|
91
|
+
$1,
|
|
92
|
+
$2,
|
|
93
|
+
$3,
|
|
94
|
+
$4,
|
|
95
|
+
$5::jsonb,
|
|
96
|
+
$6::jsonb,
|
|
97
|
+
$7::jsonb
|
|
98
|
+
)`,
|
|
99
|
+
params: [
|
|
100
|
+
input.originStorageHash ?? null,
|
|
101
|
+
input.originProfileHash ?? null,
|
|
102
|
+
input.destinationStorageHash,
|
|
103
|
+
input.destinationProfileHash ?? null,
|
|
104
|
+
jsonParam(input.contractJsonBefore),
|
|
105
|
+
jsonParam(input.contractJsonAfter),
|
|
106
|
+
jsonParam(input.operations)
|
|
107
|
+
]
|
|
108
|
+
};
|
|
109
|
+
}
|
|
110
|
+
function jsonParam(value) {
|
|
111
|
+
return JSON.stringify(value ?? null);
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
//#endregion
|
|
115
|
+
export { ensurePrismaContractSchemaStatement as a, ensureMarkerTableStatement as i, buildWriteMarkerStatements as n, ensureLedgerTableStatement as r, buildLedgerInsertStatement as t };
|
|
116
|
+
//# sourceMappingURL=statement-builders-BPnmt6wx.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"statement-builders-BPnmt6wx.mjs","names":["ensurePrismaContractSchemaStatement: SqlStatement","ensureMarkerTableStatement: SqlStatement","ensureLedgerTableStatement: SqlStatement","params: readonly unknown[]"],"sources":["../src/core/migrations/statement-builders.ts"],"sourcesContent":["export interface SqlStatement {\n readonly sql: string;\n readonly params: readonly unknown[];\n}\n\nexport const ensurePrismaContractSchemaStatement: SqlStatement = {\n sql: 'create schema if not exists prisma_contract',\n params: [],\n};\n\nexport const ensureMarkerTableStatement: SqlStatement = {\n sql: `create table if not exists prisma_contract.marker (\n id smallint primary key default 1,\n core_hash text not null,\n profile_hash text not null,\n contract_json jsonb,\n canonical_version int,\n updated_at timestamptz not null default now(),\n app_tag text,\n meta jsonb not null default '{}'\n )`,\n params: [],\n};\n\nexport const ensureLedgerTableStatement: SqlStatement = {\n sql: `create table if not exists prisma_contract.ledger (\n id bigserial primary key,\n created_at timestamptz not null default now(),\n origin_core_hash text,\n origin_profile_hash text,\n destination_core_hash text not null,\n destination_profile_hash text,\n contract_json_before jsonb,\n contract_json_after jsonb,\n operations jsonb not null\n )`,\n params: [],\n};\n\nexport interface WriteMarkerInput {\n readonly storageHash: string;\n readonly profileHash: string;\n readonly contractJson?: unknown;\n readonly canonicalVersion?: number | null;\n readonly appTag?: string | null;\n readonly meta?: Record<string, unknown>;\n}\n\nexport function buildWriteMarkerStatements(input: WriteMarkerInput): {\n readonly insert: SqlStatement;\n readonly update: SqlStatement;\n} {\n const params: readonly unknown[] = [\n 1,\n input.storageHash,\n input.profileHash,\n jsonParam(input.contractJson),\n input.canonicalVersion ?? null,\n input.appTag ?? null,\n jsonParam(input.meta ?? 
{}),\n ];\n\n return {\n insert: {\n sql: `insert into prisma_contract.marker (\n id,\n core_hash,\n profile_hash,\n contract_json,\n canonical_version,\n updated_at,\n app_tag,\n meta\n ) values (\n $1,\n $2,\n $3,\n $4::jsonb,\n $5,\n now(),\n $6,\n $7::jsonb\n )`,\n params,\n },\n update: {\n sql: `update prisma_contract.marker set\n core_hash = $2,\n profile_hash = $3,\n contract_json = $4::jsonb,\n canonical_version = $5,\n updated_at = now(),\n app_tag = $6,\n meta = $7::jsonb\n where id = $1`,\n params,\n },\n };\n}\n\nexport interface LedgerInsertInput {\n readonly originStorageHash?: string | null;\n readonly originProfileHash?: string | null;\n readonly destinationStorageHash: string;\n readonly destinationProfileHash?: string | null;\n readonly contractJsonBefore?: unknown;\n readonly contractJsonAfter?: unknown;\n readonly operations: unknown;\n}\n\nexport function buildLedgerInsertStatement(input: LedgerInsertInput): SqlStatement {\n return {\n sql: `insert into prisma_contract.ledger (\n origin_core_hash,\n origin_profile_hash,\n destination_core_hash,\n destination_profile_hash,\n contract_json_before,\n contract_json_after,\n operations\n ) values (\n $1,\n $2,\n $3,\n $4,\n $5::jsonb,\n $6::jsonb,\n $7::jsonb\n )`,\n params: [\n input.originStorageHash ?? null,\n input.originProfileHash ?? null,\n input.destinationStorageHash,\n input.destinationProfileHash ?? null,\n jsonParam(input.contractJsonBefore),\n jsonParam(input.contractJsonAfter),\n jsonParam(input.operations),\n ],\n };\n}\n\nfunction jsonParam(value: unknown): string {\n return JSON.stringify(value ?? 
null);\n}\n"],"mappings":";AAKA,MAAaA,sCAAoD;CAC/D,KAAK;CACL,QAAQ,EAAE;CACX;AAED,MAAaC,6BAA2C;CACtD,KAAK;;;;;;;;;;CAUL,QAAQ,EAAE;CACX;AAED,MAAaC,6BAA2C;CACtD,KAAK;;;;;;;;;;;CAWL,QAAQ,EAAE;CACX;AAWD,SAAgB,2BAA2B,OAGzC;CACA,MAAMC,SAA6B;EACjC;EACA,MAAM;EACN,MAAM;EACN,UAAU,MAAM,aAAa;EAC7B,MAAM,oBAAoB;EAC1B,MAAM,UAAU;EAChB,UAAU,MAAM,QAAQ,EAAE,CAAC;EAC5B;AAED,QAAO;EACL,QAAQ;GACN,KAAK;;;;;;;;;;;;;;;;;;;GAmBL;GACD;EACD,QAAQ;GACN,KAAK;;;;;;;;;GASL;GACD;EACF;;AAaH,SAAgB,2BAA2B,OAAwC;AACjF,QAAO;EACL,KAAK;;;;;;;;;;;;;;;;;EAiBL,QAAQ;GACN,MAAM,qBAAqB;GAC3B,MAAM,qBAAqB;GAC3B,MAAM;GACN,MAAM,0BAA0B;GAChC,UAAU,MAAM,mBAAmB;GACnC,UAAU,MAAM,kBAAkB;GAClC,UAAU,MAAM,WAAW;GAC5B;EACF;;AAGH,SAAS,UAAU,OAAwB;AACzC,QAAO,KAAK,UAAU,SAAS,KAAK"}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
//#region src/core/migrations/statement-builders.d.ts
|
|
2
|
+
interface SqlStatement {
|
|
3
|
+
readonly sql: string;
|
|
4
|
+
readonly params: readonly unknown[];
|
|
5
|
+
}
|
|
6
|
+
declare const ensurePrismaContractSchemaStatement: SqlStatement;
|
|
7
|
+
declare const ensureMarkerTableStatement: SqlStatement;
|
|
8
|
+
declare const ensureLedgerTableStatement: SqlStatement;
|
|
9
|
+
interface WriteMarkerInput {
|
|
10
|
+
readonly storageHash: string;
|
|
11
|
+
readonly profileHash: string;
|
|
12
|
+
readonly contractJson?: unknown;
|
|
13
|
+
readonly canonicalVersion?: number | null;
|
|
14
|
+
readonly appTag?: string | null;
|
|
15
|
+
readonly meta?: Record<string, unknown>;
|
|
16
|
+
}
|
|
17
|
+
declare function buildWriteMarkerStatements(input: WriteMarkerInput): {
|
|
18
|
+
readonly insert: SqlStatement;
|
|
19
|
+
readonly update: SqlStatement;
|
|
20
|
+
};
|
|
21
|
+
//#endregion
|
|
22
|
+
export { type SqlStatement, buildWriteMarkerStatements, ensureLedgerTableStatement, ensureMarkerTableStatement, ensurePrismaContractSchemaStatement };
|
|
23
|
+
//# sourceMappingURL=statement-builders.d.mts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"statement-builders.d.mts","names":[],"sources":["../src/core/migrations/statement-builders.ts"],"sourcesContent":[],"mappings":";UAAiB,YAAA;EAAA,SAAA,GAAA,EAAA,MAAY;EAKhB,SAAA,MAAA,EAAA,SAAA,OAAA,EAAA;AAKb;AAca,cAnBA,mCAmB4B,EAnBS,YAgCjD;AAEgB,cA7BJ,0BAmCW,EAnCiB,YAmCjB;AAGR,cAxBH,0BAwB6B,EAxBD,YAwBC;AAAQ,UATjC,gBAAA,CASiC;EAC/B,SAAA,WAAA,EAAA,MAAA;EACA,SAAA,WAAA,EAAA,MAAA;EAAY,SAAA,YAAA,CAAA,EAAA,OAAA;;;kBALb;;iBAGF,0BAAA,QAAkC;mBAC/B;mBACA"}
|
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
import { a as ensurePrismaContractSchemaStatement, i as ensureMarkerTableStatement, n as buildWriteMarkerStatements, r as ensureLedgerTableStatement } from "./statement-builders-BPnmt6wx.mjs";
|
|
2
|
+
|
|
3
|
+
export { buildWriteMarkerStatements, ensureLedgerTableStatement, ensureMarkerTableStatement, ensurePrismaContractSchemaStatement };
|
|
@@ -0,0 +1,477 @@
|
|
|
1
|
+
import { i as quoteIdentifier, n as escapeLiteral, r as qualifyName } from "./sql-utils-r-Lw535w.mjs";
|
|
2
|
+
import { a as columnNullabilityCheck, c as qualifyTableName, d as toRegclassLiteral, n as columnDefaultExistsCheck, o as columnTypeCheck, r as columnExistsCheck, s as constraintExistsCheck } from "./planner-sql-checks-7jkgm9TX.mjs";
|
|
3
|
+
import { ifDefined } from "@prisma-next/utils/defined";
|
|
4
|
+
|
|
5
|
+
//#region src/core/migrations/operations/shared.ts
|
|
6
|
+
function step(description, sql) {
|
|
7
|
+
return {
|
|
8
|
+
description,
|
|
9
|
+
sql
|
|
10
|
+
};
|
|
11
|
+
}
|
|
12
|
+
function targetDetails(objectType, name, schema, table) {
|
|
13
|
+
return {
|
|
14
|
+
id: "postgres",
|
|
15
|
+
details: {
|
|
16
|
+
schema,
|
|
17
|
+
objectType,
|
|
18
|
+
name,
|
|
19
|
+
...ifDefined("table", table)
|
|
20
|
+
}
|
|
21
|
+
};
|
|
22
|
+
}
|
|
23
|
+
function renderColumnDefinition(column) {
|
|
24
|
+
return [
|
|
25
|
+
quoteIdentifier(column.name),
|
|
26
|
+
column.typeSql,
|
|
27
|
+
column.defaultSql,
|
|
28
|
+
column.nullable ? "" : "NOT NULL"
|
|
29
|
+
].filter(Boolean).join(" ");
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
//#endregion
|
|
33
|
+
//#region src/core/migrations/operations/columns.ts
|
|
34
|
+
function addColumn(schemaName, tableName, column) {
|
|
35
|
+
const addSql = [
|
|
36
|
+
`ALTER TABLE ${qualifyTableName(schemaName, tableName)}`,
|
|
37
|
+
`ADD COLUMN ${quoteIdentifier(column.name)} ${column.typeSql}`,
|
|
38
|
+
column.defaultSql,
|
|
39
|
+
column.nullable ? "" : "NOT NULL"
|
|
40
|
+
].filter(Boolean).join(" ");
|
|
41
|
+
return {
|
|
42
|
+
id: `column.${tableName}.${column.name}`,
|
|
43
|
+
label: `Add column "${column.name}" to "${tableName}"`,
|
|
44
|
+
operationClass: "additive",
|
|
45
|
+
target: targetDetails("column", column.name, schemaName, tableName),
|
|
46
|
+
precheck: [step(`ensure column "${column.name}" is missing`, columnExistsCheck({
|
|
47
|
+
schema: schemaName,
|
|
48
|
+
table: tableName,
|
|
49
|
+
column: column.name,
|
|
50
|
+
exists: false
|
|
51
|
+
}))],
|
|
52
|
+
execute: [step(`add column "${column.name}"`, addSql)],
|
|
53
|
+
postcheck: [step(`verify column "${column.name}" exists`, columnExistsCheck({
|
|
54
|
+
schema: schemaName,
|
|
55
|
+
table: tableName,
|
|
56
|
+
column: column.name
|
|
57
|
+
}))]
|
|
58
|
+
};
|
|
59
|
+
}
|
|
60
|
+
function dropColumn(schemaName, tableName, columnName) {
|
|
61
|
+
const qualified = qualifyTableName(schemaName, tableName);
|
|
62
|
+
return {
|
|
63
|
+
id: `dropColumn.${tableName}.${columnName}`,
|
|
64
|
+
label: `Drop column "${columnName}" from "${tableName}"`,
|
|
65
|
+
operationClass: "destructive",
|
|
66
|
+
target: targetDetails("column", columnName, schemaName, tableName),
|
|
67
|
+
precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck({
|
|
68
|
+
schema: schemaName,
|
|
69
|
+
table: tableName,
|
|
70
|
+
column: columnName
|
|
71
|
+
}))],
|
|
72
|
+
execute: [step(`drop column "${columnName}"`, `ALTER TABLE ${qualified} DROP COLUMN ${quoteIdentifier(columnName)}`)],
|
|
73
|
+
postcheck: [step(`verify column "${columnName}" does not exist`, columnExistsCheck({
|
|
74
|
+
schema: schemaName,
|
|
75
|
+
table: tableName,
|
|
76
|
+
column: columnName,
|
|
77
|
+
exists: false
|
|
78
|
+
}))]
|
|
79
|
+
};
|
|
80
|
+
}
|
|
81
|
+
/**
|
|
82
|
+
* `qualifiedTargetType` is the new column type as it appears in the
|
|
83
|
+
* `ALTER COLUMN TYPE` clause (schema-qualified for user-defined types, raw
|
|
84
|
+
* native name for built-ins). `formatTypeExpected` is the unqualified
|
|
85
|
+
* `format_type` form used in the postcheck. `rawTargetTypeForLabel` is the
|
|
86
|
+
* string appearing in the human-readable label (typically `toType` when
|
|
87
|
+
* explicit, else the column's native type).
|
|
88
|
+
*/
|
|
89
|
+
function alterColumnType(schemaName, tableName, columnName, options) {
|
|
90
|
+
const qualified = qualifyTableName(schemaName, tableName);
|
|
91
|
+
const usingClause = options.using ? ` USING ${options.using}` : ` USING ${quoteIdentifier(columnName)}::${options.qualifiedTargetType}`;
|
|
92
|
+
return {
|
|
93
|
+
id: `alterType.${tableName}.${columnName}`,
|
|
94
|
+
label: `Alter type of "${tableName}"."${columnName}" to ${options.rawTargetTypeForLabel}`,
|
|
95
|
+
operationClass: "destructive",
|
|
96
|
+
target: targetDetails("column", columnName, schemaName, tableName),
|
|
97
|
+
precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck({
|
|
98
|
+
schema: schemaName,
|
|
99
|
+
table: tableName,
|
|
100
|
+
column: columnName
|
|
101
|
+
}))],
|
|
102
|
+
execute: [step(`alter type of "${columnName}"`, `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} TYPE ${options.qualifiedTargetType}${usingClause}`)],
|
|
103
|
+
postcheck: [step(`verify column "${columnName}" has type "${options.formatTypeExpected}"`, columnTypeCheck({
|
|
104
|
+
schema: schemaName,
|
|
105
|
+
table: tableName,
|
|
106
|
+
column: columnName,
|
|
107
|
+
expectedType: options.formatTypeExpected
|
|
108
|
+
}))],
|
|
109
|
+
meta: { warning: "TABLE_REWRITE" }
|
|
110
|
+
};
|
|
111
|
+
}
|
|
112
|
+
function setNotNull(schemaName, tableName, columnName) {
|
|
113
|
+
const qualified = qualifyTableName(schemaName, tableName);
|
|
114
|
+
return {
|
|
115
|
+
id: `alterNullability.setNotNull.${tableName}.${columnName}`,
|
|
116
|
+
label: `Set NOT NULL on "${tableName}"."${columnName}"`,
|
|
117
|
+
operationClass: "destructive",
|
|
118
|
+
target: targetDetails("column", columnName, schemaName, tableName),
|
|
119
|
+
precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck({
|
|
120
|
+
schema: schemaName,
|
|
121
|
+
table: tableName,
|
|
122
|
+
column: columnName
|
|
123
|
+
})), step(`ensure no NULL values in "${columnName}"`, `SELECT NOT EXISTS (SELECT 1 FROM ${qualified} WHERE ${quoteIdentifier(columnName)} IS NULL)`)],
|
|
124
|
+
execute: [step(`set NOT NULL on "${columnName}"`, `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} SET NOT NULL`)],
|
|
125
|
+
postcheck: [step(`verify column "${columnName}" is NOT NULL`, columnNullabilityCheck({
|
|
126
|
+
schema: schemaName,
|
|
127
|
+
table: tableName,
|
|
128
|
+
column: columnName,
|
|
129
|
+
nullable: false
|
|
130
|
+
}))]
|
|
131
|
+
};
|
|
132
|
+
}
|
|
133
|
+
function dropNotNull(schemaName, tableName, columnName) {
|
|
134
|
+
const qualified = qualifyTableName(schemaName, tableName);
|
|
135
|
+
return {
|
|
136
|
+
id: `alterNullability.dropNotNull.${tableName}.${columnName}`,
|
|
137
|
+
label: `Drop NOT NULL on "${tableName}"."${columnName}"`,
|
|
138
|
+
operationClass: "widening",
|
|
139
|
+
target: targetDetails("column", columnName, schemaName, tableName),
|
|
140
|
+
precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck({
|
|
141
|
+
schema: schemaName,
|
|
142
|
+
table: tableName,
|
|
143
|
+
column: columnName
|
|
144
|
+
}))],
|
|
145
|
+
execute: [step(`drop NOT NULL on "${columnName}"`, `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} DROP NOT NULL`)],
|
|
146
|
+
postcheck: [step(`verify column "${columnName}" is nullable`, columnNullabilityCheck({
|
|
147
|
+
schema: schemaName,
|
|
148
|
+
table: tableName,
|
|
149
|
+
column: columnName,
|
|
150
|
+
nullable: true
|
|
151
|
+
}))]
|
|
152
|
+
};
|
|
153
|
+
}
|
|
154
|
+
/**
|
|
155
|
+
* `defaultSql` is the full `DEFAULT …` clause as produced by
|
|
156
|
+
* `buildColumnDefaultSql` — e.g. `"DEFAULT 42"`,
|
|
157
|
+
* `"DEFAULT (CURRENT_TIMESTAMP)"`, or `"DEFAULT nextval('seq'::regclass)"`.
|
|
158
|
+
*
|
|
159
|
+
* `operationClass` defaults to `'additive'` (setting a default on a column
|
|
160
|
+
* that currently has none). The reconciliation planner passes `'widening'`
|
|
161
|
+
* when the column already has a different default — policy enforcement
|
|
162
|
+
* treats that as a widening change rather than an additive one.
|
|
163
|
+
*/
|
|
164
|
+
function setDefault(schemaName, tableName, columnName, defaultSql, operationClass = "additive") {
|
|
165
|
+
const qualified = qualifyTableName(schemaName, tableName);
|
|
166
|
+
return {
|
|
167
|
+
id: `setDefault.${tableName}.${columnName}`,
|
|
168
|
+
label: `Set default on "${tableName}"."${columnName}"`,
|
|
169
|
+
operationClass,
|
|
170
|
+
target: targetDetails("column", columnName, schemaName, tableName),
|
|
171
|
+
precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck({
|
|
172
|
+
schema: schemaName,
|
|
173
|
+
table: tableName,
|
|
174
|
+
column: columnName
|
|
175
|
+
}))],
|
|
176
|
+
execute: [step(`set default on "${columnName}"`, `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} SET ${defaultSql}`)],
|
|
177
|
+
postcheck: [step(`verify column "${columnName}" has a default`, columnDefaultExistsCheck({
|
|
178
|
+
schema: schemaName,
|
|
179
|
+
table: tableName,
|
|
180
|
+
column: columnName,
|
|
181
|
+
exists: true
|
|
182
|
+
}))]
|
|
183
|
+
};
|
|
184
|
+
}
|
|
185
|
+
function dropDefault(schemaName, tableName, columnName) {
|
|
186
|
+
const qualified = qualifyTableName(schemaName, tableName);
|
|
187
|
+
return {
|
|
188
|
+
id: `dropDefault.${tableName}.${columnName}`,
|
|
189
|
+
label: `Drop default on "${tableName}"."${columnName}"`,
|
|
190
|
+
operationClass: "destructive",
|
|
191
|
+
target: targetDetails("column", columnName, schemaName, tableName),
|
|
192
|
+
precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck({
|
|
193
|
+
schema: schemaName,
|
|
194
|
+
table: tableName,
|
|
195
|
+
column: columnName
|
|
196
|
+
}))],
|
|
197
|
+
execute: [step(`drop default on "${columnName}"`, `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} DROP DEFAULT`)],
|
|
198
|
+
postcheck: [step(`verify column "${columnName}" has no default`, columnDefaultExistsCheck({
|
|
199
|
+
schema: schemaName,
|
|
200
|
+
table: tableName,
|
|
201
|
+
column: columnName,
|
|
202
|
+
exists: false
|
|
203
|
+
}))]
|
|
204
|
+
};
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
//#endregion
|
|
208
|
+
//#region src/core/migrations/operations/constraints.ts
|
|
209
|
+
const REFERENTIAL_ACTION_SQL = {
|
|
210
|
+
noAction: "NO ACTION",
|
|
211
|
+
restrict: "RESTRICT",
|
|
212
|
+
cascade: "CASCADE",
|
|
213
|
+
setNull: "SET NULL",
|
|
214
|
+
setDefault: "SET DEFAULT"
|
|
215
|
+
};
|
|
216
|
+
function renderForeignKeySql(schemaName, tableName, fk) {
|
|
217
|
+
let sql = `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
|
|
218
|
+
ADD CONSTRAINT ${quoteIdentifier(fk.name)}
|
|
219
|
+
FOREIGN KEY (${fk.columns.map(quoteIdentifier).join(", ")})
|
|
220
|
+
REFERENCES ${qualifyTableName(schemaName, fk.references.table)} (${fk.references.columns.map(quoteIdentifier).join(", ")})`;
|
|
221
|
+
if (fk.onDelete !== void 0) {
|
|
222
|
+
const action = REFERENTIAL_ACTION_SQL[fk.onDelete];
|
|
223
|
+
if (!action) throw new Error(`Unknown referential action for onDelete: ${String(fk.onDelete)}`);
|
|
224
|
+
sql += `\nON DELETE ${action}`;
|
|
225
|
+
}
|
|
226
|
+
if (fk.onUpdate !== void 0) {
|
|
227
|
+
const action = REFERENTIAL_ACTION_SQL[fk.onUpdate];
|
|
228
|
+
if (!action) throw new Error(`Unknown referential action for onUpdate: ${String(fk.onUpdate)}`);
|
|
229
|
+
sql += `\nON UPDATE ${action}`;
|
|
230
|
+
}
|
|
231
|
+
return sql;
|
|
232
|
+
}
|
|
233
|
+
/**
 * Build the additive operation that adds a PRIMARY KEY constraint.
 * Precheck asserts the constraint is absent; postcheck asserts it now exists.
 */
function addPrimaryKey(schemaName, tableName, constraintName, columns) {
	const table = qualifyTableName(schemaName, tableName);
	const keyColumns = columns.map(quoteIdentifier).join(", ");
	const absent = constraintExistsCheck({
		constraintName,
		schema: schemaName,
		table: tableName,
		exists: false
	});
	const present = constraintExistsCheck({
		constraintName,
		schema: schemaName,
		table: tableName
	});
	const addSql = `ALTER TABLE ${table} ADD CONSTRAINT ${quoteIdentifier(constraintName)} PRIMARY KEY (${keyColumns})`;
	return {
		id: `primaryKey.${tableName}.${constraintName}`,
		label: `Add primary key on "${tableName}"`,
		operationClass: "additive",
		target: targetDetails("primaryKey", constraintName, schemaName, tableName),
		precheck: [step(`ensure primary key "${constraintName}" does not exist`, absent)],
		execute: [step(`add primary key "${constraintName}"`, addSql)],
		postcheck: [step(`verify primary key "${constraintName}" exists`, present)]
	};
}
/**
 * Build the additive operation that adds a UNIQUE constraint over `columns`.
 */
function addUnique(schemaName, tableName, constraintName, columns) {
	const table = qualifyTableName(schemaName, tableName);
	const uniqueColumns = columns.map(quoteIdentifier).join(", ");
	const absent = constraintExistsCheck({
		constraintName,
		schema: schemaName,
		table: tableName,
		exists: false
	});
	const present = constraintExistsCheck({
		constraintName,
		schema: schemaName,
		table: tableName
	});
	const addSql = `ALTER TABLE ${table} ADD CONSTRAINT ${quoteIdentifier(constraintName)} UNIQUE (${uniqueColumns})`;
	return {
		id: `unique.${tableName}.${constraintName}`,
		label: `Add unique constraint on "${tableName}" (${columns.join(", ")})`,
		operationClass: "additive",
		target: targetDetails("unique", constraintName, schemaName, tableName),
		precheck: [step(`ensure constraint "${constraintName}" does not exist`, absent)],
		execute: [step(`add unique constraint "${constraintName}"`, addSql)],
		postcheck: [step(`verify constraint "${constraintName}" exists`, present)]
	};
}
/**
 * Build the additive operation that adds the foreign key described by `fk`.
 * SQL rendering (including ON DELETE / ON UPDATE) delegates to renderForeignKeySql.
 */
function addForeignKey(schemaName, tableName, fk) {
	const absent = constraintExistsCheck({
		constraintName: fk.name,
		schema: schemaName,
		table: tableName,
		exists: false
	});
	const present = constraintExistsCheck({
		constraintName: fk.name,
		schema: schemaName,
		table: tableName
	});
	return {
		id: `foreignKey.${tableName}.${fk.name}`,
		label: `Add foreign key "${fk.name}" on "${tableName}"`,
		operationClass: "additive",
		target: targetDetails("foreignKey", fk.name, schemaName, tableName),
		precheck: [step(`ensure FK "${fk.name}" does not exist`, absent)],
		execute: [step(`add FK "${fk.name}"`, renderForeignKeySql(schemaName, tableName, fk))],
		postcheck: [step(`verify FK "${fk.name}" exists`, present)]
	};
}
/**
 * Build the destructive operation that drops a table constraint.
 *
 * `kind` feeds the operation's `target.details.objectType`. The
 * descriptor-flow drop-constraint descriptor carries no kind information,
 * hence the `'unique'` default; the reconciliation planner supplies the
 * accurate kind (`'foreignKey'`, `'primaryKey'`, or `'unique'`) from the
 * `SchemaIssue` that triggered the drop.
 */
function dropConstraint(schemaName, tableName, constraintName, kind = "unique") {
	const table = qualifyTableName(schemaName, tableName);
	const present = constraintExistsCheck({
		constraintName,
		schema: schemaName,
		table: tableName
	});
	const absent = constraintExistsCheck({
		constraintName,
		schema: schemaName,
		table: tableName,
		exists: false
	});
	const dropSql = `ALTER TABLE ${table} DROP CONSTRAINT ${quoteIdentifier(constraintName)}`;
	return {
		id: `dropConstraint.${tableName}.${constraintName}`,
		label: `Drop constraint "${constraintName}" on "${tableName}"`,
		operationClass: "destructive",
		target: targetDetails(kind, constraintName, schemaName, tableName),
		precheck: [step(`ensure constraint "${constraintName}" exists`, present)],
		execute: [step(`drop constraint "${constraintName}"`, dropSql)],
		postcheck: [step(`verify constraint "${constraintName}" does not exist`, absent)]
	};
}
//#endregion
//#region src/core/migrations/operations/dependencies.ts
/**
 * Build the additive operation that installs a Postgres extension.
 * The SQL itself is idempotent (IF NOT EXISTS), so no pre/postchecks are used.
 */
function createExtension(extensionName) {
	const description = `Create extension "${extensionName}"`;
	const sql = `CREATE EXTENSION IF NOT EXISTS ${quoteIdentifier(extensionName)}`;
	return {
		id: `extension.${extensionName}`,
		label: description,
		operationClass: "additive",
		target: { id: "postgres" },
		precheck: [],
		execute: [step(description, sql)],
		postcheck: []
	};
}
|
|
339
|
+
/**
 * Build the additive operation that creates a schema.
 * Idempotent via IF NOT EXISTS, so pre/postchecks are intentionally empty.
 */
function createSchema(schemaName) {
	const description = `Create schema "${schemaName}"`;
	const sql = `CREATE SCHEMA IF NOT EXISTS ${quoteIdentifier(schemaName)}`;
	return {
		id: `schema.${schemaName}`,
		label: description,
		operationClass: "additive",
		target: { id: "postgres" },
		precheck: [],
		execute: [step(description, sql)],
		postcheck: []
	};
}
|
|
350
|
+
|
|
351
|
+
//#endregion
//#region src/core/migrations/operations/enums.ts
/**
 * Render a boolean SQL probe for the presence (or, with `exists = false`,
 * absence) of a type named `nativeType` in `schemaName`, via pg_type/pg_namespace.
 */
function enumTypeExistsCheck(schemaName, nativeType, exists = true) {
	const predicate = exists ? "EXISTS" : "NOT EXISTS";
	return `SELECT ${predicate} (
SELECT 1
FROM pg_type t
JOIN pg_namespace n ON t.typnamespace = n.oid
WHERE n.nspname = '${escapeLiteral(schemaName)}'
AND t.typname = '${escapeLiteral(nativeType)}'
)`;
}
|
|
362
|
+
/**
 * Build the additive operation that creates a Postgres enum type with the
 * given member values (escaped as SQL string literals).
 */
function createEnumType(schemaName, typeName, values) {
	const enumType = qualifyName(schemaName, typeName);
	const members = values.map((v) => `'${escapeLiteral(v)}'`).join(", ");
	const createSql = `CREATE TYPE ${enumType} AS ENUM (${members})`;
	return {
		id: `type.${typeName}`,
		label: `Create enum type "${typeName}"`,
		operationClass: "additive",
		target: targetDetails("type", typeName, schemaName),
		precheck: [step(`ensure type "${typeName}" does not exist`, enumTypeExistsCheck(schemaName, typeName, false))],
		execute: [step(`create enum type "${typeName}"`, createSql)],
		postcheck: [step(`verify type "${typeName}" exists`, enumTypeExistsCheck(schemaName, typeName))]
	};
}
|
|
375
|
+
/**
 * Build the additive operation that appends values to an existing enum type.
 *
 * `typeName` is the contract-facing type name (used for id/label);
 * `nativeType` is the Postgres type name to mutate (may differ for external types).
 * One execute step is emitted per value, in input order.
 *
 * NOTE(review): `ALTER TYPE ... ADD VALUE` cannot run inside a transaction
 * block on Postgres < 12 — presumably the executor runs these steps
 * autocommit; confirm before wrapping operations in a transaction.
 */
function addEnumValues(schemaName, typeName, nativeType, values) {
	const enumType = qualifyName(schemaName, nativeType);
	const addValueSteps = values.map((value) => step(`add value '${value}' to enum "${nativeType}"`, `ALTER TYPE ${enumType} ADD VALUE '${escapeLiteral(value)}'`));
	return {
		id: `type.${typeName}.addValues`,
		label: `Add values to enum type "${typeName}": ${values.join(", ")}`,
		operationClass: "additive",
		target: targetDetails("type", typeName, schemaName),
		precheck: [step(`ensure type "${nativeType}" exists`, enumTypeExistsCheck(schemaName, nativeType))],
		execute: addValueSteps,
		postcheck: [step(`verify type "${nativeType}" exists`, enumTypeExistsCheck(schemaName, nativeType))]
	};
}
|
|
391
|
+
/**
 * Build the destructive operation that drops an enum type.
 */
function dropEnumType(schemaName, typeName) {
	const enumType = qualifyName(schemaName, typeName);
	return {
		id: `type.${typeName}.drop`,
		label: `Drop enum type "${typeName}"`,
		operationClass: "destructive",
		target: targetDetails("type", typeName, schemaName),
		precheck: [step(`ensure type "${typeName}" exists`, enumTypeExistsCheck(schemaName, typeName))],
		execute: [step(`drop enum type "${typeName}"`, `DROP TYPE ${enumType}`)],
		postcheck: [step(`verify type "${typeName}" removed`, enumTypeExistsCheck(schemaName, typeName, false))]
	};
}
|
|
403
|
+
/**
 * Build the operation that renames a type from `fromName` to `toName`.
 * Classified destructive because the old name disappears for any dependents.
 */
function renameType(schemaName, fromName, toName) {
	const source = qualifyName(schemaName, fromName);
	const sourceExists = step(`ensure type "${fromName}" exists`, enumTypeExistsCheck(schemaName, fromName));
	const targetFree = step(`ensure type "${toName}" does not already exist`, enumTypeExistsCheck(schemaName, toName, false));
	return {
		id: `type.${fromName}.rename`,
		label: `Rename type "${fromName}" to "${toName}"`,
		operationClass: "destructive",
		target: targetDetails("type", fromName, schemaName),
		precheck: [sourceExists, targetFree],
		execute: [step(`rename type "${fromName}" to "${toName}"`, `ALTER TYPE ${source} RENAME TO ${quoteIdentifier(toName)}`)],
		postcheck: [step(`verify type "${toName}" exists`, enumTypeExistsCheck(schemaName, toName))]
	};
}
|
|
415
|
+
|
|
416
|
+
//#endregion
//#region src/core/migrations/operations/indexes.ts
/**
 * Build the additive operation that creates a plain b-tree index on `columns`.
 * Existence checks use to_regclass, which resolves to NULL when absent.
 */
function createIndex(schemaName, tableName, indexName, columns) {
	const table = qualifyTableName(schemaName, tableName);
	const indexedColumns = columns.map(quoteIdentifier).join(", ");
	const regclass = toRegclassLiteral(schemaName, indexName);
	return {
		id: `index.${tableName}.${indexName}`,
		label: `Create index "${indexName}" on "${tableName}"`,
		operationClass: "additive",
		target: targetDetails("index", indexName, schemaName, tableName),
		precheck: [step(`ensure index "${indexName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)],
		execute: [step(`create index "${indexName}"`, `CREATE INDEX ${quoteIdentifier(indexName)} ON ${table} (${indexedColumns})`)],
		postcheck: [step(`verify index "${indexName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)]
	};
}
|
|
431
|
+
/**
 * Build the destructive operation that drops an index.
 * DROP INDEX takes the schema-qualified index name, so qualifyTableName is
 * reused here for qualification despite its table-oriented name.
 */
function dropIndex(schemaName, tableName, indexName) {
	const regclass = toRegclassLiteral(schemaName, indexName);
	return {
		id: `dropIndex.${tableName}.${indexName}`,
		label: `Drop index "${indexName}"`,
		operationClass: "destructive",
		target: targetDetails("index", indexName, schemaName, tableName),
		precheck: [step(`ensure index "${indexName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)],
		execute: [step(`drop index "${indexName}"`, `DROP INDEX ${qualifyTableName(schemaName, indexName)}`)],
		postcheck: [step(`verify index "${indexName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)]
	};
}
|
|
442
|
+
|
|
443
|
+
//#endregion
//#region src/core/migrations/operations/tables.ts
/**
 * Build the additive operation that creates a table from column descriptors,
 * optionally with an inline table-level PRIMARY KEY constraint.
 */
function createTable(schemaName, tableName, columns, primaryKey) {
	const table = qualifyTableName(schemaName, tableName);
	// Column definitions first, then table-level constraints, in declaration order.
	const definitions = columns.map(renderColumnDefinition);
	if (primaryKey) definitions.push(`PRIMARY KEY (${primaryKey.columns.map(quoteIdentifier).join(", ")})`);
	const createSql = `CREATE TABLE ${table} (\n ${definitions.join(",\n ")}\n)`;
	const regclass = toRegclassLiteral(schemaName, tableName);
	return {
		id: `table.${tableName}`,
		label: `Create table "${tableName}"`,
		summary: `Creates table "${tableName}"`,
		operationClass: "additive",
		target: targetDetails("table", tableName, schemaName),
		precheck: [step(`ensure table "${tableName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)],
		execute: [step(`create table "${tableName}"`, createSql)],
		postcheck: [step(`verify table "${tableName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)]
	};
}
|
|
462
|
+
/**
 * Build the destructive operation that drops a table.
 */
function dropTable(schemaName, tableName) {
	const table = qualifyTableName(schemaName, tableName);
	const regclass = toRegclassLiteral(schemaName, tableName);
	return {
		id: `dropTable.${tableName}`,
		label: `Drop table "${tableName}"`,
		operationClass: "destructive",
		target: targetDetails("table", tableName, schemaName),
		precheck: [step(`ensure table "${tableName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)],
		execute: [step(`drop table "${tableName}"`, `DROP TABLE ${table}`)],
		postcheck: [step(`verify table "${tableName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)]
	};
}
|
|
474
|
+
|
|
475
|
+
//#endregion
// Bundler-generated export map: single-letter aliases are the chunk-internal
// names re-exported by the package entry points; do not edit by hand.
export { dropColumn as _, addEnumValues as a, setDefault as b, renameType as c, addForeignKey as d, addPrimaryKey as f, alterColumnType as g, addColumn as h, dropIndex as i, createExtension as l, dropConstraint as m, dropTable as n, createEnumType as o, addUnique as p, createIndex as r, dropEnumType as s, createTable as t, createSchema as u, dropDefault as v, setNotNull as x, dropNotNull as y };
//# sourceMappingURL=tables-BmdW_FWO.mjs.map
|