@prisma-next/target-postgres 0.4.1 → 0.4.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (179)
  1. package/dist/codec-ids-CojIXVf9.mjs +29 -0
  2. package/dist/codec-ids-CojIXVf9.mjs.map +1 -0
  3. package/dist/codec-ids.d.mts +28 -0
  4. package/dist/codec-ids.d.mts.map +1 -0
  5. package/dist/codec-ids.mjs +3 -0
  6. package/dist/codec-types.d.mts +42 -0
  7. package/dist/codec-types.d.mts.map +1 -0
  8. package/dist/codec-types.mjs +3 -0
  9. package/dist/codecs-CE5EUsNM.d.mts +323 -0
  10. package/dist/codecs-CE5EUsNM.d.mts.map +1 -0
  11. package/dist/codecs-dzZ_dMpK.mjs +290 -0
  12. package/dist/codecs-dzZ_dMpK.mjs.map +1 -0
  13. package/dist/codecs.d.mts +2 -0
  14. package/dist/codecs.mjs +3 -0
  15. package/dist/control.d.mts +1 -1
  16. package/dist/control.mjs +24 -1989
  17. package/dist/control.mjs.map +1 -1
  18. package/dist/data-transform-C83dy0vk.mjs +41 -0
  19. package/dist/data-transform-C83dy0vk.mjs.map +1 -0
  20. package/dist/data-transform-D8x5m1YV.d.mts +38 -0
  21. package/dist/data-transform-D8x5m1YV.d.mts.map +1 -0
  22. package/dist/data-transform.d.mts +2 -0
  23. package/dist/data-transform.mjs +3 -0
  24. package/dist/default-normalizer-DNOpRoOF.mjs +131 -0
  25. package/dist/default-normalizer-DNOpRoOF.mjs.map +1 -0
  26. package/dist/default-normalizer.d.mts +19 -0
  27. package/dist/default-normalizer.d.mts.map +1 -0
  28. package/dist/default-normalizer.mjs +3 -0
  29. package/dist/{descriptor-meta-DkvCmY98.mjs → descriptor-meta-BVoVtyp-.mjs} +1 -1
  30. package/dist/{descriptor-meta-DkvCmY98.mjs.map → descriptor-meta-BVoVtyp-.mjs.map} +1 -1
  31. package/dist/errors-AFvEPZ1R.mjs +34 -0
  32. package/dist/errors-AFvEPZ1R.mjs.map +1 -0
  33. package/dist/errors.d.mts +27 -0
  34. package/dist/errors.d.mts.map +1 -0
  35. package/dist/errors.mjs +3 -0
  36. package/dist/issue-planner-CFjB0_oO.mjs +879 -0
  37. package/dist/issue-planner-CFjB0_oO.mjs.map +1 -0
  38. package/dist/issue-planner.d.mts +85 -0
  39. package/dist/issue-planner.d.mts.map +1 -0
  40. package/dist/issue-planner.mjs +3 -0
  41. package/dist/migration.d.mts +5 -79
  42. package/dist/migration.d.mts.map +1 -1
  43. package/dist/migration.mjs +6 -428
  44. package/dist/migration.mjs.map +1 -1
  45. package/dist/native-type-normalizer-CInai_oY.mjs +38 -0
  46. package/dist/native-type-normalizer-CInai_oY.mjs.map +1 -0
  47. package/dist/native-type-normalizer.d.mts +18 -0
  48. package/dist/native-type-normalizer.d.mts.map +1 -0
  49. package/dist/native-type-normalizer.mjs +3 -0
  50. package/dist/op-factory-call-BKlruaiC.mjs +605 -0
  51. package/dist/op-factory-call-BKlruaiC.mjs.map +1 -0
  52. package/dist/op-factory-call-C3bWXKSP.d.mts +304 -0
  53. package/dist/op-factory-call-C3bWXKSP.d.mts.map +1 -0
  54. package/dist/op-factory-call.d.mts +3 -0
  55. package/dist/op-factory-call.mjs +3 -0
  56. package/dist/pack.d.mts +1 -1
  57. package/dist/pack.mjs +1 -1
  58. package/dist/planner-B4ZSLHRI.mjs +98 -0
  59. package/dist/planner-B4ZSLHRI.mjs.map +1 -0
  60. package/dist/planner-ddl-builders-Dxvw1LHw.mjs +132 -0
  61. package/dist/planner-ddl-builders-Dxvw1LHw.mjs.map +1 -0
  62. package/dist/planner-ddl-builders.d.mts +22 -0
  63. package/dist/planner-ddl-builders.d.mts.map +1 -0
  64. package/dist/planner-ddl-builders.mjs +3 -0
  65. package/dist/planner-identity-values-Dju-o5GF.mjs +91 -0
  66. package/dist/planner-identity-values-Dju-o5GF.mjs.map +1 -0
  67. package/dist/planner-identity-values.d.mts +20 -0
  68. package/dist/planner-identity-values.d.mts.map +1 -0
  69. package/dist/planner-identity-values.mjs +3 -0
  70. package/dist/planner-produced-postgres-migration-C0GNhHGw.mjs +32 -0
  71. package/dist/planner-produced-postgres-migration-C0GNhHGw.mjs.map +1 -0
  72. package/dist/planner-produced-postgres-migration-Dw_mPMKt.d.mts +20 -0
  73. package/dist/planner-produced-postgres-migration-Dw_mPMKt.d.mts.map +1 -0
  74. package/dist/planner-produced-postgres-migration.d.mts +5 -0
  75. package/dist/planner-produced-postgres-migration.mjs +3 -0
  76. package/dist/planner-schema-lookup-B7lkypwn.mjs +29 -0
  77. package/dist/planner-schema-lookup-B7lkypwn.mjs.map +1 -0
  78. package/dist/planner-schema-lookup.d.mts +22 -0
  79. package/dist/planner-schema-lookup.d.mts.map +1 -0
  80. package/dist/planner-schema-lookup.mjs +3 -0
  81. package/dist/planner-sql-checks-7jkgm9TX.mjs +241 -0
  82. package/dist/planner-sql-checks-7jkgm9TX.mjs.map +1 -0
  83. package/dist/planner-sql-checks.d.mts +55 -0
  84. package/dist/planner-sql-checks.d.mts.map +1 -0
  85. package/dist/planner-sql-checks.mjs +3 -0
  86. package/dist/{planner-target-details-MXb3oeul.d.mts → planner-target-details-DH-azLu-.d.mts} +1 -1
  87. package/dist/{planner-target-details-MXb3oeul.d.mts.map → planner-target-details-DH-azLu-.d.mts.map} +1 -1
  88. package/dist/planner-target-details.d.mts +2 -0
  89. package/dist/planner-target-details.mjs +1 -0
  90. package/dist/planner.d.mts +74 -0
  91. package/dist/planner.d.mts.map +1 -0
  92. package/dist/planner.mjs +4 -0
  93. package/dist/postgres-migration-DcfWGqhe.d.mts +50 -0
  94. package/dist/postgres-migration-DcfWGqhe.d.mts.map +1 -0
  95. package/dist/postgres-migration-EGSlO4jO.mjs +52 -0
  96. package/dist/postgres-migration-EGSlO4jO.mjs.map +1 -0
  97. package/dist/render-ops-D6_DHdOK.mjs +8 -0
  98. package/dist/render-ops-D6_DHdOK.mjs.map +1 -0
  99. package/dist/render-ops.d.mts +11 -0
  100. package/dist/render-ops.d.mts.map +1 -0
  101. package/dist/render-ops.mjs +3 -0
  102. package/dist/render-typescript-Co3Emwgz.mjs +84 -0
  103. package/dist/render-typescript-Co3Emwgz.mjs.map +1 -0
  104. package/dist/render-typescript.d.mts +14 -0
  105. package/dist/render-typescript.d.mts.map +1 -0
  106. package/dist/render-typescript.mjs +3 -0
  107. package/dist/runtime.d.mts +15 -3
  108. package/dist/runtime.d.mts.map +1 -1
  109. package/dist/runtime.mjs +10 -1
  110. package/dist/runtime.mjs.map +1 -1
  111. package/dist/shared-Bxkt8pNO.d.mts +41 -0
  112. package/dist/shared-Bxkt8pNO.d.mts.map +1 -0
  113. package/dist/sql-utils-r-Lw535w.mjs +76 -0
  114. package/dist/sql-utils-r-Lw535w.mjs.map +1 -0
  115. package/dist/sql-utils.d.mts +59 -0
  116. package/dist/sql-utils.d.mts.map +1 -0
  117. package/dist/sql-utils.mjs +3 -0
  118. package/dist/statement-builders-CHqCtSfe.mjs +121 -0
  119. package/dist/statement-builders-CHqCtSfe.mjs.map +1 -0
  120. package/dist/statement-builders.d.mts +30 -0
  121. package/dist/statement-builders.d.mts.map +1 -0
  122. package/dist/statement-builders.mjs +3 -0
  123. package/dist/tables-BmdW_FWO.mjs +477 -0
  124. package/dist/tables-BmdW_FWO.mjs.map +1 -0
  125. package/dist/types-ClK03Ojd.d.mts +10 -0
  126. package/dist/types-ClK03Ojd.d.mts.map +1 -0
  127. package/dist/types.d.mts +2 -0
  128. package/dist/types.mjs +1 -0
  129. package/package.json +40 -20
  130. package/src/core/codec-ids.ts +30 -0
  131. package/src/core/codecs.ts +622 -0
  132. package/src/core/default-normalizer.ts +131 -0
  133. package/src/core/descriptor-meta.ts +1 -1
  134. package/src/core/errors.ts +33 -0
  135. package/src/core/migrations/op-factory-call.ts +1 -5
  136. package/src/core/migrations/operations/columns.ts +1 -1
  137. package/src/core/migrations/operations/constraints.ts +1 -1
  138. package/src/core/migrations/operations/data-transform.ts +35 -21
  139. package/src/core/migrations/operations/dependencies.ts +1 -1
  140. package/src/core/migrations/operations/enums.ts +1 -1
  141. package/src/core/migrations/operations/indexes.ts +1 -1
  142. package/src/core/migrations/operations/shared.ts +1 -1
  143. package/src/core/migrations/operations/tables.ts +1 -1
  144. package/src/core/migrations/planner-ddl-builders.ts +1 -1
  145. package/src/core/migrations/planner-produced-postgres-migration.ts +0 -1
  146. package/src/core/migrations/planner-recipes.ts +1 -1
  147. package/src/core/migrations/planner-sql-checks.ts +1 -1
  148. package/src/core/migrations/planner.ts +19 -15
  149. package/src/core/migrations/postgres-migration.ts +54 -1
  150. package/src/core/migrations/render-typescript.ts +23 -17
  151. package/src/core/migrations/runner.ts +47 -13
  152. package/src/core/migrations/statement-builders.ts +22 -6
  153. package/src/core/native-type-normalizer.ts +49 -0
  154. package/src/core/sql-utils.ts +104 -0
  155. package/src/exports/codec-ids.ts +1 -0
  156. package/src/exports/codec-types.ts +51 -0
  157. package/src/exports/codecs.ts +2 -0
  158. package/src/exports/data-transform.ts +1 -0
  159. package/src/exports/default-normalizer.ts +1 -0
  160. package/src/exports/errors.ts +1 -0
  161. package/src/exports/issue-planner.ts +1 -0
  162. package/src/exports/migration.ts +6 -0
  163. package/src/exports/native-type-normalizer.ts +1 -0
  164. package/src/exports/op-factory-call.ts +25 -0
  165. package/src/exports/planner-ddl-builders.ts +8 -0
  166. package/src/exports/planner-identity-values.ts +1 -0
  167. package/src/exports/planner-produced-postgres-migration.ts +1 -0
  168. package/src/exports/planner-schema-lookup.ts +6 -0
  169. package/src/exports/planner-sql-checks.ts +11 -0
  170. package/src/exports/planner-target-details.ts +1 -0
  171. package/src/exports/planner.ts +1 -0
  172. package/src/exports/render-ops.ts +1 -0
  173. package/src/exports/render-typescript.ts +1 -0
  174. package/src/exports/runtime.ts +19 -4
  175. package/src/exports/sql-utils.ts +7 -0
  176. package/src/exports/statement-builders.ts +7 -0
  177. package/src/exports/types.ts +1 -0
  178. package/dist/postgres-migration-BsHJHV9O.mjs +0 -2793
  179. package/dist/postgres-migration-BsHJHV9O.mjs.map +0 -1
@@ -0,0 +1,121 @@
1
//#region src/core/migrations/statement-builders.ts
// Idempotent bootstrap DDL for the `prisma_contract` bookkeeping schema.
const ensurePrismaContractSchemaStatement = {
  sql: "create schema if not exists prisma_contract",
  params: []
};
// Marker table: `id` is a smallint primary key defaulting to 1, so the table
// holds at most one row recording the currently-applied contract hashes and
// associated metadata (app tag, meta JSON, merged invariants).
const ensureMarkerTableStatement = {
  sql: `create table if not exists prisma_contract.marker (
id smallint primary key default 1,
core_hash text not null,
profile_hash text not null,
contract_json jsonb,
canonical_version int,
updated_at timestamptz not null default now(),
app_tag text,
meta jsonb not null default '{}',
invariants text[] not null default '{}'
)`,
  params: []
};
// Ledger table: append-only history of contract transitions — origin and
// destination hashes (origin columns are nullable), before/after contract
// JSON snapshots, and the operations that were executed.
const ensureLedgerTableStatement = {
  sql: `create table if not exists prisma_contract.ledger (
id bigserial primary key,
created_at timestamptz not null default now(),
origin_core_hash text,
origin_profile_hash text,
destination_core_hash text not null,
destination_profile_hash text,
contract_json_before jsonb,
contract_json_after jsonb,
operations jsonb not null
)`,
  params: []
};
34
/**
 * Build the INSERT and UPDATE statements used to upsert the single
 * `prisma_contract.marker` row. Both statements are built over the same
 * positional parameter array ($1..$8), so callers can run either one with
 * the shared `params` returned below.
 */
function buildMergeMarkerStatements(input) {
  // $1 = fixed marker id (the table holds one row with id 1).
  const params = [
    1,
    input.storageHash,
    input.profileHash,
    jsonParam(input.contractJson),
    input.canonicalVersion ?? null,
    input.appTag ?? null,
    jsonParam(input.meta ?? {}),
    input.invariants
  ];
  return {
    insert: {
      sql: `insert into prisma_contract.marker (
id,
core_hash,
profile_hash,
contract_json,
canonical_version,
updated_at,
app_tag,
meta,
invariants
) values (
$1,
$2,
$3,
$4::jsonb,
$5,
now(),
$6,
$7::jsonb,
$8::text[]
)`,
      params
    },
    update: {
      // `invariants = array(select distinct unnest(invariants || $8::text[]) order by 1)`
      // reads the current column under the UPDATE's row lock, unions it with
      // the incoming array, dedupes, and sorts ascending — one atomic
      // statement, no read-then-write window.
      sql: `update prisma_contract.marker set
core_hash = $2,
profile_hash = $3,
contract_json = $4::jsonb,
canonical_version = $5,
updated_at = now(),
app_tag = $6,
meta = $7::jsonb,
invariants = array(select distinct unnest(invariants || $8::text[]) order by 1)
where id = $1`,
      params
    }
  };
}
85
/**
 * Build the append-only INSERT recording one contract transition in
 * `prisma_contract.ledger`. Origin fields are nullable (the first-ever
 * migration has no origin); JSON payloads are serialized client-side via
 * `jsonParam` and cast server-side with `::jsonb`.
 */
function buildLedgerInsertStatement(input) {
  return {
    sql: `insert into prisma_contract.ledger (
origin_core_hash,
origin_profile_hash,
destination_core_hash,
destination_profile_hash,
contract_json_before,
contract_json_after,
operations
) values (
$1,
$2,
$3,
$4,
$5::jsonb,
$6::jsonb,
$7::jsonb
)`,
    params: [
      input.originStorageHash ?? null,
      input.originProfileHash ?? null,
      input.destinationStorageHash,
      input.destinationProfileHash ?? null,
      jsonParam(input.contractJsonBefore),
      jsonParam(input.contractJsonAfter),
      jsonParam(input.operations)
    ]
  };
}
115
/**
 * Serialize an arbitrary value for a `::jsonb` SQL parameter.
 * `undefined` and `null` both become the JSON text "null" so the parameter
 * is never the string "undefined" (which postgres would reject).
 */
function jsonParam(value) {
  const normalized = value ?? null;
  return JSON.stringify(normalized);
}
118

//#endregion
// Bundler-minified export aliases; ./statement-builders.mjs re-exposes them
// under their original names.
export { ensurePrismaContractSchemaStatement as a, ensureMarkerTableStatement as i, buildMergeMarkerStatements as n, ensureLedgerTableStatement as r, buildLedgerInsertStatement as t };
//# sourceMappingURL=statement-builders-CHqCtSfe.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"statement-builders-CHqCtSfe.mjs","names":["ensurePrismaContractSchemaStatement: SqlStatement","ensureMarkerTableStatement: SqlStatement","ensureLedgerTableStatement: SqlStatement","params: readonly unknown[]"],"sources":["../src/core/migrations/statement-builders.ts"],"sourcesContent":["export interface SqlStatement {\n readonly sql: string;\n readonly params: readonly unknown[];\n}\n\nexport const ensurePrismaContractSchemaStatement: SqlStatement = {\n sql: 'create schema if not exists prisma_contract',\n params: [],\n};\n\nexport const ensureMarkerTableStatement: SqlStatement = {\n sql: `create table if not exists prisma_contract.marker (\n id smallint primary key default 1,\n core_hash text not null,\n profile_hash text not null,\n contract_json jsonb,\n canonical_version int,\n updated_at timestamptz not null default now(),\n app_tag text,\n meta jsonb not null default '{}',\n invariants text[] not null default '{}'\n )`,\n params: [],\n};\n\nexport const ensureLedgerTableStatement: SqlStatement = {\n sql: `create table if not exists prisma_contract.ledger (\n id bigserial primary key,\n created_at timestamptz not null default now(),\n origin_core_hash text,\n origin_profile_hash text,\n destination_core_hash text not null,\n destination_profile_hash text,\n contract_json_before jsonb,\n contract_json_after jsonb,\n operations jsonb not null\n )`,\n params: [],\n};\n\nexport interface MergeMarkerInput {\n readonly storageHash: string;\n readonly profileHash: string;\n readonly contractJson?: unknown;\n readonly canonicalVersion?: number | null;\n readonly appTag?: string | null;\n readonly meta?: Record<string, unknown>;\n /**\n * Invariants to merge into `marker.invariants`. INSERT writes them as\n * the initial value (callers are expected to pass a sorted, deduped\n * array). 
UPDATE merges them with the existing column server-side via\n * a single atomic SQL expression.\n */\n readonly invariants: readonly string[];\n}\n\nexport function buildMergeMarkerStatements(input: MergeMarkerInput): {\n readonly insert: SqlStatement;\n readonly update: SqlStatement;\n} {\n const params: readonly unknown[] = [\n 1,\n input.storageHash,\n input.profileHash,\n jsonParam(input.contractJson),\n input.canonicalVersion ?? null,\n input.appTag ?? null,\n jsonParam(input.meta ?? {}),\n input.invariants,\n ];\n\n return {\n insert: {\n sql: `insert into prisma_contract.marker (\n id,\n core_hash,\n profile_hash,\n contract_json,\n canonical_version,\n updated_at,\n app_tag,\n meta,\n invariants\n ) values (\n $1,\n $2,\n $3,\n $4::jsonb,\n $5,\n now(),\n $6,\n $7::jsonb,\n $8::text[]\n )`,\n params,\n },\n update: {\n // `invariants = array(select distinct unnest(invariants || $8::text[]) order by 1)`\n // reads the current column value under the UPDATE's row lock, unions\n // with the incoming array, dedupes, and sorts ascending — single\n // statement, atomic, no read-then-write window.\n sql: `update prisma_contract.marker set\n core_hash = $2,\n profile_hash = $3,\n contract_json = $4::jsonb,\n canonical_version = $5,\n updated_at = now(),\n app_tag = $6,\n meta = $7::jsonb,\n invariants = array(select distinct unnest(invariants || $8::text[]) order by 1)\n where id = $1`,\n params,\n },\n };\n}\n\nexport interface LedgerInsertInput {\n readonly originStorageHash?: string | null;\n readonly originProfileHash?: string | null;\n readonly destinationStorageHash: string;\n readonly destinationProfileHash?: string | null;\n readonly contractJsonBefore?: unknown;\n readonly contractJsonAfter?: unknown;\n readonly operations: unknown;\n}\n\nexport function buildLedgerInsertStatement(input: LedgerInsertInput): SqlStatement {\n return {\n sql: `insert into prisma_contract.ledger (\n origin_core_hash,\n origin_profile_hash,\n destination_core_hash,\n 
destination_profile_hash,\n contract_json_before,\n contract_json_after,\n operations\n ) values (\n $1,\n $2,\n $3,\n $4,\n $5::jsonb,\n $6::jsonb,\n $7::jsonb\n )`,\n params: [\n input.originStorageHash ?? null,\n input.originProfileHash ?? null,\n input.destinationStorageHash,\n input.destinationProfileHash ?? null,\n jsonParam(input.contractJsonBefore),\n jsonParam(input.contractJsonAfter),\n jsonParam(input.operations),\n ],\n };\n}\n\nfunction jsonParam(value: unknown): string {\n return JSON.stringify(value ?? null);\n}\n"],"mappings":";AAKA,MAAaA,sCAAoD;CAC/D,KAAK;CACL,QAAQ,EAAE;CACX;AAED,MAAaC,6BAA2C;CACtD,KAAK;;;;;;;;;;;CAWL,QAAQ,EAAE;CACX;AAED,MAAaC,6BAA2C;CACtD,KAAK;;;;;;;;;;;CAWL,QAAQ,EAAE;CACX;AAkBD,SAAgB,2BAA2B,OAGzC;CACA,MAAMC,SAA6B;EACjC;EACA,MAAM;EACN,MAAM;EACN,UAAU,MAAM,aAAa;EAC7B,MAAM,oBAAoB;EAC1B,MAAM,UAAU;EAChB,UAAU,MAAM,QAAQ,EAAE,CAAC;EAC3B,MAAM;EACP;AAED,QAAO;EACL,QAAQ;GACN,KAAK;;;;;;;;;;;;;;;;;;;;;GAqBL;GACD;EACD,QAAQ;GAKN,KAAK;;;;;;;;;;GAUL;GACD;EACF;;AAaH,SAAgB,2BAA2B,OAAwC;AACjF,QAAO;EACL,KAAK;;;;;;;;;;;;;;;;;EAiBL,QAAQ;GACN,MAAM,qBAAqB;GAC3B,MAAM,qBAAqB;GAC3B,MAAM;GACN,MAAM,0BAA0B;GAChC,UAAU,MAAM,mBAAmB;GACnC,UAAU,MAAM,kBAAkB;GAClC,UAAU,MAAM,WAAW;GAC5B;EACF;;AAGH,SAAS,UAAU,OAAwB;AACzC,QAAO,KAAK,UAAU,SAAS,KAAK"}
@@ -0,0 +1,30 @@
1
//#region src/core/migrations/statement-builders.d.ts
/** A parameterized SQL statement: text plus positional parameters. */
interface SqlStatement {
  readonly sql: string;
  readonly params: readonly unknown[];
}
/** DDL: `create schema if not exists prisma_contract`. */
declare const ensurePrismaContractSchemaStatement: SqlStatement;
/** DDL: creates the single-row `prisma_contract.marker` table. */
declare const ensureMarkerTableStatement: SqlStatement;
/** DDL: creates the append-only `prisma_contract.ledger` table. */
declare const ensureLedgerTableStatement: SqlStatement;
/** Input shape for the marker upsert built by `buildMergeMarkerStatements`. */
interface MergeMarkerInput {
  readonly storageHash: string;
  readonly profileHash: string;
  readonly contractJson?: unknown;
  readonly canonicalVersion?: number | null;
  readonly appTag?: string | null;
  readonly meta?: Record<string, unknown>;
  /**
   * Invariants to merge into `marker.invariants`. INSERT writes them as
   * the initial value (callers are expected to pass a sorted, deduped
   * array). UPDATE merges them with the existing column server-side via
   * a single atomic SQL expression.
   */
  readonly invariants: readonly string[];
}
declare function buildMergeMarkerStatements(input: MergeMarkerInput): {
  readonly insert: SqlStatement;
  readonly update: SqlStatement;
};
//#endregion
export { type SqlStatement, buildMergeMarkerStatements, ensureLedgerTableStatement, ensureMarkerTableStatement, ensurePrismaContractSchemaStatement };
//# sourceMappingURL=statement-builders.d.mts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"statement-builders.d.mts","names":[],"sources":["../src/core/migrations/statement-builders.ts"],"sourcesContent":[],"mappings":";UAAiB,YAAA;EAAA,SAAA,GAAA,EAAA,MAAY;EAKhB,SAAA,MAAA,EAAA,SAAA,OAAA,EAAA;AAKb;AAea,cApBA,mCAoB4B,EApBS,YAiCjD;AAEgB,cA9BJ,0BAoCW,EApCiB,YAoCjB;AAUR,cA/BH,0BA+B6B,EA/BD,YA+BC;AAAQ,UAhBjC,gBAAA,CAgBiC;EAC/B,SAAA,WAAA,EAAA,MAAA;EACA,SAAA,WAAA,EAAA,MAAA;EAAY,SAAA,YAAA,CAAA,EAAA,OAAA;;;kBAZb;;;;;;;;;iBAUF,0BAAA,QAAkC;mBAC/B;mBACA"}
@@ -0,0 +1,3 @@
1
// Public facade: re-exports the statement builders from the hashed chunk
// under their original names (the single-letter specifiers are the bundler's
// minified internal export names).
import { a as ensurePrismaContractSchemaStatement, i as ensureMarkerTableStatement, n as buildMergeMarkerStatements, r as ensureLedgerTableStatement } from "./statement-builders-CHqCtSfe.mjs";

export { buildMergeMarkerStatements, ensureLedgerTableStatement, ensureMarkerTableStatement, ensurePrismaContractSchemaStatement };
@@ -0,0 +1,477 @@
1
+ import { i as quoteIdentifier, n as escapeLiteral, r as qualifyName } from "./sql-utils-r-Lw535w.mjs";
2
+ import { a as columnNullabilityCheck, c as qualifyTableName, d as toRegclassLiteral, n as columnDefaultExistsCheck, o as columnTypeCheck, r as columnExistsCheck, s as constraintExistsCheck } from "./planner-sql-checks-7jkgm9TX.mjs";
3
+ import { ifDefined } from "@prisma-next/utils/defined";
4
+
5
//#region src/core/migrations/operations/shared.ts
/**
 * Pair a human-readable description with the SQL statement it explains.
 * Used for every precheck/execute/postcheck entry in an operation.
 */
function step(description, sql) {
  const planStep = { description, sql };
  return planStep;
}
12
/**
 * Build the standard `target` descriptor attached to every operation:
 * identifies the postgres object being changed by schema, object type,
 * name, and — when supplied — its owning table.
 */
function targetDetails(objectType, name, schema, table) {
  return {
    id: "postgres",
    details: {
      schema,
      objectType,
      name,
      // ifDefined presumably spreads `{ table }` only when `table` is not
      // undefined — TODO(review): confirm against @prisma-next/utils/defined.
      ...ifDefined("table", table)
    }
  };
}
23
/**
 * Render one column clause for CREATE TABLE / ADD COLUMN: quoted name, type,
 * optional DEFAULT fragment, and NOT NULL for non-nullable columns.
 */
function renderColumnDefinition(column) {
  const fragments = [quoteIdentifier(column.name), column.typeSql];
  fragments.push(column.defaultSql);
  fragments.push(column.nullable ? "" : "NOT NULL");
  // Drop falsy fragments (no default, nullable column) before joining.
  return fragments.filter(Boolean).join(" ");
}
31
+
32
//#endregion
//#region src/core/migrations/operations/columns.ts
/**
 * Additive operation: ADD COLUMN with optional DEFAULT / NOT NULL.
 * Precheck requires the column to be absent; postcheck verifies it exists.
 */
function addColumn(schemaName, tableName, column) {
  // Falsy fragments (missing defaultSql, nullable column) drop out via filter.
  const addSql = [
    `ALTER TABLE ${qualifyTableName(schemaName, tableName)}`,
    `ADD COLUMN ${quoteIdentifier(column.name)} ${column.typeSql}`,
    column.defaultSql,
    column.nullable ? "" : "NOT NULL"
  ].filter(Boolean).join(" ");
  return {
    id: `column.${tableName}.${column.name}`,
    label: `Add column "${column.name}" to "${tableName}"`,
    operationClass: "additive",
    target: targetDetails("column", column.name, schemaName, tableName),
    precheck: [step(`ensure column "${column.name}" is missing`, columnExistsCheck({
      schema: schemaName,
      table: tableName,
      column: column.name,
      exists: false
    }))],
    execute: [step(`add column "${column.name}"`, addSql)],
    postcheck: [step(`verify column "${column.name}" exists`, columnExistsCheck({
      schema: schemaName,
      table: tableName,
      column: column.name
    }))]
  };
}
60
/**
 * Destructive operation: DROP COLUMN.
 * Precheck requires the column to exist; postcheck verifies it is gone.
 */
function dropColumn(schemaName, tableName, columnName) {
  const table = qualifyTableName(schemaName, tableName);
  const location = { schema: schemaName, table: tableName, column: columnName };
  return {
    id: `dropColumn.${tableName}.${columnName}`,
    label: `Drop column "${columnName}" from "${tableName}"`,
    operationClass: "destructive",
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [
      step(`ensure column "${columnName}" exists`, columnExistsCheck({ ...location }))
    ],
    execute: [
      step(`drop column "${columnName}"`, `ALTER TABLE ${table} DROP COLUMN ${quoteIdentifier(columnName)}`)
    ],
    postcheck: [
      step(`verify column "${columnName}" does not exist`, columnExistsCheck({ ...location, exists: false }))
    ]
  };
}
81
/**
 * `qualifiedTargetType` is the new column type as it appears in the
 * `ALTER COLUMN TYPE` clause (schema-qualified for user-defined types, raw
 * native name for built-ins). `formatTypeExpected` is the unqualified
 * `format_type` form used in the postcheck. `rawTargetTypeForLabel` is the
 * string appearing in the human-readable label (typically `toType` when
 * explicit, else the column's native type).
 */
function alterColumnType(schemaName, tableName, columnName, options) {
  const qualified = qualifyTableName(schemaName, tableName);
  // Without an explicit USING expression, fall back to a direct cast of the
  // existing column value to the target type.
  const usingClause = options.using ? ` USING ${options.using}` : ` USING ${quoteIdentifier(columnName)}::${options.qualifiedTargetType}`;
  return {
    id: `alterType.${tableName}.${columnName}`,
    label: `Alter type of "${tableName}"."${columnName}" to ${options.rawTargetTypeForLabel}`,
    operationClass: "destructive",
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck({
      schema: schemaName,
      table: tableName,
      column: columnName
    }))],
    execute: [step(`alter type of "${columnName}"`, `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} TYPE ${options.qualifiedTargetType}${usingClause}`)],
    postcheck: [step(`verify column "${columnName}" has type "${options.formatTypeExpected}"`, columnTypeCheck({
      schema: schemaName,
      table: tableName,
      column: columnName,
      expectedType: options.formatTypeExpected
    }))],
    // Surfaced to planners/renderers: altering a column's type can force
    // postgres to rewrite the whole table.
    meta: { warning: "TABLE_REWRITE" }
  };
}
112
/**
 * Destructive operation: SET NOT NULL. A second precheck scans the table
 * (`SELECT NOT EXISTS (...)`) to prove no NULL values remain before the
 * constraint is applied; postcheck confirms the column is non-nullable.
 */
function setNotNull(schemaName, tableName, columnName) {
  const qualified = qualifyTableName(schemaName, tableName);
  return {
    id: `alterNullability.setNotNull.${tableName}.${columnName}`,
    label: `Set NOT NULL on "${tableName}"."${columnName}"`,
    operationClass: "destructive",
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck({
      schema: schemaName,
      table: tableName,
      column: columnName
    })), step(`ensure no NULL values in "${columnName}"`, `SELECT NOT EXISTS (SELECT 1 FROM ${qualified} WHERE ${quoteIdentifier(columnName)} IS NULL)`)],
    execute: [step(`set NOT NULL on "${columnName}"`, `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} SET NOT NULL`)],
    postcheck: [step(`verify column "${columnName}" is NOT NULL`, columnNullabilityCheck({
      schema: schemaName,
      table: tableName,
      column: columnName,
      nullable: false
    }))]
  };
}
133
/**
 * Widening operation: DROP NOT NULL.
 * Precheck requires the column to exist; postcheck verifies it is nullable.
 */
function dropNotNull(schemaName, tableName, columnName) {
  const table = qualifyTableName(schemaName, tableName);
  const location = { schema: schemaName, table: tableName, column: columnName };
  return {
    id: `alterNullability.dropNotNull.${tableName}.${columnName}`,
    label: `Drop NOT NULL on "${tableName}"."${columnName}"`,
    operationClass: "widening",
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [
      step(`ensure column "${columnName}" exists`, columnExistsCheck({ ...location }))
    ],
    execute: [
      step(`drop NOT NULL on "${columnName}"`, `ALTER TABLE ${table} ALTER COLUMN ${quoteIdentifier(columnName)} DROP NOT NULL`)
    ],
    postcheck: [
      step(`verify column "${columnName}" is nullable`, columnNullabilityCheck({ ...location, nullable: true }))
    ]
  };
}
154
/**
 * `defaultSql` is the full `DEFAULT …` clause as produced by
 * `buildColumnDefaultSql` — e.g. `"DEFAULT 42"`,
 * `"DEFAULT (CURRENT_TIMESTAMP)"`, or `"DEFAULT nextval('seq'::regclass)"`.
 *
 * `operationClass` defaults to `'additive'` (setting a default on a column
 * that currently has none). The reconciliation planner passes `'widening'`
 * when the column already has a different default — policy enforcement
 * treats that as a widening change rather than an additive one.
 */
function setDefault(schemaName, tableName, columnName, defaultSql, operationClass = "additive") {
  const qualified = qualifyTableName(schemaName, tableName);
  return {
    id: `setDefault.${tableName}.${columnName}`,
    label: `Set default on "${tableName}"."${columnName}"`,
    operationClass,
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck({
      schema: schemaName,
      table: tableName,
      column: columnName
    }))],
    // `SET ${defaultSql}` relies on defaultSql starting with "DEFAULT ".
    execute: [step(`set default on "${columnName}"`, `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} SET ${defaultSql}`)],
    // Postcheck only proves *a* default exists, not that it matches defaultSql.
    postcheck: [step(`verify column "${columnName}" has a default`, columnDefaultExistsCheck({
      schema: schemaName,
      table: tableName,
      column: columnName,
      exists: true
    }))]
  };
}
185
/**
 * Destructive operation: DROP DEFAULT.
 * Precheck requires the column to exist; postcheck verifies no default remains.
 */
function dropDefault(schemaName, tableName, columnName) {
  const table = qualifyTableName(schemaName, tableName);
  const location = { schema: schemaName, table: tableName, column: columnName };
  return {
    id: `dropDefault.${tableName}.${columnName}`,
    label: `Drop default on "${tableName}"."${columnName}"`,
    operationClass: "destructive",
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [
      step(`ensure column "${columnName}" exists`, columnExistsCheck({ ...location }))
    ],
    execute: [
      step(`drop default on "${columnName}"`, `ALTER TABLE ${table} ALTER COLUMN ${quoteIdentifier(columnName)} DROP DEFAULT`)
    ],
    postcheck: [
      step(`verify column "${columnName}" has no default`, columnDefaultExistsCheck({ ...location, exists: false }))
    ]
  };
}
206
+
207
//#endregion
//#region src/core/migrations/operations/constraints.ts
// Maps contract-level referential-action identifiers to their SQL spelling
// for ON DELETE / ON UPDATE clauses.
const REFERENTIAL_ACTION_SQL = {
  noAction: "NO ACTION",
  restrict: "RESTRICT",
  cascade: "CASCADE",
  setNull: "SET NULL",
  setDefault: "SET DEFAULT"
};
216
/**
 * Render the ADD CONSTRAINT ... FOREIGN KEY statement for `fk`, appending
 * ON DELETE / ON UPDATE clauses when the corresponding action is set.
 * Throws if an action value has no SQL mapping in REFERENTIAL_ACTION_SQL.
 */
function renderForeignKeySql(schemaName, tableName, fk) {
  const clauses = [`ALTER TABLE ${qualifyTableName(schemaName, tableName)}
ADD CONSTRAINT ${quoteIdentifier(fk.name)}
FOREIGN KEY (${fk.columns.map(quoteIdentifier).join(", ")})
REFERENCES ${qualifyTableName(schemaName, fk.references.table)} (${fk.references.columns.map(quoteIdentifier).join(", ")})`];
  for (const [key, keyword] of [["onDelete", "ON DELETE"], ["onUpdate", "ON UPDATE"]]) {
    const value = fk[key];
    if (value === void 0) continue;
    const action = REFERENTIAL_ACTION_SQL[value];
    if (!action) throw new Error(`Unknown referential action for ${key}: ${String(value)}`);
    clauses.push(`${keyword} ${action}`);
  }
  return clauses.join("\n");
}
233
/**
 * Additive operation: ADD CONSTRAINT ... PRIMARY KEY over `columns`.
 * Precheck requires the named constraint to be absent; postcheck verifies it
 * now exists.
 */
function addPrimaryKey(schemaName, tableName, constraintName, columns) {
  const qualified = qualifyTableName(schemaName, tableName);
  const columnList = columns.map(quoteIdentifier).join(", ");
  return {
    id: `primaryKey.${tableName}.${constraintName}`,
    label: `Add primary key on "${tableName}"`,
    operationClass: "additive",
    target: targetDetails("primaryKey", constraintName, schemaName, tableName),
    precheck: [step(`ensure primary key "${constraintName}" does not exist`, constraintExistsCheck({
      constraintName,
      schema: schemaName,
      table: tableName,
      exists: false
    }))],
    execute: [step(`add primary key "${constraintName}"`, `ALTER TABLE ${qualified} ADD CONSTRAINT ${quoteIdentifier(constraintName)} PRIMARY KEY (${columnList})`)],
    postcheck: [step(`verify primary key "${constraintName}" exists`, constraintExistsCheck({
      constraintName,
      schema: schemaName,
      table: tableName
    }))]
  };
}
255
/**
 * Additive operation: ADD CONSTRAINT ... UNIQUE over `columns`.
 * Precheck requires the named constraint to be absent; postcheck verifies it
 * now exists.
 */
function addUnique(schemaName, tableName, constraintName, columns) {
  const table = qualifyTableName(schemaName, tableName);
  const quotedColumns = columns.map(quoteIdentifier).join(", ");
  const checkTarget = { constraintName, schema: schemaName, table: tableName };
  return {
    id: `unique.${tableName}.${constraintName}`,
    label: `Add unique constraint on "${tableName}" (${columns.join(", ")})`,
    operationClass: "additive",
    target: targetDetails("unique", constraintName, schemaName, tableName),
    precheck: [
      step(`ensure constraint "${constraintName}" does not exist`, constraintExistsCheck({ ...checkTarget, exists: false }))
    ],
    execute: [
      step(`add unique constraint "${constraintName}"`, `ALTER TABLE ${table} ADD CONSTRAINT ${quoteIdentifier(constraintName)} UNIQUE (${quotedColumns})`)
    ],
    postcheck: [
      step(`verify constraint "${constraintName}" exists`, constraintExistsCheck({ ...checkTarget }))
    ]
  };
}
277
/**
 * Builds the migration operation that adds the foreign-key constraint
 * described by `fk` to a table; the DDL itself comes from
 * renderForeignKeySql.
 */
function addForeignKey(schemaName, tableName, fk) {
	const fkName = fk.name;
	const mustBeAbsent = constraintExistsCheck({
		constraintName: fkName,
		schema: schemaName,
		table: tableName,
		exists: false
	});
	const mustExist = constraintExistsCheck({
		constraintName: fkName,
		schema: schemaName,
		table: tableName
	});
	return {
		id: `foreignKey.${tableName}.${fkName}`,
		label: `Add foreign key "${fkName}" on "${tableName}"`,
		operationClass: "additive",
		target: targetDetails("foreignKey", fkName, schemaName, tableName),
		precheck: [step(`ensure FK "${fkName}" does not exist`, mustBeAbsent)],
		execute: [step(`add FK "${fkName}"`, renderForeignKeySql(schemaName, tableName, fk))],
		postcheck: [step(`verify FK "${fkName}" exists`, mustExist)]
	};
}
297
/**
 * Builds the (destructive) migration operation that drops a table constraint.
 *
 * `kind` only feeds `target.details.objectType`. The descriptor-flow
 * drop-constraint descriptor carries no kind information, hence the
 * `'unique'` default; the reconciliation planner supplies the accurate kind
 * (`'foreignKey'`, `'primaryKey'`, or `'unique'`) from the `SchemaIssue`
 * that produced the drop.
 */
function dropConstraint(schemaName, tableName, constraintName, kind = "unique") {
	const table = qualifyTableName(schemaName, tableName);
	const mustExist = constraintExistsCheck({
		constraintName,
		schema: schemaName,
		table: tableName
	});
	const mustBeGone = constraintExistsCheck({
		constraintName,
		schema: schemaName,
		table: tableName,
		exists: false
	});
	return {
		id: `dropConstraint.${tableName}.${constraintName}`,
		label: `Drop constraint "${constraintName}" on "${tableName}"`,
		operationClass: "destructive",
		target: targetDetails(kind, constraintName, schemaName, tableName),
		precheck: [step(`ensure constraint "${constraintName}" exists`, mustExist)],
		execute: [step(`drop constraint "${constraintName}"`, `ALTER TABLE ${table} DROP CONSTRAINT ${quoteIdentifier(constraintName)}`)],
		postcheck: [step(`verify constraint "${constraintName}" does not exist`, mustBeGone)]
	};
}
325
+
326
+ //#endregion
327
+ //#region src/core/migrations/operations/dependencies.ts
328
/**
 * Builds the operation that ensures a Postgres extension is installed.
 * The DDL uses IF NOT EXISTS, so the operation carries no pre/post checks,
 * and it targets the server as a whole rather than a schema object.
 */
function createExtension(extensionName) {
	const ddl = `CREATE EXTENSION IF NOT EXISTS ${quoteIdentifier(extensionName)}`;
	return {
		id: `extension.${extensionName}`,
		label: `Create extension "${extensionName}"`,
		operationClass: "additive",
		target: { id: "postgres" },
		precheck: [],
		execute: [step(`Create extension "${extensionName}"`, ddl)],
		postcheck: []
	};
}
339
/**
 * Builds the operation that ensures a schema exists. IF NOT EXISTS makes it
 * idempotent, so no pre/post checks are emitted; targets the server itself.
 */
function createSchema(schemaName) {
	const ddl = `CREATE SCHEMA IF NOT EXISTS ${quoteIdentifier(schemaName)}`;
	return {
		id: `schema.${schemaName}`,
		label: `Create schema "${schemaName}"`,
		operationClass: "additive",
		target: { id: "postgres" },
		precheck: [],
		execute: [step(`Create schema "${schemaName}"`, ddl)],
		postcheck: []
	};
}
350
+
351
+ //#endregion
352
+ //#region src/core/migrations/operations/enums.ts
353
// Renders a boolean SQL probe checking whether an enum (or any) type named
// `nativeType` exists in schema `schemaName`, by joining pg_type to
// pg_namespace. Pass `exists = false` to invert the probe for
// "must not exist" prechecks. Names are embedded as single-quoted string
// literals via escapeLiteral, so callers pass raw (unquoted) identifiers.
function enumTypeExistsCheck(schemaName, nativeType, exists = true) {
	return `SELECT ${exists ? "EXISTS" : "NOT EXISTS"} (
		SELECT 1
		FROM pg_type t
		JOIN pg_namespace n ON t.typnamespace = n.oid
		WHERE n.nspname = '${escapeLiteral(schemaName)}'
		AND t.typname = '${escapeLiteral(nativeType)}'
	)`;
}
362
/**
 * Builds the operation that creates an enum type with the given member
 * values; each value is embedded as an escaped string literal.
 */
function createEnumType(schemaName, typeName, values) {
	const memberLiterals = values.map((value) => `'${escapeLiteral(value)}'`).join(", ");
	const ddl = `CREATE TYPE ${qualifyName(schemaName, typeName)} AS ENUM (${memberLiterals})`;
	return {
		id: `type.${typeName}`,
		label: `Create enum type "${typeName}"`,
		operationClass: "additive",
		target: targetDetails("type", typeName, schemaName),
		precheck: [step(`ensure type "${typeName}" does not exist`, enumTypeExistsCheck(schemaName, typeName, false))],
		execute: [step(`create enum type "${typeName}"`, ddl)],
		postcheck: [step(`verify type "${typeName}" exists`, enumTypeExistsCheck(schemaName, typeName))]
	};
}
375
/**
 * Builds the operation that appends values to an existing enum type, emitting
 * one `ALTER TYPE ... ADD VALUE` step per value.
 *
 * `typeName` is the contract-facing type name (used for id/label).
 * `nativeType` is the Postgres type name to mutate (may differ for external
 * types).
 */
function addEnumValues(schemaName, typeName, nativeType, values) {
	const qualifiedNative = qualifyName(schemaName, nativeType);
	// One executable step per enum member to add.
	const addValueSteps = values.map((value) => step(`add value '${value}' to enum "${nativeType}"`, `ALTER TYPE ${qualifiedNative} ADD VALUE '${escapeLiteral(value)}'`));
	return {
		id: `type.${typeName}.addValues`,
		label: `Add values to enum type "${typeName}": ${values.join(", ")}`,
		operationClass: "additive",
		target: targetDetails("type", typeName, schemaName),
		precheck: [step(`ensure type "${nativeType}" exists`, enumTypeExistsCheck(schemaName, nativeType))],
		execute: addValueSteps,
		postcheck: [step(`verify type "${nativeType}" exists`, enumTypeExistsCheck(schemaName, nativeType))]
	};
}
391
/**
 * Builds the (destructive) operation that drops an enum type. No CASCADE is
 * emitted, so the DDL is a plain `DROP TYPE` on the qualified name.
 */
function dropEnumType(schemaName, typeName) {
	const ddl = `DROP TYPE ${qualifyName(schemaName, typeName)}`;
	return {
		id: `type.${typeName}.drop`,
		label: `Drop enum type "${typeName}"`,
		operationClass: "destructive",
		target: targetDetails("type", typeName, schemaName),
		precheck: [step(`ensure type "${typeName}" exists`, enumTypeExistsCheck(schemaName, typeName))],
		execute: [step(`drop enum type "${typeName}"`, ddl)],
		postcheck: [step(`verify type "${typeName}" removed`, enumTypeExistsCheck(schemaName, typeName, false))]
	};
}
403
/**
 * Builds the operation that renames a type within its schema. Classified as
 * destructive since the old name disappears. Prechecks both that the source
 * type exists and that the destination name is still free.
 */
function renameType(schemaName, fromName, toName) {
	const ddl = `ALTER TYPE ${qualifyName(schemaName, fromName)} RENAME TO ${quoteIdentifier(toName)}`;
	return {
		id: `type.${fromName}.rename`,
		label: `Rename type "${fromName}" to "${toName}"`,
		operationClass: "destructive",
		target: targetDetails("type", fromName, schemaName),
		precheck: [
			step(`ensure type "${fromName}" exists`, enumTypeExistsCheck(schemaName, fromName)),
			step(`ensure type "${toName}" does not already exist`, enumTypeExistsCheck(schemaName, toName, false))
		],
		execute: [step(`rename type "${fromName}" to "${toName}"`, ddl)],
		postcheck: [step(`verify type "${toName}" exists`, enumTypeExistsCheck(schemaName, toName))]
	};
}
415
+
416
+ //#endregion
417
+ //#region src/core/migrations/operations/indexes.ts
418
/**
 * Builds the operation that creates a (non-unique) index over `columns`.
 * Existence checks probe to_regclass with the schema-qualified index name.
 */
function createIndex(schemaName, tableName, indexName, columns) {
	const indexedColumns = columns.map(quoteIdentifier).join(", ");
	const ddl = `CREATE INDEX ${quoteIdentifier(indexName)} ON ${qualifyTableName(schemaName, tableName)} (${indexedColumns})`;
	const regclass = toRegclassLiteral(schemaName, indexName);
	return {
		id: `index.${tableName}.${indexName}`,
		label: `Create index "${indexName}" on "${tableName}"`,
		operationClass: "additive",
		target: targetDetails("index", indexName, schemaName, tableName),
		precheck: [step(`ensure index "${indexName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)],
		execute: [step(`create index "${indexName}"`, ddl)],
		postcheck: [step(`verify index "${indexName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)]
	};
}
431
/**
 * Builds the (destructive) operation that drops an index by its
 * schema-qualified name, with to_regclass existence checks around it.
 */
function dropIndex(schemaName, tableName, indexName) {
	const regclass = toRegclassLiteral(schemaName, indexName);
	return {
		id: `dropIndex.${tableName}.${indexName}`,
		label: `Drop index "${indexName}"`,
		operationClass: "destructive",
		target: targetDetails("index", indexName, schemaName, tableName),
		precheck: [step(`ensure index "${indexName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)],
		// DROP INDEX takes the qualified index name (not the table's).
		execute: [step(`drop index "${indexName}"`, `DROP INDEX ${qualifyTableName(schemaName, indexName)}`)],
		postcheck: [step(`verify index "${indexName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)]
	};
}
442
+
443
+ //#endregion
444
+ //#region src/core/migrations/operations/tables.ts
445
/**
 * Builds the operation that creates a table from rendered column definitions
 * plus an optional inline PRIMARY KEY table constraint.
 */
function createTable(schemaName, tableName, columns, primaryKey) {
	// Column definitions first, then table-level constraints, joined into one
	// CREATE TABLE statement body.
	const bodyParts = columns.map(renderColumnDefinition);
	if (primaryKey) {
		bodyParts.push(`PRIMARY KEY (${primaryKey.columns.map(quoteIdentifier).join(", ")})`);
	}
	const createSql = `CREATE TABLE ${qualifyTableName(schemaName, tableName)} (\n ${bodyParts.join(",\n ")}\n)`;
	const regclass = toRegclassLiteral(schemaName, tableName);
	return {
		id: `table.${tableName}`,
		label: `Create table "${tableName}"`,
		summary: `Creates table "${tableName}"`,
		operationClass: "additive",
		target: targetDetails("table", tableName, schemaName),
		precheck: [step(`ensure table "${tableName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)],
		execute: [step(`create table "${tableName}"`, createSql)],
		postcheck: [step(`verify table "${tableName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)]
	};
}
462
/**
 * Builds the (destructive) operation that drops a table, with to_regclass
 * existence checks before and after.
 */
function dropTable(schemaName, tableName) {
	const qualifiedTable = qualifyTableName(schemaName, tableName);
	const regclass = toRegclassLiteral(schemaName, tableName);
	return {
		id: `dropTable.${tableName}`,
		label: `Drop table "${tableName}"`,
		operationClass: "destructive",
		target: targetDetails("table", tableName, schemaName),
		precheck: [step(`ensure table "${tableName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)],
		execute: [step(`drop table "${tableName}"`, `DROP TABLE ${qualifiedTable}`)],
		postcheck: [step(`verify table "${tableName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)]
	};
}
474
+
475
+ //#endregion
476
+ export { dropColumn as _, addEnumValues as a, setDefault as b, renameType as c, addForeignKey as d, addPrimaryKey as f, alterColumnType as g, addColumn as h, dropIndex as i, createExtension as l, dropConstraint as m, dropTable as n, createEnumType as o, addUnique as p, createIndex as r, dropEnumType as s, createTable as t, createSchema as u, dropDefault as v, setNotNull as x, dropNotNull as y };
477
+ //# sourceMappingURL=tables-BmdW_FWO.mjs.map