@supabase/pg-delta 1.0.0-alpha.21 → 1.0.0-alpha.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/dist/core/catalog.diff.js +4 -3
  2. package/dist/core/catalog.model.d.ts +8 -1
  3. package/dist/core/catalog.model.js +9 -8
  4. package/dist/core/expand-replace-dependencies.js +23 -0
  5. package/dist/core/objects/extract-with-retry.d.ts +36 -0
  6. package/dist/core/objects/extract-with-retry.js +51 -0
  7. package/dist/core/objects/index/index.diff.js +0 -1
  8. package/dist/core/objects/index/index.model.d.ts +2 -3
  9. package/dist/core/objects/index/index.model.js +17 -6
  10. package/dist/core/objects/materialized-view/materialized-view.model.d.ts +2 -1
  11. package/dist/core/objects/materialized-view/materialized-view.model.js +20 -4
  12. package/dist/core/objects/procedure/procedure.model.d.ts +2 -1
  13. package/dist/core/objects/procedure/procedure.model.js +20 -4
  14. package/dist/core/objects/rls-policy/rls-policy.diff.js +13 -1
  15. package/dist/core/objects/rule/rule.model.d.ts +2 -1
  16. package/dist/core/objects/rule/rule.model.js +20 -3
  17. package/dist/core/objects/sequence/sequence.diff.d.ts +2 -1
  18. package/dist/core/objects/sequence/sequence.diff.js +28 -4
  19. package/dist/core/objects/table/changes/table.alter.d.ts +12 -1
  20. package/dist/core/objects/table/changes/table.alter.js +20 -2
  21. package/dist/core/objects/table/table.diff.js +19 -15
  22. package/dist/core/objects/table/table.model.d.ts +6 -1
  23. package/dist/core/objects/table/table.model.js +40 -5
  24. package/dist/core/objects/trigger/trigger.model.d.ts +2 -1
  25. package/dist/core/objects/trigger/trigger.model.js +20 -4
  26. package/dist/core/objects/utils.d.ts +1 -0
  27. package/dist/core/objects/utils.js +3 -0
  28. package/dist/core/objects/view/view.model.d.ts +2 -1
  29. package/dist/core/objects/view/view.model.js +20 -4
  30. package/dist/core/plan/create.js +3 -1
  31. package/dist/core/plan/types.d.ts +8 -0
  32. package/dist/core/{post-diff-cycle-breaking.d.ts → post-diff-normalization.d.ts} +8 -1
  33. package/dist/core/post-diff-normalization.js +202 -0
  34. package/dist/core/sort/cycle-breakers.js +1 -1
  35. package/dist/core/sort/utils.d.ts +10 -0
  36. package/dist/core/sort/utils.js +28 -0
  37. package/package.json +1 -1
  38. package/src/core/catalog.diff.ts +4 -2
  39. package/src/core/catalog.model.ts +20 -8
  40. package/src/core/expand-replace-dependencies.test.ts +131 -0
  41. package/src/core/expand-replace-dependencies.ts +24 -0
  42. package/src/core/objects/extract-with-retry.test.ts +143 -0
  43. package/src/core/objects/extract-with-retry.ts +87 -0
  44. package/src/core/objects/index/index.diff.ts +0 -1
  45. package/src/core/objects/index/index.model.test.ts +37 -1
  46. package/src/core/objects/index/index.model.ts +25 -6
  47. package/src/core/objects/materialized-view/materialized-view.model.test.ts +93 -0
  48. package/src/core/objects/materialized-view/materialized-view.model.ts +27 -4
  49. package/src/core/objects/procedure/procedure.model.test.ts +117 -0
  50. package/src/core/objects/procedure/procedure.model.ts +28 -5
  51. package/src/core/objects/rls-policy/rls-policy.diff.ts +19 -1
  52. package/src/core/objects/rule/rule.model.test.ts +99 -0
  53. package/src/core/objects/rule/rule.model.ts +28 -4
  54. package/src/core/objects/sequence/sequence.diff.test.ts +87 -0
  55. package/src/core/objects/sequence/sequence.diff.ts +31 -6
  56. package/src/core/objects/table/changes/table.alter.test.ts +13 -21
  57. package/src/core/objects/table/changes/table.alter.ts +30 -3
  58. package/src/core/objects/table/table.diff.ts +24 -19
  59. package/src/core/objects/table/table.model.test.ts +209 -0
  60. package/src/core/objects/table/table.model.ts +52 -7
  61. package/src/core/objects/trigger/trigger.model.test.ts +113 -0
  62. package/src/core/objects/trigger/trigger.model.ts +28 -5
  63. package/src/core/objects/utils.ts +3 -0
  64. package/src/core/objects/view/view.model.test.ts +90 -0
  65. package/src/core/objects/view/view.model.ts +28 -5
  66. package/src/core/plan/create.ts +3 -1
  67. package/src/core/plan/types.ts +8 -0
  68. package/src/core/{post-diff-cycle-breaking.test.ts → post-diff-normalization.test.ts} +168 -4
  69. package/src/core/post-diff-normalization.ts +260 -0
  70. package/src/core/sort/cycle-breakers.ts +1 -1
  71. package/src/core/sort/utils.ts +38 -0
  72. package/dist/core/post-diff-cycle-breaking.js +0 -100
  73. package/src/core/post-diff-cycle-breaking.ts +0 -138
@@ -343,7 +343,7 @@ describe.concurrent("table", () => {
343
343
  ).toBe("ALTER TABLE public.test_table REPLICA IDENTITY FULL");
344
344
  });
345
345
 
346
- test("replica identity DEFAULT and INDEX fallback", async () => {
346
+ test("replica identity DEFAULT and USING INDEX", async () => {
347
347
  const baseProps: Omit<
348
348
  TableProps,
349
349
  "owner" | "options" | "replica_identity"
@@ -372,31 +372,23 @@ describe.concurrent("table", () => {
372
372
  options: null,
373
373
  replica_identity: "n",
374
374
  });
375
- const toDefault = new Table({
376
- ...baseProps,
377
- owner: "o1",
378
- options: null,
379
- replica_identity: "d",
380
- });
381
- const toIndex = new Table({
382
- ...baseProps,
383
- owner: "o1",
384
- options: null,
385
- replica_identity: "i",
386
- });
387
375
  expect(
388
376
  new AlterTableSetReplicaIdentity({
389
377
  table,
390
- mode: toDefault.replica_identity,
391
- }).serialize(),
392
- ).toBe("ALTER TABLE public.test_table REPLICA IDENTITY DEFAULT");
393
- // AlterTableSetReplicaIdentity of type "i" will not be emitted in diff, it is handled by index changes, we fallback to DEFAULT here
394
- expect(
395
- new AlterTableSetReplicaIdentity({
396
- table,
397
- mode: toIndex.replica_identity,
378
+ mode: "d",
398
379
  }).serialize(),
399
380
  ).toBe("ALTER TABLE public.test_table REPLICA IDENTITY DEFAULT");
381
+ const usingIndex = new AlterTableSetReplicaIdentity({
382
+ table,
383
+ mode: "i",
384
+ indexName: "test_table_pkey",
385
+ });
386
+ expect(usingIndex.serialize()).toBe(
387
+ "ALTER TABLE public.test_table REPLICA IDENTITY USING INDEX test_table_pkey",
388
+ );
389
+ expect(usingIndex.requires).toContain(
390
+ "index:public.test_table.test_table_pkey",
391
+ );
400
392
  });
401
393
 
402
394
  test("columns add/drop/alter", async () => {
@@ -462,20 +462,46 @@ export class AlterTableValidateConstraint extends AlterTableChange {
462
462
 
463
463
  /**
464
464
  * ALTER TABLE ... REPLICA IDENTITY ...
465
+ *
466
+ * When `mode === "i"` (USING INDEX), `indexName` is the name of the index to
467
+ * use. The extractor populates `Table.replica_identity_index` from
468
+ * `pg_index.indisreplident` whenever `Table.replica_identity` is `'i'`, so
469
+ * callers that source their props from a `Table` instance can rely on the
470
+ * pair being consistent. The non-null assertions in `requires` / `serialize`
471
+ * below are justified by that data invariant — the same pattern the FK
472
+ * branch of `AlterTableAddConstraint` uses for `foreign_key_columns!` /
473
+ * `foreign_key_table!` / `foreign_key_schema!`.
465
474
  */
466
475
  export class AlterTableSetReplicaIdentity extends AlterTableChange {
467
476
  public readonly table: Table;
468
477
  public readonly mode: "d" | "n" | "f" | "i";
478
+ public readonly indexName: string | null;
469
479
  public readonly scope = "object" as const;
470
480
 
471
- constructor(props: { table: Table; mode: "d" | "n" | "f" | "i" }) {
481
+ constructor(props: {
482
+ table: Table;
483
+ mode: "d" | "n" | "f" | "i";
484
+ indexName?: string | null;
485
+ }) {
472
486
  super();
473
487
  this.table = props.table;
474
488
  this.mode = props.mode;
489
+ this.indexName = props.indexName ?? null;
475
490
  }
476
491
 
477
492
  get requires() {
478
- return [this.table.stableId];
493
+ const reqs: string[] = [this.table.stableId];
494
+ if (this.mode === "i") {
495
+ reqs.push(
496
+ stableId.index(
497
+ this.table.schema,
498
+ this.table.name,
499
+ // biome-ignore lint/style/noNonNullAssertion: mode 'i' implies the extractor populated replica_identity_index
500
+ this.indexName!,
501
+ ),
502
+ );
503
+ }
504
+ return reqs;
479
505
  }
480
506
 
481
507
  serialize(_options?: SerializeOptions): string {
@@ -486,7 +512,8 @@ export class AlterTableSetReplicaIdentity extends AlterTableChange {
486
512
  ? "NOTHING"
487
513
  : this.mode === "f"
488
514
  ? "FULL"
489
- : "DEFAULT"; // 'i' (USING INDEX) is handled via index changes; fallback to DEFAULT
515
+ : // biome-ignore lint/style/noNonNullAssertion: mode 'i' implies the extractor populated replica_identity_index
516
+ `USING INDEX ${this.indexName!}`;
490
517
  return [
491
518
  "ALTER TABLE",
492
519
  `${this.table.schema}.${this.table.name}`,
@@ -245,15 +245,13 @@ export function diffTables(
245
245
 
246
246
  // REPLICA IDENTITY: If non-default, emit ALTER TABLE ... REPLICA IDENTITY
247
247
  if (branchTable.replica_identity !== "d") {
248
- // Skip 'i' (USING INDEX) — handled by index changes
249
- if (branchTable.replica_identity !== "i") {
250
- changes.push(
251
- new AlterTableSetReplicaIdentity({
252
- table: branchTable,
253
- mode: branchTable.replica_identity,
254
- }),
255
- );
256
- }
248
+ changes.push(
249
+ new AlterTableSetReplicaIdentity({
250
+ table: branchTable,
251
+ mode: branchTable.replica_identity,
252
+ indexName: branchTable.replica_identity_index,
253
+ }),
254
+ );
257
255
  }
258
256
 
259
257
  changes.push(
@@ -404,16 +402,23 @@ export function diffTables(
404
402
  }
405
403
 
406
404
  // REPLICA IDENTITY
407
- if (mainTable.replica_identity !== branchTable.replica_identity) {
408
- // Skip when target is 'i' (USING INDEX) handled by index changes
409
- if (branchTable.replica_identity !== "i") {
410
- changes.push(
411
- new AlterTableSetReplicaIdentity({
412
- table: mainTable,
413
- mode: branchTable.replica_identity,
414
- }),
415
- );
416
- }
405
+ // Re-emit when the mode changes, or when staying in 'i' mode but pointing
406
+ // at a different index. The index named on the branch must already exist
407
+ // before this ALTER runs; AlterTableSetReplicaIdentity declares that
408
+ // dependency in its `requires`.
409
+ const replicaIdentityChanged =
410
+ mainTable.replica_identity !== branchTable.replica_identity ||
411
+ (branchTable.replica_identity === "i" &&
412
+ mainTable.replica_identity_index !==
413
+ branchTable.replica_identity_index);
414
+ if (replicaIdentityChanged) {
415
+ changes.push(
416
+ new AlterTableSetReplicaIdentity({
417
+ table: mainTable,
418
+ mode: branchTable.replica_identity,
419
+ indexName: branchTable.replica_identity_index,
420
+ }),
421
+ );
417
422
  }
418
423
 
419
424
  // OWNER
@@ -0,0 +1,209 @@
1
+ import { describe, expect, test } from "bun:test";
2
+ import type { Pool } from "pg";
3
+ import { extractTables, Table } from "./table.model.ts";
4
+
5
+ // Minimal fields required by tablePropsSchema; individual tests override the
6
+ // constraints array (and any other relevant fields).
7
+ const baseTableRow = {
8
+ schema: "public",
9
+ name: '"users"',
10
+ persistence: "p" as const,
11
+ row_security: false,
12
+ force_row_security: false,
13
+ has_indexes: false,
14
+ has_rules: false,
15
+ has_triggers: false,
16
+ has_subclasses: false,
17
+ is_populated: true,
18
+ replica_identity: "d" as const,
19
+ is_partition: false,
20
+ options: null,
21
+ partition_bound: null,
22
+ partition_by: null,
23
+ owner: "postgres",
24
+ comment: null,
25
+ parent_schema: null,
26
+ parent_name: null,
27
+ columns: [],
28
+ privileges: [],
29
+ };
30
+
31
+ const baseConstraint = {
32
+ name: '"users_pkey"',
33
+ constraint_type: "p" as const,
34
+ deferrable: false,
35
+ initially_deferred: false,
36
+ validated: true,
37
+ is_local: true,
38
+ no_inherit: false,
39
+ is_temporal: false,
40
+ is_partition_clone: false,
41
+ parent_constraint_schema: null,
42
+ parent_constraint_name: null,
43
+ parent_table_schema: null,
44
+ parent_table_name: null,
45
+ key_columns: ['"id"'],
46
+ foreign_key_columns: null,
47
+ foreign_key_table: null,
48
+ foreign_key_schema: null,
49
+ foreign_key_table_is_partition: null,
50
+ foreign_key_parent_schema: null,
51
+ foreign_key_parent_table: null,
52
+ foreign_key_effective_schema: null,
53
+ foreign_key_effective_table: null,
54
+ on_update: null,
55
+ on_delete: null,
56
+ match_type: null,
57
+ check_expression: null,
58
+ owner: "postgres",
59
+ comment: null,
60
+ };
61
+
62
+ const mockPool = (rows: unknown[]): Pool =>
63
+ ({ query: async () => ({ rows }) }) as unknown as Pool;
64
+
65
+ const mockPoolSequence = (...attempts: unknown[][]): Pool => {
66
+ let i = 0;
67
+ return {
68
+ query: async () => ({
69
+ rows: attempts[Math.min(i++, attempts.length - 1)],
70
+ }),
71
+ } as unknown as Pool;
72
+ };
73
+
74
+ const NO_BACKOFF = { backoffMs: 0 } as const;
75
+
76
+ describe("extractTables", () => {
77
+ test("skips constraints where pg_get_constraintdef returned NULL after exhausting retries", async () => {
78
+ const tables = await extractTables(
79
+ mockPool([
80
+ {
81
+ ...baseTableRow,
82
+ constraints: [
83
+ {
84
+ ...baseConstraint,
85
+ name: '"users_pkey"',
86
+ definition: "PRIMARY KEY (id)",
87
+ },
88
+ {
89
+ ...baseConstraint,
90
+ name: '"users_orphan_chk"',
91
+ constraint_type: "c",
92
+ key_columns: [],
93
+ definition: null,
94
+ },
95
+ ],
96
+ },
97
+ ]),
98
+ NO_BACKOFF,
99
+ );
100
+
101
+ expect(tables).toHaveLength(1);
102
+ expect(tables[0]).toBeInstanceOf(Table);
103
+ expect(tables[0]?.constraints).toHaveLength(1);
104
+ expect(tables[0]?.constraints[0]?.name).toBe('"users_pkey"');
105
+ expect(tables[0]?.constraints[0]?.definition).toBe("PRIMARY KEY (id)");
106
+ });
107
+
108
+ test("does not throw ZodError when every constraint has a null definition", async () => {
109
+ const tables = await extractTables(
110
+ mockPool([
111
+ {
112
+ ...baseTableRow,
113
+ constraints: [
114
+ {
115
+ ...baseConstraint,
116
+ name: '"orphan_a"',
117
+ constraint_type: "c",
118
+ key_columns: [],
119
+ definition: null,
120
+ },
121
+ {
122
+ ...baseConstraint,
123
+ name: '"orphan_b"',
124
+ constraint_type: "c",
125
+ key_columns: [],
126
+ definition: null,
127
+ },
128
+ ],
129
+ },
130
+ ]),
131
+ NO_BACKOFF,
132
+ );
133
+
134
+ expect(tables).toHaveLength(1);
135
+ expect(tables[0]?.constraints).toEqual([]);
136
+ });
137
+
138
+ test("returns all constraints when every definition is valid", async () => {
139
+ const tables = await extractTables(
140
+ mockPool([
141
+ {
142
+ ...baseTableRow,
143
+ constraints: [
144
+ {
145
+ ...baseConstraint,
146
+ name: '"users_pkey"',
147
+ definition: "PRIMARY KEY (id)",
148
+ },
149
+ {
150
+ ...baseConstraint,
151
+ name: '"users_email_key"',
152
+ constraint_type: "u",
153
+ key_columns: ['"email"'],
154
+ definition: "UNIQUE (email)",
155
+ },
156
+ ],
157
+ },
158
+ ]),
159
+ NO_BACKOFF,
160
+ );
161
+
162
+ expect(tables[0]?.constraints.map((c) => c.name)).toEqual([
163
+ '"users_pkey"',
164
+ '"users_email_key"',
165
+ ]);
166
+ });
167
+
168
+ test("recovers when pg_get_constraintdef is NULL on first attempt but resolved on retry", async () => {
169
+ const tables = await extractTables(
170
+ mockPoolSequence(
171
+ // attempt 1: one constraint has NULL definition
172
+ [
173
+ {
174
+ ...baseTableRow,
175
+ constraints: [
176
+ {
177
+ ...baseConstraint,
178
+ name: '"users_racy_chk"',
179
+ constraint_type: "c",
180
+ key_columns: [],
181
+ definition: null,
182
+ },
183
+ ],
184
+ },
185
+ ],
186
+ // attempt 2: constraint resolves on retry
187
+ [
188
+ {
189
+ ...baseTableRow,
190
+ constraints: [
191
+ {
192
+ ...baseConstraint,
193
+ name: '"users_racy_chk"',
194
+ constraint_type: "c",
195
+ key_columns: [],
196
+ definition: "CHECK (id > 0)",
197
+ },
198
+ ],
199
+ },
200
+ ],
201
+ ),
202
+ { retries: 2, backoffMs: 0 },
203
+ );
204
+ expect(tables).toHaveLength(1);
205
+ expect(tables[0]?.constraints).toHaveLength(1);
206
+ expect(tables[0]?.constraints[0]?.name).toBe('"users_racy_chk"');
207
+ expect(tables[0]?.constraints[0]?.definition).toBe("CHECK (id > 0)");
208
+ });
209
+ });
@@ -12,6 +12,10 @@ import {
12
12
  type PrivilegeProps,
13
13
  privilegePropsSchema,
14
14
  } from "../base.privilege-diff.ts";
15
+ import {
16
+ type ExtractRetryOptions,
17
+ extractWithDefinitionRetry,
18
+ } from "../extract-with-retry.ts";
15
19
 
16
20
  const RelationPersistenceSchema = z.enum([
17
21
  "p", // permanent
@@ -82,6 +86,15 @@ const tableConstraintPropsSchema = z.object({
82
86
 
83
87
  export type TableConstraintProps = z.infer<typeof tableConstraintPropsSchema>;
84
88
 
89
+ // pg_get_constraintdef(oid, pretty) can return NULL under the same conditions
90
+ // as pg_get_indexdef: races with concurrent DDL, transient catalog
91
+ // inconsistencies, recovery edges. An unreadable constraint cannot be diffed,
92
+ // so we accept NULL here and filter the constraint out at extraction time
93
+ // rather than crashing the whole catalog parse with a ZodError.
94
+ const tableConstraintRowSchema = tableConstraintPropsSchema.extend({
95
+ definition: z.string().nullable(),
96
+ });
97
+
85
98
  const tablePropsSchema = z.object({
86
99
  schema: z.string(),
87
100
  name: z.string(),
@@ -94,6 +107,7 @@ const tablePropsSchema = z.object({
94
107
  has_subclasses: z.boolean(),
95
108
  is_populated: z.boolean(),
96
109
  replica_identity: ReplicaIdentitySchema,
110
+ replica_identity_index: z.string().nullable().optional(),
97
111
  is_partition: z.boolean(),
98
112
  options: z.array(z.string()).nullable(),
99
113
  partition_bound: z.string().nullable(),
@@ -107,8 +121,13 @@ const tablePropsSchema = z.object({
107
121
  privileges: z.array(privilegePropsSchema),
108
122
  });
109
123
 
124
+ const tableRowSchema = tablePropsSchema.extend({
125
+ constraints: z.array(tableConstraintRowSchema).optional(),
126
+ });
127
+
110
128
  type TablePrivilegeProps = PrivilegeProps;
111
129
  export type TableProps = z.infer<typeof tablePropsSchema>;
130
+ type TableRow = z.infer<typeof tableRowSchema>;
112
131
 
113
132
  export class Table extends BasePgModel implements TableLikeObject {
114
133
  public readonly schema: TableProps["schema"];
@@ -122,6 +141,7 @@ export class Table extends BasePgModel implements TableLikeObject {
122
141
  public readonly has_subclasses: TableProps["has_subclasses"];
123
142
  public readonly is_populated: TableProps["is_populated"];
124
143
  public readonly replica_identity: TableProps["replica_identity"];
144
+ public readonly replica_identity_index: TableProps["replica_identity_index"];
125
145
  public readonly is_partition: TableProps["is_partition"];
126
146
  public readonly options: TableProps["options"];
127
147
  public readonly partition_bound: TableProps["partition_bound"];
@@ -151,6 +171,7 @@ export class Table extends BasePgModel implements TableLikeObject {
151
171
  this.has_subclasses = props.has_subclasses;
152
172
  this.is_populated = props.is_populated;
153
173
  this.replica_identity = props.replica_identity;
174
+ this.replica_identity_index = props.replica_identity_index ?? null;
154
175
  this.is_partition = props.is_partition;
155
176
  this.options = props.options;
156
177
  this.partition_bound = props.partition_bound;
@@ -182,6 +203,7 @@ export class Table extends BasePgModel implements TableLikeObject {
182
203
  row_security: this.row_security,
183
204
  force_row_security: this.force_row_security,
184
205
  replica_identity: this.replica_identity,
206
+ replica_identity_index: this.replica_identity_index,
185
207
  options: this.options,
186
208
  // Partition membership can be altered via ATTACH/DETACH
187
209
  parent_schema: this.parent_schema,
@@ -216,8 +238,17 @@ export class Table extends BasePgModel implements TableLikeObject {
216
238
  }
217
239
  }
218
240
 
219
- export async function extractTables(pool: Pool): Promise<Table[]> {
220
- const { rows: tableRows } = await pool.query<TableProps>(sql`
241
+ export async function extractTables(
242
+ pool: Pool,
243
+ options?: ExtractRetryOptions,
244
+ ): Promise<Table[]> {
245
+ const tableRows = await extractWithDefinitionRetry({
246
+ label: "table constraints",
247
+ options,
248
+ hasNullDefinition: (row: TableRow) =>
249
+ row.constraints?.some((c) => c.definition === null) ?? false,
250
+ query: async () => {
251
+ const result = await pool.query<TableProps>(sql`
221
252
  with extension_oids as (
222
253
  select objid
223
254
  from pg_depend d
@@ -236,6 +267,14 @@ with extension_oids as (
236
267
  c.relhassubclass as has_subclasses,
237
268
  c.relispopulated as is_populated,
238
269
  c.relreplident as replica_identity,
270
+ (
271
+ select quote_ident(ri_class.relname)
272
+ from pg_index ri
273
+ join pg_class ri_class on ri_class.oid = ri.indexrelid
274
+ where ri.indrelid = c.oid
275
+ and ri.indisreplident is true
276
+ limit 1
277
+ ) as replica_identity_index,
239
278
  c.relispartition as is_partition,
240
279
  c.reloptions as options,
241
280
  pg_get_expr(c.relpartbound, c.oid) as partition_bound,
@@ -266,6 +305,7 @@ select
266
305
  t.has_subclasses,
267
306
  t.is_populated,
268
307
  t.replica_identity,
308
+ t.replica_identity_index,
269
309
  t.is_partition,
270
310
  t.options,
271
311
  t.partition_bound,
@@ -453,13 +493,18 @@ from
453
493
  left join pg_attrdef ad on a.attrelid = ad.adrelid and a.attnum = ad.adnum
454
494
  left join pg_type ty on ty.oid = a.atttypid
455
495
  group by
456
- t.oid, t.schema, t.name, t.persistence, t.row_security, t.force_row_security, t.has_indexes, t.has_rules, t.has_triggers, t.has_subclasses, t.is_populated, t.replica_identity, t.is_partition, t.options, t.partition_bound, t.partition_by, t.owner, t.parent_schema, t.parent_name
496
+ t.oid, t.schema, t.name, t.persistence, t.row_security, t.force_row_security, t.has_indexes, t.has_rules, t.has_triggers, t.has_subclasses, t.is_populated, t.replica_identity, t.replica_identity_index, t.is_partition, t.options, t.partition_bound, t.partition_by, t.owner, t.parent_schema, t.parent_name
457
497
  order by
458
498
  t.schema, t.name
459
499
  `);
460
- // Validate and parse each row using the Zod schema
461
- const validatedRows = tableRows.map((row: unknown) =>
462
- tablePropsSchema.parse(row),
463
- );
500
+ return result.rows.map((row: unknown) => tableRowSchema.parse(row));
501
+ },
502
+ });
503
+ const validatedRows = tableRows.map((row): TableProps => {
504
+ const filteredConstraints = row.constraints?.filter(
505
+ (c): c is TableConstraintProps => c.definition !== null,
506
+ );
507
+ return { ...row, constraints: filteredConstraints };
508
+ });
464
509
  return validatedRows.map((row: TableProps) => new Table(row));
465
510
  }
@@ -0,0 +1,113 @@
1
+ import { describe, expect, test } from "bun:test";
2
+ import type { Pool } from "pg";
3
+ import { extractTriggers, Trigger } from "./trigger.model.ts";
4
+
5
+ const baseRow = {
6
+ schema: "public",
7
+ table_name: '"users"',
8
+ table_relkind: "r" as const,
9
+ function_schema: "public",
10
+ function_name: '"my_fn"',
11
+ trigger_type: 7,
12
+ enabled: "O" as const,
13
+ is_internal: false,
14
+ deferrable: false,
15
+ initially_deferred: false,
16
+ argument_count: 0,
17
+ column_numbers: null,
18
+ arguments: [] as string[],
19
+ when_condition: null,
20
+ old_table: null,
21
+ new_table: null,
22
+ is_partition_clone: false,
23
+ parent_trigger_name: null,
24
+ parent_table_schema: null,
25
+ parent_table_name: null,
26
+ is_on_partitioned_table: false,
27
+ owner: "postgres",
28
+ comment: null,
29
+ };
30
+
31
+ const mockPool = (rows: unknown[]): Pool =>
32
+ ({ query: async () => ({ rows }) }) as unknown as Pool;
33
+
34
+ const mockPoolSequence = (...attempts: unknown[][]): Pool => {
35
+ let i = 0;
36
+ return {
37
+ query: async () => ({
38
+ rows: attempts[Math.min(i++, attempts.length - 1)],
39
+ }),
40
+ } as unknown as Pool;
41
+ };
42
+
43
+ const NO_BACKOFF = { backoffMs: 0 } as const;
44
+
45
+ describe("extractTriggers", () => {
46
+ test("skips rows where pg_get_triggerdef returned NULL after exhausting retries", async () => {
47
+ const triggers = await extractTriggers(
48
+ mockPool([
49
+ {
50
+ ...baseRow,
51
+ name: '"good_trg"',
52
+ definition:
53
+ "CREATE TRIGGER good_trg BEFORE INSERT ON users FOR EACH ROW EXECUTE FUNCTION my_fn()",
54
+ },
55
+ { ...baseRow, name: '"orphan_trg"', definition: null },
56
+ ]),
57
+ NO_BACKOFF,
58
+ );
59
+
60
+ expect(triggers).toHaveLength(1);
61
+ expect(triggers[0]).toBeInstanceOf(Trigger);
62
+ expect(triggers[0]?.name).toBe('"good_trg"');
63
+ });
64
+
65
+ test("does not throw ZodError when the only row has a null definition", async () => {
66
+ await expect(
67
+ extractTriggers(
68
+ mockPool([{ ...baseRow, name: '"orphan"', definition: null }]),
69
+ NO_BACKOFF,
70
+ ),
71
+ ).resolves.toEqual([]);
72
+ });
73
+
74
+ test("returns all triggers when every row has a valid definition", async () => {
75
+ const triggers = await extractTriggers(
76
+ mockPool([
77
+ {
78
+ ...baseRow,
79
+ name: '"a"',
80
+ definition:
81
+ "CREATE TRIGGER a BEFORE INSERT ON users FOR EACH ROW EXECUTE FUNCTION my_fn()",
82
+ },
83
+ {
84
+ ...baseRow,
85
+ name: '"b"',
86
+ definition:
87
+ "CREATE TRIGGER b AFTER UPDATE ON users FOR EACH ROW EXECUTE FUNCTION my_fn()",
88
+ },
89
+ ]),
90
+ NO_BACKOFF,
91
+ );
92
+ expect(triggers.map((t) => t.name)).toEqual(['"a"', '"b"']);
93
+ });
94
+
95
+ test("recovers when pg_get_triggerdef is NULL on first attempt but resolved on retry", async () => {
96
+ const triggers = await extractTriggers(
97
+ mockPoolSequence(
98
+ [{ ...baseRow, name: '"racy_trg"', definition: null }],
99
+ [
100
+ {
101
+ ...baseRow,
102
+ name: '"racy_trg"',
103
+ definition:
104
+ "CREATE TRIGGER racy_trg BEFORE INSERT ON users FOR EACH ROW EXECUTE FUNCTION my_fn()",
105
+ },
106
+ ],
107
+ ),
108
+ { retries: 2, backoffMs: 0 },
109
+ );
110
+ expect(triggers).toHaveLength(1);
111
+ expect(triggers[0]?.name).toBe('"racy_trg"');
112
+ });
113
+ });
@@ -2,6 +2,10 @@ import { sql } from "@ts-safeql/sql-tag";
2
2
  import type { Pool } from "pg";
3
3
  import z from "zod";
4
4
  import { BasePgModel } from "../base.model.ts";
5
+ import {
6
+ type ExtractRetryOptions,
7
+ extractWithDefinitionRetry,
8
+ } from "../extract-with-retry.ts";
5
9
 
6
10
  const TriggerEnabledSchema = z.enum([
7
11
  "O", // ORIGIN - trigger fires in "origin" and "local" replica modes
@@ -46,6 +50,15 @@ const triggerPropsSchema = z.object({
46
50
  comment: z.string().nullable(),
47
51
  });
48
52
 
53
+ // pg_get_triggerdef(oid, pretty) can return NULL when the trigger (its
54
+ // pg_trigger row) is dropped between catalog scan and resolution, or under
55
+ // transient catalog state. An unreadable trigger cannot be diffed, so we
56
+ // accept NULL here and filter the row out at extraction time rather than
57
+ // crashing the whole catalog parse with a ZodError.
58
+ const triggerRowSchema = triggerPropsSchema.extend({
59
+ definition: z.string().nullable(),
60
+ });
61
+
49
62
  export type TriggerProps = z.infer<typeof triggerPropsSchema>;
50
63
 
51
64
  export class Trigger extends BasePgModel {
@@ -154,8 +167,16 @@ export class Trigger extends BasePgModel {
154
167
  }
155
168
  }
156
169
 
157
- export async function extractTriggers(pool: Pool): Promise<Trigger[]> {
158
- const { rows: triggerRows } = await pool.query<TriggerProps>(sql`
170
+ export async function extractTriggers(
171
+ pool: Pool,
172
+ options?: ExtractRetryOptions,
173
+ ): Promise<Trigger[]> {
174
+ const triggerRows = await extractWithDefinitionRetry({
175
+ label: "triggers",
176
+ options,
177
+ hasNullDefinition: (row) => row.definition === null,
178
+ query: async () => {
179
+ const result = await pool.query<TriggerProps>(sql`
159
180
  with extension_trigger_oids as (
160
181
  select objid
161
182
  from pg_depend d
@@ -260,9 +281,11 @@ export async function extractTriggers(pool: Pool): Promise<Trigger[]> {
260
281
 
261
282
  order by 1, 2
262
283
  `);
263
- // Validate and parse each row using the Zod schema
264
- const validatedRows = triggerRows.map((row: unknown) =>
265
- triggerPropsSchema.parse(row),
284
+ return result.rows.map((row: unknown) => triggerRowSchema.parse(row));
285
+ },
286
+ });
287
+ const validatedRows = triggerRows.filter(
288
+ (row): row is TriggerProps => row.definition !== null,
266
289
  );
267
290
  return validatedRows.map((row: TriggerProps) => new Trigger(row));
268
291
  }