@supabase/pg-delta 1.0.0-alpha.20 → 1.0.0-alpha.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. package/dist/core/catalog.diff.js +4 -4
  2. package/dist/core/catalog.model.d.ts +8 -1
  3. package/dist/core/catalog.model.js +9 -8
  4. package/dist/core/expand-replace-dependencies.js +23 -0
  5. package/dist/core/objects/extract-with-retry.d.ts +36 -0
  6. package/dist/core/objects/extract-with-retry.js +51 -0
  7. package/dist/core/objects/index/index.diff.js +0 -1
  8. package/dist/core/objects/index/index.model.d.ts +2 -3
  9. package/dist/core/objects/index/index.model.js +17 -6
  10. package/dist/core/objects/materialized-view/materialized-view.model.d.ts +2 -1
  11. package/dist/core/objects/materialized-view/materialized-view.model.js +20 -4
  12. package/dist/core/objects/procedure/procedure.model.d.ts +2 -1
  13. package/dist/core/objects/procedure/procedure.model.js +20 -4
  14. package/dist/core/objects/publication/changes/publication.alter.d.ts +1 -1
  15. package/dist/core/objects/rls-policy/rls-policy.diff.js +13 -1
  16. package/dist/core/objects/rule/rule.model.d.ts +2 -1
  17. package/dist/core/objects/rule/rule.model.js +20 -3
  18. package/dist/core/objects/sequence/sequence.diff.d.ts +2 -1
  19. package/dist/core/objects/sequence/sequence.diff.js +41 -9
  20. package/dist/core/objects/table/changes/table.alter.d.ts +16 -1
  21. package/dist/core/objects/table/changes/table.alter.js +39 -6
  22. package/dist/core/objects/table/table.diff.js +40 -17
  23. package/dist/core/objects/table/table.model.d.ts +6 -1
  24. package/dist/core/objects/table/table.model.js +50 -12
  25. package/dist/core/objects/trigger/trigger.model.d.ts +2 -1
  26. package/dist/core/objects/trigger/trigger.model.js +20 -4
  27. package/dist/core/objects/utils.d.ts +1 -0
  28. package/dist/core/objects/utils.js +3 -0
  29. package/dist/core/objects/view/view.model.d.ts +2 -1
  30. package/dist/core/objects/view/view.model.js +20 -4
  31. package/dist/core/plan/create.js +3 -1
  32. package/dist/core/plan/types.d.ts +8 -0
  33. package/dist/core/post-diff-normalization.d.ts +36 -0
  34. package/dist/core/post-diff-normalization.js +202 -0
  35. package/dist/core/sort/cycle-breakers.d.ts +15 -0
  36. package/dist/core/sort/cycle-breakers.js +269 -0
  37. package/dist/core/sort/sort-changes.js +97 -43
  38. package/dist/core/sort/utils.d.ts +10 -0
  39. package/dist/core/sort/utils.js +28 -0
  40. package/package.json +1 -1
  41. package/src/core/catalog.diff.ts +4 -3
  42. package/src/core/catalog.model.ts +20 -8
  43. package/src/core/expand-replace-dependencies.test.ts +139 -5
  44. package/src/core/expand-replace-dependencies.ts +24 -0
  45. package/src/core/objects/extract-with-retry.test.ts +143 -0
  46. package/src/core/objects/extract-with-retry.ts +87 -0
  47. package/src/core/objects/index/index.diff.ts +0 -1
  48. package/src/core/objects/index/index.model.test.ts +37 -1
  49. package/src/core/objects/index/index.model.ts +25 -6
  50. package/src/core/objects/materialized-view/materialized-view.model.test.ts +93 -0
  51. package/src/core/objects/materialized-view/materialized-view.model.ts +27 -4
  52. package/src/core/objects/procedure/procedure.model.test.ts +117 -0
  53. package/src/core/objects/procedure/procedure.model.ts +28 -5
  54. package/src/core/objects/publication/changes/publication.alter.ts +1 -1
  55. package/src/core/objects/rls-policy/rls-policy.diff.ts +19 -1
  56. package/src/core/objects/rule/rule.model.test.ts +99 -0
  57. package/src/core/objects/rule/rule.model.ts +28 -4
  58. package/src/core/objects/sequence/sequence.diff.test.ts +93 -1
  59. package/src/core/objects/sequence/sequence.diff.ts +43 -10
  60. package/src/core/objects/table/changes/table.alter.test.ts +26 -23
  61. package/src/core/objects/table/changes/table.alter.ts +66 -10
  62. package/src/core/objects/table/table.diff.test.ts +43 -0
  63. package/src/core/objects/table/table.diff.ts +52 -23
  64. package/src/core/objects/table/table.model.test.ts +209 -0
  65. package/src/core/objects/table/table.model.ts +62 -14
  66. package/src/core/objects/trigger/trigger.model.test.ts +113 -0
  67. package/src/core/objects/trigger/trigger.model.ts +28 -5
  68. package/src/core/objects/utils.ts +3 -0
  69. package/src/core/objects/view/view.model.test.ts +90 -0
  70. package/src/core/objects/view/view.model.ts +28 -5
  71. package/src/core/plan/create.ts +3 -1
  72. package/src/core/plan/types.ts +8 -0
  73. package/src/core/{post-diff-cycle-breaking.test.ts → post-diff-normalization.test.ts} +168 -160
  74. package/src/core/post-diff-normalization.ts +260 -0
  75. package/src/core/sort/cycle-breakers.test.ts +476 -0
  76. package/src/core/sort/cycle-breakers.ts +311 -0
  77. package/src/core/sort/sort-changes.ts +135 -50
  78. package/src/core/sort/utils.ts +38 -0
  79. package/dist/core/post-diff-cycle-breaking.d.ts +0 -29
  80. package/dist/core/post-diff-cycle-breaking.js +0 -209
  81. package/src/core/post-diff-cycle-breaking.ts +0 -317
@@ -0,0 +1,209 @@
1
+ import { describe, expect, test } from "bun:test";
2
+ import type { Pool } from "pg";
3
+ import { extractTables, Table } from "./table.model.ts";
4
+
5
+ // Minimal fields required by tablePropsSchema; individual tests override the
6
+ // constraints array (and any other relevant fields).
7
+ const baseTableRow = {
8
+ schema: "public",
9
+ name: '"users"',
10
+ persistence: "p" as const,
11
+ row_security: false,
12
+ force_row_security: false,
13
+ has_indexes: false,
14
+ has_rules: false,
15
+ has_triggers: false,
16
+ has_subclasses: false,
17
+ is_populated: true,
18
+ replica_identity: "d" as const,
19
+ is_partition: false,
20
+ options: null,
21
+ partition_bound: null,
22
+ partition_by: null,
23
+ owner: "postgres",
24
+ comment: null,
25
+ parent_schema: null,
26
+ parent_name: null,
27
+ columns: [],
28
+ privileges: [],
29
+ };
30
+
31
+ const baseConstraint = {
32
+ name: '"users_pkey"',
33
+ constraint_type: "p" as const,
34
+ deferrable: false,
35
+ initially_deferred: false,
36
+ validated: true,
37
+ is_local: true,
38
+ no_inherit: false,
39
+ is_temporal: false,
40
+ is_partition_clone: false,
41
+ parent_constraint_schema: null,
42
+ parent_constraint_name: null,
43
+ parent_table_schema: null,
44
+ parent_table_name: null,
45
+ key_columns: ['"id"'],
46
+ foreign_key_columns: null,
47
+ foreign_key_table: null,
48
+ foreign_key_schema: null,
49
+ foreign_key_table_is_partition: null,
50
+ foreign_key_parent_schema: null,
51
+ foreign_key_parent_table: null,
52
+ foreign_key_effective_schema: null,
53
+ foreign_key_effective_table: null,
54
+ on_update: null,
55
+ on_delete: null,
56
+ match_type: null,
57
+ check_expression: null,
58
+ owner: "postgres",
59
+ comment: null,
60
+ };
61
+
62
+ const mockPool = (rows: unknown[]): Pool =>
63
+ ({ query: async () => ({ rows }) }) as unknown as Pool;
64
+
65
+ const mockPoolSequence = (...attempts: unknown[][]): Pool => {
66
+ let i = 0;
67
+ return {
68
+ query: async () => ({
69
+ rows: attempts[Math.min(i++, attempts.length - 1)],
70
+ }),
71
+ } as unknown as Pool;
72
+ };
73
+
74
+ const NO_BACKOFF = { backoffMs: 0 } as const;
75
+
76
+ describe("extractTables", () => {
77
+ test("skips constraints where pg_get_constraintdef returned NULL after exhausting retries", async () => {
78
+ const tables = await extractTables(
79
+ mockPool([
80
+ {
81
+ ...baseTableRow,
82
+ constraints: [
83
+ {
84
+ ...baseConstraint,
85
+ name: '"users_pkey"',
86
+ definition: "PRIMARY KEY (id)",
87
+ },
88
+ {
89
+ ...baseConstraint,
90
+ name: '"users_orphan_chk"',
91
+ constraint_type: "c",
92
+ key_columns: [],
93
+ definition: null,
94
+ },
95
+ ],
96
+ },
97
+ ]),
98
+ NO_BACKOFF,
99
+ );
100
+
101
+ expect(tables).toHaveLength(1);
102
+ expect(tables[0]).toBeInstanceOf(Table);
103
+ expect(tables[0]?.constraints).toHaveLength(1);
104
+ expect(tables[0]?.constraints[0]?.name).toBe('"users_pkey"');
105
+ expect(tables[0]?.constraints[0]?.definition).toBe("PRIMARY KEY (id)");
106
+ });
107
+
108
+ test("does not throw ZodError when every constraint has a null definition", async () => {
109
+ const tables = await extractTables(
110
+ mockPool([
111
+ {
112
+ ...baseTableRow,
113
+ constraints: [
114
+ {
115
+ ...baseConstraint,
116
+ name: '"orphan_a"',
117
+ constraint_type: "c",
118
+ key_columns: [],
119
+ definition: null,
120
+ },
121
+ {
122
+ ...baseConstraint,
123
+ name: '"orphan_b"',
124
+ constraint_type: "c",
125
+ key_columns: [],
126
+ definition: null,
127
+ },
128
+ ],
129
+ },
130
+ ]),
131
+ NO_BACKOFF,
132
+ );
133
+
134
+ expect(tables).toHaveLength(1);
135
+ expect(tables[0]?.constraints).toEqual([]);
136
+ });
137
+
138
+ test("returns all constraints when every definition is valid", async () => {
139
+ const tables = await extractTables(
140
+ mockPool([
141
+ {
142
+ ...baseTableRow,
143
+ constraints: [
144
+ {
145
+ ...baseConstraint,
146
+ name: '"users_pkey"',
147
+ definition: "PRIMARY KEY (id)",
148
+ },
149
+ {
150
+ ...baseConstraint,
151
+ name: '"users_email_key"',
152
+ constraint_type: "u",
153
+ key_columns: ['"email"'],
154
+ definition: "UNIQUE (email)",
155
+ },
156
+ ],
157
+ },
158
+ ]),
159
+ NO_BACKOFF,
160
+ );
161
+
162
+ expect(tables[0]?.constraints.map((c) => c.name)).toEqual([
163
+ '"users_pkey"',
164
+ '"users_email_key"',
165
+ ]);
166
+ });
167
+
168
+ test("recovers when pg_get_constraintdef is NULL on first attempt but resolved on retry", async () => {
169
+ const tables = await extractTables(
170
+ mockPoolSequence(
171
+ // attempt 1: one constraint has NULL definition
172
+ [
173
+ {
174
+ ...baseTableRow,
175
+ constraints: [
176
+ {
177
+ ...baseConstraint,
178
+ name: '"users_racy_chk"',
179
+ constraint_type: "c",
180
+ key_columns: [],
181
+ definition: null,
182
+ },
183
+ ],
184
+ },
185
+ ],
186
+ // attempt 2: constraint resolves on retry
187
+ [
188
+ {
189
+ ...baseTableRow,
190
+ constraints: [
191
+ {
192
+ ...baseConstraint,
193
+ name: '"users_racy_chk"',
194
+ constraint_type: "c",
195
+ key_columns: [],
196
+ definition: "CHECK (id > 0)",
197
+ },
198
+ ],
199
+ },
200
+ ],
201
+ ),
202
+ { retries: 2, backoffMs: 0 },
203
+ );
204
+ expect(tables).toHaveLength(1);
205
+ expect(tables[0]?.constraints).toHaveLength(1);
206
+ expect(tables[0]?.constraints[0]?.name).toBe('"users_racy_chk"');
207
+ expect(tables[0]?.constraints[0]?.definition).toBe("CHECK (id > 0)");
208
+ });
209
+ });
@@ -12,6 +12,10 @@ import {
12
12
  type PrivilegeProps,
13
13
  privilegePropsSchema,
14
14
  } from "../base.privilege-diff.ts";
15
+ import {
16
+ type ExtractRetryOptions,
17
+ extractWithDefinitionRetry,
18
+ } from "../extract-with-retry.ts";
15
19
 
16
20
  const RelationPersistenceSchema = z.enum([
17
21
  "p", // permanent
@@ -82,6 +86,15 @@ const tableConstraintPropsSchema = z.object({
82
86
 
83
87
  export type TableConstraintProps = z.infer<typeof tableConstraintPropsSchema>;
84
88
 
89
+ // pg_get_constraintdef(oid, pretty) can return NULL under the same conditions
90
+ // as pg_get_indexdef: races with concurrent DDL, transient catalog
91
+ // inconsistencies, recovery edges. An unreadable constraint cannot be diffed,
92
+ // so we accept NULL here and filter the constraint out at extraction time
93
+ // rather than crashing the whole catalog parse with a ZodError.
94
+ const tableConstraintRowSchema = tableConstraintPropsSchema.extend({
95
+ definition: z.string().nullable(),
96
+ });
97
+
85
98
  const tablePropsSchema = z.object({
86
99
  schema: z.string(),
87
100
  name: z.string(),
@@ -94,6 +107,7 @@ const tablePropsSchema = z.object({
94
107
  has_subclasses: z.boolean(),
95
108
  is_populated: z.boolean(),
96
109
  replica_identity: ReplicaIdentitySchema,
110
+ replica_identity_index: z.string().nullable().optional(),
97
111
  is_partition: z.boolean(),
98
112
  options: z.array(z.string()).nullable(),
99
113
  partition_bound: z.string().nullable(),
@@ -107,8 +121,13 @@ const tablePropsSchema = z.object({
107
121
  privileges: z.array(privilegePropsSchema),
108
122
  });
109
123
 
124
+ const tableRowSchema = tablePropsSchema.extend({
125
+ constraints: z.array(tableConstraintRowSchema).optional(),
126
+ });
127
+
110
128
  type TablePrivilegeProps = PrivilegeProps;
111
129
  export type TableProps = z.infer<typeof tablePropsSchema>;
130
+ type TableRow = z.infer<typeof tableRowSchema>;
112
131
 
113
132
  export class Table extends BasePgModel implements TableLikeObject {
114
133
  public readonly schema: TableProps["schema"];
@@ -122,6 +141,7 @@ export class Table extends BasePgModel implements TableLikeObject {
122
141
  public readonly has_subclasses: TableProps["has_subclasses"];
123
142
  public readonly is_populated: TableProps["is_populated"];
124
143
  public readonly replica_identity: TableProps["replica_identity"];
144
+ public readonly replica_identity_index: TableProps["replica_identity_index"];
125
145
  public readonly is_partition: TableProps["is_partition"];
126
146
  public readonly options: TableProps["options"];
127
147
  public readonly partition_bound: TableProps["partition_bound"];
@@ -151,6 +171,7 @@ export class Table extends BasePgModel implements TableLikeObject {
151
171
  this.has_subclasses = props.has_subclasses;
152
172
  this.is_populated = props.is_populated;
153
173
  this.replica_identity = props.replica_identity;
174
+ this.replica_identity_index = props.replica_identity_index ?? null;
154
175
  this.is_partition = props.is_partition;
155
176
  this.options = props.options;
156
177
  this.partition_bound = props.partition_bound;
@@ -182,6 +203,7 @@ export class Table extends BasePgModel implements TableLikeObject {
182
203
  row_security: this.row_security,
183
204
  force_row_security: this.force_row_security,
184
205
  replica_identity: this.replica_identity,
206
+ replica_identity_index: this.replica_identity_index,
185
207
  options: this.options,
186
208
  // Partition membership can be altered via ATTACH/DETACH
187
209
  parent_schema: this.parent_schema,
@@ -216,8 +238,17 @@ export class Table extends BasePgModel implements TableLikeObject {
216
238
  }
217
239
  }
218
240
 
219
- export async function extractTables(pool: Pool): Promise<Table[]> {
220
- const { rows: tableRows } = await pool.query<TableProps>(sql`
241
+ export async function extractTables(
242
+ pool: Pool,
243
+ options?: ExtractRetryOptions,
244
+ ): Promise<Table[]> {
245
+ const tableRows = await extractWithDefinitionRetry({
246
+ label: "table constraints",
247
+ options,
248
+ hasNullDefinition: (row: TableRow) =>
249
+ row.constraints?.some((c) => c.definition === null) ?? false,
250
+ query: async () => {
251
+ const result = await pool.query<TableProps>(sql`
221
252
  with extension_oids as (
222
253
  select objid
223
254
  from pg_depend d
@@ -236,6 +267,14 @@ with extension_oids as (
236
267
  c.relhassubclass as has_subclasses,
237
268
  c.relispopulated as is_populated,
238
269
  c.relreplident as replica_identity,
270
+ (
271
+ select quote_ident(ri_class.relname)
272
+ from pg_index ri
273
+ join pg_class ri_class on ri_class.oid = ri.indexrelid
274
+ where ri.indrelid = c.oid
275
+ and ri.indisreplident is true
276
+ limit 1
277
+ ) as replica_identity_index,
239
278
  c.relispartition as is_partition,
240
279
  c.reloptions as options,
241
280
  pg_get_expr(c.relpartbound, c.oid) as partition_bound,
@@ -266,6 +305,7 @@ select
266
305
  t.has_subclasses,
267
306
  t.is_populated,
268
307
  t.replica_identity,
308
+ t.replica_identity_index,
269
309
  t.is_partition,
270
310
  t.options,
271
311
  t.partition_bound,
@@ -296,13 +336,16 @@ select
296
336
 
297
337
  'key_columns',
298
338
  case
299
- when c.conkey is not null then (
300
- select json_agg(quote_ident(att.attname) order by pk.ordinality)
301
- from unnest(c.conkey) with ordinality as pk(attnum, ordinality)
302
- join pg_attribute att
303
- on att.attrelid = c.conrelid
304
- and att.attnum = pk.attnum
305
- and att.attisdropped = false
339
+ when c.conkey is not null then coalesce(
340
+ (
341
+ select json_agg(quote_ident(att.attname) order by pk.ordinality)
342
+ from unnest(c.conkey) with ordinality as pk(attnum, ordinality)
343
+ join pg_attribute att
344
+ on att.attrelid = c.conrelid
345
+ and att.attnum = pk.attnum
346
+ and att.attisdropped = false
347
+ ),
348
+ '[]'::json
306
349
  )
307
350
  else '[]'::json
308
351
  end,
@@ -450,13 +493,18 @@ from
450
493
  left join pg_attrdef ad on a.attrelid = ad.adrelid and a.attnum = ad.adnum
451
494
  left join pg_type ty on ty.oid = a.atttypid
452
495
  group by
453
- t.oid, t.schema, t.name, t.persistence, t.row_security, t.force_row_security, t.has_indexes, t.has_rules, t.has_triggers, t.has_subclasses, t.is_populated, t.replica_identity, t.is_partition, t.options, t.partition_bound, t.partition_by, t.owner, t.parent_schema, t.parent_name
496
+ t.oid, t.schema, t.name, t.persistence, t.row_security, t.force_row_security, t.has_indexes, t.has_rules, t.has_triggers, t.has_subclasses, t.is_populated, t.replica_identity, t.replica_identity_index, t.is_partition, t.options, t.partition_bound, t.partition_by, t.owner, t.parent_schema, t.parent_name
454
497
  order by
455
498
  t.schema, t.name
456
499
  `);
457
- // Validate and parse each row using the Zod schema
458
- const validatedRows = tableRows.map((row: unknown) =>
459
- tablePropsSchema.parse(row),
460
- );
500
+ return result.rows.map((row: unknown) => tableRowSchema.parse(row));
501
+ },
502
+ });
503
+ const validatedRows = tableRows.map((row): TableProps => {
504
+ const filteredConstraints = row.constraints?.filter(
505
+ (c): c is TableConstraintProps => c.definition !== null,
506
+ );
507
+ return { ...row, constraints: filteredConstraints };
508
+ });
461
509
  return validatedRows.map((row: TableProps) => new Table(row));
462
510
  }
@@ -0,0 +1,113 @@
1
+ import { describe, expect, test } from "bun:test";
2
+ import type { Pool } from "pg";
3
+ import { extractTriggers, Trigger } from "./trigger.model.ts";
4
+
5
+ const baseRow = {
6
+ schema: "public",
7
+ table_name: '"users"',
8
+ table_relkind: "r" as const,
9
+ function_schema: "public",
10
+ function_name: '"my_fn"',
11
+ trigger_type: 7,
12
+ enabled: "O" as const,
13
+ is_internal: false,
14
+ deferrable: false,
15
+ initially_deferred: false,
16
+ argument_count: 0,
17
+ column_numbers: null,
18
+ arguments: [] as string[],
19
+ when_condition: null,
20
+ old_table: null,
21
+ new_table: null,
22
+ is_partition_clone: false,
23
+ parent_trigger_name: null,
24
+ parent_table_schema: null,
25
+ parent_table_name: null,
26
+ is_on_partitioned_table: false,
27
+ owner: "postgres",
28
+ comment: null,
29
+ };
30
+
31
+ const mockPool = (rows: unknown[]): Pool =>
32
+ ({ query: async () => ({ rows }) }) as unknown as Pool;
33
+
34
+ const mockPoolSequence = (...attempts: unknown[][]): Pool => {
35
+ let i = 0;
36
+ return {
37
+ query: async () => ({
38
+ rows: attempts[Math.min(i++, attempts.length - 1)],
39
+ }),
40
+ } as unknown as Pool;
41
+ };
42
+
43
+ const NO_BACKOFF = { backoffMs: 0 } as const;
44
+
45
+ describe("extractTriggers", () => {
46
+ test("skips rows where pg_get_triggerdef returned NULL after exhausting retries", async () => {
47
+ const triggers = await extractTriggers(
48
+ mockPool([
49
+ {
50
+ ...baseRow,
51
+ name: '"good_trg"',
52
+ definition:
53
+ "CREATE TRIGGER good_trg BEFORE INSERT ON users FOR EACH ROW EXECUTE FUNCTION my_fn()",
54
+ },
55
+ { ...baseRow, name: '"orphan_trg"', definition: null },
56
+ ]),
57
+ NO_BACKOFF,
58
+ );
59
+
60
+ expect(triggers).toHaveLength(1);
61
+ expect(triggers[0]).toBeInstanceOf(Trigger);
62
+ expect(triggers[0]?.name).toBe('"good_trg"');
63
+ });
64
+
65
+ test("does not throw ZodError when the only row has a null definition", async () => {
66
+ await expect(
67
+ extractTriggers(
68
+ mockPool([{ ...baseRow, name: '"orphan"', definition: null }]),
69
+ NO_BACKOFF,
70
+ ),
71
+ ).resolves.toEqual([]);
72
+ });
73
+
74
+ test("returns all triggers when every row has a valid definition", async () => {
75
+ const triggers = await extractTriggers(
76
+ mockPool([
77
+ {
78
+ ...baseRow,
79
+ name: '"a"',
80
+ definition:
81
+ "CREATE TRIGGER a BEFORE INSERT ON users FOR EACH ROW EXECUTE FUNCTION my_fn()",
82
+ },
83
+ {
84
+ ...baseRow,
85
+ name: '"b"',
86
+ definition:
87
+ "CREATE TRIGGER b AFTER UPDATE ON users FOR EACH ROW EXECUTE FUNCTION my_fn()",
88
+ },
89
+ ]),
90
+ NO_BACKOFF,
91
+ );
92
+ expect(triggers.map((t) => t.name)).toEqual(['"a"', '"b"']);
93
+ });
94
+
95
+ test("recovers when pg_get_triggerdef is NULL on first attempt but resolved on retry", async () => {
96
+ const triggers = await extractTriggers(
97
+ mockPoolSequence(
98
+ [{ ...baseRow, name: '"racy_trg"', definition: null }],
99
+ [
100
+ {
101
+ ...baseRow,
102
+ name: '"racy_trg"',
103
+ definition:
104
+ "CREATE TRIGGER racy_trg BEFORE INSERT ON users FOR EACH ROW EXECUTE FUNCTION my_fn()",
105
+ },
106
+ ],
107
+ ),
108
+ { retries: 2, backoffMs: 0 },
109
+ );
110
+ expect(triggers).toHaveLength(1);
111
+ expect(triggers[0]?.name).toBe('"racy_trg"');
112
+ });
113
+ });
@@ -2,6 +2,10 @@ import { sql } from "@ts-safeql/sql-tag";
2
2
  import type { Pool } from "pg";
3
3
  import z from "zod";
4
4
  import { BasePgModel } from "../base.model.ts";
5
+ import {
6
+ type ExtractRetryOptions,
7
+ extractWithDefinitionRetry,
8
+ } from "../extract-with-retry.ts";
5
9
 
6
10
  const TriggerEnabledSchema = z.enum([
7
11
  "O", // ORIGIN - trigger fires in "origin" and "local" replica modes
@@ -46,6 +50,15 @@ const triggerPropsSchema = z.object({
46
50
  comment: z.string().nullable(),
47
51
  });
48
52
 
53
+ // pg_get_triggerdef(oid, pretty) can return NULL when the trigger (its
54
+ // pg_trigger row) is dropped between catalog scan and resolution, or under
55
+ // transient catalog state. An unreadable trigger cannot be diffed, so we
56
+ // accept NULL here and filter the row out at extraction time rather than
57
+ // crashing the whole catalog parse with a ZodError.
58
+ const triggerRowSchema = triggerPropsSchema.extend({
59
+ definition: z.string().nullable(),
60
+ });
61
+
49
62
  export type TriggerProps = z.infer<typeof triggerPropsSchema>;
50
63
 
51
64
  export class Trigger extends BasePgModel {
@@ -154,8 +167,16 @@ export class Trigger extends BasePgModel {
154
167
  }
155
168
  }
156
169
 
157
- export async function extractTriggers(pool: Pool): Promise<Trigger[]> {
158
- const { rows: triggerRows } = await pool.query<TriggerProps>(sql`
170
+ export async function extractTriggers(
171
+ pool: Pool,
172
+ options?: ExtractRetryOptions,
173
+ ): Promise<Trigger[]> {
174
+ const triggerRows = await extractWithDefinitionRetry({
175
+ label: "triggers",
176
+ options,
177
+ hasNullDefinition: (row) => row.definition === null,
178
+ query: async () => {
179
+ const result = await pool.query<TriggerProps>(sql`
159
180
  with extension_trigger_oids as (
160
181
  select objid
161
182
  from pg_depend d
@@ -260,9 +281,11 @@ export async function extractTriggers(pool: Pool): Promise<Trigger[]> {
260
281
 
261
282
  order by 1, 2
262
283
  `);
263
- // Validate and parse each row using the Zod schema
264
- const validatedRows = triggerRows.map((row: unknown) =>
265
- triggerPropsSchema.parse(row),
284
+ return result.rows.map((row: unknown) => triggerRowSchema.parse(row));
285
+ },
286
+ });
287
+ const validatedRows = triggerRows.filter(
288
+ (row): row is TriggerProps => row.definition !== null,
266
289
  );
267
290
  return validatedRows.map((row: TriggerProps) => new Trigger(row));
268
291
  }
@@ -71,6 +71,9 @@ export const stableId = {
71
71
  constraint(schema: string, table: string, constraint: string) {
72
72
  return `constraint:${schema}.${table}.${constraint}` as const;
73
73
  },
74
+ index(schema: string, table: string, indexName: string) {
75
+ return `index:${schema}.${table}.${indexName}` as const;
76
+ },
74
77
  comment(objectStableId: string) {
75
78
  return `comment:${objectStableId}` as const;
76
79
  },
@@ -0,0 +1,90 @@
1
+ import { describe, expect, test } from "bun:test";
2
+ import type { Pool } from "pg";
3
+ import { extractViews, View } from "./view.model.ts";
4
+
5
+ const baseRow = {
6
+ schema: "public",
7
+ row_security: false,
8
+ force_row_security: false,
9
+ has_indexes: false,
10
+ has_rules: false,
11
+ has_triggers: false,
12
+ has_subclasses: false,
13
+ is_populated: true,
14
+ replica_identity: "d" as const,
15
+ is_partition: false,
16
+ options: null,
17
+ partition_bound: null,
18
+ owner: "postgres",
19
+ comment: null,
20
+ columns: [],
21
+ privileges: [],
22
+ };
23
+
24
+ const mockPool = (rows: unknown[]): Pool =>
25
+ ({ query: async () => ({ rows }) }) as unknown as Pool;
26
+
27
+ const mockPoolSequence = (...attempts: unknown[][]): Pool => {
28
+ let i = 0;
29
+ return {
30
+ query: async () => ({
31
+ rows: attempts[Math.min(i++, attempts.length - 1)],
32
+ }),
33
+ } as unknown as Pool;
34
+ };
35
+
36
+ const NO_BACKOFF = { backoffMs: 0 } as const;
37
+
38
+ describe("extractViews", () => {
39
+ test("skips rows where pg_get_viewdef returned NULL after exhausting retries", async () => {
40
+ const views = await extractViews(
41
+ mockPool([
42
+ {
43
+ ...baseRow,
44
+ name: '"good_view"',
45
+ definition: "SELECT 1",
46
+ },
47
+ { ...baseRow, name: '"orphan_view"', definition: null },
48
+ ]),
49
+ NO_BACKOFF,
50
+ );
51
+
52
+ expect(views).toHaveLength(1);
53
+ expect(views[0]).toBeInstanceOf(View);
54
+ expect(views[0]?.name).toBe('"good_view"');
55
+ expect(views[0]?.definition).toBe("SELECT 1");
56
+ });
57
+
58
+ test("does not throw ZodError when the only row has a null definition", async () => {
59
+ await expect(
60
+ extractViews(
61
+ mockPool([{ ...baseRow, name: '"orphan"', definition: null }]),
62
+ NO_BACKOFF,
63
+ ),
64
+ ).resolves.toEqual([]);
65
+ });
66
+
67
+ test("returns all views when every row has a valid definition", async () => {
68
+ const views = await extractViews(
69
+ mockPool([
70
+ { ...baseRow, name: '"a"', definition: "SELECT 1" },
71
+ { ...baseRow, name: '"b"', definition: "SELECT 2" },
72
+ ]),
73
+ NO_BACKOFF,
74
+ );
75
+ expect(views.map((v) => v.name)).toEqual(['"a"', '"b"']);
76
+ });
77
+
78
+ test("recovers when pg_get_viewdef is NULL on first attempt but resolved on retry", async () => {
79
+ const views = await extractViews(
80
+ mockPoolSequence(
81
+ [{ ...baseRow, name: '"racy_view"', definition: null }],
82
+ [{ ...baseRow, name: '"racy_view"', definition: "SELECT 42" }],
83
+ ),
84
+ { retries: 2, backoffMs: 0 },
85
+ );
86
+ expect(views).toHaveLength(1);
87
+ expect(views[0]?.name).toBe('"racy_view"');
88
+ expect(views[0]?.definition).toBe("SELECT 42");
89
+ });
90
+ });