@supabase/pg-delta 1.0.0-alpha.20 → 1.0.0-alpha.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81) hide show
  1. package/dist/core/catalog.diff.js +4 -4
  2. package/dist/core/catalog.model.d.ts +8 -1
  3. package/dist/core/catalog.model.js +9 -8
  4. package/dist/core/expand-replace-dependencies.js +23 -0
  5. package/dist/core/objects/extract-with-retry.d.ts +36 -0
  6. package/dist/core/objects/extract-with-retry.js +51 -0
  7. package/dist/core/objects/index/index.diff.js +0 -1
  8. package/dist/core/objects/index/index.model.d.ts +2 -3
  9. package/dist/core/objects/index/index.model.js +17 -6
  10. package/dist/core/objects/materialized-view/materialized-view.model.d.ts +2 -1
  11. package/dist/core/objects/materialized-view/materialized-view.model.js +20 -4
  12. package/dist/core/objects/procedure/procedure.model.d.ts +2 -1
  13. package/dist/core/objects/procedure/procedure.model.js +20 -4
  14. package/dist/core/objects/publication/changes/publication.alter.d.ts +1 -1
  15. package/dist/core/objects/rls-policy/rls-policy.diff.js +13 -1
  16. package/dist/core/objects/rule/rule.model.d.ts +2 -1
  17. package/dist/core/objects/rule/rule.model.js +20 -3
  18. package/dist/core/objects/sequence/sequence.diff.d.ts +2 -1
  19. package/dist/core/objects/sequence/sequence.diff.js +41 -9
  20. package/dist/core/objects/table/changes/table.alter.d.ts +16 -1
  21. package/dist/core/objects/table/changes/table.alter.js +39 -6
  22. package/dist/core/objects/table/table.diff.js +40 -17
  23. package/dist/core/objects/table/table.model.d.ts +6 -1
  24. package/dist/core/objects/table/table.model.js +50 -12
  25. package/dist/core/objects/trigger/trigger.model.d.ts +2 -1
  26. package/dist/core/objects/trigger/trigger.model.js +20 -4
  27. package/dist/core/objects/utils.d.ts +1 -0
  28. package/dist/core/objects/utils.js +3 -0
  29. package/dist/core/objects/view/view.model.d.ts +2 -1
  30. package/dist/core/objects/view/view.model.js +20 -4
  31. package/dist/core/plan/create.js +3 -1
  32. package/dist/core/plan/types.d.ts +8 -0
  33. package/dist/core/post-diff-normalization.d.ts +36 -0
  34. package/dist/core/post-diff-normalization.js +202 -0
  35. package/dist/core/sort/cycle-breakers.d.ts +15 -0
  36. package/dist/core/sort/cycle-breakers.js +269 -0
  37. package/dist/core/sort/sort-changes.js +97 -43
  38. package/dist/core/sort/utils.d.ts +10 -0
  39. package/dist/core/sort/utils.js +28 -0
  40. package/package.json +1 -1
  41. package/src/core/catalog.diff.ts +4 -3
  42. package/src/core/catalog.model.ts +20 -8
  43. package/src/core/expand-replace-dependencies.test.ts +139 -5
  44. package/src/core/expand-replace-dependencies.ts +24 -0
  45. package/src/core/objects/extract-with-retry.test.ts +143 -0
  46. package/src/core/objects/extract-with-retry.ts +87 -0
  47. package/src/core/objects/index/index.diff.ts +0 -1
  48. package/src/core/objects/index/index.model.test.ts +37 -1
  49. package/src/core/objects/index/index.model.ts +25 -6
  50. package/src/core/objects/materialized-view/materialized-view.model.test.ts +93 -0
  51. package/src/core/objects/materialized-view/materialized-view.model.ts +27 -4
  52. package/src/core/objects/procedure/procedure.model.test.ts +117 -0
  53. package/src/core/objects/procedure/procedure.model.ts +28 -5
  54. package/src/core/objects/publication/changes/publication.alter.ts +1 -1
  55. package/src/core/objects/rls-policy/rls-policy.diff.ts +19 -1
  56. package/src/core/objects/rule/rule.model.test.ts +99 -0
  57. package/src/core/objects/rule/rule.model.ts +28 -4
  58. package/src/core/objects/sequence/sequence.diff.test.ts +93 -1
  59. package/src/core/objects/sequence/sequence.diff.ts +43 -10
  60. package/src/core/objects/table/changes/table.alter.test.ts +26 -23
  61. package/src/core/objects/table/changes/table.alter.ts +66 -10
  62. package/src/core/objects/table/table.diff.test.ts +43 -0
  63. package/src/core/objects/table/table.diff.ts +52 -23
  64. package/src/core/objects/table/table.model.test.ts +209 -0
  65. package/src/core/objects/table/table.model.ts +62 -14
  66. package/src/core/objects/trigger/trigger.model.test.ts +113 -0
  67. package/src/core/objects/trigger/trigger.model.ts +28 -5
  68. package/src/core/objects/utils.ts +3 -0
  69. package/src/core/objects/view/view.model.test.ts +90 -0
  70. package/src/core/objects/view/view.model.ts +28 -5
  71. package/src/core/plan/create.ts +3 -1
  72. package/src/core/plan/types.ts +8 -0
  73. package/src/core/{post-diff-cycle-breaking.test.ts → post-diff-normalization.test.ts} +168 -160
  74. package/src/core/post-diff-normalization.ts +260 -0
  75. package/src/core/sort/cycle-breakers.test.ts +476 -0
  76. package/src/core/sort/cycle-breakers.ts +311 -0
  77. package/src/core/sort/sort-changes.ts +135 -50
  78. package/src/core/sort/utils.ts +38 -0
  79. package/dist/core/post-diff-cycle-breaking.d.ts +0 -29
  80. package/dist/core/post-diff-cycle-breaking.js +0 -209
  81. package/src/core/post-diff-cycle-breaking.ts +0 -317
@@ -1,6 +1,6 @@
1
1
  import debug from "debug";
2
2
  import { expandReplaceDependencies } from "./expand-replace-dependencies.js";
3
- import { normalizePostDiffCycles } from "./post-diff-cycle-breaking.js";
3
+ import { normalizePostDiffChanges } from "./post-diff-normalization.js";
4
4
  const debugCatalog = debug("pg-delta:catalog");
5
5
  import { diffAggregates } from "./objects/aggregate/aggregate.diff.js";
6
6
  import { DefaultPrivilegeState } from "./objects/base.default-privileges.js";
@@ -126,7 +126,7 @@ export function diffCatalogs(main, branch, options) {
126
126
  changes.push(...diffProcedures(diffContext, main.procedures, branch.procedures));
127
127
  changes.push(...diffRlsPolicies(main.rlsPolicies, branch.rlsPolicies));
128
128
  changes.push(...diffSchemas(diffContext, main.schemas, branch.schemas));
129
- changes.push(...diffSequences(diffContext, main.sequences, branch.sequences, branch.tables));
129
+ changes.push(...diffSequences(diffContext, main.sequences, branch.sequences, branch.tables, main.tables));
130
130
  changes.push(...diffTables(diffContext, main.tables, branch.tables));
131
131
  changes.push(...diffTriggers(main.triggers, branch.triggers, branch.indexableObjects));
132
132
  changes.push(...diffEventTriggers(diffContext, main.eventTriggers, branch.eventTriggers));
@@ -159,10 +159,10 @@ export function diffCatalogs(main, branch, options) {
159
159
  mainCatalog: main,
160
160
  branchCatalog: branch,
161
161
  });
162
- filteredChanges = normalizePostDiffCycles({
162
+ filteredChanges = normalizePostDiffChanges({
163
163
  changes: expandedDependencies.changes,
164
- mainCatalog: main,
165
164
  replacedTableIds: expandedDependencies.replacedTableIds,
165
+ branchTables: branch.tables,
166
166
  });
167
167
  debugCatalog("changes catalog diff: %O", stringifyWithBigInt(filteredChanges, 2));
168
168
  return filteredChanges;
@@ -102,5 +102,12 @@ export declare class Catalog {
102
102
  * to `createPlan`.
103
103
  */
104
104
  export declare function createEmptyCatalog(version: number, currentUser: string): Promise<Catalog>;
105
- export declare function extractCatalog(pool: Pool): Promise<Catalog>;
105
+ interface ExtractCatalogOptions {
106
+ /**
107
+ * Number of retry attempts for catalog extractors when `pg_get_*def()`
108
+ * returns NULL for at least one row. See `ExtractRetryOptions.retries`.
109
+ */
110
+ extractRetries?: number;
111
+ }
112
+ export declare function extractCatalog(pool: Pool, options?: ExtractCatalogOptions): Promise<Catalog>;
106
113
  export {};
@@ -190,7 +190,8 @@ export async function createEmptyCatalog(version, currentUser) {
190
190
  currentUser,
191
191
  });
192
192
  }
193
- export async function extractCatalog(pool) {
193
+ export async function extractCatalog(pool, options = {}) {
194
+ const retryOptions = { retries: options.extractRetries };
194
195
  const [aggregates, collations, compositeTypes, domains, enums, extensions, indexes, materializedViews, subscriptions, publications, procedures, rlsPolicies, roles, schemas, sequences, tables, triggers, eventTriggers, rules, ranges, views, foreignDataWrappers, servers, userMappings, foreignTables, depends, version, currentUser,] = await Promise.all([
195
196
  extractAggregates(pool).then(listToRecord),
196
197
  extractCollations(pool).then(listToRecord),
@@ -198,21 +199,21 @@ export async function extractCatalog(pool) {
198
199
  extractDomains(pool).then(listToRecord),
199
200
  extractEnums(pool).then(listToRecord),
200
201
  extractExtensions(pool).then(listToRecord),
201
- extractIndexes(pool).then(listToRecord),
202
- extractMaterializedViews(pool).then(listToRecord),
202
+ extractIndexes(pool, retryOptions).then(listToRecord),
203
+ extractMaterializedViews(pool, retryOptions).then(listToRecord),
203
204
  extractSubscriptions(pool).then(listToRecord),
204
205
  extractPublications(pool).then(listToRecord),
205
- extractProcedures(pool).then(listToRecord),
206
+ extractProcedures(pool, retryOptions).then(listToRecord),
206
207
  extractRlsPolicies(pool).then(listToRecord),
207
208
  extractRoles(pool).then(listToRecord),
208
209
  extractSchemas(pool).then(listToRecord),
209
210
  extractSequences(pool).then(listToRecord),
210
- extractTables(pool).then(listToRecord),
211
- extractTriggers(pool).then(listToRecord),
211
+ extractTables(pool, retryOptions).then(listToRecord),
212
+ extractTriggers(pool, retryOptions).then(listToRecord),
212
213
  extractEventTriggers(pool).then(listToRecord),
213
- extractRules(pool).then(listToRecord),
214
+ extractRules(pool, retryOptions).then(listToRecord),
214
215
  extractRanges(pool).then(listToRecord),
215
- extractViews(pool).then(listToRecord),
216
+ extractViews(pool, retryOptions).then(listToRecord),
216
217
  extractForeignDataWrappers(pool).then(listToRecord),
217
218
  extractServers(pool).then(listToRecord),
218
219
  extractUserMappings(pool).then(listToRecord),
@@ -53,6 +53,29 @@ export function expandReplaceDependencies({ changes, mainCatalog, branchCatalog,
53
53
  replaceRoots.add(id);
54
54
  }
55
55
  }
56
+ // Drop-only objects (no matching create — typically a renamed-away table or
57
+ // type) are also expansion roots: anything in main that depends on them via
58
+ // pg_depend must drop before the parent does. Without this seed, a renamed
59
+ // table whose dependent view stays in the branch catalog (with an updated
60
+ // definition that no longer references the old name) would still try to
61
+ // run DROP TABLE old_name while old_name is referenced by the view, which
62
+ // PostgreSQL refuses without CASCADE. The walk below promotes the surviving
63
+ // dependent to DROP+CREATE so its drop is sequenced before the parent drop.
64
+ for (const id of droppedIds) {
65
+ if (createdIds.has(id))
66
+ continue;
67
+ if (replaceRoots.has(id))
68
+ continue;
69
+ // Only seed for object kinds that can have catalog dependents we know
70
+ // how to recreate via buildReplaceChanges.
71
+ if (id.startsWith("table:") ||
72
+ id.startsWith("view:") ||
73
+ id.startsWith("materializedView:") ||
74
+ id.startsWith("type:") ||
75
+ id.startsWith("domain:")) {
76
+ replaceRoots.add(id);
77
+ }
78
+ }
56
79
  if (replaceRoots.size === 0) {
57
80
  return {
58
81
  changes,
@@ -0,0 +1,36 @@
1
+ export interface ExtractRetryOptions {
2
+ /**
3
+ * Number of retry attempts to make when a `pg_get_*def()` call returns NULL
4
+ * for at least one row. Total attempts is `retries + 1`. Negative values are
5
+ * clamped to 0. When this option is undefined the value is read from the
6
+ * `PGDELTA_EXTRACT_RETRIES` environment variable, falling back to a default
7
+ * of 1 (i.e. the first attempt plus one retry, 2 attempts total).
8
+ */
9
+ retries?: number;
10
+ /**
11
+ * Delay between retry attempts in milliseconds; the actual wait is
12
+ * `backoffMs * attemptNumber` (linear). Defaults to 50. Set to 0 in tests.
13
+ */
14
+ backoffMs?: number;
15
+ }
16
+ export declare function resolveExtractRetries(option?: number): number;
17
+ /**
18
+ * Runs `query()` up to `retries + 1` times, retrying as long as at least one
19
+ * row in the result satisfies `hasNullDefinition`. The retry exists because
20
+ * `pg_get_<x>def()` can return NULL transiently when the underlying catalog
21
+ * row is dropped concurrently or the catalog state is in flux; in practice a
22
+ * second attempt either no longer sees the dropped row or succeeds in
23
+ * resolving the definition.
24
+ *
25
+ * Returns the rows from the first attempt with no offenders, or — once
26
+ * retries are exhausted — the rows from the final attempt (still containing
27
+ * offenders). The caller is responsible for the final filter so this helper
28
+ * works for both flat schemas (definition on the row) and nested schemas
29
+ * (definition on a child collection, e.g. table constraints).
30
+ */
31
+ export declare function extractWithDefinitionRetry<TRow>(params: {
32
+ label: string;
33
+ query: () => Promise<TRow[]>;
34
+ hasNullDefinition: (row: TRow) => boolean;
35
+ options?: ExtractRetryOptions;
36
+ }): Promise<TRow[]>;
@@ -0,0 +1,51 @@
1
+ import debug from "debug";
2
+ const log = debug("pg-delta:extract");
3
+ const DEFAULT_RETRIES = 1;
4
+ const DEFAULT_BACKOFF_MS = 50;
5
+ export function resolveExtractRetries(option) {
6
+ if (typeof option === "number" && Number.isFinite(option)) {
7
+ return Math.max(0, Math.floor(option));
8
+ }
9
+ const envVal = process.env.PGDELTA_EXTRACT_RETRIES;
10
+ if (envVal !== undefined && envVal !== "") {
11
+ const n = Number(envVal);
12
+ if (Number.isFinite(n))
13
+ return Math.max(0, Math.floor(n));
14
+ }
15
+ return DEFAULT_RETRIES;
16
+ }
17
+ const sleep = (ms) => ms > 0 ? new Promise((r) => setTimeout(r, ms)) : Promise.resolve();
18
+ /**
19
+ * Runs `query()` up to `retries + 1` times, retrying as long as at least one
20
+ * row in the result satisfies `hasNullDefinition`. The retry exists because
21
+ * `pg_get_<x>def()` can return NULL transiently when the underlying catalog
22
+ * row is dropped concurrently or the catalog state is in flux; in practice a
23
+ * second attempt either no longer sees the dropped row or succeeds in
24
+ * resolving the definition.
25
+ *
26
+ * Returns the rows from the first attempt with no offenders, or — once
27
+ * retries are exhausted — the rows from the final attempt (still containing
28
+ * offenders). The caller is responsible for the final filter so this helper
29
+ * works for both flat schemas (definition on the row) and nested schemas
30
+ * (definition on a child collection, e.g. table constraints).
31
+ */
32
+ export async function extractWithDefinitionRetry(params) {
33
+ const retries = resolveExtractRetries(params.options?.retries);
34
+ const backoffMs = params.options?.backoffMs ?? DEFAULT_BACKOFF_MS;
35
+ const maxAttempts = retries + 1;
36
+ let rows = [];
37
+ for (let attempt = 1; attempt <= maxAttempts; attempt++) {
38
+ rows = await params.query();
39
+ const offenders = rows.filter(params.hasNullDefinition).length;
40
+ if (offenders === 0)
41
+ return rows;
42
+ if (attempt < maxAttempts) {
43
+ log("%s: pg_get_*def() returned NULL for %d row(s) on attempt %d/%d; retrying", params.label, offenders, attempt, maxAttempts);
44
+ await sleep(backoffMs * attempt);
45
+ }
46
+ else {
47
+ log("%s: pg_get_*def() returned NULL for %d row(s) after %d attempt(s); skipping", params.label, offenders, maxAttempts);
48
+ }
49
+ }
50
+ return rows;
51
+ }
@@ -74,7 +74,6 @@ export function diffIndexes(main, branch, branchIndexableObjects) {
74
74
  "nulls_not_distinct",
75
75
  "immediate",
76
76
  "is_clustered",
77
- "is_replica_identity",
78
77
  "column_collations",
79
78
  "operator_classes",
80
79
  "column_options",
@@ -1,6 +1,7 @@
1
1
  import type { Pool } from "pg";
2
2
  import z from "zod";
3
3
  import { BasePgModel } from "../base.model.ts";
4
+ import { type ExtractRetryOptions } from "../extract-with-retry.ts";
4
5
  declare const indexPropsSchema: z.ZodObject<{
5
6
  schema: z.ZodString;
6
7
  table_name: z.ZodString;
@@ -97,7 +98,6 @@ export declare class Index extends BasePgModel {
97
98
  nulls_not_distinct: boolean;
98
99
  immediate: boolean;
99
100
  is_clustered: boolean;
100
- is_replica_identity: boolean;
101
101
  column_collations: (string | null)[];
102
102
  operator_classes: string[];
103
103
  column_options: number[];
@@ -132,7 +132,6 @@ export declare class Index extends BasePgModel {
132
132
  nulls_not_distinct: boolean;
133
133
  immediate: boolean;
134
134
  is_clustered: boolean;
135
- is_replica_identity: boolean;
136
135
  index_expressions: string | null;
137
136
  partial_predicate: string | null;
138
137
  table_relkind: "r" | "m" | "p";
@@ -146,5 +145,5 @@ export declare class Index extends BasePgModel {
146
145
  };
147
146
  };
148
147
  }
149
- export declare function extractIndexes(pool: Pool): Promise<Index[]>;
148
+ export declare function extractIndexes(pool: Pool, options?: ExtractRetryOptions): Promise<Index[]>;
150
149
  export {};
@@ -1,6 +1,7 @@
1
1
  import { sql } from "@ts-safeql/sql-tag";
2
2
  import z from "zod";
3
3
  import { BasePgModel } from "../base.model.js";
4
+ import { extractWithDefinitionRetry, } from "../extract-with-retry.js";
4
5
  const TableRelkindSchema = z.enum([
5
6
  "r", // table (regular relation)
6
7
  "m", // materialized view
@@ -136,7 +137,11 @@ export class Index extends BasePgModel {
136
137
  nulls_not_distinct: this.nulls_not_distinct,
137
138
  immediate: this.immediate,
138
139
  is_clustered: this.is_clustered,
139
- is_replica_identity: this.is_replica_identity,
140
+ // is_replica_identity excluded: the table's `replica_identity` /
141
+ // `replica_identity_index` is the source of truth, set via
142
+ // ALTER TABLE ... REPLICA IDENTITY USING INDEX. Including this flag here
143
+ // would trigger spurious DROP+CREATE of the index whenever the table's
144
+ // replica identity changes.
140
145
  // key_columns excluded: contains attribute numbers that can differ between databases
141
146
  // even when indexes are logically identical. The definition field already captures
142
147
  // the logical structure using column names, so we compare by definition instead.
@@ -184,8 +189,13 @@ export class Index extends BasePgModel {
184
189
  };
185
190
  }
186
191
  }
187
- export async function extractIndexes(pool) {
188
- const { rows: indexRows } = await pool.query(sql `
192
+ export async function extractIndexes(pool, options) {
193
+ const indexRows = await extractWithDefinitionRetry({
194
+ label: "indexes",
195
+ options,
196
+ hasNullDefinition: (row) => row.definition === null,
197
+ query: async () => {
198
+ const result = await pool.query(sql `
189
199
  with extension_oids as (
190
200
  select objid
191
201
  from pg_depend d
@@ -341,8 +351,9 @@ export async function extractIndexes(pool) {
341
351
 
342
352
  order by 1, 2
343
353
  `);
344
- const validatedRows = indexRows
345
- .map((row) => indexRowSchema.parse(row))
346
- .filter((row) => row.definition !== null);
354
+ return result.rows.map((row) => indexRowSchema.parse(row));
355
+ },
356
+ });
357
+ const validatedRows = indexRows.filter((row) => row.definition !== null);
347
358
  return validatedRows.map((row) => new Index(row));
348
359
  }
@@ -2,6 +2,7 @@ import type { Pool } from "pg";
2
2
  import z from "zod";
3
3
  import { BasePgModel, type TableLikeObject } from "../base.model.ts";
4
4
  import { type PrivilegeProps } from "../base.privilege-diff.ts";
5
+ import { type ExtractRetryOptions } from "../extract-with-retry.ts";
5
6
  declare const materializedViewPropsSchema: z.ZodObject<{
6
7
  schema: z.ZodString;
7
8
  name: z.ZodString;
@@ -148,5 +149,5 @@ export declare class MaterializedView extends BasePgModel implements TableLikeOb
148
149
  };
149
150
  };
150
151
  }
151
- export declare function extractMaterializedViews(pool: Pool): Promise<MaterializedView[]>;
152
+ export declare function extractMaterializedViews(pool: Pool, options?: ExtractRetryOptions): Promise<MaterializedView[]>;
152
153
  export {};
@@ -2,6 +2,7 @@ import { sql } from "@ts-safeql/sql-tag";
2
2
  import z from "zod";
3
3
  import { BasePgModel, columnPropsSchema, } from "../base.model.js";
4
4
  import { privilegePropsSchema, } from "../base.privilege-diff.js";
5
+ import { extractWithDefinitionRetry, } from "../extract-with-retry.js";
5
6
  import { ReplicaIdentitySchema } from "../table/table.model.js";
6
7
  const materializedViewPropsSchema = z.object({
7
8
  schema: z.string(),
@@ -23,6 +24,14 @@ const materializedViewPropsSchema = z.object({
23
24
  columns: z.array(columnPropsSchema),
24
25
  privileges: z.array(privilegePropsSchema),
25
26
  });
27
+ // pg_get_viewdef(oid) can return NULL when the underlying matview (or its
28
+ // pg_rewrite row) is dropped between catalog scan and resolution, or under
29
+ // transient catalog state during recovery. An unreadable matview cannot be
30
+ // diffed, so we accept NULL here and filter the row out at extraction time
31
+ // rather than crashing the whole catalog parse with a ZodError.
32
+ const materializedViewRowSchema = materializedViewPropsSchema.extend({
33
+ definition: z.string().nullable(),
34
+ });
26
35
  export class MaterializedView extends BasePgModel {
27
36
  schema;
28
37
  name;
@@ -114,8 +123,13 @@ export class MaterializedView extends BasePgModel {
114
123
  };
115
124
  }
116
125
  }
117
- export async function extractMaterializedViews(pool) {
118
- const { rows: mvRows } = await pool.query(sql `
126
+ export async function extractMaterializedViews(pool, options) {
127
+ const mvRows = await extractWithDefinitionRetry({
128
+ label: "materialized views",
129
+ options,
130
+ hasNullDefinition: (row) => row.definition === null,
131
+ query: async () => {
132
+ const result = await pool.query(sql `
119
133
  with extension_oids as (
120
134
  select
121
135
  objid
@@ -220,7 +234,9 @@ group by
220
234
  order by
221
235
  c.relnamespace::regnamespace, c.relname
222
236
  `);
223
- // Validate and parse each row using the Zod schema
224
- const validatedRows = mvRows.map((row) => materializedViewPropsSchema.parse(row));
237
+ return result.rows.map((row) => materializedViewRowSchema.parse(row));
238
+ },
239
+ });
240
+ const validatedRows = mvRows.filter((row) => row.definition !== null);
225
241
  return validatedRows.map((row) => new MaterializedView(row));
226
242
  }
@@ -2,6 +2,7 @@ import type { Pool } from "pg";
2
2
  import z from "zod";
3
3
  import { BasePgModel } from "../base.model.ts";
4
4
  import { type PrivilegeProps } from "../base.privilege-diff.ts";
5
+ import { type ExtractRetryOptions } from "../extract-with-retry.ts";
5
6
  declare const procedurePropsSchema: z.ZodObject<{
6
7
  schema: z.ZodString;
7
8
  name: z.ZodString;
@@ -128,5 +129,5 @@ export declare class Procedure extends BasePgModel {
128
129
  }[];
129
130
  };
130
131
  }
131
- export declare function extractProcedures(pool: Pool): Promise<Procedure[]>;
132
+ export declare function extractProcedures(pool: Pool, options?: ExtractRetryOptions): Promise<Procedure[]>;
132
133
  export {};
@@ -2,6 +2,7 @@ import { sql } from "@ts-safeql/sql-tag";
2
2
  import z from "zod";
3
3
  import { BasePgModel } from "../base.model.js";
4
4
  import { privilegePropsSchema, } from "../base.privilege-diff.js";
5
+ import { extractWithDefinitionRetry, } from "../extract-with-retry.js";
5
6
  const FunctionKindSchema = z.enum([
6
7
  "f", // function
7
8
  "p", // procedure
@@ -56,6 +57,14 @@ const procedurePropsSchema = z.object({
56
57
  comment: z.string().nullable(),
57
58
  privileges: z.array(privilegePropsSchema),
58
59
  });
60
+ // pg_get_functiondef(oid) can return NULL when the function (its pg_proc
61
+ // row) is dropped between catalog scan and resolution, or under transient
62
+ // catalog state. An unreadable function cannot be diffed, so we accept NULL
63
+ // here and filter the row out at extraction time rather than crashing the
64
+ // whole catalog parse with a ZodError.
65
+ const procedureRowSchema = procedurePropsSchema.extend({
66
+ definition: z.string().nullable(),
67
+ });
59
68
  export class Procedure extends BasePgModel {
60
69
  schema;
61
70
  name;
@@ -163,8 +172,13 @@ export class Procedure extends BasePgModel {
163
172
  };
164
173
  }
165
174
  }
166
- export async function extractProcedures(pool) {
167
- const { rows: procedureRows } = await pool.query(sql `
175
+ export async function extractProcedures(pool, options) {
176
+ const procedureRows = await extractWithDefinitionRetry({
177
+ label: "procedures",
178
+ options,
179
+ hasNullDefinition: (row) => row.definition === null,
180
+ query: async () => {
181
+ const result = await pool.query(sql `
168
182
  with extension_oids as (
169
183
  select
170
184
  objid
@@ -236,7 +250,9 @@ from
236
250
  order by
237
251
  1, 2
238
252
  `);
239
- // Validate and parse each row using the Zod schema
240
- const validatedRows = procedureRows.map((row) => procedurePropsSchema.parse(row));
253
+ return result.rows.map((row) => procedureRowSchema.parse(row));
254
+ },
255
+ });
256
+ const validatedRows = procedureRows.filter((row) => row.definition !== null);
241
257
  return validatedRows.map((row) => new Procedure(row));
242
258
  }
@@ -37,7 +37,7 @@ export declare class AlterPublicationAddTables extends AlterPublicationChange {
37
37
  export declare class AlterPublicationDropTables extends AlterPublicationChange {
38
38
  readonly publication: Publication;
39
39
  readonly scope: "object";
40
- private readonly tables;
40
+ readonly tables: PublicationTableProps[];
41
41
  constructor(props: {
42
42
  publication: Publication;
43
43
  tables: PublicationTableProps[];
@@ -35,7 +35,19 @@ export function diffRlsPolicies(main, branch) {
35
35
  "permissive",
36
36
  ];
37
37
  const nonAlterablePropsChanged = hasNonAlterableChanges(mainRlsPolicy, branchRlsPolicy, NON_ALTERABLE_FIELDS, {});
38
- if (nonAlterablePropsChanged) {
38
+ // The set of relations and procedures that the policy's USING / WITH
39
+ // CHECK expressions reference is recorded by PostgreSQL in pg_depend
40
+ // (recordDependencyOnExpr at policy creation). When that set changes
41
+ // it is unsafe to ALTER POLICY in place: the old reference target may
42
+ // be dropped in the same plan, and the new reference target may only
43
+ // exist after the create phase. Drop+create lets the sort phase order
44
+ // the policy's drop before the referenced object's drop and the
45
+ // policy's recreate after the referenced object's create.
46
+ const referencedDependenciesChanged = hasNonAlterableChanges(mainRlsPolicy, branchRlsPolicy, ["referenced_procedures", "referenced_relations"], {
47
+ referenced_procedures: deepEqual,
48
+ referenced_relations: deepEqual,
49
+ });
50
+ if (nonAlterablePropsChanged || referencedDependenciesChanged) {
39
51
  // Replace the entire RLS policy (drop + create)
40
52
  changes.push(new DropRlsPolicy({ policy: mainRlsPolicy }), new CreateRlsPolicy({ policy: branchRlsPolicy }));
41
53
  }
@@ -1,6 +1,7 @@
1
1
  import type { Pool } from "pg";
2
2
  import z from "zod";
3
3
  import { BasePgModel } from "../base.model.ts";
4
+ import { type ExtractRetryOptions } from "../extract-with-retry.ts";
4
5
  declare const RuleEnabledStateSchema: z.ZodEnum<{
5
6
  O: "O";
6
7
  D: "D";
@@ -68,5 +69,5 @@ export declare class Rule extends BasePgModel {
68
69
  };
69
70
  get relationStableId(): string;
70
71
  }
71
- export declare function extractRules(pool: Pool): Promise<Rule[]>;
72
+ export declare function extractRules(pool: Pool, options?: ExtractRetryOptions): Promise<Rule[]>;
72
73
  export {};
@@ -1,6 +1,7 @@
1
1
  import { sql } from "@ts-safeql/sql-tag";
2
2
  import z from "zod";
3
3
  import { BasePgModel } from "../base.model.js";
4
+ import { extractWithDefinitionRetry, } from "../extract-with-retry.js";
4
5
  import { stableId } from "../utils.js";
5
6
  const RuleEventSchema = z.enum(["SELECT", "INSERT", "UPDATE", "DELETE"]);
6
7
  const RuleEnabledStateSchema = z.enum(["O", "D", "R", "A"]);
@@ -24,6 +25,14 @@ const rulePropsSchema = z.object({
24
25
  comment: z.string().nullable(),
25
26
  columns: z.array(z.string()),
26
27
  });
28
+ // pg_get_ruledef(oid, pretty) can return NULL when the rule (its pg_rewrite
29
+ // row) is dropped between catalog scan and resolution, or under transient
30
+ // catalog state. An unreadable rule cannot be diffed, so we accept NULL here
31
+ // and filter the row out at extraction time rather than crashing the whole
32
+ // catalog parse with a ZodError.
33
+ const ruleRowSchema = rulePropsSchema.extend({
34
+ definition: z.string().nullable(),
35
+ });
27
36
  export class Rule extends BasePgModel {
28
37
  schema;
29
38
  name;
@@ -82,8 +91,13 @@ export class Rule extends BasePgModel {
82
91
  }
83
92
  }
84
93
  }
85
- export async function extractRules(pool) {
86
- const { rows: ruleRows } = await pool.query(sql `
94
+ export async function extractRules(pool, options) {
95
+ const ruleRows = await extractWithDefinitionRetry({
96
+ label: "rules",
97
+ options,
98
+ hasNullDefinition: (row) => row.definition === null,
99
+ query: async () => {
100
+ const result = await pool.query(sql `
87
101
  WITH extension_rule_oids AS (
88
102
  SELECT
89
103
  objid
@@ -149,6 +163,9 @@ export async function extractRules(pool) {
149
163
  ORDER BY
150
164
  1, 3, 2
151
165
  `);
152
- const validatedRows = ruleRows.map((row) => rulePropsSchema.parse(row));
166
+ return result.rows.map((row) => ruleRowSchema.parse(row));
167
+ },
168
+ });
169
+ const validatedRows = ruleRows.filter((row) => row.definition !== null);
153
170
  return validatedRows.map((row) => new Rule(row));
154
171
  }
@@ -11,7 +11,8 @@ type SequenceOrColumnSetDefaultChange = AlterTableAlterColumnSetDefault | Sequen
11
11
  * @param main - The sequences in the main catalog.
12
12
  * @param branch - The sequences in the branch catalog.
13
13
  * @param branchTables - The tables in the branch catalog (used to check if owning tables are being dropped).
14
+ * @param mainTables - The tables in the main catalog (used to detect when a same-name sequence will be cascade-dropped because its main-side owning table is going away).
14
15
  * @returns A list of changes to apply to main to make it match branch.
15
16
  */
16
- export declare function diffSequences(ctx: Pick<ObjectDiffContext, "version" | "currentUser" | "defaultPrivilegeState">, main: Record<string, Sequence>, branch: Record<string, Sequence>, branchTables?: Record<string, Table>): SequenceOrColumnSetDefaultChange[];
17
+ export declare function diffSequences(ctx: Pick<ObjectDiffContext, "version" | "currentUser" | "defaultPrivilegeState">, main: Record<string, Sequence>, branch: Record<string, Sequence>, branchTables?: Record<string, Table>, mainTables?: Record<string, Table>): SequenceOrColumnSetDefaultChange[];
17
18
  export {};
@@ -14,9 +14,10 @@ import { GrantSequencePrivileges, RevokeGrantOptionSequencePrivileges, RevokeSeq
14
14
  * @param main - The sequences in the main catalog.
15
15
  * @param branch - The sequences in the branch catalog.
16
16
  * @param branchTables - The tables in the branch catalog (used to check if owning tables are being dropped).
17
+ * @param mainTables - The tables in the main catalog (used to detect when a same-name sequence will be cascade-dropped because its main-side owning table is going away).
17
18
  * @returns A list of changes to apply to main to make it match branch.
18
19
  */
19
- export function diffSequences(ctx, main, branch, branchTables = {}) {
20
+ export function diffSequences(ctx, main, branch, branchTables = {}, mainTables = {}) {
20
21
  const { created, dropped, altered } = diffObjects(main, branch);
21
22
  const changes = [];
22
23
  for (const sequenceId of created) {
@@ -87,14 +88,34 @@ export function diffSequences(ctx, main, branch, branchTables = {}) {
87
88
  const branchSequence = branch[sequenceId];
88
89
  // Check if non-alterable properties have changed
89
90
  // These require dropping and recreating the sequence
90
- const NON_ALTERABLE_FIELDS = [
91
- "data_type",
92
- "persistence",
93
- ];
91
+ const NON_ALTERABLE_FIELDS = ["persistence"];
94
92
  const nonAlterablePropsChanged = hasNonAlterableChanges(mainSequence, branchSequence, NON_ALTERABLE_FIELDS);
95
- if (nonAlterablePropsChanged) {
96
- // Replace the entire sequence (drop + create)
97
- changes.push(new DropSequence({ sequence: mainSequence }), new CreateSequence({ sequence: branchSequence }));
93
+ // A sequence kept the same name (so it's "altered" in catalog terms),
94
+ // but its main-side owning table is being removed by this plan (renamed
95
+ // away or simply dropped). PostgreSQL will cascade-drop the sequence
96
+ // alongside the table, leaving any later CREATE TABLE / column-default
97
+ // that depends on the sequence name pointing at nothing. Treat this
98
+ // like a non-alterable change so we recreate the sequence after the
99
+ // owning table is dropped.
100
+ const mainOwnedByTableId = mainSequence.owned_by_schema && mainSequence.owned_by_table
101
+ ? `table:${mainSequence.owned_by_schema}.${mainSequence.owned_by_table}`
102
+ : null;
103
+ const cascadeOrphanedByOwningTable = mainOwnedByTableId !== null &&
104
+ mainTables[mainOwnedByTableId] !== undefined &&
105
+ branchTables[mainOwnedByTableId] === undefined;
106
+ if (nonAlterablePropsChanged || cascadeOrphanedByOwningTable) {
107
+ // When the owning table is going away in this plan, PostgreSQL will
108
+ // cascade-drop the sequence as part of the DROP TABLE. Emitting an
109
+ // explicit DROP SEQUENCE here would (a) introduce an irresolvable
110
+ // DropSequence ↔ DropTable cycle on the catalog edges between the
111
+ // sequence and the dropped column, and (b) be redundant with the
112
+ // cascade. The CreateSequence below restores the sequence under its
113
+ // original name so any same-name reference in a later CREATE TABLE
114
+ // resolves correctly.
115
+ if (!cascadeOrphanedByOwningTable) {
116
+ changes.push(new DropSequence({ sequence: mainSequence }));
117
+ }
118
+ changes.push(new CreateSequence({ sequence: branchSequence }));
98
119
  // Re-apply OWNED BY if present on branch
99
120
  if (branchSequence.owned_by_schema !== null &&
100
121
  branchSequence.owned_by_table !== null &&
@@ -131,7 +152,8 @@ export function diffSequences(ctx, main, branch, branchTables = {}) {
131
152
  }
132
153
  else {
133
154
  // Only alterable properties changed - emit ALTER for options/owner
134
- const optionsChanged = mainSequence.increment !== branchSequence.increment ||
155
+ const optionsChanged = mainSequence.data_type !== branchSequence.data_type ||
156
+ mainSequence.increment !== branchSequence.increment ||
135
157
  mainSequence.minimum_value !== branchSequence.minimum_value ||
136
158
  mainSequence.maximum_value !== branchSequence.maximum_value ||
137
159
  mainSequence.start_value !== branchSequence.start_value ||
@@ -139,6 +161,16 @@ export function diffSequences(ctx, main, branch, branchTables = {}) {
139
161
  mainSequence.cycle_option !== branchSequence.cycle_option;
140
162
  if (optionsChanged) {
141
163
  const options = [];
164
+ // `AS <type>` must come before any MIN/MAX/RESTART clauses per the
165
+ // PG ALTER SEQUENCE grammar. Valid types are smallint, integer,
166
+ // bigint — the same set CREATE SEQUENCE accepts — so the universe
167
+ // of legal transitions is closed. PG enforces last_value range at
168
+ // apply time when shrinking; that's the desired behavior because
169
+ // the previous Drop+Create path silently reset last_value to 1
170
+ // (data-loss bug, see Sentry SUPABASE-API-7RS).
171
+ if (mainSequence.data_type !== branchSequence.data_type) {
172
+ options.push("AS", branchSequence.data_type);
173
+ }
142
174
  if (mainSequence.increment !== branchSequence.increment) {
143
175
  options.push("INCREMENT BY", String(branchSequence.increment));
144
176
  }