@danielhritcu/zenstack-custom 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,917 @@
1
var __defProp = Object.defineProperty;
// esbuild helper: assign an explicit `name` to a function/class so stack
// traces survive minification/bundling.
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });

// src/drizzle/define.ts
import { SQL, sql } from "drizzle-orm";
import { pgEnum as drizzlePgEnum } from "drizzle-orm/pg-core";
// Global-registry symbol under which pgOrmEnum stashes the original enum
// record on the drizzle enum object for later reverse lookup.
var ENUM_MAP = Symbol.for("drizzle:EnumMap");
8
// Turn an enum-like record ({ KEY: "label", ... }) into the plain array of
// label strings that drizzle's pgEnum() expects.
function toPgEnum(record) {
  return Object.keys(record).map((key) => record[key]);
}
11
+ __name(toPgEnum, "toPgEnum");
12
// Create a drizzle pgEnum from an enum-like record, retaining the original
// record on the result (under ENUM_MAP) so enum labels can later be mapped
// back to their keys.
function pgOrmEnum(name, values) {
  const pgValues = Object.values(values);
  const result = drizzlePgEnum(name, pgValues);
  // Stash the source record for reverse lookup by consumers.
  result[ENUM_MAP] = values;
  return result;
}
__name(pgOrmEnum, "pgOrmEnum");
19
// Builder map handed to PgFunction `args` callbacks: each entry returns the
// raw Postgres type name used when rendering a function's argument list.
var pgTypes = {
  date: /* @__PURE__ */ __name(() => "date", "date"),
  uuid: /* @__PURE__ */ __name(() => "uuid", "uuid"),
  text: /* @__PURE__ */ __name(() => "text", "text"),
  integer: /* @__PURE__ */ __name(() => "integer", "integer"),
  bigint: /* @__PURE__ */ __name(() => "bigint", "bigint"),
  boolean: /* @__PURE__ */ __name(() => "boolean", "boolean"),
  numeric: /* @__PURE__ */ __name(() => "numeric", "numeric"),
  timestamptz: /* @__PURE__ */ __name(() => "timestamptz", "timestamptz"),
  timestamp: /* @__PURE__ */ __name(() => "timestamp", "timestamp"),
  jsonb: /* @__PURE__ */ __name(() => "jsonb", "jsonb"),
  json: /* @__PURE__ */ __name(() => "json", "json")
};
32
// Base class for schema-level custom SQL objects (functions, triggers,
// extensions, cron jobs). Extends drizzle's SQL so an instance can be
// interpolated directly into sql`` templates, rendering as its
// (optionally schema-)qualified name.
var CustomObject = class extends SQL {
  static {
    __name(this, "CustomObject");
  }
  name;
  type; // discriminator: "function" | "trigger" | "extension" | "cron" (as used below)
  schema; // optional Postgres schema; omitted => unqualified name
  qualifiedName;
  constructor(name, type, schema) {
    const qn = schema ? `${schema}.${name}` : name;
    // Seed the SQL base class with the raw qualified name's query chunks,
    // then record the descriptive fields (comma expression preserved as-is).
    super(sql.raw(qn).queryChunks), this.name = name, this.type = type, this.schema = schema;
    this.qualifiedName = qn;
  }
  // How drizzle renders this object inside a larger sql`` expression.
  getSQL() {
    return sql.raw(this.qualifiedName);
  }
};
49
// CREATE OR REPLACE FUNCTION wrapper. `opts` supplies returns/body plus
// optional args(pgTypes), language (default plpgsql), security ("definer")
// and search_path.
var PgFunction = class extends CustomObject {
  static {
    __name(this, "PgFunction");
  }
  opts;
  constructor(name, opts) {
    super(name, "function", opts.schema), this.opts = opts;
  }
  // Render the full CREATE OR REPLACE FUNCTION statement; `dialect` is a
  // drizzle PgDialect used to serialize the sql`` body.
  toSQL(dialect) {
    // args callback receives the pgTypes builder map and returns {name: type}.
    const args = this.opts.args ? Object.entries(this.opts.args(pgTypes)).map(([n, type]) => `${n} ${type}`).join(", ") : "";
    const language = this.opts.language ?? "plpgsql";
    const body = dialect.sqlToQuery(this.opts.body()).sql;
    const lines = [
      `CREATE OR REPLACE FUNCTION ${this.qualifiedName}(${args})`,
      `RETURNS ${this.opts.returns}`,
      `LANGUAGE ${language}`
    ];
    if (this.opts.security === "definer") lines.push("SECURITY DEFINER");
    if (this.opts.searchPath) lines.push(`SET search_path = ${this.opts.searchPath}`);
    // NOTE(review): the body is dollar-quoted with $function$; a body that
    // itself contains "$function$" would break the quoting — assumed not to occur.
    lines.push(`AS $function$
${body}
$function$;`);
    return lines.join("\n");
  }
};
74
// A PgFunction fixed to RETURNS trigger. Forwards the shared function options
// and keeps the raw trigger options around for later inspection.
var TriggerFunction = class extends PgFunction {
  static {
    __name(this, "TriggerFunction");
  }
  triggerOpts;
  constructor(name, triggerOpts) {
    super(name, {
      returns: "trigger",
      schema: triggerOpts.schema,
      language: triggerOpts.language,
      security: triggerOpts.security,
      searchPath: triggerOpts.searchPath,
      body: triggerOpts.body
    }), this.triggerOpts = triggerOpts;
  }
};
90
// CREATE OR REPLACE TRIGGER wrapper bound to a drizzle table (`opts.on`).
var PgTrigger = class extends CustomObject {
  static {
    __name(this, "PgTrigger");
  }
  opts;
  tableName; // SQL name of the target table, resolved from the drizzle table
  constructor(name, opts) {
    super(name, "trigger", opts.schema), this.opts = opts;
    // drizzle stores a table's SQL name under this well-known symbol.
    this.tableName = opts.on[Symbol.for("drizzle:Name")];
  }
  // Render CREATE OR REPLACE TRIGGER. `events` entries are either plain
  // strings (uppercased verbatim) or { update: [columns] }, which becomes
  // "UPDATE OF col1, col2".
  toSQL(dialect) {
    const tableName = this.tableName;
    const eventParts = this.opts.events.map((e) => {
      if (typeof e === "string") return e.toUpperCase();
      const cols = e.update.map((c) => c.name).join(", ");
      return `UPDATE OF ${cols}`;
    });
    const lines = [
      `CREATE OR REPLACE TRIGGER ${this.name}`,
      `${this.opts.timing.toUpperCase()} ${eventParts.join(" OR ")}`,
      `ON ${tableName}`,
      `FOR EACH ${this.opts.forEach.toUpperCase()}`
    ];
    // Optional WHEN clause, serialized from a drizzle sql`` condition.
    if (this.opts.condition) {
      const cond = dialect.sqlToQuery(this.opts.condition).sql;
      lines.push(`WHEN (${cond})`);
    }
    lines.push(`EXECUTE FUNCTION ${this.opts.execute.qualifiedName}();`);
    return lines.join("\n");
  }
};
121
// CREATE EXTENSION IF NOT EXISTS wrapper; optional target schema via opts.
var PgExtension = class extends CustomObject {
  static {
    __name(this, "PgExtension");
  }
  opts;
  constructor(name, opts = {}) {
    // The schema is intentionally not passed to the CustomObject qualifier:
    // extensions are referenced by bare name; SCHEMA only affects creation.
    super(name, "extension"), this.opts = opts;
  }
  toSQL() {
    const schema = this.opts.schema ? ` SCHEMA ${this.opts.schema}` : "";
    return `CREATE EXTENSION IF NOT EXISTS "${this.name}"${schema};`;
  }
};
// Factory mirroring pgFunction/pgTrigger/pgCron.
function pgExtension(name, opts = {}) {
  return new PgExtension(name, opts);
}
__name(pgExtension, "pgExtension");
138
// pg_cron job: renders a SELECT cron.schedule(...) call. Job name and
// schedule are single-quote escaped; the command body is dollar-quoted.
// NOTE(review): a command whose serialized SQL contains "$$" would break the
// dollar quoting — assumed not to occur.
var PgCronJob = class extends CustomObject {
  static {
    __name(this, "PgCronJob");
  }
  opts;
  constructor(name, opts) {
    super(name, "cron"), this.opts = opts;
  }
  toSQL(dialect) {
    const cmd = dialect.sqlToQuery(this.opts.command()).sql;
    const escapedName = this.name.replace(/'/g, "''");
    const escapedSchedule = this.opts.schedule.replace(/'/g, "''");
    return `SELECT cron.schedule('${escapedName}', '${escapedSchedule}', $$${cmd}$$);`;
  }
};
// Factory mirroring the other pg* helpers.
function pgCron(name, opts) {
  return new PgCronJob(name, opts);
}
__name(pgCron, "pgCron");
157
// Factory: trigger-returning definitions that also declare `triggersOn` get
// the specialized TriggerFunction wrapper; everything else is a PgFunction.
function pgFunction(name, opts) {
  const wantsTriggerWrapper = opts.returns === "trigger" && "triggersOn" in opts;
  return wantsTriggerWrapper ? new TriggerFunction(name, opts) : new PgFunction(name, opts);
}
__name(pgFunction, "pgFunction");
164
// Factory for PgTrigger, mirroring the other pg* factories in this module.
function pgTrigger(name, opts) {
  const trigger = new PgTrigger(name, opts);
  return trigger;
}
__name(pgTrigger, "pgTrigger");
168
+
169
+ // src/drizzle/cols.ts
170
+ import { sql as sql3 } from "drizzle-orm";
171
+ import { customType, foreignKey, text, timestamp, uuid } from "drizzle-orm/pg-core";
172
+
173
+ // src/drizzle/rls.ts
174
+ import { sql as sql2 } from "drizzle-orm";
175
+ import { pgPolicy } from "drizzle-orm/pg-core";
176
// Session-scoped GUC fragments used by the RLS policies. The application is
// expected to SET these per connection/transaction:
//   rls.app    — slug of the tenant/app the connection acts for
//   rls.user   — identifier of the acting user
//   rls.bypass — "enabled" to bypass tenant isolation (e.g. admin jobs)
// NOTE(review): current_setting() without `missing_ok` raises when the GUC was
// never set — assumed callers always set these; confirm.
var currentAppSlug = sql2`current_setting('rls.app')`;
var hasAppSlug = sql2`(select ${currentAppSlug} is not null)`;
var currentUser = sql2`current_setting('rls.user')`;
var hasUser = sql2`(select ${currentUser} is not null)`;
var hasBypassEnabled = sql2`(select current_setting('rls.bypass') = 'enabled')`;
var TRUE = sql2`true`;
var FALSE = sql2`false`;
// Build the four standard RLS policies (select/insert/update/delete) for a
// tenant-scoped table. Defaults allow rows whose appSlug matches the session's
// rls.app (or when bypass is enabled); each operation can be overridden with a
// `policies.<op>(ctx)` callback that receives the helper fragments in `ctx`.
// NOTE(review): the default update policy is applied as USING while a custom
// update override is applied as WITH CHECK — confirm this asymmetry is
// intentional.
var defaultPolicy = /* @__PURE__ */ __name((name, { table, policies }) => {
  const isSameAppSlug = sql2`${currentAppSlug} = ${table.appSlug}`;
  const selectPolicy = pgPolicy(`rls.${name}.select`, {
    as: "permissive",
    for: "select",
    using: sql2`${isSameAppSlug} or ${hasBypassEnabled}`
  });
  const insertPolicy = pgPolicy(`rls.${name}.insert`, {
    as: "permissive",
    for: "insert",
    withCheck: sql2`${hasAppSlug} or ${hasBypassEnabled}`
  });
  const updatePolicy = pgPolicy(`rls.${name}.update`, {
    as: "permissive",
    for: "update",
    using: sql2`${isSameAppSlug} or ${hasBypassEnabled}`
  });
  const deletePolicy = pgPolicy(`rls.${name}.delete`, {
    as: "permissive",
    for: "delete",
    using: sql2`${hasBypassEnabled} or ${isSameAppSlug}`
  });
  // Helper SQL fragments handed to per-operation override callbacks.
  const ctx = {
    currentAppSlug,
    currentUser,
    hasAppSlug,
    hasUser,
    isSameAppSlug,
    hasBypass: hasBypassEnabled,
    TRUE,
    FALSE
  };
  return [
    typeof policies?.select === "function" ? pgPolicy(`rls.${name}.select`, {
      as: "permissive",
      for: "select",
      using: policies.select(ctx)
    }) : selectPolicy,
    typeof policies?.insert === "function" ? pgPolicy(`rls.${name}.insert`, {
      as: "permissive",
      for: "insert",
      withCheck: policies.insert(ctx)
    }) : insertPolicy,
    typeof policies?.update === "function" ? pgPolicy(`rls.${name}.update`, {
      as: "permissive",
      for: "update",
      withCheck: policies.update(ctx)
    }) : updatePolicy,
    typeof policies?.delete === "function" ? pgPolicy(`rls.${name}.delete`, {
      as: "permissive",
      for: "delete",
      using: policies.delete(ctx)
    }) : deletePolicy
  ].filter(Boolean);
}, "defaultPolicy");
238
+
239
+ // src/drizzle/cols.ts
240
// Numeric column that round-trips through a JS `number`.
// NOTE(review): Number() on a Postgres numeric loses precision beyond the
// float53 range — assumed values stay within it; confirm.
var colNumeric = /* @__PURE__ */ __name((name) => {
  return customType({
    dataType: /* @__PURE__ */ __name(() => "numeric", "dataType"),
    fromDriver: Number,
    toDriver: /* @__PURE__ */ __name((value) => sql3`${value.toString()}::numeric`, "toDriver")
  })(name);
}, "colNumeric");
// Tenant column defaulting to the session's rls.app setting.
var colAppSlug = /* @__PURE__ */ __name((name) => text(name).default(currentAppSlug), "colAppSlug");
// Timezone-aware timestamp mapped to a JS Date.
var colTimestamp = /* @__PURE__ */ __name((name) => timestamp(name, {
  withTimezone: true,
  mode: "date"
}), "colTimestamp");
var colCreatedAt = /* @__PURE__ */ __name((name) => colTimestamp(name).defaultNow().notNull(), "colCreatedAt");
// NOTE(review): identical to colCreatedAt — nothing here bumps the value on
// UPDATE; presumably a trigger maintains updated_at. Verify.
var colUpdatedAt = /* @__PURE__ */ __name((name) => colTimestamp(name).defaultNow().notNull(), "colUpdatedAt");
// Nullable soft-delete marker.
var colDeletedAt = /* @__PURE__ */ __name((name) => colTimestamp(name), "colDeletedAt");
var colId = /* @__PURE__ */ __name((name) => uuid(name).defaultRandom().primaryKey().notNull(), "colId");
// Standard audit column set: who/when created and updated, plus soft delete.
var auditColumns = /* @__PURE__ */ __name(() => ({
  createdBy: text("created_by").default(currentUser).notNull(),
  updatedBy: text("updated_by").default(currentUser).notNull(),
  createdAt: colCreatedAt("created_at"),
  updatedAt: colUpdatedAt("updated_at"),
  deletedAt: colDeletedAt("deleted_at")
}), "auditColumns");
// FK from a tenant table's appSlug to the apps table; cascades on both update
// and delete so renaming or removing an app propagates to tenant rows.
var appSlugForeignKey = /* @__PURE__ */ __name((table, tableName, appsTable) => foreignKey({
  columns: [
    table.appSlug
  ],
  foreignColumns: [
    appsTable.slug
  ],
  name: `${tableName}_app_slug_fkey`
}).onUpdate("cascade").onDelete("cascade"), "appSlugForeignKey");
272
+
273
+ // src/drizzle/generate.ts
274
+ import { execSync } from "child_process";
275
+ import { createHash } from "crypto";
276
+ import { appendFileSync, existsSync, mkdirSync, readdirSync, readFileSync, rmSync, statSync, unlinkSync, writeFileSync } from "fs";
277
+ import { join } from "path";
278
+ import { gunzipSync, gzipSync } from "zlib";
279
+ import { PgDialect } from "drizzle-orm/pg-core";
280
// Parse CLI flags / environment toggles for the migration tooling.
// --dry-run (or DRY_RUN=1) implies verbose output.
function parseFlags(argv = process.argv, env = process.env) {
  const has = (flag) => argv.includes(flag);
  const dryRun = has("--dry-run") || env["DRY_RUN"] === "1";
  return {
    dryRun,
    verbose: dryRun || has("--verbose") || env["VERBOSE"] === "1",
    fix: has("--fix")
  };
}
290
+ __name(parseFlags, "parseFlags");
291
// Content-address a SQL string as "sha256:<hex digest>".
function computeHash(sql4) {
  const digest = createHash("sha256").update(sql4).digest("hex");
  return "sha256:".concat(digest);
}
295
+ __name(computeHash, "computeHash");
296
// Diff two { key -> hash } maps into a key-sorted list of { key, category }
// entries, where category is "added" | "removed" | "modified". Keys whose
// hashes are unchanged are omitted.
function categorizeChanges(prev, curr) {
  const allKeys = new Set([...Object.keys(prev), ...Object.keys(curr)]);
  const changes = [];
  for (const key of [...allKeys].sort()) {
    const inPrev = key in prev;
    const inCurr = key in curr;
    if (!inPrev) {
      changes.push({ key, category: "added" });
    } else if (!inCurr) {
      changes.push({ key, category: "removed" });
    } else if (prev[key] !== curr[key]) {
      changes.push({ key, category: "modified" });
    }
  }
  return changes;
}
324
+ __name(categorizeChanges, "categorizeChanges");
325
// Render a one-line-per-change summary, e.g.
//   Custom SQL changes:
//    + function:foo (new)
// Returns "" when there are no changes.
function formatChangeSummary(changes) {
  if (changes.length === 0) return "";
  const prefixes = {
    added: "+",
    modified: "~",
    removed: "-"
  };
  const labels = {
    added: "new",
    modified: "modified",
    removed: "removed"
  };
  const rendered = [];
  for (const { key, category } of changes) {
    rendered.push(` ${prefixes[category]} ${key} (${labels[category]})`);
  }
  return `Custom SQL changes:
${rendered.join("\n")}`;
}
341
+ __name(formatChangeSummary, "formatChangeSummary");
342
// Naive lockstep line diff: walks both texts in parallel and marks lines as
// kept (" "), removed ("-") or added ("+"). There is no LCS/alignment — a
// single inserted line makes every later pair render as a replacement.
function simpleDiff(prev, curr) {
  const a = prev.split("\n");
  const b = curr.split("\n");
  const out = [];
  let i = 0;
  let j = 0;
  while (i < a.length || j < b.length) {
    if (i >= a.length) {
      // Only additions remain.
      out.push(`+${b[j]}`);
      j++;
    } else if (j >= b.length) {
      // Only removals remain.
      out.push(`-${a[i]}`);
      i++;
    } else if (a[i] === b[j]) {
      out.push(` ${a[i]}`);
      i++;
      j++;
    } else {
      // Treat mismatched pair as a one-line replacement.
      out.push(`-${a[i]}`, `+${b[j]}`);
      i++;
      j++;
    }
  }
  return out.join("\n");
}
368
+ __name(simpleDiff, "simpleDiff");
369
// Prefix every line of the given text with a single-space indent.
function indent(text2) {
  const shifted = [];
  for (const line of text2.split("\n")) shifted.push(` ${line}`);
  return shifted.join("\n");
}
372
+ __name(indent, "indent");
373
// Render per-change detail: full SQL for added objects, a diff for modified
// ones (falling back to the current SQL when the previous snapshot stored only
// hashes), and a one-liner for removals.
function formatVerboseOutput(changes, currentObjects, prevObjects) {
  const sections = [];
  for (const { key, category } of changes) {
    if (category === "added") {
      const sql4 = currentObjects[key]?.sql ?? "";
      sections.push(`+ ${key} (new)
${indent(sql4)}`);
    } else if (category === "modified") {
      const currSql = currentObjects[key]?.sql ?? "";
      const prevSql = prevObjects[key]?.sql ?? "";
      if (prevSql) {
        const diff = simpleDiff(prevSql, currSql);
        sections.push(`~ ${key} (modified)
--- previous
+++ current
${indent(diff)}`);
      } else {
        // Legacy snapshots stored only hashes (see readSnapshotCustomObjects),
        // so there is nothing to diff against.
        sections.push(`~ ${key} (modified \u2014 previous SQL not stored, showing current)
${indent(currSql)}`);
      }
    } else {
      sections.push(`- ${key} (removed)`);
    }
  }
  return sections.join("\n");
}
__name(formatVerboseOutput, "formatVerboseOutput");
// Load drizzle's migration journal (meta/_journal.json) from the migrations dir.
function readJournal(migrationsDir) {
  const journalPath = join(migrationsDir, "meta", "_journal.json");
  return JSON.parse(readFileSync(journalPath, "utf-8"));
}
__name(readJournal, "readJournal");
405
// Extract the leading numeric timestamp from a migration tag or filename
// ("20240101120000_foo" -> "20240101120000"); returns the input unchanged when
// it does not start with digits.
function timestampFromTag(tag) {
  const match = /^(\d+)/.exec(tag);
  return match ? match[1] : tag;
}
408
+ __name(timestampFromTag, "timestampFromTag");
409
// Current local time as a YYYYMMDDHHMMSS string (matches migration filenames).
function nowTimestamp() {
  const now = new Date();
  const pad2 = (n) => String(n).padStart(2, "0");
  const parts = [
    now.getFullYear(),
    pad2(now.getMonth() + 1),
    pad2(now.getDate()),
    pad2(now.getHours()),
    pad2(now.getMinutes()),
    pad2(now.getSeconds())
  ];
  return parts.join("");
}
414
__name(nowTimestamp, "nowTimestamp");
// Path of the snapshot belonging to the newest journal entry, or null when
// the journal is empty. Snapshot files are meta/<timestamp>_snapshot.json.
function getLatestSnapshotPath(migrationsDir, journal) {
  if (journal.entries.length === 0) return null;
  const latest = journal.entries[journal.entries.length - 1];
  return join(migrationsDir, "meta", `${timestampFromTag(latest.tag)}_snapshot.json`);
}
__name(getLatestSnapshotPath, "getLatestSnapshotPath");
// Read the customObjects map out of a snapshot, normalizing the legacy format
// (bare hash strings) into { hash, sql } records; the SQL is then unknown and
// left empty.
function readSnapshotCustomObjects(snapshotPath) {
  const snapshot = JSON.parse(readFileSync(snapshotPath, "utf-8"));
  const raw = snapshot.customObjects ?? {};
  const result = {};
  for (const [key, value] of Object.entries(raw)) {
    if (typeof value === "string") {
      result[key] = {
        hash: value,
        sql: ""
      };
    } else {
      result[key] = value;
    }
  }
  return result;
}
__name(readSnapshotCustomObjects, "readSnapshotCustomObjects");
// Persist the current customObjects map back into a snapshot file in place.
function writeSnapshotCustomObjects(snapshotPath, objects) {
  const snapshot = JSON.parse(readFileSync(snapshotPath, "utf-8"));
  snapshot.customObjects = objects;
  writeFileSync(snapshotPath, JSON.stringify(snapshot, null, 2) + "\n");
}
__name(writeSnapshotCustomObjects, "writeSnapshotCustomObjects");
444
// Deterministic JSON serialization: object keys are emitted in sorted order at
// every level, so structurally equal values always serialize identically
// (used to make gzip payloads and hashes reproducible).
function stableStringify(obj) {
  if (obj === null || typeof obj !== "object") return JSON.stringify(obj);
  if (Array.isArray(obj)) {
    const items = obj.map(stableStringify);
    return `[${items.join(",")}]`;
  }
  const body = Object.keys(obj)
    .sort()
    .map((k) => `${JSON.stringify(k)}:${stableStringify(obj[k])}`)
    .join(",");
  return `{${body}}`;
}
451
__name(stableStringify, "stableStringify");
// Compress each meta/<ts>_snapshot.json into a sibling <sqlStem>.sgz file next
// to its .sql migration. Snapshots with no matching .sql are skipped (and thus
// dropped when meta/ is later cleaned).
function packToSgz(migrationsDir) {
  const metaDir = join(migrationsDir, "meta");
  if (!existsSync(metaDir)) return;
  const snapshotFiles = readdirSync(metaDir).filter((f) => f.endsWith("_snapshot.json"));
  const sqlFiles = readdirSync(migrationsDir).filter((f) => f.endsWith(".sql"));
  for (const file of snapshotFiles) {
    const timestamp2 = timestampFromTag(file);
    const sqlFile = sqlFiles.find((f) => f.startsWith(timestamp2));
    if (!sqlFile) continue;
    const sqlStem = sqlFile.replace(/\.sql$/, "");
    // Key-sorted JSON so the gzip payload is byte-stable across runs.
    const normalized = stableStringify(JSON.parse(readFileSync(join(metaDir, file), "utf-8")));
    const compressed = gzipSync(normalized, {
      level: 9
    });
    writeFileSync(join(migrationsDir, `${sqlStem}.sgz`), compressed);
  }
}
__name(packToSgz, "packToSgz");
// Inverse of packToSgz: expand every .sgz back into meta/<ts>_snapshot.json
// and synthesize a drizzle _journal.json from the directory listing.
// NOTE(review): `when` is set to the parsed filename timestamp, not epoch ms —
// assumed drizzle-kit only relies on relative ordering here; confirm.
function unpackFromSgz(migrationsDir) {
  const metaDir = join(migrationsDir, "meta");
  mkdirSync(metaDir, {
    recursive: true
  });
  const sgzFiles = readdirSync(migrationsDir).filter((f) => f.endsWith(".sgz")).sort();
  const sqlFiles = readdirSync(migrationsDir).filter((f) => f.endsWith(".sql"));
  const entries = [];
  for (let i = 0; i < sgzFiles.length; i++) {
    const stem = sgzFiles[i].replace(/\.sgz$/, "");
    const timestamp2 = timestampFromTag(stem);
    const decompressed = gunzipSync(readFileSync(join(migrationsDir, sgzFiles[i])));
    writeFileSync(join(metaDir, `${timestamp2}_snapshot.json`), decompressed);
    // Prefer the .sql filename as the journal tag; fall back to the sgz stem.
    const sqlFile = sqlFiles.find((f) => f.startsWith(timestamp2));
    const tag = sqlFile ? sqlFile.replace(/\.sql$/, "") : stem;
    entries.push({
      idx: i,
      version: "7",
      when: parseInt(timestamp2, 10),
      tag,
      breakpoints: true
    });
  }
  const journal = {
    version: "7",
    dialect: "postgresql",
    entries
  };
  writeFileSync(join(metaDir, "_journal.json"), JSON.stringify(journal, null, 2) + "\n");
}
__name(unpackFromSgz, "unpackFromSgz");
// Remove the transient meta/ directory (journal + snapshots live only in .sgz
// between commands).
function cleanMeta(migrationsDir) {
  const metaDir = join(migrationsDir, "meta");
  if (existsSync(metaDir)) rmSync(metaDir, {
    recursive: true
  });
}
__name(cleanMeta, "cleanMeta");
// Sorted listing of the packed snapshot (.sgz) files in the migrations dir.
function listSgzFiles(migrationsDir) {
  return readdirSync(migrationsDir).filter((f) => f.endsWith(".sgz")).sort();
}
__name(listSgzFiles, "listSgzFiles");
512
// Detect .sgz files that share the same leading timestamp (migration ordering
// would be ambiguous). Relies on listSgzFiles() returning a sorted listing so
// duplicates are adjacent.
function validateTimestamps(migrationsDir) {
  const duplicates = [];
  let prevTimestamp = "";
  for (const file of listSgzFiles(migrationsDir)) {
    const ts = timestampFromTag(file);
    if (ts === prevTimestamp) duplicates.push(file);
    prevTimestamp = ts;
  }
  return duplicates;
}
__name(validateTimestamps, "validateTimestamps");
// Verify the snapshot chain: each snapshot's prevId must equal the id of the
// snapshot that sorts immediately before it. Returns the broken links.
function validateOrder(migrationsDir) {
  const sgzFiles = listSgzFiles(migrationsDir);
  const issues = [];
  let prevContentId = "";
  for (const file of sgzFiles) {
    const content = JSON.parse(gunzipSync(readFileSync(join(migrationsDir, file))).toString());
    if (prevContentId !== "" && content.prevId !== prevContentId) {
      issues.push({
        file,
        expected: prevContentId,
        actual: content.prevId
      });
    }
    prevContentId = content.id;
  }
  return issues;
}
__name(validateOrder, "validateOrder");
// Repair broken chain links: rewrite each offending migration (.sql + .sgz)
// under a fresh timestamp placed just after its expected predecessor, with the
// snapshot's prevId patched to the expected id. The original files are deleted.
// NOTE(review): `ts <= lastUsedTs` compares timestamp strings lexicographically
// — correct only while both keep the same digit count; confirm.
function fixOrder(migrationsDir, issues) {
  let lastUsedTs = "";
  for (const issue of issues) {
    const sgzPath = join(migrationsDir, issue.file);
    const sqlFile = issue.file.replace(/\.sgz$/, ".sql");
    const sqlPath = join(migrationsDir, sqlFile);
    let sqlContent = "";
    // Best-effort read: the .sql twin may be missing; rewrite it empty then.
    try {
      sqlContent = readFileSync(sqlPath, "utf-8");
    } catch {
    }
    const snapshot = JSON.parse(gunzipSync(readFileSync(sgzPath)).toString());
    snapshot.prevId = issue.expected;
    // Keep the descriptive part of the filename, swap only the timestamp.
    const suffix = sqlFile.replace(/^\d+/, "");
    try {
      unlinkSync(sqlPath);
    } catch {
    }
    unlinkSync(sgzPath);
    let ts = findTimestampAfterDependency(migrationsDir, issue.expected);
    if (ts <= lastUsedTs) ts = String(Number(lastUsedTs) + 1);
    lastUsedTs = ts;
    const newStem = `${ts}${suffix.replace(/\.sql$/, "")}`;
    writeFileSync(join(migrationsDir, `${newStem}.sql`), sqlContent);
    writeFileSync(join(migrationsDir, `${newStem}.sgz`), gzipSync(stableStringify(snapshot), {
      level: 9
    }));
  }
}
__name(fixOrder, "fixOrder");
// Timestamp one tick after the migration whose snapshot id equals expectedId;
// falls back to the current time when no such migration exists.
function findTimestampAfterDependency(migrationsDir, expectedId) {
  for (const file of listSgzFiles(migrationsDir)) {
    const content = JSON.parse(gunzipSync(readFileSync(join(migrationsDir, file))).toString());
    if (content.id === expectedId) {
      const depTs = timestampFromTag(file);
      return String(Number(depTs) + 1);
    }
  }
  return nowTimestamp();
}
__name(findTimestampAfterDependency, "findTimestampAfterDependency");
582
// Pick out every CustomObject (function/trigger/extension/cron definition)
// exported by a schema module; all other exports are ignored.
function collectCustomObjects(moduleExports) {
  const found = [];
  for (const candidate of Object.values(moduleExports)) {
    if (candidate instanceof CustomObject) found.push(candidate);
  }
  return found;
}
585
+ __name(collectCustomObjects, "collectCustomObjects");
586
// Single indirection point for rendering a custom object to SQL text through
// the given dialect.
function serializeCustomObject(obj, dialect) {
  return obj["toSQL"](dialect);
}
589
+ __name(serializeCustomObject, "serializeCustomObject");
590
// Derive a best-effort DROP/unschedule statement for a removed custom object
// by pattern-matching the CREATE statement recorded in the previous snapshot.
// Returns null when the type is unknown or the recorded SQL does not match.
function generateDropSQL(key, createSQL) {
  const [type] = key.split(":");
  const rules = {
    function: {
      pattern: /CREATE OR REPLACE FUNCTION\s+(\S+)\s*\(/,
      render: (m) => `DROP FUNCTION IF EXISTS ${m[1]};`
    },
    trigger: {
      pattern: /CREATE OR REPLACE TRIGGER\s+(\S+)[\s\S]*?ON\s+(\S+)/,
      render: (m) => `DROP TRIGGER IF EXISTS ${m[1]} ON ${m[2]};`
    },
    extension: {
      pattern: /CREATE EXTENSION IF NOT EXISTS\s+"([^"]+)"/,
      render: (m) => `DROP EXTENSION IF EXISTS "${m[1]}";`
    },
    cron: {
      pattern: /cron\.schedule\('([^']+)'/,
      render: (m) => `SELECT cron.unschedule('${m[1]}');`
    }
  };
  // hasOwn guard so inherited keys ("toString", ...) behave like the original
  // switch default and yield null.
  if (!Object.hasOwn(rules, type)) return null;
  const rule = rules[type];
  const m = createSQL.match(rule.pattern);
  return m ? rule.render(m) : null;
}
613
+ __name(generateDropSQL, "generateDropSQL");
614
// After drizzle-kit runs, locate a .sql file that was not present before.
// Returns its full path, or null when nothing new was generated.
function findNewMigrationFile(migrationsDir, beforeFiles) {
  const current = readdirSync(migrationsDir).filter((f) => f.endsWith(".sql"));
  for (const candidate of current) {
    if (!beforeFiles.has(candidate)) return join(migrationsDir, candidate);
  }
  return null;
}
619
+ __name(findNewMigrationFile, "findNewMigrationFile");
620
// End-to-end "generate" command:
//   1. validate .sgz timestamps and the snapshot chain (repairable via --fix),
//   2. unpack .sgz into a transient meta/ dir and run drizzle-kit generate,
//   3. diff custom SQL objects (functions/triggers/extensions/cron) against
//      the previous snapshot and append CREATE/DROP statements to the new
//      migration file,
//   4. store the new hashes+SQL in the latest snapshot and re-pack to .sgz.
// meta/ is always cleaned up, even on failure.
async function generate(migrationsDir, schemaModules, flags = {
  dryRun: false,
  verbose: false,
  fix: false
}) {
  // Duplicate timestamps cannot be auto-fixed — bail out immediately.
  const duplicates = validateTimestamps(migrationsDir);
  if (duplicates.length > 0) {
    console.error("Duplicate migration timestamps detected:");
    for (const file of duplicates) console.error(` ${file}`);
    console.error("Resolve manually \u2014 each migration must have a unique timestamp.");
    process.exit(1);
  }
  // Broken prevId chains are fixable, but only with explicit --fix.
  const orderIssues = validateOrder(migrationsDir);
  if (orderIssues.length > 0) {
    if (!flags.fix) {
      console.error("Migration order issues detected:");
      for (const issue of orderIssues) {
        console.error(` ${issue.file}: expected prevId="${issue.expected}", actual="${issue.actual}"`);
      }
      console.error("Run with --fix to repair.");
      process.exit(1);
    }
    if (flags.dryRun) {
      console.log("Would fix migration order:");
      for (const issue of orderIssues) {
        console.log(` ${issue.file}: re-timestamp, set prevId="${issue.expected}"`);
      }
    } else {
      fixOrder(migrationsDir, orderIssues);
      console.log(`Fixed ${orderIssues.length} migration order issue(s).`);
    }
  }
  const dialect = new PgDialect();
  unpackFromSgz(migrationsDir);
  try {
    const journal = readJournal(migrationsDir);
    const snapshotPath = getLatestSnapshotPath(migrationsDir, journal);
    const prevObjects = snapshotPath ? readSnapshotCustomObjects(snapshotPath) : {};
    // Record pre-existing .sql files so the freshly generated one can be found.
    let beforeFiles = /* @__PURE__ */ new Set();
    if (!flags.dryRun) {
      beforeFiles = new Set(readdirSync(migrationsDir).filter((f) => f.endsWith(".sql")));
      console.log("Running drizzle-kit generate...");
      try {
        execSync("pnpm drizzle generate", {
          stdio: "inherit"
        });
      } catch {
        throw new Error("drizzle-kit generate failed. Aborting.");
      }
    }
    // Serialize all current custom objects in a deterministic order:
    // functions first (triggers depend on them), then triggers, extensions,
    // cron jobs; alphabetical within each type.
    const allObjects = schemaModules.flatMap(collectCustomObjects);
    const typeOrder = [
      "function",
      "trigger",
      "extension",
      "cron"
    ];
    const ordered = allObjects.filter((o) => typeOrder.includes(o.type)).sort((a, b) => typeOrder.indexOf(a.type) - typeOrder.indexOf(b.type) || a.name.localeCompare(b.name));
    const currentObjects = {};
    for (const obj of ordered) {
      const sqlStr = serializeCustomObject(obj, dialect);
      // Triggers get the table name in the key — the same trigger name may
      // exist on multiple tables.
      const triggerTable = obj instanceof PgTrigger ? `.${obj.tableName}` : "";
      const key = `${obj.type}:${obj.name}${triggerTable}`;
      currentObjects[key] = {
        hash: computeHash(sqlStr),
        sql: sqlStr
      };
    }
    // Compare on hashes only.
    const prevHashes = Object.fromEntries(Object.entries(prevObjects).map(([key, entry]) => [
      key,
      entry.hash
    ]));
    const currentHashes = Object.fromEntries(Object.entries(currentObjects).map(([key, entry]) => [
      key,
      entry.hash
    ]));
    const changes = categorizeChanges(prevHashes, currentHashes);
    if (changes.length === 0) {
      console.log("No custom SQL changes detected.");
      if (!flags.dryRun) {
        // drizzle-kit may still have created a new snapshot; carry the custom
        // object records forward into it before re-packing.
        const latestJournal = readJournal(migrationsDir);
        const latestSnapshotPath = getLatestSnapshotPath(migrationsDir, latestJournal);
        if (latestSnapshotPath) writeSnapshotCustomObjects(latestSnapshotPath, currentObjects);
        packToSgz(migrationsDir);
      }
      return;
    }
    console.log(formatChangeSummary(changes));
    if (flags.verbose) {
      console.log("");
      console.log(formatVerboseOutput(changes, currentObjects, prevObjects));
    }
    if (flags.dryRun) {
      console.log("\n=== DRY RUN: No files were written ===");
      return;
    }
    // If the schema diff produced no drizzle migration, force an empty custom
    // one so the custom SQL has a file to land in.
    let migrationFile = findNewMigrationFile(migrationsDir, beforeFiles);
    if (!migrationFile) {
      console.log("No drizzle migration was created. Creating custom migration...");
      execSync("pnpm drizzle generate --custom", {
        stdio: "inherit"
      });
      migrationFile = findNewMigrationFile(migrationsDir, beforeFiles);
      if (!migrationFile) throw new Error("Failed to create custom migration file");
    }
    // CREATE (or CREATE OR REPLACE) for added/modified objects…
    const createKeys = changes.filter((c) => c.category !== "removed").map((c) => c.key);
    const createStatements = createKeys.map((key) => `-- @drizzle-custom ${key}
${currentObjects[key].sql}`);
    // …and best-effort DROPs for removed ones (skipped when not derivable).
    const removedKeys = changes.filter((c) => c.category === "removed").map((c) => c.key);
    const dropStatements = removedKeys.map((key) => {
      const dropSql = generateDropSQL(key, prevObjects[key]?.sql ?? "");
      return dropSql ? `-- @drizzle-custom ${key} (removed)
${dropSql}` : null;
    }).filter(Boolean);
    // Drops first so replacements of renamed objects do not collide.
    const allStatements = [
      ...dropStatements,
      ...createStatements
    ].join("\n\n");
    appendFileSync(migrationFile, `
-- Custom SQL objects
${allStatements}
`);
    if (dropStatements.length > 0) {
      const dropNames = removedKeys.join(", ");
      appendFileSync(migrationFile, `
-- TODO: Review dropped objects (${dropNames})
-- If they have dependants (triggers, cron jobs, etc.), add DROP/UPDATE statements below.
-- Run 'pnpm dev:init' to verify this migration applies cleanly.

`);
      console.log(`\u26A0 ${dropStatements.length} object(s) dropped \u2014 check migration for dependency TODOs`);
    }
    const total = createKeys.length + dropStatements.length;
    console.log(`Appended ${total} custom SQL statement(s) to ${migrationFile}`);
    // Persist the new hashes/SQL into the snapshot created by this run.
    const updatedJournal = readJournal(migrationsDir);
    const updatedSnapshotPath = getLatestSnapshotPath(migrationsDir, updatedJournal);
    if (updatedSnapshotPath) {
      writeSnapshotCustomObjects(updatedSnapshotPath, currentObjects);
      console.log(`Updated custom hashes in ${updatedSnapshotPath}`);
    }
    packToSgz(migrationsDir);
  } finally {
    cleanMeta(migrationsDir);
  }
}
__name(generate, "generate");
766
// "drop" command: let drizzle-kit drop a migration interactively, then re-pack
// snapshots and delete any .sgz left orphaned (no matching .sql file).
function drop(migrationsDir) {
  unpackFromSgz(migrationsDir);
  try {
    execSync("pnpm drizzle drop", {
      stdio: "inherit"
    });
    packToSgz(migrationsDir);
    // Remove packed snapshots whose migration was just dropped.
    for (const f of readdirSync(migrationsDir)) {
      if (!f.endsWith(".sgz")) continue;
      if (!existsSync(join(migrationsDir, f.replace(".sgz", ".sql")))) {
        unlinkSync(join(migrationsDir, f));
      }
    }
  } finally {
    cleanMeta(migrationsDir);
  }
}
__name(drop, "drop");
// "custom" command: create an empty custom migration via drizzle-kit, keeping
// the unpack/pack snapshot bookkeeping consistent.
function custom(migrationsDir) {
  unpackFromSgz(migrationsDir);
  try {
    execSync("pnpm drizzle generate --custom", {
      stdio: "inherit"
    });
    packToSgz(migrationsDir);
  } finally {
    cleanMeta(migrationsDir);
  }
}
__name(custom, "custom");
796
// Dynamically import every schema source that may define custom SQL objects.
// For each of functions/triggers/extensions/cron, either a directory
// (recursively, skipping *.test.ts / *.spec.ts) or a single <name>.ts file is
// loaded. Exits the process on the first import failure. Resolves to the
// array of loaded module namespaces (consumed by collectCustomObjects).
async function loadSchemaModules(schemaDir) {
  const sourceNames = [
    "functions",
    "triggers",
    "extensions",
    "cron"
  ];
  const importTasks = [];
  for (const name of sourceNames) {
    const dirPath = join(schemaDir, name);
    const filePath = join(schemaDir, `${name}.ts`);
    if (existsSync(dirPath) && statSync(dirPath).isDirectory()) {
      // Depth-first scan; `prefix` only feeds the error message path.
      const scanDir = /* @__PURE__ */ __name((dir, prefix) => {
        for (const entry of readdirSync(dir)) {
          const fullPath = join(dir, entry);
          if (statSync(fullPath).isDirectory()) {
            scanDir(fullPath, `${prefix}${entry}/`);
          } else if (entry.endsWith(".ts") && !entry.includes(".test.") && !entry.includes(".spec.")) {
            importTasks.push(import(fullPath).catch((err) => {
              const msg = err instanceof Error ? err.message : String(err);
              console.error(`Failed to load ${prefix}${entry}: ${msg}`);
              process.exit(1);
            }));
          }
        }
      }, "scanDir");
      scanDir(dirPath, `${name}/`);
    } else if (existsSync(filePath)) {
      importTasks.push(import(filePath).catch((err) => {
        const msg = err instanceof Error ? err.message : String(err);
        console.error(`Failed to load ${name}.ts: ${msg}`);
        process.exit(1);
      }));
    }
  }
  return Promise.all(importTasks);
}
__name(loadSchemaModules, "loadSchemaModules");
834
// "sync" command: recompute hashes/SQL for all current custom objects and
// write them into the newest .sgz snapshot without generating a migration.
// NOTE(review): unlike generate(), objects here are ordered by type name
// alphabetically — the keys and hashes are the same either way, so only the
// snapshot's key insertion order differs (and stableStringify sorts keys when
// packing). Confirm nothing depends on insertion order.
async function syncMeta(migrationsDir, schemaModules) {
  const dialect = new PgDialect();
  const allObjects = schemaModules.flatMap(collectCustomObjects);
  const ordered = allObjects.sort((a, b) => a.type.localeCompare(b.type) || a.name.localeCompare(b.name));
  const currentObjects = {};
  for (const obj of ordered) {
    const sqlStr = serializeCustomObject(obj, dialect);
    // Same key scheme as generate(): triggers include their table name.
    const triggerTable = obj instanceof PgTrigger ? `.${obj.tableName}` : "";
    currentObjects[`${obj.type}:${obj.name}${triggerTable}`] = {
      hash: computeHash(sqlStr),
      sql: sqlStr
    };
  }
  const sgzFiles = listSgzFiles(migrationsDir);
  if (sgzFiles.length === 0) {
    console.error("No .sgz files found. Nothing to sync.");
    process.exit(1);
  }
  // Rewrite the newest packed snapshot in place with the fresh records.
  const latestSgz = sgzFiles[sgzFiles.length - 1];
  const sgzPath = join(migrationsDir, latestSgz);
  const snap = JSON.parse(gunzipSync(readFileSync(sgzPath)).toString());
  snap.customObjects = currentObjects;
  writeFileSync(sgzPath, gzipSync(stableStringify(snap), {
    level: 9
  }));
  console.log(`Synced ${Object.keys(currentObjects).length} custom objects into ${latestSgz}`);
}
__name(syncMeta, "syncMeta");
862
+ export {
863
+ CustomObject,
864
+ ENUM_MAP,
865
+ FALSE,
866
+ PgCronJob,
867
+ PgExtension,
868
+ PgFunction,
869
+ PgTrigger,
870
+ TriggerFunction,
871
+ appSlugForeignKey,
872
+ auditColumns,
873
+ categorizeChanges,
874
+ cleanMeta,
875
+ colAppSlug,
876
+ colCreatedAt,
877
+ colDeletedAt,
878
+ colId,
879
+ colNumeric,
880
+ colTimestamp,
881
+ colUpdatedAt,
882
+ collectCustomObjects,
883
+ computeHash,
884
+ currentAppSlug,
885
+ currentUser,
886
+ custom,
887
+ defaultPolicy,
888
+ drop,
889
+ fixOrder,
890
+ formatChangeSummary,
891
+ formatVerboseOutput,
892
+ generate,
893
+ generateDropSQL,
894
+ getLatestSnapshotPath,
895
+ hasAppSlug,
896
+ hasBypassEnabled,
897
+ loadSchemaModules,
898
+ packToSgz,
899
+ parseFlags,
900
+ pgCron,
901
+ pgExtension,
902
+ pgFunction,
903
+ pgOrmEnum,
904
+ pgTrigger,
905
+ pgTypes,
906
+ readJournal,
907
+ readSnapshotCustomObjects,
908
+ serializeCustomObject,
909
+ simpleDiff,
910
+ syncMeta,
911
+ toPgEnum,
912
+ unpackFromSgz,
913
+ validateOrder,
914
+ validateTimestamps,
915
+ writeSnapshotCustomObjects
916
+ };
917
+ //# sourceMappingURL=index.js.map