@plyaz/db 0.1.1 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapters/sql/SQLAdapter.d.ts +1 -0
- package/dist/adapters/sql/SQLAdapter.d.ts.map +1 -1
- package/dist/cli/index.js +862 -51
- package/dist/cli/index.js.map +1 -1
- package/dist/index.cjs +321 -18
- package/dist/index.cjs.map +1 -1
- package/dist/index.mjs +321 -18
- package/dist/index.mjs.map +1 -1
- package/dist/migrations/MigrationManager.d.ts +22 -0
- package/dist/migrations/MigrationManager.d.ts.map +1 -1
- package/dist/repository/BaseRepository.d.ts +9 -0
- package/dist/repository/BaseRepository.d.ts.map +1 -1
- package/dist/seeds/SeedManager.d.ts +25 -1
- package/dist/seeds/SeedManager.d.ts.map +1 -1
- package/package.json +5 -5
package/dist/cli/index.js
CHANGED
@@ -193,6 +193,10 @@ function failure(error) {
   return { success: false, error };
 }
 __name(failure, "failure");
+var DESCRIPTION_MAX_LENGTH = 60;
+var FALLBACK_DESCRIPTION_LENGTH = 50;
+var PROGRESS_LOG_INTERVAL = 10;
+var ERROR_MESSAGE_MAX_LENGTH = 300;
 var MigrationManager = class {
   static {
     __name(this, "MigrationManager");
@@ -302,6 +306,122 @@ var MigrationManager = class {
     }
     return { upSQL: sql2.trim(), downSQL: null };
   }
+  /**
+   * Process dollar-quoted string delimiters ($$ or $tag$)
+   * Returns updated state for tracking if we're inside a dollar block
+   */
+  processDollarDelimiters(line, inDollarBlock, dollarTag) {
+    const dollarMatch = line.match(/\$([a-zA-Z_]*)\$/g);
+    if (!dollarMatch) return { inDollarBlock, dollarTag };
+    let currentInBlock = inDollarBlock;
+    let currentTag = dollarTag;
+    for (const match of dollarMatch) {
+      if (!currentInBlock) {
+        currentInBlock = true;
+        currentTag = match;
+      } else if (match === currentTag) {
+        currentInBlock = false;
+        currentTag = "";
+      }
+    }
+    return { inDollarBlock: currentInBlock, dollarTag: currentTag };
+  }
+  /**
+   * Filter out comment-only statements
+   */
+  isNonCommentStatement(statement) {
+    const withoutComments = statement.replace(/--.*$/gm, "").trim();
+    return withoutComments.length > 0;
+  }
+  /**
+   * Split SQL into individual statements for better error reporting
+   * Handles $$ delimited blocks (functions, triggers) correctly
+   */
+  splitSqlStatements(sql2) {
+    const statements = [];
+    let current = "";
+    let inDollarBlock = false;
+    let dollarTag = "";
+    for (const line of sql2.split("\n")) {
+      const trimmedLine = line.trim();
+      const isEmptyOrComment = trimmedLine === "" || trimmedLine.startsWith("--");
+      current += line + "\n";
+      if (isEmptyOrComment) continue;
+      const dollarState = this.processDollarDelimiters(
+        line,
+        inDollarBlock,
+        dollarTag
+      );
+      inDollarBlock = dollarState.inDollarBlock;
+      dollarTag = dollarState.dollarTag;
+      const isEndOfStatement = !inDollarBlock && trimmedLine.endsWith(";");
+      if (isEndOfStatement && current.trim()) {
+        statements.push(current.trim());
+        current = "";
+      }
+    }
+    if (current.trim()) {
+      statements.push(current.trim());
+    }
+    return statements.filter((s) => this.isNonCommentStatement(s));
+  }
+  /**
+   * Extract a short description from a SQL statement for logging
+   */
+  getStatementDescription(statement) {
+    const firstLine = statement.split("\n").find((l) => l.trim() && !l.trim().startsWith("--"))?.trim() ?? "";
+    const patterns = [
+      /^(CREATE\s+(?:OR\s+REPLACE\s+)?(?:TABLE|INDEX|UNIQUE\s+INDEX|TYPE|FUNCTION|TRIGGER|EXTENSION|SCHEMA|VIEW|POLICY))\s+(?:IF\s+NOT\s+EXISTS\s+)?([^\s(]+)/i,
+      /^(ALTER\s+TABLE)\s+([^\s]+)/i,
+      /^(DROP\s+(?:TABLE|INDEX|TYPE|FUNCTION|TRIGGER|EXTENSION|SCHEMA|VIEW|POLICY))\s+(?:IF\s+EXISTS\s+)?([^\s(;]+)/i,
+      /^(INSERT\s+INTO)\s+([^\s(]+)/i,
+      /^(COMMENT\s+ON\s+(?:TABLE|COLUMN|INDEX|FUNCTION|TYPE))\s+([^\s]+)/i,
+      /^(GRANT|REVOKE)\s+.+\s+ON\s+([^\s]+)/i
+    ];
+    for (const pattern of patterns) {
+      const match = firstLine.match(pattern);
+      if (match) {
+        return `${match[1]} ${match[2]}`.slice(0, DESCRIPTION_MAX_LENGTH);
+      }
+    }
+    const truncated = firstLine.slice(0, FALLBACK_DESCRIPTION_LENGTH);
+    const suffix = firstLine.length > FALLBACK_DESCRIPTION_LENGTH ? "..." : "";
+    return truncated + suffix;
+  }
+  /**
+   * Execute SQL statements individually with better error reporting
+   */
+  async executeSqlStatements(adapter, sql2, migrationVersion) {
+    const statements = this.splitSqlStatements(sql2);
+    const total = statements.length;
+    console.log(` → ${total} statements to execute`);
+    for (let i = 0; i < statements.length; i++) {
+      const statement = statements[i];
+      const description = this.getStatementDescription(statement);
+      try {
+        await adapter.query(statement);
+        const isInterval = (i + 1) % PROGRESS_LOG_INTERVAL === 0;
+        const isLast = i === total - 1;
+        const isSignificant = Boolean(
+          description.match(/^(CREATE TABLE|CREATE FUNCTION|CREATE TRIGGER)/i)
+        );
+        if (isInterval || isLast || isSignificant) {
+          console.log(` ✓ [${i + 1}/${total}] ${description}`);
+        }
+      } catch (error) {
+        console.log(` ✗ [${i + 1}/${total}] ${description}`);
+        const rawMessage = error.message;
+        const errorMessage = rawMessage.replace(/^SQL Error:\s*/i, "").replace(/^Failed to execute query:.*?-\s*/i, "").slice(0, ERROR_MESSAGE_MAX_LENGTH);
+        throw new DatabaseError(
+          `Migration ${migrationVersion} failed at statement ${i + 1}/${total}:
+Statement: ${description}
+Error: ${errorMessage}`,
+          DATABASE_ERROR_CODES.QUERY_FAILED,
+          { cause: error }
+        );
+      }
+    }
+  }
   /**
    * Load SQL migration from file
    */
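Note: the splitter above tracks dollar-quoted blocks so semicolons inside function or trigger bodies do not end a statement. A minimal standalone sketch of the expected behaviour (the SQL below is a hypothetical example, not taken from the package):

  // Hypothetical input illustrating why dollar-quote tracking matters:
  const sql = [
    "CREATE FUNCTION touch_updated_at() RETURNS trigger AS $$",
    "BEGIN",
    "  NEW.updated_at = now();",
    "  RETURN NEW;",
    "END;",
    "$$ LANGUAGE plpgsql;",
    "CREATE INDEX idx_users_email ON users (email);"
  ].join("\n");
  // Expected: splitSqlStatements(sql) yields 2 statements — the semicolons inside
  // the $$ ... $$ body no longer split the function definition apart.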
@@ -313,12 +433,20 @@ var MigrationManager = class {
       name: migrationFile.name,
       up: /* @__PURE__ */ __name(async (adapter) => {
         if (typeof adapter.query === "function") {
-          await
+          await this.executeSqlStatements(
+            adapter,
+            upSQL,
+            migrationFile.version
+          );
         }
       }, "up"),
       down: /* @__PURE__ */ __name(async (adapter) => {
         if (downSQL && typeof adapter.query === "function") {
-          await
+          await this.executeSqlStatements(
+            adapter,
+            downSQL,
+            migrationFile.version
+          );
         } else {
           console.warn(
             `[Migrations] No DOWN migration for ${migrationFile.version}`
@@ -425,6 +553,7 @@ var MigrationManager = class {
   /**
    * Run all pending migrations
    */
+  /* eslint-disable max-depth, complexity */
   async up(targetVersion) {
     try {
       await this.initialize();
@@ -445,9 +574,15 @@
         const migration = await this.loadMigration(migrationFile);
         const startTime = Date.now();
         if (typeof this.adapter.transaction === "function") {
-          await this.adapter.transaction(async () => {
+          const txResult = await this.adapter.transaction(async () => {
            await migration.up(this.adapter);
          });
+          if (!txResult.success) {
+            throw txResult.error ?? new DatabaseError(
+              `Migration ${migration.version} failed`,
+              DATABASE_ERROR_CODES.QUERY_FAILED
+            );
+          }
         } else {
           await migration.up(this.adapter);
         }
@@ -502,9 +637,15 @@
         const migration = await this.loadMigration(migrationFile);
         const startTime = Date.now();
         if (typeof this.adapter.transaction === "function") {
-          await this.adapter.transaction(async () => {
+          const txResult = await this.adapter.transaction(async () => {
            await migration.down(this.adapter);
          });
+          if (!txResult.success) {
+            throw txResult.error ?? new DatabaseError(
+              `Rollback ${appliedMigration.version} failed`,
+              DATABASE_ERROR_CODES.QUERY_FAILED
+            );
+          }
         } else {
           await migration.down(this.adapter);
         }
@@ -563,6 +704,10 @@ var MigrationManager = class {
     }
   }
 };
+var DESCRIPTION_MAX_LENGTH2 = 60;
+var FALLBACK_DESCRIPTION_LENGTH2 = 50;
+var PROGRESS_LOG_INTERVAL2 = 10;
+var ERROR_MESSAGE_MAX_LENGTH2 = 300;
 var SeedManager = class {
   static {
     __name(this, "SeedManager");
@@ -631,7 +776,7 @@ var SeedManager = class {
     const files = fs3.readdirSync(this.seedsPath);
     const seeds = [];
     for (const file of files) {
-      const match = file.match(/^(\d+)_(.+)\.(ts|js)$/);
+      const match = file.match(/^(\d+)_(.+)\.(ts|js|sql)$/);
       if (match) {
         const [, order, name] = match;
         seeds.push({
@@ -644,9 +789,141 @@ var SeedManager = class {
     return seeds.sort((a, b) => a.order - b.order);
   }
   /**
-   *
+   * Process dollar-quoted string delimiters ($$ or $tag$)
+   */
+  processDollarDelimiters(line, inDollarBlock, dollarTag) {
+    const dollarMatch = line.match(/\$([a-zA-Z_]*)\$/g);
+    if (!dollarMatch) return { inDollarBlock, dollarTag };
+    let currentInBlock = inDollarBlock;
+    let currentTag = dollarTag;
+    for (const match of dollarMatch) {
+      if (!currentInBlock) {
+        currentInBlock = true;
+        currentTag = match;
+      } else if (match === currentTag) {
+        currentInBlock = false;
+        currentTag = "";
+      }
+    }
+    return { inDollarBlock: currentInBlock, dollarTag: currentTag };
+  }
+  /**
+   * Filter out comment-only statements
+   */
+  isNonCommentStatement(statement) {
+    const withoutComments = statement.replace(/--.*$/gm, "").trim();
+    return withoutComments.length > 0;
+  }
+  /**
+   * Split SQL into individual statements for better error reporting
+   */
+  splitSqlStatements(sql2) {
+    const statements = [];
+    let current = "";
+    let inDollarBlock = false;
+    let dollarTag = "";
+    for (const line of sql2.split("\n")) {
+      const trimmedLine = line.trim();
+      const isEmptyOrComment = trimmedLine === "" || trimmedLine.startsWith("--");
+      current += line + "\n";
+      if (isEmptyOrComment) continue;
+      const dollarState = this.processDollarDelimiters(
+        line,
+        inDollarBlock,
+        dollarTag
+      );
+      inDollarBlock = dollarState.inDollarBlock;
+      dollarTag = dollarState.dollarTag;
+      if (!inDollarBlock && trimmedLine.endsWith(";") && current.trim()) {
+        statements.push(current.trim());
+        current = "";
+      }
+    }
+    if (current.trim()) {
+      statements.push(current.trim());
+    }
+    return statements.filter((s) => this.isNonCommentStatement(s));
+  }
+  /**
+   * Extract a short description from a SQL statement for logging
    */
+  getStatementDescription(statement) {
+    const firstLine = statement.split("\n").find((l) => l.trim() && !l.trim().startsWith("--"))?.trim() ?? "";
+    const patterns = [
+      /^(CREATE\s+(?:OR\s+REPLACE\s+)?(?:TABLE|INDEX|UNIQUE\s+INDEX|TYPE|FUNCTION|TRIGGER|EXTENSION|SCHEMA|VIEW|POLICY))\s+(?:IF\s+NOT\s+EXISTS\s+)?([^\s(]+)/i,
+      /^(ALTER\s+TABLE)\s+([^\s]+)/i,
+      /^(DROP\s+(?:TABLE|INDEX|TYPE|FUNCTION|TRIGGER|EXTENSION|SCHEMA|VIEW|POLICY))\s+(?:IF\s+EXISTS\s+)?([^\s(;]+)/i,
+      /^(INSERT\s+INTO)\s+([^\s(]+)/i,
+      /^(COMMENT\s+ON\s+(?:TABLE|COLUMN|INDEX|FUNCTION|TYPE))\s+([^\s]+)/i,
+      /^(GRANT|REVOKE)\s+.+\s+ON\s+([^\s]+)/i
+    ];
+    for (const pattern of patterns) {
+      const match = firstLine.match(pattern);
+      if (match) {
+        return `${match[1]} ${match[2]}`.slice(0, DESCRIPTION_MAX_LENGTH2);
+      }
+    }
+    const truncated = firstLine.slice(0, FALLBACK_DESCRIPTION_LENGTH2);
+    const suffix = firstLine.length > FALLBACK_DESCRIPTION_LENGTH2 ? "..." : "";
+    return truncated + suffix;
+  }
+  /**
+   * Execute SQL statements individually with better error reporting
+   */
+  async executeSqlStatements(sql2, seedName) {
+    const statements = this.splitSqlStatements(sql2);
+    const total = statements.length;
+    console.log(` → ${total} statements to execute`);
+    for (let i = 0; i < statements.length; i++) {
+      const statement = statements[i];
+      const description = this.getStatementDescription(statement);
+      try {
+        await this.adapter.query(statement);
+        const isInterval = (i + 1) % PROGRESS_LOG_INTERVAL2 === 0;
+        const isLast = i === total - 1;
+        const isSignificant = Boolean(description.match(/^(INSERT INTO)/i));
+        if (isInterval || isLast || isSignificant) {
+          console.log(` ✓ [${i + 1}/${total}] ${description}`);
+        }
+      } catch (error) {
+        console.log(` ✗ [${i + 1}/${total}] ${description}`);
+        const rawMessage = error.message;
+        const errorMessage = rawMessage.replace(/^SQL Error:\s*/i, "").replace(/^Failed to execute query:.*?-\s*/i, "").slice(0, ERROR_MESSAGE_MAX_LENGTH2);
+        throw new DatabaseError(
+          `Seed "${seedName}" failed at statement ${i + 1}/${total}:
+Statement: ${description}
+Error: ${errorMessage}`,
+          DATABASE_ERROR_CODES.QUERY_FAILED,
+          { cause: error }
+        );
+      }
+    }
+  }
+  /**
+   * Load SQL seed from file
+   */
+  loadSqlSeed(seedFile) {
+    const sql2 = fs3.readFileSync(seedFile.filePath, "utf-8");
+    return {
+      name: seedFile.name,
+      run: /* @__PURE__ */ __name(async () => {
+        if (typeof this.adapter.query === "function") {
+          await this.executeSqlStatements(sql2, seedFile.name);
+        }
+      }, "run"),
+      // SQL seeds don't have cleanup by default
+      cleanup: void 0
+    };
+  }
+  /**
+   * Load seed from file (supports .ts, .js, and .sql)
+   */
+  // eslint-disable-next-line complexity
   async loadSeed(seedFile) {
+    const ext = path4.extname(seedFile.filePath);
+    if (ext === ".sql") {
+      return this.loadSqlSeed(seedFile);
+    }
     const importPath = seedFile.filePath.startsWith("/") ? seedFile.filePath : new URL(`file:///${seedFile.filePath.replace(/\\/g, "/")}`).href;
     const seedModule = await import(importPath);
     return {
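Note: seed discovery now also picks up plain .sql files; the filename pattern is unchanged apart from the added extension. A quick sketch (example filenames are hypothetical):

  const SEED_FILE = /^(\d+)_(.+)\.(ts|js|sql)$/;
  console.log(SEED_FILE.test("001_base_users.sql")); // true — handled by loadSqlSeed
  console.log(SEED_FILE.test("002_demo_data.ts"));   // true — still loaded as a module
  console.log(SEED_FILE.test("demo_data.sql"));      // false — the numeric order prefix is required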
@@ -700,9 +977,15 @@ var SeedManager = class {
    */
   async executeSeed(seed) {
     if (typeof this.adapter.transaction === "function") {
-      await this.adapter.transaction(async () => {
+      const txResult = await this.adapter.transaction(async () => {
        await seed.run(this.adapter);
      });
+      if (!txResult.success) {
+        throw txResult.error ?? new DatabaseError(
+          `Seed ${seed.name} failed`,
+          DATABASE_ERROR_CODES.QUERY_FAILED
+        );
+      }
     } else {
       await seed.run(this.adapter);
     }
@@ -759,9 +1042,15 @@ var SeedManager = class {
   async executeCleanup(seed) {
     if (!seed.cleanup) return;
     if (typeof this.adapter.transaction === "function") {
-      await this.adapter.transaction(async () => {
+      const txResult = await this.adapter.transaction(async () => {
        await seed.cleanup(this.adapter);
      });
+      if (!txResult.success) {
+        throw txResult.error ?? new DatabaseError(
+          `Seed cleanup for ${seed.name} failed`,
+          DATABASE_ERROR_CODES.QUERY_FAILED
+        );
+      }
     } else {
       await seed.cleanup(this.adapter);
     }
@@ -4395,6 +4684,7 @@ var SupabaseAdapter = class {
     return ops[operator]();
   }
 };
+var SQL_ERROR_TRUNCATE_LENGTH = 500;
 var SQLAdapter = class {
   static {
     __name(this, "SQLAdapter");
@@ -4405,6 +4695,7 @@ var SQLAdapter = class {
   idColumnMap = /* @__PURE__ */ new Map();
   configIdColumns;
   defaultSchema;
+  showSqlInErrors;
   /**
    * Creates a new SQLAdapter instance.
    * @param {SQLAdapterConfig} config - Configuration for the SQL adapter.
@@ -4418,6 +4709,7 @@ var SQLAdapter = class {
   constructor(config) {
     this.config = config;
     this.defaultSchema = config.schema ?? "public";
+    this.showSqlInErrors = config.showSqlInErrors ?? true;
     this.pool = new Pool({
       connectionString: config.connectionString,
       ...config.pool
@@ -4536,16 +4828,16 @@ var SQLAdapter = class {
       const result = await this.pool.query(sql2, params);
       return result.rows;
     } catch (error) {
-
-
-
-
-
-
-
-
-
-      );
+      const truncatedSql = sql2.slice(0, SQL_ERROR_TRUNCATE_LENGTH);
+      const sqlSuffix = sql2.length > SQL_ERROR_TRUNCATE_LENGTH ? "..." : "";
+      const errorMessage = this.showSqlInErrors ? `SQL Error: ${error.message}
+Query: ${truncatedSql}${sqlSuffix}` : `SQL Error: ${error.message}`;
+      throw new DatabaseError(errorMessage, DATABASE_ERROR_CODES.QUERY_FAILED, {
+        context: {
+          source: "SQLAdapter.query"
+        },
+        cause: error
+      });
     }
   }
   /**
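Note: failed queries are now rethrown as a DatabaseError that embeds up to 500 characters of the offending SQL; the new showSqlInErrors flag opts out. A hypothetical config sketch (key names follow the constructor shown above; this is not an excerpt from the package):

  const config = {
    connectionString: process.env.DATABASE_URL,
    schema: "public",
    showSqlInErrors: false // errors become plain "SQL Error: ..." with no query text echoed
  };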
@@ -7975,20 +8267,48 @@ async function createDatabaseService(config) {
 }
 __name(createDatabaseService, "createDatabaseService");
 var JSON_INDENT_SPACES = 2;
-async function
+async function getUserSchemas(adapter) {
   const result = await adapter.query?.(`
-SELECT
-WHERE
-
+SELECT schema_name FROM information_schema.schemata
+WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast', 'mysql', 'sys', 'performance_schema')
+AND schema_name NOT LIKE 'pg_%'
+AND schema_name NOT LIKE 'pg_temp_%'
+AND schema_name NOT LIKE 'pg_toast_temp_%'
 `);
-  const
-  return
+  const schemas = Array.isArray(result) ? result : result.rows || [];
+  return schemas.map((s) => s.schema_name);
 }
-__name(
+__name(getUserSchemas, "getUserSchemas");
+async function getTablesFromSchemas(adapter, schemas) {
+  const tables = [];
+  for (const schema of schemas) {
+    const result = await adapter.query?.(
+      `
+SELECT table_name FROM information_schema.tables
+WHERE table_schema = $1
+AND table_type = 'BASE TABLE'
+AND table_name NOT IN ('schema_migrations', 'seed_history')
+`,
+      [schema]
+    );
+    const schemaResult = Array.isArray(result) ? result : result.rows || [];
+    for (const row of schemaResult) {
+      tables.push({ schema, table: row.table_name });
+    }
+  }
+  return tables;
+}
+__name(getTablesFromSchemas, "getTablesFromSchemas");
 async function clearTables(adapter, tables) {
-
-
-
+  let currentSchema = "";
+  for (const { schema, table } of tables) {
+    if (schema !== currentSchema) {
+      console.log(`
+📁 Schema: ${schema}`);
+      currentSchema = schema;
+    }
+    await adapter.query?.(`TRUNCATE TABLE "${schema}"."${table}" CASCADE`);
+    console.log(` ✓ Cleared ${schema}.${table}`);
   }
 }
 __name(clearTables, "clearTables");
@@ -8098,8 +8418,28 @@ async function loadConfig(configPath) {
   return getEnvFallbackConfig();
 }
 __name(loadConfig, "loadConfig");
-
+var MIGRATION_POOL_SETTINGS = {
+  max: 3,
+  // Fewer connections, more stable
+  idleTimeoutMillis: 0,
+  // Never timeout idle connections during migrations
+  connectionTimeoutMillis: 3e4,
+  // 30s to establish connection
+  keepAlive: true,
+  keepAliveInitialDelayMillis: 5e3,
+  // Start keepalive after 5s
+  allowExitOnIdle: false
+  // Don't exit while migrations running
+};
+async function initDatabase(configPath, forMigration = false) {
   const config = await loadConfig(configPath);
+  if (forMigration && config.config) {
+    config.pool = {
+      ...MIGRATION_POOL_SETTINGS,
+      ...config.pool
+      // User config can still override
+    };
+  }
   const db = await createDatabaseService(config);
   let adapter = db.adapter;
   while (adapter.baseAdapter) {
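Note: because the user's pool settings are spread after MIGRATION_POOL_SETTINGS, the migration defaults only fill in what the config leaves unset. A small standalone sketch (the userPool value is hypothetical):

  const MIGRATION_POOL_SETTINGS = { max: 3, idleTimeoutMillis: 0, connectionTimeoutMillis: 3e4 };
  const userPool = { max: 10 };
  const pool = { ...MIGRATION_POOL_SETTINGS, ...userPool };
  console.log(pool); // { max: 10, idleTimeoutMillis: 0, connectionTimeoutMillis: 30000 }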
@@ -8127,7 +8467,7 @@ program.name("plyaz-db").description("Database management CLI for @plyaz/db").ve
 var migrateCommand = program.command("migrate").description("Database migration commands");
 migrateCommand.command("up").description("Run pending migrations").option("-t, --target <version>", "Target migration version").action(async (options) => {
   try {
-    const { adapter, config } = await initDatabase();
+    const { adapter, config } = await initDatabase(void 0, true);
     const migrationManager = new MigrationManager({
       adapter,
       migrationsPath: config.migrationsPath ?? "./migrations",
@@ -8149,7 +8489,7 @@ migrateCommand.command("up").description("Run pending migrations").option("-t, -
 });
 migrateCommand.command("down").description("Rollback migrations").option("-s, --steps <number>", "Number of migrations to rollback", "1").action(async (options) => {
   try {
-    const { adapter, config } = await initDatabase();
+    const { adapter, config } = await initDatabase(void 0, true);
     const migrationManager = new MigrationManager({
       adapter,
       migrationsPath: config.migrationsPath ?? "./migrations",
@@ -8212,7 +8552,7 @@ migrateCommand.command("reset").description("Rollback all migrations").option("-
     process.exit(1);
   }
   try {
-    const { adapter, config } = await initDatabase();
+    const { adapter, config } = await initDatabase(void 0, true);
     const migrationManager = new MigrationManager({
       adapter,
       migrationsPath: config.migrationsPath ?? "./migrations",
@@ -8255,7 +8595,7 @@ migrateCommand.command("generate-down").description("Generate DOWN sections for
 var seedCommand = program.command("seed").description("Database seeding commands");
 seedCommand.command("run").description("Run seeds").option("-n, --name <name>", "Specific seed to run").option("--skip-existing", "Skip seeds that have already been run").action(async (options) => {
   try {
-    const { adapter, config } = await initDatabase();
+    const { adapter, config } = await initDatabase(void 0, true);
     const seedManager = new SeedManager({
       adapter,
       seedsPath: config.seedsPath ?? "./seeds",
@@ -8315,7 +8655,7 @@ seedCommand.command("reset").description("Reset seeds (run cleanup functions)").
     process.exit(1);
   }
   try {
-    const { adapter, config } = await initDatabase();
+    const { adapter, config } = await initDatabase(void 0, true);
     const seedManager = new SeedManager({
       adapter,
       seedsPath: config.seedsPath ?? "./seeds",
@@ -8335,7 +8675,13 @@ seedCommand.command("reset").description("Reset seeds (run cleanup functions)").
     process.exit(1);
   }
 });
-program.command("clear").description("Clear all data from database (keep schema)").option("--confirm", "Confirm clear operation").option(
+program.command("clear").description("Clear all data from database (keep schema)").option("--confirm", "Confirm clear operation").option(
+  "-t, --tables <tables>",
+  "Comma-separated list of tables to clear (format: schema.table or just table for public)"
+).option(
+  "-s, --schemas <schemas>",
+  "Comma-separated list of schemas to clear (default: all user schemas)"
+).action(async (options) => {
   if (!options.confirm) {
     console.error("❌ Please use --confirm flag to confirm clear operation");
     process.exit(1);
@@ -8347,16 +8693,36 @@ program.command("clear").description("Clear all data from database (keep schema)
     console.error("❌ Clear operation not supported by this adapter");
     process.exit(1);
   }
-
+  let tablesToClear;
+  if (options.tables) {
+    tablesToClear = options.tables.split(",").map((t) => {
+      const trimmed = t.trim();
+      if (trimmed.includes(".")) {
+        const [schema, table] = trimmed.split(".");
+        return { schema, table };
+      }
+      return { schema: "public", table: trimmed };
+    });
+  } else {
+    const schemas = options.schemas ? options.schemas.split(",").map((s) => s.trim()) : await getUserSchemas(adapter);
+    tablesToClear = await getTablesFromSchemas(adapter, schemas);
+  }
   await clearTables(adapter, tablesToClear);
-  console.log(
+  console.log(`
+✅ Cleared ${tablesToClear.length} tables`);
   process.exit(0);
 } catch (error) {
   console.error("❌ Error:", error.message);
   process.exit(1);
 }
 });
-program.command("drop").description("Drop all tables from database").option("--confirm", "Confirm drop operation").
+program.command("drop").description("Drop all tables from database (all schemas)").option("--confirm", "Confirm drop operation").option(
+  "-s, --schemas <schemas>",
+  "Comma-separated list of schemas to drop from (default: all user schemas)"
+).option(
+  "--all",
+  "Drop everything: tables, types, functions, views (not just tables)"
+).option("--drop-schemas", "Also drop the schemas themselves (except public)").action(async (options) => {
   if (!options.confirm) {
     console.error("❌ Please use --confirm flag to confirm drop operation");
     process.exit(1);
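Note: the -t/--tables value is parsed into schema.table pairs, with the schema defaulting to public. A standalone sketch of the parsing shown above (the input string is a hypothetical example):

  const input = "users, analytics.events";
  const parsed = input.split(",").map((t) => {
    const trimmed = t.trim();
    if (trimmed.includes(".")) {
      const [schema, table] = trimmed.split(".");
      return { schema, table };
    }
    return { schema: "public", table: trimmed };
  });
  console.log(parsed); // [ { schema: "public", table: "users" }, { schema: "analytics", table: "events" } ]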
@@ -8364,22 +8730,226 @@ program.command("drop").description("Drop all tables from database").option("--c
   try {
     const { adapter } = await initDatabase();
     console.log("🔄 Dropping all tables...");
-    if (typeof adapter.query
-      const result = await adapter.query(`
-SELECT tablename FROM pg_tables
-WHERE schemaname = 'public'
-`);
-      const tables = Array.isArray(result) ? result : result.rows || [];
-      for (const { tablename } of tables) {
-        await adapter.query(`DROP TABLE IF EXISTS ${tablename} CASCADE`);
-        console.log(` ✓ Dropped ${tablename}`);
-      }
-      console.log("✅ All tables dropped");
-      process.exit(0);
-    } else {
+    if (typeof adapter.query !== "function") {
       console.error("❌ Drop operation not supported by this adapter");
       process.exit(1);
     }
+    const schemas = options.schemas ? options.schemas.split(",").map((s) => s.trim()) : await getUserSchemas(adapter);
+    let totalDropped = 0;
+    for (const schema of schemas) {
+      const result = await adapter.query(
+        `
+SELECT table_name FROM information_schema.tables
+WHERE table_schema = $1
+AND table_type = 'BASE TABLE'
+`,
+        [schema]
+      );
+      const tables = Array.isArray(result) ? result : result.rows || [];
+      if (tables.length === 0) continue;
+      console.log(`
+📁 Schema: ${schema}`);
+      for (const { table_name } of tables) {
+        await adapter.query(
+          `DROP TABLE IF EXISTS "${schema}"."${table_name}" CASCADE`
+        );
+        console.log(` ✓ Dropped ${schema}.${table_name}`);
+        totalDropped++;
+      }
+    }
+    if (options.all) {
+      console.log("\n🗑️ Dropping views...");
+      for (const schema of schemas) {
+        const viewResult = await adapter.query(
+          `
+SELECT table_name FROM information_schema.views
+WHERE table_schema = $1
+`,
+          [schema]
+        );
+        const views = Array.isArray(viewResult) ? viewResult : viewResult.rows || [];
+        for (const { table_name } of views) {
+          try {
+            await adapter.query(
+              `DROP VIEW IF EXISTS "${schema}"."${table_name}" CASCADE`
+            );
+            console.log(` ✓ Dropped view ${schema}.${table_name}`);
+          } catch {
+          }
+        }
+      }
+      console.log("\n🗑️ Dropping triggers...");
+      for (const schema of schemas) {
+        try {
+          const triggerResult = await adapter.query(
+            `
+SELECT DISTINCT trigger_name, event_object_table
+FROM information_schema.triggers
+WHERE trigger_schema = $1
+`,
+            [schema]
+          );
+          const triggers = Array.isArray(triggerResult) ? triggerResult : triggerResult.rows || [];
+          for (const { trigger_name, event_object_table } of triggers) {
+            try {
+              await adapter.query(
+                `DROP TRIGGER IF EXISTS "${trigger_name}" ON "${schema}"."${event_object_table}" CASCADE`
+              );
+              console.log(` ✓ Dropped trigger ${schema}.${trigger_name}`);
+            } catch {
+            }
+          }
+        } catch {
+        }
+      }
+      console.log("\n🗑️ Dropping functions...");
+      for (const schema of schemas) {
+        try {
+          const funcResult = await adapter.query(
+            `
+SELECT p.proname, pg_get_function_identity_arguments(p.oid) as args
+FROM pg_proc p
+JOIN pg_namespace n ON p.pronamespace = n.oid
+WHERE n.nspname = $1
+AND p.prokind = 'f'
+`,
+            [schema]
+          );
+          const funcs = Array.isArray(funcResult) ? funcResult : funcResult.rows || [];
+          for (const { proname, args } of funcs) {
+            try {
+              await adapter.query(
+                `DROP FUNCTION IF EXISTS "${schema}"."${proname}"(${args}) CASCADE`
+              );
+              console.log(` ✓ Dropped function ${schema}.${proname}`);
+            } catch {
+            }
+          }
+        } catch {
+          console.log(
+            ` ⚠ Function dropping not supported for this database`
+          );
+          break;
+        }
+      }
+      console.log("\n🗑️ Dropping procedures...");
+      for (const schema of schemas) {
+        try {
+          const procResult = await adapter.query(
+            `
+SELECT p.proname, pg_get_function_identity_arguments(p.oid) as args
+FROM pg_proc p
+JOIN pg_namespace n ON p.pronamespace = n.oid
+WHERE n.nspname = $1
+AND p.prokind = 'p'
+`,
+            [schema]
+          );
+          const procs = Array.isArray(procResult) ? procResult : procResult.rows || [];
+          for (const { proname, args } of procs) {
+            try {
+              await adapter.query(
+                `DROP PROCEDURE IF EXISTS "${schema}"."${proname}"(${args}) CASCADE`
+              );
+              console.log(` ✓ Dropped procedure ${schema}.${proname}`);
+            } catch {
+            }
+          }
+        } catch {
+        }
+      }
+      console.log("\n🗑️ Dropping sequences...");
+      for (const schema of schemas) {
+        try {
+          const seqResult = await adapter.query(
+            `
+SELECT sequence_name FROM information_schema.sequences
+WHERE sequence_schema = $1
+`,
+            [schema]
+          );
+          const sequences = Array.isArray(seqResult) ? seqResult : seqResult.rows || [];
+          for (const { sequence_name } of sequences) {
+            try {
+              await adapter.query(
+                `DROP SEQUENCE IF EXISTS "${schema}"."${sequence_name}" CASCADE`
+              );
+              console.log(` ✓ Dropped sequence ${schema}.${sequence_name}`);
+            } catch {
+            }
+          }
+        } catch {
+        }
+      }
+      console.log("\n🗑️ Dropping types...");
+      for (const schema of schemas) {
+        try {
+          const typeResult = await adapter.query(
+            `
+SELECT t.typname
+FROM pg_type t
+JOIN pg_namespace n ON t.typnamespace = n.oid
+WHERE n.nspname = $1
+AND t.typtype = 'e'
+`,
+            [schema]
+          );
+          const types = Array.isArray(typeResult) ? typeResult : typeResult.rows || [];
+          for (const { typname } of types) {
+            try {
+              await adapter.query(
+                `DROP TYPE IF EXISTS "${schema}"."${typname}" CASCADE`
+              );
+              console.log(` ✓ Dropped type ${schema}.${typname}`);
+            } catch {
+            }
+          }
+        } catch {
+          console.log(` ⚠ Type dropping not supported for this database`);
+          break;
+        }
+      }
+      console.log("\n🗑️ Dropping domains...");
+      for (const schema of schemas) {
+        try {
+          const domainResult = await adapter.query(
+            `
+SELECT domain_name FROM information_schema.domains
+WHERE domain_schema = $1
+`,
+            [schema]
+          );
+          const domains = Array.isArray(domainResult) ? domainResult : domainResult.rows || [];
+          for (const { domain_name } of domains) {
+            try {
+              await adapter.query(
+                `DROP DOMAIN IF EXISTS "${schema}"."${domain_name}" CASCADE`
+              );
+              console.log(` ✓ Dropped domain ${schema}.${domain_name}`);
+            } catch {
+            }
+          }
+        } catch {
+        }
+      }
+    }
+    if (options.dropSchemas) {
+      console.log("\n🗑️ Dropping schemas...");
+      for (const schema of schemas) {
+        if (schema === "public") continue;
+        try {
+          await adapter.query(`DROP SCHEMA IF EXISTS "${schema}" CASCADE`);
+          console.log(` ✓ Dropped schema ${schema}`);
+        } catch {
+          console.log(` ⚠ Could not drop schema ${schema}`);
+        }
+      }
+    }
+    console.log(
+      `
+✅ Dropped ${totalDropped} tables${options.all ? " + types, functions, views" : ""}`
+    );
+    process.exit(0);
   } catch (error) {
     console.error("❌ Error:", error.message);
     process.exit(1);
@@ -8415,6 +8985,247 @@ program.command("health").description("Check database health").action(async () =
     process.exit(1);
   }
 });
+async function dropAllObjects(adapter, schemas, dropSchemas) {
+  let totalTables = 0;
+  let totalObjects = 0;
+  console.log("\n🗑️ Dropping tables...");
+  for (const schema of schemas) {
+    const result = await adapter.query(
+      `
+SELECT table_name FROM information_schema.tables
+WHERE table_schema = $1
+AND table_type = 'BASE TABLE'
+`,
+      [schema]
+    );
+    const tables = Array.isArray(result) ? result : result.rows || [];
+    if (tables.length === 0) continue;
+    console.log(`
+📁 Schema: ${schema}`);
+    for (const { table_name } of tables) {
+      await adapter.query(
+        `DROP TABLE IF EXISTS "${schema}"."${table_name}" CASCADE`
+      );
+      console.log(` ✓ Dropped table ${schema}.${table_name}`);
+      totalTables++;
+    }
+  }
+  console.log("\n🗑️ Dropping views...");
+  for (const schema of schemas) {
+    const viewResult = await adapter.query(
+      `SELECT table_name FROM information_schema.views WHERE table_schema = $1`,
+      [schema]
+    );
+    const views = Array.isArray(viewResult) ? viewResult : viewResult.rows || [];
+    for (const { table_name } of views) {
+      try {
+        await adapter.query(
+          `DROP VIEW IF EXISTS "${schema}"."${table_name}" CASCADE`
+        );
+        console.log(` ✓ Dropped view ${schema}.${table_name}`);
+        totalObjects++;
+      } catch {
+      }
+    }
+  }
+  console.log("\n🗑️ Dropping triggers...");
+  for (const schema of schemas) {
+    try {
+      const triggerResult = await adapter.query(
+        `SELECT DISTINCT trigger_name, event_object_table FROM information_schema.triggers WHERE trigger_schema = $1`,
+        [schema]
+      );
+      const triggers = Array.isArray(triggerResult) ? triggerResult : triggerResult.rows || [];
+      for (const { trigger_name, event_object_table } of triggers) {
+        try {
+          await adapter.query(
+            `DROP TRIGGER IF EXISTS "${trigger_name}" ON "${schema}"."${event_object_table}" CASCADE`
+          );
+          console.log(` ✓ Dropped trigger ${schema}.${trigger_name}`);
+          totalObjects++;
+        } catch {
+        }
+      }
+    } catch {
+    }
+  }
+  console.log("\n🗑️ Dropping functions...");
+  for (const schema of schemas) {
+    try {
+      const funcResult = await adapter.query(
+        `SELECT p.proname, pg_get_function_identity_arguments(p.oid) as args
+FROM pg_proc p JOIN pg_namespace n ON p.pronamespace = n.oid
+WHERE n.nspname = $1 AND p.prokind = 'f'`,
+        [schema]
+      );
+      const funcs = Array.isArray(funcResult) ? funcResult : funcResult.rows || [];
+      for (const { proname, args } of funcs) {
+        try {
+          await adapter.query(
+            `DROP FUNCTION IF EXISTS "${schema}"."${proname}"(${args}) CASCADE`
+          );
+          console.log(` ✓ Dropped function ${schema}.${proname}`);
+          totalObjects++;
+        } catch {
+        }
+      }
+    } catch {
+    }
+  }
+  console.log("\n🗑️ Dropping procedures...");
+  for (const schema of schemas) {
+    try {
+      const procResult = await adapter.query(
+        `SELECT p.proname, pg_get_function_identity_arguments(p.oid) as args
+FROM pg_proc p JOIN pg_namespace n ON p.pronamespace = n.oid
+WHERE n.nspname = $1 AND p.prokind = 'p'`,
+        [schema]
+      );
+      const procs = Array.isArray(procResult) ? procResult : procResult.rows || [];
+      for (const { proname, args } of procs) {
+        try {
+          await adapter.query(
+            `DROP PROCEDURE IF EXISTS "${schema}"."${proname}"(${args}) CASCADE`
+          );
+          console.log(` ✓ Dropped procedure ${schema}.${proname}`);
+          totalObjects++;
+        } catch {
+        }
+      }
+    } catch {
+    }
+  }
+  console.log("\n🗑️ Dropping sequences...");
+  for (const schema of schemas) {
+    try {
+      const seqResult = await adapter.query(
+        `SELECT sequence_name FROM information_schema.sequences WHERE sequence_schema = $1`,
+        [schema]
+      );
+      const sequences = Array.isArray(seqResult) ? seqResult : seqResult.rows || [];
+      for (const { sequence_name } of sequences) {
+        try {
+          await adapter.query(
+            `DROP SEQUENCE IF EXISTS "${schema}"."${sequence_name}" CASCADE`
+          );
+          console.log(` ✓ Dropped sequence ${schema}.${sequence_name}`);
+          totalObjects++;
+        } catch {
+        }
+      }
+    } catch {
+    }
+  }
+  console.log("\n🗑️ Dropping types...");
+  for (const schema of schemas) {
+    try {
+      const typeResult = await adapter.query(
+        `SELECT t.typname FROM pg_type t
+JOIN pg_namespace n ON t.typnamespace = n.oid
+WHERE n.nspname = $1 AND t.typtype = 'e'`,
+        [schema]
+      );
+      const types = Array.isArray(typeResult) ? typeResult : typeResult.rows || [];
+      for (const { typname } of types) {
+        try {
+          await adapter.query(
+            `DROP TYPE IF EXISTS "${schema}"."${typname}" CASCADE`
+          );
+          console.log(` ✓ Dropped type ${schema}.${typname}`);
+          totalObjects++;
+        } catch {
+        }
+      }
+    } catch {
+    }
+  }
+  console.log("\n🗑️ Dropping domains...");
+  for (const schema of schemas) {
+    try {
+      const domainResult = await adapter.query(
+        `SELECT domain_name FROM information_schema.domains WHERE domain_schema = $1`,
+        [schema]
+      );
+      const domains = Array.isArray(domainResult) ? domainResult : domainResult.rows || [];
+      for (const { domain_name } of domains) {
+        try {
+          await adapter.query(
+            `DROP DOMAIN IF EXISTS "${schema}"."${domain_name}" CASCADE`
+          );
+          console.log(` ✓ Dropped domain ${schema}.${domain_name}`);
+          totalObjects++;
+        } catch {
+        }
+      }
+    } catch {
+    }
+  }
+  if (dropSchemas) {
+    console.log("\n🗑️ Dropping schemas...");
+    for (const schema of schemas) {
+      if (schema === "public") continue;
+      try {
+        await adapter.query(`DROP SCHEMA IF EXISTS "${schema}" CASCADE`);
+        console.log(` ✓ Dropped schema ${schema}`);
+      } catch {
+        console.log(` ⚠ Could not drop schema ${schema}`);
+      }
+    }
+  }
+  return { tables: totalTables, objects: totalObjects };
+}
+__name(dropAllObjects, "dropAllObjects");
+program.command("reset").description(
+  "Full database reset: drop all objects from all schemas, optionally run migrations"
+).option("--confirm", "Confirm reset operation").option(
+  "-s, --schemas <schemas>",
+  "Comma-separated list of schemas (default: all user schemas)"
+).option("--drop-schemas", "Also drop schemas themselves (except public)").option("--migrate", "Run migrations after reset").action(async (options) => {
+  if (!options.confirm) {
+    console.error("❌ Please use --confirm flag to confirm reset operation");
+    console.error(
+      " This will DROP ALL database objects (tables, types, functions, etc.)"
+    );
+    process.exit(1);
+  }
+  try {
+    const { adapter, config } = await initDatabase();
+    if (typeof adapter.query !== "function") {
+      console.error("❌ Reset operation not supported by this adapter");
+      process.exit(1);
+    }
+    console.log("🔄 Resetting database (dropping all objects)...");
+    const schemas = options.schemas ? options.schemas.split(",").map((s) => s.trim()) : await getUserSchemas(adapter);
+    const { tables, objects } = await dropAllObjects(
+      adapter,
+      schemas,
+      options.dropSchemas ?? false
+    );
+    console.log(
+      `
+✅ Reset complete: dropped ${tables} tables, ${objects} other objects`
+    );
+    if (options.migrate) {
+      console.log("\n🔄 Running migrations...");
+      const migrationManager = new MigrationManager({
+        adapter,
+        migrationsPath: config.migrationsPath ?? "./migrations",
+        tableName: config.migrationsTable ?? "schema_migrations"
+      });
+      const result = await migrationManager.up();
+      if (result.success) {
+        console.log(`✅ Applied ${result.value} migration(s)`);
+      } else {
+        console.error("❌ Migration failed:", result.error?.message);
+        process.exit(1);
+      }
+    }
+    process.exit(0);
+  } catch (error) {
+    console.error("❌ Error:", error.message);
+    process.exit(1);
+  }
+});
 program.parse();
 //# sourceMappingURL=index.js.map
 //# sourceMappingURL=index.js.map