@bunnykit/orm 0.1.25 → 0.1.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +100 -1
- package/dist/bin/bunny.js +41 -0
- package/dist/src/config/BunnyConfig.d.ts +1 -0
- package/dist/src/index.d.ts +3 -0
- package/dist/src/index.js +2 -0
- package/dist/src/migration/Migrator.d.ts +10 -1
- package/dist/src/migration/Migrator.js +112 -17
- package/dist/src/schema/Schema.d.ts +20 -0
- package/dist/src/schema/Schema.js +158 -0
- package/dist/src/seeding/Factory.d.ts +19 -0
- package/dist/src/seeding/Factory.js +56 -0
- package/dist/src/seeding/Seeder.d.ts +16 -0
- package/dist/src/seeding/Seeder.js +80 -0
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -23,7 +23,8 @@ An **Eloquent-inspired ORM** built specifically for [Bun](https://bun.sh)'s nati
|
|
|
23
23
|
- 🧬 **Eloquent-style Models** — Property attributes, defaults, casts, dirty tracking, soft deletes, scopes, find-or-fail, first-or-create
|
|
24
24
|
- 🔗 **Relations** — Standard, many-to-many, polymorphic, through, one-of-many, and relation queries
|
|
25
25
|
- 👁️ **Observers** — Lifecycle hooks (`creating`, `created`, `updating`, `updated`, etc.)
|
|
26
|
-
- 🚀 **Migrations & CLI** — Create, run, and
|
|
26
|
+
- 🚀 **Migrations & CLI** — Create, run, reset, refresh, and inspect migrations from the command line
|
|
27
|
+
- 🌱 **Seeders & Factories** — Run all seeders or target one seeder by name/file, plus lightweight model factories
|
|
27
28
|
- 💬 **REPL** — Inspect models and run queries interactively with `bunny repl`
|
|
28
29
|
- ⚡ **Streaming** — `chunk`, `cursor`, `each`, and `lazy` for memory-efficient large dataset processing
|
|
29
30
|
|
|
@@ -71,6 +72,7 @@ export default {
|
|
|
71
72
|
// }),
|
|
72
73
|
// listTenants: async () => await getAllTenantIds(),
|
|
73
74
|
// },
|
|
75
|
+
seedersPath: "./database/seeders",
|
|
74
76
|
modelsPath: ["./src/models", "./src/admin/models"],
|
|
75
77
|
// Optional legacy type output directory
|
|
76
78
|
// typesOutDir: "./src/generated/model-types",
|
|
@@ -87,6 +89,7 @@ Or use environment variables:
|
|
|
87
89
|
```bash
|
|
88
90
|
export DATABASE_URL="sqlite://app.db"
|
|
89
91
|
export MIGRATIONS_PATH="./database/migrations,./database/tenant-migrations"
|
|
92
|
+
export SEEDERS_PATH="./database/seeders"
|
|
90
93
|
export MODELS_PATH="./src/models,./src/admin/models"
|
|
91
94
|
export TYPES_OUT_DIR="./src/generated/model-types"
|
|
92
95
|
```
|
|
@@ -1108,6 +1111,84 @@ ObserverRegistry.register(User, {
|
|
|
1108
1111
|
|
|
1109
1112
|
---
|
|
1110
1113
|
|
|
1114
|
+
## Seeders and Factories
|
|
1115
|
+
|
|
1116
|
+
Set `seedersPath` in `bunny.config.ts` to define the default directory used by `db:seed`:
|
|
1117
|
+
|
|
1118
|
+
```ts
|
|
1119
|
+
export default {
|
|
1120
|
+
connection: { url: "sqlite://app.db" },
|
|
1121
|
+
seedersPath: "./database/seeders",
|
|
1122
|
+
};
|
|
1123
|
+
```
|
|
1124
|
+
|
|
1125
|
+
`seedersPath` can also be an array:
|
|
1126
|
+
|
|
1127
|
+
```ts
|
|
1128
|
+
export default {
|
|
1129
|
+
connection: { url: "sqlite://app.db" },
|
|
1130
|
+
seedersPath: ["./database/seeders", "./modules/demo/seeders"],
|
|
1131
|
+
};
|
|
1132
|
+
```
|
|
1133
|
+
|
|
1134
|
+
Create a seeder by extending `Seeder`:
|
|
1135
|
+
|
|
1136
|
+
```ts
|
|
1137
|
+
import { Seeder } from "@bunnykit/orm";
|
|
1138
|
+
import { User } from "../models/User";
|
|
1139
|
+
|
|
1140
|
+
export default class UserSeeder extends Seeder {
|
|
1141
|
+
async run(): Promise<void> {
|
|
1142
|
+
await User.create({ name: "Ada Lovelace", email: "ada@example.test" });
|
|
1143
|
+
}
|
|
1144
|
+
}
|
|
1145
|
+
```
|
|
1146
|
+
|
|
1147
|
+
Run every seeder in `seedersPath`:
|
|
1148
|
+
|
|
1149
|
+
```bash
|
|
1150
|
+
bun run bunny db:seed
|
|
1151
|
+
```
|
|
1152
|
+
|
|
1153
|
+
Run one seeder by class/file name from `seedersPath`:
|
|
1154
|
+
|
|
1155
|
+
```bash
|
|
1156
|
+
bun run bunny db:seed UserSeeder
|
|
1157
|
+
```
|
|
1158
|
+
|
|
1159
|
+
Run one seeder by direct file path:
|
|
1160
|
+
|
|
1161
|
+
```bash
|
|
1162
|
+
bun run bunny db:seed ./database/seeders/UserSeeder.ts
|
|
1163
|
+
```
|
|
1164
|
+
|
|
1165
|
+
Programmatic seeding is available through `SeederRunner`:
|
|
1166
|
+
|
|
1167
|
+
```ts
|
|
1168
|
+
import { SeederRunner } from "@bunnykit/orm";
|
|
1169
|
+
|
|
1170
|
+
await new SeederRunner(connection).runTarget("UserSeeder", "./database/seeders");
|
|
1171
|
+
await new SeederRunner(connection).runFile("./database/seeders/UserSeeder.ts");
|
|
1172
|
+
```
|
|
1173
|
+
|
|
1174
|
+
Factories can create raw attributes, unsaved models, or persisted records:
|
|
1175
|
+
|
|
1176
|
+
```ts
|
|
1177
|
+
import { factory } from "@bunnykit/orm";
|
|
1178
|
+
import { User } from "../models/User";
|
|
1179
|
+
|
|
1180
|
+
const users = factory(User, (sequence) => ({
|
|
1181
|
+
name: `User ${sequence}`,
|
|
1182
|
+
email: `user${sequence}@example.test`,
|
|
1183
|
+
}));
|
|
1184
|
+
|
|
1185
|
+
const attributes = users.raw();
|
|
1186
|
+
const model = users.make();
|
|
1187
|
+
const created = await users.count(3).state({ role: "admin" }).create();
|
|
1188
|
+
```
|
|
1189
|
+
|
|
1190
|
+
---
|
|
1191
|
+
|
|
1111
1192
|
## Migrations
|
|
1112
1193
|
|
|
1113
1194
|
### CLI Commands
|
|
@@ -1125,9 +1206,27 @@ bun run bunny migrate
|
|
|
1125
1206
|
# Rollback the last batch
|
|
1126
1207
|
bun run bunny migrate:rollback
|
|
1127
1208
|
|
|
1209
|
+
# Rollback all migrations
|
|
1210
|
+
bun run bunny migrate:reset
|
|
1211
|
+
|
|
1212
|
+
# Reset and rerun migrations
|
|
1213
|
+
bun run bunny migrate:refresh
|
|
1214
|
+
|
|
1215
|
+
# Drop all tables and rerun migrations
|
|
1216
|
+
bun run bunny migrate:fresh
|
|
1217
|
+
|
|
1128
1218
|
# Show migration status
|
|
1129
1219
|
bun run bunny migrate:status
|
|
1130
1220
|
|
|
1221
|
+
# Run all seeders in seedersPath
|
|
1222
|
+
bun run bunny db:seed
|
|
1223
|
+
|
|
1224
|
+
# Run one seeder by name from seedersPath
|
|
1225
|
+
bun run bunny db:seed UserSeeder
|
|
1226
|
+
|
|
1227
|
+
# Run one seeder by direct file path
|
|
1228
|
+
bun run bunny db:seed ./database/seeders/UserSeeder.ts
|
|
1229
|
+
|
|
1131
1230
|
# Dump the current database schema
|
|
1132
1231
|
bun run bunny schema:dump ./database/schema.sql
|
|
1133
1232
|
|
package/dist/bin/bunny.js
CHANGED
|
@@ -5,6 +5,7 @@ import { TenantContext } from "../src/connection/TenantContext.js";
|
|
|
5
5
|
import { configureBunny } from "../src/config/BunnyConfig.js";
|
|
6
6
|
import { Migrator } from "../src/migration/Migrator.js";
|
|
7
7
|
import { MigrationCreator } from "../src/migration/MigrationCreator.js";
|
|
8
|
+
import { SeederRunner } from "../src/seeding/Seeder.js";
|
|
8
9
|
import { TypeGenerator } from "../src/typegen/TypeGenerator.js";
|
|
9
10
|
import { existsSync } from "fs";
|
|
10
11
|
import { mkdir, rm, writeFile } from "fs/promises";
|
|
@@ -64,6 +65,18 @@ async function runMigratorCommand(command, migrator, statusLabel) {
|
|
|
64
65
|
await migrator.rollback();
|
|
65
66
|
return;
|
|
66
67
|
}
|
|
68
|
+
if (command === "migrate:reset") {
|
|
69
|
+
await migrator.reset();
|
|
70
|
+
return;
|
|
71
|
+
}
|
|
72
|
+
if (command === "migrate:refresh") {
|
|
73
|
+
await migrator.refresh();
|
|
74
|
+
return;
|
|
75
|
+
}
|
|
76
|
+
if (command === "migrate:fresh") {
|
|
77
|
+
await migrator.fresh();
|
|
78
|
+
return;
|
|
79
|
+
}
|
|
67
80
|
const status = await migrator.status();
|
|
68
81
|
if (statusLabel) {
|
|
69
82
|
console.log(statusLabel);
|
|
@@ -338,6 +351,7 @@ async function loadConfig(allowFallback = false) {
|
|
|
338
351
|
return {
|
|
339
352
|
connection: { url },
|
|
340
353
|
migrationsPath: parseEnvPathSetting(process.env.MIGRATIONS_PATH) || "./database/migrations",
|
|
354
|
+
seedersPath: parseEnvPathSetting(process.env.SEEDERS_PATH),
|
|
341
355
|
modelsPath: parseEnvPathSetting(process.env.MODELS_PATH),
|
|
342
356
|
};
|
|
343
357
|
}
|
|
@@ -354,6 +368,7 @@ async function loadConfig(allowFallback = false) {
|
|
|
354
368
|
filename: process.env.DB_DATABASE,
|
|
355
369
|
},
|
|
356
370
|
migrationsPath: parseEnvPathSetting(process.env.MIGRATIONS_PATH) || "./database/migrations",
|
|
371
|
+
seedersPath: parseEnvPathSetting(process.env.SEEDERS_PATH),
|
|
357
372
|
modelsPath: parseEnvPathSetting(process.env.MODELS_PATH),
|
|
358
373
|
};
|
|
359
374
|
}
|
|
@@ -361,6 +376,7 @@ async function loadConfig(allowFallback = false) {
|
|
|
361
376
|
return {
|
|
362
377
|
connection: { url: "sqlite://:memory:" },
|
|
363
378
|
migrationsPath: parseEnvPathSetting(process.env.MIGRATIONS_PATH) || "./database/migrations",
|
|
379
|
+
seedersPath: parseEnvPathSetting(process.env.SEEDERS_PATH),
|
|
364
380
|
modelsPath: parseEnvPathSetting(process.env.MODELS_PATH),
|
|
365
381
|
};
|
|
366
382
|
}
|
|
@@ -432,9 +448,29 @@ async function main() {
|
|
|
432
448
|
else if (command === "migrate:rollback") {
|
|
433
449
|
await runConfiguredMigrationCommand(command, config, connection, parseMigrationTarget(args.slice(1)));
|
|
434
450
|
}
|
|
451
|
+
else if (command === "migrate:reset") {
|
|
452
|
+
await runConfiguredMigrationCommand(command, config, connection, parseMigrationTarget(args.slice(1)));
|
|
453
|
+
}
|
|
454
|
+
else if (command === "migrate:refresh") {
|
|
455
|
+
await runConfiguredMigrationCommand(command, config, connection, parseMigrationTarget(args.slice(1)));
|
|
456
|
+
}
|
|
457
|
+
else if (command === "migrate:fresh") {
|
|
458
|
+
await runConfiguredMigrationCommand(command, config, connection, parseMigrationTarget(args.slice(1)));
|
|
459
|
+
}
|
|
435
460
|
else if (command === "migrate:status") {
|
|
436
461
|
await runConfiguredMigrationCommand(command, config, connection, parseMigrationTarget(args.slice(1)));
|
|
437
462
|
}
|
|
463
|
+
else if (command === "db:seed") {
|
|
464
|
+
const target = args[1];
|
|
465
|
+
const seederPath = config.seedersPath || "./database/seeders";
|
|
466
|
+
const runner = new SeederRunner(connection);
|
|
467
|
+
if (target) {
|
|
468
|
+
await runner.runTarget(target, seederPath);
|
|
469
|
+
}
|
|
470
|
+
else {
|
|
471
|
+
await runner.runPaths(seederPath);
|
|
472
|
+
}
|
|
473
|
+
}
|
|
438
474
|
else {
|
|
439
475
|
console.log("Usage:");
|
|
440
476
|
console.log(" bun run bunny migrate Run landlord migrations, then all tenant migrations when configured");
|
|
@@ -443,7 +479,12 @@ async function main() {
|
|
|
443
479
|
console.log(" bun run bunny migrate --tenant <id> Run one tenant's migrations only");
|
|
444
480
|
console.log(" bun run bunny migrate:make <name> [dir] Create a new migration");
|
|
445
481
|
console.log(" bun run bunny migrate:rollback Rollback the last batch");
|
|
482
|
+
console.log(" bun run bunny migrate:reset Rollback all migrations");
|
|
483
|
+
console.log(" bun run bunny migrate:refresh Reset and rerun migrations");
|
|
484
|
+
console.log(" bun run bunny migrate:fresh Drop all tables and rerun migrations");
|
|
446
485
|
console.log(" bun run bunny migrate:status Show migration status");
|
|
486
|
+
console.log(" bun run bunny db:seed Run seeders from seedersPath");
|
|
487
|
+
console.log(" bun run bunny db:seed <seeder> Run one seeder by file path or name");
|
|
447
488
|
console.log(" bun run bunny schema:dump [path] Dump the current database schema");
|
|
448
489
|
console.log(" bun run bunny schema:squash [path] Dump schema and mark configured migrations as ran");
|
|
449
490
|
console.log(" bun run bunny types:generate [dir] Generate model type declarations from DB schema");
|
|
@@ -5,6 +5,7 @@ import type { ConnectionConfig } from "../types/index.js";
|
|
|
5
5
|
export interface BunnyConfig {
|
|
6
6
|
connection: ConnectionConfig;
|
|
7
7
|
migrationsPath?: string | string[];
|
|
8
|
+
seedersPath?: string | string[];
|
|
8
9
|
migrations?: {
|
|
9
10
|
landlord?: string | string[];
|
|
10
11
|
tenant?: string | string[];
|
package/dist/src/index.d.ts
CHANGED
|
@@ -27,3 +27,6 @@ export type { MigrationEvent, MigrationEventListener, MigrationEventPayload, Mig
|
|
|
27
27
|
export { MigrationCreator } from "./migration/MigrationCreator.js";
|
|
28
28
|
export { TypeGenerator } from "./typegen/TypeGenerator.js";
|
|
29
29
|
export { TypeMapper } from "./typegen/TypeMapper.js";
|
|
30
|
+
export { Seeder, SeederRunner } from "./seeding/Seeder.js";
|
|
31
|
+
export { Factory, factory } from "./seeding/Factory.js";
|
|
32
|
+
export type { FactoryDefinition, FactoryState } from "./seeding/Factory.js";
|
package/dist/src/index.js
CHANGED
|
@@ -21,3 +21,5 @@ export { Migrator } from "./migration/Migrator.js";
|
|
|
21
21
|
export { MigrationCreator } from "./migration/MigrationCreator.js";
|
|
22
22
|
export { TypeGenerator } from "./typegen/TypeGenerator.js";
|
|
23
23
|
export { TypeMapper } from "./typegen/TypeMapper.js";
|
|
24
|
+
export { Seeder, SeederRunner } from "./seeding/Seeder.js";
|
|
25
|
+
export { Factory, factory } from "./seeding/Factory.js";
|
|
@@ -4,6 +4,8 @@ export interface MigrationStatusRow {
|
|
|
4
4
|
migration: string;
|
|
5
5
|
status: string;
|
|
6
6
|
tenant: string | null;
|
|
7
|
+
checksum?: string;
|
|
8
|
+
storedChecksum?: string | null;
|
|
7
9
|
}
|
|
8
10
|
export interface MigratorOptions {
|
|
9
11
|
tenantId?: string | null;
|
|
@@ -39,13 +41,20 @@ export declare class Migrator {
|
|
|
39
41
|
private emit;
|
|
40
42
|
private getLastBatchNumber;
|
|
41
43
|
private getMigrationFiles;
|
|
44
|
+
private checksumFile;
|
|
42
45
|
run(): Promise<void>;
|
|
43
|
-
rollback(): Promise<void>;
|
|
46
|
+
rollback(steps?: number): Promise<void>;
|
|
47
|
+
private getRollbackBatches;
|
|
48
|
+
reset(): Promise<void>;
|
|
49
|
+
refresh(): Promise<void>;
|
|
50
|
+
fresh(): Promise<void>;
|
|
44
51
|
private generateTypesIfNeeded;
|
|
45
52
|
status(): Promise<MigrationStatusRow[]>;
|
|
46
53
|
dumpSchema(path: string): Promise<string>;
|
|
47
54
|
squash(path: string): Promise<string>;
|
|
48
55
|
private getSchemaDumpSql;
|
|
56
|
+
private dropAllTables;
|
|
49
57
|
private resolve;
|
|
50
58
|
private getRan;
|
|
59
|
+
private getRanRecords;
|
|
51
60
|
}
|
|
@@ -1,5 +1,6 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
1
2
|
import { existsSync } from "fs";
|
|
2
|
-
import { mkdir, readdir, writeFile } from "fs/promises";
|
|
3
|
+
import { mkdir, readdir, readFile, writeFile } from "fs/promises";
|
|
3
4
|
import { basename, join, relative, resolve } from "path";
|
|
4
5
|
import { Schema } from "../schema/Schema.js";
|
|
5
6
|
import { Builder } from "../query/Builder.js";
|
|
@@ -30,6 +31,7 @@ export class Migrator {
|
|
|
30
31
|
table.increments("id");
|
|
31
32
|
table.string("migration");
|
|
32
33
|
table.string("tenant").nullable().index();
|
|
34
|
+
table.string("checksum").nullable();
|
|
33
35
|
table.integer("batch");
|
|
34
36
|
});
|
|
35
37
|
return;
|
|
@@ -39,6 +41,11 @@ export class Migrator {
|
|
|
39
41
|
table.string("tenant").nullable().index();
|
|
40
42
|
});
|
|
41
43
|
}
|
|
44
|
+
if (!(await Schema.hasColumn("migrations", "checksum"))) {
|
|
45
|
+
await Schema.table("migrations", (table) => {
|
|
46
|
+
table.string("checksum").nullable();
|
|
47
|
+
});
|
|
48
|
+
}
|
|
42
49
|
}
|
|
43
50
|
getTenantId() {
|
|
44
51
|
return this.options.tenantId ?? null;
|
|
@@ -134,11 +141,16 @@ export class Migrator {
|
|
|
134
141
|
id: toPosixPath(relative(process.cwd(), fullPath)),
|
|
135
142
|
fileName,
|
|
136
143
|
fullPath,
|
|
144
|
+
checksum: await this.checksumFile(fullPath),
|
|
137
145
|
});
|
|
138
146
|
}
|
|
139
147
|
}
|
|
140
148
|
return files.sort((a, b) => a.fileName.localeCompare(b.fileName) || a.id.localeCompare(b.id));
|
|
141
149
|
}
|
|
150
|
+
async checksumFile(path) {
|
|
151
|
+
const contents = await readFile(path);
|
|
152
|
+
return createHash("sha256").update(contents).digest("hex");
|
|
153
|
+
}
|
|
142
154
|
async run() {
|
|
143
155
|
await this.ensureMigrationsTable();
|
|
144
156
|
const locked = await this.acquireLock();
|
|
@@ -160,6 +172,7 @@ export class Migrator {
|
|
|
160
172
|
await new Builder(this.connection, "migrations").insert({
|
|
161
173
|
migration: file.id,
|
|
162
174
|
tenant: this.getTenantId(),
|
|
175
|
+
checksum: file.checksum,
|
|
163
176
|
batch,
|
|
164
177
|
});
|
|
165
178
|
await this.emit("migrated", { migration: file.id, batch });
|
|
@@ -177,17 +190,17 @@ export class Migrator {
|
|
|
177
190
|
await this.releaseLock();
|
|
178
191
|
}
|
|
179
192
|
}
|
|
180
|
-
async rollback() {
|
|
193
|
+
async rollback(steps = 1) {
|
|
181
194
|
await this.ensureMigrationsTable();
|
|
182
195
|
const locked = await this.acquireLock();
|
|
183
196
|
try {
|
|
184
|
-
const
|
|
185
|
-
if (
|
|
197
|
+
const batches = await this.getRollbackBatches(steps);
|
|
198
|
+
if (batches.length === 0) {
|
|
186
199
|
console.log("Nothing to rollback.");
|
|
187
200
|
return;
|
|
188
201
|
}
|
|
189
202
|
const records = (await this.scopedMigrations()
|
|
190
|
-
.
|
|
203
|
+
.whereIn("batch", batches)
|
|
191
204
|
.orderBy("id", "desc")
|
|
192
205
|
.get());
|
|
193
206
|
if (records.length === 0) {
|
|
@@ -198,12 +211,12 @@ export class Migrator {
|
|
|
198
211
|
for (const record of records) {
|
|
199
212
|
const migration = await this.resolve(record.migration);
|
|
200
213
|
console.log(`Rolling back: ${record.migration}`);
|
|
201
|
-
await this.emit("rollingBack", { migration: record.migration, batch });
|
|
214
|
+
await this.emit("rollingBack", { migration: record.migration, batch: record.batch });
|
|
202
215
|
await migration.down();
|
|
203
216
|
await new Builder(this.connection, "migrations")
|
|
204
217
|
.where("id", record.id)
|
|
205
218
|
.delete();
|
|
206
|
-
await this.emit("rolledBack", { migration: record.migration, batch });
|
|
219
|
+
await this.emit("rolledBack", { migration: record.migration, batch: record.batch });
|
|
207
220
|
console.log(`Rolled back: ${record.migration}`);
|
|
208
221
|
}
|
|
209
222
|
await this.connection.commit();
|
|
@@ -218,6 +231,36 @@ export class Migrator {
|
|
|
218
231
|
await this.releaseLock();
|
|
219
232
|
}
|
|
220
233
|
}
|
|
234
|
+
async getRollbackBatches(steps) {
|
|
235
|
+
await this.ensureMigrationsTable();
|
|
236
|
+
const rows = await this.scopedMigrations()
|
|
237
|
+
.select("batch")
|
|
238
|
+
.orderBy("batch", "desc")
|
|
239
|
+
.get();
|
|
240
|
+
const batches = [];
|
|
241
|
+
for (const row of rows) {
|
|
242
|
+
const batch = Number(row.batch);
|
|
243
|
+
if (!Number.isFinite(batch) || batches.includes(batch))
|
|
244
|
+
continue;
|
|
245
|
+
batches.push(batch);
|
|
246
|
+
if (batches.length >= steps)
|
|
247
|
+
break;
|
|
248
|
+
}
|
|
249
|
+
return batches;
|
|
250
|
+
}
|
|
251
|
+
async reset() {
|
|
252
|
+
while ((await this.getLastBatchNumber()) > 0) {
|
|
253
|
+
await this.rollback();
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
async refresh() {
|
|
257
|
+
await this.reset();
|
|
258
|
+
await this.run();
|
|
259
|
+
}
|
|
260
|
+
async fresh() {
|
|
261
|
+
await this.dropAllTables();
|
|
262
|
+
await this.run();
|
|
263
|
+
}
|
|
221
264
|
async generateTypesIfNeeded() {
|
|
222
265
|
const modelDirectories = normalizePathList(this.typeGeneratorOptions.modelDirectories || this.typeGeneratorOptions.modelDirectory);
|
|
223
266
|
if (!this.typesOutDir && modelDirectories.length === 0)
|
|
@@ -234,14 +277,20 @@ export class Migrator {
|
|
|
234
277
|
}
|
|
235
278
|
async status() {
|
|
236
279
|
await this.ensureMigrationsTable();
|
|
237
|
-
const ran = await this.
|
|
280
|
+
const ran = await this.getRanRecords();
|
|
238
281
|
const files = await this.getMigrationFiles();
|
|
239
282
|
const tenant = this.getTenantId();
|
|
240
|
-
return files.map((file) =>
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
283
|
+
return files.map((file) => {
|
|
284
|
+
const record = ran.get(file.id) || ran.get(file.fileName);
|
|
285
|
+
const storedChecksum = record?.checksum ?? null;
|
|
286
|
+
return {
|
|
287
|
+
migration: file.id,
|
|
288
|
+
status: !record ? "Pending" : storedChecksum && storedChecksum !== file.checksum ? "Changed" : "Ran",
|
|
289
|
+
tenant,
|
|
290
|
+
checksum: file.checksum,
|
|
291
|
+
storedChecksum,
|
|
292
|
+
};
|
|
293
|
+
});
|
|
245
294
|
}
|
|
246
295
|
async dumpSchema(path) {
|
|
247
296
|
const sql = await this.getSchemaDumpSql();
|
|
@@ -262,6 +311,7 @@ export class Migrator {
|
|
|
262
311
|
await new Builder(this.connection, "migrations").insert({
|
|
263
312
|
migration: file.id,
|
|
264
313
|
tenant: this.getTenantId(),
|
|
314
|
+
checksum: file.checksum,
|
|
265
315
|
batch,
|
|
266
316
|
});
|
|
267
317
|
}
|
|
@@ -332,6 +382,42 @@ export class Migrator {
|
|
|
332
382
|
}
|
|
333
383
|
return statements.join("\n\n") + "\n";
|
|
334
384
|
}
|
|
385
|
+
async dropAllTables() {
|
|
386
|
+
const driver = this.connection.getDriverName();
|
|
387
|
+
const grammar = this.connection.getGrammar();
|
|
388
|
+
if (driver === "sqlite") {
|
|
389
|
+
await this.connection.run("PRAGMA foreign_keys = OFF");
|
|
390
|
+
try {
|
|
391
|
+
const rows = await this.connection.query("SELECT name FROM sqlite_master WHERE type = 'table' AND name NOT LIKE 'sqlite_%'");
|
|
392
|
+
for (const row of rows) {
|
|
393
|
+
await this.connection.run(`DROP TABLE IF EXISTS ${grammar.wrap(String(row.name))}`);
|
|
394
|
+
}
|
|
395
|
+
}
|
|
396
|
+
finally {
|
|
397
|
+
await this.connection.run("PRAGMA foreign_keys = ON");
|
|
398
|
+
}
|
|
399
|
+
return;
|
|
400
|
+
}
|
|
401
|
+
if (driver === "mysql") {
|
|
402
|
+
const tables = await this.connection.query("SHOW TABLES");
|
|
403
|
+
const key = Object.keys(tables[0] ?? {})[0];
|
|
404
|
+
await this.connection.run("SET FOREIGN_KEY_CHECKS = 0");
|
|
405
|
+
try {
|
|
406
|
+
for (const row of tables) {
|
|
407
|
+
await this.connection.run(`DROP TABLE IF EXISTS ${grammar.wrap(String(row[key]))}`);
|
|
408
|
+
}
|
|
409
|
+
}
|
|
410
|
+
finally {
|
|
411
|
+
await this.connection.run("SET FOREIGN_KEY_CHECKS = 1");
|
|
412
|
+
}
|
|
413
|
+
return;
|
|
414
|
+
}
|
|
415
|
+
const schema = this.connection.getSchema() || "public";
|
|
416
|
+
const tables = await this.connection.query("SELECT table_name FROM information_schema.tables WHERE table_schema = $1 AND table_type = 'BASE TABLE'", [schema]);
|
|
417
|
+
for (const row of tables) {
|
|
418
|
+
await this.connection.run(`DROP TABLE IF EXISTS ${grammar.wrap(`${schema}.${row.table_name}`)} CASCADE`);
|
|
419
|
+
}
|
|
420
|
+
}
|
|
335
421
|
async resolve(file) {
|
|
336
422
|
const normalized = toPosixPath(file);
|
|
337
423
|
const candidates = new Set();
|
|
@@ -358,16 +444,25 @@ export class Migrator {
|
|
|
358
444
|
return new MigrationClass();
|
|
359
445
|
}
|
|
360
446
|
async getRan() {
|
|
447
|
+
const results = await this.getRanRecords();
|
|
448
|
+
const ran = new Set();
|
|
449
|
+
for (const migration of results.keys()) {
|
|
450
|
+
ran.add(migration);
|
|
451
|
+
ran.add(basename(migration));
|
|
452
|
+
}
|
|
453
|
+
return ran;
|
|
454
|
+
}
|
|
455
|
+
async getRanRecords() {
|
|
361
456
|
await this.ensureMigrationsTable();
|
|
362
457
|
const results = await this.scopedMigrations()
|
|
363
458
|
.orderBy("id", "asc")
|
|
364
459
|
.get();
|
|
365
|
-
const
|
|
460
|
+
const records = new Map();
|
|
366
461
|
for (const row of results) {
|
|
367
462
|
const migration = toPosixPath(String(row.migration));
|
|
368
|
-
|
|
369
|
-
|
|
463
|
+
records.set(migration, row);
|
|
464
|
+
records.set(basename(migration), row);
|
|
370
465
|
}
|
|
371
|
-
return
|
|
466
|
+
return records;
|
|
372
467
|
}
|
|
373
468
|
}
|
|
@@ -1,5 +1,19 @@
|
|
|
1
1
|
import { Connection } from "../connection/Connection.js";
|
|
2
2
|
import { Blueprint } from "./Blueprint.js";
|
|
3
|
+
export interface SchemaIndex {
|
|
4
|
+
name: string;
|
|
5
|
+
columns: string[];
|
|
6
|
+
unique: boolean;
|
|
7
|
+
primary?: boolean;
|
|
8
|
+
}
|
|
9
|
+
export interface SchemaForeignKey {
|
|
10
|
+
name?: string;
|
|
11
|
+
columns: string[];
|
|
12
|
+
references: string[];
|
|
13
|
+
onTable: string;
|
|
14
|
+
onDelete?: string;
|
|
15
|
+
onUpdate?: string;
|
|
16
|
+
}
|
|
3
17
|
export declare class Schema {
|
|
4
18
|
static connection: Connection;
|
|
5
19
|
static setConnection(connection: Connection): void;
|
|
@@ -17,6 +31,12 @@ export declare class Schema {
|
|
|
17
31
|
static rename(from: string, to: string): Promise<void>;
|
|
18
32
|
static hasTable(table: string): Promise<boolean>;
|
|
19
33
|
static hasColumn(table: string, column: string): Promise<boolean>;
|
|
34
|
+
static getIndexes(table: string): Promise<SchemaIndex[]>;
|
|
35
|
+
static hasIndex(table: string, indexOrColumns: string | string[]): Promise<boolean>;
|
|
36
|
+
static getForeignKeys(table: string): Promise<SchemaForeignKey[]>;
|
|
37
|
+
static hasForeignKey(table: string, keyOrColumns: string | string[]): Promise<boolean>;
|
|
38
|
+
private static groupIndexRows;
|
|
39
|
+
private static groupForeignKeyRows;
|
|
20
40
|
static getColumn(table: string, column: string): Promise<{
|
|
21
41
|
name: string;
|
|
22
42
|
type: string;
|
|
@@ -198,6 +198,164 @@ export class Schema {
|
|
|
198
198
|
const result = await connection.query(sql, bindings);
|
|
199
199
|
return result.length > 0;
|
|
200
200
|
}
|
|
201
|
+
static async getIndexes(table) {
|
|
202
|
+
const connection = this.getConnection();
|
|
203
|
+
const driver = connection.getDriverName();
|
|
204
|
+
const grammar = this.getGrammar();
|
|
205
|
+
const schema = connection.getSchema() || "public";
|
|
206
|
+
if (driver === "sqlite") {
|
|
207
|
+
const indexes = await connection.query(`PRAGMA index_list(${grammar.wrap(table)})`);
|
|
208
|
+
const results = [];
|
|
209
|
+
for (const index of indexes) {
|
|
210
|
+
const name = String(index.name);
|
|
211
|
+
const columns = await connection.query(`PRAGMA index_info(${grammar.wrap(name)})`);
|
|
212
|
+
results.push({
|
|
213
|
+
name,
|
|
214
|
+
columns: columns.map((row) => String(row.name)),
|
|
215
|
+
unique: Number(index.unique) === 1,
|
|
216
|
+
primary: String(index.origin || "") === "pk",
|
|
217
|
+
});
|
|
218
|
+
}
|
|
219
|
+
return results;
|
|
220
|
+
}
|
|
221
|
+
if (driver === "mysql") {
|
|
222
|
+
const rows = await connection.query(`SELECT index_name, column_name, non_unique
|
|
223
|
+
FROM information_schema.statistics
|
|
224
|
+
WHERE table_schema = DATABASE() AND table_name = ?
|
|
225
|
+
ORDER BY index_name, seq_in_index`, [table]);
|
|
226
|
+
return this.groupIndexRows(rows, "index_name", "column_name", (row) => Number(row.non_unique) === 0);
|
|
227
|
+
}
|
|
228
|
+
const rows = await connection.query(`SELECT
|
|
229
|
+
i.relname AS index_name,
|
|
230
|
+
a.attname AS column_name,
|
|
231
|
+
ix.indisunique AS is_unique,
|
|
232
|
+
ix.indisprimary AS is_primary,
|
|
233
|
+
k.ordinality
|
|
234
|
+
FROM pg_class t
|
|
235
|
+
JOIN pg_namespace n ON n.oid = t.relnamespace
|
|
236
|
+
JOIN pg_index ix ON t.oid = ix.indrelid
|
|
237
|
+
JOIN pg_class i ON i.oid = ix.indexrelid
|
|
238
|
+
JOIN unnest(ix.indkey) WITH ORDINALITY AS k(attnum, ordinality) ON true
|
|
239
|
+
JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = k.attnum
|
|
240
|
+
WHERE n.nspname = $1 AND t.relname = $2
|
|
241
|
+
ORDER BY i.relname, k.ordinality`, [schema, table]);
|
|
242
|
+
return this.groupIndexRows(rows, "index_name", "column_name", (row) => !!row.is_unique, (row) => !!row.is_primary);
|
|
243
|
+
}
|
|
244
|
+
static async hasIndex(table, indexOrColumns) {
|
|
245
|
+
const expectedColumns = Array.isArray(indexOrColumns) ? indexOrColumns : undefined;
|
|
246
|
+
const indexes = await this.getIndexes(table);
|
|
247
|
+
if (!expectedColumns) {
|
|
248
|
+
return indexes.some((index) => index.name === indexOrColumns);
|
|
249
|
+
}
|
|
250
|
+
return indexes.some((index) => (index.columns.length === expectedColumns.length &&
|
|
251
|
+
index.columns.every((column, indexPosition) => column === expectedColumns[indexPosition])));
|
|
252
|
+
}
|
|
253
|
+
static async getForeignKeys(table) {
|
|
254
|
+
const connection = this.getConnection();
|
|
255
|
+
const driver = connection.getDriverName();
|
|
256
|
+
const grammar = this.getGrammar();
|
|
257
|
+
const schema = connection.getSchema() || "public";
|
|
258
|
+
if (driver === "sqlite") {
|
|
259
|
+
const rows = await connection.query(`PRAGMA foreign_key_list(${grammar.wrap(table)})`);
|
|
260
|
+
const grouped = new Map();
|
|
261
|
+
for (const row of rows) {
|
|
262
|
+
const key = String(row.id);
|
|
263
|
+
const fk = grouped.get(key) || {
|
|
264
|
+
name: undefined,
|
|
265
|
+
columns: [],
|
|
266
|
+
references: [],
|
|
267
|
+
onTable: String(row.table),
|
|
268
|
+
onDelete: row.on_delete ? String(row.on_delete).toLowerCase() : undefined,
|
|
269
|
+
onUpdate: row.on_update ? String(row.on_update).toLowerCase() : undefined,
|
|
270
|
+
};
|
|
271
|
+
fk.columns.push(String(row.from));
|
|
272
|
+
fk.references.push(String(row.to));
|
|
273
|
+
grouped.set(key, fk);
|
|
274
|
+
}
|
|
275
|
+
return [...grouped.values()];
|
|
276
|
+
}
|
|
277
|
+
if (driver === "mysql") {
|
|
278
|
+
const rows = await connection.query(`SELECT
|
|
279
|
+
k.constraint_name,
|
|
280
|
+
k.column_name,
|
|
281
|
+
k.referenced_table_name,
|
|
282
|
+
k.referenced_column_name,
|
|
283
|
+
rc.delete_rule,
|
|
284
|
+
rc.update_rule,
|
|
285
|
+
k.ordinal_position
|
|
286
|
+
FROM information_schema.key_column_usage k
|
|
287
|
+
JOIN information_schema.referential_constraints rc
|
|
288
|
+
ON rc.constraint_schema = k.constraint_schema
|
|
289
|
+
AND rc.constraint_name = k.constraint_name
|
|
290
|
+
WHERE k.table_schema = DATABASE()
|
|
291
|
+
AND k.table_name = ?
|
|
292
|
+
AND k.referenced_table_name IS NOT NULL
|
|
293
|
+
ORDER BY k.constraint_name, k.ordinal_position`, [table]);
|
|
294
|
+
return this.groupForeignKeyRows(rows, "constraint_name", "column_name", "referenced_table_name", "referenced_column_name", "delete_rule", "update_rule");
|
|
295
|
+
}
|
|
296
|
+
const rows = await connection.query(`SELECT
|
|
297
|
+
tc.constraint_name,
|
|
298
|
+
kcu.column_name,
|
|
299
|
+
ccu.table_name AS referenced_table_name,
|
|
300
|
+
ccu.column_name AS referenced_column_name,
|
|
301
|
+
rc.delete_rule,
|
|
302
|
+
rc.update_rule,
|
|
303
|
+
kcu.ordinal_position
|
|
304
|
+
FROM information_schema.table_constraints tc
|
|
305
|
+
JOIN information_schema.key_column_usage kcu
|
|
306
|
+
ON tc.constraint_name = kcu.constraint_name
|
|
307
|
+
AND tc.table_schema = kcu.table_schema
|
|
308
|
+
JOIN information_schema.constraint_column_usage ccu
|
|
309
|
+
ON ccu.constraint_name = tc.constraint_name
|
|
310
|
+
AND ccu.table_schema = tc.table_schema
|
|
311
|
+
JOIN information_schema.referential_constraints rc
|
|
312
|
+
ON rc.constraint_name = tc.constraint_name
|
|
313
|
+
AND rc.constraint_schema = tc.table_schema
|
|
314
|
+
WHERE tc.constraint_type = 'FOREIGN KEY'
|
|
315
|
+
AND tc.table_schema = $1
|
|
316
|
+
AND tc.table_name = $2
|
|
317
|
+
ORDER BY tc.constraint_name, kcu.ordinal_position`, [schema, table]);
|
|
318
|
+
return this.groupForeignKeyRows(rows, "constraint_name", "column_name", "referenced_table_name", "referenced_column_name", "delete_rule", "update_rule");
|
|
319
|
+
}
|
|
320
|
+
static async hasForeignKey(table, keyOrColumns) {
|
|
321
|
+
const expectedColumns = Array.isArray(keyOrColumns) ? keyOrColumns : undefined;
|
|
322
|
+
const foreignKeys = await this.getForeignKeys(table);
|
|
323
|
+
if (!expectedColumns) {
|
|
324
|
+
return foreignKeys.some((fk) => fk.name === keyOrColumns);
|
|
325
|
+
}
|
|
326
|
+
return foreignKeys.some((fk) => (fk.columns.length === expectedColumns.length &&
|
|
327
|
+
fk.columns.every((column, indexPosition) => column === expectedColumns[indexPosition])));
|
|
328
|
+
}
|
|
329
|
+
static groupIndexRows(rows, nameKey, columnKey, unique, primary = () => false) {
|
|
330
|
+
const grouped = new Map();
|
|
331
|
+
for (const row of rows) {
|
|
332
|
+
const name = String(row[nameKey]);
|
|
333
|
+
const index = grouped.get(name) || { name, columns: [], unique: unique(row), primary: primary(row) };
|
|
334
|
+
index.columns.push(String(row[columnKey]));
|
|
335
|
+
index.unique = index.unique || unique(row);
|
|
336
|
+
index.primary = index.primary || primary(row);
|
|
337
|
+
grouped.set(name, index);
|
|
338
|
+
}
|
|
339
|
+
return [...grouped.values()];
|
|
340
|
+
}
|
|
341
|
+
static groupForeignKeyRows(rows, nameKey, columnKey, tableKey, referenceKey, deleteKey, updateKey) {
|
|
342
|
+
const grouped = new Map();
|
|
343
|
+
for (const row of rows) {
|
|
344
|
+
const name = String(row[nameKey]);
|
|
345
|
+
const fk = grouped.get(name) || {
|
|
346
|
+
name,
|
|
347
|
+
columns: [],
|
|
348
|
+
references: [],
|
|
349
|
+
onTable: String(row[tableKey]),
|
|
350
|
+
onDelete: row[deleteKey] ? String(row[deleteKey]).toLowerCase() : undefined,
|
|
351
|
+
onUpdate: row[updateKey] ? String(row[updateKey]).toLowerCase() : undefined,
|
|
352
|
+
};
|
|
353
|
+
fk.columns.push(String(row[columnKey]));
|
|
354
|
+
fk.references.push(String(row[referenceKey]));
|
|
355
|
+
grouped.set(name, fk);
|
|
356
|
+
}
|
|
357
|
+
return [...grouped.values()];
|
|
358
|
+
}
|
|
201
359
|
static async getColumn(table, column) {
|
|
202
360
|
const connection = this.getConnection();
|
|
203
361
|
const driver = connection.getDriverName();
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import type { ModelAttributeInput, ModelConstructor } from "../model/Model.js";
import { Model } from "../model/Model.js";
/** Generates the base attribute set for one model, given a 1-based sequence number. */
export type FactoryDefinition<T extends Model> = (sequence: number) => ModelAttributeInput<T>;
/**
 * A transformation layered over the base definition: either a partial
 * attribute object merged on top, or a function of the attributes built so
 * far and the 1-based sequence number.
 */
export type FactoryState<T extends Model> = ModelAttributeInput<T> | ((attributes: ModelAttributeInput<T>, sequence: number) => ModelAttributeInput<T>);
|
|
5
|
+
/**
 * Fluent builder that produces model instances or raw attribute sets from a
 * definition plus optional states. `count` and `state` return new factories,
 * leaving the receiver unchanged.
 */
export declare class Factory<T extends Model> {
    private model;
    private definition;
    private amount;
    private states;
    constructor(model: ModelConstructor<T>, definition: FactoryDefinition<T>);
    /** New factory that builds `amount` records. */
    count(amount: number): Factory<T>;
    /** New factory with an additional state transformation appended. */
    state(state: FactoryState<T>): Factory<T>;
    /** Instantiate without persisting; a single model when the count is 1, otherwise an array. */
    make(overrides?: ModelAttributeInput<T>): T | T[];
    /** Instantiate and persist; a single model when the count is 1, otherwise an array. */
    create(overrides?: ModelAttributeInput<T>): Promise<T | T[]>;
    /** Attribute objects only; a single object when the count is 1, otherwise an array. */
    raw(overrides?: ModelAttributeInput<T>): ModelAttributeInput<T> | ModelAttributeInput<T>[];
    private attributesFor;
    private clone;
}
|
|
19
|
+
/** Shorthand for `new Factory(model, definition)`. */
export declare function factory<T extends Model>(model: ModelConstructor<T>, definition: FactoryDefinition<T>): Factory<T>;
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
export class Factory {
|
|
2
|
+
model;
|
|
3
|
+
definition;
|
|
4
|
+
amount = 1;
|
|
5
|
+
states = [];
|
|
6
|
+
constructor(model, definition) {
|
|
7
|
+
this.model = model;
|
|
8
|
+
this.definition = definition;
|
|
9
|
+
}
|
|
10
|
+
count(amount) {
|
|
11
|
+
const next = this.clone();
|
|
12
|
+
next.amount = Math.max(0, amount);
|
|
13
|
+
return next;
|
|
14
|
+
}
|
|
15
|
+
state(state) {
|
|
16
|
+
const next = this.clone();
|
|
17
|
+
next.states = [...next.states, state];
|
|
18
|
+
return next;
|
|
19
|
+
}
|
|
20
|
+
make(overrides = {}) {
|
|
21
|
+
const models = Array.from({ length: this.amount }, (_, index) => {
|
|
22
|
+
const attributes = this.attributesFor(index + 1, overrides);
|
|
23
|
+
return new this.model(attributes);
|
|
24
|
+
});
|
|
25
|
+
return this.amount === 1 ? models[0] : models;
|
|
26
|
+
}
|
|
27
|
+
async create(overrides = {}) {
|
|
28
|
+
const records = Array.from({ length: this.amount }, (_, index) => this.attributesFor(index + 1, overrides));
|
|
29
|
+
const models = [];
|
|
30
|
+
for (const attributes of records) {
|
|
31
|
+
models.push(await this.model.create(attributes));
|
|
32
|
+
}
|
|
33
|
+
return this.amount === 1 ? models[0] : models;
|
|
34
|
+
}
|
|
35
|
+
raw(overrides = {}) {
|
|
36
|
+
const records = Array.from({ length: this.amount }, (_, index) => this.attributesFor(index + 1, overrides));
|
|
37
|
+
return this.amount === 1 ? records[0] : records;
|
|
38
|
+
}
|
|
39
|
+
attributesFor(sequence, overrides) {
|
|
40
|
+
let attributes = { ...this.definition(sequence) };
|
|
41
|
+
for (const state of this.states) {
|
|
42
|
+
const next = typeof state === "function" ? state(attributes, sequence) : state;
|
|
43
|
+
attributes = { ...attributes, ...next };
|
|
44
|
+
}
|
|
45
|
+
return { ...attributes, ...overrides };
|
|
46
|
+
}
|
|
47
|
+
clone() {
|
|
48
|
+
const next = new Factory(this.model, this.definition);
|
|
49
|
+
next.amount = this.amount;
|
|
50
|
+
next.states = [...this.states];
|
|
51
|
+
return next;
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
/** Convenience shorthand for constructing a Factory for the given model and definition. */
export function factory(model, definition) {
    const instance = new Factory(model, definition);
    return instance;
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { Connection } from "../connection/Connection.js";
/**
 * Base class for database seeders. Subclasses implement `run` and may invoke
 * other seeders via `call`.
 */
export declare abstract class Seeder {
    protected connection: Connection;
    constructor(connection?: Connection);
    /** Populate the database; may be synchronous or asynchronous. */
    abstract run(): Promise<void> | void;
    /** Run the given seeder instances/classes sequentially; classes receive this seeder's connection. */
    protected call(seeders: (Seeder | (new (connection?: Connection) => Seeder))[]): Promise<void>;
}
|
|
8
|
+
/**
 * Discovers and executes seeders — as classes/instances, by directory, by
 * file, or by name.
 */
export declare class SeederRunner {
    private connection;
    constructor(connection?: Connection);
    /** Run the given seeder instances/classes sequentially. */
    run(seeders: (Seeder | (new (connection?: Connection) => Seeder))[]): Promise<void>;
    /** Run every seeder file found under the given path(s). */
    runPaths(paths: string | string[]): Promise<void>;
    /** Import a single seeder module and run its exported seeder class. */
    runFile(file: string): Promise<void>;
    /** Run one seeder by file path or by name, searching `searchPaths` when the target is not a file. */
    runTarget(target: string, searchPaths?: string | string[]): Promise<void>;
    private getSeederFiles;
}
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { existsSync } from "fs";
|
|
2
|
+
import { readdir, stat } from "fs/promises";
|
|
3
|
+
import { basename, extname, resolve } from "path";
|
|
4
|
+
import { pathToFileURL } from "url";
|
|
5
|
+
import { Schema } from "../schema/Schema.js";
|
|
6
|
+
import { normalizePathList, toPosixPath } from "../utils.js";
|
|
7
|
+
export class Seeder {
|
|
8
|
+
connection;
|
|
9
|
+
constructor(connection = Schema.getConnection()) {
|
|
10
|
+
this.connection = connection;
|
|
11
|
+
}
|
|
12
|
+
async call(seeders) {
|
|
13
|
+
for (const seeder of seeders) {
|
|
14
|
+
const instance = typeof seeder === "function" ? new seeder(this.connection) : seeder;
|
|
15
|
+
await instance.run();
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
/**
 * Discovers and executes seeders — as classes/instances, by directory, by
 * file, or by name.
 */
export class SeederRunner {
    connection;
    /** @param connection Connection passed to instantiated seeders; defaults to the active schema connection. */
    constructor(connection = Schema.getConnection()) {
        this.connection = connection;
    }
    /**
     * Run the given seeders in order. Entries may be instances or classes;
     * classes are instantiated with this runner's connection.
     */
    async run(seeders) {
        for (const seeder of seeders) {
            const instance = typeof seeder === "function" ? new seeder(this.connection) : seeder;
            await instance.run();
        }
    }
    /** Discover every seeder file under the given path(s) and run each one. */
    async runPaths(paths) {
        const files = await this.getSeederFiles(paths);
        for (const file of files) {
            await this.runFile(file);
        }
    }
    /**
     * Import a single seeder module and run it. Uses the default export when
     * present, otherwise the first named export.
     * @throws Error when the module exports nothing usable.
     */
    async runFile(file) {
        const resolved = resolve(file);
        const module = await import(pathToFileURL(resolved).href);
        const SeederClass = module.default || Object.values(module)[0];
        if (!SeederClass) {
            throw new Error(`Seeder ${file} does not export a class.`);
        }
        await this.run([SeederClass]);
    }
    /**
     * Run one seeder identified either by a direct file path or by name,
     * searching the given directories when the target is not an existing file.
     * @throws Error when no matching seeder file can be found.
     */
    async runTarget(target, searchPaths = "./database/seeders") {
        const resolved = resolve(target);
        if (existsSync(resolved) && (await stat(resolved)).isFile()) {
            await this.runFile(resolved);
            return;
        }
        const files = await this.getSeederFiles(searchPaths);
        // Match by extension-less basename first, then by path suffix.
        const normalizedTarget = target.replace(/\.(ts|js|mts|mjs|cts|cjs)$/i, "");
        const match = files.find((file) => {
            const name = basename(file, extname(file));
            return name === normalizedTarget || file.endsWith(target) || file.endsWith(`${target}.ts`) || file.endsWith(`${target}.js`);
        });
        if (!match) {
            throw new Error(`Seeder "${target}" could not be found in ${normalizePathList(searchPaths).join(", ")}.`);
        }
        await this.runFile(match);
    }
    /**
     * Collect seeder source files from the given path(s), skipping declaration
     * and test/spec files, sorted by basename (then full path) for a stable
     * execution order. Non-existent paths are silently ignored.
     */
    async getSeederFiles(paths) {
        const allowedExtensions = [".ts", ".js", ".mts", ".mjs", ".cts", ".cjs"];
        // Fix: previously only .d.ts/.test.ts/.spec.ts were excluded, so compiled
        // test files (*.test.js, *.spec.mjs, ...) and .d.mts/.d.cts declaration
        // files slipped through and were executed as seeders.
        const excludedPattern = /\.(d|test|spec)\.(ts|js|mts|mjs|cts|cjs)$/i;
        const files = [];
        for (const path of normalizePathList(paths)) {
            const root = resolve(path);
            if (!existsSync(root))
                continue;
            for (const entry of await readdir(root, { withFileTypes: true })) {
                if (!entry.isFile())
                    continue;
                if (excludedPattern.test(entry.name))
                    continue;
                if (!allowedExtensions.includes(extname(entry.name)))
                    continue;
                files.push(toPosixPath(resolve(root, entry.name)));
            }
        }
        return files.sort((a, b) => basename(a).localeCompare(basename(b)) || a.localeCompare(b));
    }
}
|
package/package.json
CHANGED