@lobomfz/db 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +66 -59
- package/package.json +1 -1
- package/src/database.ts +74 -35
- package/src/dialect/connection.ts +12 -12
- package/src/dialect/driver.ts +1 -3
- package/src/generated.ts +3 -1
- package/src/index.ts +1 -0
- package/src/migration/diff.ts +193 -0
- package/src/migration/execute.ts +93 -0
- package/src/migration/introspect.ts +140 -0
- package/src/migration/types.ts +72 -0
- package/src/plugin.ts +121 -17
- package/src/types.ts +6 -0
package/README.md
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
# @lobomfz/db
|
|
2
2
|
|
|
3
|
-
SQLite database with Arktype schemas and typed Kysely client.
|
|
3
|
+
SQLite database with Arktype schemas and typed Kysely client for Bun.
|
|
4
4
|
|
|
5
5
|
## Install
|
|
6
6
|
|
|
7
7
|
```bash
|
|
8
|
-
bun add @lobomfz/db
|
|
8
|
+
bun add @lobomfz/db arktype kysely
|
|
9
9
|
```
|
|
10
10
|
|
|
11
11
|
## Usage
|
|
@@ -15,87 +15,94 @@ import { Database, generated, type } from "@lobomfz/db";
|
|
|
15
15
|
|
|
16
16
|
const db = new Database({
|
|
17
17
|
path: "data.db",
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
18
|
+
schema: {
|
|
19
|
+
tables: {
|
|
20
|
+
users: type({
|
|
21
|
+
id: generated("autoincrement"),
|
|
22
|
+
name: "string",
|
|
23
|
+
email: type("string").configure({ unique: true }),
|
|
24
|
+
"bio?": "string", // optional → nullable in SQLite
|
|
25
|
+
active: type("boolean").default(true),
|
|
26
|
+
created_at: generated("now"), // defaults to current time
|
|
27
|
+
}),
|
|
28
|
+
posts: type({
|
|
29
|
+
id: generated("autoincrement"),
|
|
30
|
+
user_id: type("number.integer").configure({ references: "users.id", onDelete: "cascade" }),
|
|
31
|
+
title: "string",
|
|
32
|
+
published_at: "Date", // native Date support
|
|
33
|
+
tags: "string[]", // JSON columns just work
|
|
34
|
+
metadata: type({ source: "string", "priority?": "number" }), // validated on write by default
|
|
35
|
+
status: type.enumerated("draft", "published").default("draft"),
|
|
36
|
+
}),
|
|
37
|
+
},
|
|
38
|
+
indexes: {
|
|
39
|
+
posts: [{ columns: ["user_id", "status"] }, { columns: ["title"], unique: true }],
|
|
40
|
+
},
|
|
33
41
|
},
|
|
34
|
-
|
|
35
|
-
|
|
42
|
+
pragmas: {
|
|
43
|
+
journal_mode: "wal",
|
|
44
|
+
synchronous: "normal",
|
|
36
45
|
},
|
|
37
46
|
});
|
|
38
47
|
|
|
39
|
-
// Fully typed Kysely client
|
|
48
|
+
// Fully typed Kysely client — generated/default fields are optional on insert
|
|
40
49
|
await db.kysely.insertInto("users").values({ name: "John", email: "john@example.com" }).execute();
|
|
41
50
|
|
|
42
51
|
const users = await db.kysely.selectFrom("users").selectAll().execute();
|
|
52
|
+
// users[0].active → true
|
|
53
|
+
// users[0].created_at → Date
|
|
43
54
|
```
|
|
44
55
|
|
|
45
|
-
|
|
56
|
+
Booleans, dates, objects, arrays — everything round-trips as the type you declared. The schema is the source of truth for table creation, TypeScript types, and runtime coercion.
|
|
46
57
|
|
|
47
|
-
|
|
48
|
-
- Full TypeScript inference (insert vs select types)
|
|
49
|
-
- JSON columns with validation
|
|
50
|
-
- Foreign keys, unique constraints, defaults
|
|
51
|
-
- Composite indexes
|
|
52
|
-
|
|
53
|
-
## Generated Fields
|
|
54
|
-
|
|
55
|
-
Use `generated()` for SQL-generated values:
|
|
58
|
+
## API
|
|
56
59
|
|
|
57
60
|
```typescript
|
|
58
|
-
generated("autoincrement"); //
|
|
59
|
-
generated("now"); //
|
|
61
|
+
generated("autoincrement"); // auto-incrementing primary key
|
|
62
|
+
generated("now"); // defaults to current timestamp, returned as Date
|
|
63
|
+
type("string").default("pending"); // SQL DEFAULT
|
|
64
|
+
type("string").configure({ unique: true }); // UNIQUE
|
|
65
|
+
type("number.integer").configure({ references: "users.id", onDelete: "cascade" }); // FK
|
|
60
66
|
```
|
|
61
67
|
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
Use Arktype's `.default()` for JS defaults (also creates SQL DEFAULT):
|
|
68
|
+
JSON columns are validated against the schema on write by default. To also validate on read, or to disable write validation:
|
|
65
69
|
|
|
66
70
|
```typescript
|
|
67
|
-
|
|
68
|
-
|
|
71
|
+
new Database({
|
|
72
|
+
// ...
|
|
73
|
+
validation: { onRead: true }, // default: { onRead: false, onWrite: true }
|
|
74
|
+
});
|
|
69
75
|
```
|
|
70
76
|
|
|
71
|
-
##
|
|
77
|
+
## Migrations
|
|
72
78
|
|
|
73
|
-
|
|
74
|
-
type("string").configure({ unique: true });
|
|
75
|
-
type("number.integer").configure({ references: "users.id", onDelete: "cascade" });
|
|
76
|
-
```
|
|
79
|
+
Schema changes are applied automatically on startup. Every time `new Database(...)` runs, the library compares your Arktype schema against the actual SQLite database and applies the minimum set of operations to bring them in sync. No migration files, no version tracking — the database itself is the source of truth.
|
|
77
80
|
|
|
78
|
-
|
|
81
|
+
### What's supported
|
|
79
82
|
|
|
80
|
-
|
|
83
|
+
| Change | Strategy |
|
|
84
|
+
|---|---|
|
|
85
|
+
| New table | `CREATE TABLE` |
|
|
86
|
+
| Removed table | `DROP TABLE` |
|
|
87
|
+
| New nullable column | `ALTER TABLE ADD COLUMN` |
|
|
88
|
+
| New NOT NULL column with DEFAULT | `ALTER TABLE ADD COLUMN` |
|
|
89
|
+
| Removed column | Table rebuild |
|
|
90
|
+
| Type change | Table rebuild |
|
|
91
|
+
| Nullability change | Table rebuild |
|
|
92
|
+
| DEFAULT change | Table rebuild |
|
|
93
|
+
| UNIQUE added/removed | Table rebuild |
|
|
94
|
+
| FK added/removed/changed | Table rebuild |
|
|
95
|
+
| Index added | `CREATE INDEX` |
|
|
96
|
+
| Index removed | `DROP INDEX` |
|
|
81
97
|
|
|
82
|
-
|
|
83
|
-
const db = new Database({
|
|
84
|
-
tables: { ... },
|
|
85
|
-
indexes: {
|
|
86
|
-
posts: [
|
|
87
|
-
{ columns: ["user_id", "category_id"], unique: true },
|
|
88
|
-
{ columns: ["created_at"] },
|
|
89
|
-
],
|
|
90
|
-
},
|
|
91
|
-
});
|
|
92
|
-
```
|
|
98
|
+
Table rebuilds follow SQLite's [recommended procedure](https://www.sqlite.org/lang_altertable.html#otheralter): create a new table with the target schema, copy data from the old table, drop the old table, rename the new one. Foreign keys are disabled during rebuilds and validated via `PRAGMA foreign_key_check` before committing.
|
|
93
99
|
|
|
94
|
-
|
|
100
|
+
### Safety rules
|
|
95
101
|
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
102
|
+
- Adding a NOT NULL column without DEFAULT to a table **with data** throws an error
|
|
103
|
+
- Changing a nullable column to NOT NULL without DEFAULT throws if any row has NULL in that column
|
|
104
|
+
- Nullable-to-NOT-NULL with DEFAULT uses `COALESCE` to fill existing NULLs
|
|
105
|
+
- Column renames are treated as drop + add (data in the old column is not preserved)
|
|
99
106
|
|
|
100
107
|
## License
|
|
101
108
|
|
package/package.json
CHANGED
package/src/database.ts
CHANGED
|
@@ -3,6 +3,7 @@ import { Kysely } from "kysely";
|
|
|
3
3
|
import { BunSqliteDialect } from "./dialect/dialect";
|
|
4
4
|
import type { Type } from "arktype";
|
|
5
5
|
import type { GeneratedPreset } from "./generated";
|
|
6
|
+
import type { DbFieldMeta } from "./env";
|
|
6
7
|
import { DeserializePlugin, type ColumnCoercion, type ColumnsMap } from "./plugin";
|
|
7
8
|
import type {
|
|
8
9
|
DatabaseOptions,
|
|
@@ -11,6 +12,9 @@ import type {
|
|
|
11
12
|
TablesFromSchemas,
|
|
12
13
|
DatabasePragmas,
|
|
13
14
|
} from "./types";
|
|
15
|
+
import { Introspector } from "./migration/introspect";
|
|
16
|
+
import { Differ, type DesiredTable } from "./migration/diff";
|
|
17
|
+
import { Executor } from "./migration/execute";
|
|
14
18
|
|
|
15
19
|
type ArkBranch = {
|
|
16
20
|
domain?: string;
|
|
@@ -26,7 +30,7 @@ type StructureProp = {
|
|
|
26
30
|
value: Type & {
|
|
27
31
|
branches: ArkBranch[];
|
|
28
32
|
proto?: unknown;
|
|
29
|
-
meta:
|
|
33
|
+
meta: DbFieldMeta & { _generated?: GeneratedPreset };
|
|
30
34
|
};
|
|
31
35
|
inner: { default?: unknown };
|
|
32
36
|
};
|
|
@@ -41,7 +45,7 @@ type Prop = {
|
|
|
41
45
|
isDate?: boolean;
|
|
42
46
|
isJson?: boolean;
|
|
43
47
|
jsonSchema?: Type;
|
|
44
|
-
meta?:
|
|
48
|
+
meta?: DbFieldMeta;
|
|
45
49
|
generated?: GeneratedPreset;
|
|
46
50
|
defaultValue?: unknown;
|
|
47
51
|
};
|
|
@@ -69,11 +73,16 @@ export class Database<T extends SchemaRecord> {
|
|
|
69
73
|
|
|
70
74
|
this.applyPragmas();
|
|
71
75
|
|
|
72
|
-
this.
|
|
76
|
+
this.migrate();
|
|
77
|
+
|
|
78
|
+
const validation = {
|
|
79
|
+
onRead: options.validation?.onRead ?? false,
|
|
80
|
+
onWrite: options.validation?.onWrite ?? true,
|
|
81
|
+
};
|
|
73
82
|
|
|
74
83
|
this.kysely = new Kysely<TablesFromSchemas<T>>({
|
|
75
84
|
dialect: new BunSqliteDialect({ database: this.sqlite }),
|
|
76
|
-
plugins: [new DeserializePlugin(this.columns)],
|
|
85
|
+
plugins: [new DeserializePlugin(this.columns, validation)],
|
|
77
86
|
});
|
|
78
87
|
}
|
|
79
88
|
|
|
@@ -95,10 +104,10 @@ export class Database<T extends SchemaRecord> {
|
|
|
95
104
|
}
|
|
96
105
|
}
|
|
97
106
|
|
|
98
|
-
private normalizeProp(structureProp: StructureProp, parentSchema: Type)
|
|
107
|
+
private normalizeProp(structureProp: StructureProp, parentSchema: Type) {
|
|
99
108
|
const { key, value: v, inner } = structureProp;
|
|
100
|
-
const kind = structureProp.required ? "required" : "optional";
|
|
101
|
-
const generated = v.meta._generated
|
|
109
|
+
const kind: Prop["kind"] = structureProp.required ? "required" : "optional";
|
|
110
|
+
const generated = v.meta._generated;
|
|
102
111
|
const defaultValue = inner.default;
|
|
103
112
|
|
|
104
113
|
const nonNull = v.branches.filter((b) => b.unit !== null);
|
|
@@ -155,7 +164,7 @@ export class Database<T extends SchemaRecord> {
|
|
|
155
164
|
return "TEXT";
|
|
156
165
|
}
|
|
157
166
|
|
|
158
|
-
private columnConstraint(prop: Prop)
|
|
167
|
+
private columnConstraint(prop: Prop) {
|
|
159
168
|
if (prop.generated === "autoincrement") {
|
|
160
169
|
return "PRIMARY KEY AUTOINCREMENT";
|
|
161
170
|
}
|
|
@@ -171,7 +180,7 @@ export class Database<T extends SchemaRecord> {
|
|
|
171
180
|
return null;
|
|
172
181
|
}
|
|
173
182
|
|
|
174
|
-
private defaultClause(prop: Prop)
|
|
183
|
+
private defaultClause(prop: Prop) {
|
|
175
184
|
if (prop.generated === "now") {
|
|
176
185
|
return "DEFAULT (unixepoch())";
|
|
177
186
|
}
|
|
@@ -192,7 +201,7 @@ export class Database<T extends SchemaRecord> {
|
|
|
192
201
|
return `DEFAULT ${prop.defaultValue}`;
|
|
193
202
|
}
|
|
194
203
|
|
|
195
|
-
|
|
204
|
+
throw new Error(`Unsupported default value type: ${typeof prop.defaultValue}`);
|
|
196
205
|
}
|
|
197
206
|
|
|
198
207
|
private columnDef(prop: Prop) {
|
|
@@ -208,7 +217,7 @@ export class Database<T extends SchemaRecord> {
|
|
|
208
217
|
}
|
|
209
218
|
|
|
210
219
|
private foreignKey(prop: Prop) {
|
|
211
|
-
const ref = prop.meta?.references
|
|
220
|
+
const ref = prop.meta?.references;
|
|
212
221
|
|
|
213
222
|
if (!ref) {
|
|
214
223
|
return null;
|
|
@@ -218,7 +227,7 @@ export class Database<T extends SchemaRecord> {
|
|
|
218
227
|
|
|
219
228
|
let fk = `FOREIGN KEY ("${prop.key}") REFERENCES "${table}"("${column}")`;
|
|
220
229
|
|
|
221
|
-
const onDelete = prop.meta?.onDelete
|
|
230
|
+
const onDelete = prop.meta?.onDelete;
|
|
222
231
|
|
|
223
232
|
if (onDelete) {
|
|
224
233
|
fk += ` ON DELETE ${onDelete.toUpperCase()}`;
|
|
@@ -227,6 +236,23 @@ export class Database<T extends SchemaRecord> {
|
|
|
227
236
|
return fk;
|
|
228
237
|
}
|
|
229
238
|
|
|
239
|
+
private addColumnDef(prop: Prop) {
|
|
240
|
+
let def = this.columnDef(prop);
|
|
241
|
+
|
|
242
|
+
const ref = prop.meta?.references;
|
|
243
|
+
|
|
244
|
+
if (ref) {
|
|
245
|
+
const [table, column] = ref.split(".");
|
|
246
|
+
def += ` REFERENCES "${table}"("${column}")`;
|
|
247
|
+
|
|
248
|
+
if (prop.meta?.onDelete) {
|
|
249
|
+
def += ` ON DELETE ${prop.meta.onDelete.toUpperCase()}`;
|
|
250
|
+
}
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
return def;
|
|
254
|
+
}
|
|
255
|
+
|
|
230
256
|
private parseSchemaProps(schema: Type) {
|
|
231
257
|
const structureProps = (schema as any).structure?.props as StructureProp[] | undefined;
|
|
232
258
|
|
|
@@ -278,15 +304,47 @@ export class Database<T extends SchemaRecord> {
|
|
|
278
304
|
return `CREATE TABLE IF NOT EXISTS "${tableName}" (${columns.concat(fks).join(", ")})`;
|
|
279
305
|
}
|
|
280
306
|
|
|
281
|
-
private
|
|
307
|
+
private migrate() {
|
|
308
|
+
const desiredTables: DesiredTable[] = [];
|
|
309
|
+
const schemaIndexes = this.options.schema.indexes;
|
|
310
|
+
|
|
282
311
|
for (const [name, schema] of Object.entries(this.options.schema.tables)) {
|
|
283
312
|
const props = this.parseSchemaProps(schema);
|
|
284
313
|
|
|
285
314
|
this.registerColumns(name, props);
|
|
286
|
-
|
|
315
|
+
|
|
316
|
+
const columns = props.map((prop) => {
|
|
317
|
+
const isNotNull = this.columnConstraint(prop) === "NOT NULL";
|
|
318
|
+
const defaultClause = this.defaultClause(prop);
|
|
319
|
+
const hasLiteralDefault = prop.generated !== "now" && defaultClause !== null;
|
|
320
|
+
|
|
321
|
+
return {
|
|
322
|
+
name: prop.key,
|
|
323
|
+
addable: !isNotNull || hasLiteralDefault,
|
|
324
|
+
columnDef: this.addColumnDef(prop),
|
|
325
|
+
type: this.sqlType(prop),
|
|
326
|
+
notnull: isNotNull,
|
|
327
|
+
defaultValue: defaultClause
|
|
328
|
+
? defaultClause.replace("DEFAULT ", "").replace(/^\((.+)\)$/, "$1")
|
|
329
|
+
: null,
|
|
330
|
+
unique: !!prop.meta?.unique,
|
|
331
|
+
references: prop.meta?.references ?? null,
|
|
332
|
+
onDelete: prop.meta?.onDelete?.toUpperCase() ?? null,
|
|
333
|
+
};
|
|
334
|
+
});
|
|
335
|
+
|
|
336
|
+
const indexes = (schemaIndexes?.[name] ?? []).map((indexDef) => ({
|
|
337
|
+
name: this.generateIndexName(name, indexDef.columns, indexDef.unique ?? false),
|
|
338
|
+
sql: this.generateCreateIndexSQL(name, indexDef),
|
|
339
|
+
}));
|
|
340
|
+
|
|
341
|
+
desiredTables.push({ name, sql: this.generateCreateTableSQL(name, props), columns, indexes });
|
|
287
342
|
}
|
|
288
343
|
|
|
289
|
-
this.
|
|
344
|
+
const existing = new Introspector(this.sqlite).introspect();
|
|
345
|
+
const ops = new Differ(desiredTables, existing).diff();
|
|
346
|
+
|
|
347
|
+
new Executor(this.sqlite, ops).execute();
|
|
290
348
|
}
|
|
291
349
|
|
|
292
350
|
private generateIndexName(tableName: string, columns: string[], unique: boolean) {
|
|
@@ -300,25 +358,7 @@ export class Database<T extends SchemaRecord> {
|
|
|
300
358
|
const unique = indexDef.unique ? "UNIQUE " : "";
|
|
301
359
|
const columns = indexDef.columns.map((c) => `"${c}"`).join(", ");
|
|
302
360
|
|
|
303
|
-
return `CREATE ${unique}INDEX
|
|
304
|
-
}
|
|
305
|
-
|
|
306
|
-
private createIndexes() {
|
|
307
|
-
const indexes = this.options.schema.indexes;
|
|
308
|
-
|
|
309
|
-
if (!indexes) {
|
|
310
|
-
return;
|
|
311
|
-
}
|
|
312
|
-
|
|
313
|
-
for (const [tableName, tableIndexes] of Object.entries(indexes)) {
|
|
314
|
-
if (!tableIndexes) {
|
|
315
|
-
continue;
|
|
316
|
-
}
|
|
317
|
-
|
|
318
|
-
for (const indexDef of tableIndexes) {
|
|
319
|
-
this.sqlite.run(this.generateCreateIndexSQL(tableName, indexDef));
|
|
320
|
-
}
|
|
321
|
-
}
|
|
361
|
+
return `CREATE ${unique}INDEX "${indexName}" ON "${tableName}" (${columns})`;
|
|
322
362
|
}
|
|
323
363
|
|
|
324
364
|
reset(table?: keyof T & string): void {
|
|
@@ -328,5 +368,4 @@ export class Database<T extends SchemaRecord> {
|
|
|
328
368
|
this.sqlite.run(`DELETE FROM "${t}"`);
|
|
329
369
|
}
|
|
330
370
|
}
|
|
331
|
-
|
|
332
371
|
}
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { Database } from "bun:sqlite";
|
|
1
|
+
import type { Database } from "bun:sqlite";
|
|
2
2
|
import type { CompiledQuery, DatabaseConnection, QueryResult } from "kysely";
|
|
3
3
|
import { serializeParam } from "./serialize";
|
|
4
4
|
|
|
@@ -9,30 +9,30 @@ export class BunSqliteConnection implements DatabaseConnection {
|
|
|
9
9
|
this.#db = db;
|
|
10
10
|
}
|
|
11
11
|
|
|
12
|
-
executeQuery<O>(
|
|
13
|
-
const
|
|
14
|
-
|
|
15
|
-
const stmt = this.#db.query(sql);
|
|
12
|
+
async executeQuery<O>(compiled: CompiledQuery): Promise<QueryResult<O>> {
|
|
13
|
+
const serializedParams = compiled.parameters.map(serializeParam);
|
|
14
|
+
|
|
15
|
+
const stmt = this.#db.query(compiled.sql);
|
|
16
16
|
|
|
17
17
|
if (stmt.columnNames.length > 0) {
|
|
18
|
-
return
|
|
18
|
+
return {
|
|
19
19
|
rows: stmt.all(serializedParams as any) as O[],
|
|
20
|
-
}
|
|
20
|
+
};
|
|
21
21
|
}
|
|
22
22
|
|
|
23
23
|
const results = stmt.run(serializedParams as any);
|
|
24
24
|
|
|
25
|
-
return
|
|
25
|
+
return {
|
|
26
26
|
insertId: BigInt(results.lastInsertRowid),
|
|
27
27
|
numAffectedRows: BigInt(results.changes),
|
|
28
28
|
rows: [],
|
|
29
|
-
}
|
|
29
|
+
};
|
|
30
30
|
}
|
|
31
31
|
|
|
32
32
|
async *streamQuery<R>(compiledQuery: CompiledQuery): AsyncIterableIterator<QueryResult<R>> {
|
|
33
|
-
const
|
|
34
|
-
|
|
35
|
-
const stmt = this.#db.prepare(sql);
|
|
33
|
+
const serializedParams = compiledQuery.parameters.map(serializeParam);
|
|
34
|
+
|
|
35
|
+
const stmt = this.#db.prepare(compiledQuery.sql);
|
|
36
36
|
|
|
37
37
|
for await (const row of stmt.iterate(serializedParams as any)) {
|
|
38
38
|
yield { rows: [row as R] };
|
package/src/dialect/driver.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { Database } from "bun:sqlite";
|
|
1
|
+
import type { Database } from "bun:sqlite";
|
|
2
2
|
import { CompiledQuery, type DatabaseConnection, type Driver } from "kysely";
|
|
3
3
|
import type { BunSqliteDialectConfig } from "./config";
|
|
4
4
|
import { BunSqliteConnection } from "./connection";
|
|
@@ -38,12 +38,10 @@ export class BunSqliteDriver implements Driver {
|
|
|
38
38
|
await connection.executeQuery(CompiledQuery.raw("rollback"));
|
|
39
39
|
}
|
|
40
40
|
|
|
41
|
-
// oxlint-disable-next-line require-await
|
|
42
41
|
async releaseConnection(): Promise<void> {
|
|
43
42
|
this.#connectionMutex.unlock();
|
|
44
43
|
}
|
|
45
44
|
|
|
46
|
-
// oxlint-disable-next-line require-await
|
|
47
45
|
async destroy(): Promise<void> {
|
|
48
46
|
this.#db?.close();
|
|
49
47
|
}
|
package/src/generated.ts
CHANGED
|
@@ -13,6 +13,8 @@ const generatedTypes = {
|
|
|
13
13
|
.default(() => new Date(0)),
|
|
14
14
|
};
|
|
15
15
|
|
|
16
|
-
export function generated<P extends GeneratedPreset>(
|
|
16
|
+
export function generated<P extends GeneratedPreset>(
|
|
17
|
+
preset: P,
|
|
18
|
+
): ReturnType<(typeof generatedTypes)[P]> {
|
|
17
19
|
return generatedTypes[preset]() as ReturnType<(typeof generatedTypes)[P]>;
|
|
18
20
|
}
|
package/src/index.ts
CHANGED
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
import type { ColumnSchema, IntrospectedTable, ColumnCopy, MigrationOp } from "./types";
|
|
2
|
+
|
|
3
|
+
export interface DesiredColumn extends ColumnSchema {
|
|
4
|
+
addable: boolean;
|
|
5
|
+
columnDef: string;
|
|
6
|
+
}
|
|
7
|
+
|
|
8
|
+
export type DesiredIndex = {
|
|
9
|
+
name: string;
|
|
10
|
+
sql: string;
|
|
11
|
+
};
|
|
12
|
+
|
|
13
|
+
export type DesiredTable = {
|
|
14
|
+
name: string;
|
|
15
|
+
sql: string;
|
|
16
|
+
columns: DesiredColumn[];
|
|
17
|
+
indexes?: DesiredIndex[];
|
|
18
|
+
};
|
|
19
|
+
|
|
20
|
+
export class Differ {
|
|
21
|
+
private ops: MigrationOp[] = [];
|
|
22
|
+
private desiredNames: Set<string>;
|
|
23
|
+
private rebuiltTables = new Set<string>();
|
|
24
|
+
|
|
25
|
+
constructor(
|
|
26
|
+
private desired: DesiredTable[],
|
|
27
|
+
private existing: Map<string, IntrospectedTable>,
|
|
28
|
+
) {
|
|
29
|
+
this.desiredNames = new Set(desired.map((t) => t.name));
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
diff() {
|
|
33
|
+
this.diffTables();
|
|
34
|
+
this.dropOrphans();
|
|
35
|
+
this.diffIndexes();
|
|
36
|
+
return this.ops;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
private diffTables() {
|
|
40
|
+
for (const table of this.desired) {
|
|
41
|
+
const existingTable = this.existing.get(table.name);
|
|
42
|
+
|
|
43
|
+
if (!existingTable) {
|
|
44
|
+
this.ops.push({ type: "CreateTable", table: table.name, sql: table.sql });
|
|
45
|
+
this.rebuiltTables.add(table.name);
|
|
46
|
+
continue;
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
this.diffColumns(table, existingTable);
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
private diffColumns(table: DesiredTable, existingTable: IntrospectedTable) {
|
|
54
|
+
const desiredNames = new Set(table.columns.map((c) => c.name));
|
|
55
|
+
const hasRemovedColumns = [...existingTable.columns.keys()].some(
|
|
56
|
+
(name) => !desiredNames.has(name),
|
|
57
|
+
);
|
|
58
|
+
const hasChangedColumns = table.columns.some((col) => {
|
|
59
|
+
const existing = existingTable.columns.get(col.name);
|
|
60
|
+
|
|
61
|
+
if (!existing) {
|
|
62
|
+
return false;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
return this.columnChanged(col, existing);
|
|
66
|
+
});
|
|
67
|
+
|
|
68
|
+
if (hasRemovedColumns || hasChangedColumns) {
|
|
69
|
+
this.buildRebuild(table, existingTable);
|
|
70
|
+
return;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
this.buildAddColumns(table, existingTable);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
private buildRebuild(table: DesiredTable, existingTable: IntrospectedTable) {
|
|
77
|
+
const columnCopies: ColumnCopy[] = [];
|
|
78
|
+
|
|
79
|
+
for (const col of table.columns) {
|
|
80
|
+
const existing = existingTable.columns.get(col.name);
|
|
81
|
+
|
|
82
|
+
if (!existing) {
|
|
83
|
+
continue;
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
if (col.type !== existing.type) {
|
|
87
|
+
if (col.notnull && col.defaultValue === null && existingTable.hasData) {
|
|
88
|
+
throw new Error(
|
|
89
|
+
`Cannot change type of NOT NULL column "${col.name}" without DEFAULT in table "${table.name}" with existing data`,
|
|
90
|
+
);
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
continue;
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
if (!existing.notnull && col.notnull && col.defaultValue === null && existing.hasNulls) {
|
|
97
|
+
throw new Error(
|
|
98
|
+
`Cannot make column "${col.name}" NOT NULL without DEFAULT in table "${table.name}" with existing data`,
|
|
99
|
+
);
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
if (!existing.notnull && col.notnull && col.defaultValue !== null) {
|
|
103
|
+
columnCopies.push({ name: col.name, expr: `COALESCE("${col.name}", ${col.defaultValue})` });
|
|
104
|
+
} else {
|
|
105
|
+
columnCopies.push({ name: col.name, expr: `"${col.name}"` });
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
this.ops.push({ type: "RebuildTable", table: table.name, createSql: table.sql, columnCopies });
|
|
110
|
+
this.rebuiltTables.add(table.name);
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
private buildAddColumns(table: DesiredTable, existingTable: IntrospectedTable) {
|
|
114
|
+
const newColumns = table.columns.filter((c) => !existingTable.columns.has(c.name));
|
|
115
|
+
|
|
116
|
+
if (newColumns.length === 0) {
|
|
117
|
+
return;
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
const nonAddable = newColumns.filter((c) => !c.addable);
|
|
121
|
+
|
|
122
|
+
if (nonAddable.length > 0) {
|
|
123
|
+
if (existingTable.hasData) {
|
|
124
|
+
throw new Error(
|
|
125
|
+
`Cannot add NOT NULL column "${nonAddable[0]!.name}" without DEFAULT to table "${table.name}" with existing data`,
|
|
126
|
+
);
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
this.ops.push({ type: "DropTable", table: table.name });
|
|
130
|
+
this.ops.push({ type: "CreateTable", table: table.name, sql: table.sql });
|
|
131
|
+
this.rebuiltTables.add(table.name);
|
|
132
|
+
return;
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
for (const col of newColumns) {
|
|
136
|
+
this.ops.push({ type: "AddColumn", table: table.name, columnDef: col.columnDef });
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
private columnChanged(desired: ColumnSchema, existing: ColumnSchema) {
|
|
141
|
+
return (
|
|
142
|
+
desired.type !== existing.type ||
|
|
143
|
+
desired.notnull !== existing.notnull ||
|
|
144
|
+
desired.defaultValue !== existing.defaultValue ||
|
|
145
|
+
desired.unique !== existing.unique ||
|
|
146
|
+
desired.references !== existing.references ||
|
|
147
|
+
desired.onDelete !== existing.onDelete
|
|
148
|
+
);
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
private dropOrphans() {
|
|
152
|
+
for (const [name] of this.existing) {
|
|
153
|
+
if (!this.desiredNames.has(name)) {
|
|
154
|
+
this.ops.push({ type: "DropTable", table: name });
|
|
155
|
+
}
|
|
156
|
+
}
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
private diffIndexes() {
|
|
160
|
+
for (const table of this.desired) {
|
|
161
|
+
const tableIndexes = table.indexes ?? [];
|
|
162
|
+
|
|
163
|
+
if (this.rebuiltTables.has(table.name)) {
|
|
164
|
+
for (const idx of tableIndexes) {
|
|
165
|
+
this.ops.push({ type: "CreateIndex", sql: idx.sql });
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
continue;
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
const existingTable = this.existing.get(table.name);
|
|
172
|
+
|
|
173
|
+
if (!existingTable) {
|
|
174
|
+
continue;
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
const existingNames = new Set(existingTable.indexes.map((i) => i.name));
|
|
178
|
+
const desiredNames = new Set(tableIndexes.map((i) => i.name));
|
|
179
|
+
|
|
180
|
+
for (const idx of tableIndexes) {
|
|
181
|
+
if (!existingNames.has(idx.name)) {
|
|
182
|
+
this.ops.push({ type: "CreateIndex", sql: idx.sql });
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
for (const idx of existingTable.indexes) {
|
|
187
|
+
if (!desiredNames.has(idx.name)) {
|
|
188
|
+
this.ops.push({ type: "DropIndex", index: idx.name });
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
}
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite";
|
|
2
|
+
import type { MigrationOp, RebuildTableOp } from "./types";
|
|
3
|
+
|
|
4
|
+
export class Executor {
|
|
5
|
+
constructor(
|
|
6
|
+
private db: Database,
|
|
7
|
+
private ops: MigrationOp[],
|
|
8
|
+
) {}
|
|
9
|
+
|
|
10
|
+
execute() {
|
|
11
|
+
if (this.ops.length === 0) {
|
|
12
|
+
return;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
const hasRebuild = this.ops.some((op) => op.type === "RebuildTable");
|
|
16
|
+
|
|
17
|
+
let restoreFk = false;
|
|
18
|
+
|
|
19
|
+
if (hasRebuild) {
|
|
20
|
+
const { foreign_keys } = this.db.prepare("PRAGMA foreign_keys").get() as {
|
|
21
|
+
foreign_keys: number;
|
|
22
|
+
};
|
|
23
|
+
|
|
24
|
+
if (foreign_keys === 1) {
|
|
25
|
+
this.db.run("PRAGMA foreign_keys = OFF");
|
|
26
|
+
restoreFk = true;
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
try {
|
|
31
|
+
this.db.transaction(() => {
|
|
32
|
+
for (const op of this.ops) {
|
|
33
|
+
this.executeOp(op);
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
if (restoreFk) {
|
|
37
|
+
const violations = this.db.prepare("PRAGMA foreign_key_check").all();
|
|
38
|
+
|
|
39
|
+
if (violations.length > 0) {
|
|
40
|
+
throw new Error("Foreign key check failed after rebuild");
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
})();
|
|
44
|
+
} finally {
|
|
45
|
+
if (restoreFk) {
|
|
46
|
+
this.db.run("PRAGMA foreign_keys = ON");
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
private executeOp(op: MigrationOp) {
|
|
52
|
+
switch (op.type) {
|
|
53
|
+
case "CreateTable": {
|
|
54
|
+
return this.db.run(op.sql);
|
|
55
|
+
}
|
|
56
|
+
case "DropTable": {
|
|
57
|
+
return this.db.run(`DROP TABLE "${op.table}"`);
|
|
58
|
+
}
|
|
59
|
+
case "AddColumn": {
|
|
60
|
+
return this.db.run(`ALTER TABLE "${op.table}" ADD COLUMN ${op.columnDef}`);
|
|
61
|
+
}
|
|
62
|
+
case "RebuildTable": {
|
|
63
|
+
return this.rebuildTable(op);
|
|
64
|
+
}
|
|
65
|
+
case "CreateIndex": {
|
|
66
|
+
return this.db.run(op.sql);
|
|
67
|
+
}
|
|
68
|
+
case "DropIndex": {
|
|
69
|
+
return this.db.run(`DROP INDEX "${op.index}"`);
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
private rebuildTable(op: RebuildTableOp) {
|
|
75
|
+
const tempName = `__new_${op.table}`;
|
|
76
|
+
const tempSql = op.createSql.replace(
|
|
77
|
+
`CREATE TABLE IF NOT EXISTS "${op.table}"`,
|
|
78
|
+
`CREATE TABLE "${tempName}"`,
|
|
79
|
+
);
|
|
80
|
+
|
|
81
|
+
this.db.run(tempSql);
|
|
82
|
+
|
|
83
|
+
if (op.columnCopies.length > 0) {
|
|
84
|
+
const destCols = op.columnCopies.map((c) => `"${c.name}"`).join(", ");
|
|
85
|
+
const srcExprs = op.columnCopies.map((c) => c.expr).join(", ");
|
|
86
|
+
|
|
87
|
+
this.db.run(`INSERT INTO "${tempName}" (${destCols}) SELECT ${srcExprs} FROM "${op.table}"`);
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
this.db.run(`DROP TABLE "${op.table}"`);
|
|
91
|
+
this.db.run(`ALTER TABLE "${tempName}" RENAME TO "${op.table}"`);
|
|
92
|
+
}
|
|
93
|
+
}
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite";
|
|
2
|
+
import type { IntrospectedColumn, IntrospectedIndex, IntrospectedTable } from "./types";
|
|
3
|
+
|
|
4
|
+
type TableListRow = {
|
|
5
|
+
name: string;
|
|
6
|
+
type: string;
|
|
7
|
+
};
|
|
8
|
+
|
|
9
|
+
type TableXInfoRow = {
|
|
10
|
+
name: string;
|
|
11
|
+
type: string;
|
|
12
|
+
notnull: number;
|
|
13
|
+
dflt_value: string | null;
|
|
14
|
+
};
|
|
15
|
+
|
|
16
|
+
type IndexListRow = {
|
|
17
|
+
name: string;
|
|
18
|
+
unique: number;
|
|
19
|
+
origin: string;
|
|
20
|
+
};
|
|
21
|
+
|
|
22
|
+
type IndexInfoRow = {
|
|
23
|
+
name: string;
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
type ForeignKeyListRow = {
|
|
27
|
+
from: string;
|
|
28
|
+
table: string;
|
|
29
|
+
to: string;
|
|
30
|
+
on_delete: string;
|
|
31
|
+
};
|
|
32
|
+
|
|
33
|
+
export class Introspector {
|
|
34
|
+
constructor(private db: Database) {}
|
|
35
|
+
|
|
36
|
+
introspect() {
|
|
37
|
+
const tables = new Map<string, IntrospectedTable>();
|
|
38
|
+
const tableRows = this.db.prepare("PRAGMA table_list").all() as TableListRow[];
|
|
39
|
+
|
|
40
|
+
for (const row of tableRows) {
|
|
41
|
+
if (row.type !== "table" || row.name.startsWith("sqlite_")) {
|
|
42
|
+
continue;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
const indexRows = this.db.prepare(`PRAGMA index_list("${row.name}")`).all() as IndexListRow[];
|
|
46
|
+
const uniqueCols = this.uniqueColumns(indexRows);
|
|
47
|
+
const fkMap = this.foreignKeys(row.name);
|
|
48
|
+
const columns = this.columns(row.name, uniqueCols, fkMap);
|
|
49
|
+
const indexes = this.indexes(indexRows);
|
|
50
|
+
const hasData = this.db.prepare(`SELECT 1 FROM "${row.name}" LIMIT 1`).get() !== null;
|
|
51
|
+
|
|
52
|
+
tables.set(row.name, { columns, indexes, hasData });
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
return tables;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
private uniqueColumns(indexRows: IndexListRow[]) {
|
|
59
|
+
const unique = new Set<string>();
|
|
60
|
+
|
|
61
|
+
for (const idx of indexRows) {
|
|
62
|
+
if (idx.unique !== 1 || idx.origin !== "u") {
|
|
63
|
+
continue;
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
const idxCols = this.db.prepare(`PRAGMA index_info("${idx.name}")`).all() as IndexInfoRow[];
|
|
67
|
+
|
|
68
|
+
if (idxCols.length === 1) {
|
|
69
|
+
unique.add(idxCols[0]!.name);
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
return unique;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
private indexes(indexRows: IndexListRow[]) {
|
|
77
|
+
const indexes: IntrospectedIndex[] = [];
|
|
78
|
+
|
|
79
|
+
for (const idx of indexRows) {
|
|
80
|
+
if (idx.origin !== "c") {
|
|
81
|
+
continue;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
const idxCols = this.db.prepare(`PRAGMA index_info("${idx.name}")`).all() as IndexInfoRow[];
|
|
85
|
+
|
|
86
|
+
indexes.push({
|
|
87
|
+
name: idx.name,
|
|
88
|
+
columns: idxCols.map((c) => c.name),
|
|
89
|
+
unique: idx.unique === 1,
|
|
90
|
+
});
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
return indexes;
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
/**
 * Builds a map from local column name to its foreign-key target for the
 * given table, using PRAGMA foreign_key_list.
 * `references` is "targetTable.targetColumn"; `onDelete` is null when the
 * action is SQLite's implicit default ("NO ACTION").
 */
private foreignKeys(table: string) {
  const rows = this.db
    .prepare(`PRAGMA foreign_key_list("${table}")`)
    .all() as ForeignKeyListRow[];

  const byColumn = new Map<string, { references: string; onDelete: string | null }>();

  for (const row of rows) {
    // "NO ACTION" is what SQLite reports when no ON DELETE clause was
    // written, so normalize it to null ("nothing declared").
    const onDelete = row.on_delete === "NO ACTION" ? null : row.on_delete;

    byColumn.set(row.from, {
      references: `${row.table}.${row.to}`,
      onDelete,
    });
  }

  return byColumn;
}
|
|
111
|
+
|
|
112
|
+
/**
 * Introspects every column of `table` via PRAGMA table_xinfo, merging in
 * the unique-column set and foreign-key map computed by the sibling helpers.
 * For nullable columns it additionally probes whether any NULL value is
 * currently stored (`hasNulls`), which migration diffing uses to decide
 * whether a NOT NULL tightening is safe.
 *
 * NOTE(review): identifiers are double-quoted but not escaped — assumes
 * table/column names contain no embedded `"`; confirm against schema rules.
 */
private columns(
  table: string,
  uniqueCols: Set<string>,
  fkMap: Map<string, { references: string; onDelete: string | null }>,
) {
  const rows = this.db.prepare(`PRAGMA table_xinfo("${table}")`).all() as TableXInfoRow[];
  const result = new Map<string, IntrospectedColumn>();

  for (const col of rows) {
    const nullable = col.notnull !== 1;
    const fk = fkMap.get(col.name);

    // Only probe the data for NULLs when the column can hold them at all;
    // a NOT NULL column is guaranteed NULL-free by the engine.
    const hasNulls =
      nullable &&
      this.db.prepare(`SELECT 1 FROM "${table}" WHERE "${col.name}" IS NULL LIMIT 1`).get() !== null;

    result.set(col.name, {
      name: col.name,
      type: col.type,
      notnull: !nullable,
      defaultValue: col.dflt_value,
      unique: uniqueCols.has(col.name),
      references: fk?.references ?? null,
      onDelete: fk?.onDelete ?? null,
      hasNulls,
    });
  }

  return result;
}
|
|
140
|
+
}
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
/** Create a brand-new table; `sql` is the complete CREATE TABLE statement. */
export type CreateTableOp = {
  type: "CreateTable";
  table: string;
  sql: string;
};

/** Drop an existing table by name. */
export type DropTableOp = {
  type: "DropTable";
  table: string;
};

/** Add a column in place; `columnDef` is the column definition fragment for ALTER TABLE ... ADD COLUMN. */
export type AddColumnOp = {
  type: "AddColumn";
  table: string;
  columnDef: string;
};

/** Maps a destination column `name` to the SQL expression `expr` that fills it during a table rebuild. */
export type ColumnCopy = {
  name: string;
  expr: string;
};

/**
 * Rebuild a table via the create-new / copy-data / swap pattern (used for
 * changes SQLite's ALTER TABLE cannot express). `createSql` creates the
 * replacement table; `columnCopies` describes how each column is populated
 * from the old table.
 */
export type RebuildTableOp = {
  type: "RebuildTable";
  table: string;
  createSql: string;
  columnCopies: ColumnCopy[];
};

/** Create an index; `sql` is the complete CREATE INDEX statement. */
export type CreateIndexOp = {
  type: "CreateIndex";
  sql: string;
};

/** Drop an index by name. */
export type DropIndexOp = {
  type: "DropIndex";
  index: string;
};

/** Discriminated union (on `type`) of every migration step the differ can emit. */
export type MigrationOp =
  | CreateTableOp
  | DropTableOp
  | AddColumnOp
  | RebuildTableOp
  | CreateIndexOp
  | DropIndexOp;

/**
 * Declarative shape of a single column: SQLite type affinity, nullability,
 * raw default expression, column-level UNIQUE flag, and foreign-key target
 * ("table.column") plus its ON DELETE action (null = none declared).
 */
export type ColumnSchema = {
  name: string;
  type: string;
  notnull: boolean;
  defaultValue: string | null;
  unique: boolean;
  references: string | null;
  onDelete: string | null;
};

/** A ColumnSchema observed in a live database, plus whether NULLs are actually stored. */
export interface IntrospectedColumn extends ColumnSchema {
  hasNulls: boolean;
}

/** An explicitly created (CREATE INDEX) index observed in a live database. */
export type IntrospectedIndex = {
  name: string;
  columns: string[];
  unique: boolean;
};

/** Full introspected state of one table; `hasData` is true when at least one row exists. */
export type IntrospectedTable = {
  columns: Map<string, IntrospectedColumn>;
  indexes: IntrospectedIndex[];
  hasData: boolean;
};
|
package/src/plugin.ts
CHANGED
|
@@ -1,40 +1,71 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import {
|
|
2
|
+
type KyselyPlugin,
|
|
3
|
+
type RootOperationNode,
|
|
4
|
+
type UnknownRow,
|
|
5
|
+
type QueryId,
|
|
6
|
+
TableNode,
|
|
7
|
+
AliasNode,
|
|
8
|
+
ValuesNode,
|
|
9
|
+
ValueNode,
|
|
10
|
+
ColumnNode,
|
|
11
|
+
} from "kysely";
|
|
2
12
|
import { type } from "arktype";
|
|
3
13
|
import type { Type } from "arktype";
|
|
4
14
|
import { JsonParseError } from "./errors";
|
|
5
15
|
import { JsonValidationError } from "./validation-error";
|
|
16
|
+
import type { JsonValidation } from "./types";
|
|
6
17
|
|
|
7
18
|
export type ColumnCoercion = "boolean" | "date" | { type: "json"; schema: Type };
|
|
8
19
|
export type ColumnsMap = Map<string, Map<string, ColumnCoercion>>;
|
|
9
20
|
|
|
10
21
|
export class DeserializePlugin implements KyselyPlugin {
|
|
11
|
-
private queryNodes = new
|
|
22
|
+
private queryNodes = new WeakMap<QueryId, RootOperationNode>();
|
|
12
23
|
|
|
13
|
-
constructor(
|
|
24
|
+
constructor(
|
|
25
|
+
private columns: ColumnsMap,
|
|
26
|
+
private validation: Required<JsonValidation>,
|
|
27
|
+
) {}
|
|
14
28
|
|
|
15
29
|
transformQuery: KyselyPlugin["transformQuery"] = (args) => {
|
|
16
30
|
this.queryNodes.set(args.queryId, args.node);
|
|
17
31
|
|
|
32
|
+
if (this.validation.onWrite) {
|
|
33
|
+
this.validateWriteNode(args.node);
|
|
34
|
+
}
|
|
35
|
+
|
|
18
36
|
return args.node;
|
|
19
37
|
};
|
|
20
38
|
|
|
21
|
-
private getTableFromNode(node: RootOperationNode)
|
|
39
|
+
private getTableFromNode(node: RootOperationNode) {
|
|
22
40
|
switch (node.kind) {
|
|
23
41
|
case "InsertQueryNode":
|
|
24
|
-
return
|
|
42
|
+
return node.into?.table.identifier.name ?? null;
|
|
25
43
|
|
|
26
|
-
case "UpdateQueryNode":
|
|
27
|
-
|
|
44
|
+
case "UpdateQueryNode": {
|
|
45
|
+
if (node.table && TableNode.is(node.table)) {
|
|
46
|
+
return node.table.table.identifier.name;
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
return null;
|
|
50
|
+
}
|
|
28
51
|
|
|
29
52
|
case "SelectQueryNode":
|
|
30
53
|
case "DeleteQueryNode": {
|
|
31
|
-
const fromNode =
|
|
54
|
+
const fromNode = node.from?.froms[0];
|
|
55
|
+
|
|
56
|
+
if (!fromNode) {
|
|
57
|
+
return null;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
if (AliasNode.is(fromNode) && TableNode.is(fromNode.node)) {
|
|
61
|
+
return fromNode.node.table.identifier.name;
|
|
62
|
+
}
|
|
32
63
|
|
|
33
|
-
if (fromNode
|
|
34
|
-
return fromNode.
|
|
64
|
+
if (TableNode.is(fromNode)) {
|
|
65
|
+
return fromNode.table.identifier.name;
|
|
35
66
|
}
|
|
36
67
|
|
|
37
|
-
return
|
|
68
|
+
return null;
|
|
38
69
|
}
|
|
39
70
|
|
|
40
71
|
default:
|
|
@@ -42,6 +73,82 @@ export class DeserializePlugin implements KyselyPlugin {
|
|
|
42
73
|
}
|
|
43
74
|
}
|
|
44
75
|
|
|
76
|
+
/**
 * Validates one JSON column value against its Arktype schema.
 * Nullish values pass unconditionally (nullability is enforced elsewhere);
 * any schema mismatch raises JsonValidationError with the Arktype summary.
 */
private validateJsonValue(table: string, col: string, value: unknown, schema: Type) {
  // == null deliberately matches both null and undefined.
  if (value == null) {
    return;
  }

  const outcome = schema(value);

  if (outcome instanceof type.errors) {
    throw new JsonValidationError(table, col, outcome.summary);
  }
}
|
|
87
|
+
|
|
88
|
+
/**
 * Walks the column/value pairs of an insert or update statement and runs
 * JSON schema validation on every column configured with a json coercion.
 * Silently no-ops for non-write nodes, unknown tables, and tables with no
 * registered coercions.
 */
private validateWriteNode(node: RootOperationNode) {
  const isWrite = node.kind === "InsertQueryNode" || node.kind === "UpdateQueryNode";

  if (!isWrite) {
    return;
  }

  const table = this.getTableFromNode(node);
  const cols = table ? this.columns.get(table) : undefined;

  if (!table || !cols) {
    return;
  }

  for (const [col, value] of this.writeValues(node)) {
    const coercion = cols.get(col);

    // String coercions ("boolean"/"date") carry no schema; only json
    // coercions are validated here.
    if (coercion && typeof coercion !== "string") {
      this.validateJsonValue(table, col, value, coercion.schema);
    }
  }
}
|
|
115
|
+
|
|
116
|
+
/**
 * Yields [columnName, rawValue] pairs for every value written by an insert
 * or update node. For inserts, values are read positionally against the
 * declared column list; ValueNode wrappers are unwrapped to their payload.
 * Anything that isn't a plain values-list insert or a column/value update
 * (e.g. INSERT ... SELECT, expression updates) yields nothing.
 */
private *writeValues(node: RootOperationNode) {
  if (node.kind === "UpdateQueryNode") {
    for (const update of node.updates ?? []) {
      if (ColumnNode.is(update.column) && ValueNode.is(update.value)) {
        yield [update.column.column.name, update.value.value] as [string, unknown];
      }
    }

    return;
  }

  if (node.kind !== "InsertQueryNode") {
    return;
  }

  const names = node.columns?.map((c) => c.column.name);

  // Bail on inserts without an explicit column list or non-VALUES sources.
  if (!names || !node.values || !ValuesNode.is(node.values)) {
    return;
  }

  for (const row of node.values.values) {
    for (const [i, name] of names.entries()) {
      if (row.kind === "PrimitiveValueListNode") {
        yield [name, row.values[i]] as [string, unknown];
        continue;
      }

      const wrapped = row.values[i];
      yield [name, wrapped && ValueNode.is(wrapped) ? wrapped.value : wrapped] as [string, unknown];
    }
  }
}
|
|
151
|
+
|
|
45
152
|
private coerceRow(table: string, row: UnknownRow, cols: Map<string, ColumnCoercion>) {
|
|
46
153
|
for (const [col, coercion] of cols) {
|
|
47
154
|
if (!(col in row)) {
|
|
@@ -78,17 +185,14 @@ export class DeserializePlugin implements KyselyPlugin {
|
|
|
78
185
|
throw new JsonParseError(table, col, value, e);
|
|
79
186
|
}
|
|
80
187
|
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
if (validated instanceof type.errors) {
|
|
84
|
-
throw new JsonValidationError(table, col, validated.summary);
|
|
188
|
+
if (this.validation.onRead) {
|
|
189
|
+
this.validateJsonValue(table, col, parsed, coercion.schema);
|
|
85
190
|
}
|
|
86
191
|
|
|
87
|
-
row[col] =
|
|
192
|
+
row[col] = parsed;
|
|
88
193
|
}
|
|
89
194
|
}
|
|
90
195
|
|
|
91
|
-
// oxlint-disable-next-line require-await
|
|
92
196
|
transformResult: KyselyPlugin["transformResult"] = async (args) => {
|
|
93
197
|
const node = this.queryNodes.get(args.queryId);
|
|
94
198
|
|
package/src/types.ts
CHANGED
|
@@ -58,8 +58,14 @@ export type DatabaseSchema<T extends SchemaRecord> = {
|
|
|
58
58
|
indexes?: IndexesConfig<T>;
|
|
59
59
|
};
|
|
60
60
|
|
|
61
|
+
/**
 * Controls when JSON columns are validated against their Arktype schema:
 * `onRead` validates parsed values coming out of queries, `onWrite`
 * validates payloads of insert/update statements before execution.
 */
export type JsonValidation = {
  onRead?: boolean;
  onWrite?: boolean;
};
|
|
65
|
+
|
|
61
66
|
/**
 * Constructor options for Database: SQLite file `path`, the table/index
 * `schema`, optional connection `pragmas`, and optional JSON `validation`
 * settings (defaults applied by Database when omitted).
 */
export type DatabaseOptions<T extends SchemaRecord> = {
  path: string;
  schema: DatabaseSchema<T>;
  pragmas?: DatabasePragmas;
  validation?: JsonValidation;
};
|