spooder 4.6.2 → 5.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1119 -342
- package/bun.lock +9 -5
- package/package.json +2 -2
- package/src/api.ts +976 -531
- package/src/api_db.ts +670 -0
- package/src/cli.ts +93 -19
- package/src/config.ts +13 -8
- package/src/dispatch.ts +136 -11
- package/src/template/directory_index.html +303 -0
- package/src/github.ts +0 -121
- package/src/utils.ts +0 -57
package/src/api_db.ts
ADDED
|
@@ -0,0 +1,670 @@
|
|
|
1
|
+
import { Database } from 'bun:sqlite';
|
|
2
|
+
import { log_create_logger, log_list, log_error, caution } from './api';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import fs from 'node:fs/promises';
|
|
5
|
+
import type { RowDataPacket, ResultSetHeader } from 'mysql2';
|
|
6
|
+
|
|
7
|
+
const db_log = log_create_logger('db', 'spooder');
|
|
8
|
+
|
|
9
|
+
// region utility
|
|
10
|
+
export function db_cast_set<T extends string>(set: string | null): Set<T> {
|
|
11
|
+
return new Set(set?.split(',') as T[] ?? []);
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
export function db_serialize_set<T extends string>(set: Set<T> | null): string {
|
|
15
|
+
return set?.size ? Array.from(set).join(',') : '';
|
|
16
|
+
}
|
|
17
|
+
// endregion
|
|
18
|
+
|
|
19
|
+
// region schema
|
|
20
|
+
interface DependencyTarget {
|
|
21
|
+
file_name: string;
|
|
22
|
+
deps: string[];
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
function order_schema_dep_tree<T extends DependencyTarget>(deps: T[]): T[] {
|
|
26
|
+
const visited = new Set<string>();
|
|
27
|
+
const temp = new Set<string>();
|
|
28
|
+
const result: T[] = [];
|
|
29
|
+
const map = new Map(deps.map(d => [d.file_name, d]));
|
|
30
|
+
|
|
31
|
+
function visit(node: T): void {
|
|
32
|
+
if (temp.has(node.file_name))
|
|
33
|
+
throw new Error(`Cyclic dependency {${node.file_name}}`);
|
|
34
|
+
|
|
35
|
+
if (visited.has(node.file_name))
|
|
36
|
+
return;
|
|
37
|
+
|
|
38
|
+
temp.add(node.file_name);
|
|
39
|
+
|
|
40
|
+
for (const dep of node.deps) {
|
|
41
|
+
const dep_node = map.get(dep);
|
|
42
|
+
if (!dep_node)
|
|
43
|
+
throw new Error(`Missing dependency {${dep}}`);
|
|
44
|
+
|
|
45
|
+
visit(dep_node as T);
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
temp.delete(node.file_name);
|
|
49
|
+
visited.add(node.file_name);
|
|
50
|
+
result.push(node);
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
for (const dep of deps)
|
|
54
|
+
if (!visited.has(dep.file_name))
|
|
55
|
+
visit(dep);
|
|
56
|
+
|
|
57
|
+
return result;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
// Row shape of the schema bookkeeping table (one row per managed schema).
type Row_DBSchema = { db_schema_table_name: string, db_schema_version: number };

// Maps schema name -> currently applied revision number.
type SchemaVersionMap = Map<string, number>;

/**
 * Recursively scans `schema_dir` for `.sql` files and parses each one into a
 * set of numbered revisions plus optional dependencies on sibling schema files.
 *
 * Expected file format (header lines are SQL comments):
 *   -- [deps] other_a.sql,other_b.sql    optional dependency declaration
 *   -- [1] optional revision comment     starts revision 1
 *   ...SQL statements for revision 1...
 *   -- [2] another comment               starts revision 2
 *
 * @param schema_dir - Directory to scan (recursively) for schema files.
 * @param schema_versions - Currently applied revision per schema name; used to
 *   compute which revisions still need applying (`chunk_keys`).
 * @returns Parsed schema entries ordered so dependencies come first.
 */
async function db_load_schema(schema_dir: string, schema_versions: SchemaVersionMap) {
	const schema_out = [];
	const schema_files = await fs.readdir(schema_dir, { recursive: true, withFileTypes: true });

	for (const schema_file_ent of schema_files) {
		if (schema_file_ent.isDirectory())
			continue;

		const schema_file = schema_file_ent.name;
		const schema_file_lower = schema_file.toLowerCase();
		if (!schema_file_lower.endsWith('.sql'))
			continue;

		db_log(`parsing schema file {${schema_file_lower}}`);

		// schema name is the lower-cased file name without the .sql extension
		const schema_name = path.basename(schema_file_lower, '.sql');
		const schema_path = path.join(schema_file_ent.parentPath, schema_file);
		const schema = await fs.readFile(schema_path, 'utf8');

		const deps = new Array<string>();

		// revision id -> { sql, comment }
		const revisions = new Map();
		let current_rev_id = 0;
		let current_rev = '';
		let current_rev_comment = '';

		// line-by-line state machine: header lines (`-- [N]` / `-- [deps]`)
		// switch state, all other lines accumulate into the current revision
		for (const line of schema.split(/\r?\n/)) {
			const line_identifier = line.match(/^--\s*\[(\d+|deps)\]/);
			if (line_identifier !== null) {
				if (line_identifier[1] === 'deps') {
					// Line contains schema dependencies, example: -- [deps] schema_b.sql,schema_c.sql
					const deps_raw = line.substring(line.indexOf(']') + 1);
					deps.push(...deps_raw.split(',').map(e => e.trim().toLowerCase()));
				} else {
					// New chunk definition detected, store the current chunk and start a new one.
					if (current_rev_id > 0) {
						revisions.set(current_rev_id, { sql: current_rev, comment: current_rev_comment });
						current_rev = '';
						current_rev_comment = '';
					}

					const rev_number = parseInt(line_identifier[1]);
					if (isNaN(rev_number) || rev_number < 1)
						throw new Error(rev_number + ' is not a valid revision number in ' + schema_file_lower);
					current_rev_id = rev_number;

					// Extract comment from the header line (everything after the closing bracket)
					const comment_start = line.indexOf(']') + 1;
					current_rev_comment = line.substring(comment_start).trim();
				}
			} else {
				// Append to existing revision.
				current_rev += line + '\n';
			}
		}

		// There may be something left in current_chunk once we reach end of the file.
		if (current_rev_id > 0)
			revisions.set(current_rev_id, { sql: current_rev, comment: current_rev_comment });

		if (revisions.size === 0) {
			db_log(`{${schema_file}} contains no valid revisions`);
			continue;
		}

		if (deps.length > 0)
			db_log(`{${schema_file}} dependencies: ${log_list(deps)}`);

		const current_schema_version = schema_versions.get(schema_name) ?? 0;
		schema_out.push({
			revisions,
			file_name: schema_file_lower,
			name: schema_name,
			current_version: current_schema_version,
			// only revisions newer than the applied version, ascending
			chunk_keys: Array.from(revisions.keys()).filter(chunk_id => chunk_id > current_schema_version).sort((a, b) => a - b)
		});
	}

	// order so that every schema is applied after its declared dependencies
	return order_schema_dep_tree(schema_out);
}
|
|
144
|
+
// endregion
|
|
145
|
+
|
|
146
|
+
// region mysql
// mysql2 is an optional dependency: the type-only import below is erased at
// compile time, and the runtime module is loaded dynamically (top-level await)
// so this file still loads when mysql2 is not installed.
import type * as mysql_types from 'mysql2/promise';
let mysql: typeof mysql_types | undefined;
try {
	mysql = await import('mysql2/promise') as typeof mysql_types;
} catch (e) {
	// mysql2 optional dependency not installed.
	// this dependency will be replaced once bun:sql supports mysql.
	// db_update_schema_mysql and db_init_schema_mysql will throw.
}
|
|
156
|
+
|
|
157
|
+
export async function db_update_schema_mysql(db: mysql_types.Connection, schema_dir: string, schema_table_name = 'db_schema') {
|
|
158
|
+
if (mysql === undefined)
|
|
159
|
+
throw new Error('{db_update_schema_mysql} cannot be called without optional dependency {mysql2} installed');
|
|
160
|
+
|
|
161
|
+
db_log(`updating database schema for {${db.config.database}}`);
|
|
162
|
+
|
|
163
|
+
const schema_versions = new Map();
|
|
164
|
+
|
|
165
|
+
try {
|
|
166
|
+
const [rows] = await db.query('SELECT db_schema_table_name, db_schema_version FROM ' + schema_table_name);
|
|
167
|
+
for (const row of rows as Array<Row_DBSchema>)
|
|
168
|
+
schema_versions.set(row.db_schema_table_name, row.db_schema_version);
|
|
169
|
+
} catch (e) {
|
|
170
|
+
db_log(`creating schema table {${schema_table_name}}`);
|
|
171
|
+
await db.query(`CREATE TABLE ${schema_table_name} (db_schema_table_name VARCHAR(255) PRIMARY KEY, db_schema_version INT)`);
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
await db.beginTransaction();
|
|
175
|
+
|
|
176
|
+
const update_schema_query = await db.prepare(`
|
|
177
|
+
INSERT INTO ${schema_table_name} (db_schema_version, db_schema_table_name) VALUES (?, ?)
|
|
178
|
+
ON DUPLICATE KEY UPDATE db_schema_version = VALUES(db_schema_version);
|
|
179
|
+
`);
|
|
180
|
+
|
|
181
|
+
const schemas = await db_load_schema(schema_dir, schema_versions);
|
|
182
|
+
for (const schema of schemas) {
|
|
183
|
+
let newest_schema_version = schema.current_version;
|
|
184
|
+
for (const rev_id of schema.chunk_keys) {
|
|
185
|
+
const revision = schema.revisions.get(rev_id);
|
|
186
|
+
const comment_text = revision.comment ? ` "{${revision.comment}}"` : '';
|
|
187
|
+
db_log(`applying revision [{${rev_id}}]${comment_text} to {${schema.name}}`);
|
|
188
|
+
|
|
189
|
+
await db.query(revision.sql);
|
|
190
|
+
newest_schema_version = rev_id;
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
if (newest_schema_version > schema.current_version) {
|
|
194
|
+
db_log(`updated table {${schema.name}} to revision {${newest_schema_version}}`);
|
|
195
|
+
await update_schema_query.execute([newest_schema_version, schema.name]);
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
await db.commit();
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
// Public shape of the MySQL helper API (also used for transaction scopes).
type MySQLDatabaseInterface = ReturnType<typeof create_mysql_api>;

/**
 * Builds the query helper API bound to a MySQL connection or pool.
 * Every method funnels failures through `error_handler`, which logs and
 * returns the supplied fallback value instead of throwing.
 *
 * NOTE(review): insert/insert_object/execute use `instance.query` while the
 * read helpers use `instance.execute` (server-side prepared) — presumably
 * deliberate; confirm against mysql2 usage guidelines.
 */
function create_mysql_api(instance: mysql_types.Connection | mysql_types.Pool, error_handler: (error: unknown, return_value: any, title: string) => any) {
	return {
		/**
		 * Executes a query and returns the LAST_INSERT_ID.
		 * Returns -1 if the query fails or no LAST_INSERT_ID is available.
		 */
		insert: async (sql: string, ...values: any) => {
			try {
				const [result] = await instance.query<ResultSetHeader>(sql, values);
				return result.insertId ?? -1;
			} catch (error) {
				return error_handler(error, -1, 'insert failed');
			}
		},

		/**
		 * Executes an insert query using object key/value mapping.
		 * Returns the LAST_INSERT_ID or -1 if the query fails.
		 */
		insert_object: async (table: string, obj: Record<string, any>) => {
			try {
				// build `INSERT INTO \`table\` (\`k1\`, …) VALUES(?, …)` from the object;
				// keys are backtick-quoted, values go through placeholders
				const values = Object.values(obj);
				let sql = 'INSERT INTO `' + table + '` (';
				sql += Object.keys(obj).map(e => '`' + e + '`').join(', ');
				sql += ') VALUES(' + values.map(() => '?').join(', ') + ')';

				const [result] = await instance.query<ResultSetHeader>(sql, values);
				return result.insertId ?? -1;
			} catch (error) {
				return error_handler(error, -1, 'insert_object failed');
			}
		},

		/**
		 * Executes a query and returns the number of affected rows.
		 * Returns -1 if the query fails.
		 */
		execute: async (sql: string, ...values: any) => {
			try {
				const [result] = await instance.query<ResultSetHeader>(sql, values);
				return result.affectedRows;
			} catch (error) {
				return error_handler(error, -1, 'execute failed');
			}
		},

		/**
		 * Returns the complete query result set as an array.
		 * Returns empty array if no rows found or if query fails.
		 */
		get_all: async <T = RowDataPacket>(sql: string, ...values: any): Promise<T[]> => {
			try {
				const [rows] = await instance.execute(sql, values);
				return rows as T[];
			} catch (error) {
				return error_handler(error, [], 'get_all failed');
			}
		},

		/**
		 * Returns the first row from a query result set.
		 * Returns null if no rows found or if query fails.
		 */
		get_single: async <T = RowDataPacket>(sql: string, ...values: any): Promise<T | null> => {
			try {
				const [rows] = await instance.execute(sql, values);
				const typed_rows = rows as T[];
				return typed_rows[0] ?? null;
			} catch (error) {
				return error_handler(error, null, 'get_single failed');
			}
		},

		/**
		 * Returns the query result as a single column array.
		 * Returns empty array if no rows found or if query fails.
		 */
		get_column: async <T = any>(sql: string, column: string, ...values: any): Promise<T[]> => {
			try {
				const [rows] = await instance.execute(sql, values) as RowDataPacket[][];
				return rows.map((e: any) => e[column]) as T[];
			} catch (error) {
				return error_handler(error, [], 'get_column failed');
			}
		},

		/**
		 * Calls a stored procedure and returns the result set as an array.
		 * Returns empty array if no rows found or if query fails.
		 */
		call: async <T = RowDataPacket>(func_name: string, ...args: any): Promise<T[]> => {
			try {
				const placeholders = args.map(() => '?').join(', ');
				const sql = `CALL ${func_name}(${placeholders})`;
				// CALL returns [resultSets, fields]; first result set is the payload
				const result = await instance.execute<RowDataPacket[][]>(sql, args);
				return result[0][0] as T[];
			} catch (error) {
				return error_handler(error, [], 'call failed');
			}
		},

		/**
		 * Returns an async iterator that yields pages of database rows.
		 * Each page contains at most `page_size` rows (default 1000).
		 */
		get_paged: async function* <T = RowDataPacket>(sql: string, values: any[] = [], page_size: number = 1000): AsyncGenerator<T[]> {
			let current_offset = 0;

			while (true) {
				try {
					// page_size/current_offset are numbers, so direct interpolation
					// here is not an injection vector
					const paged_sql = `${sql} LIMIT ${page_size} OFFSET ${current_offset}`;

					const [rows] = await instance.execute(paged_sql, values);
					const page_rows = rows as T[];

					if (page_rows.length === 0)
						break;

					yield page_rows;

					current_offset += page_size;

					// short page means the result set is exhausted
					if (page_rows.length < page_size)
						break;
				} catch (error) {
					error_handler(error, undefined, 'get_paged failed');
					return;
				}
			}
		},

		/**
		 * Returns the value of `count` from a query.
		 * Returns 0 if query fails.
		 */
		count: async (sql: string, ...values: any): Promise<number> => {
			try {
				const [rows] = await instance.execute(sql, values);
				const typed_rows = rows as RowDataPacket[];
				return typed_rows[0]?.count ?? 0;
			} catch (error) {
				return error_handler(error, 0, 'count failed');
			}
		},

		/**
		 * Returns the total count of rows from a table.
		 * Returns 0 if query fails.
		 */
		count_table: async (table_name: string): Promise<number> => {
			try {
				const [rows] = await instance.execute('SELECT COUNT(*) AS `count` FROM `' + table_name + '`');
				const typed_rows = rows as RowDataPacket[];
				return typed_rows[0]?.count ?? 0;
			} catch (error) {
				return error_handler(error, 0, 'count_table failed');
			}
		},

		/**
		 * Returns true if the query returns any results.
		 * Returns false if no results found or if query fails.
		 */
		exists: async (sql: string, ...values: any): Promise<boolean> => {
			try {
				const [rows] = await instance.execute(sql, values);
				const typed_rows = rows as RowDataPacket[];
				return typed_rows.length > 0;
			} catch (error) {
				return error_handler(error, false, 'exists failed');
			}
		}
	};
}
|
|
378
|
+
|
|
379
|
+
export async function db_mysql(db_info: mysql_types.ConnectionOptions, pool: boolean = false, use_canary_reporting = false) {
|
|
380
|
+
if (mysql === undefined)
|
|
381
|
+
throw new Error('db_mysql cannot be called without optional dependency {mysql2} installed');
|
|
382
|
+
|
|
383
|
+
// required for parsing multiple statements from schema files
|
|
384
|
+
db_info.multipleStatements = true;
|
|
385
|
+
|
|
386
|
+
const instance = pool ? mysql.createPool(db_info) : await mysql.createConnection(db_info);
|
|
387
|
+
|
|
388
|
+
function db_handle_error(error: unknown, return_value: any, title: string) {
|
|
389
|
+
log_error(`error in {db_mysql}: ${title}`);
|
|
390
|
+
|
|
391
|
+
if (use_canary_reporting)
|
|
392
|
+
caution('db_mysql: ' + title, { error });
|
|
393
|
+
|
|
394
|
+
return return_value;
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
return {
|
|
398
|
+
instance,
|
|
399
|
+
|
|
400
|
+
update_schema: async (schema_dir: string, schema_table_name: string = 'db_schema') => {
|
|
401
|
+
await db_update_schema_mysql(instance, schema_dir, schema_table_name);
|
|
402
|
+
},
|
|
403
|
+
|
|
404
|
+
transaction: async (scope: (transaction: MySQLDatabaseInterface) => void | Promise<void>) => {
|
|
405
|
+
let connection: mysql_types.Connection = instance;
|
|
406
|
+
|
|
407
|
+
if (pool)
|
|
408
|
+
connection = await (instance as mysql_types.Pool).getConnection();
|
|
409
|
+
|
|
410
|
+
await connection.beginTransaction();
|
|
411
|
+
|
|
412
|
+
try {
|
|
413
|
+
const transaction_api = create_mysql_api(connection, db_handle_error);
|
|
414
|
+
await scope(transaction_api);
|
|
415
|
+
await connection.commit();
|
|
416
|
+
return true;
|
|
417
|
+
} catch (error) {
|
|
418
|
+
await connection.rollback();
|
|
419
|
+
return db_handle_error(error, false, 'transaction failed');
|
|
420
|
+
} finally {
|
|
421
|
+
if (pool)
|
|
422
|
+
(connection as mysql_types.PoolConnection).release();
|
|
423
|
+
}
|
|
424
|
+
},
|
|
425
|
+
|
|
426
|
+
...create_mysql_api(instance, db_handle_error)
|
|
427
|
+
};
|
|
428
|
+
}
|
|
429
|
+
// endregion
|
|
430
|
+
|
|
431
|
+
// region sqlite
|
|
432
|
+
export async function db_update_schema_sqlite(db: Database, schema_dir: string, schema_table_name = 'db_schema'): Promise<void> {
|
|
433
|
+
db_log(`updating database schema for {${db.filename}}`);
|
|
434
|
+
|
|
435
|
+
const schema_versions = new Map();
|
|
436
|
+
|
|
437
|
+
try {
|
|
438
|
+
const query = db.query('SELECT db_schema_table_name, db_schema_version FROM ' + schema_table_name);
|
|
439
|
+
for (const row of query.all() as Array<Row_DBSchema>)
|
|
440
|
+
schema_versions.set(row.db_schema_table_name, row.db_schema_version);
|
|
441
|
+
} catch (e) {
|
|
442
|
+
db_log(`creating schema table {${schema_table_name}}`);
|
|
443
|
+
db.run(`CREATE TABLE ${schema_table_name} (db_schema_table_name TEXT PRIMARY KEY, db_schema_version INTEGER)`);
|
|
444
|
+
}
|
|
445
|
+
|
|
446
|
+
return new Promise(resolve => {
|
|
447
|
+
db.transaction(async () => {
|
|
448
|
+
const update_schema_query = db.prepare(`
|
|
449
|
+
INSERT INTO ${schema_table_name} (db_schema_version, db_schema_table_name) VALUES (?1, ?2)
|
|
450
|
+
ON CONFLICT(db_schema_table_name) DO UPDATE SET db_schema_version = EXCLUDED.db_schema_version
|
|
451
|
+
`);
|
|
452
|
+
|
|
453
|
+
const schemas = await db_load_schema(schema_dir, schema_versions);
|
|
454
|
+
|
|
455
|
+
for (const schema of schemas) {
|
|
456
|
+
let newest_schema_version = schema.current_version;
|
|
457
|
+
for (const rev_id of schema.chunk_keys) {
|
|
458
|
+
const revision = schema.revisions.get(rev_id);
|
|
459
|
+
const comment_text = revision.comment ? ` "{${revision.comment}}"` : '';
|
|
460
|
+
db_log(`applying revision [{${rev_id}}]${comment_text} to {${schema.name}}`);
|
|
461
|
+
db.transaction(() => db.run(revision.sql))();
|
|
462
|
+
newest_schema_version = rev_id;
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
if (newest_schema_version > schema.current_version) {
|
|
466
|
+
db_log(`updated table {${schema.name}} to revision {${newest_schema_version}}`);
|
|
467
|
+
update_schema_query.run(newest_schema_version, schema.name);
|
|
468
|
+
}
|
|
469
|
+
}
|
|
470
|
+
|
|
471
|
+
resolve();
|
|
472
|
+
})();
|
|
473
|
+
});
|
|
474
|
+
}
|
|
475
|
+
|
|
476
|
+
// Public shape of the SQLite helper API (also used for transaction scopes).
type SQLiteDatabaseInterface = ReturnType<typeof create_sqlite_api>;

/**
 * Builds the query helper API bound to a bun:sqlite Database.
 * Every method funnels failures through `error_handler`, which logs and
 * returns the supplied fallback value instead of throwing.
 */
function create_sqlite_api(instance: Database, error_handler: (error: unknown, return_value: any, title: string) => any) {
	return {
		/**
		 * Executes a query and returns the lastInsertRowid.
		 * Returns -1 if the query fails or no lastInsertRowid is available.
		 */
		insert: (sql: string, ...values: any) => {
			try {
				const result = instance.run(sql, ...values);
				// lastInsertRowid may be a bigint; normalize to number,
				// falling back to -1 when it is 0/absent
				return Number(result.lastInsertRowid) || -1;
			} catch (error) {
				return error_handler(error, -1, 'insert failed');
			}
		},

		/**
		 * Executes an insert query using object key/value mapping.
		 * Returns the lastInsertRowid or -1 if the query fails.
		 */
		insert_object: (table: string, obj: Record<string, any>) => {
			try {
				// build `INSERT INTO \`table\` (\`k1\`, …) VALUES(?, …)` from the object;
				// keys are backtick-quoted, values go through placeholders
				const values = Object.values(obj);
				let sql = 'INSERT INTO `' + table + '` (';
				sql += Object.keys(obj).map(e => '`' + e + '`').join(', ');
				sql += ') VALUES(' + values.map(() => '?').join(', ') + ')';

				const result = instance.run(sql, ...values);
				return Number(result.lastInsertRowid) || -1;
			} catch (error) {
				return error_handler(error, -1, 'insert_object failed');
			}
		},

		/**
		 * Executes a query and returns the number of affected rows.
		 * Returns -1 if the query fails.
		 */
		execute: (sql: string, ...values: any) => {
			try {
				const result = instance.run(sql, ...values);
				return result.changes || 0;
			} catch (error) {
				return error_handler(error, -1, 'execute failed');
			}
		},

		/**
		 * Returns the complete query result set as an array.
		 * Returns empty array if no rows found or if query fails.
		 */
		get_all: <T = any>(sql: string, ...values: any): T[] => {
			try {
				const rows = instance.query(sql).all(...values);
				return rows as T[];
			} catch (error) {
				return error_handler(error, [], 'get_all failed');
			}
		},

		/**
		 * Returns the first row from a query result set.
		 * Returns null if no rows found or if query fails.
		 */
		get_single: <T = any>(sql: string, ...values: any): T | null => {
			try {
				const row = instance.query(sql).get(...values);
				return (row as T) ?? null;
			} catch (error) {
				return error_handler(error, null, 'get_single failed');
			}
		},

		/**
		 * Returns the query result as a single column array.
		 * Returns empty array if no rows found or if query fails.
		 */
		get_column: <T = any>(sql: string, column: string, ...values: any): T[] => {
			try {
				const rows = instance.query(sql).all(...values) as any[];
				return rows.map((row: any) => row[column]) as T[];
			} catch (error) {
				return error_handler(error, [], 'get_column failed');
			}
		},

		/**
		 * Returns an async iterator that yields pages of database rows.
		 * Each page contains at most `page_size` rows (default 1000).
		 */
		get_paged: async function* <T = any>(sql: string, values: any[] = [], page_size: number = 1000): AsyncGenerator<T[]> {
			let current_offset = 0;

			while (true) {
				try {
					// unlike the mysql variant, LIMIT/OFFSET are bound as
					// placeholders here
					const paged_sql = `${sql} LIMIT ? OFFSET ?`;
					const paged_values = [...values, page_size, current_offset];

					const rows = instance.query(paged_sql).all(...paged_values) as T[];

					if (rows.length === 0)
						break;

					yield rows;

					current_offset += page_size;

					// short page means the result set is exhausted
					if (rows.length < page_size)
						break;
				} catch (error) {
					error_handler(error, undefined, 'get_paged failed');
					return;
				}
			}
		},

		/**
		 * Returns the value of `count` from a query.
		 * Returns 0 if query fails.
		 */
		count: (sql: string, ...values: any): number => {
			try {
				const row = instance.query(sql).get(...values) as any;
				return row?.count ?? 0;
			} catch (error) {
				return error_handler(error, 0, 'count failed');
			}
		},

		/**
		 * Returns the total count of rows from a table.
		 * Returns 0 if query fails.
		 */
		count_table: (table_name: string): number => {
			try {
				const row = instance.query('SELECT COUNT(*) AS `count` FROM `' + table_name + '`').get();
				return (row as any)?.count ?? 0;
			} catch (error) {
				return error_handler(error, 0, 'count_table failed');
			}
		},

		/**
		 * Returns true if the query returns any results.
		 * Returns false if no results found or if query fails.
		 */
		exists: (sql: string, ...values: any): boolean => {
			try {
				const row = instance.query(sql).get(...values);
				return row !== null;
			} catch (error) {
				return error_handler(error, false, 'exists failed');
			}
		}
	};
}
|
|
633
|
+
|
|
634
|
+
/**
 * Opens a bun:sqlite database and returns it wrapped in the query helper API,
 * plus `update_schema` and `transaction` helpers.
 *
 * @param filename - Path to the database file (bun:sqlite semantics).
 * @param options - Options forwarded to the `Database` constructor.
 * @param use_canary_reporting - When true, query failures are also reported via `caution`.
 */
export function db_sqlite(filename: string, options: ConstructorParameters<typeof Database>[1], use_canary_reporting = false) {
	const instance = new Database(filename, options);

	// shared failure path: log, optionally raise a canary, return the fallback
	function db_handle_error(error: unknown, return_value: any, title: string) {
		log_error(`error in {db_sqlite}: ${title}`);

		if (use_canary_reporting)
			caution('db_sqlite: ' + title, { error });

		return return_value;
	}

	return {
		instance,

		update_schema: async (schema_dir: string, schema_table_name: string = 'db_schema') => {
			await db_update_schema_sqlite(instance, schema_dir, schema_table_name);
		},

		/**
		 * Runs `scope` inside a transaction and returns true on success, or the
		 * error-handler fallback (false) on failure.
		 *
		 * NOTE(review): `scope` may be async, but `instance.transaction(...)` is
		 * invoked synchronously and this try/catch only observes synchronous
		 * throws — a rejection from an async `scope` would surface as an
		 * unhandled rejection after the transaction returns. Confirm callers
		 * pass synchronous scopes, or align with the async mysql variant.
		 */
		transaction: (scope: (transaction: SQLiteDatabaseInterface) => void | Promise<void>) => {
			const transaction_fn = instance.transaction(async () => {
				const transaction_api = create_sqlite_api(instance, db_handle_error);
				await scope(transaction_api);
			});

			try {
				transaction_fn();
				return true;
			} catch (error) {
				return db_handle_error(error, false, 'transaction failed');
			}
		},

		...create_sqlite_api(instance, db_handle_error)
	};
}
|
|
670
|
+
// endregion
|