@koishijs/plugin-database-mysql 4.1.4 → 4.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.d.ts +13 -12
- package/lib/index.js +74 -54
- package/lib/index.js.map +2 -2
- package/package.json +4 -4
package/lib/index.d.ts
CHANGED
@@ -1,6 +1,7 @@
 /// <reference types="koishi/lib" />
 import type { Pool, PoolConfig } from 'mysql';
-import { Context, Database,
+import { Context, Database, Schema, Tables } from 'koishi';
+import { Driver, Model, Modifier, Query } from '@koishijs/orm';
 import { Builder } from '@koishijs/sql-utils';
 declare module 'mysql' {
     interface UntypedFieldInfo {
@@ -15,7 +16,6 @@ declare module 'koishi' {
         'database-mysql': typeof import('.');
     }
 }
-export declare type TableType = keyof Tables;
 declare class MySQLBuilder extends Builder {
     private model;
     constructor(model: Model);
@@ -38,8 +38,8 @@ declare class MysqlDatabase extends Database {
     private _getColDefs;
     /** synchronize table schema */
     private _syncTable;
-    _inferFields<T extends
-    _createFilter(name:
+    _inferFields<T extends keyof Tables>(table: T, keys: readonly string[]): (keyof Tables[T])[];
+    _createFilter(name: keyof Tables, query: Query): string;
     _joinKeys: (keys: readonly string[]) => string;
     _formatValues: (table: string, data: object, keys: readonly string[]) => any[];
     query<T = any>(sql: string, values?: any): Promise<T>;
@@ -47,13 +47,14 @@ declare class MysqlDatabase extends Database {
     private _flushTasks;
     select<T extends {}>(table: string, fields: readonly (string & keyof T)[], conditional?: string, values?: readonly any[]): Promise<T[]>;
     drop(): Promise<void>;
-    stats(): Promise<
-    get(name:
-
-
-
-
-
+    stats(): Promise<Driver.Stats>;
+    get(name: keyof Tables, query: Query, modifier?: Modifier): Promise<any>;
+    private toUpdateExpr;
+    set(name: keyof Tables, query: Query, data: {}): Promise<void>;
+    remove(name: keyof Tables, query: Query): Promise<void>;
+    create<T extends keyof Tables>(name: T, data: {}): Promise<any>;
+    upsert(name: keyof Tables, data: any[], keys: string | string[]): Promise<void>;
+    eval(name: keyof Tables, expr: any, query: Query): Promise<any>;
 }
 declare namespace MysqlDatabase {
     export interface Config extends PoolConfig {
@@ -72,7 +73,7 @@ declare namespace MysqlDatabase {
         database?: string;
     } & import("schemastery").Dict<any, string>>;
     type Declarations = {
-        [T in
+        [T in keyof Tables]?: {
            [K in keyof Tables[T]]?: () => string;
        };
    };
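The declarations above drop the package-local TableType alias in favour of keyof Tables and now source Driver, Model, Modifier and Query from the new @koishijs/orm package. A rough consumer-side sketch of the retyped surface (the table and field names below are illustrative, not defined by this plugin):

    import MysqlDatabase from '@koishijs/plugin-database-mysql'

    async function example(db: MysqlDatabase) {
      // Query and Modifier shapes now come from @koishijs/orm
      const rows = await db.get('user', { id: { $gt: 10 } }, { limit: 5 })
      await db.set('user', { id: 1 }, { name: 'alice' })
      const stats = await db.stats() // typed as Promise<Driver.Stats>
    }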
package/lib/index.js
CHANGED
@@ -46,7 +46,7 @@ __export(exports, {
 });
 var import_mysql = __toModule(require("@vlasky/mysql"));
 var import_koishi = __toModule(require("koishi"));
-var
+var import_orm = __toModule(require("@koishijs/orm"));
 var import_sql_utils = __toModule(require("@koishijs/sql-utils"));
 var logger = new import_koishi.Logger("mysql");
 var DEFAULT_DATE = new Date("1970-01-01");
@@ -90,8 +90,12 @@ function getTypeDefinition({ type, length, precision, scale }) {
   }
 }
 __name(getTypeDefinition, "getTypeDefinition");
+function backtick(str) {
+  return "`" + str + "`";
+}
+__name(backtick, "backtick");
 function createIndex(keys) {
-  return (0, import_koishi.makeArray)(keys).map(
+  return (0, import_koishi.makeArray)(keys).map(backtick).join(", ");
 }
 __name(createIndex, "createIndex");
 var MySQLBuilder = class extends import_sql_utils.Builder {
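The new backtick helper is what createIndex and the column definitions below now use to quote identifiers instead of going through escapeId. A trivial illustration (not shipped in the package):

    backtick('name')             // -> '`name`'
    createIndex(['id', 'name'])  // -> '`id`, `name`'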
@@ -118,7 +122,7 @@ var MySQLBuilder = class extends import_sql_utils.Builder {
       return JSON.stringify(value);
     } else if ((meta == null ? void 0 : meta.type) === "list") {
       return value.join(",");
-    } else if (
+    } else if (import_orm.Model.Field.date.includes(meta == null ? void 0 : meta.type)) {
       return import_koishi.Time.template("yyyy-MM-dd hh:mm:ss", value);
     }
     return value;
@@ -151,8 +155,8 @@ var MysqlDatabase = class extends import_koishi.Database {
         const type = (_a = MysqlDatabase.tables[orgTable]) == null ? void 0 : _a[orgName];
         if (typeof type === "object")
           return type.parse(field);
-        const meta = (_b = this.
-        if ((meta == null ? void 0 : meta.type)
+        const meta = (_b = this.model.config[orgTable]) == null ? void 0 : _b.fields[orgName];
+        if (import_orm.Model.Field.string.includes(meta == null ? void 0 : meta.type)) {
           return field.string();
         } else if ((meta == null ? void 0 : meta.type) === "json") {
           const source = field.string();
@@ -178,11 +182,11 @@ var MysqlDatabase = class extends import_koishi.Database {
         }
       }
     }, config);
-    this.sql = new MySQLBuilder(this.
+    this.sql = new MySQLBuilder(this.model);
   }
   async start() {
     this.pool = (0, import_mysql.createPool)(this.config);
-    for (const name in this.
+    for (const name in this.model.config) {
       this._tableTasks[name] = this._syncTable(name);
     }
     this.ctx.on("model", (name) => {
@@ -193,7 +197,7 @@ var MysqlDatabase = class extends import_koishi.Database {
     this.pool.end();
   }
   _getColDefs(name, columns) {
-    const table = this.
+    const table = this.resolveTable(name);
     const { primary, foreign, autoInc } = table;
     const fields = __spreadValues({}, table.fields);
     const unique = [...table.unique];
@@ -202,7 +206,7 @@ var MysqlDatabase = class extends import_koishi.Database {
       if (columns.includes(key))
         continue;
       const { initial, nullable = true } = fields[key];
-      let def = (
+      let def = backtick(key);
       if (key === primary && autoInc) {
         def += " int unsigned not null auto_increment";
       } else {
@@ -226,7 +230,7 @@ var MysqlDatabase = class extends import_koishi.Database {
       }
       for (const key in foreign) {
         const [table2, key2] = foreign[key];
-        result.push(`foreign key (${(
+        result.push(`foreign key (${backtick(key)}) references ${(0, import_mysql.escapeId)(table2)} (${backtick(key2)})`);
       }
     }
     return result;
@@ -254,7 +258,7 @@ var MysqlDatabase = class extends import_koishi.Database {
     });
   }
   _createFilter(name, query) {
-    return this.sql.parseQuery(this.
+    return this.sql.parseQuery(this.resolveQuery(name, query));
   }
   query(sql, values) {
     const error = new Error();
@@ -266,7 +270,7 @@ var MysqlDatabase = class extends import_koishi.Database {
           return resolve(results);
         logger.warn(sql);
         if (err["code"] === "ER_DUP_ENTRY") {
-          err = new import_koishi.
+          err = new import_koishi.DriverError("duplicate-entry", err.message);
         }
         err.stack = err.message + error.stack.slice(5);
         reject(err);
@@ -324,30 +328,55 @@ var MysqlDatabase = class extends import_koishi.Database {
     const filter = this._createFilter(name, query);
     if (filter === "0")
       return [];
-    const { fields, limit, offset, sort } =
+    const { fields, limit, offset, sort } = this.resolveModifier(name, modifier);
     const keys = this._joinKeys(this._inferFields(name, fields));
     let sql = `SELECT ${keys} FROM ${name} _${name} WHERE ${filter}`;
     if (sort)
-      sql += " ORDER BY " + Object.entries(sort).map(([key, order]) => `${
+      sql += " ORDER BY " + Object.entries(sort).map(([key, order]) => `${backtick(key)} ${order}`).join(", ");
     if (limit)
       sql += " LIMIT " + limit;
     if (offset)
       sql += " OFFSET " + offset;
-    return this.queue(sql)
+    return this.queue(sql).then((data) => {
+      return data.map((row) => this.model.parse(name, row));
+    });
+  }
+  toUpdateExpr(name, item, field, upsert) {
+    const escaped = backtick(field);
+    if (field in item) {
+      if ((0, import_orm.isEvalExpr)(item[field]) || !upsert) {
+        return this.sql.parseEval(item[field], name, field);
+      } else {
+        return `VALUES(${escaped})`;
+      }
+    }
+    const valueInit = `ifnull(${escaped}, '{}')`;
+    let value = valueInit;
+    for (const key in item) {
+      if (!key.startsWith(field + "."))
+        continue;
+      const rest = key.slice(field.length + 1).split(".");
+      value = `json_set(${value}, '$${rest.map((key2) => `."${key2}"`).join("")}', ${this.sql.parseEval(item[key])})`;
+    }
+    if (value === valueInit) {
+      return escaped;
+    } else {
+      return value;
+    }
   }
   async set(name, query, data) {
+    data = this.model.format(name, data);
+    const { fields } = this.resolveTable(name);
     await this._tableTasks[name];
     const filter = this._createFilter(name, query);
     if (filter === "0")
       return;
-    const
-
-
-
-      const
-
-        return `${keyExpr} = ${valueExpr}`;
-      return `${keyExpr} = json_set(ifnull(${keyExpr}, '{}'), '$${rest.map((key2) => `."${key2}"`).join("")}', ${valueExpr})`;
+    const updateFields = [...new Set(Object.keys(data).map((key) => {
+      return Object.keys(fields).find((field) => field === key || key.startsWith(field + "."));
+    }))];
+    const update = updateFields.map((field) => {
+      const escaped = backtick(field);
+      return `${escaped} = ${this.toUpdateExpr(name, data, field, false)}`;
     }).join(", ");
     await this.query(`UPDATE ${name} SET ${update} WHERE ${filter}`);
   }
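toUpdateExpr is the new single place where per-field update SQL is built: a directly assigned field goes through parseEval (or becomes VALUES(`field`) in the upsert case), while dotted keys are folded into a json_set chain; get() additionally maps every returned row through this.model.parse. A rough sketch of the fragment set() would emit for a nested assignment, assuming parseEval renders the string literal as a quoted SQL string (the field names are invented):

    // set('user', { id: 1 }, { 'profile.name': 'alice' }) produces roughly:
    // UPDATE user SET `profile` = json_set(ifnull(`profile`, '{}'), '$."name"', 'alice') WHERE `id` = 1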
@@ -360,23 +389,30 @@ var MysqlDatabase = class extends import_koishi.Database {
   }
   async create(name, data) {
     await this._tableTasks[name];
-    data =
-    const
-    const
-
+    data = this.model.create(name, data);
+    const formatted = this.model.format(name, data);
+    const { autoInc, primary } = this.resolveTable(name);
+    const keys = Object.keys(formatted);
+    const header = await this.query(`INSERT INTO ?? (${this._joinKeys(keys)}) VALUES (${keys.map(() => "?").join(", ")})`, [name, ...this._formatValues(name, formatted, keys)]);
+    if (!autoInc)
+      return data;
+    return __spreadProps(__spreadValues({}, data), { [primary]: header.insertId });
   }
   async upsert(name, data, keys) {
     if (!data.length)
       return;
+    data = data.map((item) => this.model.format(name, item));
     await this._tableTasks[name];
-    const { fields, primary } = this.
+    const { fields, primary } = this.resolveTable(name);
     const merged = {};
     const insertion = data.map((item) => {
       Object.assign(merged, item);
-      return (0,
+      return this.model.format(name, (0, import_orm.executeUpdate)(this.model.create(name), item));
     });
     const indexFields = (0, import_koishi.makeArray)(keys || primary);
-    const dataFields = [...new Set(Object.keys(merged).map((key) =>
+    const dataFields = [...new Set(Object.keys(merged).map((key) => {
+      return Object.keys(fields).find((field) => field === key || key.startsWith(field + "."));
+    }))];
     const updateFields = (0, import_koishi.difference)(dataFields, indexFields);
     const createFilter = /* @__PURE__ */ __name((item) => this.sql.parseQuery((0, import_koishi.pick)(item, indexFields)), "createFilter");
     const createMultiFilter = /* @__PURE__ */ __name((items) => {
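create() now builds the row through this.model.create and this.model.format, and only merges the generated key into the returned object when the table declares an auto-increment primary key (the old code always returned an id column). A hedged sketch of the observable behaviour (table and columns are invented):

    // db.create('schedule', { assignee: 'bot', command: 'echo' }) issues roughly
    //   INSERT INTO ?? (`assignee`, `command`) VALUES (?, ?)
    // and, when the table is autoInc, resolves to the created row plus the generated key,
    // e.g. { assignee: 'bot', command: 'echo', id: header.insertId }; otherwise data is returned unchanged.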
@@ -390,32 +426,16 @@ var MysqlDatabase = class extends import_koishi.Database {
       }
     }, "createMultiFilter");
     const update = updateFields.map((field) => {
-      const escaped =
+      const escaped = backtick(field);
       const branches = {};
-
-
-
-            branches[createFilter(item)] = this.sql.parseEval(item[field], name, field);
-          }
-          return;
-        }
-        const valueInit = `ifnull(${escaped}, '{}')`;
-        let value2 = valueInit;
-        for (const key in item) {
-          const [first, ...rest] = key.split(".");
-          if (first !== field)
-            continue;
-          value2 = `json_set(${value2}, '$${rest.map((key2) => `."${key2}"`).join("")}', ${this.sql.parseEval(item[key])})`;
-        }
-        if (value2 === valueInit)
-          return true;
-        branches[createFilter(item)] = value2;
+      data.forEach((item) => {
+        var _a, _b;
+        ((_b = branches[_a = this.toUpdateExpr(name, item, field, true)]) != null ? _b : branches[_a] = []).push(item);
       });
-
-
-      let
-
-        value = `if(${condition}, ${branches[condition]}, ${value})`;
+      const entries = Object.entries(branches).map(([expr, items]) => [createMultiFilter(items), expr]).sort(([a], [b]) => a.length - b.length).reverse();
+      let value = entries[0][1];
+      for (let index = 1; index < entries.length; index++) {
+        value = `if(${entries[index][0]}, ${entries[index][1]}, ${value})`;
       }
       return `${escaped} = ${value}`;
     }).join(", ");
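upsert() now groups the incoming rows by the update expression toUpdateExpr returns for each column and folds the groups into nested if() branches of the ON DUPLICATE KEY UPDATE clause, instead of accumulating branches row by row inside a filter callback. A rough illustration of the generated shape, for two rows keyed by id where only the first supplies a value for flag (identifiers invented):

    // upsert('foo', [{ id: 1, flag: 4 }, { id: 2 }], 'id') ends in something like:
    //   ON DUPLICATE KEY UPDATE `flag` = if(`id` = 1, VALUES(`flag`), `flag`)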
package/lib/index.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../src/index.ts"],
-
"sourcesContent": ["import { createPool, escapeId, format, escape as mysqlEscape } from '@vlasky/mysql'\nimport type { OkPacket, Pool, PoolConfig } from 'mysql'\nimport { Context, Database, Dict, difference, KoishiError, Logger, makeArray, Model, pick, Query, Schema, Tables, Time } from 'koishi'\nimport { executeUpdate } from '@koishijs/orm-utils'\nimport { Builder } from '@koishijs/sql-utils'\n\ndeclare module 'mysql' {\n interface UntypedFieldInfo {\n packet: UntypedFieldInfo\n }\n}\n\ndeclare module 'koishi' {\n interface Database {\n mysql: MysqlDatabase\n }\n\n interface Modules {\n 'database-mysql': typeof import('.')\n }\n}\n\nconst logger = new Logger('mysql')\n\nconst DEFAULT_DATE = new Date('1970-01-01')\n\nexport type TableType = keyof Tables\n\nfunction getIntegerType(length = 11) {\n if (length <= 4) return 'tinyint'\n if (length <= 6) return 'smallint'\n if (length <= 9) return 'mediumint'\n if (length <= 11) return 'int'\n return 'bigint'\n}\n\nfunction getTypeDefinition({ type, length, precision, scale }: Model.Field) {\n switch (type) {\n case 'float':\n case 'double':\n case 'date':\n case 'time': return type\n case 'timestamp': return 'datetime'\n case 'integer': return getIntegerType(length)\n case 'unsigned': return `${getIntegerType(length)} unsigned`\n case 'decimal': return `decimal(${precision}, ${scale}) unsigned`\n case 'char': return `char(${length || 255})`\n case 'string': return `varchar(${length || 255})`\n case 'text': return `text(${length || 65535})`\n case 'list': return `text(${length || 65535})`\n case 'json': return `text(${length || 65535})`\n }\n}\n\nfunction createIndex(keys: string | string[]) {\n return makeArray(keys).map(key => escapeId(key)).join(', ')\n}\n\nclass MySQLBuilder extends Builder {\n constructor(private model: Model) {\n super()\n }\n\n format(sql: string, values: any[], stringifyObjects?: boolean, timeZone?: string) {\n return format(sql, values, stringifyObjects, timeZone)\n }\n\n escapeId(value: string, forbidQualified?: boolean) {\n return escapeId(value, forbidQualified)\n }\n\n escape(value: any, table?: string, field?: string) {\n return mysqlEscape(this.stringify(value, table, field))\n }\n\n stringify(value: any, table?: string, field?: string) {\n const type = MysqlDatabase.tables[table]?.[field]\n if (typeof type === 'object') return type.stringify(value)\n\n const meta = this.model.config[table]?.fields[field]\n if (meta?.type === 'json') {\n return JSON.stringify(value)\n } else if (meta?.type === 'list') {\n return value.join(',')\n } else if (Model.Field.date.includes(meta?.type)) {\n return Time.template('yyyy-MM-dd hh:mm:ss', value)\n }\n\n return value\n }\n}\n\ninterface QueryTask {\n sql: string\n resolve: (value: any) => void\n reject: (error: Error) => void\n}\n\nclass MysqlDatabase extends Database {\n public pool: Pool\n public config: MysqlDatabase.Config\n\n mysql = this\n sql: MySQLBuilder\n\n private _tableTasks: Dict<Promise<any>> = {}\n private _queryTasks: QueryTask[] = []\n\n constructor(public ctx: Context, config?: MysqlDatabase.Config) {\n super(ctx)\n\n this.config = {\n host: 'localhost',\n port: 3306,\n user: 'root',\n database: 'koishi',\n charset: 'utf8mb4_general_ci',\n multipleStatements: true,\n typeCast: (field, next) => {\n const { orgName, orgTable } = field.packet\n const type = MysqlDatabase.tables[orgTable]?.[orgName]\n if (typeof type === 'object') return type.parse(field)\n\n const meta = this.ctx.model.config[orgTable]?.fields[orgName]\n if (meta?.type === 'string') {\n return 
field.string()\n } else if (meta?.type === 'json') {\n const source = field.string()\n return source ? JSON.parse(source) : meta.initial\n } else if (meta?.type === 'list') {\n const source = field.string()\n return source ? source.split(',') : []\n } else if (meta?.type === 'time') {\n const source = field.string()\n if (!source) return meta.initial\n const time = new Date(DEFAULT_DATE)\n const [h, m, s] = source.split(':')\n time.setHours(parseInt(h))\n time.setMinutes(parseInt(m))\n time.setSeconds(parseInt(s))\n return time\n }\n\n if (field.type === 'BIT') {\n return Boolean(field.buffer()?.readUInt8(0))\n } else {\n return next()\n }\n },\n ...config,\n }\n\n this.sql = new MySQLBuilder(this.ctx.model)\n }\n\n async start() {\n this.pool = createPool(this.config)\n\n for (const name in this.ctx.model.config) {\n this._tableTasks[name] = this._syncTable(name)\n }\n\n this.ctx.on('model', (name) => {\n this._tableTasks[name] = this._syncTable(name)\n })\n }\n\n stop() {\n this.pool.end()\n }\n\n private _getColDefs(name: string, columns: string[]) {\n const table = this.ctx.model.config[name]\n const { primary, foreign, autoInc } = table\n const fields = { ...table.fields }\n const unique = [...table.unique]\n const result: string[] = []\n\n // orm definitions\n for (const key in fields) {\n if (columns.includes(key)) continue\n const { initial, nullable = true } = fields[key]\n let def = escapeId(key)\n if (key === primary && autoInc) {\n def += ' int unsigned not null auto_increment'\n } else {\n const typedef = getTypeDefinition(fields[key])\n def += ' ' + typedef\n if (makeArray(primary).includes(key)) {\n def += ' not null'\n } else {\n def += (nullable ? ' ' : ' not ') + 'null'\n }\n // blob, text, geometry or json columns cannot have default values\n if (initial && !typedef.startsWith('text')) {\n def += ' default ' + this.sql.escape(initial, name, key)\n }\n }\n result.push(def)\n }\n\n if (!columns.length) {\n result.push(`primary key (${createIndex(primary)})`)\n for (const key of unique) {\n result.push(`unique index (${createIndex(key)})`)\n }\n for (const key in foreign) {\n const [table, key2] = foreign[key]\n result.push(`foreign key (${escapeId(key)}) references ${escapeId(table)} (${escapeId(key2)})`)\n }\n }\n\n return result\n }\n\n /** synchronize table schema */\n private async _syncTable(name: string) {\n await this._tableTasks[name]\n // eslint-disable-next-line max-len\n const data = await this.queue<any[]>('SELECT COLUMN_NAME from information_schema.columns WHERE TABLE_SCHEMA = ? && TABLE_NAME = ?', [this.config.database, name])\n const columns = data.map(row => row.COLUMN_NAME)\n const result = this._getColDefs(name, columns)\n if (!columns.length) {\n logger.info('auto creating table %c', name)\n await this.queue(`CREATE TABLE ?? (${result.join(',')}) COLLATE = ?`, [name, this.config.charset])\n } else if (result.length) {\n logger.info('auto updating table %c', name)\n await this.queue(`ALTER TABLE ?? ${result.map(def => 'ADD ' + def).join(',')}`, [name])\n }\n }\n\n _inferFields<T extends TableType>(table: T, keys: readonly string[]) {\n if (!keys) return\n const types = MysqlDatabase.tables[table] || {}\n return keys.map((key) => {\n const type = types[key]\n return typeof type === 'function' ? `${type()} AS ${key}` : key\n }) as (keyof Tables[T])[]\n }\n\n _createFilter(name: TableType, query: Query) {\n return this.sql.parseQuery(this.ctx.model.resolveQuery(name, query))\n }\n\n _joinKeys = (keys: readonly string[]) => {\n return keys ? 
keys.map(key => key.includes('`') ? key : `\\`${key}\\``).join(',') : '*'\n }\n\n _formatValues = (table: string, data: object, keys: readonly string[]) => {\n return keys.map((key) => this.sql.stringify(data[key], table as never, key))\n }\n\n query<T = any>(sql: string, values?: any): Promise<T> {\n const error = new Error()\n return new Promise((resolve, reject) => {\n sql = format(sql, values)\n logger.debug('[sql]', sql)\n this.pool.query(sql, (err: Error, results) => {\n if (!err) return resolve(results)\n logger.warn(sql)\n if (err['code'] === 'ER_DUP_ENTRY') {\n err = new KoishiError(err.message, 'database.duplicate-entry')\n }\n err.stack = err.message + error.stack.slice(5)\n reject(err)\n })\n })\n }\n\n queue<T = any>(sql: string, values?: any): Promise<T> {\n if (!this.config.multipleStatements) {\n return this.query(sql, values)\n }\n\n sql = format(sql, values)\n return new Promise<any>((resolve, reject) => {\n this._queryTasks.push({ sql, resolve, reject })\n process.nextTick(() => this._flushTasks())\n })\n }\n\n private async _flushTasks() {\n const tasks = this._queryTasks\n if (!tasks.length) return\n this._queryTasks = []\n\n try {\n let results = await this.query(tasks.map(task => task.sql).join('; '))\n if (tasks.length === 1) results = [results]\n tasks.forEach((task, index) => {\n task.resolve(results[index])\n })\n } catch (error) {\n tasks.forEach(task => task.reject(error))\n }\n }\n\n select<T extends {}>(table: string, fields: readonly (string & keyof T)[], conditional?: string, values?: readonly any[]): Promise<T[]>\n select(table: string, fields: string[], conditional?: string, values: readonly any[] = []) {\n logger.debug(`[select] ${table}: ${fields ? fields.join(', ') : '*'}`)\n const sql = 'SELECT '\n + this._joinKeys(fields)\n + (table.includes('.') ? `FROM ${table}` : ' FROM `' + table + `\\` _${table}`)\n + (conditional ? 
' WHERE ' + conditional : '')\n return this.queue(sql, values)\n }\n\n async drop() {\n const data = await this.select('information_schema.tables', ['TABLE_NAME'], 'TABLE_SCHEMA = ?', [this.config.database])\n if (!data.length) return\n await this.query(data.map(({ TABLE_NAME }) => `DROP TABLE ${this.sql.escapeId(TABLE_NAME)}`).join('; '))\n }\n\n async stats() {\n const data = await this.select('information_schema.tables', ['TABLE_NAME', 'TABLE_ROWS', 'DATA_LENGTH'], 'TABLE_SCHEMA = ?', [this.config.database])\n const stats: Query.Stats = { size: 0 }\n stats.tables = Object.fromEntries(data.map(({ TABLE_NAME: name, TABLE_ROWS: count, DATA_LENGTH: size }) => {\n stats.size += size\n return [name, { count, size }]\n }))\n return stats\n }\n\n async get(name: TableType, query: Query, modifier?: Query.Modifier) {\n await this._tableTasks[name]\n const filter = this._createFilter(name, query)\n if (filter === '0') return []\n const { fields, limit, offset, sort } = Query.resolveModifier(modifier)\n const keys = this._joinKeys(this._inferFields(name, fields))\n let sql = `SELECT ${keys} FROM ${name} _${name} WHERE ${filter}`\n if (sort) sql += ' ORDER BY ' + Object.entries(sort).map(([key, order]) => `${this.sql.escapeId(key)} ${order}`).join(', ')\n if (limit) sql += ' LIMIT ' + limit\n if (offset) sql += ' OFFSET ' + offset\n return this.queue(sql)\n }\n\n async set(name: TableType, query: Query, data: {}) {\n await this._tableTasks[name]\n const filter = this._createFilter(name, query)\n if (filter === '0') return\n const keys = Object.keys(data)\n const update = keys.map((key) => {\n const valueExpr = this.sql.parseEval(data[key], name, key)\n const [field, ...rest] = key.split('.')\n const keyExpr = this.sql.escapeId(field)\n if (!rest.length) return `${keyExpr} = ${valueExpr}`\n return `${keyExpr} = json_set(ifnull(${keyExpr}, '{}'), '$${rest.map(key => `.\"${key}\"`).join('')}', ${valueExpr})`\n }).join(', ')\n await this.query(`UPDATE ${name} SET ${update} WHERE ${filter}`)\n }\n\n async remove(name: TableType, query: Query) {\n await this._tableTasks[name]\n const filter = this._createFilter(name, query)\n if (filter === '0') return\n await this.query('DELETE FROM ?? WHERE ' + filter, [name])\n }\n\n async create(name: TableType, data: {}) {\n await this._tableTasks[name]\n data = { ...this.ctx.model.create(name), ...data }\n const keys = Object.keys(data)\n const header = await this.query<OkPacket>(\n `INSERT INTO ?? 
(${this._joinKeys(keys)}) VALUES (${keys.map(() => '?').join(', ')})`,\n [name, ...this._formatValues(name, data, keys)],\n )\n return { ...data, id: header.insertId } as any\n }\n\n async upsert(name: TableType, data: any[], keys: string | string[]) {\n if (!data.length) return\n await this._tableTasks[name]\n\n const { fields, primary } = this.ctx.model.config[name]\n const merged = {}\n const insertion = data.map((item) => {\n Object.assign(merged, item)\n return executeUpdate(this.ctx.model.create(name), item)\n })\n const indexFields = makeArray(keys || primary)\n const dataFields = [...new Set(Object.keys(merged).map(key => key.split('.', 1)[0]))]\n const updateFields = difference(dataFields, indexFields)\n\n const createFilter = (item: any) => this.sql.parseQuery(pick(item, indexFields))\n const createMultiFilter = (items: any[]) => {\n if (items.length === 1) {\n return createFilter(items[0])\n } else if (indexFields.length === 1) {\n const key = indexFields[0]\n return this.sql.parseQuery({ [key]: items.map(item => item[key]) })\n } else {\n return items.map(createFilter).join(' OR ')\n }\n }\n\n const update = updateFields.map((field) => {\n const escaped = this.sql.escapeId(field)\n const branches: Dict<string> = {}\n const absent = data.filter((item) => {\n // update directly\n if (field in item) {\n if (Object.keys(item[field]).some(key => key.startsWith('$'))) {\n branches[createFilter(item)] = this.sql.parseEval(item[field], name, field)\n }\n return\n }\n\n // update with json_set\n const valueInit = `ifnull(${escaped}, '{}')`\n let value = valueInit\n for (const key in item) {\n const [first, ...rest] = key.split('.')\n if (first !== field) continue\n value = `json_set(${value}, '$${rest.map(key => `.\"${key}\"`).join('')}', ${this.sql.parseEval(item[key])})`\n }\n if (value === valueInit) return true\n branches[createFilter(item)] = value\n })\n\n if (absent.length) branches[createMultiFilter(absent)] = escaped\n let value = `VALUES(${escaped})`\n for (const condition in branches) {\n value = `if(${condition}, ${branches[condition]}, ${value})`\n }\n return `${escaped} = ${value}`\n }).join(', ')\n\n const initFields = Object.keys(fields)\n const placeholder = `(${initFields.map(() => '?').join(', ')})`\n await this.query(\n `INSERT INTO ${this.sql.escapeId(name)} (${this._joinKeys(initFields)}) VALUES ${data.map(() => placeholder).join(', ')}\n ON DUPLICATE KEY UPDATE ${update}`,\n [].concat(...insertion.map(item => this._formatValues(name, item, initFields))),\n )\n }\n\n async eval(name: TableType, expr: any, query: Query) {\n await this._tableTasks[name]\n const filter = this._createFilter(name, query)\n const output = this.sql.parseEval(expr)\n const [data] = await this.queue(`SELECT ${output} AS value FROM ${name} WHERE ${filter}`)\n return data.value\n }\n}\n\nnamespace MysqlDatabase {\n export interface Config extends PoolConfig {}\n\n export const Config = Schema.object({\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.natural().max(65535).description('要连接到的端口号。').default(3306),\n user: Schema.string().description('要使用的用户名。').default('root'),\n password: Schema.string().description('要使用的密码。').role('secret'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n })\n\n type Declarations = {\n [T in TableType]?: {\n [K in keyof Tables[T]]?: () => string\n }\n }\n\n /**\n * @deprecated use `import('koishi').Field` instead\n */\n export const tables: Declarations = {\n user: {},\n channel: {},\n }\n}\n\nexport 
default MysqlDatabase\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,mBAAoE;AAEpE,
+
"sourcesContent": ["import { createPool, escapeId, format, escape as mysqlEscape } from '@vlasky/mysql'\nimport type { OkPacket, Pool, PoolConfig } from 'mysql'\nimport { Context, Database, Dict, difference, DriverError, Logger, makeArray, pick, Schema, Tables, Time } from 'koishi'\nimport { Driver, executeUpdate, isEvalExpr, Model, Modifier, Query } from '@koishijs/orm'\nimport { Builder } from '@koishijs/sql-utils'\n\ndeclare module 'mysql' {\n interface UntypedFieldInfo {\n packet: UntypedFieldInfo\n }\n}\n\ndeclare module 'koishi' {\n interface Database {\n mysql: MysqlDatabase\n }\n\n interface Modules {\n 'database-mysql': typeof import('.')\n }\n}\n\nconst logger = new Logger('mysql')\n\nconst DEFAULT_DATE = new Date('1970-01-01')\n\nfunction getIntegerType(length = 11) {\n if (length <= 4) return 'tinyint'\n if (length <= 6) return 'smallint'\n if (length <= 9) return 'mediumint'\n if (length <= 11) return 'int'\n return 'bigint'\n}\n\nfunction getTypeDefinition({ type, length, precision, scale }: Model.Field) {\n switch (type) {\n case 'float':\n case 'double':\n case 'date':\n case 'time': return type\n case 'timestamp': return 'datetime'\n case 'integer': return getIntegerType(length)\n case 'unsigned': return `${getIntegerType(length)} unsigned`\n case 'decimal': return `decimal(${precision}, ${scale}) unsigned`\n case 'char': return `char(${length || 255})`\n case 'string': return `varchar(${length || 255})`\n case 'text': return `text(${length || 65535})`\n case 'list': return `text(${length || 65535})`\n case 'json': return `text(${length || 65535})`\n }\n}\n\nfunction backtick(str: string) {\n return '`' + str + '`'\n}\n\nfunction createIndex(keys: string | string[]) {\n return makeArray(keys).map(backtick).join(', ')\n}\n\nclass MySQLBuilder extends Builder {\n constructor(private model: Model) {\n super()\n }\n\n format(sql: string, values: any[], stringifyObjects?: boolean, timeZone?: string) {\n return format(sql, values, stringifyObjects, timeZone)\n }\n\n escapeId(value: string, forbidQualified?: boolean) {\n return escapeId(value, forbidQualified)\n }\n\n escape(value: any, table?: string, field?: string) {\n return mysqlEscape(this.stringify(value, table, field))\n }\n\n stringify(value: any, table?: string, field?: string) {\n const type = MysqlDatabase.tables[table]?.[field]\n if (typeof type === 'object') return type.stringify(value)\n\n const meta = this.model.config[table]?.fields[field]\n if (meta?.type === 'json') {\n return JSON.stringify(value)\n } else if (meta?.type === 'list') {\n return value.join(',')\n } else if (Model.Field.date.includes(meta?.type)) {\n return Time.template('yyyy-MM-dd hh:mm:ss', value)\n }\n\n return value\n }\n}\n\ninterface QueryTask {\n sql: string\n resolve: (value: any) => void\n reject: (error: Error) => void\n}\n\nclass MysqlDatabase extends Database {\n public pool: Pool\n public config: MysqlDatabase.Config\n\n mysql = this\n sql: MySQLBuilder\n\n private _tableTasks: Dict<Promise<any>> = {}\n private _queryTasks: QueryTask[] = []\n\n constructor(public ctx: Context, config?: MysqlDatabase.Config) {\n super(ctx)\n\n this.config = {\n host: 'localhost',\n port: 3306,\n user: 'root',\n database: 'koishi',\n charset: 'utf8mb4_general_ci',\n multipleStatements: true,\n typeCast: (field, next) => {\n const { orgName, orgTable } = field.packet\n const type = MysqlDatabase.tables[orgTable]?.[orgName]\n if (typeof type === 'object') return type.parse(field)\n\n const meta = this.model.config[orgTable]?.fields[orgName]\n if 
(Model.Field.string.includes(meta?.type)) {\n return field.string()\n } else if (meta?.type === 'json') {\n const source = field.string()\n return source ? JSON.parse(source) : meta.initial\n } else if (meta?.type === 'list') {\n const source = field.string()\n return source ? source.split(',') : []\n } else if (meta?.type === 'time') {\n const source = field.string()\n if (!source) return meta.initial\n const time = new Date(DEFAULT_DATE)\n const [h, m, s] = source.split(':')\n time.setHours(parseInt(h))\n time.setMinutes(parseInt(m))\n time.setSeconds(parseInt(s))\n return time\n }\n\n if (field.type === 'BIT') {\n return Boolean(field.buffer()?.readUInt8(0))\n } else {\n return next()\n }\n },\n ...config,\n }\n\n this.sql = new MySQLBuilder(this.model)\n }\n\n async start() {\n this.pool = createPool(this.config)\n\n for (const name in this.model.config) {\n this._tableTasks[name] = this._syncTable(name as keyof Tables)\n }\n\n this.ctx.on('model', (name) => {\n this._tableTasks[name] = this._syncTable(name)\n })\n }\n\n stop() {\n this.pool.end()\n }\n\n private _getColDefs(name: keyof Tables, columns: string[]) {\n const table = this.resolveTable(name)\n const { primary, foreign, autoInc } = table\n const fields = { ...table.fields }\n const unique = [...table.unique]\n const result: string[] = []\n\n // orm definitions\n for (const key in fields) {\n if (columns.includes(key)) continue\n const { initial, nullable = true } = fields[key]\n let def = backtick(key)\n if (key === primary && autoInc) {\n def += ' int unsigned not null auto_increment'\n } else {\n const typedef = getTypeDefinition(fields[key])\n def += ' ' + typedef\n if (makeArray(primary).includes(key)) {\n def += ' not null'\n } else {\n def += (nullable ? ' ' : ' not ') + 'null'\n }\n // blob, text, geometry or json columns cannot have default values\n if (initial && !typedef.startsWith('text')) {\n def += ' default ' + this.sql.escape(initial, name, key)\n }\n }\n result.push(def)\n }\n\n if (!columns.length) {\n result.push(`primary key (${createIndex(primary)})`)\n for (const key of unique) {\n result.push(`unique index (${createIndex(key)})`)\n }\n for (const key in foreign) {\n const [table, key2] = foreign[key]\n result.push(`foreign key (${backtick(key)}) references ${escapeId(table)} (${backtick(key2)})`)\n }\n }\n\n return result\n }\n\n /** synchronize table schema */\n private async _syncTable(name: keyof Tables) {\n await this._tableTasks[name]\n // eslint-disable-next-line max-len\n const data = await this.queue<any[]>('SELECT COLUMN_NAME from information_schema.columns WHERE TABLE_SCHEMA = ? && TABLE_NAME = ?', [this.config.database, name])\n const columns = data.map(row => row.COLUMN_NAME)\n const result = this._getColDefs(name, columns)\n if (!columns.length) {\n logger.info('auto creating table %c', name)\n await this.queue(`CREATE TABLE ?? (${result.join(',')}) COLLATE = ?`, [name, this.config.charset])\n } else if (result.length) {\n logger.info('auto updating table %c', name)\n await this.queue(`ALTER TABLE ?? ${result.map(def => 'ADD ' + def).join(',')}`, [name])\n }\n }\n\n _inferFields<T extends keyof Tables>(table: T, keys: readonly string[]) {\n if (!keys) return\n const types = MysqlDatabase.tables[table] || {}\n return keys.map((key) => {\n const type = types[key]\n return typeof type === 'function' ? 
`${type()} AS ${key}` : key\n }) as (keyof Tables[T])[]\n }\n\n _createFilter(name: keyof Tables, query: Query) {\n return this.sql.parseQuery(this.resolveQuery(name, query))\n }\n\n _joinKeys = (keys: readonly string[]) => {\n return keys ? keys.map(key => key.includes('`') ? key : `\\`${key}\\``).join(',') : '*'\n }\n\n _formatValues = (table: string, data: object, keys: readonly string[]) => {\n return keys.map((key) => this.sql.stringify(data[key], table as never, key))\n }\n\n query<T = any>(sql: string, values?: any): Promise<T> {\n const error = new Error()\n return new Promise((resolve, reject) => {\n sql = format(sql, values)\n logger.debug('[sql]', sql)\n this.pool.query(sql, (err: Error, results) => {\n if (!err) return resolve(results)\n logger.warn(sql)\n if (err['code'] === 'ER_DUP_ENTRY') {\n err = new DriverError('duplicate-entry', err.message)\n }\n err.stack = err.message + error.stack.slice(5)\n reject(err)\n })\n })\n }\n\n queue<T = any>(sql: string, values?: any): Promise<T> {\n if (!this.config.multipleStatements) {\n return this.query(sql, values)\n }\n\n sql = format(sql, values)\n return new Promise<any>((resolve, reject) => {\n this._queryTasks.push({ sql, resolve, reject })\n process.nextTick(() => this._flushTasks())\n })\n }\n\n private async _flushTasks() {\n const tasks = this._queryTasks\n if (!tasks.length) return\n this._queryTasks = []\n\n try {\n let results = await this.query(tasks.map(task => task.sql).join('; '))\n if (tasks.length === 1) results = [results]\n tasks.forEach((task, index) => {\n task.resolve(results[index])\n })\n } catch (error) {\n tasks.forEach(task => task.reject(error))\n }\n }\n\n select<T extends {}>(table: string, fields: readonly (string & keyof T)[], conditional?: string, values?: readonly any[]): Promise<T[]>\n select(table: string, fields: string[], conditional?: string, values: readonly any[] = []) {\n logger.debug(`[select] ${table}: ${fields ? fields.join(', ') : '*'}`)\n const sql = 'SELECT '\n + this._joinKeys(fields)\n + (table.includes('.') ? `FROM ${table}` : ' FROM `' + table + `\\` _${table}`)\n + (conditional ? 
' WHERE ' + conditional : '')\n return this.queue(sql, values)\n }\n\n async drop() {\n const data = await this.select('information_schema.tables', ['TABLE_NAME'], 'TABLE_SCHEMA = ?', [this.config.database])\n if (!data.length) return\n await this.query(data.map(({ TABLE_NAME }) => `DROP TABLE ${this.sql.escapeId(TABLE_NAME)}`).join('; '))\n }\n\n async stats() {\n const data = await this.select('information_schema.tables', ['TABLE_NAME', 'TABLE_ROWS', 'DATA_LENGTH'], 'TABLE_SCHEMA = ?', [this.config.database])\n const stats: Driver.Stats = { size: 0 }\n stats.tables = Object.fromEntries(data.map(({ TABLE_NAME: name, TABLE_ROWS: count, DATA_LENGTH: size }) => {\n stats.size += size\n return [name, { count, size }]\n }))\n return stats\n }\n\n async get(name: keyof Tables, query: Query, modifier?: Modifier) {\n await this._tableTasks[name]\n const filter = this._createFilter(name, query)\n if (filter === '0') return []\n const { fields, limit, offset, sort } = this.resolveModifier(name, modifier)\n const keys = this._joinKeys(this._inferFields(name, fields))\n let sql = `SELECT ${keys} FROM ${name} _${name} WHERE ${filter}`\n if (sort) sql += ' ORDER BY ' + Object.entries(sort).map(([key, order]) => `${backtick(key)} ${order}`).join(', ')\n if (limit) sql += ' LIMIT ' + limit\n if (offset) sql += ' OFFSET ' + offset\n return this.queue(sql).then((data) => {\n return data.map((row) => this.model.parse(name, row))\n })\n }\n\n private toUpdateExpr(name: string, item: any, field: string, upsert: boolean) {\n const escaped = backtick(field)\n\n // update directly\n if (field in item) {\n if (isEvalExpr(item[field]) || !upsert) {\n return this.sql.parseEval(item[field], name, field)\n } else {\n return `VALUES(${escaped})`\n }\n }\n\n // update with json_set\n const valueInit = `ifnull(${escaped}, '{}')`\n let value = valueInit\n for (const key in item) {\n if (!key.startsWith(field + '.')) continue\n const rest = key.slice(field.length + 1).split('.')\n value = `json_set(${value}, '$${rest.map(key => `.\"${key}\"`).join('')}', ${this.sql.parseEval(item[key])})`\n }\n\n if (value === valueInit) {\n return escaped\n } else {\n return value\n }\n }\n\n async set(name: keyof Tables, query: Query, data: {}) {\n data = this.model.format(name, data)\n const { fields } = this.resolveTable(name)\n await this._tableTasks[name]\n const filter = this._createFilter(name, query)\n if (filter === '0') return\n const updateFields = [...new Set(Object.keys(data).map((key) => {\n return Object.keys(fields).find(field => field === key || key.startsWith(field + '.'))\n }))]\n\n const update = updateFields.map((field) => {\n const escaped = backtick(field)\n return `${escaped} = ${this.toUpdateExpr(name, data, field, false)}`\n }).join(', ')\n\n await this.query(`UPDATE ${name} SET ${update} WHERE ${filter}`)\n }\n\n async remove(name: keyof Tables, query: Query) {\n await this._tableTasks[name]\n const filter = this._createFilter(name, query)\n if (filter === '0') return\n await this.query('DELETE FROM ?? WHERE ' + filter, [name])\n }\n\n async create<T extends keyof Tables>(name: T, data: {}) {\n await this._tableTasks[name]\n data = this.model.create(name, data)\n const formatted = this.model.format(name, data)\n const { autoInc, primary } = this.resolveTable(name)\n const keys = Object.keys(formatted)\n const header = await this.query<OkPacket>(\n `INSERT INTO ?? 
(${this._joinKeys(keys)}) VALUES (${keys.map(() => '?').join(', ')})`,\n [name, ...this._formatValues(name, formatted, keys)],\n )\n if (!autoInc) return data as any\n return { ...data, [primary as string]: header.insertId } as any\n }\n\n async upsert(name: keyof Tables, data: any[], keys: string | string[]) {\n if (!data.length) return\n data = data.map(item => this.model.format(name, item))\n await this._tableTasks[name]\n\n const { fields, primary } = this.resolveTable(name)\n const merged = {}\n const insertion = data.map((item) => {\n Object.assign(merged, item)\n return this.model.format(name, executeUpdate(this.model.create(name), item))\n })\n const indexFields = makeArray(keys || primary)\n const dataFields = [...new Set(Object.keys(merged).map((key) => {\n return Object.keys(fields).find(field => field === key || key.startsWith(field + '.'))\n }))]\n const updateFields = difference(dataFields, indexFields)\n\n const createFilter = (item: any) => this.sql.parseQuery(pick(item, indexFields))\n const createMultiFilter = (items: any[]) => {\n if (items.length === 1) {\n return createFilter(items[0])\n } else if (indexFields.length === 1) {\n const key = indexFields[0]\n return this.sql.parseQuery({ [key]: items.map(item => item[key]) })\n } else {\n return items.map(createFilter).join(' OR ')\n }\n }\n\n const update = updateFields.map((field) => {\n const escaped = backtick(field)\n const branches: Dict<any[]> = {}\n data.forEach((item) => {\n (branches[this.toUpdateExpr(name, item, field, true)] ??= []).push(item)\n })\n\n const entries = Object.entries(branches)\n .map(([expr, items]) => [createMultiFilter(items), expr])\n .sort(([a], [b]) => a.length - b.length)\n .reverse()\n\n let value = entries[0][1]\n for (let index = 1; index < entries.length; index++) {\n value = `if(${entries[index][0]}, ${entries[index][1]}, ${value})`\n }\n return `${escaped} = ${value}`\n }).join(', ')\n\n const initFields = Object.keys(fields)\n const placeholder = `(${initFields.map(() => '?').join(', ')})`\n await this.query(\n `INSERT INTO ${this.sql.escapeId(name)} (${this._joinKeys(initFields)}) VALUES ${data.map(() => placeholder).join(', ')}\n ON DUPLICATE KEY UPDATE ${update}`,\n [].concat(...insertion.map(item => this._formatValues(name, item, initFields))),\n )\n }\n\n async eval(name: keyof Tables, expr: any, query: Query) {\n await this._tableTasks[name]\n const filter = this._createFilter(name, query)\n const output = this.sql.parseEval(expr)\n const [data] = await this.queue(`SELECT ${output} AS value FROM ${name} WHERE ${filter}`)\n return data.value\n }\n}\n\nnamespace MysqlDatabase {\n export interface Config extends PoolConfig {}\n\n export const Config = Schema.object({\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.natural().max(65535).description('要连接到的端口号。').default(3306),\n user: Schema.string().description('要使用的用户名。').default('root'),\n password: Schema.string().description('要使用的密码。').role('secret'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n })\n\n type Declarations = {\n [T in keyof Tables]?: {\n [K in keyof Tables[T]]?: () => string\n }\n }\n\n /**\n * @deprecated use `import('koishi').Field` instead\n */\n export const tables: Declarations = {\n user: {},\n channel: {},\n }\n}\n\nexport default MysqlDatabase\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,mBAAoE;AAEpE,oBAAgH;AAChH,iBAA0E;AAC1E,uBAAwB;AAkBxB,IAAM,SAAS,IAAI,qBAAO;AAE1B,IAAM,eAAe,IAAI,KAAK;AAE9B,wBAAwB,SAAS,IAAI;AACnC,MAAI,UAAU;AAAG,WAAO;AACxB,MAAI,UAAU;AAAG,WAAO;AACxB,MAAI,UAAU;AAAG,WAAO;AACxB,MAAI,UAAU;AAAI,WAAO;AACzB,SAAO;AAAA;AALA;AAQT,2BAA2B,EAAE,MAAM,QAAQ,WAAW,SAAsB;AAC1E,UAAQ;AAAA,SACD;AAAA,SACA;AAAA,SACA;AAAA,SACA;AAAQ,aAAO;AAAA,SACf;AAAa,aAAO;AAAA,SACpB;AAAW,aAAO,eAAe;AAAA,SACjC;AAAY,aAAO,GAAG,eAAe;AAAA,SACrC;AAAW,aAAO,WAAW,cAAc;AAAA,SAC3C;AAAQ,aAAO,QAAQ,UAAU;AAAA,SACjC;AAAU,aAAO,WAAW,UAAU;AAAA,SACtC;AAAQ,aAAO,QAAQ,UAAU;AAAA,SACjC;AAAQ,aAAO,QAAQ,UAAU;AAAA,SACjC;AAAQ,aAAO,QAAQ,UAAU;AAAA;AAAA;AAdjC;AAkBT,kBAAkB,KAAa;AAC7B,SAAO,MAAM,MAAM;AAAA;AADZ;AAIT,qBAAqB,MAAyB;AAC5C,SAAO,6BAAU,MAAM,IAAI,UAAU,KAAK;AAAA;AADnC;AAIT,iCAA2B,yBAAQ;AAAA,EACjC,YAAoB,OAAc;AAChC;AADkB;AAAA;AAAA,EAIpB,OAAO,KAAa,QAAe,kBAA4B,UAAmB;AAChF,WAAO,yBAAO,KAAK,QAAQ,kBAAkB;AAAA;AAAA,EAG/C,SAAS,OAAe,iBAA2B;AACjD,WAAO,2BAAS,OAAO;AAAA;AAAA,EAGzB,OAAO,OAAY,OAAgB,OAAgB;AACjD,WAAO,yBAAY,KAAK,UAAU,OAAO,OAAO;AAAA;AAAA,EAGlD,UAAU,OAAY,OAAgB,OAAgB;AA7ExD;AA8EI,UAAM,OAAO,oBAAc,OAAO,WAArB,mBAA8B;AAC3C,QAAI,OAAO,SAAS;AAAU,aAAO,KAAK,UAAU;AAEpD,UAAM,OAAO,WAAK,MAAM,OAAO,WAAlB,mBAA0B,OAAO;AAC9C,QAAI,8BAAM,UAAS,QAAQ;AACzB,aAAO,KAAK,UAAU;AAAA,eACb,8BAAM,UAAS,QAAQ;AAChC,aAAO,MAAM,KAAK;AAAA,eACT,iBAAM,MAAM,KAAK,SAAS,6BAAM,OAAO;AAChD,aAAO,mBAAK,SAAS,uBAAuB;AAAA;AAG9C,WAAO;AAAA;AAAA;AA9BX;AAwCA,kCAA4B,uBAAS;AAAA,EAUnC,YAAmB,KAAc,QAA+B;AAC9D,UAAM;AADW;AANnB,iBAAQ;AAGA,uBAAkC;AAClC,uBAA2B;AA0InC,qBAAY,CAAC,SAA4B;AACvC,aAAO,OAAO,KAAK,IAAI,SAAO,IAAI,SAAS,OAAO,MAAM,KAAK,SAAS,KAAK,OAAO;AAAA;AAGpF,yBAAgB,CAAC,OAAe,MAAc,SAA4B;AACxE,aAAO,KAAK,IAAI,CAAC,QAAQ,KAAK,IAAI,UAAU,KAAK,MAAM,OAAgB;AAAA;AA1IvE,SAAK,SAAS;AAAA,MACZ,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,MACT,oBAAoB;AAAA,MACpB,UAAU,CAAC,OAAO,SAAS;AAxHjC;AAyHQ,cAAM,EAAE,SAAS,aAAa,MAAM;AACpC,cAAM,OAAO,oBAAc,OAAO,cAArB,mBAAiC;AAC9C,YAAI,OAAO,SAAS;AAAU,iBAAO,KAAK,MAAM;AAEhD,cAAM,OAAO,WAAK,MAAM,OAAO,cAAlB,mBAA6B,OAAO;AACjD,YAAI,iBAAM,MAAM,OAAO,SAAS,6BAAM,OAAO;AAC3C,iBAAO,MAAM;AAAA,mBACJ,8BAAM,UAAS,QAAQ;AAChC,gBAAM,SAAS,MAAM;AACrB,iBAAO,SAAS,KAAK,MAAM,UAAU,KAAK;AAAA,mBACjC,8BAAM,UAAS,QAAQ;AAChC,gBAAM,SAAS,MAAM;AACrB,iBAAO,SAAS,OAAO,MAAM,OAAO;AAAA,mBAC3B,8BAAM,UAAS,QAAQ;AAChC,gBAAM,SAAS,MAAM;AACrB,cAAI,CAAC;AAAQ,mBAAO,KAAK;AACzB,gBAAM,OAAO,IAAI,KAAK;AACtB,gBAAM,CAAC,GAAG,GAAG,KAAK,OAAO,MAAM;AAC/B,eAAK,SAAS,SAAS;AACvB,eAAK,WAAW,SAAS;AACzB,eAAK,WAAW,SAAS;AACzB,iBAAO;AAAA;AAGT,YAAI,MAAM,SAAS,OAAO;AACxB,iBAAO,QAAQ,YAAM,aAAN,mBAAgB,UAAU;AAAA,eACpC;AACL,iBAAO;AAAA;AAAA;AAAA,OAGR;AAGL,SAAK,MAAM,IAAI,aAAa,KAAK;AAAA;AAAA,QAG7B,QAAQ;AACZ,SAAK,OAAO,6BAAW,KAAK;AAE5B,eAAW,QAAQ,KAAK,MAAM,QAAQ;AACpC,WAAK,YAAY,QAAQ,KAAK,WAAW;AAAA;AAG3C,SAAK,IAAI,GAAG,SAAS,CAAC,SAAS;AAC7B,WAAK,YAAY,QAAQ,KAAK,WAAW;AAAA;AAAA;AAAA,EAI7C,OAAO;AACL,SAAK,KAAK;AAAA;AAAA,EAGJ,YAAY,MAAoB,SAAmB;AACzD,UAAM,QAAQ,KAAK,aAAa;AAChC,UAAM,EAAE,SAAS,SAAS,YAAY;AACtC,UAAM,SAAS,mBAAK,MAAM;AAC1B,UAAM,SAAS,CAAC,GAAG,MAAM;AACzB,UAAM,SAAmB;AAGzB,eAAW,OAAO,QAAQ;AACxB,UAAI,QAAQ,SAAS;AAAM;AAC3B,YAAM,EAAE,SAAS,WAAW,SAAS,OAAO;AAC5C,UAAI,MAAM,SAAS;AACnB,UAAI,QAAQ,WAAW,SAAS;AAC9B,eAAO;AAAA,aACF;AACL,cAAM,UAAU,kBAAkB,OAAO;AACzC,eAAO,MAAM;AACb,YAAI,6BAAU,SAAS,SAAS,MAAM;AACpC,iBAAO;AAAA,eACF;AACL,iBAAQ,YAAW,MAAM,WAAW;AAAA;AAGtC,YAAI,WAAW,CAAC,QAAQ,WAAW,SAAS;AAC1C,iBAAO,cAAc,KAAK,IAAI,OAAO,SAAS,MAAM;AAAA;AAAA;AAGxD,aAAO,KAAK;AAAA;AAGd,QAAI,CAAC,QAAQ,QAAQ;AACnB,aAAO,KAAK,gBAAgB,YAAY;AACxC,iBAAW,OAAO,QAAQ;AACxB,eAAO,KAAK,iBAAiB,YAAY;AAAA;AAE3C,iBAAW,OAAO,SA
AS;AACzB,cAAM,CAAC,QAAO,QAAQ,QAAQ;AAC9B,eAAO,KAAK,gBAAgB,SAAS,oBAAoB,2BAAS,YAAW,SAAS;AAAA;AAAA;AAI1F,WAAO;AAAA;AAAA,QAIK,WAAW,MAAoB;AAC3C,UAAM,KAAK,YAAY;AAEvB,UAAM,OAAO,MAAM,KAAK,MAAa,+FAA+F,CAAC,KAAK,OAAO,UAAU;AAC3J,UAAM,UAAU,KAAK,IAAI,SAAO,IAAI;AACpC,UAAM,SAAS,KAAK,YAAY,MAAM;AACtC,QAAI,CAAC,QAAQ,QAAQ;AACnB,aAAO,KAAK,0BAA0B;AACtC,YAAM,KAAK,MAAM,oBAAoB,OAAO,KAAK,qBAAqB,CAAC,MAAM,KAAK,OAAO;AAAA,eAChF,OAAO,QAAQ;AACxB,aAAO,KAAK,0BAA0B;AACtC,YAAM,KAAK,MAAM,kBAAkB,OAAO,IAAI,SAAO,SAAS,KAAK,KAAK,QAAQ,CAAC;AAAA;AAAA;AAAA,EAIrF,aAAqC,OAAU,MAAyB;AACtE,QAAI,CAAC;AAAM;AACX,UAAM,QAAQ,cAAc,OAAO,UAAU;AAC7C,WAAO,KAAK,IAAI,CAAC,QAAQ;AACvB,YAAM,OAAO,MAAM;AACnB,aAAO,OAAO,SAAS,aAAa,GAAG,aAAa,QAAQ;AAAA;AAAA;AAAA,EAIhE,cAAc,MAAoB,OAAc;AAC9C,WAAO,KAAK,IAAI,WAAW,KAAK,aAAa,MAAM;AAAA;AAAA,EAWrD,MAAe,KAAa,QAA0B;AACpD,UAAM,QAAQ,IAAI;AAClB,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,YAAM,yBAAO,KAAK;AAClB,aAAO,MAAM,SAAS;AACtB,WAAK,KAAK,MAAM,KAAK,CAAC,KAAY,YAAY;AAC5C,YAAI,CAAC;AAAK,iBAAO,QAAQ;AACzB,eAAO,KAAK;AACZ,YAAI,IAAI,YAAY,gBAAgB;AAClC,gBAAM,IAAI,0BAAY,mBAAmB,IAAI;AAAA;AAE/C,YAAI,QAAQ,IAAI,UAAU,MAAM,MAAM,MAAM;AAC5C,eAAO;AAAA;AAAA;AAAA;AAAA,EAKb,MAAe,KAAa,QAA0B;AACpD,QAAI,CAAC,KAAK,OAAO,oBAAoB;AACnC,aAAO,KAAK,MAAM,KAAK;AAAA;AAGzB,UAAM,yBAAO,KAAK;AAClB,WAAO,IAAI,QAAa,CAAC,SAAS,WAAW;AAC3C,WAAK,YAAY,KAAK,EAAE,KAAK,SAAS;AACtC,cAAQ,SAAS,MAAM,KAAK;AAAA;AAAA;AAAA,QAIlB,cAAc;AAC1B,UAAM,QAAQ,KAAK;AACnB,QAAI,CAAC,MAAM;AAAQ;AACnB,SAAK,cAAc;AAEnB,QAAI;AACF,UAAI,UAAU,MAAM,KAAK,MAAM,MAAM,IAAI,UAAQ,KAAK,KAAK,KAAK;AAChE,UAAI,MAAM,WAAW;AAAG,kBAAU,CAAC;AACnC,YAAM,QAAQ,CAAC,MAAM,UAAU;AAC7B,aAAK,QAAQ,QAAQ;AAAA;AAAA,aAEhB,OAAP;AACA,YAAM,QAAQ,UAAQ,KAAK,OAAO;AAAA;AAAA;AAAA,EAKtC,OAAO,OAAe,QAAkB,aAAsB,SAAyB,IAAI;AACzF,WAAO,MAAM,YAAY,UAAU,SAAS,OAAO,KAAK,QAAQ;AAChE,UAAM,MAAM,YACR,KAAK,UAAU,UACd,OAAM,SAAS,OAAO,QAAQ,UAAU,YAAY,QAAQ,OAAO,WACnE,eAAc,YAAY,cAAc;AAC7C,WAAO,KAAK,MAAM,KAAK;AAAA;AAAA,QAGnB,OAAO;AACX,UAAM,OAAO,MAAM,KAAK,OAAO,6BAA6B,CAAC,eAAe,oBAAoB,CAAC,KAAK,OAAO;AAC7G,QAAI,CAAC,KAAK;AAAQ;AAClB,UAAM,KAAK,MAAM,KAAK,IAAI,CAAC,EAAE,iBAAiB,cAAc,KAAK,IAAI,SAAS,eAAe,KAAK;AAAA;AAAA,QAG9F,QAAQ;AACZ,UAAM,OAAO,MAAM,KAAK,OAAO,6BAA6B,CAAC,cAAc,cAAc,gBAAgB,oBAAoB,CAAC,KAAK,OAAO;AAC1I,UAAM,QAAsB,EAAE,MAAM;AACpC,UAAM,SAAS,OAAO,YAAY,KAAK,IAAI,CAAC,EAAE,YAAY,MAAM,YAAY,OAAO,aAAa,WAAW;AACzG,YAAM,QAAQ;AACd,aAAO,CAAC,MAAM,EAAE,OAAO;AAAA;AAEzB,WAAO;AAAA;AAAA,QAGH,IAAI,MAAoB,OAAc,UAAqB;AAC/D,UAAM,KAAK,YAAY;AACvB,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,QAAI,WAAW;AAAK,aAAO;AAC3B,UAAM,EAAE,QAAQ,OAAO,QAAQ,SAAS,KAAK,gBAAgB,MAAM;AACnE,UAAM,OAAO,KAAK,UAAU,KAAK,aAAa,MAAM;AACpD,QAAI,MAAM,UAAU,aAAa,SAAS,cAAc;AACxD,QAAI;AAAM,aAAO,eAAe,OAAO,QAAQ,MAAM,IAAI,CAAC,CAAC,KAAK,WAAW,GAAG,SAAS,QAAQ,SAAS,KAAK;AAC7G,QAAI;AAAO,aAAO,YAAY;AAC9B,QAAI;AAAQ,aAAO,aAAa;AAChC,WAAO,KAAK,MAAM,KAAK,KAAK,CAAC,SAAS;AACpC,aAAO,KAAK,IAAI,CAAC,QAAQ,KAAK,MAAM,MAAM,MAAM;AAAA;AAAA;AAAA,EAI5C,aAAa,MAAc,MAAW,OAAe,QAAiB;AAC5E,UAAM,UAAU,SAAS;AAGzB,QAAI,SAAS,MAAM;AACjB,UAAI,2BAAW,KAAK,WAAW,CAAC,QAAQ;AACtC,eAAO,KAAK,IAAI,UAAU,KAAK,QAAQ,MAAM;AAAA,aACxC;AACL,eAAO,UAAU;AAAA;AAAA;AAKrB,UAAM,YAAY,UAAU;AAC5B,QAAI,QAAQ;AACZ,eAAW,OAAO,MAAM;AACtB,UAAI,CAAC,IAAI,WAAW,QAAQ;AAAM;AAClC,YAAM,OAAO,IAAI,MAAM,MAAM,SAAS,GAAG,MAAM;AAC/C,cAAQ,YAAY,YAAY,KAAK,IAAI,UAAO,KAAK,SAAQ,KAAK,SAAS,KAAK,IAAI,UAAU,KAAK;AAAA;AAGrG,QAAI,UAAU,WAAW;AACvB,aAAO;AAAA,WACF;AACL,aAAO;AAAA;AAAA;AAAA,QAIL,IAAI,MAAoB,OAAc,MAAU;AACpD,WAAO,KAAK,MAAM,OAAO,MAAM;AAC/B,UAAM,EAAE,WAAW,KAAK,aAAa;AACrC,UAAM,KAAK,YAAY;AACvB,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,QAAI,WAAW;AAAK;AACpB,UAAM,eAAe,CAAC,GAAG,IAAI,IAAI,OAAO,KAAK,MAAM,IAAI,CAAC,QAAQ;AAC9D,aAAO,OAAO,KAAK,QAAQ,KAAK,WAAS,UAAU,OAAO
,IAAI,WAAW,QAAQ;AAAA;AAGnF,UAAM,SAAS,aAAa,IAAI,CAAC,UAAU;AACzC,YAAM,UAAU,SAAS;AACzB,aAAO,GAAG,aAAa,KAAK,aAAa,MAAM,MAAM,OAAO;AAAA,OAC3D,KAAK;AAER,UAAM,KAAK,MAAM,UAAU,YAAY,gBAAgB;AAAA;AAAA,QAGnD,OAAO,MAAoB,OAAc;AAC7C,UAAM,KAAK,YAAY;AACvB,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,QAAI,WAAW;AAAK;AACpB,UAAM,KAAK,MAAM,0BAA0B,QAAQ,CAAC;AAAA;AAAA,QAGhD,OAA+B,MAAS,MAAU;AACtD,UAAM,KAAK,YAAY;AACvB,WAAO,KAAK,MAAM,OAAO,MAAM;AAC/B,UAAM,YAAY,KAAK,MAAM,OAAO,MAAM;AAC1C,UAAM,EAAE,SAAS,YAAY,KAAK,aAAa;AAC/C,UAAM,OAAO,OAAO,KAAK;AACzB,UAAM,SAAS,MAAM,KAAK,MACxB,mBAAmB,KAAK,UAAU,kBAAkB,KAAK,IAAI,MAAM,KAAK,KAAK,UAC7E,CAAC,MAAM,GAAG,KAAK,cAAc,MAAM,WAAW;AAEhD,QAAI,CAAC;AAAS,aAAO;AACrB,WAAO,iCAAK,OAAL,GAAY,UAAoB,OAAO;AAAA;AAAA,QAG1C,OAAO,MAAoB,MAAa,MAAyB;AACrE,QAAI,CAAC,KAAK;AAAQ;AAClB,WAAO,KAAK,IAAI,UAAQ,KAAK,MAAM,OAAO,MAAM;AAChD,UAAM,KAAK,YAAY;AAEvB,UAAM,EAAE,QAAQ,YAAY,KAAK,aAAa;AAC9C,UAAM,SAAS;AACf,UAAM,YAAY,KAAK,IAAI,CAAC,SAAS;AACnC,aAAO,OAAO,QAAQ;AACtB,aAAO,KAAK,MAAM,OAAO,MAAM,8BAAc,KAAK,MAAM,OAAO,OAAO;AAAA;AAExE,UAAM,cAAc,6BAAU,QAAQ;AACtC,UAAM,aAAa,CAAC,GAAG,IAAI,IAAI,OAAO,KAAK,QAAQ,IAAI,CAAC,QAAQ;AAC9D,aAAO,OAAO,KAAK,QAAQ,KAAK,WAAS,UAAU,OAAO,IAAI,WAAW,QAAQ;AAAA;AAEnF,UAAM,eAAe,8BAAW,YAAY;AAE5C,UAAM,eAAe,wBAAC,SAAc,KAAK,IAAI,WAAW,wBAAK,MAAM,eAA9C;AACrB,UAAM,oBAAoB,wBAAC,UAAiB;AAC1C,UAAI,MAAM,WAAW,GAAG;AACtB,eAAO,aAAa,MAAM;AAAA,iBACjB,YAAY,WAAW,GAAG;AACnC,cAAM,MAAM,YAAY;AACxB,eAAO,KAAK,IAAI,WAAW,GAAG,MAAM,MAAM,IAAI,UAAQ,KAAK;AAAA,aACtD;AACL,eAAO,MAAM,IAAI,cAAc,KAAK;AAAA;AAAA,OAPd;AAW1B,UAAM,SAAS,aAAa,IAAI,CAAC,UAAU;AACzC,YAAM,UAAU,SAAS;AACzB,YAAM,WAAwB;AAC9B,WAAK,QAAQ,CAAC,SAAS;AAvb7B;AAwbQ,QAAC,qBAAS,KAAK,aAAa,MAAM,MAAM,OAAO,WAA9C,2BAAyD,IAAI,KAAK;AAAA;AAGrE,YAAM,UAAU,OAAO,QAAQ,UAC5B,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,kBAAkB,QAAQ,OAClD,KAAK,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,SAAS,EAAE,QAChC;AAEH,UAAI,QAAQ,QAAQ,GAAG;AACvB,eAAS,QAAQ,GAAG,QAAQ,QAAQ,QAAQ,SAAS;AACnD,gBAAQ,MAAM,QAAQ,OAAO,OAAO,QAAQ,OAAO,OAAO;AAAA;AAE5D,aAAO,GAAG,aAAa;AAAA,OACtB,KAAK;AAER,UAAM,aAAa,OAAO,KAAK;AAC/B,UAAM,cAAc,IAAI,WAAW,IAAI,MAAM,KAAK,KAAK;AACvD,UAAM,KAAK,MACT,eAAe,KAAK,IAAI,SAAS,UAAU,KAAK,UAAU,uBAAuB,KAAK,IAAI,MAAM,aAAa,KAAK;AAAA,gCACxF,UAC1B,GAAG,OAAO,GAAG,UAAU,IAAI,UAAQ,KAAK,cAAc,MAAM,MAAM;AAAA;AAAA,QAIhE,KAAK,MAAoB,MAAW,OAAc;AACtD,UAAM,KAAK,YAAY;AACvB,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAM,SAAS,KAAK,IAAI,UAAU;AAClC,UAAM,CAAC,QAAQ,MAAM,KAAK,MAAM,UAAU,wBAAwB,cAAc;AAChF,WAAO,KAAK;AAAA;AAAA;AAjXhB;AAqXA,UAAU,gBAAV;AAGS,EAAM,wBAAS,qBAAO,OAAO;AAAA,IAClC,MAAM,qBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA,IACvD,MAAM,qBAAO,UAAU,IAAI,OAAO,YAAY,aAAa,QAAQ;AAAA,IACnE,MAAM,qBAAO,SAAS,YAAY,YAAY,QAAQ;AAAA,IACtD,UAAU,qBAAO,SAAS,YAAY,WAAW,KAAK;AAAA,IACtD,UAAU,qBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA;AAYtD,EAAM,wBAAuB;AAAA,IAClC,MAAM;AAAA,IACN,SAAS;AAAA;AAAA,GAtBH;AA0BV,IAAO,cAAQ;",
 "names": []
 }
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@koishijs/plugin-database-mysql",
   "description": "MySQL support for Koishi",
-  "version": "4.
+  "version": "4.2.0",
   "main": "lib/index.js",
   "typings": "lib/index.d.ts",
   "files": [
@@ -33,11 +33,11 @@
     "@koishijs/plugin-mock": "^1.0.3"
   },
   "peerDependencies": {
-    "koishi": "^4.
+    "koishi": "^4.5.0"
   },
   "dependencies": {
-    "@koishijs/orm
-    "@koishijs/sql-utils": "^1.
+    "@koishijs/orm": "^1.0.0",
+    "@koishijs/sql-utils": "^1.1.0",
     "@types/mysql": "^2.15.21",
     "@vlasky/mysql": "^2.18.5"
   }