@koishijs/plugin-database-mongo 3.0.0 → 3.0.4
- package/lib/index.d.ts +2 -3
- package/lib/index.js +113 -24
- package/lib/index.js.map +2 -2
- package/package.json +7 -6
package/lib/index.d.ts
CHANGED
@@ -7,9 +7,6 @@ declare module 'koishi' {
     interface Database {
         mongo: MongoDatabase;
     }
-    interface Modules {
-        'database-mongo': typeof import('.');
-    }
 }
 declare type TableType = keyof Tables;
 declare class MongoDatabase extends Database {
@@ -24,6 +21,8 @@ declare class MongoDatabase extends Database {
     private connectionStringFromConfig;
     start(): Promise<void>;
     stop(): Promise<void>;
+    private _createIndexes;
+    private _createFields;
     /** synchronize table schema */
     private _syncTable;
     private _createFilter;
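
The `Modules` augmentation (which mapped the `'database-mongo'` plugin name to its module type) is dropped, while the `Database` augmentation that exposes the driver stays, so downstream plugins keep a typed `mongo` handle. A minimal sketch of what that surviving declaration is used for (hypothetical helper and collection name, assuming a Koishi v4 context with the database service loaded; at runtime the property comes from `public mongo = this` on the class):

    // inside some plugin, with a Koishi context `ctx`
    async function countUsers(ctx) {
      const driver = ctx.database.mongo
      // `driver.db` is the underlying mongodb Db, so the raw driver API is reachable
      return driver.db.collection('user').countDocuments()
    }
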
package/lib/index.js
CHANGED
@@ -48,53 +48,114 @@ var import_orm_utils = __toModule(require("@koishijs/orm-utils"));
 
 // plugins/database/mongo/src/utils.ts
 var import_koishi = __toModule(require("koishi"));
-function transformFieldQuery(query, key) {
+function createFieldFilter(query, key) {
+  const filters = [];
+  const result = {};
+  const child = transformFieldQuery(query, key, filters);
+  if (child === false)
+    return false;
+  if (child !== true)
+    result[key] = child;
+  if (filters.length)
+    result.$and = filters;
+  if (Object.keys(result).length)
+    return result;
+  return true;
+}
+__name(createFieldFilter, "createFieldFilter");
+function transformFieldQuery(query, key, filters) {
   if (typeof query === "string" || typeof query === "number" || query instanceof Date) {
     return { $eq: query };
   } else if (Array.isArray(query)) {
     if (!query.length)
-      return;
+      return false;
     return { $in: query };
   } else if (query instanceof RegExp) {
     return { $regex: query };
   }
   const result = {};
   for (const prop in query) {
-    if (prop === "$el") {
-      result.$elemMatch = transformFieldQuery(query[prop], key);
+    if (prop === "$and") {
+      for (const item of query[prop]) {
+        const child = createFieldFilter(item, key);
+        if (child === false)
+          return false;
+        if (child !== true)
+          filters.push(child);
+      }
+    } else if (prop === "$or") {
+      const $or = [];
+      if (!query[prop].length)
+        return false;
+      const always = query[prop].some((item) => {
+        const child = createFieldFilter(item, key);
+        if (typeof child === "boolean")
+          return child;
+        $or.push(child);
+      });
+      if (!always)
+        filters.push({ $or });
+    } else if (prop === "$not") {
+      const child = createFieldFilter(query[prop], key);
+      if (child === true)
+        return false;
+      if (child !== false)
+        filters.push({ $nor: [child] });
+    } else if (prop === "$el") {
+      const child = transformFieldQuery(query[prop], key, filters);
+      if (child === false)
+        return false;
+      if (child !== true)
+        result.$elemMatch = child;
    } else if (prop === "$regexFor") {
-      result.$expr = {
-        body(data, value) {
-          return new RegExp(data, "i").test(value);
-        },
-        args: ["$" + key, query],
-        lang: "js"
-      };
+      filters.push({
+        $expr: {
+          $function: {
+            body: function(data, value) {
+              return new RegExp(data, "i").test(value);
+            }.toString(),
+            args: ["$" + key, query.$regexFor],
+            lang: "js"
+          }
+        }
+      });
     } else {
       result[prop] = query[prop];
     }
   }
+  if (!Object.keys(result).length)
+    return true;
   return result;
 }
 __name(transformFieldQuery, "transformFieldQuery");
 function transformQuery(query) {
   const filter = {};
+  const additional = [];
   for (const key in query) {
     const value = query[key];
     if (key === "$and" || key === "$or") {
       if (value.length) {
         filter[key] = value.map(transformQuery);
       } else if (key === "$or") {
-        return { $nor: [{}] };
+        return;
       }
     } else if (key === "$not") {
-      filter.$nor = [transformQuery(value)];
+      const query2 = transformQuery(value);
+      if (query2)
+        filter.$nor = [query2];
     } else if (key === "$expr") {
-      filter[key] = transformEval(value);
+      additional.push({ $expr: transformEval(value) });
     } else {
-      filter[key] = transformFieldQuery(value, key);
+      const query2 = transformFieldQuery(value, key, additional);
+      if (query2 === false)
+        return;
+      if (query2 !== true)
+        filter[key] = query2;
     }
   }
+  if (additional.length) {
+    (filter.$and || (filter.$and = [])).push(...additional);
+  }
   return filter;
 }
 __name(transformQuery, "transformQuery");
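
Two things change in the query translation above. First, field-level `$and`/`$or`/`$not` and empty lists are now handled through `createFieldFilter`, which uses `true`/`false` as sentinels for "matches everything" and "matches nothing", so callers can drop or short-circuit the filter. Second, `$regexFor` now emits a real aggregation `$expr` with a server-side `$function` and pushes it into the shared `filters` array instead of overwriting `result.$expr`. A sketch of the document that branch would push for a hypothetical query `{ regex: { $regexFor: input } }` (field name and input are made up; the shape mirrors the added lines):

    const key = 'regex'
    const input = 'hello koishi'

    // the stored field value is the pattern and the queried string is the input,
    // the reversed situation that a plain $regex cannot express
    const filter = {
      $expr: {
        $function: {
          body: function (data, value) {
            return new RegExp(data, 'i').test(value)
          }.toString(),
          args: ['$' + key, input],
          lang: 'js',
        },
      },
    }

    console.log(JSON.stringify(filter, null, 2))
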
@@ -177,10 +238,9 @@ var MongoDatabase = class extends import_koishi2.Database {
   stop() {
     return this.client.close();
   }
-  async _syncTable(name) {
-    await this._tableTasks[name];
-    const coll = await this.db.createCollection(name).catch(() => this.db.collection(name));
+  async _createIndexes(name) {
     const { primary, unique } = this.ctx.model.config[name];
+    const coll = this.db.collection(name);
     const newSpecs = [];
     const oldSpecs = await coll.indexes();
     [primary, ...unique].forEach((keys, index) => {
@@ -188,13 +248,34 @@ var MongoDatabase = class extends import_koishi2.Database {
       const name2 = (index ? "unique:" : "primary:") + keys.join("+");
       if (oldSpecs.find((spec) => spec.name === name2))
         return;
-      const key = Object.fromEntries(keys.map((key) => [key, 1]));
-      newSpecs.push({ name: name2, key, unique: true });
+      newSpecs.push({
+        name: name2,
+        key: Object.fromEntries(keys.map((key) => [key, 1])),
+        unique: true,
+        partialFilterExpression: Object.fromEntries(keys.map((key) => [key, { $exists: true }]))
+      });
     });
     if (!newSpecs.length)
       return;
     await coll.createIndexes(newSpecs);
   }
+  async _createFields(name) {
+    const { fields } = this.ctx.model.config[name];
+    const coll = this.db.collection(name);
+    await Promise.all(Object.keys(fields).map((key) => {
+      if ((0, import_koishi2.isNullable)(fields[key].initial))
+        return;
+      return coll.updateMany({ [key]: { $exists: false } }, { $set: { [key]: fields[key].initial } });
+    }));
+  }
+  async _syncTable(name) {
+    await this._tableTasks[name];
+    await this.db.createCollection(name).catch(import_koishi2.noop);
+    await Promise.all([
+      this._createIndexes(name),
+      this._createFields(name)
+    ]);
+  }
   _createFilter(name, query) {
     return transformQuery(this.ctx.model.resolveQuery(name, query));
   }
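
The old `_syncTable` is split in two above: `_createIndexes` now builds partial unique indexes, so documents written before a key existed do not collide on the missing field, and the new `_createFields` backfills declared `initial` values into documents that predate a field. A sketch of the index descriptions `_createIndexes` would assemble for a hypothetical model declaring `primary: 'id'` and `unique: ['name']`:

    // illustrative model: primary key `id`, one unique constraint on `name`
    const specs = [
      {
        name: 'primary:id',
        key: { id: 1 },
        unique: true,
        // only documents that actually carry the field take part in the uniqueness check
        partialFilterExpression: { id: { $exists: true } },
      },
      {
        name: 'unique:name',
        key: { name: 1 },
        unique: true,
        partialFilterExpression: { name: { $exists: true } },
      },
    ]

    // handed to coll.createIndexes(specs), as in the hunk above
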
@@ -219,6 +300,9 @@ var MongoDatabase = class extends import_koishi2.Database {
   }
   async get(name, query, modifier) {
     const filter = this._createFilter(name, query);
+    if (!filter)
+      return [];
+    await this._tableTasks[name];
     let cursor = this.db.collection(name).find(filter);
     const { fields, limit, offset = 0, sort } = import_koishi2.Query.resolveModifier(modifier);
     cursor = cursor.project(__spreadValues({ _id: 0 }, Object.fromEntries((fields != null ? fields : []).map((key) => [key, 1]))));
@@ -231,11 +315,13 @@ var MongoDatabase = class extends import_koishi2.Database {
     return await cursor.toArray();
   }
   async set(name, query, update) {
+    const filter = this._createFilter(name, query);
+    if (!filter)
+      return;
     await this._tableTasks[name];
     const { primary } = this.ctx.model.config[name];
     const indexFields = (0, import_koishi2.makeArray)(primary);
     const updateFields = new Set(Object.keys(update).map((key) => key.split(".", 1)[0]));
-    const filter = this._createFilter(name, query);
     const coll = this.db.collection(name);
     const original = await coll.find(filter).toArray();
     if (!original.length)
@@ -248,6 +334,8 @@ var MongoDatabase = class extends import_koishi2.Database {
   }
   async remove(name, query) {
     const filter = this._createFilter(name, query);
+    if (!filter)
+      return;
     await this.db.collection(name).deleteMany(filter);
   }
   queue(name, callback) {
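
`get`, `set` and `remove` now share the same guard: when the rewritten `transformQuery` decides a query can never match (an empty `$or`, an empty `$in` list, a field-level contradiction), `_createFilter` yields nothing and the method returns early instead of sending an impossible filter to MongoDB. A sketch of the observable effect, assuming a connected `MongoDatabase` instance and an illustrative table name:

    async function demo(database) {
      // `{ id: [] }` is an empty $in list, so _createFilter() produces no filter
      const rows = await database.get('schedule', { id: [] })
      console.log(rows) // [] without a round trip to MongoDB

      // remove() with the same query is likewise a no-op now
      await database.remove('schedule', { id: [] })
    }
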
@@ -315,7 +403,8 @@ var MongoDatabase = class extends import_koishi2.Database {
     for (const task of tasks) {
       const { expr, table, query } = task;
       task.expr = transformEval(expr, (pipeline) => {
-        pipeline.unshift({ $match: this._createFilter(table, query) });
+        const filter = this._createFilter(table, query) || { _id: null };
+        pipeline.unshift({ $match: filter });
         stages.push({ $unionWith: { coll: table, pipeline } });
       });
     }
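
The aggregation path needs a fallback instead of an early return, because every eval task still has to contribute a `$unionWith` stage; `{ _id: null }` stands in as a filter that matches no documents. Roughly, a task on the `user` table whose query resolves to nothing would contribute a stage like the sketch below (illustrative; the `$group` stages appended by `transformEval` are omitted):

    const stage = {
      $unionWith: {
        coll: 'user',
        pipeline: [{ $match: { _id: null } }],
      },
    }
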
@@ -342,9 +431,9 @@ __name(MongoDatabase, "MongoDatabase");
   MongoDatabase2.Config = import_koishi2.Schema.object({
     protocol: import_koishi2.Schema.string().description("要使用的协议名。").default("mongodb"),
     host: import_koishi2.Schema.string().description("要连接到的主机名。").default("localhost"),
-    port: import_koishi2.Schema.number().description("要连接到的端口号。"),
+    port: import_koishi2.Schema.natural().max(65535).description("要连接到的端口号。"),
     username: import_koishi2.Schema.string().description("要使用的用户名。"),
-    password: import_koishi2.Schema.string().description("要使用的密码。"),
+    password: import_koishi2.Schema.string().description("要使用的密码。").role("secret"),
     database: import_koishi2.Schema.string().description("要访问的数据库名。").default("koishi")
   });
 })(MongoDatabase || (MongoDatabase = {}));
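
Finally, the config schema is tightened: `natural().max(65535)` rejects negative, fractional, and out-of-range port numbers, and `role('secret')` marks the password so the console renders it masked. For reference, a config in the shape this schema accepts, with illustrative values, and the connection string `connectionStringFromConfig()` would derive from it:

    const config = {
      protocol: 'mongodb',
      host: 'localhost',
      port: 27017, // must now be a natural number no larger than 65535
      username: 'koishi',
      password: 'hunter2', // masked in the console, stored as-is
      database: 'koishi',
    }

    // roughly: mongodb://koishi:hunter2@localhost:27017/koishi
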
package/lib/index.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../src/index.ts", "../src/utils.ts"],
-
"sourcesContent": ["import { MongoClient, Db, MongoError, IndexDescription } from 'mongodb'\nimport { Context, Database, Tables, makeArray, Schema, pick, omit, Query, Model, Dict, noop, KoishiError } from 'koishi'\nimport { URLSearchParams } from 'url'\nimport { executeUpdate, executeEval } from '@koishijs/orm-utils'\nimport { transformQuery, transformEval } from './utils'\n\ndeclare module 'koishi' {\n interface Database {\n mongo: MongoDatabase\n }\n\n interface Modules {\n 'database-mongo': typeof import('.')\n }\n}\n\ntype TableType = keyof Tables\n\ninterface EvalTask {\n expr: any\n table: TableType\n query: Query\n resolve: (value: any) => void\n reject: (error: Error) => void\n}\n\nclass MongoDatabase extends Database {\n public client: MongoClient\n public db: Db\n public mongo = this\n private _tableTasks: Dict<Promise<any>> = {}\n private _evalTasks: EvalTask[] = []\n\n constructor(public ctx: Context, private config: MongoDatabase.Config) {\n super(ctx)\n }\n\n private connectionStringFromConfig() {\n const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config\n let mongourl = `${protocol}://`\n if (username) mongourl += `${encodeURIComponent(username)}${password ? `:${encodeURIComponent(password)}` : ''}@`\n mongourl += `${host}${port ? `:${port}` : ''}/${authDatabase || name}`\n if (connectOptions) {\n const params = new URLSearchParams(connectOptions)\n mongourl += `?${params}`\n }\n return mongourl\n }\n\n async start() {\n const mongourl = this.config.uri || this.connectionStringFromConfig()\n this.client = await MongoClient.connect(mongourl)\n this.db = this.client.db(this.config.database)\n\n for (const name in this.ctx.model.config) {\n this._tableTasks[name] = this._syncTable(name)\n }\n\n this.ctx.on('model', (name) => {\n this._tableTasks[name] = this._syncTable(name)\n })\n }\n\n stop() {\n return this.client.close()\n }\n\n /** synchronize table schema */\n private async _syncTable(name: string) {\n await this._tableTasks[name]\n const coll = await this.db.createCollection(name).catch(() => this.db.collection(name))\n const { primary, unique } = this.ctx.model.config[name]\n const newSpecs: IndexDescription[] = []\n const oldSpecs = await coll.indexes()\n ;[primary, ...unique].forEach((keys, index) => {\n keys = makeArray(keys)\n const name = (index ? 
'unique:' : 'primary:') + keys.join('+')\n if (oldSpecs.find(spec => spec.name === name)) return\n const key = Object.fromEntries(keys.map(key => [key, 1]))\n newSpecs.push({ name, key, unique: true })\n })\n if (!newSpecs.length) return\n await coll.createIndexes(newSpecs)\n }\n\n private _createFilter(name: string, query: Query) {\n return transformQuery(this.ctx.model.resolveQuery(name, query))\n }\n\n async drop() {\n await Promise.all(Object.keys(this.ctx.model.config).map(name => this.db.dropCollection(name)))\n }\n\n private async _collStats() {\n const tables = Object.keys(this.ctx.model.config)\n const entries = await Promise.all(tables.map(async (name) => {\n const coll = this.db.collection(name)\n const { count, size } = await coll.stats()\n return [coll.collectionName, { count, size }] as const\n }))\n return Object.fromEntries(entries)\n }\n\n async stats() {\n // https://docs.mongodb.com/manual/reference/command/dbStats/#std-label-dbstats-output\n const [{ totalSize }, tables] = await Promise.all([\n this.db.stats(),\n this._collStats(),\n ])\n return { size: totalSize, tables }\n }\n\n async get(name: TableType, query: Query, modifier: Query.Modifier) {\n const filter = this._createFilter(name, query)\n let cursor = this.db.collection(name).find(filter)\n const { fields, limit, offset = 0, sort } = Query.resolveModifier(modifier)\n cursor = cursor.project({ _id: 0, ...Object.fromEntries((fields ?? []).map(key => [key, 1])) })\n if (offset) cursor = cursor.skip(offset)\n if (limit) cursor = cursor.limit(offset + limit)\n if (sort) cursor = cursor.sort(sort)\n return await cursor.toArray() as any\n }\n\n async set(name: TableType, query: Query, update: {}) {\n await this._tableTasks[name]\n const { primary } = this.ctx.model.config[name]\n const indexFields = makeArray(primary)\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const filter = this._createFilter(name, query)\n const coll = this.db.collection(name)\n const original = await coll.find(filter).toArray()\n if (!original.length) return\n const bulk = coll.initializeUnorderedBulkOp()\n for (const item of original) {\n bulk.find(pick(item, indexFields)).updateOne({ $set: pick(executeUpdate(item, update), updateFields) })\n }\n await bulk.execute()\n }\n\n async remove(name: TableType, query: Query) {\n const filter = this._createFilter(name, query)\n await this.db.collection(name).deleteMany(filter)\n }\n\n private queue(name: TableType, callback: () => Promise<any>) {\n return this._tableTasks[name] = Promise.resolve(this._tableTasks[name]).catch(noop).then(callback)\n }\n\n async create(name: TableType, data: any) {\n const coll = this.db.collection(name)\n return this.queue(name, async () => {\n const { primary, fields, autoInc } = this.ctx.model.config[name]\n if (autoInc && !Array.isArray(primary) && !(primary in data)) {\n const [latest] = await coll.find().sort(primary, -1).limit(1).toArray()\n data[primary] = latest ? 
+latest[primary] + 1 : 1\n if (Model.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n data[primary] = data[primary].padStart(8, '0')\n }\n }\n const copy = { ...this.ctx.model.create(name), ...data }\n try {\n await coll.insertOne(copy)\n delete copy._id\n return copy\n } catch (err) {\n if (err instanceof MongoError && err.code === 11000) {\n throw new KoishiError(err.message, 'database.duplicate-entry')\n }\n throw err\n }\n })\n }\n\n async upsert(name: TableType, data: any[], keys: string | string[]) {\n if (!data.length) return\n if (!keys) keys = this.ctx.model.config[name].primary\n const indexFields = makeArray(keys)\n await this._tableTasks[name]\n const coll = this.db.collection(name)\n const original = await coll.find({ $or: data.map(item => pick(item, indexFields)) }).toArray()\n const bulk = coll.initializeUnorderedBulkOp()\n for (const update of data) {\n const item = original.find(item => indexFields.every(key => item[key].valueOf() === update[key].valueOf()))\n if (item) {\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const override = omit(pick(executeUpdate(item, update), updateFields), indexFields)\n bulk.find(pick(item, indexFields)).updateOne({ $set: override })\n } else {\n bulk.insert(executeUpdate(this.ctx.model.create(name), update))\n }\n }\n await bulk.execute()\n }\n\n eval(table: TableType, expr: any, query: Query) {\n return new Promise<any>((resolve, reject) => {\n this._evalTasks.push({ expr, table, query, resolve, reject })\n process.nextTick(() => this._flushEvalTasks())\n })\n }\n\n private async _flushEvalTasks() {\n const tasks = this._evalTasks\n if (!tasks.length) return\n this._evalTasks = []\n\n const stages: any[] = [{ $match: { _id: null } }]\n for (const task of tasks) {\n const { expr, table, query } = task\n task.expr = transformEval(expr, (pipeline: any[]) => {\n pipeline.unshift({ $match: this._createFilter(table, query) })\n stages.push({ $unionWith: { coll: table, pipeline } })\n })\n }\n\n let data: any\n try {\n const results = await this.db.collection('user').aggregate(stages).toArray()\n data = Object.assign({}, ...results)\n } catch (error) {\n tasks.forEach(task => task.reject(error))\n return\n }\n\n for (const { expr, resolve, reject } of tasks) {\n try {\n resolve(executeEval(data, expr))\n } catch (error) {\n reject(error)\n }\n }\n }\n}\n\nnamespace MongoDatabase {\n export const name = 'database-mongo'\n\n export interface Config {\n username?: string\n password?: string\n protocol?: string\n host?: string\n port?: number\n /** database name */\n database?: string\n /** default auth database */\n authDatabase?: string\n connectOptions?: ConstructorParameters<typeof URLSearchParams>[0]\n /** connection string (will overwrite all configs except 'name') */\n uri?: string\n }\n\n export const Config = Schema.object({\n protocol: Schema.string().description('要使用的协议名。').default('mongodb'),\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.number().description('要连接到的端口号。'),\n username: Schema.string().description('要使用的用户名。'),\n password: Schema.string().description('要使用的密码。'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n })\n}\n\nexport default MongoDatabase\n", "import { Query, Random, valueMap } from 'koishi'\nimport { Filter, FilterOperators } from 'mongodb'\n\nfunction transformFieldQuery(query: Query.FieldQuery, key: string) {\n // shorthand syntax\n if (typeof query === 'string' || typeof query === 
'number' || query instanceof Date) {\n return { $eq: query }\n } else if (Array.isArray(query)) {\n if (!query.length) return\n return { $in: query }\n } else if (query instanceof RegExp) {\n return { $regex: query }\n }\n\n // query operators\n const result: FilterOperators<any> = {}\n for (const prop in query) {\n if (prop === '$el') {\n result.$elemMatch = transformFieldQuery(query[prop], key)\n } else if (prop === '$regexFor') {\n result.$expr = {\n body(data: string, value: string) {\n return new RegExp(data, 'i').test(value)\n },\n args: ['$' + key, query],\n lang: 'js',\n }\n } else {\n result[prop] = query[prop]\n }\n }\n return result\n}\n\nexport function transformQuery(query: Query.Expr) {\n const filter: Filter<any> = {}\n for (const key in query) {\n const value = query[key]\n if (key === '$and' || key === '$or') {\n // MongoError: $and/$or/$nor must be a nonempty array\n if (value.length) {\n filter[key] = value.map(transformQuery)\n } else if (key === '$or') {\n return { $nor: [{}] }\n }\n } else if (key === '$not') {\n // MongoError: unknown top level operator: $not\n // https://stackoverflow.com/questions/25270396/mongodb-how-to-invert-query-with-not\n filter.$nor = [transformQuery(value)]\n } else if (key === '$expr') {\n filter[key] = transformEval(value)\n } else {\n filter[key] = transformFieldQuery(value, key)\n }\n }\n return filter\n}\n\nfunction transformEvalExpr(expr: any, onAggr?: (pipeline: any[]) => void) {\n return valueMap(expr as any, (value) => {\n if (Array.isArray(value)) {\n return value.map(val => transformEval(val, onAggr))\n } else {\n return transformEval(value, onAggr)\n }\n })\n}\n\nfunction transformAggr(expr: any) {\n if (typeof expr === 'string') {\n return '$' + expr\n }\n return transformEvalExpr(expr)\n}\n\nconst aggrKeys = ['$sum', '$avg', '$min', '$max', '$count']\n\nexport function transformEval(expr: any, onAggr?: (pipeline: any[]) => void) {\n if (typeof expr === 'number' || typeof expr === 'string' || typeof expr === 'boolean') {\n return expr\n } else if (expr.$) {\n return '$' + expr.$\n }\n\n for (const key of aggrKeys) {\n if (!expr[key]) continue\n const value = transformAggr(expr[key])\n const $ = Random.id()\n if (key === '$count') {\n onAggr([\n { $group: { _id: value } },\n { $group: { _id: null, [$]: { $count: {} } } }\n ])\n } else {\n onAggr([{ $group: { _id: null, [$]: { [key]: value } } }])\n }\n return { $ }\n }\n\n return transformEvalExpr(expr, onAggr)\n}\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,qBAA8D;AAC9D,
+
"sourcesContent": ["import { MongoClient, Db, MongoError, IndexDescription } from 'mongodb'\nimport { Context, Database, Tables, makeArray, Schema, pick, omit, Query, Model, Dict, noop, KoishiError, isNullable } from 'koishi'\nimport { URLSearchParams } from 'url'\nimport { executeUpdate, executeEval } from '@koishijs/orm-utils'\nimport { transformQuery, transformEval } from './utils'\n\ndeclare module 'koishi' {\n interface Database {\n mongo: MongoDatabase\n }\n}\n\ntype TableType = keyof Tables\n\ninterface EvalTask {\n expr: any\n table: TableType\n query: Query\n resolve: (value: any) => void\n reject: (error: Error) => void\n}\n\nclass MongoDatabase extends Database {\n public client: MongoClient\n public db: Db\n public mongo = this\n private _tableTasks: Dict<Promise<any>> = {}\n private _evalTasks: EvalTask[] = []\n\n constructor(public ctx: Context, private config: MongoDatabase.Config) {\n super(ctx)\n }\n\n private connectionStringFromConfig() {\n const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config\n let mongourl = `${protocol}://`\n if (username) mongourl += `${encodeURIComponent(username)}${password ? `:${encodeURIComponent(password)}` : ''}@`\n mongourl += `${host}${port ? `:${port}` : ''}/${authDatabase || name}`\n if (connectOptions) {\n const params = new URLSearchParams(connectOptions)\n mongourl += `?${params}`\n }\n return mongourl\n }\n\n async start() {\n const mongourl = this.config.uri || this.connectionStringFromConfig()\n this.client = await MongoClient.connect(mongourl)\n this.db = this.client.db(this.config.database)\n\n for (const name in this.ctx.model.config) {\n this._tableTasks[name] = this._syncTable(name)\n }\n\n this.ctx.on('model', (name) => {\n this._tableTasks[name] = this._syncTable(name)\n })\n }\n\n stop() {\n return this.client.close()\n }\n\n private async _createIndexes(name: string) {\n const { primary, unique } = this.ctx.model.config[name]\n const coll = this.db.collection(name)\n const newSpecs: IndexDescription[] = []\n const oldSpecs = await coll.indexes()\n ;[primary, ...unique].forEach((keys, index) => {\n keys = makeArray(keys)\n const name = (index ? 
'unique:' : 'primary:') + keys.join('+')\n if (oldSpecs.find(spec => spec.name === name)) return\n newSpecs.push({\n name,\n key: Object.fromEntries(keys.map(key => [key, 1])),\n unique: true,\n // https://docs.mongodb.com/manual/core/index-partial/#std-label-partial-index-with-unique-constraints\n partialFilterExpression: Object.fromEntries(keys.map(key => [key, { $exists: true }])),\n })\n })\n if (!newSpecs.length) return\n await coll.createIndexes(newSpecs)\n }\n\n private async _createFields(name: string) {\n const { fields } = this.ctx.model.config[name]\n const coll = this.db.collection(name)\n await Promise.all(Object.keys(fields).map((key) => {\n if (isNullable(fields[key].initial)) return\n return coll.updateMany({ [key]: { $exists: false } }, { $set: { [key]: fields[key].initial } })\n }))\n }\n\n /** synchronize table schema */\n private async _syncTable(name: string) {\n await this._tableTasks[name]\n await this.db.createCollection(name).catch(noop)\n await Promise.all([\n this._createIndexes(name),\n this._createFields(name),\n ])\n }\n\n private _createFilter(name: string, query: Query) {\n return transformQuery(this.ctx.model.resolveQuery(name, query))\n }\n\n async drop() {\n await Promise.all(Object.keys(this.ctx.model.config).map(name => this.db.dropCollection(name)))\n }\n\n private async _collStats() {\n const tables = Object.keys(this.ctx.model.config)\n const entries = await Promise.all(tables.map(async (name) => {\n const coll = this.db.collection(name)\n const { count, size } = await coll.stats()\n return [coll.collectionName, { count, size }] as const\n }))\n return Object.fromEntries(entries)\n }\n\n async stats() {\n // https://docs.mongodb.com/manual/reference/command/dbStats/#std-label-dbstats-output\n const [{ totalSize }, tables] = await Promise.all([\n this.db.stats(),\n this._collStats(),\n ])\n return { size: totalSize, tables }\n }\n\n async get(name: TableType, query: Query, modifier: Query.Modifier) {\n const filter = this._createFilter(name, query)\n if (!filter) return []\n await this._tableTasks[name]\n let cursor = this.db.collection(name).find(filter)\n const { fields, limit, offset = 0, sort } = Query.resolveModifier(modifier)\n cursor = cursor.project({ _id: 0, ...Object.fromEntries((fields ?? 
[]).map(key => [key, 1])) })\n if (offset) cursor = cursor.skip(offset)\n if (limit) cursor = cursor.limit(offset + limit)\n if (sort) cursor = cursor.sort(sort)\n return await cursor.toArray() as any\n }\n\n async set(name: TableType, query: Query, update: {}) {\n const filter = this._createFilter(name, query)\n if (!filter) return\n await this._tableTasks[name]\n const { primary } = this.ctx.model.config[name]\n const indexFields = makeArray(primary)\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const coll = this.db.collection(name)\n const original = await coll.find(filter).toArray()\n if (!original.length) return\n const bulk = coll.initializeUnorderedBulkOp()\n for (const item of original) {\n bulk.find(pick(item, indexFields)).updateOne({ $set: pick(executeUpdate(item, update), updateFields) })\n }\n await bulk.execute()\n }\n\n async remove(name: TableType, query: Query) {\n const filter = this._createFilter(name, query)\n if (!filter) return\n await this.db.collection(name).deleteMany(filter)\n }\n\n private queue(name: TableType, callback: () => Promise<any>) {\n return this._tableTasks[name] = Promise.resolve(this._tableTasks[name]).catch(noop).then(callback)\n }\n\n async create(name: TableType, data: any) {\n const coll = this.db.collection(name)\n return this.queue(name, async () => {\n const { primary, fields, autoInc } = this.ctx.model.config[name]\n if (autoInc && !Array.isArray(primary) && !(primary in data)) {\n const [latest] = await coll.find().sort(primary, -1).limit(1).toArray()\n data[primary] = latest ? +latest[primary] + 1 : 1\n if (Model.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n data[primary] = data[primary].padStart(8, '0')\n }\n }\n const copy = { ...this.ctx.model.create(name), ...data }\n try {\n await coll.insertOne(copy)\n delete copy._id\n return copy\n } catch (err) {\n if (err instanceof MongoError && err.code === 11000) {\n throw new KoishiError(err.message, 'database.duplicate-entry')\n }\n throw err\n }\n })\n }\n\n async upsert(name: TableType, data: any[], keys: string | string[]) {\n if (!data.length) return\n if (!keys) keys = this.ctx.model.config[name].primary\n const indexFields = makeArray(keys)\n await this._tableTasks[name]\n const coll = this.db.collection(name)\n const original = await coll.find({ $or: data.map(item => pick(item, indexFields)) }).toArray()\n const bulk = coll.initializeUnorderedBulkOp()\n for (const update of data) {\n const item = original.find(item => indexFields.every(key => item[key].valueOf() === update[key].valueOf()))\n if (item) {\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const override = omit(pick(executeUpdate(item, update), updateFields), indexFields)\n bulk.find(pick(item, indexFields)).updateOne({ $set: override })\n } else {\n bulk.insert(executeUpdate(this.ctx.model.create(name), update))\n }\n }\n await bulk.execute()\n }\n\n eval(table: TableType, expr: any, query: Query) {\n return new Promise<any>((resolve, reject) => {\n this._evalTasks.push({ expr, table, query, resolve, reject })\n process.nextTick(() => this._flushEvalTasks())\n })\n }\n\n private async _flushEvalTasks() {\n const tasks = this._evalTasks\n if (!tasks.length) return\n this._evalTasks = []\n\n const stages: any[] = [{ $match: { _id: null } }]\n for (const task of tasks) {\n const { expr, table, query } = task\n task.expr = transformEval(expr, (pipeline) => {\n const filter = this._createFilter(table, query) || { _id: null 
}\n pipeline.unshift({ $match: filter })\n stages.push({ $unionWith: { coll: table, pipeline } })\n })\n }\n\n let data: any\n try {\n const results = await this.db.collection('user').aggregate(stages).toArray()\n data = Object.assign({}, ...results)\n } catch (error) {\n tasks.forEach(task => task.reject(error))\n return\n }\n\n for (const { expr, resolve, reject } of tasks) {\n try {\n resolve(executeEval(data, expr))\n } catch (error) {\n reject(error)\n }\n }\n }\n}\n\nnamespace MongoDatabase {\n export const name = 'database-mongo'\n\n export interface Config {\n username?: string\n password?: string\n protocol?: string\n host?: string\n port?: number\n /** database name */\n database?: string\n /** default auth database */\n authDatabase?: string\n connectOptions?: ConstructorParameters<typeof URLSearchParams>[0]\n /** connection string (will overwrite all configs except 'name') */\n uri?: string\n }\n\n export const Config = Schema.object({\n protocol: Schema.string().description('要使用的协议名。').default('mongodb'),\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.natural().max(65535).description('要连接到的端口号。'),\n username: Schema.string().description('要使用的用户名。'),\n password: Schema.string().description('要使用的密码。').role('secret'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n })\n}\n\nexport default MongoDatabase\n", "import { Query, Random, valueMap } from 'koishi'\nimport { Filter, FilterOperators } from 'mongodb'\n\nfunction createFieldFilter(query: Query.FieldQuery, key: string) {\n const filters: Filter<any>[] = []\n const result: Filter<any> = {}\n const child = transformFieldQuery(query, key, filters)\n if (child === false) return false\n if (child !== true) result[key] = child\n if (filters.length) result.$and = filters\n if (Object.keys(result).length) return result\n return true\n}\n\nfunction transformFieldQuery(query: Query.FieldQuery, key: string, filters: Filter<any>[]) {\n // shorthand syntax\n if (typeof query === 'string' || typeof query === 'number' || query instanceof Date) {\n return { $eq: query }\n } else if (Array.isArray(query)) {\n if (!query.length) return false\n return { $in: query }\n } else if (query instanceof RegExp) {\n return { $regex: query }\n }\n\n // query operators\n const result: FilterOperators<any> = {}\n for (const prop in query) {\n if (prop === '$and') {\n for (const item of query[prop]) {\n const child = createFieldFilter(item, key)\n if (child === false) return false\n if (child !== true) filters.push(child)\n }\n } else if (prop === '$or') {\n const $or: Filter<any>[] = []\n if (!query[prop].length) return false\n const always = query[prop].some((item) => {\n const child = createFieldFilter(item, key)\n if (typeof child === 'boolean') return child\n $or.push(child)\n })\n if (!always) filters.push({ $or })\n } else if (prop === '$not') {\n const child = createFieldFilter(query[prop], key)\n if (child === true) return false\n if (child !== false) filters.push({ $nor: [child] })\n } else if (prop === '$el') {\n const child = transformFieldQuery(query[prop], key, filters)\n if (child === false) return false\n if (child !== true) result.$elemMatch = child\n } else if (prop === '$regexFor') {\n filters.push({\n $expr: {\n $function: {\n body: function (data: string, value: string) {\n return new RegExp(data, 'i').test(value)\n }.toString(),\n args: ['$' + key, query.$regexFor],\n lang: 'js',\n },\n },\n })\n } else {\n result[prop] = query[prop]\n }\n }\n if 
(!Object.keys(result).length) return true\n return result\n}\n\nexport function transformQuery(query: Query.Expr) {\n const filter: Filter<any> = {}\n const additional: Filter<any>[] = []\n for (const key in query) {\n const value = query[key]\n if (key === '$and' || key === '$or') {\n // MongoError: $and/$or/$nor must be a nonempty array\n // { $and: [] } matches everything\n // { $or: [] } matches nothing\n if (value.length) {\n filter[key] = value.map(transformQuery)\n } else if (key === '$or') {\n return\n }\n } else if (key === '$not') {\n // MongoError: unknown top level operator: $not\n // https://stackoverflow.com/questions/25270396/mongodb-how-to-invert-query-with-not\n // this may solve this problem but lead to performance degradation\n const query = transformQuery(value)\n if (query) filter.$nor = [query]\n } else if (key === '$expr') {\n additional.push({ $expr: transformEval(value) })\n } else {\n const query = transformFieldQuery(value, key, additional)\n if (query === false) return\n if (query !== true) filter[key] = query\n }\n }\n if (additional.length) {\n (filter.$and ||= []).push(...additional)\n }\n return filter\n}\n\nfunction transformEvalExpr(expr: any, onAggr?: (pipeline: any[]) => void) {\n return valueMap(expr as any, (value) => {\n if (Array.isArray(value)) {\n return value.map(val => transformEval(val, onAggr))\n } else {\n return transformEval(value, onAggr)\n }\n })\n}\n\nfunction transformAggr(expr: any) {\n if (typeof expr === 'string') {\n return '$' + expr\n }\n return transformEvalExpr(expr)\n}\n\nconst aggrKeys = ['$sum', '$avg', '$min', '$max', '$count']\n\nexport function transformEval(expr: any, onAggr?: (pipeline: any[]) => void) {\n if (typeof expr === 'number' || typeof expr === 'string' || typeof expr === 'boolean') {\n return expr\n } else if (expr.$) {\n return '$' + expr.$\n }\n\n for (const key of aggrKeys) {\n if (!expr[key]) continue\n const value = transformAggr(expr[key])\n const $ = Random.id()\n if (key === '$count') {\n onAggr([\n { $group: { _id: value } },\n { $group: { _id: null, [$]: { $count: {} } } }\n ])\n } else {\n onAggr([{ $group: { _id: null, [$]: { [key]: value } } }])\n }\n return { $ }\n }\n\n return transformEvalExpr(expr, onAggr)\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,qBAA8D;AAC9D,qBAA4H;AAC5H,iBAAgC;AAChC,uBAA2C;;;ACH3C,oBAAwC;AAGxC,2BAA2B,OAAyB,KAAa;AAC/D,QAAM,UAAyB;AAC/B,QAAM,SAAsB;AAC5B,QAAM,QAAQ,oBAAoB,OAAO,KAAK;AAC9C,MAAI,UAAU;AAAO,WAAO;AAC5B,MAAI,UAAU;AAAM,WAAO,OAAO;AAClC,MAAI,QAAQ;AAAQ,WAAO,OAAO;AAClC,MAAI,OAAO,KAAK,QAAQ;AAAQ,WAAO;AACvC,SAAO;AAAA;AARA;AAWT,6BAA6B,OAAyB,KAAa,SAAwB;AAEzF,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,YAAY,iBAAiB,MAAM;AACnF,WAAO,EAAE,KAAK;AAAA,aACL,MAAM,QAAQ,QAAQ;AAC/B,QAAI,CAAC,MAAM;AAAQ,aAAO;AAC1B,WAAO,EAAE,KAAK;AAAA,aACL,iBAAiB,QAAQ;AAClC,WAAO,EAAE,QAAQ;AAAA;AAInB,QAAM,SAA+B;AACrC,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,QAAQ;AACnB,iBAAW,QAAQ,MAAM,OAAO;AAC9B,cAAM,QAAQ,kBAAkB,MAAM;AACtC,YAAI,UAAU;AAAO,iBAAO;AAC5B,YAAI,UAAU;AAAM,kBAAQ,KAAK;AAAA;AAAA,eAE1B,SAAS,OAAO;AACzB,YAAM,MAAqB;AAC3B,UAAI,CAAC,MAAM,MAAM;AAAQ,eAAO;AAChC,YAAM,SAAS,MAAM,MAAM,KAAK,CAAC,SAAS;AACxC,cAAM,QAAQ,kBAAkB,MAAM;AACtC,YAAI,OAAO,UAAU;AAAW,iBAAO;AACvC,YAAI,KAAK;AAAA;AAEX,UAAI,CAAC;AAAQ,gBAAQ,KAAK,EAAE;AAAA,eACnB,SAAS,QAAQ;AAC1B,YAAM,QAAQ,kBAAkB,MAAM,OAAO;AAC7C,UAAI,UAAU;AAAM,eAAO;AAC3B,UAAI,UAAU;AAAO,gBAAQ,KAAK,EAAE,MAAM,CAAC;AAAA,eAClC,SAAS,OAAO;AACzB,YAAM,QAAQ,oBAAoB,MAAM,OAAO,KAAK;AACpD,UAAI,UAAU;AAAO,eAAO;AAC5B,UAAI,UAAU;AAAM,eAAO,aAAa;AAAA,eAC/B,SAAS,aAAa;AAC/B,cAAQ,KAAK;AAAA,QACX,OAAO;AAAA,UACL,WAAW;AAAA,YACT,MAAM,SAAU,MAAc,OAAe;AAC3C,qBAAO,IAAI,OAAO,MAAM,KAAK,KAAK;AAAA,cAClC;AAAA,YACF,MAAM,CAAC,MAAM,KAAK,MAAM;AAAA,YACxB,MAAM;AAAA;AAAA;AAAA;AAAA,WAIP;AACL,aAAO,QAAQ,MAAM;AAAA;AAAA;AAGzB,MAAI,CAAC,OAAO,KAAK,QAAQ;AAAQ,WAAO;AACxC,SAAO;AAAA;AAtDA;AAyDF,wBAAwB,OAAmB;AAChD,QAAM,SAAsB;AAC5B,QAAM,aAA4B;AAClC,aAAW,OAAO,OAAO;AACvB,UAAM,QAAQ,MAAM;AACpB,QAAI,QAAQ,UAAU,QAAQ,OAAO;AAInC,UAAI,MAAM,QAAQ;AAChB,eAAO,OAAO,MAAM,IAAI;AAAA,iBACf,QAAQ,OAAO;AACxB;AAAA;AAAA,eAEO,QAAQ,QAAQ;AAIzB,YAAM,SAAQ,eAAe;AAC7B,UAAI;AAAO,eAAO,OAAO,CAAC;AAAA,eACjB,QAAQ,SAAS;AAC1B,iBAAW,KAAK,EAAE,OAAO,cAAc;AAAA,WAClC;AACL,YAAM,SAAQ,oBAAoB,OAAO,KAAK;AAC9C,UAAI,WAAU;AAAO;AACrB,UAAI,WAAU;AAAM,eAAO,OAAO;AAAA;AAAA;AAGtC,MAAI,WAAW,QAAQ;AACrB,IAAC,QAAO,QAAP,QAAO,OAAS,KAAI,KAAK,GAAG;AAAA;AAE/B,SAAO;AAAA;AA/BO;AAkChB,2BAA2B,MAAW,QAAoC;AACxE,SAAO,4BAAS,MAAa,CAAC,UAAU;AACtC,QAAI,MAAM,QAAQ,QAAQ;AACxB,aAAO,MAAM,IAAI,SAAO,cAAc,KAAK;AAAA,WACtC;AACL,aAAO,cAAc,OAAO;AAAA;AAAA;AAAA;AALzB;AAUT,uBAAuB,MAAW;AAChC,MAAI,OAAO,SAAS,UAAU;AAC5B,WAAO,MAAM;AAAA;AAEf,SAAO,kBAAkB;AAAA;AAJlB;AAOT,IAAM,WAAW,CAAC,QAAQ,QAAQ,QAAQ,QAAQ;AAE3C,uBAAuB,MAAW,QAAoC;AAC3E,MAAI,OAAO,SAAS,YAAY,OAAO,SAAS,YAAY,OAAO,SAAS,WAAW;AACrF,WAAO;AAAA,aACE,KAAK,GAAG;AACjB,WAAO,MAAM,KAAK;AAAA;AAGpB,aAAW,OAAO,UAAU;AAC1B,QAAI,CAAC,KAAK;AAAM;AAChB,UAAM,QAAQ,cAAc,KAAK;AACjC,UAAM,IAAI,qBAAO;AACjB,QAAI,QAAQ,UAAU;AACpB,aAAO;AAAA,QACL,EAAE,QAAQ,EAAE,KAAK;AAAA,QACjB,EAAE,QAAQ,EAAE,KAAK,OAAO,IAAI,EAAE,QAAQ;AAAA;AAAA,WAEnC;AACL,aAAO,CAAC,EAAE,QAAQ,EAAE,KAAK,OAAO,IAAI,GAAG,MAAM;AAAA;AAE/C,WAAO,EAAE;AAAA;AAGX,SAAO,kBAAkB,MAAM;AAAA;AAtBjB;;;ADtGhB,kCAA4B,wBAAS;AAAA,EAOnC,YAAmB,KAAsB,QAA8B;AACrE,UAAM;AADW;AAAsB;AAJlC,iBAAQ;AACP,uBAAkC;AAClC,sBAAyB;AAAA;AAAA,EAMzB,6BAA6B;AACnC,UAAM,EAAE,cAAc,gBAAgB,MAAM,UAAU,MAAM,UAAU,MAAM,UAAU,aAAa,KAAK;AACxG,QAAI,WAAW,GAAG;AAClB,QAAI;AAAU,kBAAY,GAAG,mBAAmB,YAAY,WAAW,IAAI,mBAAmB,cAAc;AAC5G,gBAAY,GAAG,OAAO,OAAO,IAAI,SAAS,MAAM,gBAAgB;AAChE,QAAI,gBAAgB;AAClB,YAAM,SAAS,IAAI,2BAAgB;AACnC,kBAAY,IAAI;AAAA;AAElB,WAAO;AAAA;AAAA,QAGH,QAAQ;AACZ,UAAM,WAAW,KAAK,OAAO,OAAO,KAAK;AACzC,SAAK,SAAS,MAAM,2BAAY,QAAQ;AACxC,SAAK,KAAK,KAAK,OAAO,GAAG,KAAK,OAAO;AAErC,eAAW,QAAQ,KAAK,IAAI,MAAM,QAAQ;AACxC,WAAK,YAAY,QAAQ,KAAK,WAAW;AAAA;AAG3C,SAAK,IAAI,GAAG,SAAS,CAAC,SAAS;AAC7B,WAAK,YAAY,QAAQ,KAAK,WAAW;AAAA
;AAAA;AAAA,EAI7C,OAAO;AACL,WAAO,KAAK,OAAO;AAAA;AAAA,QAGP,eAAe,MAAc;AACzC,UAAM,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,OAAO;AAClD,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,UAAM,WAA+B;AACrC,UAAM,WAAW,MAAM,KAAK;AAC3B,KAAC,SAAS,GAAG,QAAQ,QAAQ,CAAC,MAAM,UAAU;AAC7C,aAAO,8BAAU;AACjB,YAAM,QAAQ,SAAQ,YAAY,cAAc,KAAK,KAAK;AAC1D,UAAI,SAAS,KAAK,UAAQ,KAAK,SAAS;AAAO;AAC/C,eAAS,KAAK;AAAA,QACZ;AAAA,QACA,KAAK,OAAO,YAAY,KAAK,IAAI,SAAO,CAAC,KAAK;AAAA,QAC9C,QAAQ;AAAA,QAER,yBAAyB,OAAO,YAAY,KAAK,IAAI,SAAO,CAAC,KAAK,EAAE,SAAS;AAAA;AAAA;AAGjF,QAAI,CAAC,SAAS;AAAQ;AACtB,UAAM,KAAK,cAAc;AAAA;AAAA,QAGb,cAAc,MAAc;AACxC,UAAM,EAAE,WAAW,KAAK,IAAI,MAAM,OAAO;AACzC,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,UAAM,QAAQ,IAAI,OAAO,KAAK,QAAQ,IAAI,CAAC,QAAQ;AACjD,UAAI,+BAAW,OAAO,KAAK;AAAU;AACrC,aAAO,KAAK,WAAW,GAAG,MAAM,EAAE,SAAS,WAAW,EAAE,MAAM,GAAG,MAAM,OAAO,KAAK;AAAA;AAAA;AAAA,QAKzE,WAAW,MAAc;AACrC,UAAM,KAAK,YAAY;AACvB,UAAM,KAAK,GAAG,iBAAiB,MAAM,MAAM;AAC3C,UAAM,QAAQ,IAAI;AAAA,MAChB,KAAK,eAAe;AAAA,MACpB,KAAK,cAAc;AAAA;AAAA;AAAA,EAIf,cAAc,MAAc,OAAc;AAChD,WAAO,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAAA;AAAA,QAGpD,OAAO;AACX,UAAM,QAAQ,IAAI,OAAO,KAAK,KAAK,IAAI,MAAM,QAAQ,IAAI,UAAQ,KAAK,GAAG,eAAe;AAAA;AAAA,QAG5E,aAAa;AACzB,UAAM,SAAS,OAAO,KAAK,KAAK,IAAI,MAAM;AAC1C,UAAM,UAAU,MAAM,QAAQ,IAAI,OAAO,IAAI,OAAO,SAAS;AAC3D,YAAM,OAAO,KAAK,GAAG,WAAW;AAChC,YAAM,EAAE,OAAO,SAAS,MAAM,KAAK;AACnC,aAAO,CAAC,KAAK,gBAAgB,EAAE,OAAO;AAAA;AAExC,WAAO,OAAO,YAAY;AAAA;AAAA,QAGtB,QAAQ;AAEZ,UAAM,CAAC,EAAE,aAAa,UAAU,MAAM,QAAQ,IAAI;AAAA,MAChD,KAAK,GAAG;AAAA,MACR,KAAK;AAAA;AAEP,WAAO,EAAE,MAAM,WAAW;AAAA;AAAA,QAGtB,IAAI,MAAiB,OAAc,UAA0B;AACjE,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,QAAI,CAAC;AAAQ,aAAO;AACpB,UAAM,KAAK,YAAY;AACvB,QAAI,SAAS,KAAK,GAAG,WAAW,MAAM,KAAK;AAC3C,UAAM,EAAE,QAAQ,OAAO,SAAS,GAAG,SAAS,qBAAM,gBAAgB;AAClE,aAAS,OAAO,QAAQ,iBAAE,KAAK,KAAM,OAAO,YAAa,2BAAU,IAAI,IAAI,SAAO,CAAC,KAAK;AACxF,QAAI;AAAQ,eAAS,OAAO,KAAK;AACjC,QAAI;AAAO,eAAS,OAAO,MAAM,SAAS;AAC1C,QAAI;AAAM,eAAS,OAAO,KAAK;AAC/B,WAAO,MAAM,OAAO;AAAA;AAAA,QAGhB,IAAI,MAAiB,OAAc,QAAY;AACnD,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,QAAI,CAAC;AAAQ;AACb,UAAM,KAAK,YAAY;AACvB,UAAM,EAAE,YAAY,KAAK,IAAI,MAAM,OAAO;AAC1C,UAAM,cAAc,8BAAU;AAC9B,UAAM,eAAe,IAAI,IAAI,OAAO,KAAK,QAAQ,IAAI,SAAO,IAAI,MAAM,KAAK,GAAG;AAC9E,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,UAAM,WAAW,MAAM,KAAK,KAAK,QAAQ;AACzC,QAAI,CAAC,SAAS;AAAQ;AACtB,UAAM,OAAO,KAAK;AAClB,eAAW,QAAQ,UAAU;AAC3B,WAAK,KAAK,yBAAK,MAAM,cAAc,UAAU,EAAE,MAAM,yBAAK,oCAAc,MAAM,SAAS;AAAA;AAEzF,UAAM,KAAK;AAAA;AAAA,QAGP,OAAO,MAAiB,OAAc;AAC1C,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,QAAI,CAAC;AAAQ;AACb,UAAM,KAAK,GAAG,WAAW,MAAM,WAAW;AAAA;AAAA,EAGpC,MAAM,MAAiB,UAA8B;AAC3D,WAAO,KAAK,YAAY,QAAQ,QAAQ,QAAQ,KAAK,YAAY,OAAO,MAAM,qBAAM,KAAK;AAAA;AAAA,QAGrF,OAAO,MAAiB,MAAW;AACvC,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,WAAO,KAAK,MAAM,MAAM,YAAY;AAClC,YAAM,EAAE,SAAS,QAAQ,YAAY,KAAK,IAAI,MAAM,OAAO;AAC3D,UAAI,WAAW,CAAC,MAAM,QAAQ,YAAY,CAAE,YAAW,OAAO;AAC5D,cAAM,CAAC,UAAU,MAAM,KAAK,OAAO,KAAK,SAAS,IAAI,MAAM,GAAG;AAC9D,aAAK,WAAW,SAAS,CAAC,OAAO,WAAW,IAAI;AAChD,YAAI,qBAAM,MAAM,OAAO,SAAS,OAAO,SAAS,OAAO;AACrD,eAAK,YAAY;AACjB,eAAK,WAAW,KAAK,SAAS,SAAS,GAAG;AAAA;AAAA;AAG9C,YAAM,OAAO,kCAAK,KAAK,IAAI,MAAM,OAAO,QAAU;AAClD,UAAI;AACF,cAAM,KAAK,UAAU;AACrB,eAAO,KAAK;AACZ,eAAO;AAAA,eACA,KAAP;AACA,YAAI,eAAe,6BAAc,IAAI,SAAS,MAAO;AACnD,gBAAM,IAAI,2BAAY,IAAI,SAAS;AAAA;AAErC,cAAM;AAAA;AAAA;AAAA;AAAA,QAKN,OAAO,MAAiB,MAAa,MAAyB;AAClE,QAAI,CAAC,KAAK;AAAQ;AAClB,QAAI,CAAC;AAAM,aAAO,KAAK,IAAI,MAAM,OAAO,MAAM;AAC9C,UAAM,cAAc,8BAAU;AAC9B,UAAM,KAAK,YAAY;AACvB,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,UAAM,WAAW,MAAM,KAAK,KAAK,EAAE,KAAK,KAAK,IAAI,UAAQ,yBAAK,MAAM,iBAAiB;AACrF,UAAM,OAAO,KAAK;AAClB,eAAW,UAAU,MAAM;AACzB,YAAM,OAAO,SAAS,KAAK,WAAQ,YAAY,
MAAM,SAAO,MAAK,KAAK,cAAc,OAAO,KAAK;AAChG,UAAI,MAAM;AACR,cAAM,eAAe,IAAI,IAAI,OAAO,KAAK,QAAQ,IAAI,SAAO,IAAI,MAAM,KAAK,GAAG;AAC9E,cAAM,WAAW,yBAAK,yBAAK,oCAAc,MAAM,SAAS,eAAe;AACvE,aAAK,KAAK,yBAAK,MAAM,cAAc,UAAU,EAAE,MAAM;AAAA,aAChD;AACL,aAAK,OAAO,oCAAc,KAAK,IAAI,MAAM,OAAO,OAAO;AAAA;AAAA;AAG3D,UAAM,KAAK;AAAA;AAAA,EAGb,KAAK,OAAkB,MAAW,OAAc;AAC9C,WAAO,IAAI,QAAa,CAAC,SAAS,WAAW;AAC3C,WAAK,WAAW,KAAK,EAAE,MAAM,OAAO,OAAO,SAAS;AACpD,cAAQ,SAAS,MAAM,KAAK;AAAA;AAAA;AAAA,QAIlB,kBAAkB;AAC9B,UAAM,QAAQ,KAAK;AACnB,QAAI,CAAC,MAAM;AAAQ;AACnB,SAAK,aAAa;AAElB,UAAM,SAAgB,CAAC,EAAE,QAAQ,EAAE,KAAK;AACxC,eAAW,QAAQ,OAAO;AACxB,YAAM,EAAE,MAAM,OAAO,UAAU;AAC/B,WAAK,OAAO,cAAc,MAAM,CAAC,aAAa;AAC5C,cAAM,SAAS,KAAK,cAAc,OAAO,UAAU,EAAE,KAAK;AAC1D,iBAAS,QAAQ,EAAE,QAAQ;AAC3B,eAAO,KAAK,EAAE,YAAY,EAAE,MAAM,OAAO;AAAA;AAAA;AAI7C,QAAI;AACJ,QAAI;AACF,YAAM,UAAU,MAAM,KAAK,GAAG,WAAW,QAAQ,UAAU,QAAQ;AACnE,aAAO,OAAO,OAAO,IAAI,GAAG;AAAA,aACrB,OAAP;AACA,YAAM,QAAQ,UAAQ,KAAK,OAAO;AAClC;AAAA;AAGF,eAAW,EAAE,MAAM,SAAS,YAAY,OAAO;AAC7C,UAAI;AACF,gBAAQ,kCAAY,MAAM;AAAA,eACnB,OAAP;AACA,eAAO;AAAA;AAAA;AAAA;AAAA;AAtOf;AA4OA,UAAU,gBAAV;AACS,EAAM,sBAAO;AAiBb,EAAM,wBAAS,sBAAO,OAAO;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY,YAAY,QAAQ;AAAA,IAC1D,MAAM,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA,IACvD,MAAM,sBAAO,UAAU,IAAI,OAAO,YAAY;AAAA,IAC9C,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY,WAAW,KAAK;AAAA,IACtD,UAAU,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA;AAAA,GAxBrD;AA4BV,IAAO,cAAQ;",
   "names": []
 }
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@koishijs/plugin-database-mongo",
   "description": "MongoDB support for Koishi",
-  "version": "3.0.0",
+  "version": "3.0.4",
   "main": "lib/index.js",
   "typings": "lib/index.d.ts",
   "files": [
@@ -29,17 +29,18 @@
     "chatbot",
     "koishi",
     "database",
-    "mysql"
+    "mysql",
+    "impl:database"
   ],
   "devDependencies": {
-    "@koishijs/
-    "@koishijs/
+    "@koishijs/database-tests": "^1.0.0",
+    "@koishijs/plugin-mock": "^1.0.2"
   },
   "peerDependencies": {
-    "koishi": "^4.
+    "koishi": "^4.2.0"
   },
   "dependencies": {
     "@koishijs/orm-utils": "^1.0.0",
-    "mongodb": "^4.
+    "mongodb": "^4.3.0"
   }
 }