@koishijs/plugin-database-mongo 3.0.0-beta.5 → 3.0.0-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/index.d.ts CHANGED
@@ -1,9 +1,9 @@
  /// <reference types="node" />
- import { Query, Eval, Context, Database, Tables as KoishiTables, Schema } from 'koishi';
- import { MongoClient, Db } from 'mongodb';
+ import { Query, Context, Database, Tables as KoishiTables, Schema } from 'koishi';
+ import { Filter, MongoClient, Db } from 'mongodb';
  import { URLSearchParams } from 'url';
- export function transformQuery(query: Query.Expr): {};
- export function transformEval(expr: Eval.Numeric | Eval.Aggregation): any;
+ export function transformQuery(query: Query.Expr): Filter<any>;
+ export function transformEval(expr: any, aggrs?: any[][]): any;
  declare module 'koishi' {
  interface Database {
  mongo: MongoDatabase;
@@ -28,9 +28,20 @@ declare class MongoDatabase extends Database {
  stop(): Promise<void>;
  /** synchronize table schema */
  private _syncTable;
- drop(name: TableType): Promise<void>;
- get(name: TableType, query: Query, modifier: Query.Modifier): Promise<any[]>;
- set(name: TableType, query: Query, data: any): Promise<void>;
+ private _createFilter;
+ drop(): Promise<void>;
+ private _collStats;
+ stats(): Promise<{
+ size: any;
+ tables: {
+ [k: string]: {
+ readonly count: number;
+ readonly size: number;
+ };
+ };
+ }>;
+ get(name: TableType, query: Query, modifier: Query.Modifier): Promise<any>;
+ set(name: TableType, query: Query, update: {}): Promise<void>;
  remove(name: TableType, query: Query): Promise<void>;
  private queue;
  create(name: TableType, data: any): Promise<any>;
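The typings above reflect the new utils API: transformQuery() now returns a mongodb Filter<any> and transformEval() accepts an optional accumulator of aggregation pipelines, while drop() loses its table argument and stats() is new. A hedged usage sketch (the query object and field names are made up for illustration):

    import { transformQuery, transformEval } from '@koishijs/plugin-database-mongo'

    // a koishi Query.Expr becomes a mongodb Filter<any>
    const filter = transformQuery({ id: [1, 2, 3], flag: { $gt: 0 } })
    // -> { id: { $in: [1, 2, 3] }, flag: { $gt: 0 } }

    // transformEval() now threads an accumulator of aggregation pipelines:
    const aggrs: any[][] = []
    const ref = transformEval({ $sum: 'money' }, aggrs)
    // ref   -> { $: 'temp0' }
    // aggrs -> [[{ $group: { _id: null, temp0: { $sum: '$money' } } }]]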
package/lib/index.js CHANGED
@@ -44,6 +44,7 @@ __export(exports, {
  var import_mongodb = __toModule(require("mongodb"));
  var import_koishi2 = __toModule(require("koishi"));
  var import_url = __toModule(require("url"));
+ var import_orm_utils = __toModule(require("@koishijs/orm-utils"));

  // plugins/database/mongo/src/utils.ts
  var import_koishi = __toModule(require("koishi"));
@@ -87,7 +88,7 @@ function transformQuery(query) {
  return { $nor: [{}] };
  }
  } else if (key === "$not") {
- filter["$nor"] = [transformQuery(value)];
+ filter.$nor = [transformQuery(value)];
  } else if (key === "$expr") {
  filter[key] = transformEval(value);
  } else {
@@ -97,19 +98,46 @@ function transformQuery(query) {
  return filter;
  }
  __name(transformQuery, "transformQuery");
- function transformEval(expr) {
+ function transformEvalExpr(expr, aggrs) {
+ return (0, import_koishi.valueMap)(expr, (value, key) => {
+ if (Array.isArray(value)) {
+ return value.map((val) => transformEval(val, aggrs));
+ } else {
+ return transformEval(value, aggrs);
+ }
+ });
+ }
+ __name(transformEvalExpr, "transformEvalExpr");
+ function transformAggr(expr) {
  if (typeof expr === "string") {
  return "$" + expr;
- } else if (typeof expr === "number" || typeof expr === "boolean") {
+ }
+ return transformEvalExpr(expr);
+ }
+ __name(transformAggr, "transformAggr");
+ var aggrKeys = ["$sum", "$avg", "$min", "$max", "$count"];
+ function transformEval(expr, aggrs) {
+ if (typeof expr === "number" || typeof expr === "string" || typeof expr === "boolean") {
  return expr;
+ } else if (expr.$) {
+ return "$" + expr.$;
  }
- return (0, import_koishi.valueMap)(expr, (value) => {
- if (Array.isArray(value)) {
- return value.map(transformEval);
+ for (const key of aggrKeys) {
+ if (!expr[key])
+ continue;
+ const value = transformAggr(expr[key]);
+ const $ = "temp" + aggrs.length;
+ if (key === "$count") {
+ aggrs.push([
+ { $group: { _id: value } },
+ { $group: { _id: null, [$]: { $count: {} } } }
+ ]);
  } else {
- return transformEval(value);
+ aggrs.push([{ $group: { _id: null, [$]: { [key]: value } } }]);
  }
- });
+ return { $ };
+ }
+ return transformEvalExpr(expr, aggrs);
  }
  __name(transformEval, "transformEval");
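transformEval() no longer folds aggregations into a single $group document; each aggregation operator pushes its own pipeline into the aggrs accumulator and returns a { $ } placeholder, with $count expanding to a distinct-then-count pair. A hedged sketch of the output (field name hypothetical):

    import { transformEval } from '@koishijs/plugin-database-mongo'

    const aggrs: any[][] = []
    transformEval({ $count: 'authorId' }, aggrs)
    // returns { $: 'temp0' } and pushes a two-stage pipeline:
    // aggrs[0] -> [
    //   { $group: { _id: '$authorId' } },                  // collapse to distinct values
    //   { $group: { _id: null, temp0: { $count: {} } } },  // then count them
    // ]
    // every other aggregation key ($sum/$avg/$min/$max) pushes a single $group stage.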
 
@@ -136,10 +164,7 @@ var MongoDatabase = class extends import_koishi2.Database {
  }
  async start() {
  const mongourl = this.config.uri || this.connectionStringFromConfig();
- this.client = await import_mongodb.MongoClient.connect(mongourl, {
- useNewUrlParser: true,
- useUnifiedTopology: true
- });
+ this.client = await import_mongodb.MongoClient.connect(mongourl);
  this.db = this.client.db(this.config.database);
  for (const name in this.ctx.model.config) {
  this.tasks[name] = this._syncTable(name);
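The two legacy connect flags are dropped along with the move to the 4.x driver (see package.json below), which always uses the new URL parser and unified topology. A minimal standalone equivalent of the new call, for reference (URI and database name hypothetical):

    import { MongoClient } from 'mongodb'

    async function open(uri: string) {
      // no useNewUrlParser / useUnifiedTopology: the 4.x driver always behaves that way
      const client = await MongoClient.connect(uri)
      return client.db('koishi')
    }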
@@ -153,10 +178,10 @@ var MongoDatabase = class extends import_koishi2.Database {
  }
  async _syncTable(name) {
  await this.tasks[name];
- const col = await this.db.createCollection(name).catch(() => this.db.collection(name));
+ const coll = await this.db.createCollection(name).catch(() => this.db.collection(name));
  const { primary, unique } = this.ctx.model.config[name];
  const newSpecs = [];
- const oldSpecs = await col.indexes();
+ const oldSpecs = await coll.indexes();
  [primary, ...unique].forEach((keys, index) => {
  keys = (0, import_koishi2.makeArray)(keys);
  const name2 = (index ? "unique:" : "primary:") + keys.join("+");
@@ -167,44 +192,72 @@ var MongoDatabase = class extends import_koishi2.Database {
  });
  if (!newSpecs.length)
  return;
- await col.createIndexes(newSpecs);
+ await coll.createIndexes(newSpecs);
  }
- async drop(name) {
- if (name) {
- await this.db.collection(name).drop();
- } else {
- const collections = await this.db.collections();
- await Promise.all(collections.map((c) => c.drop()));
- }
+ _createFilter(name, query) {
+ return transformQuery(this.ctx.model.resolveQuery(name, query));
+ }
+ async drop() {
+ await Promise.all(Object.keys(this.ctx.model.config).map((name) => this.db.dropCollection(name)));
+ }
+ async _collStats() {
+ const tables = Object.keys(this.ctx.model.config);
+ const entries = await Promise.all(tables.map(async (name) => {
+ const coll = this.db.collection(name);
+ const { count, size } = await coll.stats();
+ return [coll.collectionName, { count, size }];
+ }));
+ return Object.fromEntries(entries);
+ }
+ async stats() {
+ const [{ totalSize }, tables] = await Promise.all([
+ this.db.stats(),
+ this._collStats()
+ ]);
+ return { size: totalSize, tables };
  }
  async get(name, query, modifier) {
- const filter = transformQuery(this.ctx.model.resolveQuery(name, query));
+ const filter = this._createFilter(name, query);
  let cursor = this.db.collection(name).find(filter);
- const { fields, limit, offset = 0 } = import_koishi2.Query.resolveModifier(modifier);
+ const { fields, limit, offset = 0, sort } = import_koishi2.Query.resolveModifier(modifier);
  cursor = cursor.project(__spreadValues({ _id: 0 }, Object.fromEntries((fields != null ? fields : []).map((key) => [key, 1]))));
  if (offset)
  cursor = cursor.skip(offset);
  if (limit)
  cursor = cursor.limit(offset + limit);
+ if (sort)
+ cursor = cursor.sort(sort);
  return await cursor.toArray();
  }
- async set(name, query, data) {
+ async set(name, query, update) {
  await this.tasks[name];
- const filter = transformQuery(this.ctx.model.resolveQuery(name, query));
- await this.db.collection(name).updateMany(filter, { $set: data });
+ const { primary } = this.ctx.model.config[name];
+ const indexFields = (0, import_koishi2.makeArray)(primary);
+ const updateFields = new Set(Object.keys(update).map((key) => key.split(".", 1)[0]));
+ const filter = this._createFilter(name, query);
+ const coll = this.db.collection(name);
+ const original = await coll.find(filter).toArray();
+ if (!original.length)
+ return;
+ const bulk = coll.initializeUnorderedBulkOp();
+ for (const item of original) {
+ bulk.find((0, import_koishi2.pick)(item, indexFields)).updateOne({ $set: (0, import_koishi2.pick)((0, import_orm_utils.executeUpdate)(item, update), updateFields) });
+ }
+ await bulk.execute();
  }
  async remove(name, query) {
- const filter = transformQuery(this.ctx.model.resolveQuery(name, query));
+ const filter = this._createFilter(name, query);
  await this.db.collection(name).deleteMany(filter);
  }
  queue(name, callback) {
  return this.tasks[name] = Promise.resolve(this.tasks[name]).catch(import_koishi2.noop).then(callback);
  }
  async create(name, data) {
+ const coll = this.db.collection(name);
  return this.queue(name, async () => {
  const { primary, fields, autoInc } = this.ctx.model.config[name];
  if (autoInc && !Array.isArray(primary) && !(primary in data)) {
- const [latest] = await this.db.collection(name).find().sort(primary, -1).limit(1).toArray();
+ const [latest] = await coll.find().sort(primary, -1).limit(1).toArray();
  data[primary] = latest ? +latest[primary] + 1 : 1;
  if (import_koishi2.Model.Field.string.includes(fields[primary].type)) {
  data[primary] += "";
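set() no longer issues a single updateMany with $set: it loads the matched rows, evaluates the update in memory with executeUpdate() from @koishijs/orm-utils, and writes back only the touched top-level fields, one bulk updateOne per row keyed by the primary fields. A hedged illustration of the field-grouping step (field names hypothetical):

    const update = { 'profile.nickname': 'Alice', money: 100 }
    const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))
    // -> Set { 'profile', 'money' }
    // then, for each matched row:
    //   bulk.find(pick(row, primaryFields))
    //     .updateOne({ $set: pick(executeUpdate(row, update), updateFields) })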
@@ -212,11 +265,12 @@ var MongoDatabase = class extends import_koishi2.Database {
  }
  const copy = __spreadValues(__spreadValues({}, this.ctx.model.create(name)), data);
  try {
- await this.db.collection(name).insertOne(copy);
+ await coll.insertOne(copy);
+ delete copy._id;
  return copy;
  } catch (err) {
  if (err instanceof import_mongodb.MongoError && err.code === 11e3) {
- err[Symbol.for("koishi.error-type")] = "duplicate-entry";
+ throw new import_koishi2.KoishiError(err.message, "database.duplicate-entry");
  }
  throw err;
  }
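A duplicate key (Mongo error code 11000, printed as 11e3 above) is now rethrown as a KoishiError with code 'database.duplicate-entry' instead of tagging the original MongoError via a symbol. A hedged caller-side sketch, assuming it runs inside a plugin where ctx is in scope and using hypothetical values:

    import { KoishiError } from 'koishi'

    try {
      await ctx.database.create('user', { id: 1 })
    } catch (err) {
      if (err instanceof KoishiError) {
        // the row already exists ('database.duplicate-entry')
      } else {
        throw err
      }
    }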
@@ -227,22 +281,37 @@ var MongoDatabase = class extends import_koishi2.Database {
  return;
  if (!keys)
  keys = this.ctx.model.config[name].primary;
- keys = (0, import_koishi2.makeArray)(keys);
+ const indexFields = (0, import_koishi2.makeArray)(keys);
  await this.tasks[name];
- const bulk = this.db.collection(name).initializeUnorderedBulkOp();
- for (const item of data) {
- bulk.find((0, import_koishi2.pick)(item, keys)).upsert().updateOne({ $set: (0, import_koishi2.omit)(item, keys), $setOnInsert: (0, import_koishi2.omit)(this.ctx.model.create(name), [...keys, ...Object.keys(item)]) });
+ const coll = this.db.collection(name);
+ const original = await coll.find({ $or: data.map((item) => (0, import_koishi2.pick)(item, indexFields)) }).toArray();
+ const bulk = coll.initializeUnorderedBulkOp();
+ for (const update of data) {
+ const item = original.find((item2) => indexFields.every((key) => item2[key].valueOf() === update[key].valueOf()));
+ if (item) {
+ const updateFields = new Set(Object.keys(update).map((key) => key.split(".", 1)[0]));
+ const override = (0, import_koishi2.omit)((0, import_koishi2.pick)((0, import_orm_utils.executeUpdate)(item, update), updateFields), indexFields);
+ bulk.find((0, import_koishi2.pick)(item, indexFields)).updateOne({ $set: override });
+ } else {
+ bulk.insert((0, import_orm_utils.executeUpdate)(this.ctx.model.create(name), update));
+ }
  }
  await bulk.execute();
  }
  async aggregate(name, fields, query) {
- const $match = transformQuery(this.ctx.model.resolveQuery(name, query));
- const [data] = await this.db.collection(name).aggregate([{ $match }, {
- $group: __spreadValues({
- _id: 1
- }, (0, import_koishi2.valueMap)(fields, transformEval))
- }]).toArray();
- return data;
+ if (!Object.keys(fields).length)
+ return {};
+ const $match = this._createFilter(name, query);
+ const aggrs = [];
+ fields = (0, import_koishi2.valueMap)(fields, (value) => transformEval(value, aggrs));
+ const stages = aggrs.map((pipeline) => {
+ pipeline.unshift({ $match });
+ return { $unionWith: { coll: name, pipeline } };
+ });
+ stages.unshift({ $match: { _id: null } });
+ const results = await this.db.collection(name).aggregate(stages).toArray();
+ const data = Object.assign({}, ...results);
+ return (0, import_koishi2.valueMap)(fields, (value) => (0, import_orm_utils.executeEval)(data, value));
  }
  };
  __name(MongoDatabase, "MongoDatabase");
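aggregate() now builds one $group pipeline per requested aggregation and stitches them together with $unionWith (a stage available since MongoDB 4.4), then maps the temporary results back onto the requested fields with executeEval() from @koishijs/orm-utils. A hedged sketch of the stages built for a single $sum field (table, field, and query are hypothetical):

    // ctx.database.aggregate('user', { total: { $sum: 'money' } }, { flag: 1 })
    // roughly issues the following pipeline:
    const stages = [
      { $match: { _id: null } },                        // empty base cursor
      { $unionWith: { coll: 'user', pipeline: [
        { $match: { flag: { $eq: 1 } } },               // the resolved query filter
        { $group: { _id: null, temp0: { $sum: '$money' } } },
      ] } },
    ]
    // the union results are merged into one document and each { $: 'tempN' }
    // reference is resolved back onto the requested fields via executeEval().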
package/lib/index.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../src/index.ts", "../src/utils.ts"],
- "sourcesContent": ["import { MongoClient, Db, IndexSpecification, MongoError } from 'mongodb'\nimport { Context, Database, Tables as KoishiTables, makeArray, Schema, valueMap, pick, omit, Query, Model, Dict, noop } from 'koishi'\nimport { URLSearchParams } from 'url'\nimport { transformQuery, transformEval } from './utils'\n\ndeclare module 'koishi' {\n interface Database {\n mongo: MongoDatabase\n }\n\n interface Modules {\n 'database-mongo': typeof import('.')\n }\n}\n\ntype TableType = keyof Tables\n\nexport interface Tables extends KoishiTables {}\n\nclass MongoDatabase extends Database {\n public client: MongoClient\n public db: Db\n public mongo = this\n private tasks: Dict<Promise<any>> = {}\n\n constructor(public ctx: Context, private config: MongoDatabase.Config) {\n super(ctx)\n }\n\n private connectionStringFromConfig() {\n const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config\n let mongourl = `${protocol}://`\n if (username) mongourl += `${encodeURIComponent(username)}${password ? `:${encodeURIComponent(password)}` : ''}@`\n mongourl += `${host}${port ? `:${port}` : ''}/${authDatabase || name}`\n if (connectOptions) {\n const params = new URLSearchParams(connectOptions)\n mongourl += `?${params}`\n }\n return mongourl\n }\n\n async start() {\n const mongourl = this.config.uri || this.connectionStringFromConfig()\n this.client = await MongoClient.connect(mongourl, {\n useNewUrlParser: true,\n useUnifiedTopology: true,\n })\n this.db = this.client.db(this.config.database)\n\n for (const name in this.ctx.model.config) {\n this.tasks[name] = this._syncTable(name)\n }\n\n this.ctx.on('model', (name) => {\n this.tasks[name] = this._syncTable(name)\n })\n }\n\n stop() {\n return this.client.close()\n }\n\n /** synchronize table schema */\n private async _syncTable(name: string) {\n await this.tasks[name]\n const col = await this.db.createCollection(name).catch(() => this.db.collection(name))\n const { primary, unique } = this.ctx.model.config[name]\n const newSpecs: IndexSpecification[] = []\n const oldSpecs: IndexSpecification[] = await col.indexes()\n ;[primary, ...unique].forEach((keys, index) => {\n keys = makeArray(keys)\n const name = (index ? 'unique:' : 'primary:') + keys.join('+')\n if (oldSpecs.find(spec => spec.name === name)) return\n const key = Object.fromEntries(keys.map(key => [key, 1]))\n newSpecs.push({ name, key, unique: true })\n })\n if (!newSpecs.length) return\n await col.createIndexes(newSpecs)\n }\n\n async drop(name: TableType) {\n if (name) {\n await this.db.collection(name).drop()\n } else {\n const collections = await this.db.collections()\n await Promise.all(collections.map(c => c.drop()))\n }\n }\n\n async get(name: TableType, query: Query, modifier: Query.Modifier) {\n const filter = transformQuery(this.ctx.model.resolveQuery(name, query))\n let cursor = this.db.collection(name).find(filter)\n const { fields, limit, offset = 0 } = Query.resolveModifier(modifier)\n cursor = cursor.project({ _id: 0, ...Object.fromEntries((fields ?? 
[]).map(key => [key, 1])) })\n if (offset) cursor = cursor.skip(offset)\n if (limit) cursor = cursor.limit(offset + limit)\n return await cursor.toArray()\n }\n\n async set(name: TableType, query: Query, data: any) {\n await this.tasks[name]\n const filter = transformQuery(this.ctx.model.resolveQuery(name, query))\n await this.db.collection(name).updateMany(filter, { $set: data })\n }\n\n async remove(name: TableType, query: Query) {\n const filter = transformQuery(this.ctx.model.resolveQuery(name, query))\n await this.db.collection(name).deleteMany(filter)\n }\n\n private queue(name: TableType, callback: () => Promise<any>) {\n return this.tasks[name] = Promise.resolve(this.tasks[name]).catch(noop).then(callback)\n }\n\n async create(name: TableType, data: any) {\n return this.queue(name, async () => {\n const { primary, fields, autoInc } = this.ctx.model.config[name]\n if (autoInc && !Array.isArray(primary) && !(primary in data)) {\n const [latest] = await this.db.collection(name).find().sort(primary, -1).limit(1).toArray()\n data[primary] = latest ? +latest[primary] + 1 : 1\n if (Model.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n }\n }\n const copy = { ...this.ctx.model.create(name), ...data }\n try {\n await this.db.collection(name).insertOne(copy)\n return copy\n } catch (err) {\n if (err instanceof MongoError && err.code === 11000) {\n err[Symbol.for('koishi.error-type')] = 'duplicate-entry'\n }\n throw err\n }\n })\n }\n\n async upsert(name: TableType, data: any[], keys: string | string[]) {\n if (!data.length) return\n if (!keys) keys = this.ctx.model.config[name].primary\n keys = makeArray(keys)\n await this.tasks[name]\n const bulk = this.db.collection(name).initializeUnorderedBulkOp()\n for (const item of data) {\n bulk.find(pick(item, keys))\n .upsert()\n .updateOne({ $set: omit(item, keys), $setOnInsert: omit(this.ctx.model.create(name), [...keys, ...Object.keys(item) as any]) })\n }\n await bulk.execute()\n }\n\n async aggregate(name: TableType, fields: {}, query: Query) {\n const $match = transformQuery(this.ctx.model.resolveQuery(name, query))\n const [data] = await this.db.collection(name).aggregate([{ $match }, {\n $group: {\n _id: 1,\n ...valueMap(fields, transformEval),\n },\n }]).toArray()\n return data\n }\n}\n\nnamespace MongoDatabase {\n export const name = 'database-mongo'\n\n export interface Config {\n username?: string\n password?: string\n protocol?: string\n host?: string\n port?: number\n /** database name */\n database?: string\n /** default auth database */\n authDatabase?: string\n connectOptions?: ConstructorParameters<typeof URLSearchParams>[0]\n /** connection string (will overwrite all configs except 'name') */\n uri?: string\n }\n\n export const Config = Schema.object({\n protocol: Schema.string().description('要使用的协议名。').default('mongodb'),\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.number().description('要连接到的端口号。'),\n username: Schema.string().description('要使用的用户名。'),\n password: Schema.string().description('要使用的密码。'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n })\n}\n\nexport default MongoDatabase\n", "import { Query, Eval, valueMap } from 'koishi'\nimport { QuerySelector } from 'mongodb'\n\nfunction transformFieldQuery(query: Query.FieldQuery, key: string) {\n // shorthand syntax\n if (typeof query === 'string' || typeof query === 'number' || query instanceof Date) {\n return { $eq: query }\n } else if (Array.isArray(query)) {\n if (!query.length) 
return\n return { $in: query }\n } else if (query instanceof RegExp) {\n return { $regex: query }\n }\n\n // query operators\n const result: QuerySelector<any> = {}\n for (const prop in query) {\n if (prop === '$el') {\n result.$elemMatch = transformFieldQuery(query[prop], key)\n } else if (prop === '$regexFor') {\n result.$expr = {\n body(data: string, value: string) {\n return new RegExp(data, 'i').test(value)\n },\n args: ['$' + key, query],\n lang: 'js',\n }\n } else {\n result[prop] = query[prop]\n }\n }\n return result\n}\n\nexport function transformQuery(query: Query.Expr) {\n const filter = {}\n for (const key in query) {\n const value = query[key]\n if (key === '$and' || key === '$or') {\n // MongoError: $and/$or/$nor must be a nonempty array\n if (value.length) {\n filter[key] = value.map(transformQuery)\n } else if (key === '$or') {\n return { $nor: [{}] }\n }\n } else if (key === '$not') {\n // MongoError: unknown top level operator: $not\n // https://stackoverflow.com/questions/25270396/mongodb-how-to-invert-query-with-not\n filter['$nor'] = [transformQuery(value)]\n } else if (key === '$expr') {\n filter[key] = transformEval(value)\n } else {\n filter[key] = transformFieldQuery(value, key)\n }\n }\n return filter\n}\n\nexport function transformEval(expr: Eval.Numeric | Eval.Aggregation) {\n if (typeof expr === 'string') {\n return '$' + expr\n } else if (typeof expr === 'number' || typeof expr === 'boolean') {\n return expr\n }\n\n return valueMap(expr as any, (value) => {\n if (Array.isArray(value)) {\n return value.map(transformEval)\n } else {\n return transformEval(value)\n }\n })\n}\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,qBAAgE;AAChE,qBAA6H;AAC7H,iBAAgC;;;ACFhC,oBAAsC;AAGtC,6BAA6B,OAAyB,KAAa;AAEjE,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,YAAY,iBAAiB,MAAM;AACnF,WAAO,EAAE,KAAK;AAAA,aACL,MAAM,QAAQ,QAAQ;AAC/B,QAAI,CAAC,MAAM;AAAQ;AACnB,WAAO,EAAE,KAAK;AAAA,aACL,iBAAiB,QAAQ;AAClC,WAAO,EAAE,QAAQ;AAAA;AAInB,QAAM,SAA6B;AACnC,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,OAAO;AAClB,aAAO,aAAa,oBAAoB,MAAM,OAAO;AAAA,eAC5C,SAAS,aAAa;AAC/B,aAAO,QAAQ;AAAA,QACb,KAAK,MAAc,OAAe;AAChC,iBAAO,IAAI,OAAO,MAAM,KAAK,KAAK;AAAA;AAAA,QAEpC,MAAM,CAAC,MAAM,KAAK;AAAA,QAClB,MAAM;AAAA;AAAA,WAEH;AACL,aAAO,QAAQ,MAAM;AAAA;AAAA;AAGzB,SAAO;AAAA;AA5BA;AA+BF,wBAAwB,OAAmB;AAChD,QAAM,SAAS;AACf,aAAW,OAAO,OAAO;AACvB,UAAM,QAAQ,MAAM;AACpB,QAAI,QAAQ,UAAU,QAAQ,OAAO;AAEnC,UAAI,MAAM,QAAQ;AAChB,eAAO,OAAO,MAAM,IAAI;AAAA,iBACf,QAAQ,OAAO;AACxB,eAAO,EAAE,MAAM,CAAC;AAAA;AAAA,eAET,QAAQ,QAAQ;AAGzB,aAAO,UAAU,CAAC,eAAe;AAAA,eACxB,QAAQ,SAAS;AAC1B,aAAO,OAAO,cAAc;AAAA,WACvB;AACL,aAAO,OAAO,oBAAoB,OAAO;AAAA;AAAA;AAG7C,SAAO;AAAA;AArBO;AAwBT,uBAAuB,MAAuC;AACnE,MAAI,OAAO,SAAS,UAAU;AAC5B,WAAO,MAAM;AAAA,aACJ,OAAO,SAAS,YAAY,OAAO,SAAS,WAAW;AAChE,WAAO;AAAA;AAGT,SAAO,4BAAS,MAAa,CAAC,UAAU;AACtC,QAAI,MAAM,QAAQ,QAAQ;AACxB,aAAO,MAAM,IAAI;AAAA,WACZ;AACL,aAAO,cAAc;AAAA;AAAA;AAAA;AAXX;;;ADvChB,kCAA4B,wBAAS;AAAA,EAMnC,YAAmB,KAAsB,QAA8B;AACrE,UAAM;AADW;AAAsB;AAHlC,iBAAQ;AACP,iBAA4B;AAAA;AAAA,EAM5B,6BAA6B;AACnC,UAAM,EAAE,cAAc,gBAAgB,MAAM,UAAU,MAAM,UAAU,MAAM,UAAU,aAAa,KAAK;AACxG,QAAI,WAAW,GAAG;AAClB,QAAI;AAAU,kBAAY,GAAG,mBAAmB,YAAY,WAAW,IAAI,mBAAmB,cAAc;AAC5G,gBAAY,GAAG,OAAO,OAAO,IAAI,SAAS,MAAM,gBAAgB;AAChE,QAAI,gBAAgB;AAClB,YAAM,SAAS,IAAI,2BAAgB;AACnC,kBAAY,IAAI;AAAA;AAElB,WAAO;AAAA;AAAA,QAGH,QAAQ;AACZ,UAAM,WAAW,KAAK,OAAO,OAAO,KAAK;AACzC,SAAK,SAAS,MAAM,2BAAY,QAAQ,UAAU;AAAA,MAChD,iBAAiB;AAAA,MACjB,oBAAoB;AAAA;AAEtB,SAAK,KAAK,KAAK,OAAO,GAAG,KAAK,OAAO;AAErC,eAAW,QAAQ,KAAK,IAAI,MAAM,QAAQ;AACxC,WAAK,MAAM,QAAQ,KAAK,WAAW;AAAA;AAGrC,SAAK,IAAI,GAAG,SAAS,CAAC,SAAS;AAC7B,WAAK,MAAM,QAAQ,KAAK,WAAW;AAAA;AAAA;AAAA,EAIvC,OAAO;AACL,WAAO,KAAK,OAAO;AAAA;AAAA,QAIP,WAAW,MAAc;AACrC,UAAM,KAAK,MAAM;AACjB,UAAM,MAAM,MAAM,KAAK,GAAG,iBAAiB,MAAM,MAAM,MAAM,KAAK,GAAG,WAAW;AAChF,UAAM,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,OAAO;AAClD,UAAM,WAAiC;AACvC,UAAM,WAAiC,MAAM,IAAI;AAChD,KAAC,SAAS,GAAG,QAAQ,QAAQ,CAAC,MAAM,UAAU;AAC7C,aAAO,8BAAU;AACjB,YAAM,QAAQ,SAAQ,YAAY,cAAc,KAAK,KAAK;AAC1D,UAAI,SAAS,KAAK,UAAQ,KAAK,SAAS;AAAO;AAC/C,YAAM,MAAM,OAAO,YAAY,KAAK,IAAI,UAAO,CAAC,MAAK;AACrD,eAAS,KAAK,EAAE,aAAM,KAAK,QAAQ;AAAA;AAErC,QAAI,CAAC,SAAS;AAAQ;AACtB,UAAM,IAAI,cAAc;AAAA;AAAA,QAGpB,KAAK,MAAiB;AAC1B,QAAI,MAAM;AACR,YAAM,KAAK,GAAG,WAAW,MAAM;AAAA,WAC1B;AACL,YAAM,cAAc,MAAM,KAAK,GAAG;AAClC,YAAM,QAAQ,IAAI,YAAY,IAAI,OAAK,EAAE;AAAA;AAAA;AAAA,QAIvC,IAAI,MAAiB,OAAc,UAA0B;AACjE,UAAM,SAAS,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAChE,QAAI,SAAS,KAAK,GAAG,WAAW,MAAM,KAAK;AAC3C,UAAM,EAAE,QAAQ,OAAO,SAAS,MAAM,qBAAM,gBAAgB;AAC5D,aAAS,OAAO,QAAQ,iBAAE,KAAK,KAAM,OAAO,YAAa,2BAAU,IAAI,IAAI,SAAO,CAAC,KAAK;AACxF,QAAI;AAAQ,eAAS,OAAO,KAAK;AACjC,QAAI;AAAO,eAAS,OAAO,MAAM,SAAS;AAC1C,WAAO,MAAM,OAAO;AAAA;AAAA,QAGhB,IAAI,MAAiB,OAAc,MAAW;AAClD,UAAM,KAAK,MAAM;AACjB,UAAM,SAAS,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAChE,UAAM,KAAK,GAAG,WAAW,MAAM,WAAW,QAAQ,EAAE,MAAM;AAAA;AAAA,QAGtD,OAAO,MAAiB,OAAc;AAC1C,UAAM,SAAS,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAChE,UAAM,KAAK,GAAG,WAAW,MAAM,WAAW;AAAA;AAAA,EAGpC,MAAM,MAAiB,UAA8B;AAC3D,WAAO,KAAK,MAAM,QAAQ,QAAQ,QAAQ,KAAK,MAAM,OAAO,MAAM,qBAAM,KAAK;AAAA;AAAA,QAGzE,OAAO,MAAiB,MAAW;AACvC,WAAO,KAAK,MAAM,MAAM,YAAY;AAClC,YAAM,EAAE,SAAS,QAAQ,YAAY,KAAK,IAAI,MAAM,OAAO;AAC3D,UAAI,WAAW,CAAC,MAAM,QAAQ,YAAY,CAAE,YAAW,OAAO;AAC5D,cAAM,CAAC,UAAU,
MAAM,KAAK,GAAG,WAAW,MAAM,OAAO,KAAK,SAAS,IAAI,MAAM,GAAG;AAClF,aAAK,WAAW,SAAS,CAAC,OAAO,WAAW,IAAI;AAChD,YAAI,qBAAM,MAAM,OAAO,SAAS,OAAO,SAAS,OAAO;AACrD,eAAK,YAAY;AAAA;AAAA;AAGrB,YAAM,OAAO,kCAAK,KAAK,IAAI,MAAM,OAAO,QAAU;AAClD,UAAI;AACF,cAAM,KAAK,GAAG,WAAW,MAAM,UAAU;AACzC,eAAO;AAAA,eACA,KAAP;AACA,YAAI,eAAe,6BAAc,IAAI,SAAS,MAAO;AACnD,cAAI,OAAO,IAAI,wBAAwB;AAAA;AAEzC,cAAM;AAAA;AAAA;AAAA;AAAA,QAKN,OAAO,MAAiB,MAAa,MAAyB;AAClE,QAAI,CAAC,KAAK;AAAQ;AAClB,QAAI,CAAC;AAAM,aAAO,KAAK,IAAI,MAAM,OAAO,MAAM;AAC9C,WAAO,8BAAU;AACjB,UAAM,KAAK,MAAM;AACjB,UAAM,OAAO,KAAK,GAAG,WAAW,MAAM;AACtC,eAAW,QAAQ,MAAM;AACvB,WAAK,KAAK,yBAAK,MAAM,OAClB,SACA,UAAU,EAAE,MAAM,yBAAK,MAAM,OAAO,cAAc,yBAAK,KAAK,IAAI,MAAM,OAAO,OAAO,CAAC,GAAG,MAAM,GAAG,OAAO,KAAK;AAAA;AAElH,UAAM,KAAK;AAAA;AAAA,QAGP,UAAU,MAAiB,QAAY,OAAc;AACzD,UAAM,SAAS,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAChE,UAAM,CAAC,QAAQ,MAAM,KAAK,GAAG,WAAW,MAAM,UAAU,CAAC,EAAE,UAAU;AAAA,MACnE,QAAQ;AAAA,QACN,KAAK;AAAA,SACF,6BAAS,QAAQ;AAAA,QAEpB;AACJ,WAAO;AAAA;AAAA;AA5IX;AAgJA,UAAU,gBAAV;AACS,EAAM,sBAAO;AAiBb,EAAM,wBAAS,sBAAO,OAAO;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY,YAAY,QAAQ;AAAA,IAC1D,MAAM,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA,IACvD,MAAM,sBAAO,SAAS,YAAY;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA;AAAA,GAxBrD;AA4BV,IAAO,cAAQ;",
+ "sourcesContent": ["import { MongoClient, Db, MongoError, IndexDescription } from 'mongodb'\nimport { Context, Database, Tables as KoishiTables, makeArray, Schema, pick, omit, Query, Model, Dict, noop, KoishiError, valueMap } from 'koishi'\nimport { URLSearchParams } from 'url'\nimport { executeUpdate, executeEval } from '@koishijs/orm-utils'\nimport { transformQuery, transformEval } from './utils'\n\ndeclare module 'koishi' {\n interface Database {\n mongo: MongoDatabase\n }\n\n interface Modules {\n 'database-mongo': typeof import('.')\n }\n}\n\ntype TableType = keyof Tables\n\nexport interface Tables extends KoishiTables {}\n\nclass MongoDatabase extends Database {\n public client: MongoClient\n public db: Db\n public mongo = this\n private tasks: Dict<Promise<any>> = {}\n\n constructor(public ctx: Context, private config: MongoDatabase.Config) {\n super(ctx)\n }\n\n private connectionStringFromConfig() {\n const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config\n let mongourl = `${protocol}://`\n if (username) mongourl += `${encodeURIComponent(username)}${password ? `:${encodeURIComponent(password)}` : ''}@`\n mongourl += `${host}${port ? `:${port}` : ''}/${authDatabase || name}`\n if (connectOptions) {\n const params = new URLSearchParams(connectOptions)\n mongourl += `?${params}`\n }\n return mongourl\n }\n\n async start() {\n const mongourl = this.config.uri || this.connectionStringFromConfig()\n this.client = await MongoClient.connect(mongourl)\n this.db = this.client.db(this.config.database)\n\n for (const name in this.ctx.model.config) {\n this.tasks[name] = this._syncTable(name)\n }\n\n this.ctx.on('model', (name) => {\n this.tasks[name] = this._syncTable(name)\n })\n }\n\n stop() {\n return this.client.close()\n }\n\n /** synchronize table schema */\n private async _syncTable(name: string) {\n await this.tasks[name]\n const coll = await this.db.createCollection(name).catch(() => this.db.collection(name))\n const { primary, unique } = this.ctx.model.config[name]\n const newSpecs: IndexDescription[] = []\n const oldSpecs = await coll.indexes()\n ;[primary, ...unique].forEach((keys, index) => {\n keys = makeArray(keys)\n const name = (index ? 
'unique:' : 'primary:') + keys.join('+')\n if (oldSpecs.find(spec => spec.name === name)) return\n const key = Object.fromEntries(keys.map(key => [key, 1]))\n newSpecs.push({ name, key, unique: true })\n })\n if (!newSpecs.length) return\n await coll.createIndexes(newSpecs)\n }\n\n private _createFilter(name: string, query: Query) {\n return transformQuery(this.ctx.model.resolveQuery(name, query))\n }\n\n async drop() {\n await Promise.all(Object.keys(this.ctx.model.config).map(name => this.db.dropCollection(name)))\n }\n\n private async _collStats() {\n const tables = Object.keys(this.ctx.model.config)\n const entries = await Promise.all(tables.map(async (name) => {\n const coll = this.db.collection(name)\n const { count, size } = await coll.stats()\n return [coll.collectionName, { count, size }] as const\n }))\n return Object.fromEntries(entries)\n }\n\n async stats() {\n // https://docs.mongodb.com/manual/reference/command/dbStats/#std-label-dbstats-output\n const [{ totalSize }, tables] = await Promise.all([\n this.db.stats(),\n this._collStats(),\n ])\n return { size: totalSize, tables }\n }\n\n async get(name: TableType, query: Query, modifier: Query.Modifier) {\n const filter = this._createFilter(name, query)\n let cursor = this.db.collection(name).find(filter)\n const { fields, limit, offset = 0, sort } = Query.resolveModifier(modifier)\n cursor = cursor.project({ _id: 0, ...Object.fromEntries((fields ?? []).map(key => [key, 1])) })\n if (offset) cursor = cursor.skip(offset)\n if (limit) cursor = cursor.limit(offset + limit)\n if (sort) cursor = cursor.sort(sort)\n return await cursor.toArray() as any\n }\n\n async set(name: TableType, query: Query, update: {}) {\n await this.tasks[name]\n const { primary } = this.ctx.model.config[name]\n const indexFields = makeArray(primary)\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const filter = this._createFilter(name, query)\n const coll = this.db.collection(name)\n const original = await coll.find(filter).toArray()\n if (!original.length) return\n const bulk = coll.initializeUnorderedBulkOp()\n for (const item of original) {\n bulk.find(pick(item, indexFields)).updateOne({ $set: pick(executeUpdate(item, update), updateFields) })\n }\n await bulk.execute()\n }\n\n async remove(name: TableType, query: Query) {\n const filter = this._createFilter(name, query)\n await this.db.collection(name).deleteMany(filter)\n }\n\n private queue(name: TableType, callback: () => Promise<any>) {\n return this.tasks[name] = Promise.resolve(this.tasks[name]).catch(noop).then(callback)\n }\n\n async create(name: TableType, data: any) {\n const coll = this.db.collection(name)\n return this.queue(name, async () => {\n const { primary, fields, autoInc } = this.ctx.model.config[name]\n if (autoInc && !Array.isArray(primary) && !(primary in data)) {\n const [latest] = await coll.find().sort(primary, -1).limit(1).toArray()\n data[primary] = latest ? 
+latest[primary] + 1 : 1\n if (Model.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n }\n }\n const copy = { ...this.ctx.model.create(name), ...data }\n try {\n await coll.insertOne(copy)\n delete copy._id\n return copy\n } catch (err) {\n if (err instanceof MongoError && err.code === 11000) {\n throw new KoishiError(err.message, 'database.duplicate-entry')\n }\n throw err\n }\n })\n }\n\n async upsert(name: TableType, data: any[], keys: string | string[]) {\n if (!data.length) return\n if (!keys) keys = this.ctx.model.config[name].primary\n const indexFields = makeArray(keys)\n await this.tasks[name]\n const coll = this.db.collection(name)\n const original = await coll.find({ $or: data.map(item => pick(item, indexFields)) }).toArray()\n const bulk = coll.initializeUnorderedBulkOp()\n for (const update of data) {\n const item = original.find(item => indexFields.every(key => item[key].valueOf() === update[key].valueOf()))\n if (item) {\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const override = omit(pick(executeUpdate(item, update), updateFields), indexFields)\n bulk.find(pick(item, indexFields)).updateOne({ $set: override })\n } else {\n bulk.insert(executeUpdate(this.ctx.model.create(name), update))\n }\n }\n await bulk.execute()\n }\n\n async aggregate(name: TableType, fields: {}, query: Query) {\n if (!Object.keys(fields).length) return {}\n const $match = this._createFilter(name, query)\n const aggrs: any[][] = []\n fields = valueMap(fields, value => transformEval(value, aggrs))\n const stages = aggrs.map<any>((pipeline) => {\n pipeline.unshift({ $match })\n return { $unionWith: { coll: name, pipeline } }\n })\n stages.unshift({ $match: { _id: null } })\n const results = await this.db.collection(name).aggregate(stages).toArray()\n const data = Object.assign({}, ...results)\n return valueMap(fields, value => executeEval(data, value)) as any\n }\n}\n\nnamespace MongoDatabase {\n export const name = 'database-mongo'\n\n export interface Config {\n username?: string\n password?: string\n protocol?: string\n host?: string\n port?: number\n /** database name */\n database?: string\n /** default auth database */\n authDatabase?: string\n connectOptions?: ConstructorParameters<typeof URLSearchParams>[0]\n /** connection string (will overwrite all configs except 'name') */\n uri?: string\n }\n\n export const Config = Schema.object({\n protocol: Schema.string().description('要使用的协议名。').default('mongodb'),\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.number().description('要连接到的端口号。'),\n username: Schema.string().description('要使用的用户名。'),\n password: Schema.string().description('要使用的密码。'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n })\n}\n\nexport default MongoDatabase\n", "import { Query, valueMap } from 'koishi'\nimport { Filter, FilterOperators } from 'mongodb'\n\nfunction transformFieldQuery(query: Query.FieldQuery, key: string) {\n // shorthand syntax\n if (typeof query === 'string' || typeof query === 'number' || query instanceof Date) {\n return { $eq: query }\n } else if (Array.isArray(query)) {\n if (!query.length) return\n return { $in: query }\n } else if (query instanceof RegExp) {\n return { $regex: query }\n }\n\n // query operators\n const result: FilterOperators<any> = {}\n for (const prop in query) {\n if (prop === '$el') {\n result.$elemMatch = transformFieldQuery(query[prop], key)\n } else if (prop === '$regexFor') {\n result.$expr = {\n 
body(data: string, value: string) {\n return new RegExp(data, 'i').test(value)\n },\n args: ['$' + key, query],\n lang: 'js',\n }\n } else {\n result[prop] = query[prop]\n }\n }\n return result\n}\n\nexport function transformQuery(query: Query.Expr) {\n const filter: Filter<any> = {}\n for (const key in query) {\n const value = query[key]\n if (key === '$and' || key === '$or') {\n // MongoError: $and/$or/$nor must be a nonempty array\n if (value.length) {\n filter[key] = value.map(transformQuery)\n } else if (key === '$or') {\n return { $nor: [{}] }\n }\n } else if (key === '$not') {\n // MongoError: unknown top level operator: $not\n // https://stackoverflow.com/questions/25270396/mongodb-how-to-invert-query-with-not\n filter.$nor = [transformQuery(value)]\n } else if (key === '$expr') {\n filter[key] = transformEval(value)\n } else {\n filter[key] = transformFieldQuery(value, key)\n }\n }\n return filter\n}\n\nfunction transformEvalExpr(expr: any, aggrs?: any[][]) {\n return valueMap(expr as any, (value, key) => {\n if (Array.isArray(value)) {\n return value.map(val => transformEval(val, aggrs))\n } else {\n return transformEval(value, aggrs)\n }\n })\n}\n\nfunction transformAggr(expr: any) {\n if (typeof expr === 'string') {\n return '$' + expr\n }\n return transformEvalExpr(expr)\n}\n\nconst aggrKeys = ['$sum', '$avg', '$min', '$max', '$count']\n\nexport function transformEval(expr: any, aggrs?: any[][]) {\n if (typeof expr === 'number' || typeof expr === 'string' || typeof expr === 'boolean') {\n return expr\n } else if (expr.$) {\n return '$' + expr.$\n }\n\n for (const key of aggrKeys) {\n if (!expr[key]) continue\n const value = transformAggr(expr[key])\n const $ = 'temp' + aggrs.length\n if (key === '$count') {\n aggrs.push([\n { $group: { _id: value } },\n { $group: { _id: null, [$]: { $count: {} } } }\n ])\n } else {\n aggrs.push([{ $group: { _id: null, [$]: { [key]: value } } }])\n }\n return { $ }\n }\n\n return transformEvalExpr(expr, aggrs)\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,qBAA8D;AAC9D,qBAA0I;AAC1I,iBAAgC;AAChC,uBAA2C;;;ACH3C,oBAAgC;AAGhC,6BAA6B,OAAyB,KAAa;AAEjE,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,YAAY,iBAAiB,MAAM;AACnF,WAAO,EAAE,KAAK;AAAA,aACL,MAAM,QAAQ,QAAQ;AAC/B,QAAI,CAAC,MAAM;AAAQ;AACnB,WAAO,EAAE,KAAK;AAAA,aACL,iBAAiB,QAAQ;AAClC,WAAO,EAAE,QAAQ;AAAA;AAInB,QAAM,SAA+B;AACrC,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,OAAO;AAClB,aAAO,aAAa,oBAAoB,MAAM,OAAO;AAAA,eAC5C,SAAS,aAAa;AAC/B,aAAO,QAAQ;AAAA,QACb,KAAK,MAAc,OAAe;AAChC,iBAAO,IAAI,OAAO,MAAM,KAAK,KAAK;AAAA;AAAA,QAEpC,MAAM,CAAC,MAAM,KAAK;AAAA,QAClB,MAAM;AAAA;AAAA,WAEH;AACL,aAAO,QAAQ,MAAM;AAAA;AAAA;AAGzB,SAAO;AAAA;AA5BA;AA+BF,wBAAwB,OAAmB;AAChD,QAAM,SAAsB;AAC5B,aAAW,OAAO,OAAO;AACvB,UAAM,QAAQ,MAAM;AACpB,QAAI,QAAQ,UAAU,QAAQ,OAAO;AAEnC,UAAI,MAAM,QAAQ;AAChB,eAAO,OAAO,MAAM,IAAI;AAAA,iBACf,QAAQ,OAAO;AACxB,eAAO,EAAE,MAAM,CAAC;AAAA;AAAA,eAET,QAAQ,QAAQ;AAGzB,aAAO,OAAO,CAAC,eAAe;AAAA,eACrB,QAAQ,SAAS;AAC1B,aAAO,OAAO,cAAc;AAAA,WACvB;AACL,aAAO,OAAO,oBAAoB,OAAO;AAAA;AAAA;AAG7C,SAAO;AAAA;AArBO;AAwBhB,2BAA2B,MAAW,OAAiB;AACrD,SAAO,4BAAS,MAAa,CAAC,OAAO,QAAQ;AAC3C,QAAI,MAAM,QAAQ,QAAQ;AACxB,aAAO,MAAM,IAAI,SAAO,cAAc,KAAK;AAAA,WACtC;AACL,aAAO,cAAc,OAAO;AAAA;AAAA;AAAA;AALzB;AAUT,uBAAuB,MAAW;AAChC,MAAI,OAAO,SAAS,UAAU;AAC5B,WAAO,MAAM;AAAA;AAEf,SAAO,kBAAkB;AAAA;AAJlB;AAOT,IAAM,WAAW,CAAC,QAAQ,QAAQ,QAAQ,QAAQ;AAE3C,uBAAuB,MAAW,OAAiB;AACxD,MAAI,OAAO,SAAS,YAAY,OAAO,SAAS,YAAY,OAAO,SAAS,WAAW;AACrF,WAAO;AAAA,aACE,KAAK,GAAG;AACjB,WAAO,MAAM,KAAK;AAAA;AAGpB,aAAW,OAAO,UAAU;AAC1B,QAAI,CAAC,KAAK;AAAM;AAChB,UAAM,QAAQ,cAAc,KAAK;AACjC,UAAM,IAAI,SAAS,MAAM;AACzB,QAAI,QAAQ,UAAU;AACpB,YAAM,KAAK;AAAA,QACT,EAAE,QAAQ,EAAE,KAAK;AAAA,QACjB,EAAE,QAAQ,EAAE,KAAK,OAAO,IAAI,EAAE,QAAQ;AAAA;AAAA,WAEnC;AACL,YAAM,KAAK,CAAC,EAAE,QAAQ,EAAE,KAAK,OAAO,IAAI,GAAG,MAAM;AAAA;AAEnD,WAAO,EAAE;AAAA;AAGX,SAAO,kBAAkB,MAAM;AAAA;AAtBjB;;;ADzDhB,kCAA4B,wBAAS;AAAA,EAMnC,YAAmB,KAAsB,QAA8B;AACrE,UAAM;AADW;AAAsB;AAHlC,iBAAQ;AACP,iBAA4B;AAAA;AAAA,EAM5B,6BAA6B;AACnC,UAAM,EAAE,cAAc,gBAAgB,MAAM,UAAU,MAAM,UAAU,MAAM,UAAU,aAAa,KAAK;AACxG,QAAI,WAAW,GAAG;AAClB,QAAI;AAAU,kBAAY,GAAG,mBAAmB,YAAY,WAAW,IAAI,mBAAmB,cAAc;AAC5G,gBAAY,GAAG,OAAO,OAAO,IAAI,SAAS,MAAM,gBAAgB;AAChE,QAAI,gBAAgB;AAClB,YAAM,SAAS,IAAI,2BAAgB;AACnC,kBAAY,IAAI;AAAA;AAElB,WAAO;AAAA;AAAA,QAGH,QAAQ;AACZ,UAAM,WAAW,KAAK,OAAO,OAAO,KAAK;AACzC,SAAK,SAAS,MAAM,2BAAY,QAAQ;AACxC,SAAK,KAAK,KAAK,OAAO,GAAG,KAAK,OAAO;AAErC,eAAW,QAAQ,KAAK,IAAI,MAAM,QAAQ;AACxC,WAAK,MAAM,QAAQ,KAAK,WAAW;AAAA;AAGrC,SAAK,IAAI,GAAG,SAAS,CAAC,SAAS;AAC7B,WAAK,MAAM,QAAQ,KAAK,WAAW;AAAA;AAAA;AAAA,EAIvC,OAAO;AACL,WAAO,KAAK,OAAO;AAAA;AAAA,QAIP,WAAW,MAAc;AACrC,UAAM,KAAK,MAAM;AACjB,UAAM,OAAO,MAAM,KAAK,GAAG,iBAAiB,MAAM,MAAM,MAAM,KAAK,GAAG,WAAW;AACjF,UAAM,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,OAAO;AAClD,UAAM,WAA+B;AACrC,UAAM,WAAW,MAAM,KAAK;AAC3B,KAAC,SAAS,GAAG,QAAQ,QAAQ,CAAC,MAAM,UAAU;AAC7C,aAAO,8BAAU;AACjB,YAAM,QAAQ,SAAQ,YAAY,cAAc,KAAK,KAAK;AAC1D,UAAI,SAAS,KAAK,UAAQ,KAAK,SAAS;AAAO;AAC/C,YAAM,MAAM,OAAO,YAAY,KAAK,IAAI,UAAO,CAAC,MAAK;AACrD,eAAS,KAAK,EAAE,aAAM,KAAK,QAAQ;AAAA;AAErC,QAAI,CAAC,SAAS;AAAQ;AACtB,UAAM,KAAK,cAAc;AAAA;AAAA,EAGnB,cAAc,MAAc,OAAc;AAChD,WAAO,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAAA;AAAA,QAGpD,OAAO;AACX,UAAM,QAAQ,IAAI,OAAO,KAAK,KAAK,IAAI,MAAM,QAAQ,IAAI,UAAQ,KAAK,GAAG,eAAe;AAAA;AAAA,QAG5E,aAAa;AACzB,UAAM,SAAS,OAAO,KAAK,KAAK,IAAI,MAAM;AAC1C,UAAM,UAAU,MAAM,QAAQ,IAAI,OAAO,IAAI,OAAO,SAAS;AAC3D,YAAM,OAAO,KAAK,GAAG,WAAW;AAChC,YAAM,EAAE,OAAO,SAAS,MAAM,KAAK;AACnC,aAAO,CAAC,KAAK,gBAAgB,EAAE,OAAO;AAAA;AAExC,WAAO,OAAO,YAAY;AAAA;AAAA,QAGtB,QAAQ;AAEZ,UAAM,CAAC,EAAE,aAAa,UAAU,MAAM,QAAQ,IAAI;AAAA,MAChD,KAAK,GAAG;AAAA,MACR,KAAK;AAAA;AAEP,WAAO,
EAAE,MAAM,WAAW;AAAA;AAAA,QAGtB,IAAI,MAAiB,OAAc,UAA0B;AACjE,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,QAAI,SAAS,KAAK,GAAG,WAAW,MAAM,KAAK;AAC3C,UAAM,EAAE,QAAQ,OAAO,SAAS,GAAG,SAAS,qBAAM,gBAAgB;AAClE,aAAS,OAAO,QAAQ,iBAAE,KAAK,KAAM,OAAO,YAAa,2BAAU,IAAI,IAAI,SAAO,CAAC,KAAK;AACxF,QAAI;AAAQ,eAAS,OAAO,KAAK;AACjC,QAAI;AAAO,eAAS,OAAO,MAAM,SAAS;AAC1C,QAAI;AAAM,eAAS,OAAO,KAAK;AAC/B,WAAO,MAAM,OAAO;AAAA;AAAA,QAGhB,IAAI,MAAiB,OAAc,QAAY;AACnD,UAAM,KAAK,MAAM;AACjB,UAAM,EAAE,YAAY,KAAK,IAAI,MAAM,OAAO;AAC1C,UAAM,cAAc,8BAAU;AAC9B,UAAM,eAAe,IAAI,IAAI,OAAO,KAAK,QAAQ,IAAI,SAAO,IAAI,MAAM,KAAK,GAAG;AAC9E,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,UAAM,WAAW,MAAM,KAAK,KAAK,QAAQ;AACzC,QAAI,CAAC,SAAS;AAAQ;AACtB,UAAM,OAAO,KAAK;AAClB,eAAW,QAAQ,UAAU;AAC3B,WAAK,KAAK,yBAAK,MAAM,cAAc,UAAU,EAAE,MAAM,yBAAK,oCAAc,MAAM,SAAS;AAAA;AAEzF,UAAM,KAAK;AAAA;AAAA,QAGP,OAAO,MAAiB,OAAc;AAC1C,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAM,KAAK,GAAG,WAAW,MAAM,WAAW;AAAA;AAAA,EAGpC,MAAM,MAAiB,UAA8B;AAC3D,WAAO,KAAK,MAAM,QAAQ,QAAQ,QAAQ,KAAK,MAAM,OAAO,MAAM,qBAAM,KAAK;AAAA;AAAA,QAGzE,OAAO,MAAiB,MAAW;AACvC,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,WAAO,KAAK,MAAM,MAAM,YAAY;AAClC,YAAM,EAAE,SAAS,QAAQ,YAAY,KAAK,IAAI,MAAM,OAAO;AAC3D,UAAI,WAAW,CAAC,MAAM,QAAQ,YAAY,CAAE,YAAW,OAAO;AAC5D,cAAM,CAAC,UAAU,MAAM,KAAK,OAAO,KAAK,SAAS,IAAI,MAAM,GAAG;AAC9D,aAAK,WAAW,SAAS,CAAC,OAAO,WAAW,IAAI;AAChD,YAAI,qBAAM,MAAM,OAAO,SAAS,OAAO,SAAS,OAAO;AACrD,eAAK,YAAY;AAAA;AAAA;AAGrB,YAAM,OAAO,kCAAK,KAAK,IAAI,MAAM,OAAO,QAAU;AAClD,UAAI;AACF,cAAM,KAAK,UAAU;AACrB,eAAO,KAAK;AACZ,eAAO;AAAA,eACA,KAAP;AACA,YAAI,eAAe,6BAAc,IAAI,SAAS,MAAO;AACnD,gBAAM,IAAI,2BAAY,IAAI,SAAS;AAAA;AAErC,cAAM;AAAA;AAAA;AAAA;AAAA,QAKN,OAAO,MAAiB,MAAa,MAAyB;AAClE,QAAI,CAAC,KAAK;AAAQ;AAClB,QAAI,CAAC;AAAM,aAAO,KAAK,IAAI,MAAM,OAAO,MAAM;AAC9C,UAAM,cAAc,8BAAU;AAC9B,UAAM,KAAK,MAAM;AACjB,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,UAAM,WAAW,MAAM,KAAK,KAAK,EAAE,KAAK,KAAK,IAAI,UAAQ,yBAAK,MAAM,iBAAiB;AACrF,UAAM,OAAO,KAAK;AAClB,eAAW,UAAU,MAAM;AACzB,YAAM,OAAO,SAAS,KAAK,WAAQ,YAAY,MAAM,SAAO,MAAK,KAAK,cAAc,OAAO,KAAK;AAChG,UAAI,MAAM;AACR,cAAM,eAAe,IAAI,IAAI,OAAO,KAAK,QAAQ,IAAI,SAAO,IAAI,MAAM,KAAK,GAAG;AAC9E,cAAM,WAAW,yBAAK,yBAAK,oCAAc,MAAM,SAAS,eAAe;AACvE,aAAK,KAAK,yBAAK,MAAM,cAAc,UAAU,EAAE,MAAM;AAAA,aAChD;AACL,aAAK,OAAO,oCAAc,KAAK,IAAI,MAAM,OAAO,OAAO;AAAA;AAAA;AAG3D,UAAM,KAAK;AAAA;AAAA,QAGP,UAAU,MAAiB,QAAY,OAAc;AACzD,QAAI,CAAC,OAAO,KAAK,QAAQ;AAAQ,aAAO;AACxC,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAM,QAAiB;AACvB,aAAS,6BAAS,QAAQ,WAAS,cAAc,OAAO;AACxD,UAAM,SAAS,MAAM,IAAS,CAAC,aAAa;AAC1C,eAAS,QAAQ,EAAE;AACnB,aAAO,EAAE,YAAY,EAAE,MAAM,MAAM;AAAA;AAErC,WAAO,QAAQ,EAAE,QAAQ,EAAE,KAAK;AAChC,UAAM,UAAU,MAAM,KAAK,GAAG,WAAW,MAAM,UAAU,QAAQ;AACjE,UAAM,OAAO,OAAO,OAAO,IAAI,GAAG;AAClC,WAAO,6BAAS,QAAQ,WAAS,kCAAY,MAAM;AAAA;AAAA;AAnLvD;AAuLA,UAAU,gBAAV;AACS,EAAM,sBAAO;AAiBb,EAAM,wBAAS,sBAAO,OAAO;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY,YAAY,QAAQ;AAAA,IAC1D,MAAM,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA,IACvD,MAAM,sBAAO,SAAS,YAAY;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA;AAAA,GAxBrD;AA4BV,IAAO,cAAQ;",
  "names": []
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@koishijs/plugin-database-mongo",
  "description": "MongoDB support for Koishi",
- "version": "3.0.0-beta.5",
+ "version": "3.0.0-beta.6",
  "main": "lib/index.js",
  "typings": "lib/index.d.ts",
  "files": [
@@ -32,14 +32,14 @@
  "mysql"
  ],
  "devDependencies": {
- "@koishijs/plugin-mock": "^1.0.0-beta.1",
- "@koishijs/test-utils": "^8.0.0-beta.5"
+ "@koishijs/plugin-mock": "^1.0.0-beta.2",
+ "@koishijs/test-utils": "^8.0.0-beta.6"
  },
  "peerDependencies": {
- "koishi": "^4.0.0-beta.5"
+ "koishi": "^4.0.0-beta.6"
  },
  "dependencies": {
- "@types/mongodb": "^3.6.12",
- "mongodb": "^3.6.6"
+ "@koishijs/orm-utils": "^1.0.0-beta.4",
+ "mongodb": "^4.2.2"
  }
  }