@koishijs/plugin-database-mongo 3.0.0-beta.4 → 3.0.0-rc.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.d.ts +28 -15
- package/lib/index.js +179 -95
- package/lib/index.js.map +2 -2
- package/lib/utils.d.ts +5 -0
- package/package.json +6 -6
package/lib/index.d.ts
CHANGED
@@ -1,9 +1,8 @@
+/// <reference types="koishi/lib" />
 /// <reference types="node" />
-import {
-import {
+import { MongoClient, Db } from 'mongodb';
+import { Context, Database, Tables, Schema, Query } from 'koishi';
 import { URLSearchParams } from 'url';
-export function createFilter<T extends TableType>(name: T, query: Query<T>): {};
-export function transformEval(expr: Eval.Numeric | Eval.Aggregation): any;
 declare module 'koishi' {
     interface Database {
         mongo: MongoDatabase;
@@ -12,22 +11,39 @@ declare module 'koishi' {
         'database-mongo': typeof import('.');
     }
 }
-type TableType = keyof Tables;
-export interface Tables extends KoishiTables {
-}
+declare type TableType = keyof Tables;
 declare class MongoDatabase extends Database {
     ctx: Context;
     private config;
     client: MongoClient;
     db: Db;
     mongo: this;
-
-    channel: Collection<Channel>;
+    private tasks;
     constructor(ctx: Context, config: MongoDatabase.Config);
+    private connectionStringFromConfig;
     start(): Promise<void>;
-    collection<T extends TableType>(name: T): Collection<Tables[T]>;
     stop(): Promise<void>;
-
+    /** synchronize table schema */
+    private _syncTable;
+    private _createFilter;
+    drop(): Promise<void>;
+    private _collStats;
+    stats(): Promise<{
+        size: any;
+        tables: {
+            [k: string]: {
+                readonly count: number;
+                readonly size: number;
+            };
+        };
+    }>;
+    get(name: TableType, query: Query, modifier: Query.Modifier): Promise<any>;
+    set(name: TableType, query: Query, update: {}): Promise<void>;
+    remove(name: TableType, query: Query): Promise<void>;
+    private queue;
+    create(name: TableType, data: any): Promise<any>;
+    upsert(name: TableType, data: any[], keys: string | string[]): Promise<void>;
+    aggregate(name: TableType, fields: {}, query: Query): Promise<any>;
 }
 declare namespace MongoDatabase {
     const name = "database-mongo";
@@ -39,11 +55,10 @@ declare namespace MongoDatabase {
         port?: number;
         /** database name */
         database?: string;
-        prefix?: string;
         /** default auth database */
         authDatabase?: string;
         connectOptions?: ConstructorParameters<typeof URLSearchParams>[0];
-        /** connection string (will overwrite all configs except 'name'
+        /** connection string (will overwrite all configs except 'name') */
         uri?: string;
     }
     const Config: Schema<{
@@ -53,7 +68,6 @@ declare namespace MongoDatabase {
         username?: string;
         password?: string;
         database?: string;
-        prefix?: string;
     } & {
         [x: string]: any;
     }, {
@@ -63,7 +77,6 @@ declare namespace MongoDatabase {
         username?: string;
         password?: string;
         database?: string;
-        prefix?: string;
     } & {
         [x: string]: any;
     }>;
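Taken together, the declaration changes replace the old low-level surface (the `collection()` accessor, the exported `createFilter`/`transformEval` helpers, the extendable `Tables` interface) with ORM-style methods driven by `ctx.model`. For orientation only, a minimal sketch of a consumer touching the new surface (hypothetical plugin code; `ctx.database.mongo` comes from the module augmentation in the diff above):

    import { Context } from 'koishi'

    // Hypothetical consumer plugin exercising the new declarations.
    export function apply(ctx: Context) {
      ctx.on('ready', async () => {
        // `mongo` is declared on Database by the augmentation above
        const stats = await ctx.database.mongo.stats()
        // stats.tables maps each collection to { count, size }
        console.log(stats.size, Object.keys(stats.tables))
      })
    }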
package/lib/index.js
CHANGED
@@ -44,6 +44,7 @@ __export(exports, {
 var import_mongodb = __toModule(require("mongodb"));
 var import_koishi2 = __toModule(require("koishi"));
 var import_url = __toModule(require("url"));
+var import_orm_utils = __toModule(require("@koishijs/orm-utils"));
 
 // plugins/database/mongo/src/utils.ts
 var import_koishi = __toModule(require("koishi"));
@@ -87,7 +88,7 @@ function transformQuery(query) {
         return { $nor: [{}] };
       }
     } else if (key === "$not") {
-      filter
+      filter.$nor = [transformQuery(value)];
     } else if (key === "$expr") {
       filter[key] = transformEval(value);
     } else {
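For context on the `$not` hunk: MongoDB rejects `$not` as a top-level query operator, so the transform rewrites it as a single-branch `$nor`; the change here only switches the assignment to property syntax. An illustrative mapping (not code from the package; shorthand values become `$eq` per `transformFieldQuery`):

    // Koishi query:  { $not: { flag: 1 } }
    // Mongo filter:  { $nor: [{ flag: { $eq: 1 } }] }
    const filter = { $nor: [{ flag: { $eq: 1 } }] }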
@@ -97,23 +98,46 @@ function transformQuery(query) {
   return filter;
 }
 __name(transformQuery, "transformQuery");
-function
-return
+function transformEvalExpr(expr, aggrs) {
+  return (0, import_koishi.valueMap)(expr, (value, key) => {
+    if (Array.isArray(value)) {
+      return value.map((val) => transformEval(val, aggrs));
+    } else {
+      return transformEval(value, aggrs);
+    }
+  });
 }
-__name(
-function
+__name(transformEvalExpr, "transformEvalExpr");
+function transformAggr(expr) {
   if (typeof expr === "string") {
     return "$" + expr;
-  }
+  }
+  return transformEvalExpr(expr);
+}
+__name(transformAggr, "transformAggr");
+var aggrKeys = ["$sum", "$avg", "$min", "$max", "$count"];
+function transformEval(expr, aggrs) {
+  if (typeof expr === "number" || typeof expr === "string" || typeof expr === "boolean") {
     return expr;
+  } else if (expr.$) {
+    return "$" + expr.$;
   }
-
-  if (
-
+  for (const key of aggrKeys) {
+    if (!expr[key])
+      continue;
+    const value = transformAggr(expr[key]);
+    const $ = "temp" + aggrs.length;
+    if (key === "$count") {
+      aggrs.push([
+        { $group: { _id: value } },
+        { $group: { _id: null, [$]: { $count: {} } } }
+      ]);
     } else {
-
+      aggrs.push([{ $group: { _id: null, [$]: { [key]: value } } }]);
     }
-
+    return { $ };
+  }
+  return transformEvalExpr(expr, aggrs);
 }
 __name(transformEval, "transformEval");
 
@@ -124,30 +148,7 @@ var MongoDatabase = class extends import_koishi2.Database {
     this.ctx = ctx;
     this.config = config;
     this.mongo = this;
-
-  async start() {
-    const mongourl = this.config.uri || this.connectionStringFromConfig();
-    this.client = await import_mongodb.MongoClient.connect(mongourl, { useNewUrlParser: true, useUnifiedTopology: true });
-    this.db = this.client.db(this.config.database);
-    if (this.config.prefix) {
-      this.db.collection = ((func, prefix) => /* @__PURE__ */ __name(function collection(name) {
-        return func(`${prefix}.${name}`);
-      }, "collection"))(this.db.collection.bind(this.db), this.config.prefix);
-    }
-    this.user = this.db.collection("user");
-    this.channel = this.db.collection("channel");
-    await this.channel.createIndex({ type: 1, pid: 1 }, { unique: true });
-    for (const name in import_koishi2.Tables.config) {
-      const { primary } = import_koishi2.Tables.config[name];
-      const col = this.db.collection(name);
-      await col.createIndex(Object.fromEntries((0, import_koishi2.makeArray)(primary).map((K) => [K, 1])), { unique: true });
-    }
-  }
-  collection(name) {
-    return this.db.collection(name);
-  }
-  stop() {
-    return this.client.close();
+    this.tasks = {};
   }
   connectionStringFromConfig() {
     const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config;
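Two patterns introduced above deserve a gloss. First, the rewritten `transformEval` no longer evaluates aggregations inline: each `$sum`/`$avg`/`$min`/`$max`/`$count` is hoisted into its own `$group` pipeline, pushed onto the caller-supplied `aggrs` array, and replaced by a `{ $: 'tempN' }` placeholder. A sketch of that data flow (illustrative field names):

    const aggrs: any[][] = []
    // transformEval({ $sum: 'count' }, aggrs) → { $: 'temp0' }, pushing
    //   [{ $group: { _id: null, temp0: { $sum: '$count' } } }]
    // transformEval({ $count: 'id' }, aggrs) → { $: 'temp1' }, pushing
    //   [{ $group: { _id: '$id' } },
    //    { $group: { _id: null, temp1: { $count: {} } } }]

Second, the lone addition to the constructor, `this.tasks = {};`, backs the new per-table serialization: schema syncs and creates are chained onto the table's pending promise via the `queue` helper in the next hunk. The pattern in isolation (a self-contained sketch, not the package's code):

    const tasks: Record<string, Promise<any>> = {}
    const noop = () => {}

    // Callbacks for the same key run strictly in order; a rejected
    // task does not poison the ones queued after it.
    function queue(name: string, callback: () => Promise<any>) {
      return tasks[name] = Promise.resolve(tasks[name]).catch(noop).then(callback)
    }

    queue('user', async () => { /* sync indexes */ })
    queue('user', async () => { /* insert a row */ })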
@@ -161,87 +162,170 @@ var MongoDatabase = class extends import_koishi2.Database {
     }
     return mongourl;
   }
-
-
-(
-
-
-
-    host: import_koishi2.Schema.string().description("要连接到的主机名。").default("localhost"),
-    port: import_koishi2.Schema.number().description("要连接到的端口号。"),
-    username: import_koishi2.Schema.string().description("要使用的用户名。"),
-    password: import_koishi2.Schema.string().description("要使用的密码。"),
-    database: import_koishi2.Schema.string().description("要访问的数据库名。").default("koishi"),
-    prefix: import_koishi2.Schema.string().description("使用的表名前缀。当配置了这一项时,所有通过 Koishi 创建的表名都会以这个配置项为前缀。")
-  });
-})(MongoDatabase || (MongoDatabase = {}));
-import_koishi2.Database.extend(MongoDatabase, {
-  async drop(table) {
-    if (table) {
-      await this.db.collection(table).drop();
-    } else {
-      const collections = await this.db.collections();
-      await Promise.all(collections.map((c) => c.drop()));
+  async start() {
+    const mongourl = this.config.uri || this.connectionStringFromConfig();
+    this.client = await import_mongodb.MongoClient.connect(mongourl);
+    this.db = this.client.db(this.config.database);
+    for (const name in this.ctx.model.config) {
+      this.tasks[name] = this._syncTable(name);
     }
-
+    this.ctx.on("model", (name) => {
+      this.tasks[name] = this._syncTable(name);
+    });
+  }
+  stop() {
+    return this.client.close();
+  }
+  async _syncTable(name) {
+    await this.tasks[name];
+    const coll = await this.db.createCollection(name).catch(() => this.db.collection(name));
+    const { primary, unique } = this.ctx.model.config[name];
+    const newSpecs = [];
+    const oldSpecs = await coll.indexes();
+    [primary, ...unique].forEach((keys, index) => {
+      keys = (0, import_koishi2.makeArray)(keys);
+      const name2 = (index ? "unique:" : "primary:") + keys.join("+");
+      if (oldSpecs.find((spec) => spec.name === name2))
+        return;
+      const key = Object.fromEntries(keys.map((key2) => [key2, 1]));
+      newSpecs.push({ name: name2, key, unique: true });
+    });
+    if (!newSpecs.length)
+      return;
+    await coll.createIndexes(newSpecs);
+  }
+  _createFilter(name, query) {
+    return transformQuery(this.ctx.model.resolveQuery(name, query));
+  }
+  async drop() {
+    await Promise.all(Object.keys(this.ctx.model.config).map((name) => this.db.dropCollection(name)));
+  }
+  async _collStats() {
+    const tables = Object.keys(this.ctx.model.config);
+    const entries = await Promise.all(tables.map(async (name) => {
+      const coll = this.db.collection(name);
+      const { count, size } = await coll.stats();
+      return [coll.collectionName, { count, size }];
+    }));
+    return Object.fromEntries(entries);
+  }
+  async stats() {
+    const [{ totalSize }, tables] = await Promise.all([
+      this.db.stats(),
+      this._collStats()
+    ]);
+    return { size: totalSize, tables };
+  }
   async get(name, query, modifier) {
-    const filter =
+    const filter = this._createFilter(name, query);
     let cursor = this.db.collection(name).find(filter);
-    const { fields, limit, offset = 0 } = import_koishi2.Query.resolveModifier(modifier);
+    const { fields, limit, offset = 0, sort } = import_koishi2.Query.resolveModifier(modifier);
     cursor = cursor.project(__spreadValues({ _id: 0 }, Object.fromEntries((fields != null ? fields : []).map((key) => [key, 1]))));
     if (offset)
       cursor = cursor.skip(offset);
     if (limit)
       cursor = cursor.limit(offset + limit);
+    if (sort)
+      cursor = cursor.sort(sort);
     return await cursor.toArray();
-  }
-  async set(name, query,
-
-
-
+  }
+  async set(name, query, update) {
+    await this.tasks[name];
+    const { primary } = this.ctx.model.config[name];
+    const indexFields = (0, import_koishi2.makeArray)(primary);
+    const updateFields = new Set(Object.keys(update).map((key) => key.split(".", 1)[0]));
+    const filter = this._createFilter(name, query);
+    const coll = this.db.collection(name);
+    const original = await coll.find(filter).toArray();
+    if (!original.length)
+      return;
+    const bulk = coll.initializeUnorderedBulkOp();
+    for (const item of original) {
+      bulk.find((0, import_koishi2.pick)(item, indexFields)).updateOne({ $set: (0, import_koishi2.pick)((0, import_orm_utils.executeUpdate)(item, update), updateFields) });
+    }
+    await bulk.execute();
+  }
   async remove(name, query) {
-    const filter =
+    const filter = this._createFilter(name, query);
     await this.db.collection(name).deleteMany(filter);
-  }
+  }
+  queue(name, callback) {
+    return this.tasks[name] = Promise.resolve(this.tasks[name]).catch(import_koishi2.noop).then(callback);
+  }
   async create(name, data) {
-    const
-
-
-
-
-
-
+    const coll = this.db.collection(name);
+    return this.queue(name, async () => {
+      const { primary, fields, autoInc } = this.ctx.model.config[name];
+      if (autoInc && !Array.isArray(primary) && !(primary in data)) {
+        const [latest] = await coll.find().sort(primary, -1).limit(1).toArray();
+        data[primary] = latest ? +latest[primary] + 1 : 1;
+        if (import_koishi2.Model.Field.string.includes(fields[primary].type)) {
+          data[primary] += "";
+        }
       }
-
-
-
-
-
-
-
-
+      const copy = __spreadValues(__spreadValues({}, this.ctx.model.create(name)), data);
+      try {
+        await coll.insertOne(copy);
+        delete copy._id;
+        return copy;
+      } catch (err) {
+        if (err instanceof import_mongodb.MongoError && err.code === 11e3) {
+          throw new import_koishi2.KoishiError(err.message, "database.duplicate-entry");
+        }
+        throw err;
+      }
+    });
+  }
   async upsert(name, data, keys) {
     if (!data.length)
       return;
     if (!keys)
-      keys =
-
-
-
-
+      keys = this.ctx.model.config[name].primary;
+    const indexFields = (0, import_koishi2.makeArray)(keys);
+    await this.tasks[name];
+    const coll = this.db.collection(name);
+    const original = await coll.find({ $or: data.map((item) => (0, import_koishi2.pick)(item, indexFields)) }).toArray();
+    const bulk = coll.initializeUnorderedBulkOp();
+    for (const update of data) {
+      const item = original.find((item2) => indexFields.every((key) => item2[key].valueOf() === update[key].valueOf()));
+      if (item) {
+        const updateFields = new Set(Object.keys(update).map((key) => key.split(".", 1)[0]));
+        const override = (0, import_koishi2.omit)((0, import_koishi2.pick)((0, import_orm_utils.executeUpdate)(item, update), updateFields), indexFields);
+        bulk.find((0, import_koishi2.pick)(item, indexFields)).updateOne({ $set: override });
+      } else {
+        bulk.insert((0, import_orm_utils.executeUpdate)(this.ctx.model.create(name), update));
+      }
     }
     await bulk.execute();
-  }
+  }
   async aggregate(name, fields, query) {
-
-
-
-
-
-
-
+    if (!Object.keys(fields).length)
+      return {};
+    const $match = this._createFilter(name, query);
+    const aggrs = [];
+    fields = (0, import_koishi2.valueMap)(fields, (value) => transformEval(value, aggrs));
+    const stages = aggrs.map((pipeline) => {
+      pipeline.unshift({ $match });
+      return { $unionWith: { coll: name, pipeline } };
+    });
+    stages.unshift({ $match: { _id: null } });
+    const results = await this.db.collection(name).aggregate(stages).toArray();
+    const data = Object.assign({}, ...results);
+    return (0, import_koishi2.valueMap)(fields, (value) => (0, import_orm_utils.executeEval)(data, value));
   }
-}
+};
+__name(MongoDatabase, "MongoDatabase");
+(function(MongoDatabase2) {
+  MongoDatabase2.name = "database-mongo";
+  MongoDatabase2.Config = import_koishi2.Schema.object({
+    protocol: import_koishi2.Schema.string().description("要使用的协议名。").default("mongodb"),
+    host: import_koishi2.Schema.string().description("要连接到的主机名。").default("localhost"),
+    port: import_koishi2.Schema.number().description("要连接到的端口号。"),
+    username: import_koishi2.Schema.string().description("要使用的用户名。"),
+    password: import_koishi2.Schema.string().description("要使用的密码。"),
+    database: import_koishi2.Schema.string().description("要访问的数据库名。").default("koishi")
+  });
+})(MongoDatabase || (MongoDatabase = {}));
 var src_default = MongoDatabase;
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {});
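The new `aggregate` ties those pieces together in a single round trip: the base query deliberately matches nothing (`{ _id: null }`), each hoisted pipeline is prefixed with the resolved `$match` filter and attached as a `$unionWith` stage, and the one-document results are merged before `executeEval` substitutes the `tempN` placeholders back into the requested fields. A hypothetical call (table and field names are illustrative; `db` is the MongoDatabase instance):

    // Internally builds roughly:
    //   [{ $match: { _id: null } },
    //    { $unionWith: { coll: 'user', pipeline: [{ $match }, ...temp0 stages] } },
    //    { $unionWith: { coll: 'user', pipeline: [{ $match }, ...temp1 stages] } }]
    const result = await db.aggregate('user', {
      total: { $count: 'id' },
      lowest: { $min: 'id' },
    }, {})

Note that `$unionWith` requires MongoDB 4.4 or newer.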
package/lib/index.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../src/index.ts", "../src/utils.ts"],
-
"sourcesContent": ["import { MongoClient, Db, Collection } from 'mongodb'\nimport { Context, Channel, Database, User, Tables as KoishiTables, makeArray, Schema, valueMap, pick, omit, Query } from 'koishi'\nimport { URLSearchParams } from 'url'\nimport { createFilter, transformEval } from './utils'\n\ndeclare module 'koishi' {\n interface Database {\n mongo: MongoDatabase\n }\n\n interface Modules {\n 'database-mongo': typeof import('.')\n }\n}\n\ntype TableType = keyof Tables\n\nexport interface Tables extends KoishiTables {}\n\nclass MongoDatabase extends Database {\n public client: MongoClient\n public db: Db\n\n mongo = this\n\n user: Collection<User>\n channel: Collection<Channel>\n\n constructor(public ctx: Context, private config: MongoDatabase.Config) {\n super(ctx)\n }\n\n async start() {\n const mongourl = this.config.uri || this.connectionStringFromConfig()\n this.client = await MongoClient.connect(\n mongourl, { useNewUrlParser: true, useUnifiedTopology: true },\n )\n this.db = this.client.db(this.config.database)\n if (this.config.prefix) {\n this.db.collection = ((func, prefix) => function collection<T extends TableType>(name: T) {\n return func(`${prefix}.${name}`)\n })(this.db.collection.bind(this.db), this.config.prefix)\n }\n this.user = this.db.collection('user')\n this.channel = this.db.collection('channel')\n await this.channel.createIndex({ type: 1, pid: 1 }, { unique: true })\n\n for (const name in KoishiTables.config) {\n const { primary } = KoishiTables.config[name]\n const col = this.db.collection(name)\n await col.createIndex(Object.fromEntries(makeArray(primary).map(K => [K, 1])), { unique: true })\n }\n }\n\n collection<T extends TableType>(name: T): Collection<Tables[T]> {\n return this.db.collection(name)\n }\n\n stop() {\n return this.client.close()\n }\n\n connectionStringFromConfig() {\n const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config\n let mongourl = `${protocol}://`\n if (username) mongourl += `${encodeURIComponent(username)}${password ? `:${encodeURIComponent(password)}` : ''}@`\n mongourl += `${host}${port ? 
`:${port}` : ''}/${authDatabase || name}`\n if (connectOptions) {\n const params = new URLSearchParams(connectOptions)\n mongourl += `?${params}`\n }\n return mongourl\n }\n}\n\nnamespace MongoDatabase {\n export const name = 'database-mongo'\n\n export interface Config {\n username?: string\n password?: string\n protocol?: string\n host?: string\n port?: number\n /** database name */\n database?: string\n prefix?: string\n /** default auth database */\n authDatabase?: string\n connectOptions?: ConstructorParameters<typeof URLSearchParams>[0]\n /** connection string (will overwrite all configs except 'name' and 'prefix') */\n uri?: string\n }\n\n export const Config = Schema.object({\n protocol: Schema.string().description('要使用的协议名。').default('mongodb'),\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.number().description('要连接到的端口号。'),\n username: Schema.string().description('要使用的用户名。'),\n password: Schema.string().description('要使用的密码。'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n prefix: Schema.string().description('使用的表名前缀。当配置了这一项时,所有通过 Koishi 创建的表名都会以这个配置项为前缀。'),\n })\n}\n\nDatabase.extend(MongoDatabase, {\n async drop(table) {\n if (table) {\n await this.db.collection(table).drop()\n } else {\n const collections = await this.db.collections()\n await Promise.all(collections.map(c => c.drop()))\n }\n },\n\n async get(name, query, modifier) {\n const filter = createFilter(name, query)\n let cursor = this.db.collection(name).find(filter)\n const { fields, limit, offset = 0 } = Query.resolveModifier(modifier)\n cursor = cursor.project({ _id: 0, ...Object.fromEntries((fields ?? []).map(key => [key, 1])) })\n if (offset) cursor = cursor.skip(offset)\n if (limit) cursor = cursor.limit(offset + limit)\n return await cursor.toArray()\n },\n\n async set(name, query, data) {\n const filter = createFilter(name, query)\n await this.db.collection(name).updateMany(filter, { $set: data })\n },\n\n async remove(name, query) {\n const filter = createFilter(name, query)\n await this.db.collection(name).deleteMany(filter)\n },\n\n async create(name, data: any) {\n const table = KoishiTables.config[name]\n const { primary, fields } = table\n if (!Array.isArray(primary) && table.autoInc && !(primary in data)) {\n const [latest] = await this.db.collection(name).find().sort(primary, -1).limit(1).toArray()\n data[primary] = latest ? 
+latest[primary] + 1 : 1\n if (KoishiTables.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n }\n }\n const copy = { ...KoishiTables.create(name), ...data }\n try {\n await this.db.collection(name).insertOne(copy)\n return copy\n } catch {}\n },\n\n async upsert(name, data: any[], keys: string | string[]) {\n if (!data.length) return\n if (!keys) keys = KoishiTables.config[name].primary\n keys = makeArray(keys)\n const bulk = this.db.collection(name).initializeUnorderedBulkOp()\n for (const item of data) {\n bulk.find(pick(item, keys))\n .upsert()\n .updateOne({ $set: omit(item, keys), $setOnInsert: omit(KoishiTables.create(name), [...keys, ...Object.keys(item) as any]) })\n }\n await bulk.execute()\n },\n\n async aggregate(name, fields, query) {\n const $match = createFilter(name, query)\n const [data] = await this.db.collection(name).aggregate([{ $match }, {\n $group: {\n _id: 1,\n ...valueMap(fields, transformEval),\n },\n }]).toArray()\n return data\n },\n})\n\nexport default MongoDatabase\n", "import { TableType, Query, Eval, valueMap } from 'koishi'\nimport { QuerySelector } from 'mongodb'\n\nfunction transformFieldQuery(query: Query.FieldQuery, key: string) {\n // shorthand syntax\n if (typeof query === 'string' || typeof query === 'number' || query instanceof Date) {\n return { $eq: query }\n } else if (Array.isArray(query)) {\n if (!query.length) return\n return { $in: query }\n } else if (query instanceof RegExp) {\n return { $regex: query }\n }\n\n // query operators\n const result: QuerySelector<any> = {}\n for (const prop in query) {\n if (prop === '$el') {\n result.$elemMatch = transformFieldQuery(query[prop], key)\n } else if (prop === '$regexFor') {\n result.$expr = {\n body(data: string, value: string) {\n return new RegExp(data, 'i').test(value)\n },\n args: ['$' + key, query],\n lang: 'js',\n }\n } else {\n result[prop] = query[prop]\n }\n }\n return result\n}\n\nfunction transformQuery(query: Query.Expr) {\n const filter = {}\n for (const key in query) {\n const value = query[key]\n if (key === '$and' || key === '$or') {\n // MongoError: $and/$or/$nor must be a nonempty array\n if (value.length) {\n filter[key] = value.map(transformQuery)\n } else if (key === '$or') {\n return { $nor: [{}] }\n }\n } else if (key === '$not') {\n // MongoError: unknown top level operator: $not\n // https://stackoverflow.com/questions/25270396/mongodb-how-to-invert-query-with-not\n filter['$nor'] = [transformQuery(value)]\n } else if (key === '$expr') {\n filter[key] = transformEval(value)\n } else {\n filter[key] = transformFieldQuery(value, key)\n }\n }\n return filter\n}\n\nexport function createFilter<T extends TableType>(name: T, query: Query<T>) {\n return transformQuery(Query.resolve(name, query))\n}\n\nexport function transformEval(expr: Eval.Numeric | Eval.Aggregation) {\n if (typeof expr === 'string') {\n return '$' + expr\n } else if (typeof expr === 'number' || typeof expr === 'boolean') {\n return expr\n }\n\n return valueMap(expr as any, (value) => {\n if (Array.isArray(value)) {\n return value.map(transformEval)\n } else {\n return transformEval(value)\n }\n })\n}\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,
+
"sourcesContent": ["import { MongoClient, Db, MongoError, IndexDescription } from 'mongodb'\nimport { Context, Database, Tables, makeArray, Schema, pick, omit, Query, Model, Dict, noop, KoishiError, valueMap } from 'koishi'\nimport { URLSearchParams } from 'url'\nimport { executeUpdate, executeEval } from '@koishijs/orm-utils'\nimport { transformQuery, transformEval } from './utils'\n\ndeclare module 'koishi' {\n interface Database {\n mongo: MongoDatabase\n }\n\n interface Modules {\n 'database-mongo': typeof import('.')\n }\n}\n\ntype TableType = keyof Tables\n\nclass MongoDatabase extends Database {\n public client: MongoClient\n public db: Db\n public mongo = this\n private tasks: Dict<Promise<any>> = {}\n\n constructor(public ctx: Context, private config: MongoDatabase.Config) {\n super(ctx)\n }\n\n private connectionStringFromConfig() {\n const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config\n let mongourl = `${protocol}://`\n if (username) mongourl += `${encodeURIComponent(username)}${password ? `:${encodeURIComponent(password)}` : ''}@`\n mongourl += `${host}${port ? `:${port}` : ''}/${authDatabase || name}`\n if (connectOptions) {\n const params = new URLSearchParams(connectOptions)\n mongourl += `?${params}`\n }\n return mongourl\n }\n\n async start() {\n const mongourl = this.config.uri || this.connectionStringFromConfig()\n this.client = await MongoClient.connect(mongourl)\n this.db = this.client.db(this.config.database)\n\n for (const name in this.ctx.model.config) {\n this.tasks[name] = this._syncTable(name)\n }\n\n this.ctx.on('model', (name) => {\n this.tasks[name] = this._syncTable(name)\n })\n }\n\n stop() {\n return this.client.close()\n }\n\n /** synchronize table schema */\n private async _syncTable(name: string) {\n await this.tasks[name]\n const coll = await this.db.createCollection(name).catch(() => this.db.collection(name))\n const { primary, unique } = this.ctx.model.config[name]\n const newSpecs: IndexDescription[] = []\n const oldSpecs = await coll.indexes()\n ;[primary, ...unique].forEach((keys, index) => {\n keys = makeArray(keys)\n const name = (index ? 
'unique:' : 'primary:') + keys.join('+')\n if (oldSpecs.find(spec => spec.name === name)) return\n const key = Object.fromEntries(keys.map(key => [key, 1]))\n newSpecs.push({ name, key, unique: true })\n })\n if (!newSpecs.length) return\n await coll.createIndexes(newSpecs)\n }\n\n private _createFilter(name: string, query: Query) {\n return transformQuery(this.ctx.model.resolveQuery(name, query))\n }\n\n async drop() {\n await Promise.all(Object.keys(this.ctx.model.config).map(name => this.db.dropCollection(name)))\n }\n\n private async _collStats() {\n const tables = Object.keys(this.ctx.model.config)\n const entries = await Promise.all(tables.map(async (name) => {\n const coll = this.db.collection(name)\n const { count, size } = await coll.stats()\n return [coll.collectionName, { count, size }] as const\n }))\n return Object.fromEntries(entries)\n }\n\n async stats() {\n // https://docs.mongodb.com/manual/reference/command/dbStats/#std-label-dbstats-output\n const [{ totalSize }, tables] = await Promise.all([\n this.db.stats(),\n this._collStats(),\n ])\n return { size: totalSize, tables }\n }\n\n async get(name: TableType, query: Query, modifier: Query.Modifier) {\n const filter = this._createFilter(name, query)\n let cursor = this.db.collection(name).find(filter)\n const { fields, limit, offset = 0, sort } = Query.resolveModifier(modifier)\n cursor = cursor.project({ _id: 0, ...Object.fromEntries((fields ?? []).map(key => [key, 1])) })\n if (offset) cursor = cursor.skip(offset)\n if (limit) cursor = cursor.limit(offset + limit)\n if (sort) cursor = cursor.sort(sort)\n return await cursor.toArray() as any\n }\n\n async set(name: TableType, query: Query, update: {}) {\n await this.tasks[name]\n const { primary } = this.ctx.model.config[name]\n const indexFields = makeArray(primary)\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const filter = this._createFilter(name, query)\n const coll = this.db.collection(name)\n const original = await coll.find(filter).toArray()\n if (!original.length) return\n const bulk = coll.initializeUnorderedBulkOp()\n for (const item of original) {\n bulk.find(pick(item, indexFields)).updateOne({ $set: pick(executeUpdate(item, update), updateFields) })\n }\n await bulk.execute()\n }\n\n async remove(name: TableType, query: Query) {\n const filter = this._createFilter(name, query)\n await this.db.collection(name).deleteMany(filter)\n }\n\n private queue(name: TableType, callback: () => Promise<any>) {\n return this.tasks[name] = Promise.resolve(this.tasks[name]).catch(noop).then(callback)\n }\n\n async create(name: TableType, data: any) {\n const coll = this.db.collection(name)\n return this.queue(name, async () => {\n const { primary, fields, autoInc } = this.ctx.model.config[name]\n if (autoInc && !Array.isArray(primary) && !(primary in data)) {\n const [latest] = await coll.find().sort(primary, -1).limit(1).toArray()\n data[primary] = latest ? 
+latest[primary] + 1 : 1\n if (Model.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n }\n }\n const copy = { ...this.ctx.model.create(name), ...data }\n try {\n await coll.insertOne(copy)\n delete copy._id\n return copy\n } catch (err) {\n if (err instanceof MongoError && err.code === 11000) {\n throw new KoishiError(err.message, 'database.duplicate-entry')\n }\n throw err\n }\n })\n }\n\n async upsert(name: TableType, data: any[], keys: string | string[]) {\n if (!data.length) return\n if (!keys) keys = this.ctx.model.config[name].primary\n const indexFields = makeArray(keys)\n await this.tasks[name]\n const coll = this.db.collection(name)\n const original = await coll.find({ $or: data.map(item => pick(item, indexFields)) }).toArray()\n const bulk = coll.initializeUnorderedBulkOp()\n for (const update of data) {\n const item = original.find(item => indexFields.every(key => item[key].valueOf() === update[key].valueOf()))\n if (item) {\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const override = omit(pick(executeUpdate(item, update), updateFields), indexFields)\n bulk.find(pick(item, indexFields)).updateOne({ $set: override })\n } else {\n bulk.insert(executeUpdate(this.ctx.model.create(name), update))\n }\n }\n await bulk.execute()\n }\n\n async aggregate(name: TableType, fields: {}, query: Query) {\n if (!Object.keys(fields).length) return {}\n const $match = this._createFilter(name, query)\n const aggrs: any[][] = []\n fields = valueMap(fields, value => transformEval(value, aggrs))\n const stages = aggrs.map<any>((pipeline) => {\n pipeline.unshift({ $match })\n return { $unionWith: { coll: name, pipeline } }\n })\n stages.unshift({ $match: { _id: null } })\n const results = await this.db.collection(name).aggregate(stages).toArray()\n const data = Object.assign({}, ...results)\n return valueMap(fields, value => executeEval(data, value)) as any\n }\n}\n\nnamespace MongoDatabase {\n export const name = 'database-mongo'\n\n export interface Config {\n username?: string\n password?: string\n protocol?: string\n host?: string\n port?: number\n /** database name */\n database?: string\n /** default auth database */\n authDatabase?: string\n connectOptions?: ConstructorParameters<typeof URLSearchParams>[0]\n /** connection string (will overwrite all configs except 'name') */\n uri?: string\n }\n\n export const Config = Schema.object({\n protocol: Schema.string().description('要使用的协议名。').default('mongodb'),\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.number().description('要连接到的端口号。'),\n username: Schema.string().description('要使用的用户名。'),\n password: Schema.string().description('要使用的密码。'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n })\n}\n\nexport default MongoDatabase\n", "import { Query, valueMap } from 'koishi'\nimport { Filter, FilterOperators } from 'mongodb'\n\nfunction transformFieldQuery(query: Query.FieldQuery, key: string) {\n // shorthand syntax\n if (typeof query === 'string' || typeof query === 'number' || query instanceof Date) {\n return { $eq: query }\n } else if (Array.isArray(query)) {\n if (!query.length) return\n return { $in: query }\n } else if (query instanceof RegExp) {\n return { $regex: query }\n }\n\n // query operators\n const result: FilterOperators<any> = {}\n for (const prop in query) {\n if (prop === '$el') {\n result.$elemMatch = transformFieldQuery(query[prop], key)\n } else if (prop === '$regexFor') {\n result.$expr = {\n 
body(data: string, value: string) {\n return new RegExp(data, 'i').test(value)\n },\n args: ['$' + key, query],\n lang: 'js',\n }\n } else {\n result[prop] = query[prop]\n }\n }\n return result\n}\n\nexport function transformQuery(query: Query.Expr) {\n const filter: Filter<any> = {}\n for (const key in query) {\n const value = query[key]\n if (key === '$and' || key === '$or') {\n // MongoError: $and/$or/$nor must be a nonempty array\n if (value.length) {\n filter[key] = value.map(transformQuery)\n } else if (key === '$or') {\n return { $nor: [{}] }\n }\n } else if (key === '$not') {\n // MongoError: unknown top level operator: $not\n // https://stackoverflow.com/questions/25270396/mongodb-how-to-invert-query-with-not\n filter.$nor = [transformQuery(value)]\n } else if (key === '$expr') {\n filter[key] = transformEval(value)\n } else {\n filter[key] = transformFieldQuery(value, key)\n }\n }\n return filter\n}\n\nfunction transformEvalExpr(expr: any, aggrs?: any[][]) {\n return valueMap(expr as any, (value, key) => {\n if (Array.isArray(value)) {\n return value.map(val => transformEval(val, aggrs))\n } else {\n return transformEval(value, aggrs)\n }\n })\n}\n\nfunction transformAggr(expr: any) {\n if (typeof expr === 'string') {\n return '$' + expr\n }\n return transformEvalExpr(expr)\n}\n\nconst aggrKeys = ['$sum', '$avg', '$min', '$max', '$count']\n\nexport function transformEval(expr: any, aggrs?: any[][]) {\n if (typeof expr === 'number' || typeof expr === 'string' || typeof expr === 'boolean') {\n return expr\n } else if (expr.$) {\n return '$' + expr.$\n }\n\n for (const key of aggrKeys) {\n if (!expr[key]) continue\n const value = transformAggr(expr[key])\n const $ = 'temp' + aggrs.length\n if (key === '$count') {\n aggrs.push([\n { $group: { _id: value } },\n { $group: { _id: null, [$]: { $count: {} } } }\n ])\n } else {\n aggrs.push([{ $group: { _id: null, [$]: { [key]: value } } }])\n }\n return { $ }\n }\n\n return transformEvalExpr(expr, aggrs)\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,qBAA8D;AAC9D,qBAA0H;AAC1H,iBAAgC;AAChC,uBAA2C;;;ACH3C,oBAAgC;AAGhC,6BAA6B,OAAyB,KAAa;AAEjE,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,YAAY,iBAAiB,MAAM;AACnF,WAAO,EAAE,KAAK;AAAA,aACL,MAAM,QAAQ,QAAQ;AAC/B,QAAI,CAAC,MAAM;AAAQ;AACnB,WAAO,EAAE,KAAK;AAAA,aACL,iBAAiB,QAAQ;AAClC,WAAO,EAAE,QAAQ;AAAA;AAInB,QAAM,SAA+B;AACrC,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,OAAO;AAClB,aAAO,aAAa,oBAAoB,MAAM,OAAO;AAAA,eAC5C,SAAS,aAAa;AAC/B,aAAO,QAAQ;AAAA,QACb,KAAK,MAAc,OAAe;AAChC,iBAAO,IAAI,OAAO,MAAM,KAAK,KAAK;AAAA;AAAA,QAEpC,MAAM,CAAC,MAAM,KAAK;AAAA,QAClB,MAAM;AAAA;AAAA,WAEH;AACL,aAAO,QAAQ,MAAM;AAAA;AAAA;AAGzB,SAAO;AAAA;AA5BA;AA+BF,wBAAwB,OAAmB;AAChD,QAAM,SAAsB;AAC5B,aAAW,OAAO,OAAO;AACvB,UAAM,QAAQ,MAAM;AACpB,QAAI,QAAQ,UAAU,QAAQ,OAAO;AAEnC,UAAI,MAAM,QAAQ;AAChB,eAAO,OAAO,MAAM,IAAI;AAAA,iBACf,QAAQ,OAAO;AACxB,eAAO,EAAE,MAAM,CAAC;AAAA;AAAA,eAET,QAAQ,QAAQ;AAGzB,aAAO,OAAO,CAAC,eAAe;AAAA,eACrB,QAAQ,SAAS;AAC1B,aAAO,OAAO,cAAc;AAAA,WACvB;AACL,aAAO,OAAO,oBAAoB,OAAO;AAAA;AAAA;AAG7C,SAAO;AAAA;AArBO;AAwBhB,2BAA2B,MAAW,OAAiB;AACrD,SAAO,4BAAS,MAAa,CAAC,OAAO,QAAQ;AAC3C,QAAI,MAAM,QAAQ,QAAQ;AACxB,aAAO,MAAM,IAAI,SAAO,cAAc,KAAK;AAAA,WACtC;AACL,aAAO,cAAc,OAAO;AAAA;AAAA;AAAA;AALzB;AAUT,uBAAuB,MAAW;AAChC,MAAI,OAAO,SAAS,UAAU;AAC5B,WAAO,MAAM;AAAA;AAEf,SAAO,kBAAkB;AAAA;AAJlB;AAOT,IAAM,WAAW,CAAC,QAAQ,QAAQ,QAAQ,QAAQ;AAE3C,uBAAuB,MAAW,OAAiB;AACxD,MAAI,OAAO,SAAS,YAAY,OAAO,SAAS,YAAY,OAAO,SAAS,WAAW;AACrF,WAAO;AAAA,aACE,KAAK,GAAG;AACjB,WAAO,MAAM,KAAK;AAAA;AAGpB,aAAW,OAAO,UAAU;AAC1B,QAAI,CAAC,KAAK;AAAM;AAChB,UAAM,QAAQ,cAAc,KAAK;AACjC,UAAM,IAAI,SAAS,MAAM;AACzB,QAAI,QAAQ,UAAU;AACpB,YAAM,KAAK;AAAA,QACT,EAAE,QAAQ,EAAE,KAAK;AAAA,QACjB,EAAE,QAAQ,EAAE,KAAK,OAAO,IAAI,EAAE,QAAQ;AAAA;AAAA,WAEnC;AACL,YAAM,KAAK,CAAC,EAAE,QAAQ,EAAE,KAAK,OAAO,IAAI,GAAG,MAAM;AAAA;AAEnD,WAAO,EAAE;AAAA;AAGX,SAAO,kBAAkB,MAAM;AAAA;AAtBjB;;;AD3DhB,kCAA4B,wBAAS;AAAA,EAMnC,YAAmB,KAAsB,QAA8B;AACrE,UAAM;AADW;AAAsB;AAHlC,iBAAQ;AACP,iBAA4B;AAAA;AAAA,EAM5B,6BAA6B;AACnC,UAAM,EAAE,cAAc,gBAAgB,MAAM,UAAU,MAAM,UAAU,MAAM,UAAU,aAAa,KAAK;AACxG,QAAI,WAAW,GAAG;AAClB,QAAI;AAAU,kBAAY,GAAG,mBAAmB,YAAY,WAAW,IAAI,mBAAmB,cAAc;AAC5G,gBAAY,GAAG,OAAO,OAAO,IAAI,SAAS,MAAM,gBAAgB;AAChE,QAAI,gBAAgB;AAClB,YAAM,SAAS,IAAI,2BAAgB;AACnC,kBAAY,IAAI;AAAA;AAElB,WAAO;AAAA;AAAA,QAGH,QAAQ;AACZ,UAAM,WAAW,KAAK,OAAO,OAAO,KAAK;AACzC,SAAK,SAAS,MAAM,2BAAY,QAAQ;AACxC,SAAK,KAAK,KAAK,OAAO,GAAG,KAAK,OAAO;AAErC,eAAW,QAAQ,KAAK,IAAI,MAAM,QAAQ;AACxC,WAAK,MAAM,QAAQ,KAAK,WAAW;AAAA;AAGrC,SAAK,IAAI,GAAG,SAAS,CAAC,SAAS;AAC7B,WAAK,MAAM,QAAQ,KAAK,WAAW;AAAA;AAAA;AAAA,EAIvC,OAAO;AACL,WAAO,KAAK,OAAO;AAAA;AAAA,QAIP,WAAW,MAAc;AACrC,UAAM,KAAK,MAAM;AACjB,UAAM,OAAO,MAAM,KAAK,GAAG,iBAAiB,MAAM,MAAM,MAAM,KAAK,GAAG,WAAW;AACjF,UAAM,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,OAAO;AAClD,UAAM,WAA+B;AACrC,UAAM,WAAW,MAAM,KAAK;AAC3B,KAAC,SAAS,GAAG,QAAQ,QAAQ,CAAC,MAAM,UAAU;AAC7C,aAAO,8BAAU;AACjB,YAAM,QAAQ,SAAQ,YAAY,cAAc,KAAK,KAAK;AAC1D,UAAI,SAAS,KAAK,UAAQ,KAAK,SAAS;AAAO;AAC/C,YAAM,MAAM,OAAO,YAAY,KAAK,IAAI,UAAO,CAAC,MAAK;AACrD,eAAS,KAAK,EAAE,aAAM,KAAK,QAAQ;AAAA;AAErC,QAAI,CAAC,SAAS;AAAQ;AACtB,UAAM,KAAK,cAAc;AAAA;AAAA,EAGnB,cAAc,MAAc,OAAc;AAChD,WAAO,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAAA;AAAA,QAGpD,OAAO;AACX,UAAM,QAAQ,IAAI,OAAO,KAAK,KAAK,IAAI,MAAM,QAAQ,IAAI,UAAQ,KAAK,GAAG,eAAe;AAAA;AAAA,QAG5E,aAAa;AACzB,UAAM,SAAS,OAAO,KAAK,KAAK,IAAI,MAAM;AAC1C,UAAM,UAAU,MAAM,QAAQ,IAAI,OAAO,IAAI,OAAO,SAAS;AAC3D,YAAM,OAAO,KAAK,GAAG,WAAW;AAChC,YAAM,EAAE,OAAO,SAAS,MAAM,KAAK;AACnC,aAAO,CAAC,KAAK,gBAAgB,EAAE,OAAO;AAAA;AAExC,WAAO,OAAO,YAAY;AAAA;AAAA,QAGtB,QAAQ;AAEZ,UAAM,CAAC,EAAE,aAAa,UAAU,MAAM,QAAQ,IAAI;AAAA,MAChD,KAAK,GAAG;AAAA,MACR,KAAK;AAAA;AAEP,WAAO,EA
AE,MAAM,WAAW;AAAA;AAAA,QAGtB,IAAI,MAAiB,OAAc,UAA0B;AACjE,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,QAAI,SAAS,KAAK,GAAG,WAAW,MAAM,KAAK;AAC3C,UAAM,EAAE,QAAQ,OAAO,SAAS,GAAG,SAAS,qBAAM,gBAAgB;AAClE,aAAS,OAAO,QAAQ,iBAAE,KAAK,KAAM,OAAO,YAAa,2BAAU,IAAI,IAAI,SAAO,CAAC,KAAK;AACxF,QAAI;AAAQ,eAAS,OAAO,KAAK;AACjC,QAAI;AAAO,eAAS,OAAO,MAAM,SAAS;AAC1C,QAAI;AAAM,eAAS,OAAO,KAAK;AAC/B,WAAO,MAAM,OAAO;AAAA;AAAA,QAGhB,IAAI,MAAiB,OAAc,QAAY;AACnD,UAAM,KAAK,MAAM;AACjB,UAAM,EAAE,YAAY,KAAK,IAAI,MAAM,OAAO;AAC1C,UAAM,cAAc,8BAAU;AAC9B,UAAM,eAAe,IAAI,IAAI,OAAO,KAAK,QAAQ,IAAI,SAAO,IAAI,MAAM,KAAK,GAAG;AAC9E,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,UAAM,WAAW,MAAM,KAAK,KAAK,QAAQ;AACzC,QAAI,CAAC,SAAS;AAAQ;AACtB,UAAM,OAAO,KAAK;AAClB,eAAW,QAAQ,UAAU;AAC3B,WAAK,KAAK,yBAAK,MAAM,cAAc,UAAU,EAAE,MAAM,yBAAK,oCAAc,MAAM,SAAS;AAAA;AAEzF,UAAM,KAAK;AAAA;AAAA,QAGP,OAAO,MAAiB,OAAc;AAC1C,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAM,KAAK,GAAG,WAAW,MAAM,WAAW;AAAA;AAAA,EAGpC,MAAM,MAAiB,UAA8B;AAC3D,WAAO,KAAK,MAAM,QAAQ,QAAQ,QAAQ,KAAK,MAAM,OAAO,MAAM,qBAAM,KAAK;AAAA;AAAA,QAGzE,OAAO,MAAiB,MAAW;AACvC,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,WAAO,KAAK,MAAM,MAAM,YAAY;AAClC,YAAM,EAAE,SAAS,QAAQ,YAAY,KAAK,IAAI,MAAM,OAAO;AAC3D,UAAI,WAAW,CAAC,MAAM,QAAQ,YAAY,CAAE,YAAW,OAAO;AAC5D,cAAM,CAAC,UAAU,MAAM,KAAK,OAAO,KAAK,SAAS,IAAI,MAAM,GAAG;AAC9D,aAAK,WAAW,SAAS,CAAC,OAAO,WAAW,IAAI;AAChD,YAAI,qBAAM,MAAM,OAAO,SAAS,OAAO,SAAS,OAAO;AACrD,eAAK,YAAY;AAAA;AAAA;AAGrB,YAAM,OAAO,kCAAK,KAAK,IAAI,MAAM,OAAO,QAAU;AAClD,UAAI;AACF,cAAM,KAAK,UAAU;AACrB,eAAO,KAAK;AACZ,eAAO;AAAA,eACA,KAAP;AACA,YAAI,eAAe,6BAAc,IAAI,SAAS,MAAO;AACnD,gBAAM,IAAI,2BAAY,IAAI,SAAS;AAAA;AAErC,cAAM;AAAA;AAAA;AAAA;AAAA,QAKN,OAAO,MAAiB,MAAa,MAAyB;AAClE,QAAI,CAAC,KAAK;AAAQ;AAClB,QAAI,CAAC;AAAM,aAAO,KAAK,IAAI,MAAM,OAAO,MAAM;AAC9C,UAAM,cAAc,8BAAU;AAC9B,UAAM,KAAK,MAAM;AACjB,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,UAAM,WAAW,MAAM,KAAK,KAAK,EAAE,KAAK,KAAK,IAAI,UAAQ,yBAAK,MAAM,iBAAiB;AACrF,UAAM,OAAO,KAAK;AAClB,eAAW,UAAU,MAAM;AACzB,YAAM,OAAO,SAAS,KAAK,WAAQ,YAAY,MAAM,SAAO,MAAK,KAAK,cAAc,OAAO,KAAK;AAChG,UAAI,MAAM;AACR,cAAM,eAAe,IAAI,IAAI,OAAO,KAAK,QAAQ,IAAI,SAAO,IAAI,MAAM,KAAK,GAAG;AAC9E,cAAM,WAAW,yBAAK,yBAAK,oCAAc,MAAM,SAAS,eAAe;AACvE,aAAK,KAAK,yBAAK,MAAM,cAAc,UAAU,EAAE,MAAM;AAAA,aAChD;AACL,aAAK,OAAO,oCAAc,KAAK,IAAI,MAAM,OAAO,OAAO;AAAA;AAAA;AAG3D,UAAM,KAAK;AAAA;AAAA,QAGP,UAAU,MAAiB,QAAY,OAAc;AACzD,QAAI,CAAC,OAAO,KAAK,QAAQ;AAAQ,aAAO;AACxC,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAM,QAAiB;AACvB,aAAS,6BAAS,QAAQ,WAAS,cAAc,OAAO;AACxD,UAAM,SAAS,MAAM,IAAS,CAAC,aAAa;AAC1C,eAAS,QAAQ,EAAE;AACnB,aAAO,EAAE,YAAY,EAAE,MAAM,MAAM;AAAA;AAErC,WAAO,QAAQ,EAAE,QAAQ,EAAE,KAAK;AAChC,UAAM,UAAU,MAAM,KAAK,GAAG,WAAW,MAAM,UAAU,QAAQ;AACjE,UAAM,OAAO,OAAO,OAAO,IAAI,GAAG;AAClC,WAAO,6BAAS,QAAQ,WAAS,kCAAY,MAAM;AAAA;AAAA;AAnLvD;AAuLA,UAAU,gBAAV;AACS,EAAM,sBAAO;AAiBb,EAAM,wBAAS,sBAAO,OAAO;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY,YAAY,QAAQ;AAAA,IAC1D,MAAM,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA,IACvD,MAAM,sBAAO,SAAS,YAAY;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA;AAAA,GAxBrD;AA4BV,IAAO,cAAQ;",
   "names": []
 }
package/lib/utils.d.ts
ADDED
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@koishijs/plugin-database-mongo",
   "description": "MongoDB support for Koishi",
-  "version": "3.0.0-beta.4",
+  "version": "3.0.0-rc.0",
   "main": "lib/index.js",
   "typings": "lib/index.d.ts",
   "files": [
@@ -32,14 +32,14 @@
     "mysql"
   ],
   "devDependencies": {
-    "@koishijs/plugin-mock": "^1.0.0-
-    "@koishijs/test-utils": "^8.0.0-
+    "@koishijs/plugin-mock": "^1.0.0-rc.0",
+    "@koishijs/test-utils": "^8.0.0-rc.0"
   },
   "peerDependencies": {
-    "koishi": "^4.0.0-
+    "koishi": "^4.0.0-rc.0"
   },
   "dependencies": {
-    "@
-    "mongodb": "^
+    "@koishijs/orm-utils": "^1.0.0-rc.0",
+    "mongodb": "^4.2.2"
   }
 }
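The dependency block mirrors the code changes: `@koishijs/orm-utils` supplies the `executeUpdate`/`executeEval` helpers imported in index.js, and mongodb moves to the 4.x driver, which is why the `useNewUrlParser`/`useUnifiedTopology` connect flags disappeared above. A sketch of the two helpers' contracts as inferred from their call sites in this diff (illustrative data, not the package's documentation):

    import { executeUpdate, executeEval } from '@koishijs/orm-utils'

    // executeUpdate(item, update) returns the row with the update applied;
    // the call sites split update keys on '.', so dotted paths are expected
    const row = executeUpdate({ id: 1, profile: { name: 'a' } }, { 'profile.name': 'b' })

    // executeEval(data, expr) resolves the { $: 'tempN' } placeholders that
    // transformEval left behind, against the merged aggregation output
    const total = executeEval({ temp0: 42 }, { $: 'temp0' })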