@koishijs/plugin-database-mongo 3.0.0-beta.4 → 3.0.0-beta.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.d.ts +16 -11
- package/lib/index.js +88 -73
- package/lib/index.js.map +2 -2
- package/package.json +4 -4
package/lib/index.d.ts
CHANGED

@@ -1,8 +1,8 @@
 /// <reference types="node" />
-import {
-import { MongoClient, Db
+import { Query, Eval, Context, Database, Tables as KoishiTables, Schema } from 'koishi';
+import { MongoClient, Db } from 'mongodb';
 import { URLSearchParams } from 'url';
-export function
+export function transformQuery(query: Query.Expr): {};
 export function transformEval(expr: Eval.Numeric | Eval.Aggregation): any;
 declare module 'koishi' {
     interface Database {
@@ -21,13 +21,21 @@ declare class MongoDatabase extends Database {
     client: MongoClient;
     db: Db;
     mongo: this;
-
-    channel: Collection<Channel>;
+    private tasks;
     constructor(ctx: Context, config: MongoDatabase.Config);
+    private connectionStringFromConfig;
     start(): Promise<void>;
-    collection<T extends TableType>(name: T): Collection<Tables[T]>;
     stop(): Promise<void>;
-
+    /** synchronize table schema */
+    private _syncTable;
+    drop(name: TableType): Promise<void>;
+    get(name: TableType, query: Query, modifier: Query.Modifier): Promise<any[]>;
+    set(name: TableType, query: Query, data: any): Promise<void>;
+    remove(name: TableType, query: Query): Promise<void>;
+    private queue;
+    create(name: TableType, data: any): Promise<any>;
+    upsert(name: TableType, data: any[], keys: string | string[]): Promise<void>;
+    aggregate(name: TableType, fields: {}, query: Query): Promise<any>;
 }
 declare namespace MongoDatabase {
     const name = "database-mongo";
@@ -39,11 +47,10 @@ declare namespace MongoDatabase {
         port?: number;
         /** database name */
         database?: string;
-        prefix?: string;
         /** default auth database */
         authDatabase?: string;
         connectOptions?: ConstructorParameters<typeof URLSearchParams>[0];
-        /** connection string (will overwrite all configs except 'name'
+        /** connection string (will overwrite all configs except 'name') */
         uri?: string;
     }
     const Config: Schema<{
@@ -53,7 +60,6 @@ declare namespace MongoDatabase {
         username?: string;
         password?: string;
         database?: string;
-        prefix?: string;
     } & {
         [x: string]: any;
     }, {
@@ -63,7 +69,6 @@ declare namespace MongoDatabase {
         username?: string;
         password?: string;
         database?: string;
-        prefix?: string;
     } & {
         [x: string]: any;
     }>;
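The reworked declarations above drop the per-table collection accessors (`channel`, `collection()`) and instead declare the generic CRUD methods directly on the database instance. A minimal sketch of calling that surface from plugin code, assuming a Koishi `Context` and the built-in `user` table; the function name and the concrete query values are illustrative, not part of this package:

import { Context } from 'koishi'

// Sketch only: exercises the methods now declared on MongoDatabase
// (get / set / upsert). The query values below are made up.
export async function demo(ctx: Context) {
  // get(name, query, modifier): the modifier may simply be a field list
  const users = await ctx.database.get('user', { authority: { $gte: 2 } }, ['id', 'authority'])

  // set(name, query, data): updates every row matching the query
  await ctx.database.set('user', { authority: { $gte: 2 } }, { authority: 3 })

  // upsert(name, data, keys?): keys default to the table's primary key
  await ctx.database.upsert('user', users)

  return users
}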
package/lib/index.js
CHANGED

@@ -97,10 +97,6 @@ function transformQuery(query) {
   return filter;
 }
 __name(transformQuery, "transformQuery");
-function createFilter(name, query) {
-  return transformQuery(import_koishi.Query.resolve(name, query));
-}
-__name(createFilter, "createFilter");
 function transformEval(expr) {
   if (typeof expr === "string") {
     return "$" + expr;
@@ -124,30 +120,7 @@ var MongoDatabase = class extends import_koishi2.Database {
     this.ctx = ctx;
     this.config = config;
     this.mongo = this;
-
-  async start() {
-    const mongourl = this.config.uri || this.connectionStringFromConfig();
-    this.client = await import_mongodb.MongoClient.connect(mongourl, { useNewUrlParser: true, useUnifiedTopology: true });
-    this.db = this.client.db(this.config.database);
-    if (this.config.prefix) {
-      this.db.collection = ((func, prefix) => /* @__PURE__ */ __name(function collection(name) {
-        return func(`${prefix}.${name}`);
-      }, "collection"))(this.db.collection.bind(this.db), this.config.prefix);
-    }
-    this.user = this.db.collection("user");
-    this.channel = this.db.collection("channel");
-    await this.channel.createIndex({ type: 1, pid: 1 }, { unique: true });
-    for (const name in import_koishi2.Tables.config) {
-      const { primary } = import_koishi2.Tables.config[name];
-      const col = this.db.collection(name);
-      await col.createIndex(Object.fromEntries((0, import_koishi2.makeArray)(primary).map((K) => [K, 1])), { unique: true });
-    }
-  }
-  collection(name) {
-    return this.db.collection(name);
-  }
-  stop() {
-    return this.client.close();
+    this.tasks = {};
   }
   connectionStringFromConfig() {
     const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config;
@@ -161,31 +134,51 @@ var MongoDatabase = class extends import_koishi2.Database {
     }
     return mongourl;
   }
-
-
-(
-
-
-
-
-
-
-
-
-
-
-}
-
-
-
-
+  async start() {
+    const mongourl = this.config.uri || this.connectionStringFromConfig();
+    this.client = await import_mongodb.MongoClient.connect(mongourl, {
+      useNewUrlParser: true,
+      useUnifiedTopology: true
+    });
+    this.db = this.client.db(this.config.database);
+    for (const name in this.ctx.model.config) {
+      this.tasks[name] = this._syncTable(name);
+    }
+    this.ctx.on("model", (name) => {
+      this.tasks[name] = this._syncTable(name);
+    });
+  }
+  stop() {
+    return this.client.close();
+  }
+  async _syncTable(name) {
+    await this.tasks[name];
+    const col = await this.db.createCollection(name).catch(() => this.db.collection(name));
+    const { primary, unique } = this.ctx.model.config[name];
+    const newSpecs = [];
+    const oldSpecs = await col.indexes();
+    [primary, ...unique].forEach((keys, index) => {
+      keys = (0, import_koishi2.makeArray)(keys);
+      const name2 = (index ? "unique:" : "primary:") + keys.join("+");
+      if (oldSpecs.find((spec) => spec.name === name2))
+        return;
+      const key = Object.fromEntries(keys.map((key2) => [key2, 1]));
+      newSpecs.push({ name: name2, key, unique: true });
+    });
+    if (!newSpecs.length)
+      return;
+    await col.createIndexes(newSpecs);
+  }
+  async drop(name) {
+    if (name) {
+      await this.db.collection(name).drop();
     } else {
       const collections = await this.db.collections();
       await Promise.all(collections.map((c) => c.drop()));
     }
-  }
+  }
   async get(name, query, modifier) {
-    const filter =
+    const filter = transformQuery(this.ctx.model.resolveQuery(name, query));
     let cursor = this.db.collection(name).find(filter);
     const { fields, limit, offset = 0 } = import_koishi2.Query.resolveModifier(modifier);
     cursor = cursor.project(__spreadValues({ _id: 0 }, Object.fromEntries((fields != null ? fields : []).map((key) => [key, 1]))));
@@ -194,46 +187,56 @@ import_koishi2.Database.extend(MongoDatabase, {
     if (limit)
       cursor = cursor.limit(offset + limit);
     return await cursor.toArray();
-  }
+  }
   async set(name, query, data) {
-
+    await this.tasks[name];
+    const filter = transformQuery(this.ctx.model.resolveQuery(name, query));
     await this.db.collection(name).updateMany(filter, { $set: data });
-  }
+  }
   async remove(name, query) {
-    const filter =
+    const filter = transformQuery(this.ctx.model.resolveQuery(name, query));
     await this.db.collection(name).deleteMany(filter);
-  }
+  }
+  queue(name, callback) {
+    return this.tasks[name] = Promise.resolve(this.tasks[name]).catch(import_koishi2.noop).then(callback);
+  }
   async create(name, data) {
-
-
-
-
-
-
-
+    return this.queue(name, async () => {
+      const { primary, fields, autoInc } = this.ctx.model.config[name];
+      if (autoInc && !Array.isArray(primary) && !(primary in data)) {
+        const [latest] = await this.db.collection(name).find().sort(primary, -1).limit(1).toArray();
+        data[primary] = latest ? +latest[primary] + 1 : 1;
+        if (import_koishi2.Model.Field.string.includes(fields[primary].type)) {
+          data[primary] += "";
+        }
       }
-
-
-
-
-
-
-
-
+      const copy = __spreadValues(__spreadValues({}, this.ctx.model.create(name)), data);
+      try {
+        await this.db.collection(name).insertOne(copy);
+        return copy;
+      } catch (err) {
+        if (err instanceof import_mongodb.MongoError && err.code === 11e3) {
+          err[Symbol.for("koishi.error-type")] = "duplicate-entry";
+        }
+        throw err;
+      }
+    });
+  }
   async upsert(name, data, keys) {
     if (!data.length)
       return;
     if (!keys)
-      keys =
+      keys = this.ctx.model.config[name].primary;
     keys = (0, import_koishi2.makeArray)(keys);
+    await this.tasks[name];
     const bulk = this.db.collection(name).initializeUnorderedBulkOp();
     for (const item of data) {
-      bulk.find((0, import_koishi2.pick)(item, keys)).upsert().updateOne({ $set: (0, import_koishi2.omit)(item, keys), $setOnInsert: (0, import_koishi2.omit)(
+      bulk.find((0, import_koishi2.pick)(item, keys)).upsert().updateOne({ $set: (0, import_koishi2.omit)(item, keys), $setOnInsert: (0, import_koishi2.omit)(this.ctx.model.create(name), [...keys, ...Object.keys(item)]) });
     }
     await bulk.execute();
-  }
+  }
   async aggregate(name, fields, query) {
-    const $match =
+    const $match = transformQuery(this.ctx.model.resolveQuery(name, query));
     const [data] = await this.db.collection(name).aggregate([{ $match }, {
       $group: __spreadValues({
         _id: 1
@@ -241,7 +244,19 @@ import_koishi2.Database.extend(MongoDatabase, {
     }]).toArray();
     return data;
   }
-}
+};
+__name(MongoDatabase, "MongoDatabase");
+(function(MongoDatabase2) {
+  MongoDatabase2.name = "database-mongo";
+  MongoDatabase2.Config = import_koishi2.Schema.object({
+    protocol: import_koishi2.Schema.string().description("要使用的协议名。").default("mongodb"),
+    host: import_koishi2.Schema.string().description("要连接到的主机名。").default("localhost"),
+    port: import_koishi2.Schema.number().description("要连接到的端口号。"),
+    username: import_koishi2.Schema.string().description("要使用的用户名。"),
+    password: import_koishi2.Schema.string().description("要使用的密码。"),
+    database: import_koishi2.Schema.string().description("要访问的数据库名。").default("koishi")
+  });
+})(MongoDatabase || (MongoDatabase = {}));
 var src_default = MongoDatabase;
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {});
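The new `_syncTable` above derives one unique index per key set and names it `primary:<keys>` or `unique:<keys>`, so a later start-up can skip specs whose names already appear in `col.indexes()`. A standalone sketch of that spec-building step, using an invented table config of `{ primary: 'id', unique: [['platform', 'pid']] }` (the existing-index check and the createCollection/createIndexes calls are omitted here):

// Mirrors the forEach loop in _syncTable above; the sample key sets
// in the usage note below are invented for illustration.
function buildIndexSpecs(primary: string | string[], unique: (string | string[])[]) {
  return [primary, ...unique].map((keys, index) => {
    const list = Array.isArray(keys) ? keys : [keys]
    return {
      // the first entry is the primary key, the rest are unique constraints
      name: (index ? 'unique:' : 'primary:') + list.join('+'),
      key: Object.fromEntries(list.map(key => [key, 1])),
      unique: true,
    }
  })
}

// buildIndexSpecs('id', [['platform', 'pid']]) yields:
// [{ name: 'primary:id', key: { id: 1 }, unique: true },
//  { name: 'unique:platform+pid', key: { platform: 1, pid: 1 }, unique: true }]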
package/lib/index.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../src/index.ts", "../src/utils.ts"],
-"sourcesContent": ["import { MongoClient, Db,
-"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,
+
"sourcesContent": ["import { MongoClient, Db, IndexSpecification, MongoError } from 'mongodb'\nimport { Context, Database, Tables as KoishiTables, makeArray, Schema, valueMap, pick, omit, Query, Model, Dict, noop } from 'koishi'\nimport { URLSearchParams } from 'url'\nimport { transformQuery, transformEval } from './utils'\n\ndeclare module 'koishi' {\n interface Database {\n mongo: MongoDatabase\n }\n\n interface Modules {\n 'database-mongo': typeof import('.')\n }\n}\n\ntype TableType = keyof Tables\n\nexport interface Tables extends KoishiTables {}\n\nclass MongoDatabase extends Database {\n public client: MongoClient\n public db: Db\n public mongo = this\n private tasks: Dict<Promise<any>> = {}\n\n constructor(public ctx: Context, private config: MongoDatabase.Config) {\n super(ctx)\n }\n\n private connectionStringFromConfig() {\n const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config\n let mongourl = `${protocol}://`\n if (username) mongourl += `${encodeURIComponent(username)}${password ? `:${encodeURIComponent(password)}` : ''}@`\n mongourl += `${host}${port ? `:${port}` : ''}/${authDatabase || name}`\n if (connectOptions) {\n const params = new URLSearchParams(connectOptions)\n mongourl += `?${params}`\n }\n return mongourl\n }\n\n async start() {\n const mongourl = this.config.uri || this.connectionStringFromConfig()\n this.client = await MongoClient.connect(mongourl, {\n useNewUrlParser: true,\n useUnifiedTopology: true,\n })\n this.db = this.client.db(this.config.database)\n\n for (const name in this.ctx.model.config) {\n this.tasks[name] = this._syncTable(name)\n }\n\n this.ctx.on('model', (name) => {\n this.tasks[name] = this._syncTable(name)\n })\n }\n\n stop() {\n return this.client.close()\n }\n\n /** synchronize table schema */\n private async _syncTable(name: string) {\n await this.tasks[name]\n const col = await this.db.createCollection(name).catch(() => this.db.collection(name))\n const { primary, unique } = this.ctx.model.config[name]\n const newSpecs: IndexSpecification[] = []\n const oldSpecs: IndexSpecification[] = await col.indexes()\n ;[primary, ...unique].forEach((keys, index) => {\n keys = makeArray(keys)\n const name = (index ? 'unique:' : 'primary:') + keys.join('+')\n if (oldSpecs.find(spec => spec.name === name)) return\n const key = Object.fromEntries(keys.map(key => [key, 1]))\n newSpecs.push({ name, key, unique: true })\n })\n if (!newSpecs.length) return\n await col.createIndexes(newSpecs)\n }\n\n async drop(name: TableType) {\n if (name) {\n await this.db.collection(name).drop()\n } else {\n const collections = await this.db.collections()\n await Promise.all(collections.map(c => c.drop()))\n }\n }\n\n async get(name: TableType, query: Query, modifier: Query.Modifier) {\n const filter = transformQuery(this.ctx.model.resolveQuery(name, query))\n let cursor = this.db.collection(name).find(filter)\n const { fields, limit, offset = 0 } = Query.resolveModifier(modifier)\n cursor = cursor.project({ _id: 0, ...Object.fromEntries((fields ?? 
[]).map(key => [key, 1])) })\n if (offset) cursor = cursor.skip(offset)\n if (limit) cursor = cursor.limit(offset + limit)\n return await cursor.toArray()\n }\n\n async set(name: TableType, query: Query, data: any) {\n await this.tasks[name]\n const filter = transformQuery(this.ctx.model.resolveQuery(name, query))\n await this.db.collection(name).updateMany(filter, { $set: data })\n }\n\n async remove(name: TableType, query: Query) {\n const filter = transformQuery(this.ctx.model.resolveQuery(name, query))\n await this.db.collection(name).deleteMany(filter)\n }\n\n private queue(name: TableType, callback: () => Promise<any>) {\n return this.tasks[name] = Promise.resolve(this.tasks[name]).catch(noop).then(callback)\n }\n\n async create(name: TableType, data: any) {\n return this.queue(name, async () => {\n const { primary, fields, autoInc } = this.ctx.model.config[name]\n if (autoInc && !Array.isArray(primary) && !(primary in data)) {\n const [latest] = await this.db.collection(name).find().sort(primary, -1).limit(1).toArray()\n data[primary] = latest ? +latest[primary] + 1 : 1\n if (Model.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n }\n }\n const copy = { ...this.ctx.model.create(name), ...data }\n try {\n await this.db.collection(name).insertOne(copy)\n return copy\n } catch (err) {\n if (err instanceof MongoError && err.code === 11000) {\n err[Symbol.for('koishi.error-type')] = 'duplicate-entry'\n }\n throw err\n }\n })\n }\n\n async upsert(name: TableType, data: any[], keys: string | string[]) {\n if (!data.length) return\n if (!keys) keys = this.ctx.model.config[name].primary\n keys = makeArray(keys)\n await this.tasks[name]\n const bulk = this.db.collection(name).initializeUnorderedBulkOp()\n for (const item of data) {\n bulk.find(pick(item, keys))\n .upsert()\n .updateOne({ $set: omit(item, keys), $setOnInsert: omit(this.ctx.model.create(name), [...keys, ...Object.keys(item) as any]) })\n }\n await bulk.execute()\n }\n\n async aggregate(name: TableType, fields: {}, query: Query) {\n const $match = transformQuery(this.ctx.model.resolveQuery(name, query))\n const [data] = await this.db.collection(name).aggregate([{ $match }, {\n $group: {\n _id: 1,\n ...valueMap(fields, transformEval),\n },\n }]).toArray()\n return data\n }\n}\n\nnamespace MongoDatabase {\n export const name = 'database-mongo'\n\n export interface Config {\n username?: string\n password?: string\n protocol?: string\n host?: string\n port?: number\n /** database name */\n database?: string\n /** default auth database */\n authDatabase?: string\n connectOptions?: ConstructorParameters<typeof URLSearchParams>[0]\n /** connection string (will overwrite all configs except 'name') */\n uri?: string\n }\n\n export const Config = Schema.object({\n protocol: Schema.string().description('要使用的协议名。').default('mongodb'),\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.number().description('要连接到的端口号。'),\n username: Schema.string().description('要使用的用户名。'),\n password: Schema.string().description('要使用的密码。'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n })\n}\n\nexport default MongoDatabase\n", "import { Query, Eval, valueMap } from 'koishi'\nimport { QuerySelector } from 'mongodb'\n\nfunction transformFieldQuery(query: Query.FieldQuery, key: string) {\n // shorthand syntax\n if (typeof query === 'string' || typeof query === 'number' || query instanceof Date) {\n return { $eq: query }\n } else if (Array.isArray(query)) {\n if (!query.length) 
return\n return { $in: query }\n } else if (query instanceof RegExp) {\n return { $regex: query }\n }\n\n // query operators\n const result: QuerySelector<any> = {}\n for (const prop in query) {\n if (prop === '$el') {\n result.$elemMatch = transformFieldQuery(query[prop], key)\n } else if (prop === '$regexFor') {\n result.$expr = {\n body(data: string, value: string) {\n return new RegExp(data, 'i').test(value)\n },\n args: ['$' + key, query],\n lang: 'js',\n }\n } else {\n result[prop] = query[prop]\n }\n }\n return result\n}\n\nexport function transformQuery(query: Query.Expr) {\n const filter = {}\n for (const key in query) {\n const value = query[key]\n if (key === '$and' || key === '$or') {\n // MongoError: $and/$or/$nor must be a nonempty array\n if (value.length) {\n filter[key] = value.map(transformQuery)\n } else if (key === '$or') {\n return { $nor: [{}] }\n }\n } else if (key === '$not') {\n // MongoError: unknown top level operator: $not\n // https://stackoverflow.com/questions/25270396/mongodb-how-to-invert-query-with-not\n filter['$nor'] = [transformQuery(value)]\n } else if (key === '$expr') {\n filter[key] = transformEval(value)\n } else {\n filter[key] = transformFieldQuery(value, key)\n }\n }\n return filter\n}\n\nexport function transformEval(expr: Eval.Numeric | Eval.Aggregation) {\n if (typeof expr === 'string') {\n return '$' + expr\n } else if (typeof expr === 'number' || typeof expr === 'boolean') {\n return expr\n }\n\n return valueMap(expr as any, (value) => {\n if (Array.isArray(value)) {\n return value.map(transformEval)\n } else {\n return transformEval(value)\n }\n })\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,qBAAgE;AAChE,qBAA6H;AAC7H,iBAAgC;;;ACFhC,oBAAsC;AAGtC,6BAA6B,OAAyB,KAAa;AAEjE,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,YAAY,iBAAiB,MAAM;AACnF,WAAO,EAAE,KAAK;AAAA,aACL,MAAM,QAAQ,QAAQ;AAC/B,QAAI,CAAC,MAAM;AAAQ;AACnB,WAAO,EAAE,KAAK;AAAA,aACL,iBAAiB,QAAQ;AAClC,WAAO,EAAE,QAAQ;AAAA;AAInB,QAAM,SAA6B;AACnC,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,OAAO;AAClB,aAAO,aAAa,oBAAoB,MAAM,OAAO;AAAA,eAC5C,SAAS,aAAa;AAC/B,aAAO,QAAQ;AAAA,QACb,KAAK,MAAc,OAAe;AAChC,iBAAO,IAAI,OAAO,MAAM,KAAK,KAAK;AAAA;AAAA,QAEpC,MAAM,CAAC,MAAM,KAAK;AAAA,QAClB,MAAM;AAAA;AAAA,WAEH;AACL,aAAO,QAAQ,MAAM;AAAA;AAAA;AAGzB,SAAO;AAAA;AA5BA;AA+BF,wBAAwB,OAAmB;AAChD,QAAM,SAAS;AACf,aAAW,OAAO,OAAO;AACvB,UAAM,QAAQ,MAAM;AACpB,QAAI,QAAQ,UAAU,QAAQ,OAAO;AAEnC,UAAI,MAAM,QAAQ;AAChB,eAAO,OAAO,MAAM,IAAI;AAAA,iBACf,QAAQ,OAAO;AACxB,eAAO,EAAE,MAAM,CAAC;AAAA;AAAA,eAET,QAAQ,QAAQ;AAGzB,aAAO,UAAU,CAAC,eAAe;AAAA,eACxB,QAAQ,SAAS;AAC1B,aAAO,OAAO,cAAc;AAAA,WACvB;AACL,aAAO,OAAO,oBAAoB,OAAO;AAAA;AAAA;AAG7C,SAAO;AAAA;AArBO;AAwBT,uBAAuB,MAAuC;AACnE,MAAI,OAAO,SAAS,UAAU;AAC5B,WAAO,MAAM;AAAA,aACJ,OAAO,SAAS,YAAY,OAAO,SAAS,WAAW;AAChE,WAAO;AAAA;AAGT,SAAO,4BAAS,MAAa,CAAC,UAAU;AACtC,QAAI,MAAM,QAAQ,QAAQ;AACxB,aAAO,MAAM,IAAI;AAAA,WACZ;AACL,aAAO,cAAc;AAAA;AAAA;AAAA;AAXX;;;ADvChB,kCAA4B,wBAAS;AAAA,EAMnC,YAAmB,KAAsB,QAA8B;AACrE,UAAM;AADW;AAAsB;AAHlC,iBAAQ;AACP,iBAA4B;AAAA;AAAA,EAM5B,6BAA6B;AACnC,UAAM,EAAE,cAAc,gBAAgB,MAAM,UAAU,MAAM,UAAU,MAAM,UAAU,aAAa,KAAK;AACxG,QAAI,WAAW,GAAG;AAClB,QAAI;AAAU,kBAAY,GAAG,mBAAmB,YAAY,WAAW,IAAI,mBAAmB,cAAc;AAC5G,gBAAY,GAAG,OAAO,OAAO,IAAI,SAAS,MAAM,gBAAgB;AAChE,QAAI,gBAAgB;AAClB,YAAM,SAAS,IAAI,2BAAgB;AACnC,kBAAY,IAAI;AAAA;AAElB,WAAO;AAAA;AAAA,QAGH,QAAQ;AACZ,UAAM,WAAW,KAAK,OAAO,OAAO,KAAK;AACzC,SAAK,SAAS,MAAM,2BAAY,QAAQ,UAAU;AAAA,MAChD,iBAAiB;AAAA,MACjB,oBAAoB;AAAA;AAEtB,SAAK,KAAK,KAAK,OAAO,GAAG,KAAK,OAAO;AAErC,eAAW,QAAQ,KAAK,IAAI,MAAM,QAAQ;AACxC,WAAK,MAAM,QAAQ,KAAK,WAAW;AAAA;AAGrC,SAAK,IAAI,GAAG,SAAS,CAAC,SAAS;AAC7B,WAAK,MAAM,QAAQ,KAAK,WAAW;AAAA;AAAA;AAAA,EAIvC,OAAO;AACL,WAAO,KAAK,OAAO;AAAA;AAAA,QAIP,WAAW,MAAc;AACrC,UAAM,KAAK,MAAM;AACjB,UAAM,MAAM,MAAM,KAAK,GAAG,iBAAiB,MAAM,MAAM,MAAM,KAAK,GAAG,WAAW;AAChF,UAAM,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,OAAO;AAClD,UAAM,WAAiC;AACvC,UAAM,WAAiC,MAAM,IAAI;AAChD,KAAC,SAAS,GAAG,QAAQ,QAAQ,CAAC,MAAM,UAAU;AAC7C,aAAO,8BAAU;AACjB,YAAM,QAAQ,SAAQ,YAAY,cAAc,KAAK,KAAK;AAC1D,UAAI,SAAS,KAAK,UAAQ,KAAK,SAAS;AAAO;AAC/C,YAAM,MAAM,OAAO,YAAY,KAAK,IAAI,UAAO,CAAC,MAAK;AACrD,eAAS,KAAK,EAAE,aAAM,KAAK,QAAQ;AAAA;AAErC,QAAI,CAAC,SAAS;AAAQ;AACtB,UAAM,IAAI,cAAc;AAAA;AAAA,QAGpB,KAAK,MAAiB;AAC1B,QAAI,MAAM;AACR,YAAM,KAAK,GAAG,WAAW,MAAM;AAAA,WAC1B;AACL,YAAM,cAAc,MAAM,KAAK,GAAG;AAClC,YAAM,QAAQ,IAAI,YAAY,IAAI,OAAK,EAAE;AAAA;AAAA;AAAA,QAIvC,IAAI,MAAiB,OAAc,UAA0B;AACjE,UAAM,SAAS,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAChE,QAAI,SAAS,KAAK,GAAG,WAAW,MAAM,KAAK;AAC3C,UAAM,EAAE,QAAQ,OAAO,SAAS,MAAM,qBAAM,gBAAgB;AAC5D,aAAS,OAAO,QAAQ,iBAAE,KAAK,KAAM,OAAO,YAAa,2BAAU,IAAI,IAAI,SAAO,CAAC,KAAK;AACxF,QAAI;AAAQ,eAAS,OAAO,KAAK;AACjC,QAAI;AAAO,eAAS,OAAO,MAAM,SAAS;AAC1C,WAAO,MAAM,OAAO;AAAA;AAAA,QAGhB,IAAI,MAAiB,OAAc,MAAW;AAClD,UAAM,KAAK,MAAM;AACjB,UAAM,SAAS,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAChE,UAAM,KAAK,GAAG,WAAW,MAAM,WAAW,QAAQ,EAAE,MAAM;AAAA;AAAA,QAGtD,OAAO,MAAiB,OAAc;AAC1C,UAAM,SAAS,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAChE,UAAM,KAAK,GAAG,WAAW,MAAM,WAAW;AAAA;AAAA,EAGpC,MAAM,MAAiB,UAA8B;AAC3D,WAAO,KAAK,MAAM,QAAQ,QAAQ,QAAQ,KAAK,MAAM,OAAO,MAAM,qBAAM,KAAK;AAAA;AAAA,QAGzE,OAAO,MAAiB,MAAW;AACvC,WAAO,KAAK,MAAM,MAAM,YAAY;AAClC,YAAM,EAAE,SAAS,QAAQ,YAAY,KAAK,IAAI,MAAM,OAAO;AAC3D,UAAI,WAAW,CAAC,MAAM,QAAQ,YAAY,CAAE,YAAW,OAAO;AAC5D,cAAM,CAAC,UAAU,MA
AM,KAAK,GAAG,WAAW,MAAM,OAAO,KAAK,SAAS,IAAI,MAAM,GAAG;AAClF,aAAK,WAAW,SAAS,CAAC,OAAO,WAAW,IAAI;AAChD,YAAI,qBAAM,MAAM,OAAO,SAAS,OAAO,SAAS,OAAO;AACrD,eAAK,YAAY;AAAA;AAAA;AAGrB,YAAM,OAAO,kCAAK,KAAK,IAAI,MAAM,OAAO,QAAU;AAClD,UAAI;AACF,cAAM,KAAK,GAAG,WAAW,MAAM,UAAU;AACzC,eAAO;AAAA,eACA,KAAP;AACA,YAAI,eAAe,6BAAc,IAAI,SAAS,MAAO;AACnD,cAAI,OAAO,IAAI,wBAAwB;AAAA;AAEzC,cAAM;AAAA;AAAA;AAAA;AAAA,QAKN,OAAO,MAAiB,MAAa,MAAyB;AAClE,QAAI,CAAC,KAAK;AAAQ;AAClB,QAAI,CAAC;AAAM,aAAO,KAAK,IAAI,MAAM,OAAO,MAAM;AAC9C,WAAO,8BAAU;AACjB,UAAM,KAAK,MAAM;AACjB,UAAM,OAAO,KAAK,GAAG,WAAW,MAAM;AACtC,eAAW,QAAQ,MAAM;AACvB,WAAK,KAAK,yBAAK,MAAM,OAClB,SACA,UAAU,EAAE,MAAM,yBAAK,MAAM,OAAO,cAAc,yBAAK,KAAK,IAAI,MAAM,OAAO,OAAO,CAAC,GAAG,MAAM,GAAG,OAAO,KAAK;AAAA;AAElH,UAAM,KAAK;AAAA;AAAA,QAGP,UAAU,MAAiB,QAAY,OAAc;AACzD,UAAM,SAAS,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAChE,UAAM,CAAC,QAAQ,MAAM,KAAK,GAAG,WAAW,MAAM,UAAU,CAAC,EAAE,UAAU;AAAA,MACnE,QAAQ;AAAA,QACN,KAAK;AAAA,SACF,6BAAS,QAAQ;AAAA,QAEpB;AACJ,WAAO;AAAA;AAAA;AA5IX;AAgJA,UAAU,gBAAV;AACS,EAAM,sBAAO;AAiBb,EAAM,wBAAS,sBAAO,OAAO;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY,YAAY,QAAQ;AAAA,IAC1D,MAAM,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA,IACvD,MAAM,sBAAO,SAAS,YAAY;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA;AAAA,GAxBrD;AA4BV,IAAO,cAAQ;",
 "names": []
 }
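The `sourcesContent` entry above embeds the complete TypeScript source of the new build, including the connection-string helper used when no `uri` is configured. Restated as a self-contained sketch (the `Config` interface below repeats only the fields that the helper reads):

import { URLSearchParams } from 'url'

// Builds protocol://[user[:pass]@]host[:port]/(authDatabase || database)[?connectOptions],
// following connectionStringFromConfig() in the embedded source above.
interface Config {
  protocol?: string
  host?: string
  port?: number
  username?: string
  password?: string
  database?: string
  authDatabase?: string
  connectOptions?: ConstructorParameters<typeof URLSearchParams>[0]
}

function connectionString(config: Config) {
  const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = config
  let mongourl = `${protocol}://`
  if (username) mongourl += `${encodeURIComponent(username)}${password ? `:${encodeURIComponent(password)}` : ''}@`
  mongourl += `${host}${port ? `:${port}` : ''}/${authDatabase || name}`
  if (connectOptions) mongourl += `?${new URLSearchParams(connectOptions)}`
  return mongourl
}

// connectionString({ protocol: 'mongodb', host: 'localhost', port: 27017, database: 'koishi' })
// evaluates to 'mongodb://localhost:27017/koishi'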
package/package.json
CHANGED

@@ -1,7 +1,7 @@
 {
   "name": "@koishijs/plugin-database-mongo",
   "description": "MongoDB support for Koishi",
-  "version": "3.0.0-beta.
+  "version": "3.0.0-beta.5",
   "main": "lib/index.js",
   "typings": "lib/index.d.ts",
   "files": [
@@ -32,11 +32,11 @@
     "mysql"
   ],
   "devDependencies": {
-    "@koishijs/plugin-mock": "^1.0.0-beta.
-    "@koishijs/test-utils": "^8.0.0-beta.
+    "@koishijs/plugin-mock": "^1.0.0-beta.1",
+    "@koishijs/test-utils": "^8.0.0-beta.5"
   },
   "peerDependencies": {
-    "koishi": "^4.0.0-beta.
+    "koishi": "^4.0.0-beta.5"
   },
   "dependencies": {
     "@types/mongodb": "^3.6.12",