@koishijs/plugin-database-mongo 3.0.0-beta.6 → 3.0.0-beta.7
This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- package/lib/index.d.ts +4 -7
- package/lib/index.js.map +2 -2
- package/lib/utils.d.ts +5 -0
- package/package.json +3 -3
package/lib/index.d.ts
CHANGED
@@ -1,9 +1,8 @@
+/// <reference types="koishi/lib" />
 /// <reference types="node" />
-import {
-import {
+import { MongoClient, Db } from 'mongodb';
+import { Context, Database, Tables, Schema, Query } from 'koishi';
 import { URLSearchParams } from 'url';
-export function transformQuery(query: Query.Expr): Filter<any>;
-export function transformEval(expr: any, aggrs?: any[][]): any;
 declare module 'koishi' {
 interface Database {
 mongo: MongoDatabase;
@@ -12,9 +11,7 @@ declare module 'koishi' {
 'database-mongo': typeof import('.');
 }
 }
-type TableType = keyof Tables;
-export interface Tables extends KoishiTables {
-}
+declare type TableType = keyof Tables;
 declare class MongoDatabase extends Database {
 ctx: Context;
 private config;
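The net effect of this change is that the transformQuery/transformEval declarations no longer live in index.d.ts (a separate utils.d.ts is added below) and the local Tables re-export is dropped in favour of Koishi's own Tables interface. The surviving `declare module 'koishi'` augmentation is what gives consumers typed access to the driver; the following is a minimal sketch of that, assuming Koishi's standard `ctx.database` API, its 'ready' lifecycle event, and the built-in 'user' table (all outside this diff):

import { Context } from 'koishi'

export function apply(ctx: Context) {
  ctx.on('ready', async () => {
    // TableType = keyof Tables, so table names passed to database methods
    // are checked against Koishi's Tables interface ('user' is built in)
    const users = await ctx.database.get('user', {})
    // the Database augmentation above exposes the MongoDatabase instance
    // (and its underlying mongodb Db) with full typings once the plugin is loaded
    console.log(ctx.database.mongo.db.databaseName, users.length)
  })
}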
package/lib/index.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../src/index.ts", "../src/utils.ts"],
-
"sourcesContent": ["import { MongoClient, Db, MongoError, IndexDescription } from 'mongodb'\nimport { Context, Database, Tables as KoishiTables, makeArray, Schema, pick, omit, Query, Model, Dict, noop, KoishiError, valueMap } from 'koishi'\nimport { URLSearchParams } from 'url'\nimport { executeUpdate, executeEval } from '@koishijs/orm-utils'\nimport { transformQuery, transformEval } from './utils'\n\ndeclare module 'koishi' {\n interface Database {\n mongo: MongoDatabase\n }\n\n interface Modules {\n 'database-mongo': typeof import('.')\n }\n}\n\ntype TableType = keyof Tables\n\nexport interface Tables extends KoishiTables {}\n\nclass MongoDatabase extends Database {\n public client: MongoClient\n public db: Db\n public mongo = this\n private tasks: Dict<Promise<any>> = {}\n\n constructor(public ctx: Context, private config: MongoDatabase.Config) {\n super(ctx)\n }\n\n private connectionStringFromConfig() {\n const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config\n let mongourl = `${protocol}://`\n if (username) mongourl += `${encodeURIComponent(username)}${password ? `:${encodeURIComponent(password)}` : ''}@`\n mongourl += `${host}${port ? `:${port}` : ''}/${authDatabase || name}`\n if (connectOptions) {\n const params = new URLSearchParams(connectOptions)\n mongourl += `?${params}`\n }\n return mongourl\n }\n\n async start() {\n const mongourl = this.config.uri || this.connectionStringFromConfig()\n this.client = await MongoClient.connect(mongourl)\n this.db = this.client.db(this.config.database)\n\n for (const name in this.ctx.model.config) {\n this.tasks[name] = this._syncTable(name)\n }\n\n this.ctx.on('model', (name) => {\n this.tasks[name] = this._syncTable(name)\n })\n }\n\n stop() {\n return this.client.close()\n }\n\n /** synchronize table schema */\n private async _syncTable(name: string) {\n await this.tasks[name]\n const coll = await this.db.createCollection(name).catch(() => this.db.collection(name))\n const { primary, unique } = this.ctx.model.config[name]\n const newSpecs: IndexDescription[] = []\n const oldSpecs = await coll.indexes()\n ;[primary, ...unique].forEach((keys, index) => {\n keys = makeArray(keys)\n const name = (index ? 
'unique:' : 'primary:') + keys.join('+')\n if (oldSpecs.find(spec => spec.name === name)) return\n const key = Object.fromEntries(keys.map(key => [key, 1]))\n newSpecs.push({ name, key, unique: true })\n })\n if (!newSpecs.length) return\n await coll.createIndexes(newSpecs)\n }\n\n private _createFilter(name: string, query: Query) {\n return transformQuery(this.ctx.model.resolveQuery(name, query))\n }\n\n async drop() {\n await Promise.all(Object.keys(this.ctx.model.config).map(name => this.db.dropCollection(name)))\n }\n\n private async _collStats() {\n const tables = Object.keys(this.ctx.model.config)\n const entries = await Promise.all(tables.map(async (name) => {\n const coll = this.db.collection(name)\n const { count, size } = await coll.stats()\n return [coll.collectionName, { count, size }] as const\n }))\n return Object.fromEntries(entries)\n }\n\n async stats() {\n // https://docs.mongodb.com/manual/reference/command/dbStats/#std-label-dbstats-output\n const [{ totalSize }, tables] = await Promise.all([\n this.db.stats(),\n this._collStats(),\n ])\n return { size: totalSize, tables }\n }\n\n async get(name: TableType, query: Query, modifier: Query.Modifier) {\n const filter = this._createFilter(name, query)\n let cursor = this.db.collection(name).find(filter)\n const { fields, limit, offset = 0, sort } = Query.resolveModifier(modifier)\n cursor = cursor.project({ _id: 0, ...Object.fromEntries((fields ?? []).map(key => [key, 1])) })\n if (offset) cursor = cursor.skip(offset)\n if (limit) cursor = cursor.limit(offset + limit)\n if (sort) cursor = cursor.sort(sort)\n return await cursor.toArray() as any\n }\n\n async set(name: TableType, query: Query, update: {}) {\n await this.tasks[name]\n const { primary } = this.ctx.model.config[name]\n const indexFields = makeArray(primary)\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const filter = this._createFilter(name, query)\n const coll = this.db.collection(name)\n const original = await coll.find(filter).toArray()\n if (!original.length) return\n const bulk = coll.initializeUnorderedBulkOp()\n for (const item of original) {\n bulk.find(pick(item, indexFields)).updateOne({ $set: pick(executeUpdate(item, update), updateFields) })\n }\n await bulk.execute()\n }\n\n async remove(name: TableType, query: Query) {\n const filter = this._createFilter(name, query)\n await this.db.collection(name).deleteMany(filter)\n }\n\n private queue(name: TableType, callback: () => Promise<any>) {\n return this.tasks[name] = Promise.resolve(this.tasks[name]).catch(noop).then(callback)\n }\n\n async create(name: TableType, data: any) {\n const coll = this.db.collection(name)\n return this.queue(name, async () => {\n const { primary, fields, autoInc } = this.ctx.model.config[name]\n if (autoInc && !Array.isArray(primary) && !(primary in data)) {\n const [latest] = await coll.find().sort(primary, -1).limit(1).toArray()\n data[primary] = latest ? 
+latest[primary] + 1 : 1\n if (Model.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n }\n }\n const copy = { ...this.ctx.model.create(name), ...data }\n try {\n await coll.insertOne(copy)\n delete copy._id\n return copy\n } catch (err) {\n if (err instanceof MongoError && err.code === 11000) {\n throw new KoishiError(err.message, 'database.duplicate-entry')\n }\n throw err\n }\n })\n }\n\n async upsert(name: TableType, data: any[], keys: string | string[]) {\n if (!data.length) return\n if (!keys) keys = this.ctx.model.config[name].primary\n const indexFields = makeArray(keys)\n await this.tasks[name]\n const coll = this.db.collection(name)\n const original = await coll.find({ $or: data.map(item => pick(item, indexFields)) }).toArray()\n const bulk = coll.initializeUnorderedBulkOp()\n for (const update of data) {\n const item = original.find(item => indexFields.every(key => item[key].valueOf() === update[key].valueOf()))\n if (item) {\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const override = omit(pick(executeUpdate(item, update), updateFields), indexFields)\n bulk.find(pick(item, indexFields)).updateOne({ $set: override })\n } else {\n bulk.insert(executeUpdate(this.ctx.model.create(name), update))\n }\n }\n await bulk.execute()\n }\n\n async aggregate(name: TableType, fields: {}, query: Query) {\n if (!Object.keys(fields).length) return {}\n const $match = this._createFilter(name, query)\n const aggrs: any[][] = []\n fields = valueMap(fields, value => transformEval(value, aggrs))\n const stages = aggrs.map<any>((pipeline) => {\n pipeline.unshift({ $match })\n return { $unionWith: { coll: name, pipeline } }\n })\n stages.unshift({ $match: { _id: null } })\n const results = await this.db.collection(name).aggregate(stages).toArray()\n const data = Object.assign({}, ...results)\n return valueMap(fields, value => executeEval(data, value)) as any\n }\n}\n\nnamespace MongoDatabase {\n export const name = 'database-mongo'\n\n export interface Config {\n username?: string\n password?: string\n protocol?: string\n host?: string\n port?: number\n /** database name */\n database?: string\n /** default auth database */\n authDatabase?: string\n connectOptions?: ConstructorParameters<typeof URLSearchParams>[0]\n /** connection string (will overwrite all configs except 'name') */\n uri?: string\n }\n\n export const Config = Schema.object({\n protocol: Schema.string().description('要使用的协议名。').default('mongodb'),\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.number().description('要连接到的端口号。'),\n username: Schema.string().description('要使用的用户名。'),\n password: Schema.string().description('要使用的密码。'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n })\n}\n\nexport default MongoDatabase\n", "import { Query, valueMap } from 'koishi'\nimport { Filter, FilterOperators } from 'mongodb'\n\nfunction transformFieldQuery(query: Query.FieldQuery, key: string) {\n // shorthand syntax\n if (typeof query === 'string' || typeof query === 'number' || query instanceof Date) {\n return { $eq: query }\n } else if (Array.isArray(query)) {\n if (!query.length) return\n return { $in: query }\n } else if (query instanceof RegExp) {\n return { $regex: query }\n }\n\n // query operators\n const result: FilterOperators<any> = {}\n for (const prop in query) {\n if (prop === '$el') {\n result.$elemMatch = transformFieldQuery(query[prop], key)\n } else if (prop === '$regexFor') {\n result.$expr = {\n 
body(data: string, value: string) {\n return new RegExp(data, 'i').test(value)\n },\n args: ['$' + key, query],\n lang: 'js',\n }\n } else {\n result[prop] = query[prop]\n }\n }\n return result\n}\n\nexport function transformQuery(query: Query.Expr) {\n const filter: Filter<any> = {}\n for (const key in query) {\n const value = query[key]\n if (key === '$and' || key === '$or') {\n // MongoError: $and/$or/$nor must be a nonempty array\n if (value.length) {\n filter[key] = value.map(transformQuery)\n } else if (key === '$or') {\n return { $nor: [{}] }\n }\n } else if (key === '$not') {\n // MongoError: unknown top level operator: $not\n // https://stackoverflow.com/questions/25270396/mongodb-how-to-invert-query-with-not\n filter.$nor = [transformQuery(value)]\n } else if (key === '$expr') {\n filter[key] = transformEval(value)\n } else {\n filter[key] = transformFieldQuery(value, key)\n }\n }\n return filter\n}\n\nfunction transformEvalExpr(expr: any, aggrs?: any[][]) {\n return valueMap(expr as any, (value, key) => {\n if (Array.isArray(value)) {\n return value.map(val => transformEval(val, aggrs))\n } else {\n return transformEval(value, aggrs)\n }\n })\n}\n\nfunction transformAggr(expr: any) {\n if (typeof expr === 'string') {\n return '$' + expr\n }\n return transformEvalExpr(expr)\n}\n\nconst aggrKeys = ['$sum', '$avg', '$min', '$max', '$count']\n\nexport function transformEval(expr: any, aggrs?: any[][]) {\n if (typeof expr === 'number' || typeof expr === 'string' || typeof expr === 'boolean') {\n return expr\n } else if (expr.$) {\n return '$' + expr.$\n }\n\n for (const key of aggrKeys) {\n if (!expr[key]) continue\n const value = transformAggr(expr[key])\n const $ = 'temp' + aggrs.length\n if (key === '$count') {\n aggrs.push([\n { $group: { _id: value } },\n { $group: { _id: null, [$]: { $count: {} } } }\n ])\n } else {\n aggrs.push([{ $group: { _id: null, [$]: { [key]: value } } }])\n }\n return { $ }\n }\n\n return transformEvalExpr(expr, aggrs)\n}\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,qBAA8D;AAC9D,
+
"sourcesContent": ["import { MongoClient, Db, MongoError, IndexDescription } from 'mongodb'\nimport { Context, Database, Tables, makeArray, Schema, pick, omit, Query, Model, Dict, noop, KoishiError, valueMap } from 'koishi'\nimport { URLSearchParams } from 'url'\nimport { executeUpdate, executeEval } from '@koishijs/orm-utils'\nimport { transformQuery, transformEval } from './utils'\n\ndeclare module 'koishi' {\n interface Database {\n mongo: MongoDatabase\n }\n\n interface Modules {\n 'database-mongo': typeof import('.')\n }\n}\n\ntype TableType = keyof Tables\n\nclass MongoDatabase extends Database {\n public client: MongoClient\n public db: Db\n public mongo = this\n private tasks: Dict<Promise<any>> = {}\n\n constructor(public ctx: Context, private config: MongoDatabase.Config) {\n super(ctx)\n }\n\n private connectionStringFromConfig() {\n const { authDatabase, connectOptions, host, database: name, password, port, protocol, username } = this.config\n let mongourl = `${protocol}://`\n if (username) mongourl += `${encodeURIComponent(username)}${password ? `:${encodeURIComponent(password)}` : ''}@`\n mongourl += `${host}${port ? `:${port}` : ''}/${authDatabase || name}`\n if (connectOptions) {\n const params = new URLSearchParams(connectOptions)\n mongourl += `?${params}`\n }\n return mongourl\n }\n\n async start() {\n const mongourl = this.config.uri || this.connectionStringFromConfig()\n this.client = await MongoClient.connect(mongourl)\n this.db = this.client.db(this.config.database)\n\n for (const name in this.ctx.model.config) {\n this.tasks[name] = this._syncTable(name)\n }\n\n this.ctx.on('model', (name) => {\n this.tasks[name] = this._syncTable(name)\n })\n }\n\n stop() {\n return this.client.close()\n }\n\n /** synchronize table schema */\n private async _syncTable(name: string) {\n await this.tasks[name]\n const coll = await this.db.createCollection(name).catch(() => this.db.collection(name))\n const { primary, unique } = this.ctx.model.config[name]\n const newSpecs: IndexDescription[] = []\n const oldSpecs = await coll.indexes()\n ;[primary, ...unique].forEach((keys, index) => {\n keys = makeArray(keys)\n const name = (index ? 
'unique:' : 'primary:') + keys.join('+')\n if (oldSpecs.find(spec => spec.name === name)) return\n const key = Object.fromEntries(keys.map(key => [key, 1]))\n newSpecs.push({ name, key, unique: true })\n })\n if (!newSpecs.length) return\n await coll.createIndexes(newSpecs)\n }\n\n private _createFilter(name: string, query: Query) {\n return transformQuery(this.ctx.model.resolveQuery(name, query))\n }\n\n async drop() {\n await Promise.all(Object.keys(this.ctx.model.config).map(name => this.db.dropCollection(name)))\n }\n\n private async _collStats() {\n const tables = Object.keys(this.ctx.model.config)\n const entries = await Promise.all(tables.map(async (name) => {\n const coll = this.db.collection(name)\n const { count, size } = await coll.stats()\n return [coll.collectionName, { count, size }] as const\n }))\n return Object.fromEntries(entries)\n }\n\n async stats() {\n // https://docs.mongodb.com/manual/reference/command/dbStats/#std-label-dbstats-output\n const [{ totalSize }, tables] = await Promise.all([\n this.db.stats(),\n this._collStats(),\n ])\n return { size: totalSize, tables }\n }\n\n async get(name: TableType, query: Query, modifier: Query.Modifier) {\n const filter = this._createFilter(name, query)\n let cursor = this.db.collection(name).find(filter)\n const { fields, limit, offset = 0, sort } = Query.resolveModifier(modifier)\n cursor = cursor.project({ _id: 0, ...Object.fromEntries((fields ?? []).map(key => [key, 1])) })\n if (offset) cursor = cursor.skip(offset)\n if (limit) cursor = cursor.limit(offset + limit)\n if (sort) cursor = cursor.sort(sort)\n return await cursor.toArray() as any\n }\n\n async set(name: TableType, query: Query, update: {}) {\n await this.tasks[name]\n const { primary } = this.ctx.model.config[name]\n const indexFields = makeArray(primary)\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const filter = this._createFilter(name, query)\n const coll = this.db.collection(name)\n const original = await coll.find(filter).toArray()\n if (!original.length) return\n const bulk = coll.initializeUnorderedBulkOp()\n for (const item of original) {\n bulk.find(pick(item, indexFields)).updateOne({ $set: pick(executeUpdate(item, update), updateFields) })\n }\n await bulk.execute()\n }\n\n async remove(name: TableType, query: Query) {\n const filter = this._createFilter(name, query)\n await this.db.collection(name).deleteMany(filter)\n }\n\n private queue(name: TableType, callback: () => Promise<any>) {\n return this.tasks[name] = Promise.resolve(this.tasks[name]).catch(noop).then(callback)\n }\n\n async create(name: TableType, data: any) {\n const coll = this.db.collection(name)\n return this.queue(name, async () => {\n const { primary, fields, autoInc } = this.ctx.model.config[name]\n if (autoInc && !Array.isArray(primary) && !(primary in data)) {\n const [latest] = await coll.find().sort(primary, -1).limit(1).toArray()\n data[primary] = latest ? 
+latest[primary] + 1 : 1\n if (Model.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n }\n }\n const copy = { ...this.ctx.model.create(name), ...data }\n try {\n await coll.insertOne(copy)\n delete copy._id\n return copy\n } catch (err) {\n if (err instanceof MongoError && err.code === 11000) {\n throw new KoishiError(err.message, 'database.duplicate-entry')\n }\n throw err\n }\n })\n }\n\n async upsert(name: TableType, data: any[], keys: string | string[]) {\n if (!data.length) return\n if (!keys) keys = this.ctx.model.config[name].primary\n const indexFields = makeArray(keys)\n await this.tasks[name]\n const coll = this.db.collection(name)\n const original = await coll.find({ $or: data.map(item => pick(item, indexFields)) }).toArray()\n const bulk = coll.initializeUnorderedBulkOp()\n for (const update of data) {\n const item = original.find(item => indexFields.every(key => item[key].valueOf() === update[key].valueOf()))\n if (item) {\n const updateFields = new Set(Object.keys(update).map(key => key.split('.', 1)[0]))\n const override = omit(pick(executeUpdate(item, update), updateFields), indexFields)\n bulk.find(pick(item, indexFields)).updateOne({ $set: override })\n } else {\n bulk.insert(executeUpdate(this.ctx.model.create(name), update))\n }\n }\n await bulk.execute()\n }\n\n async aggregate(name: TableType, fields: {}, query: Query) {\n if (!Object.keys(fields).length) return {}\n const $match = this._createFilter(name, query)\n const aggrs: any[][] = []\n fields = valueMap(fields, value => transformEval(value, aggrs))\n const stages = aggrs.map<any>((pipeline) => {\n pipeline.unshift({ $match })\n return { $unionWith: { coll: name, pipeline } }\n })\n stages.unshift({ $match: { _id: null } })\n const results = await this.db.collection(name).aggregate(stages).toArray()\n const data = Object.assign({}, ...results)\n return valueMap(fields, value => executeEval(data, value)) as any\n }\n}\n\nnamespace MongoDatabase {\n export const name = 'database-mongo'\n\n export interface Config {\n username?: string\n password?: string\n protocol?: string\n host?: string\n port?: number\n /** database name */\n database?: string\n /** default auth database */\n authDatabase?: string\n connectOptions?: ConstructorParameters<typeof URLSearchParams>[0]\n /** connection string (will overwrite all configs except 'name') */\n uri?: string\n }\n\n export const Config = Schema.object({\n protocol: Schema.string().description('要使用的协议名。').default('mongodb'),\n host: Schema.string().description('要连接到的主机名。').default('localhost'),\n port: Schema.number().description('要连接到的端口号。'),\n username: Schema.string().description('要使用的用户名。'),\n password: Schema.string().description('要使用的密码。'),\n database: Schema.string().description('要访问的数据库名。').default('koishi'),\n })\n}\n\nexport default MongoDatabase\n", "import { Query, valueMap } from 'koishi'\nimport { Filter, FilterOperators } from 'mongodb'\n\nfunction transformFieldQuery(query: Query.FieldQuery, key: string) {\n // shorthand syntax\n if (typeof query === 'string' || typeof query === 'number' || query instanceof Date) {\n return { $eq: query }\n } else if (Array.isArray(query)) {\n if (!query.length) return\n return { $in: query }\n } else if (query instanceof RegExp) {\n return { $regex: query }\n }\n\n // query operators\n const result: FilterOperators<any> = {}\n for (const prop in query) {\n if (prop === '$el') {\n result.$elemMatch = transformFieldQuery(query[prop], key)\n } else if (prop === '$regexFor') {\n result.$expr = {\n 
body(data: string, value: string) {\n return new RegExp(data, 'i').test(value)\n },\n args: ['$' + key, query],\n lang: 'js',\n }\n } else {\n result[prop] = query[prop]\n }\n }\n return result\n}\n\nexport function transformQuery(query: Query.Expr) {\n const filter: Filter<any> = {}\n for (const key in query) {\n const value = query[key]\n if (key === '$and' || key === '$or') {\n // MongoError: $and/$or/$nor must be a nonempty array\n if (value.length) {\n filter[key] = value.map(transformQuery)\n } else if (key === '$or') {\n return { $nor: [{}] }\n }\n } else if (key === '$not') {\n // MongoError: unknown top level operator: $not\n // https://stackoverflow.com/questions/25270396/mongodb-how-to-invert-query-with-not\n filter.$nor = [transformQuery(value)]\n } else if (key === '$expr') {\n filter[key] = transformEval(value)\n } else {\n filter[key] = transformFieldQuery(value, key)\n }\n }\n return filter\n}\n\nfunction transformEvalExpr(expr: any, aggrs?: any[][]) {\n return valueMap(expr as any, (value, key) => {\n if (Array.isArray(value)) {\n return value.map(val => transformEval(val, aggrs))\n } else {\n return transformEval(value, aggrs)\n }\n })\n}\n\nfunction transformAggr(expr: any) {\n if (typeof expr === 'string') {\n return '$' + expr\n }\n return transformEvalExpr(expr)\n}\n\nconst aggrKeys = ['$sum', '$avg', '$min', '$max', '$count']\n\nexport function transformEval(expr: any, aggrs?: any[][]) {\n if (typeof expr === 'number' || typeof expr === 'string' || typeof expr === 'boolean') {\n return expr\n } else if (expr.$) {\n return '$' + expr.$\n }\n\n for (const key of aggrKeys) {\n if (!expr[key]) continue\n const value = transformAggr(expr[key])\n const $ = 'temp' + aggrs.length\n if (key === '$count') {\n aggrs.push([\n { $group: { _id: value } },\n { $group: { _id: null, [$]: { $count: {} } } }\n ])\n } else {\n aggrs.push([{ $group: { _id: null, [$]: { [key]: value } } }])\n }\n return { $ }\n }\n\n return transformEvalExpr(expr, aggrs)\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,qBAA8D;AAC9D,qBAA0H;AAC1H,iBAAgC;AAChC,uBAA2C;;;ACH3C,oBAAgC;AAGhC,6BAA6B,OAAyB,KAAa;AAEjE,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,YAAY,iBAAiB,MAAM;AACnF,WAAO,EAAE,KAAK;AAAA,aACL,MAAM,QAAQ,QAAQ;AAC/B,QAAI,CAAC,MAAM;AAAQ;AACnB,WAAO,EAAE,KAAK;AAAA,aACL,iBAAiB,QAAQ;AAClC,WAAO,EAAE,QAAQ;AAAA;AAInB,QAAM,SAA+B;AACrC,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,OAAO;AAClB,aAAO,aAAa,oBAAoB,MAAM,OAAO;AAAA,eAC5C,SAAS,aAAa;AAC/B,aAAO,QAAQ;AAAA,QACb,KAAK,MAAc,OAAe;AAChC,iBAAO,IAAI,OAAO,MAAM,KAAK,KAAK;AAAA;AAAA,QAEpC,MAAM,CAAC,MAAM,KAAK;AAAA,QAClB,MAAM;AAAA;AAAA,WAEH;AACL,aAAO,QAAQ,MAAM;AAAA;AAAA;AAGzB,SAAO;AAAA;AA5BA;AA+BF,wBAAwB,OAAmB;AAChD,QAAM,SAAsB;AAC5B,aAAW,OAAO,OAAO;AACvB,UAAM,QAAQ,MAAM;AACpB,QAAI,QAAQ,UAAU,QAAQ,OAAO;AAEnC,UAAI,MAAM,QAAQ;AAChB,eAAO,OAAO,MAAM,IAAI;AAAA,iBACf,QAAQ,OAAO;AACxB,eAAO,EAAE,MAAM,CAAC;AAAA;AAAA,eAET,QAAQ,QAAQ;AAGzB,aAAO,OAAO,CAAC,eAAe;AAAA,eACrB,QAAQ,SAAS;AAC1B,aAAO,OAAO,cAAc;AAAA,WACvB;AACL,aAAO,OAAO,oBAAoB,OAAO;AAAA;AAAA;AAG7C,SAAO;AAAA;AArBO;AAwBhB,2BAA2B,MAAW,OAAiB;AACrD,SAAO,4BAAS,MAAa,CAAC,OAAO,QAAQ;AAC3C,QAAI,MAAM,QAAQ,QAAQ;AACxB,aAAO,MAAM,IAAI,SAAO,cAAc,KAAK;AAAA,WACtC;AACL,aAAO,cAAc,OAAO;AAAA;AAAA;AAAA;AALzB;AAUT,uBAAuB,MAAW;AAChC,MAAI,OAAO,SAAS,UAAU;AAC5B,WAAO,MAAM;AAAA;AAEf,SAAO,kBAAkB;AAAA;AAJlB;AAOT,IAAM,WAAW,CAAC,QAAQ,QAAQ,QAAQ,QAAQ;AAE3C,uBAAuB,MAAW,OAAiB;AACxD,MAAI,OAAO,SAAS,YAAY,OAAO,SAAS,YAAY,OAAO,SAAS,WAAW;AACrF,WAAO;AAAA,aACE,KAAK,GAAG;AACjB,WAAO,MAAM,KAAK;AAAA;AAGpB,aAAW,OAAO,UAAU;AAC1B,QAAI,CAAC,KAAK;AAAM;AAChB,UAAM,QAAQ,cAAc,KAAK;AACjC,UAAM,IAAI,SAAS,MAAM;AACzB,QAAI,QAAQ,UAAU;AACpB,YAAM,KAAK;AAAA,QACT,EAAE,QAAQ,EAAE,KAAK;AAAA,QACjB,EAAE,QAAQ,EAAE,KAAK,OAAO,IAAI,EAAE,QAAQ;AAAA;AAAA,WAEnC;AACL,YAAM,KAAK,CAAC,EAAE,QAAQ,EAAE,KAAK,OAAO,IAAI,GAAG,MAAM;AAAA;AAEnD,WAAO,EAAE;AAAA;AAGX,SAAO,kBAAkB,MAAM;AAAA;AAtBjB;;;AD3DhB,kCAA4B,wBAAS;AAAA,EAMnC,YAAmB,KAAsB,QAA8B;AACrE,UAAM;AADW;AAAsB;AAHlC,iBAAQ;AACP,iBAA4B;AAAA;AAAA,EAM5B,6BAA6B;AACnC,UAAM,EAAE,cAAc,gBAAgB,MAAM,UAAU,MAAM,UAAU,MAAM,UAAU,aAAa,KAAK;AACxG,QAAI,WAAW,GAAG;AAClB,QAAI;AAAU,kBAAY,GAAG,mBAAmB,YAAY,WAAW,IAAI,mBAAmB,cAAc;AAC5G,gBAAY,GAAG,OAAO,OAAO,IAAI,SAAS,MAAM,gBAAgB;AAChE,QAAI,gBAAgB;AAClB,YAAM,SAAS,IAAI,2BAAgB;AACnC,kBAAY,IAAI;AAAA;AAElB,WAAO;AAAA;AAAA,QAGH,QAAQ;AACZ,UAAM,WAAW,KAAK,OAAO,OAAO,KAAK;AACzC,SAAK,SAAS,MAAM,2BAAY,QAAQ;AACxC,SAAK,KAAK,KAAK,OAAO,GAAG,KAAK,OAAO;AAErC,eAAW,QAAQ,KAAK,IAAI,MAAM,QAAQ;AACxC,WAAK,MAAM,QAAQ,KAAK,WAAW;AAAA;AAGrC,SAAK,IAAI,GAAG,SAAS,CAAC,SAAS;AAC7B,WAAK,MAAM,QAAQ,KAAK,WAAW;AAAA;AAAA;AAAA,EAIvC,OAAO;AACL,WAAO,KAAK,OAAO;AAAA;AAAA,QAIP,WAAW,MAAc;AACrC,UAAM,KAAK,MAAM;AACjB,UAAM,OAAO,MAAM,KAAK,GAAG,iBAAiB,MAAM,MAAM,MAAM,KAAK,GAAG,WAAW;AACjF,UAAM,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,OAAO;AAClD,UAAM,WAA+B;AACrC,UAAM,WAAW,MAAM,KAAK;AAC3B,KAAC,SAAS,GAAG,QAAQ,QAAQ,CAAC,MAAM,UAAU;AAC7C,aAAO,8BAAU;AACjB,YAAM,QAAQ,SAAQ,YAAY,cAAc,KAAK,KAAK;AAC1D,UAAI,SAAS,KAAK,UAAQ,KAAK,SAAS;AAAO;AAC/C,YAAM,MAAM,OAAO,YAAY,KAAK,IAAI,UAAO,CAAC,MAAK;AACrD,eAAS,KAAK,EAAE,aAAM,KAAK,QAAQ;AAAA;AAErC,QAAI,CAAC,SAAS;AAAQ;AACtB,UAAM,KAAK,cAAc;AAAA;AAAA,EAGnB,cAAc,MAAc,OAAc;AAChD,WAAO,eAAe,KAAK,IAAI,MAAM,aAAa,MAAM;AAAA;AAAA,QAGpD,OAAO;AACX,UAAM,QAAQ,IAAI,OAAO,KAAK,KAAK,IAAI,MAAM,QAAQ,IAAI,UAAQ,KAAK,GAAG,eAAe;AAAA;AAAA,QAG5E,aAAa;AACzB,UAAM,SAAS,OAAO,KAAK,KAAK,IAAI,MAAM;AAC1C,UAAM,UAAU,MAAM,QAAQ,IAAI,OAAO,IAAI,OAAO,SAAS;AAC3D,YAAM,OAAO,KAAK,GAAG,WAAW;AAChC,YAAM,EAAE,OAAO,SAAS,MAAM,KAAK;AACnC,aAAO,CAAC,KAAK,gBAAgB,EAAE,OAAO;AAAA;AAExC,WAAO,OAAO,YAAY;AAAA;AAAA,QAGtB,QAAQ;AAEZ,UAAM,CAAC,EAAE,aAAa,UAAU,MAAM,QAAQ,IAAI;AAAA,MAChD,KAAK,GAAG;AAAA,MACR,KAAK;AAAA;AAEP,WAAO,EA
AE,MAAM,WAAW;AAAA;AAAA,QAGtB,IAAI,MAAiB,OAAc,UAA0B;AACjE,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,QAAI,SAAS,KAAK,GAAG,WAAW,MAAM,KAAK;AAC3C,UAAM,EAAE,QAAQ,OAAO,SAAS,GAAG,SAAS,qBAAM,gBAAgB;AAClE,aAAS,OAAO,QAAQ,iBAAE,KAAK,KAAM,OAAO,YAAa,2BAAU,IAAI,IAAI,SAAO,CAAC,KAAK;AACxF,QAAI;AAAQ,eAAS,OAAO,KAAK;AACjC,QAAI;AAAO,eAAS,OAAO,MAAM,SAAS;AAC1C,QAAI;AAAM,eAAS,OAAO,KAAK;AAC/B,WAAO,MAAM,OAAO;AAAA;AAAA,QAGhB,IAAI,MAAiB,OAAc,QAAY;AACnD,UAAM,KAAK,MAAM;AACjB,UAAM,EAAE,YAAY,KAAK,IAAI,MAAM,OAAO;AAC1C,UAAM,cAAc,8BAAU;AAC9B,UAAM,eAAe,IAAI,IAAI,OAAO,KAAK,QAAQ,IAAI,SAAO,IAAI,MAAM,KAAK,GAAG;AAC9E,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,UAAM,WAAW,MAAM,KAAK,KAAK,QAAQ;AACzC,QAAI,CAAC,SAAS;AAAQ;AACtB,UAAM,OAAO,KAAK;AAClB,eAAW,QAAQ,UAAU;AAC3B,WAAK,KAAK,yBAAK,MAAM,cAAc,UAAU,EAAE,MAAM,yBAAK,oCAAc,MAAM,SAAS;AAAA;AAEzF,UAAM,KAAK;AAAA;AAAA,QAGP,OAAO,MAAiB,OAAc;AAC1C,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAM,KAAK,GAAG,WAAW,MAAM,WAAW;AAAA;AAAA,EAGpC,MAAM,MAAiB,UAA8B;AAC3D,WAAO,KAAK,MAAM,QAAQ,QAAQ,QAAQ,KAAK,MAAM,OAAO,MAAM,qBAAM,KAAK;AAAA;AAAA,QAGzE,OAAO,MAAiB,MAAW;AACvC,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,WAAO,KAAK,MAAM,MAAM,YAAY;AAClC,YAAM,EAAE,SAAS,QAAQ,YAAY,KAAK,IAAI,MAAM,OAAO;AAC3D,UAAI,WAAW,CAAC,MAAM,QAAQ,YAAY,CAAE,YAAW,OAAO;AAC5D,cAAM,CAAC,UAAU,MAAM,KAAK,OAAO,KAAK,SAAS,IAAI,MAAM,GAAG;AAC9D,aAAK,WAAW,SAAS,CAAC,OAAO,WAAW,IAAI;AAChD,YAAI,qBAAM,MAAM,OAAO,SAAS,OAAO,SAAS,OAAO;AACrD,eAAK,YAAY;AAAA;AAAA;AAGrB,YAAM,OAAO,kCAAK,KAAK,IAAI,MAAM,OAAO,QAAU;AAClD,UAAI;AACF,cAAM,KAAK,UAAU;AACrB,eAAO,KAAK;AACZ,eAAO;AAAA,eACA,KAAP;AACA,YAAI,eAAe,6BAAc,IAAI,SAAS,MAAO;AACnD,gBAAM,IAAI,2BAAY,IAAI,SAAS;AAAA;AAErC,cAAM;AAAA;AAAA;AAAA;AAAA,QAKN,OAAO,MAAiB,MAAa,MAAyB;AAClE,QAAI,CAAC,KAAK;AAAQ;AAClB,QAAI,CAAC;AAAM,aAAO,KAAK,IAAI,MAAM,OAAO,MAAM;AAC9C,UAAM,cAAc,8BAAU;AAC9B,UAAM,KAAK,MAAM;AACjB,UAAM,OAAO,KAAK,GAAG,WAAW;AAChC,UAAM,WAAW,MAAM,KAAK,KAAK,EAAE,KAAK,KAAK,IAAI,UAAQ,yBAAK,MAAM,iBAAiB;AACrF,UAAM,OAAO,KAAK;AAClB,eAAW,UAAU,MAAM;AACzB,YAAM,OAAO,SAAS,KAAK,WAAQ,YAAY,MAAM,SAAO,MAAK,KAAK,cAAc,OAAO,KAAK;AAChG,UAAI,MAAM;AACR,cAAM,eAAe,IAAI,IAAI,OAAO,KAAK,QAAQ,IAAI,SAAO,IAAI,MAAM,KAAK,GAAG;AAC9E,cAAM,WAAW,yBAAK,yBAAK,oCAAc,MAAM,SAAS,eAAe;AACvE,aAAK,KAAK,yBAAK,MAAM,cAAc,UAAU,EAAE,MAAM;AAAA,aAChD;AACL,aAAK,OAAO,oCAAc,KAAK,IAAI,MAAM,OAAO,OAAO;AAAA;AAAA;AAG3D,UAAM,KAAK;AAAA;AAAA,QAGP,UAAU,MAAiB,QAAY,OAAc;AACzD,QAAI,CAAC,OAAO,KAAK,QAAQ;AAAQ,aAAO;AACxC,UAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAM,QAAiB;AACvB,aAAS,6BAAS,QAAQ,WAAS,cAAc,OAAO;AACxD,UAAM,SAAS,MAAM,IAAS,CAAC,aAAa;AAC1C,eAAS,QAAQ,EAAE;AACnB,aAAO,EAAE,YAAY,EAAE,MAAM,MAAM;AAAA;AAErC,WAAO,QAAQ,EAAE,QAAQ,EAAE,KAAK;AAChC,UAAM,UAAU,MAAM,KAAK,GAAG,WAAW,MAAM,UAAU,QAAQ;AACjE,UAAM,OAAO,OAAO,OAAO,IAAI,GAAG;AAClC,WAAO,6BAAS,QAAQ,WAAS,kCAAY,MAAM;AAAA;AAAA;AAnLvD;AAuLA,UAAU,gBAAV;AACS,EAAM,sBAAO;AAiBb,EAAM,wBAAS,sBAAO,OAAO;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY,YAAY,QAAQ;AAAA,IAC1D,MAAM,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA,IACvD,MAAM,sBAAO,SAAS,YAAY;AAAA,IAClC,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY;AAAA,IACtC,UAAU,sBAAO,SAAS,YAAY,aAAa,QAAQ;AAAA;AAAA,GAxBrD;AA4BV,IAAO,cAAQ;",
 "names": []
 }
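The only material change inside the map is in sourcesContent: src/index.ts now imports Tables directly from 'koishi' instead of aliasing it as KoishiTables and re-exporting a local empty Tables interface; the embedded src/utils.ts appears unchanged. For reference, the pairs below illustrate the filters its transformQuery produces, written out as plain data since the runtime export path is not part of this diff:

// Koishi query expressions on the left, the MongoDB filters produced by
// transformQuery on the right, following src/utils.ts as embedded above.
export const examples = [
  // primitive shorthand becomes $eq, array shorthand becomes $in
  { input: { id: 1 }, output: { id: { $eq: 1 } } },
  { input: { flag: [1, 2, 3] }, output: { flag: { $in: [1, 2, 3] } } },
  // MongoDB rejects a top-level $not, so it is rewritten through $nor
  { input: { $not: { name: 'foo' } }, output: { $nor: [{ name: { $eq: 'foo' } }] } },
  // an empty $or can never match; $nor: [{}] expresses that
  { input: { $or: [] }, output: { $nor: [{}] } },
]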
package/lib/utils.d.ts
ADDED
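The contents of the new file are not shown by the registry diff. Judging from the +5 line count, the declarations removed from index.d.ts, and src/utils.ts in the source map, it plausibly declares something close to the following (an inference, not a verbatim copy):

import { Query } from 'koishi';
import { Filter } from 'mongodb';
export declare function transformQuery(query: Query.Expr): Filter<any>;
export declare function transformEval(expr: any, aggrs?: any[][]): any;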
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
 "name": "@koishijs/plugin-database-mongo",
 "description": "MongoDB support for Koishi",
-"version": "3.0.0-beta.6",
+"version": "3.0.0-beta.7",
 "main": "lib/index.js",
 "typings": "lib/index.d.ts",
 "files": [
@@ -36,10 +36,10 @@
 "@koishijs/test-utils": "^8.0.0-beta.6"
 },
 "peerDependencies": {
-"koishi": "^4.0.0-beta.
+"koishi": "^4.0.0-beta.7"
 },
 "dependencies": {
-"@koishijs/orm-utils": "^1.0.0-beta.
+"@koishijs/orm-utils": "^1.0.0-beta.5",
 "mongodb": "^4.2.2"
 }
 }
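Beyond the version bump, package.json only moves the koishi peer dependency to ^4.0.0-beta.7 and the @koishijs/orm-utils dependency to ^1.0.0-beta.5; the configuration surface defined by MongoDatabase.Config in the embedded source is untouched. As a reminder of that surface, here is a minimal sketch of enabling the plugin, assuming Koishi's standard `ctx.plugin` loader; every value below is a placeholder:

import { Context } from 'koishi'
import mongo from '@koishijs/plugin-database-mongo'

export default (ctx: Context) => {
  ctx.plugin(mongo, {
    host: 'localhost',
    port: 27017,
    username: 'koishi',
    password: 'secret',
    database: 'koishi',
    // connectionStringFromConfig() would turn the fields above into
    // mongodb://koishi:secret@localhost:27017/koishi
    // a full connection string can be given instead and takes precedence:
    // uri: 'mongodb://localhost:27017/koishi',
  })
}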