@koishijs/plugin-database-memory 1.0.0-alpha.7 → 1.0.0-beta.3

This diff shows the contents of publicly released package versions as they appear in their public registry. It is provided for informational purposes only and reflects only the changes between those published versions.
package/lib/index.d.ts CHANGED
@@ -1,5 +1,6 @@
- /// <reference types="node" />
- import { Context, Database, TableType, Dict, Schema } from 'koishi';
+ /// <reference types="koishi/lib" />
+ import { Context, Database, Query, TableType, Dict } from 'koishi';
+ import { Config } from './storage';
  declare module 'koishi' {
  interface Database {
  memory: MemoryDatabase;
@@ -8,7 +9,7 @@ declare module 'koishi' {
  'database-memory': typeof import('.');
  }
  }
- export class MemoryDatabase extends Database {
+ export declare class MemoryDatabase extends Database {
  ctx: Context;
  config: Config;
  memory: this;
@@ -16,23 +17,15 @@ export class MemoryDatabase extends Database {
  private _storage;
  constructor(ctx: Context, config?: Config);
  start(): Promise<void>;
- $drop(name?: string): Promise<void>;
  $save(name: string): Promise<void>;
  stop(): void;
  $table<K extends TableType>(table: K): any[];
+ drop(name: TableType): Promise<void>;
+ get(name: TableType, query: Query, modifier?: Query.Modifier): Promise<Pick<any, string>[]>;
+ set(name: TableType, query: Query, data: {}): Promise<void>;
+ remove(name: TableType, query: Query): Promise<void>;
+ create(name: TableType, data: any): Promise<any>;
+ upsert(name: TableType, data: any[], key: string | string[]): Promise<void>;
+ aggregate(name: TableType, fields: {}, query: Query): Promise<any>;
  }
- export const name = "database-memory";
- export const schema: Schema<Config>;
- export function apply(ctx: Context, config?: Config): void;
- export class Storage {
- constructor(ctx: Context, config: Config);
- start(tables: Record<string, any[]>): Promise<void>;
- save(name: string, table: any[]): Promise<void>;
- drop(name?: string): Promise<void>;
- }
- export interface Config {
- storage?: boolean;
- loader?: 'json' | 'yaml' | 'yml';
- root?: string;
- prefix?: string;
- }
+ export default MemoryDatabase;
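
Note on the typings above: alpha.7 exported name, schema and an apply function, while beta.3 default-exports the MemoryDatabase class itself and moves the CRUD surface onto instance methods. A minimal consumer-side sketch of the new shape follows; it is not taken from the package, and it assumes that Koishi 4 beta's ctx.plugin accepts a plugin class constructed as (ctx, config):

    // Hedged illustration only; the plugin-loading call is an assumption, not package code.
    import { Context } from 'koishi'
    import MemoryDatabase from '@koishijs/plugin-database-memory'

    declare const ctx: Context

    // alpha.7 style (removed): the named apply() export was loaded as a function plugin.
    // beta.3 style: the default export is the Database subclass itself.
    ctx.plugin(MemoryDatabase, { storage: false })
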
package/lib/index.js CHANGED
@@ -20,13 +20,10 @@ var __spreadValues = (a, b) => {
  };
  var __markAsModule = (target) => __defProp(target, "__esModule", { value: true });
  var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
- var __require = typeof require !== "undefined" ? require : (x) => {
- throw new Error('Dynamic require of "' + x + '" is not supported');
- };
  var __export = (target, all) => {
  __markAsModule(target);
- for (var name2 in all)
- __defProp(target, name2, { get: all[name2], enumerable: true });
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
  };
  var __reExport = (target, module2, desc) => {
  if (module2 && typeof module2 === "object" || typeof module2 === "function") {
@@ -43,9 +40,7 @@ var __toModule = (module2) => {
  // plugins/database/memory/src/index.ts
  __export(exports, {
  MemoryDatabase: () => MemoryDatabase,
- apply: () => apply,
- name: () => name,
- schema: () => schema
+ default: () => src_default
  });
  var import_koishi = __toModule(require("koishi"));
  var import_orm_utils = __toModule(require("@koishijs/orm-utils"));
@@ -74,8 +69,8 @@ var Storage = class {
  const buffer = await import_fs.promises.readFile(filename);
  try {
  const data = await this.load(buffer, loader);
- const name2 = filename.slice(0, -1 - extension.length);
- tables[name2] = data;
+ const name = filename.slice(0, -1 - extension.length);
+ tables[name] = data;
  } catch {
  }
  }));
@@ -88,19 +83,19 @@ var Storage = class {
  return load(buffer.toString());
  }
  }
- async drop(name2) {
+ async drop(name) {
  const { root, loader } = this.config;
- if (name2) {
- await import_fs.promises.rm((0, import_path.resolve)(root, `${name2}.${loader}`));
+ if (name) {
+ await import_fs.promises.rm((0, import_path.resolve)(root, `${name}.${loader}`));
  } else {
  await import_fs.promises.rm(root, { recursive: true, force: true });
  }
  }
- async save(name2, table) {
+ async save(name, table) {
  const { root, loader } = this.config;
  try {
  const buffer = await this.dump(table, loader);
- await import_fs.promises.writeFile((0, import_path.resolve)(root, `${name2}.${loader}`), buffer);
+ await import_fs.promises.writeFile((0, import_path.resolve)(root, `${name}.${loader}`), buffer);
  } catch {
  }
  }
@@ -131,13 +126,9 @@ var MemoryDatabase = class extends import_koishi.Database {
  var _a;
  await ((_a = this._storage) == null ? void 0 : _a.start(this.$store));
  }
- async $drop(name2) {
- var _a;
- await ((_a = this._storage) == null ? void 0 : _a.drop(name2));
- }
- async $save(name2) {
+ async $save(name) {
  var _a;
- await ((_a = this._storage) == null ? void 0 : _a.save(name2, this.$store[name2]));
+ await ((_a = this._storage) == null ? void 0 : _a.save(name, this.$store[name]));
  }
  stop() {
  }
@@ -145,83 +136,74 @@ var MemoryDatabase = class extends import_koishi.Database {
  var _a;
  return (_a = this.$store)[table] || (_a[table] = []);
  }
- };
- __name(MemoryDatabase, "MemoryDatabase");
- import_koishi.Database.extend(MemoryDatabase, {
- async drop(name2) {
- if (name2) {
- delete this.$store[name2];
+ async drop(name) {
+ var _a;
+ if (name) {
+ delete this.$store[name];
  } else {
  this.$store = {};
  }
- await this.$drop(name2);
- },
- async get(name2, query, modifier) {
- const expr = import_koishi.Query.resolve(name2, query);
+ await ((_a = this._storage) == null ? void 0 : _a.drop(name));
+ }
+ async get(name, query, modifier) {
+ const expr = this.ctx.model.resolveQuery(name, query);
  const { fields, limit = Infinity, offset = 0 } = import_koishi.Query.resolveModifier(modifier);
- return this.$table(name2).filter((row) => (0, import_orm_utils.executeQuery)(expr, row)).map((row) => (0, import_koishi.clone)((0, import_koishi.pick)(row, fields))).slice(offset, offset + limit);
- },
- async set(name2, query, data) {
- const expr = import_koishi.Query.resolve(name2, query);
- this.$table(name2).filter((row) => (0, import_orm_utils.executeQuery)(expr, row)).forEach((row) => Object.assign(row, data));
- this.$save(name2);
- },
- async remove(name2, query) {
- const expr = import_koishi.Query.resolve(name2, query);
- this.$store[name2] = this.$table(name2).filter((row) => !(0, import_orm_utils.executeQuery)(expr, row));
- this.$save(name2);
- },
- async create(name2, data) {
- const store = this.$table(name2);
- const { primary, fields, autoInc } = import_koishi.Tables.config[name2];
+ return this.$table(name).filter((row) => (0, import_orm_utils.executeQuery)(expr, row)).map((row) => (0, import_koishi.clone)((0, import_koishi.pick)(row, fields))).slice(offset, offset + limit);
+ }
+ async set(name, query, data) {
+ const expr = this.ctx.model.resolveQuery(name, query);
+ this.$table(name).filter((row) => (0, import_orm_utils.executeQuery)(expr, row)).forEach((row) => Object.assign(row, data));
+ this.$save(name);
+ }
+ async remove(name, query) {
+ const expr = this.ctx.model.resolveQuery(name, query);
+ this.$store[name] = this.$table(name).filter((row) => !(0, import_orm_utils.executeQuery)(expr, row));
+ this.$save(name);
+ }
+ async create(name, data) {
+ const store = this.$table(name);
+ const { primary, fields, autoInc } = this.ctx.model.config[name];
  data = (0, import_koishi.clone)(data);
  if (!Array.isArray(primary) && autoInc && !(primary in data)) {
  const max = store.length ? Math.max(...store.map((row) => +row[primary])) : 0;
  data[primary] = max + 1;
- if (import_koishi.Tables.Field.string.includes(fields[primary].type)) {
+ if (import_koishi.Model.Field.string.includes(fields[primary].type)) {
  data[primary] += "";
  }
  } else {
- const duplicated = await this.get(name2, (0, import_koishi.pick)(data, (0, import_koishi.makeArray)(primary)));
+ const duplicated = await this.get(name, (0, import_koishi.pick)(data, (0, import_koishi.makeArray)(primary)));
  if (duplicated.length)
  return;
  }
- const copy = __spreadValues(__spreadValues({}, import_koishi.Tables.create(name2)), data);
+ const copy = __spreadValues(__spreadValues({}, this.ctx.model.create(name)), data);
  store.push(copy);
- this.$save(name2);
+ this.$save(name);
  return copy;
- },
- async upsert(name2, data, key) {
- const keys = (0, import_koishi.makeArray)(key || import_koishi.Tables.config[name2].primary);
+ }
+ async upsert(name, data, key) {
+ const keys = (0, import_koishi.makeArray)(key || this.ctx.model.config[name].primary);
  for (const item of data) {
- const row = this.$table(name2).find((row2) => {
+ const row = this.$table(name).find((row2) => {
  return keys.every((key2) => row2[key2] === item[key2]);
  });
  if (row) {
  Object.assign(row, (0, import_koishi.clone)(item));
  } else {
- await this.create(name2, item);
+ await this.create(name, item);
  }
  }
- this.$save(name2);
- },
- async aggregate(name2, fields, query) {
- const expr = import_koishi.Query.resolve(name2, query);
- const table = this.$table(name2).filter((row) => (0, import_orm_utils.executeQuery)(expr, row));
+ this.$save(name);
+ }
+ async aggregate(name, fields, query) {
+ const expr = this.ctx.model.resolveQuery(name, query);
+ const table = this.$table(name).filter((row) => (0, import_orm_utils.executeQuery)(expr, row));
  return (0, import_koishi.valueMap)(fields, (expr2) => (0, import_orm_utils.executeEval)(expr2, table));
  }
- });
- var name = "database-memory";
- var schema = import_koishi.Schema.object({});
- function apply(ctx, config = {}) {
- ctx.database = new MemoryDatabase(ctx, config);
- }
- __name(apply, "apply");
+ };
+ __name(MemoryDatabase, "MemoryDatabase");
+ var src_default = MemoryDatabase;
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
- MemoryDatabase,
- apply,
- name,
- schema
+ MemoryDatabase
  });
  //# sourceMappingURL=index.js.map
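
The compiled output above reflects the same migration: the CRUD handlers that alpha.7 registered through Database.extend(MemoryDatabase, {...}) are now ordinary instance methods, and query resolution goes through this.ctx.model.resolveQuery and this.ctx.model.config instead of the static Query.resolve and Tables.config helpers. A rough sketch of driving the beta.3 instance methods directly follows; the 'user' table and its 'name' field are illustrative assumptions, not something this diff guarantees:

    // Hedged sketch; table and field names are illustrative assumptions.
    import MemoryDatabase from '@koishijs/plugin-database-memory'

    declare const db: MemoryDatabase

    export async function demo() {
      await db.create('user', { name: 'alice' })               // auto-increments a non-array primary key
      const rows = await db.get('user', { name: 'alice' }, ['name'])
      await db.set('user', { name: 'alice' }, { name: 'bob' })
      await db.remove('user', { name: 'bob' })
      await db.drop('user')                                     // clears the in-memory table and, with file storage enabled, its persisted file
      return rows
    }
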
package/lib/index.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../src/index.ts", "../src/storage/node.ts"],
- "sourcesContent": ["import { Context, Database, Query, Tables, TableType, clone, makeArray, pick, Dict, valueMap, Schema } from 'koishi'\nimport { executeEval, executeQuery } from '@koishijs/orm-utils'\nimport { Storage, Config } from './storage'\n\ndeclare module 'koishi' {\n interface Database {\n memory: MemoryDatabase\n }\n\n interface Modules {\n 'database-memory': typeof import('.')\n }\n}\n\nexport class MemoryDatabase extends Database {\n public memory = this\n public $store: Dict<any[]> = {}\n\n private _storage: Storage\n\n constructor(public ctx: Context, public config: Config = {}) {\n super(ctx)\n\n if (config.storage) {\n this._storage = new Storage(ctx, config)\n }\n }\n\n async start() {\n await this._storage?.start(this.$store)\n }\n\n async $drop(name?: string) {\n await this._storage?.drop(name)\n }\n\n async $save(name: string) {\n await this._storage?.save(name, this.$store[name])\n }\n\n stop() {}\n\n $table<K extends TableType>(table: K) {\n return this.$store[table] ||= []\n }\n}\n\nDatabase.extend(MemoryDatabase, {\n async drop(name) {\n if (name) {\n delete this.$store[name]\n } else {\n this.$store = {}\n }\n await this.$drop(name)\n },\n\n async get(name, query, modifier) {\n const expr = Query.resolve(name, query)\n const { fields, limit = Infinity, offset = 0 } = Query.resolveModifier(modifier)\n return this.$table(name)\n .filter(row => executeQuery(expr, row))\n .map(row => clone(pick(row, fields)))\n .slice(offset, offset + limit)\n },\n\n async set(name, query, data) {\n const expr = Query.resolve(name, query)\n this.$table(name)\n .filter(row => executeQuery(expr, row))\n .forEach(row => Object.assign(row, data))\n this.$save(name)\n },\n\n async remove(name, query) {\n const expr = Query.resolve(name, query)\n this.$store[name] = this.$table(name)\n .filter(row => !executeQuery(expr, row))\n this.$save(name)\n },\n\n async create(name, data: any) {\n const store = this.$table(name)\n const { primary, fields, autoInc } = Tables.config[name] as Tables.Config\n data = clone(data)\n if (!Array.isArray(primary) && autoInc && !(primary in data)) {\n const max = store.length ? 
Math.max(...store.map(row => +row[primary])) : 0\n data[primary] = max + 1\n if (Tables.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n }\n } else {\n const duplicated = await this.get(name, pick(data, makeArray(primary)))\n if (duplicated.length) return\n }\n const copy = { ...Tables.create(name), ...data }\n store.push(copy)\n this.$save(name)\n return copy\n },\n\n async upsert(name, data, key) {\n const keys = makeArray(key || Tables.config[name].primary)\n for (const item of data) {\n const row = this.$table(name).find(row => {\n return keys.every(key => row[key] === item[key])\n })\n if (row) {\n Object.assign(row, clone(item))\n } else {\n await this.create(name, item)\n }\n }\n this.$save(name)\n },\n\n async aggregate(name, fields, query) {\n const expr = Query.resolve(name, query)\n const table = this.$table(name).filter(row => executeQuery(expr, row))\n return valueMap(fields, expr => executeEval(expr, table))\n },\n})\n\nexport const name = 'database-memory'\n\nexport const schema: Schema<Config> = Schema.object({})\n\nexport function apply(ctx: Context, config: Config = {}) {\n ctx.database = new MemoryDatabase(ctx, config)\n}\n", "import type * as yaml from 'js-yaml'\nimport { Context } from 'koishi'\nimport { promises as fs } from 'fs'\nimport { extname, resolve } from 'path'\n\ntype Loader = 'json' | 'yaml' | 'yml'\nconst loaders = ['json', 'yaml', 'yml']\n\nexport interface Config {\n loader?: Loader\n root?: string\n}\n\nexport class Storage {\n constructor(ctx: Context, private config: Config) {\n config.loader ||= 'json'\n config.root ||= resolve(ctx.app.options.baseDir, '.koishi/database')\n if (!loaders.includes(config.loader)) {\n throw new Error(`unsupported loader \"${config.loader}\"`)\n }\n }\n\n async start(tables: Record<string, any[]>) {\n const { root, loader } = this.config\n await fs.mkdir(root, { recursive: true })\n const files = await fs.readdir(root)\n await Promise.all(files.map(async (filename) => {\n const extension = extname(filename)\n if (extension !== loader) return\n const buffer = await fs.readFile(filename)\n try {\n const data = await this.load(buffer, loader)\n const name = filename.slice(0, -1 - extension.length)\n tables[name] = data\n } catch {}\n }))\n }\n\n async load(buffer: Buffer, loader: Loader) {\n if (loader === 'json') {\n return JSON.parse(buffer.toString())\n } else if (loader === 'yaml' || loader === 'yml') {\n const { load } = require('js-yaml') as typeof yaml\n return load(buffer.toString())\n }\n }\n\n async drop(name?: string) {\n const { root, loader } = this.config\n if (name) {\n await fs.rm(resolve(root, `${name}.${loader}`))\n } else {\n await fs.rm(root, { recursive: true, force: true })\n }\n }\n\n async save(name: string, table: any[]) {\n const { root, loader } = this.config\n try {\n const buffer = await this.dump(table, loader)\n await fs.writeFile(resolve(root, `${name}.${loader}`), buffer)\n } catch {}\n }\n\n async dump(data: any, loader: Loader) {\n if (loader === 'json') {\n return JSON.stringify(data)\n } else if (loader === 'yaml' || loader === 'yml') {\n const { dump } = require('js-yaml') as typeof yaml\n return dump(data)\n }\n }\n}\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA4G;AAC5G,uBAA0C;;;ACC1C,gBAA+B;AAC/B,kBAAiC;AAGjC,IAAM,UAAU,CAAC,QAAQ,QAAQ;AAO1B,oBAAc;AAAA,EACnB,YAAY,KAAsB,QAAgB;AAAhB;AAChC,WAAO,UAAP,QAAO,SAAW;AAClB,WAAO,QAAP,QAAO,OAAS,yBAAQ,IAAI,IAAI,QAAQ,SAAS;AACjD,QAAI,CAAC,QAAQ,SAAS,OAAO,SAAS;AACpC,YAAM,IAAI,MAAM,uBAAuB,OAAO;AAAA;AAAA;AAAA,QAI5C,MAAM,QAA+B;AACzC,UAAM,EAAE,MAAM,WAAW,KAAK;AAC9B,UAAM,mBAAG,MAAM,MAAM,EAAE,WAAW;AAClC,UAAM,QAAQ,MAAM,mBAAG,QAAQ;AAC/B,UAAM,QAAQ,IAAI,MAAM,IAAI,OAAO,aAAa;AAC9C,YAAM,YAAY,yBAAQ;AAC1B,UAAI,cAAc;AAAQ;AAC1B,YAAM,SAAS,MAAM,mBAAG,SAAS;AACjC,UAAI;AACF,cAAM,OAAO,MAAM,KAAK,KAAK,QAAQ;AACrC,cAAM,QAAO,SAAS,MAAM,GAAG,KAAK,UAAU;AAC9C,eAAO,SAAQ;AAAA,cACf;AAAA;AAAA;AAAA;AAAA,QAIA,KAAK,QAAgB,QAAgB;AACzC,QAAI,WAAW,QAAQ;AACrB,aAAO,KAAK,MAAM,OAAO;AAAA,eAChB,WAAW,UAAU,WAAW,OAAO;AAChD,YAAM,EAAE,SAAS,QAAQ;AACzB,aAAO,KAAK,OAAO;AAAA;AAAA;AAAA,QAIjB,KAAK,OAAe;AACxB,UAAM,EAAE,MAAM,WAAW,KAAK;AAC9B,QAAI,OAAM;AACR,YAAM,mBAAG,GAAG,yBAAQ,MAAM,GAAG,SAAQ;AAAA,WAChC;AACL,YAAM,mBAAG,GAAG,MAAM,EAAE,WAAW,MAAM,OAAO;AAAA;AAAA;AAAA,QAI1C,KAAK,OAAc,OAAc;AACrC,UAAM,EAAE,MAAM,WAAW,KAAK;AAC9B,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,KAAK,OAAO;AACtC,YAAM,mBAAG,UAAU,yBAAQ,MAAM,GAAG,SAAQ,WAAW;AAAA,YACvD;AAAA;AAAA;AAAA,QAGE,KAAK,MAAW,QAAgB;AACpC,QAAI,WAAW,QAAQ;AACrB,aAAO,KAAK,UAAU;AAAA,eACb,WAAW,UAAU,WAAW,OAAO;AAChD,YAAM,EAAE,SAAS,QAAQ;AACzB,aAAO,KAAK;AAAA;AAAA;AAAA;AAxDX;;;ADCA,mCAA6B,uBAAS;AAAA,EAM3C,YAAmB,KAAqB,SAAiB,IAAI;AAC3D,UAAM;AADW;AAAqB;AALjC,kBAAS;AACT,kBAAsB;AAO3B,QAAI,OAAO,SAAS;AAClB,WAAK,WAAW,IAAI,QAAQ,KAAK;AAAA;AAAA;AAAA,QAI/B,QAAQ;AA5BhB;AA6BI,UAAM,YAAK,aAAL,mBAAe,MAAM,KAAK;AAAA;AAAA,QAG5B,MAAM,OAAe;AAhC7B;AAiCI,UAAM,YAAK,aAAL,mBAAe,KAAK;AAAA;AAAA,QAGtB,MAAM,OAAc;AApC5B;AAqCI,UAAM,YAAK,aAAL,mBAAe,KAAK,OAAM,KAAK,OAAO;AAAA;AAAA,EAG9C,OAAO;AAAA;AAAA,EAEP,OAA4B,OAAU;AA1CxC;AA2CI,WAAO,WAAK,QAAL,uBAAuB;AAAA;AAAA;AA7B3B;AAiCP,uBAAS,OAAO,gBAAgB;AAAA,QACxB,KAAK,OAAM;AACf,QAAI,OAAM;AACR,aAAO,KAAK,OAAO;AAAA,WACd;AACL,WAAK,SAAS;AAAA;AAEhB,UAAM,KAAK,MAAM;AAAA;AAAA,QAGb,IAAI,OAAM,OAAO,UAAU;AAC/B,UAAM,OAAO,oBAAM,QAAQ,OAAM;AACjC,UAAM,EAAE,QAAQ,QAAQ,UAAU,SAAS,MAAM,oBAAM,gBAAgB;AACvE,WAAO,KAAK,OAAO,OAChB,OAAO,SAAO,mCAAa,MAAM,MACjC,IAAI,SAAO,yBAAM,wBAAK,KAAK,UAC3B,MAAM,QAAQ,SAAS;AAAA;AAAA,QAGtB,IAAI,OAAM,OAAO,MAAM;AAC3B,UAAM,OAAO,oBAAM,QAAQ,OAAM;AACjC,SAAK,OAAO,OACT,OAAO,SAAO,mCAAa,MAAM,MACjC,QAAQ,SAAO,OAAO,OAAO,KAAK;AACrC,SAAK,MAAM;AAAA;AAAA,QAGP,OAAO,OAAM,OAAO;AACxB,UAAM,OAAO,oBAAM,QAAQ,OAAM;AACjC,SAAK,OAAO,SAAQ,KAAK,OAAO,OAC7B,OAAO,SAAO,CAAC,mCAAa,MAAM;AACrC,SAAK,MAAM;AAAA;AAAA,QAGP,OAAO,OAAM,MAAW;AAC5B,UAAM,QAAQ,KAAK,OAAO;AAC1B,UAAM,EAAE,SAAS,QAAQ,YAAY,qBAAO,OAAO;AACnD,WAAO,yBAAM;AACb,QAAI,CAAC,MAAM,QAAQ,YAAY,WAAW,CAAE,YAAW,OAAO;AAC5D,YAAM,MAAM,MAAM,SAAS,KAAK,IAAI,GAAG,MAAM,IAAI,SAAO,CAAC,IAAI,aAAa;AAC1E,WAAK,WAAW,MAAM;AACtB,UAAI,qBAAO,MAAM,OAAO,SAAS,OAAO,SAAS,OAAO;AACtD,aAAK,YAAY;AAAA;AAAA,WAEd;AACL,YAAM,aAAa,MAAM,KAAK,IAAI,OAAM,wBAAK,MAAM,6BAAU;AAC7D,UAAI,WAAW;AAAQ;AAAA;AAEzB,UAAM,OAAO,kCAAK,qBAAO,OAAO,SAAU;AAC1C,UAAM,KAAK;AACX,SAAK,MAAM;AACX,WAAO;AAAA;AAAA,QAGH,OAAO,OAAM,MAAM,KAAK;AAC5B,UAAM,OAAO,6BAAU,OAAO,qBAAO,OAAO,OAAM;AAClD,eAAW,QAAQ,MAAM;AACvB,YAAM,MAAM,KAAK,OAAO,OAAM,KAAK,UAAO;AACxC,eAAO,KAAK,MAAM,UAAO,KAAI,UAAS,KAAK;AAAA;AAE7C,UAAI,KAAK;AACP,eAAO,OAAO,KAAK,yBAAM;AAAA,aACpB;AACL,cAAM,KAAK,OAAO,OAAM;AAAA;AAAA;AAG5B,SAAK,MAAM;AAAA;AAAA,QAGP,UAAU,OAAM,QAAQ,OAAO;AACnC,UAAM,OAAO,oBAAM,QAAQ,OAAM;AACjC,UAAM,QAAQ,KAAK,OAAO,OAAM,OAAO,SAAO,mCAAa,MAAM;AACjE,WAAO,4BAAS,QAAQ,WAAQ,kCAAY,OAAM;AAAA;AAAA;AAI/C,IAAM,OAAO;AAEb,IAAM,SAAyB,qBAAO,OAAO;AAE7C,eAAe,KAAc,SAAiB,IAAI;AACvD
,MAAI,WAAW,IAAI,eAAe,KAAK;AAAA;AADzB;",
+ "sourcesContent": ["import { Context, Database, Query, TableType, clone, makeArray, pick, Dict, valueMap, Model } from 'koishi'\nimport { executeEval, executeQuery } from '@koishijs/orm-utils'\nimport { Storage, Config } from './storage'\n\ndeclare module 'koishi' {\n interface Database {\n memory: MemoryDatabase\n }\n\n interface Modules {\n 'database-memory': typeof import('.')\n }\n}\n\nexport class MemoryDatabase extends Database {\n public memory = this\n public $store: Dict<any[]> = {}\n\n private _storage: Storage\n\n constructor(public ctx: Context, public config: Config = {}) {\n super(ctx)\n\n if (config.storage) {\n this._storage = new Storage(ctx, config)\n }\n }\n\n async start() {\n await this._storage?.start(this.$store)\n }\n\n async $save(name: string) {\n await this._storage?.save(name, this.$store[name])\n }\n\n stop() {}\n\n $table<K extends TableType>(table: K) {\n return this.$store[table] ||= []\n }\n\n async drop(name: TableType) {\n if (name) {\n delete this.$store[name]\n } else {\n this.$store = {}\n }\n await this._storage?.drop(name)\n }\n\n async get(name: TableType, query: Query, modifier?: Query.Modifier) {\n const expr = this.ctx.model.resolveQuery(name, query)\n const { fields, limit = Infinity, offset = 0 } = Query.resolveModifier(modifier)\n return this.$table(name)\n .filter(row => executeQuery(expr, row))\n .map(row => clone(pick(row, fields)))\n .slice(offset, offset + limit)\n }\n\n async set(name: TableType, query: Query, data: {}) {\n const expr = this.ctx.model.resolveQuery(name, query)\n this.$table(name)\n .filter(row => executeQuery(expr, row))\n .forEach(row => Object.assign(row, data))\n this.$save(name)\n }\n\n async remove(name: TableType, query: Query) {\n const expr = this.ctx.model.resolveQuery(name, query)\n this.$store[name] = this.$table(name)\n .filter(row => !executeQuery(expr, row))\n this.$save(name)\n }\n\n async create(name: TableType, data: any) {\n const store = this.$table(name)\n const { primary, fields, autoInc } = this.ctx.model.config[name]\n data = clone(data)\n if (!Array.isArray(primary) && autoInc && !(primary in data)) {\n const max = store.length ? 
Math.max(...store.map(row => +row[primary])) : 0\n data[primary] = max + 1\n if (Model.Field.string.includes(fields[primary].type)) {\n data[primary] += ''\n }\n } else {\n const duplicated = await this.get(name, pick(data, makeArray(primary)))\n if (duplicated.length) return\n }\n const copy = { ...this.ctx.model.create(name), ...data }\n store.push(copy)\n this.$save(name)\n return copy\n }\n\n async upsert(name: TableType, data: any[], key: string | string[]) {\n const keys = makeArray(key || this.ctx.model.config[name].primary)\n for (const item of data) {\n const row = this.$table(name).find(row => {\n return keys.every(key => row[key] === item[key])\n })\n if (row) {\n Object.assign(row, clone(item))\n } else {\n await this.create(name, item)\n }\n }\n this.$save(name)\n }\n\n async aggregate(name: TableType, fields: {}, query: Query) {\n const expr = this.ctx.model.resolveQuery(name, query)\n const table = this.$table(name).filter(row => executeQuery(expr, row))\n return valueMap(fields, expr => executeEval(expr, table)) as any\n }\n}\n\nexport default MemoryDatabase\n", "import type * as yaml from 'js-yaml'\nimport { Context } from 'koishi'\nimport { promises as fs } from 'fs'\nimport { extname, resolve } from 'path'\n\ntype Loader = 'json' | 'yaml' | 'yml'\nconst loaders = ['json', 'yaml', 'yml']\n\nexport interface Config {\n loader?: Loader\n root?: string\n}\n\nexport class Storage {\n constructor(ctx: Context, private config: Config) {\n config.loader ||= 'json'\n config.root ||= resolve(ctx.app.options.baseDir, '.koishi/database')\n if (!loaders.includes(config.loader)) {\n throw new Error(`unsupported loader \"${config.loader}\"`)\n }\n }\n\n async start(tables: Record<string, any[]>) {\n const { root, loader } = this.config\n await fs.mkdir(root, { recursive: true })\n const files = await fs.readdir(root)\n await Promise.all(files.map(async (filename) => {\n const extension = extname(filename)\n if (extension !== loader) return\n const buffer = await fs.readFile(filename)\n try {\n const data = await this.load(buffer, loader)\n const name = filename.slice(0, -1 - extension.length)\n tables[name] = data\n } catch {}\n }))\n }\n\n async load(buffer: Buffer, loader: Loader) {\n if (loader === 'json') {\n return JSON.parse(buffer.toString())\n } else if (loader === 'yaml' || loader === 'yml') {\n const { load } = require('js-yaml') as typeof yaml\n return load(buffer.toString())\n }\n }\n\n async drop(name?: string) {\n const { root, loader } = this.config\n if (name) {\n await fs.rm(resolve(root, `${name}.${loader}`))\n } else {\n await fs.rm(root, { recursive: true, force: true })\n }\n }\n\n async save(name: string, table: any[]) {\n const { root, loader } = this.config\n try {\n const buffer = await this.dump(table, loader)\n await fs.writeFile(resolve(root, `${name}.${loader}`), buffer)\n } catch {}\n }\n\n async dump(data: any, loader: Loader) {\n if (loader === 'json') {\n return JSON.stringify(data)\n } else if (loader === 'yaml' || loader === 'yml') {\n const { dump } = require('js-yaml') as typeof yaml\n return dump(data)\n }\n }\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmG;AACnG,uBAA0C;;;ACC1C,gBAA+B;AAC/B,kBAAiC;AAGjC,IAAM,UAAU,CAAC,QAAQ,QAAQ;AAO1B,oBAAc;AAAA,EACnB,YAAY,KAAsB,QAAgB;AAAhB;AAChC,WAAO,UAAP,QAAO,SAAW;AAClB,WAAO,QAAP,QAAO,OAAS,yBAAQ,IAAI,IAAI,QAAQ,SAAS;AACjD,QAAI,CAAC,QAAQ,SAAS,OAAO,SAAS;AACpC,YAAM,IAAI,MAAM,uBAAuB,OAAO;AAAA;AAAA;AAAA,QAI5C,MAAM,QAA+B;AACzC,UAAM,EAAE,MAAM,WAAW,KAAK;AAC9B,UAAM,mBAAG,MAAM,MAAM,EAAE,WAAW;AAClC,UAAM,QAAQ,MAAM,mBAAG,QAAQ;AAC/B,UAAM,QAAQ,IAAI,MAAM,IAAI,OAAO,aAAa;AAC9C,YAAM,YAAY,yBAAQ;AAC1B,UAAI,cAAc;AAAQ;AAC1B,YAAM,SAAS,MAAM,mBAAG,SAAS;AACjC,UAAI;AACF,cAAM,OAAO,MAAM,KAAK,KAAK,QAAQ;AACrC,cAAM,OAAO,SAAS,MAAM,GAAG,KAAK,UAAU;AAC9C,eAAO,QAAQ;AAAA,cACf;AAAA;AAAA;AAAA;AAAA,QAIA,KAAK,QAAgB,QAAgB;AACzC,QAAI,WAAW,QAAQ;AACrB,aAAO,KAAK,MAAM,OAAO;AAAA,eAChB,WAAW,UAAU,WAAW,OAAO;AAChD,YAAM,EAAE,SAAS,QAAQ;AACzB,aAAO,KAAK,OAAO;AAAA;AAAA;AAAA,QAIjB,KAAK,MAAe;AACxB,UAAM,EAAE,MAAM,WAAW,KAAK;AAC9B,QAAI,MAAM;AACR,YAAM,mBAAG,GAAG,yBAAQ,MAAM,GAAG,QAAQ;AAAA,WAChC;AACL,YAAM,mBAAG,GAAG,MAAM,EAAE,WAAW,MAAM,OAAO;AAAA;AAAA;AAAA,QAI1C,KAAK,MAAc,OAAc;AACrC,UAAM,EAAE,MAAM,WAAW,KAAK;AAC9B,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,KAAK,OAAO;AACtC,YAAM,mBAAG,UAAU,yBAAQ,MAAM,GAAG,QAAQ,WAAW;AAAA,YACvD;AAAA;AAAA;AAAA,QAGE,KAAK,MAAW,QAAgB;AACpC,QAAI,WAAW,QAAQ;AACrB,aAAO,KAAK,UAAU;AAAA,eACb,WAAW,UAAU,WAAW,OAAO;AAChD,YAAM,EAAE,SAAS,QAAQ;AACzB,aAAO,KAAK;AAAA;AAAA;AAAA;AAxDX;;;ADCA,mCAA6B,uBAAS;AAAA,EAM3C,YAAmB,KAAqB,SAAiB,IAAI;AAC3D,UAAM;AADW;AAAqB;AALjC,kBAAS;AACT,kBAAsB;AAO3B,QAAI,OAAO,SAAS;AAClB,WAAK,WAAW,IAAI,QAAQ,KAAK;AAAA;AAAA;AAAA,QAI/B,QAAQ;AA5BhB;AA6BI,UAAM,YAAK,aAAL,mBAAe,MAAM,KAAK;AAAA;AAAA,QAG5B,MAAM,MAAc;AAhC5B;AAiCI,UAAM,YAAK,aAAL,mBAAe,KAAK,MAAM,KAAK,OAAO;AAAA;AAAA,EAG9C,OAAO;AAAA;AAAA,EAEP,OAA4B,OAAU;AAtCxC;AAuCI,WAAO,WAAK,QAAL,uBAAuB;AAAA;AAAA,QAG1B,KAAK,MAAiB;AA1C9B;AA2CI,QAAI,MAAM;AACR,aAAO,KAAK,OAAO;AAAA,WACd;AACL,WAAK,SAAS;AAAA;AAEhB,UAAM,YAAK,aAAL,mBAAe,KAAK;AAAA;AAAA,QAGtB,IAAI,MAAiB,OAAc,UAA2B;AAClE,UAAM,OAAO,KAAK,IAAI,MAAM,aAAa,MAAM;AAC/C,UAAM,EAAE,QAAQ,QAAQ,UAAU,SAAS,MAAM,oBAAM,gBAAgB;AACvE,WAAO,KAAK,OAAO,MAChB,OAAO,SAAO,mCAAa,MAAM,MACjC,IAAI,SAAO,yBAAM,wBAAK,KAAK,UAC3B,MAAM,QAAQ,SAAS;AAAA;AAAA,QAGtB,IAAI,MAAiB,OAAc,MAAU;AACjD,UAAM,OAAO,KAAK,IAAI,MAAM,aAAa,MAAM;AAC/C,SAAK,OAAO,MACT,OAAO,SAAO,mCAAa,MAAM,MACjC,QAAQ,SAAO,OAAO,OAAO,KAAK;AACrC,SAAK,MAAM;AAAA;AAAA,QAGP,OAAO,MAAiB,OAAc;AAC1C,UAAM,OAAO,KAAK,IAAI,MAAM,aAAa,MAAM;AAC/C,SAAK,OAAO,QAAQ,KAAK,OAAO,MAC7B,OAAO,SAAO,CAAC,mCAAa,MAAM;AACrC,SAAK,MAAM;AAAA;AAAA,QAGP,OAAO,MAAiB,MAAW;AACvC,UAAM,QAAQ,KAAK,OAAO;AAC1B,UAAM,EAAE,SAAS,QAAQ,YAAY,KAAK,IAAI,MAAM,OAAO;AAC3D,WAAO,yBAAM;AACb,QAAI,CAAC,MAAM,QAAQ,YAAY,WAAW,CAAE,YAAW,OAAO;AAC5D,YAAM,MAAM,MAAM,SAAS,KAAK,IAAI,GAAG,MAAM,IAAI,SAAO,CAAC,IAAI,aAAa;AAC1E,WAAK,WAAW,MAAM;AACtB,UAAI,oBAAM,MAAM,OAAO,SAAS,OAAO,SAAS,OAAO;AACrD,aAAK,YAAY;AAAA;AAAA,WAEd;AACL,YAAM,aAAa,MAAM,KAAK,IAAI,MAAM,wBAAK,MAAM,6BAAU;AAC7D,UAAI,WAAW;AAAQ;AAAA;AAEzB,UAAM,OAAO,kCAAK,KAAK,IAAI,MAAM,OAAO,QAAU;AAClD,UAAM,KAAK;AACX,SAAK,MAAM;AACX,WAAO;AAAA;AAAA,QAGH,OAAO,MAAiB,MAAa,KAAwB;AACjE,UAAM,OAAO,6BAAU,OAAO,KAAK,IAAI,MAAM,OAAO,MAAM;AAC1D,eAAW,QAAQ,MAAM;AACvB,YAAM,MAAM,KAAK,OAAO,MAAM,KAAK,UAAO;AACxC,eAAO,KAAK,MAAM,UAAO,KAAI,UAAS,KAAK;AAAA;AAE7C,UAAI,KAAK;AACP,eAAO,OAAO,KAAK,yBAAM;AAAA,aACpB;AACL,cAAM,KAAK,OAAO,MAAM;AAAA;AAAA;AAG5B,SAAK,MAAM;AAAA;AAAA,QAGP,UAAU,MAAiB,QAAY,OAAc;AACzD,UAAM,OAAO,KAAK,IAAI,MAAM,aAAa,MAAM;AAC/C,UAAM,QAAQ,KAAK,OAAO,MAAM,OAAO,SAAO,mCAAa,MAAM;AACjE,WAAO,4BAAS,QAAQ,WAAQ,kCAAY,OAAM;AAAA;AAAA;AAnG/C;AAuGP,IAAO,cAAQ;",
  "names": []
  }
@@ -0,0 +1,11 @@
+ import { Context } from 'koishi';
+ export interface Config {
+ prefix?: string;
+ }
+ export declare class Storage {
+ private config;
+ constructor(ctx: Context, config: Config);
+ start(tables: Record<string, any[]>): Promise<void>;
+ drop(name?: string): Promise<void>;
+ save(name: string, table: any[]): Promise<void>;
+ }
@@ -0,0 +1,13 @@
+ import { Context } from 'koishi';
+ export declare class Storage {
+ constructor(ctx: Context, config: Config);
+ start(tables: Record<string, any[]>): Promise<void>;
+ save(name: string, table: any[]): Promise<void>;
+ drop(name?: string): Promise<void>;
+ }
+ export interface Config {
+ storage?: boolean;
+ loader?: 'json' | 'yaml' | 'yml';
+ root?: string;
+ prefix?: string;
+ }
@@ -0,0 +1,17 @@
+ /// <reference types="node" />
+ import { Context } from 'koishi';
+ declare type Loader = 'json' | 'yaml' | 'yml';
+ export interface Config {
+ loader?: Loader;
+ root?: string;
+ }
+ export declare class Storage {
+ private config;
+ constructor(ctx: Context, config: Config);
+ start(tables: Record<string, any[]>): Promise<void>;
+ load(buffer: Buffer, loader: Loader): Promise<any>;
+ drop(name?: string): Promise<void>;
+ save(name: string, table: any[]): Promise<void>;
+ dump(data: any, loader: Loader): Promise<string>;
+ }
+ export {};
package/package.json CHANGED
@@ -1,14 +1,14 @@
  {
  "name": "@koishijs/plugin-database-memory",
  "description": "A in-memory database implementation for Koishi",
- "version": "1.0.0-alpha.7",
+ "version": "1.0.0-beta.3",
  "main": "lib/node.js",
  "module": "lib/browser.js",
  "typings": "lib/index.d.ts",
  "files": [
  "lib"
  ],
- "author": "Shigma <1700011071@pku.edu.cn>",
+ "author": "Shigma <shigma10826@gmail.com>",
  "license": "MIT",
  "repository": {
  "type": "git",
@@ -30,13 +30,15 @@
  "server"
  ],
  "peerDependencies": {
- "koishi": "^4.0.0-alpha.10"
+ "koishi": "^4.0.0-beta.5"
  },
  "devDependencies": {
+ "@koishijs/plugin-mock": "^1.0.0-beta.1",
+ "@koishijs/test-utils": "^8.0.0-beta.5",
  "@types/js-yaml": "^4.0.3",
  "js-yaml": "^4.1.0"
  },
  "dependencies": {
- "@koishijs/orm-utils": "^1.0.0-alpha.2"
+ "@koishijs/orm-utils": "^1.0.0-beta.3"
  }
  }