@naturalcycles/db-lib 10.26.1 → 10.28.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/adapter/cachedb/cache.db.d.ts +2 -2
  2. package/dist/adapter/cachedb/cache.db.js +7 -7
  3. package/dist/adapter/file/file.db.d.ts +2 -2
  4. package/dist/adapter/file/file.db.js +5 -6
  5. package/dist/commondao/common.dao.d.ts +9 -23
  6. package/dist/commondao/common.dao.js +38 -157
  7. package/dist/commondao/common.dao.model.d.ts +1 -19
  8. package/dist/commondb/base.common.db.d.ts +2 -2
  9. package/dist/commondb/common.db.d.ts +2 -2
  10. package/dist/inmemory/inMemory.db.d.ts +2 -2
  11. package/dist/inmemory/inMemory.db.js +2 -2
  12. package/dist/inmemory/inMemoryKeyValueDB.d.ts +4 -4
  13. package/dist/inmemory/inMemoryKeyValueDB.js +4 -4
  14. package/dist/inmemory/queryInMemory.js +2 -2
  15. package/dist/kv/commonKeyValueDB.d.ts +4 -4
  16. package/dist/kv/commonKeyValueDao.d.ts +5 -5
  17. package/dist/kv/commonKeyValueDao.js +9 -12
  18. package/dist/pipeline/dbPipelineBackup.d.ts +1 -1
  19. package/dist/pipeline/dbPipelineBackup.js +3 -3
  20. package/dist/pipeline/dbPipelineCopy.d.ts +1 -1
  21. package/dist/pipeline/dbPipelineCopy.js +3 -4
  22. package/dist/pipeline/dbPipelineRestore.d.ts +1 -2
  23. package/dist/pipeline/dbPipelineRestore.js +2 -3
  24. package/dist/query/dbQuery.d.ts +6 -12
  25. package/dist/query/dbQuery.js +0 -19
  26. package/dist/testing/commonDBTest.js +2 -2
  27. package/dist/testing/commonDaoTest.js +12 -6
  28. package/package.json +1 -1
  29. package/src/adapter/cachedb/cache.db.ts +8 -9
  30. package/src/adapter/file/file.db.ts +6 -10
  31. package/src/commondao/common.dao.model.ts +1 -26
  32. package/src/commondao/common.dao.ts +50 -208
  33. package/src/commondb/base.common.db.ts +2 -2
  34. package/src/commondb/common.db.ts +2 -2
  35. package/src/inmemory/inMemory.db.ts +3 -7
  36. package/src/inmemory/inMemoryKeyValueDB.ts +7 -8
  37. package/src/inmemory/queryInMemory.ts +2 -2
  38. package/src/kv/commonKeyValueDB.ts +4 -4
  39. package/src/kv/commonKeyValueDao.ts +16 -20
  40. package/src/pipeline/dbPipelineBackup.ts +6 -6
  41. package/src/pipeline/dbPipelineCopy.ts +6 -8
  42. package/src/pipeline/dbPipelineRestore.ts +2 -2
  43. package/src/query/dbQuery.ts +5 -39
  44. package/src/testing/commonDBTest.ts +2 -2
  45. package/src/testing/commonDaoTest.ts +12 -6
  46. package/src/testing/test.model.ts +1 -1
@@ -1,6 +1,6 @@
1
1
  import type { JsonSchemaObject, JsonSchemaRootObject } from '@naturalcycles/js-lib/json-schema';
2
2
  import type { ObjectWithId } from '@naturalcycles/js-lib/types';
3
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
3
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
4
4
  import { BaseCommonDB } from '../../commondb/base.common.db.js';
5
5
  import type { CommonDB, CommonDBSupport } from '../../commondb/common.db.js';
6
6
  import type { RunQueryResult } from '../../db.model.js';
@@ -27,7 +27,7 @@ export declare class CacheDB extends BaseCommonDB implements CommonDB {
27
27
  saveBatch<ROW extends ObjectWithId>(table: string, rows: ROW[], opt?: CacheDBSaveOptions<ROW>): Promise<void>;
28
28
  runQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CacheDBSaveOptions<ROW>): Promise<RunQueryResult<ROW>>;
29
29
  runQueryCount<ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CacheDBOptions): Promise<number>;
30
- streamQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CacheDBStreamOptions): ReadableTyped<ROW>;
30
+ streamQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CacheDBStreamOptions): Pipeline<ROW>;
31
31
  deleteByQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CacheDBOptions): Promise<number>;
32
32
  patchByQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, patch: Partial<ROW>, opt?: CacheDBOptions): Promise<number>;
33
33
  }
@@ -1,5 +1,5 @@
1
- import { Readable } from 'node:stream';
2
1
  import { _isTruthy } from '@naturalcycles/js-lib';
2
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
3
3
  import { BaseCommonDB } from '../../commondb/base.common.db.js';
4
4
  import { commonDBFullSupport } from '../../commondb/common.db.js';
5
5
  /**
@@ -103,7 +103,7 @@ export class CacheDB extends BaseCommonDB {
103
103
  this.cfg.logger?.log(`${q.table}.runQuery ${rows.length} rows from downstream`);
104
104
  }
105
105
  // Don't save to cache if it was a projection query
106
- if (!opt.skipCache && !opt.skipCache && !q._selectedFieldNames) {
106
+ if (!opt.skipCache && !this.cfg.skipCache && !q._selectedFieldNames) {
107
107
  const cacheResult = this.cfg.cacheDB.saveBatch(q.table, rows, opt);
108
108
  if (this.cfg.awaitCache)
109
109
  await cacheResult;
@@ -130,7 +130,7 @@ export class CacheDB extends BaseCommonDB {
130
130
  }
131
131
  streamQuery(q, opt = {}) {
132
132
  if (!opt.onlyCache && !this.cfg.onlyCache) {
133
- const stream = this.cfg.downstreamDB.streamQuery(q, opt);
133
+ const pipeline = this.cfg.downstreamDB.streamQuery(q, opt);
134
134
  // Don't save to cache if it was a projection query
135
135
  if (!opt.skipCache && !this.cfg.skipCache && !q._selectedFieldNames) {
136
136
  // todo: rethink if we really should download WHOLE stream into memory in order to save it to cache
@@ -141,11 +141,11 @@ export class CacheDB extends BaseCommonDB {
141
141
  // await this.cfg.cacheDB.saveBatch(q.table, dbms as any)
142
142
  // })
143
143
  }
144
- return stream;
144
+ return pipeline;
145
145
  }
146
146
  if (opt.skipCache || this.cfg.skipCache)
147
- return Readable.from([]);
148
- const stream = this.cfg.cacheDB.streamQuery(q, opt);
147
+ return Pipeline.fromArray([]);
148
+ const pipeline = this.cfg.cacheDB.streamQuery(q, opt);
149
149
  // if (this.cfg.logCached) {
150
150
  // let count = 0
151
151
  //
@@ -156,7 +156,7 @@ export class CacheDB extends BaseCommonDB {
156
156
  // this.log(`${q.table}.streamQuery ${length} rows from cache`)
157
157
  // })
158
158
  // }
159
- return stream;
159
+ return pipeline;
160
160
  }
161
161
  async deleteByQuery(q, opt = {}) {
162
162
  if (!opt.onlyCache && !this.cfg.onlyCache) {
@@ -1,6 +1,6 @@
1
1
  import type { JsonSchemaRootObject } from '@naturalcycles/js-lib/json-schema';
2
2
  import { type ObjectWithId } from '@naturalcycles/js-lib/types';
3
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
3
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
4
4
  import { BaseCommonDB } from '../../commondb/base.common.db.js';
5
5
  import type { CommonDB, CommonDBSupport } from '../../commondb/common.db.js';
6
6
  import type { CommonDBOptions, CommonDBSaveOptions, CommonDBStreamOptions, DBSaveBatchOperation, RunQueryResult } from '../../db.model.js';
@@ -26,7 +26,7 @@ export declare class FileDB extends BaseCommonDB implements CommonDB {
26
26
  saveBatch<ROW extends ObjectWithId>(table: string, rows: ROW[], _opt?: CommonDBSaveOptions<ROW>): Promise<void>;
27
27
  runQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, _opt?: CommonDBOptions): Promise<RunQueryResult<ROW>>;
28
28
  runQueryCount<ROW extends ObjectWithId>(q: DBQuery<ROW>, _opt?: CommonDBOptions): Promise<number>;
29
- streamQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CommonDBStreamOptions): ReadableTyped<ROW>;
29
+ streamQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CommonDBStreamOptions): Pipeline<ROW>;
30
30
  deleteByQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, _opt?: CommonDBOptions): Promise<number>;
31
31
  deleteByIds(table: string, ids: string[], _opt?: CommonDBOptions): Promise<number>;
32
32
  getTableSchema<ROW extends ObjectWithId>(table: string): Promise<JsonSchemaRootObject<ROW>>;
@@ -1,3 +1,4 @@
1
+ import { Readable } from 'node:stream';
1
2
  import { _by, _sortBy } from '@naturalcycles/js-lib/array';
2
3
  import { _since, localTime } from '@naturalcycles/js-lib/datetime';
3
4
  import { _assert } from '@naturalcycles/js-lib/error/assert.js';
@@ -5,7 +6,7 @@ import { generateJsonSchemaFromData } from '@naturalcycles/js-lib/json-schema';
5
6
  import { _deepEquals, _filterUndefinedValues, _sortObjectDeep } from '@naturalcycles/js-lib/object';
6
7
  import { _stringMapValues, } from '@naturalcycles/js-lib/types';
7
8
  import { dimGrey } from '@naturalcycles/nodejs-lib/colors';
8
- import { readableCreate } from '@naturalcycles/nodejs-lib/stream';
9
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
9
10
  import { BaseCommonDB } from '../../commondb/base.common.db.js';
10
11
  import { commonDBFullSupport } from '../../commondb/common.db.js';
11
12
  import { queryInMemory } from '../../inmemory/queryInMemory.js';
@@ -82,12 +83,10 @@ export class FileDB extends BaseCommonDB {
82
83
  return (await this.loadFile(q.table)).length;
83
84
  }
84
85
  streamQuery(q, opt) {
85
- const readable = readableCreate();
86
- void this.runQuery(q, opt).then(({ rows }) => {
87
- rows.forEach(r => readable.push(r));
88
- readable.push(null); // done
86
+ return Pipeline.fromAsyncReadable(async () => {
87
+ const { rows } = await this.runQuery(q, opt);
88
+ return Readable.from(rows);
89
89
  });
90
- return readable;
91
90
  }
92
91
  async deleteByQuery(q, _opt) {
93
92
  const byId = _by(await this.loadFile(q.table), r => r.id);
@@ -1,11 +1,11 @@
1
1
  import type { Transform } from 'node:stream';
2
2
  import type { JsonSchemaObject, JsonSchemaRootObject } from '@naturalcycles/js-lib/json-schema';
3
- import { type AsyncIndexedMapper, type BaseDBEntity, type NonNegativeInteger, type StringMap, type Unsaved } from '@naturalcycles/js-lib/types';
4
- import { type ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
3
+ import { type BaseDBEntity, type NonNegativeInteger, type StringMap, type Unsaved } from '@naturalcycles/js-lib/types';
4
+ import { type Pipeline } from '@naturalcycles/nodejs-lib/stream';
5
5
  import type { CommonDBTransactionOptions, RunQueryResult } from '../db.model.js';
6
6
  import type { DBQuery } from '../query/dbQuery.js';
7
7
  import { RunnableDBQuery } from '../query/dbQuery.js';
8
- import type { CommonDaoCfg, CommonDaoCreateOptions, CommonDaoOptions, CommonDaoPatchByIdOptions, CommonDaoPatchOptions, CommonDaoReadOptions, CommonDaoSaveBatchOptions, CommonDaoSaveOptions, CommonDaoStreamDeleteOptions, CommonDaoStreamForEachOptions, CommonDaoStreamOptions, CommonDaoStreamSaveOptions } from './common.dao.model.js';
8
+ import type { CommonDaoCfg, CommonDaoCreateOptions, CommonDaoOptions, CommonDaoPatchByIdOptions, CommonDaoPatchOptions, CommonDaoReadOptions, CommonDaoSaveBatchOptions, CommonDaoSaveOptions, CommonDaoStreamDeleteOptions, CommonDaoStreamOptions, CommonDaoStreamSaveOptions } from './common.dao.model.js';
9
9
  import { CommonDaoTransaction } from './commonDaoTransaction.js';
10
10
  /**
11
11
  * Lowest common denominator API between supported Databases.
@@ -45,25 +45,11 @@ export declare class CommonDao<BM extends BaseDBEntity, DBM extends BaseDBEntity
45
45
  runQueryAsDBM(q: DBQuery<DBM>, opt?: CommonDaoReadOptions): Promise<DBM[]>;
46
46
  runQueryExtendedAsDBM(q: DBQuery<DBM>, opt?: CommonDaoReadOptions): Promise<RunQueryResult<DBM>>;
47
47
  runQueryCount(q: DBQuery<DBM>, opt?: CommonDaoReadOptions): Promise<number>;
48
- streamQueryForEach(q: DBQuery<DBM>, mapper: AsyncIndexedMapper<BM, void>, opt?: CommonDaoStreamForEachOptions<BM>): Promise<void>;
49
- streamQueryAsDBMForEach(q: DBQuery<DBM>, mapper: AsyncIndexedMapper<DBM, void>, opt?: CommonDaoStreamForEachOptions<DBM>): Promise<void>;
50
- /**
51
- * Stream as Readable, to be able to .pipe() it further with support of backpressure.
52
- */
53
- streamQueryAsDBM(q: DBQuery<DBM>, opt?: CommonDaoStreamOptions<DBM>): ReadableTyped<DBM>;
54
- /**
55
- * Stream as Readable, to be able to .pipe() it further with support of backpressure.
56
- *
57
- * Please note that this stream is currently not async-iteration friendly, because of
58
- * `through2` usage.
59
- * Will be migrated/fixed at some point in the future.
60
- *
61
- * You can do `.pipe(transformNoOp)` to make it "valid again".
62
- */
63
- streamQuery(q: DBQuery<DBM>, opt?: CommonDaoStreamOptions<BM>): ReadableTyped<BM>;
48
+ streamQueryAsDBM(q: DBQuery<DBM>, opt?: CommonDaoStreamOptions<DBM>): Pipeline<DBM>;
49
+ streamQuery(q: DBQuery<DBM>, opt?: CommonDaoStreamOptions<BM>): Pipeline<BM>;
50
+ private streamQueryRaw;
64
51
  queryIds(q: DBQuery<DBM>, opt?: CommonDaoReadOptions): Promise<ID[]>;
65
- streamQueryIds(q: DBQuery<DBM>, opt?: CommonDaoStreamOptions<ID>): ReadableTyped<ID>;
66
- streamQueryIdsForEach(q: DBQuery<DBM>, mapper: AsyncIndexedMapper<ID, void>, opt?: CommonDaoStreamForEachOptions<ID>): Promise<void>;
52
+ streamQueryIds(q: DBQuery<DBM>, opt?: CommonDaoStreamOptions<ID>): Pipeline<ID>;
67
53
  /**
68
54
  * Mutates!
69
55
  */
@@ -166,7 +152,7 @@ export declare class CommonDao<BM extends BaseDBEntity, DBM extends BaseDBEntity
166
152
  * Load rows (by their ids) from Multiple tables at once.
167
153
  * An optimized way to load data, minimizing DB round-trips.
168
154
  *
169
- * @experimental.
155
+ * @experimental
170
156
  */
171
157
  static multiGet<MAP extends Record<string, DaoWithIds<AnyDao> | DaoWithId<AnyDao>>>(inputMap: MAP, opt?: CommonDaoReadOptions): Promise<{
172
158
  [K in keyof MAP]: MAP[K] extends DaoWithIds<any> ? InferBM<MAP[K]['dao']>[] : InferBM<MAP[K]['dao']> | null;
@@ -175,7 +161,7 @@ export declare class CommonDao<BM extends BaseDBEntity, DBM extends BaseDBEntity
175
161
  private static multiGetMapByTableById;
176
162
  private static prepareMultiGetOutput;
177
163
  /**
178
- * @experimental.
164
+ * @experimental
179
165
  */
180
166
  static multiDelete(inputs: (DaoWithId<AnyDao> | DaoWithIds<AnyDao>)[], opt?: CommonDaoOptions): Promise<NonNegativeInteger>;
181
167
  static multiSave(inputs: (DaoWithRows<AnyDao> | DaoWithRow<AnyDao>)[], opt?: CommonDaoSaveBatchOptions<any>): Promise<void>;
@@ -5,11 +5,9 @@ import { _assert, ErrorMode } from '@naturalcycles/js-lib/error';
5
5
  import { _deepJsonEquals } from '@naturalcycles/js-lib/object/deepEquals.js';
6
6
  import { _filterUndefinedValues, _objectAssignExact, } from '@naturalcycles/js-lib/object/object.util.js';
7
7
  import { pMap } from '@naturalcycles/js-lib/promise/pMap.js';
8
- import { _stringMapEntries, _stringMapValues, } from '@naturalcycles/js-lib/types';
9
- import { _passthroughPredicate, _typeCast } from '@naturalcycles/js-lib/types';
8
+ import { _passthroughPredicate, _stringMapEntries, _stringMapValues, _typeCast, } from '@naturalcycles/js-lib/types';
10
9
  import { stringId } from '@naturalcycles/nodejs-lib';
11
- import { Pipeline, transformFlatten, transformMapSync, } from '@naturalcycles/nodejs-lib/stream';
12
- import { transformChunk, transformLogProgress, transformMap, transformNoOp, } from '@naturalcycles/nodejs-lib/stream';
10
+ import { transformChunk, transformFlatten, transformLogProgress, transformMap, } from '@naturalcycles/nodejs-lib/stream';
13
11
  import { DBLibError } from '../cnst.js';
14
12
  import { RunnableDBQuery } from '../query/dbQuery.js';
15
13
  import { CommonDaoTransaction } from './commonDaoTransaction.js';
@@ -36,7 +34,6 @@ export class CommonDao {
36
34
  hooks: {
37
35
  parseNaturalId: () => ({}),
38
36
  beforeCreate: bm => bm,
39
- anonymize: dbm => dbm,
40
37
  onValidationError: err => err,
41
38
  ...cfg.hooks,
42
39
  },
@@ -161,116 +158,28 @@ export class CommonDao {
161
158
  q.table = opt.table || q.table;
162
159
  return await this.cfg.db.runQueryCount(q, opt);
163
160
  }
164
- async streamQueryForEach(q, mapper, opt = {}) {
165
- this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
166
- q.table = opt.table || q.table;
167
- opt.skipValidation = opt.skipValidation !== false; // default true
168
- opt.errorMode ||= ErrorMode.SUPPRESS;
161
+ streamQueryAsDBM(q, opt = {}) {
162
+ const pipeline = this.streamQueryRaw(q, opt);
169
163
  const isPartialQuery = !!q._selectedFieldNames;
170
- await Pipeline.from(this.cfg.db.streamQuery(q, opt))
171
- .map(async (dbm) => {
172
- if (isPartialQuery)
173
- return dbm;
174
- return await this.dbmToBM(dbm, opt);
175
- }, {
176
- errorMode: opt.errorMode,
177
- })
178
- .map(mapper, {
179
- ...opt,
180
- predicate: _passthroughPredicate, // to be able to logProgress
181
- })
182
- // LogProgress should be AFTER the mapper, to be able to report correct stats
183
- .logProgress({
184
- metric: q.table,
185
- ...opt,
186
- })
187
- .run();
188
- }
189
- async streamQueryAsDBMForEach(q, mapper, opt = {}) {
190
- this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
191
- q.table = opt.table || q.table;
192
- opt.skipValidation = opt.skipValidation !== false; // default true
164
+ if (isPartialQuery)
165
+ return pipeline;
166
+ opt.skipValidation ??= true;
193
167
  opt.errorMode ||= ErrorMode.SUPPRESS;
194
- const isPartialQuery = !!q._selectedFieldNames;
195
- await Pipeline.from(this.cfg.db.streamQuery(q, opt))
196
- .mapSync(dbm => {
197
- if (isPartialQuery)
198
- return dbm;
199
- return this.anyToDBM(dbm, opt);
200
- }, {
201
- errorMode: opt.errorMode,
202
- })
203
- .map(mapper, {
204
- ...opt,
205
- predicate: _passthroughPredicate, // to be able to logProgress
206
- })
207
- .logProgress({
208
- metric: q.table,
209
- ...opt,
210
- })
211
- .run();
168
+ return pipeline.mapSync(dbm => this.anyToDBM(dbm, opt), { errorMode: opt.errorMode });
212
169
  }
213
- /**
214
- * Stream as Readable, to be able to .pipe() it further with support of backpressure.
215
- */
216
- streamQueryAsDBM(q, opt = {}) {
217
- this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
218
- q.table = opt.table || q.table;
219
- opt.skipValidation = opt.skipValidation !== false; // default true
220
- opt.errorMode ||= ErrorMode.SUPPRESS;
170
+ streamQuery(q, opt = {}) {
171
+ const pipeline = this.streamQueryRaw(q, opt);
221
172
  const isPartialQuery = !!q._selectedFieldNames;
222
- const stream = this.cfg.db.streamQuery(q, opt);
223
173
  if (isPartialQuery)
224
- return stream;
225
- return (stream
226
- // the commented out line was causing RangeError: Maximum call stack size exceeded
227
- // .on('error', err => stream.emit('error', err))
228
- .pipe(transformMapSync(dbm => {
229
- return this.anyToDBM(dbm, opt);
230
- }, {
231
- errorMode: opt.errorMode,
232
- })));
174
+ return pipeline;
175
+ opt.skipValidation ??= true;
176
+ opt.errorMode ||= ErrorMode.SUPPRESS;
177
+ return pipeline.map(async (dbm) => await this.dbmToBM(dbm, opt), { errorMode: opt.errorMode });
233
178
  }
234
- /**
235
- * Stream as Readable, to be able to .pipe() it further with support of backpressure.
236
- *
237
- * Please note that this stream is currently not async-iteration friendly, because of
238
- * `through2` usage.
239
- * Will be migrated/fixed at some point in the future.
240
- *
241
- * You can do `.pipe(transformNoOp)` to make it "valid again".
242
- */
243
- streamQuery(q, opt = {}) {
179
+ streamQueryRaw(q, opt = {}) {
244
180
  this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
245
181
  q.table = opt.table || q.table;
246
- opt.skipValidation = opt.skipValidation !== false; // default true
247
- opt.errorMode ||= ErrorMode.SUPPRESS;
248
- const stream = this.cfg.db.streamQuery(q, opt);
249
- const isPartialQuery = !!q._selectedFieldNames;
250
- if (isPartialQuery)
251
- return stream;
252
- // This almost works, but hard to implement `errorMode: THROW_AGGREGATED` in this case
253
- // return stream.flatMap(async (dbm: DBM) => {
254
- // return [await this.dbmToBM(dbm, opt)] satisfies BM[]
255
- // }, {
256
- // concurrency: 16,
257
- // })
258
- return (stream
259
- // optimization: 1 validation is enough
260
- // .pipe(transformMap<any, DBM>(dbm => this.anyToDBM(dbm, opt), safeOpt))
261
- // .pipe(transformMap<DBM, BM>(dbm => this.dbmToBM(dbm, opt), safeOpt))
262
- // the commented out line was causing RangeError: Maximum call stack size exceeded
263
- // .on('error', err => stream.emit('error', err))
264
- .pipe(transformMap(async (dbm) => {
265
- return await this.dbmToBM(dbm, opt);
266
- }, {
267
- errorMode: opt.errorMode,
268
- }))
269
- // this can make the stream async-iteration-friendly
270
- // but not applying it now for perf reasons
271
- // UPD: applying, to be compliant with `.toArray()`, etc.
272
- .on('error', err => stream.destroy(err))
273
- .pipe(transformNoOp()));
182
+ return this.cfg.db.streamQuery(q, opt);
274
183
  }
275
184
  async queryIds(q, opt = {}) {
276
185
  this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
@@ -282,36 +191,7 @@ export class CommonDao {
282
191
  this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
283
192
  q.table = opt.table || q.table;
284
193
  opt.errorMode ||= ErrorMode.SUPPRESS;
285
- // Experimental: using `.map()`
286
- const stream = this.cfg.db
287
- .streamQuery(q.select(['id']), opt)
288
- // .on('error', err => stream.emit('error', err))
289
- .map((r) => r.id);
290
- // const stream: ReadableTyped<string> = this.cfg.db
291
- // .streamQuery<DBM>(q.select(['id']), opt)
292
- // .on('error', err => stream.emit('error', err))
293
- // .pipe(
294
- // transformMapSimple<DBM, string>(r => r.id, {
295
- // errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
296
- // }),
297
- // )
298
- return stream;
299
- }
300
- async streamQueryIdsForEach(q, mapper, opt = {}) {
301
- this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
302
- q.table = opt.table || q.table;
303
- opt.errorMode ||= ErrorMode.SUPPRESS;
304
- await Pipeline.from(this.cfg.db.streamQuery(q.select(['id']), opt).map(r => r.id))
305
- .map(mapper, {
306
- ...opt,
307
- predicate: _passthroughPredicate,
308
- })
309
- // LogProgress should be AFTER the mapper, to be able to report correct stats
310
- .logProgress({
311
- metric: q.table,
312
- ...opt,
313
- })
314
- .run();
194
+ return this.cfg.db.streamQuery(q.select(['id']), opt).mapSync((r) => r.id);
315
195
  }
316
196
  /**
317
197
  * Mutates!
@@ -590,7 +470,9 @@ export class CommonDao {
590
470
  let deleted = 0;
591
471
  if (opt.chunkSize) {
592
472
  const { chunkSize, chunkConcurrency = 8 } = opt;
593
- await Pipeline.from(this.cfg.db.streamQuery(q.select(['id']), opt).map(r => r.id))
473
+ await this.cfg.db
474
+ .streamQuery(q.select(['id']), opt)
475
+ .mapSync(r => r.id)
594
476
  .chunk(chunkSize)
595
477
  .map(async (ids) => {
596
478
  await this.cfg.db.deleteByIds(q.table, ids, opt);
@@ -656,10 +538,7 @@ export class CommonDao {
656
538
  return null;
657
539
  // optimization: no need to run full joi DBM validation, cause BM validation will be run
658
540
  // const dbm = this.anyToDBM(_dbm, opt)
659
- let dbm = { ..._dbm, ...this.cfg.hooks.parseNaturalId(_dbm.id) };
660
- if (opt.anonymize) {
661
- dbm = this.cfg.hooks.anonymize(dbm);
662
- }
541
+ const dbm = { ..._dbm, ...this.cfg.hooks.parseNaturalId(_dbm.id) };
663
542
  // DBM > BM
664
543
  const bm = ((await this.cfg.hooks.beforeDBMToBM?.(dbm)) || dbm);
665
544
  // Validate/convert BM
@@ -680,17 +559,12 @@ export class CommonDao {
680
559
  // try/catch?
681
560
  return await pMap(bms, async (bm) => await this.bmToDBM(bm, opt));
682
561
  }
683
- anyToDBM(dbm, opt = {}) {
562
+ anyToDBM(dbm, _opt = {}) {
684
563
  if (!dbm)
685
564
  return null;
686
565
  // this shouldn't be happening on load! but should on save!
687
566
  // this.assignIdCreatedUpdated(dbm, opt)
688
567
  dbm = { ...dbm, ...this.cfg.hooks.parseNaturalId(dbm.id) };
689
- // todo: is this the right place?
690
- // todo: is anyToDBM even needed?
691
- if (opt.anonymize) {
692
- dbm = this.cfg.hooks.anonymize(dbm);
693
- }
694
568
  // Validate/convert DBM
695
569
  // return this.validateAndConvert(dbm, this.cfg.dbmSchema, DBModelType.DBM, opt)
696
570
  return dbm;
@@ -772,7 +646,7 @@ export class CommonDao {
772
646
  * Load rows (by their ids) from Multiple tables at once.
773
647
  * An optimized way to load data, minimizing DB round-trips.
774
648
  *
775
- * @experimental.
649
+ * @experimental
776
650
  */
777
651
  static async multiGet(inputMap, opt = {}) {
778
652
  const db = Object.values(inputMap)[0]?.dao.cfg.db;
@@ -842,7 +716,7 @@ export class CommonDao {
842
716
  return bmsByProp;
843
717
  }
844
718
  /**
845
- * @experimental.
719
+ * @experimental
846
720
  */
847
721
  static async multiDelete(inputs, opt = {}) {
848
722
  if (!inputs.length)
@@ -949,13 +823,20 @@ export class CommonDao {
949
823
  * Throws if query uses a property that is in `excludeFromIndexes` list.
950
824
  */
951
825
  validateQueryIndexes(q) {
952
- const { excludeFromIndexes } = this.cfg;
953
- if (!excludeFromIndexes)
954
- return;
955
- for (const f of q._filters) {
956
- _assert(!excludeFromIndexes.includes(f.name), `cannot query on non-indexed property: ${this.cfg.table}.${f.name}`, {
957
- query: q.pretty(),
958
- });
826
+ const { excludeFromIndexes, indexes } = this.cfg;
827
+ if (excludeFromIndexes) {
828
+ for (const f of q._filters) {
829
+ _assert(!excludeFromIndexes.includes(f.name), `cannot query on non-indexed property: ${this.cfg.table}.${f.name}`, {
830
+ query: q.pretty(),
831
+ });
832
+ }
833
+ }
834
+ if (indexes) {
835
+ for (const f of q._filters) {
836
+ _assert(f.name === 'id' || indexes.includes(f.name), `cannot query on non-indexed property: ${this.cfg.table}.${f.name}`, {
837
+ query: q.pretty(),
838
+ });
839
+ }
959
840
  }
960
841
  }
961
842
  }
@@ -2,7 +2,7 @@ import type { ValidationFunction } from '@naturalcycles/js-lib';
2
2
  import type { AppError, ErrorMode } from '@naturalcycles/js-lib/error';
3
3
  import type { CommonLogger } from '@naturalcycles/js-lib/log';
4
4
  import type { BaseDBEntity, UnixTimestamp } from '@naturalcycles/js-lib/types';
5
- import type { TransformLogProgressOptions, TransformMapOptions } from '@naturalcycles/nodejs-lib/stream';
5
+ import type { TransformLogProgressOptions } from '@naturalcycles/nodejs-lib/stream';
6
6
  import type { CommonDB } from '../commondb/common.db.js';
7
7
  import type { CommonDBCreateOptions, CommonDBOptions, CommonDBSaveOptions } from '../db.model.js';
8
8
  export interface CommonDaoHooks<BM extends BaseDBEntity, DBM extends BaseDBEntity, ID extends string = BM['id']> {
@@ -54,16 +54,6 @@ export interface CommonDaoHooks<BM extends BaseDBEntity, DBM extends BaseDBEntit
54
54
  * Or, you can mutate the DBM if needed.
55
55
  */
56
56
  beforeSave?: (dbm: DBM) => void;
57
- /**
58
- * Called in:
59
- * - dbmToBM (applied before DBM becomes BM)
60
- * - anyToDBM
61
- *
62
- * Hook only allows to apply anonymization to DBM (not to BM).
63
- * It still applies to BM "transitively", during dbmToBM
64
- * (e.g after loaded from the Database).
65
- */
66
- anonymize: (dbm: DBM) => DBM;
67
57
  /**
68
58
  * If hook is defined - allows to prevent or modify the error thrown.
69
59
  * Return `false` to prevent throwing an error.
@@ -230,12 +220,6 @@ export interface CommonDaoOptions extends CommonDBOptions {
230
220
  * @default false (for streams). Setting to true enables deletion of immutable objects
231
221
  */
232
222
  allowMutability?: boolean;
233
- /**
234
- * If true - data will be anonymized (by calling a BaseDao.anonymize() hook that you can extend in your Dao implementation).
235
- * Only applicable to loading/querying/streaming_loading operations (n/a for saving).
236
- * There is additional validation applied AFTER Anonymization, so your anonymization implementation should keep the object valid.
237
- */
238
- anonymize?: boolean;
239
223
  /**
240
224
  * Allows to override the Table that this Dao is connected to, only in the context of this call.
241
225
  *
@@ -288,8 +272,6 @@ export interface CommonDaoStreamDeleteOptions<DBM extends BaseDBEntity> extends
288
272
  }
289
273
  export interface CommonDaoStreamSaveOptions<DBM extends BaseDBEntity> extends CommonDaoSaveBatchOptions<DBM>, CommonDaoStreamOptions<DBM> {
290
274
  }
291
- export interface CommonDaoStreamForEachOptions<IN> extends CommonDaoStreamOptions<IN>, TransformMapOptions<IN, any> {
292
- }
293
275
  export interface CommonDaoStreamOptions<IN> extends CommonDaoReadOptions, TransformLogProgressOptions<IN> {
294
276
  /**
295
277
  * @default true (for streams)
@@ -1,6 +1,6 @@
1
1
  import type { JsonSchemaObject, JsonSchemaRootObject } from '@naturalcycles/js-lib/json-schema';
2
2
  import type { ObjectWithId, StringMap } from '@naturalcycles/js-lib/types';
3
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
3
+ import type { Pipeline } from '@naturalcycles/nodejs-lib/stream';
4
4
  import type { CommonDBOptions, CommonDBReadOptions, CommonDBSaveOptions, CommonDBTransactionOptions, DBTransaction, DBTransactionFn, RunQueryResult } from '../db.model.js';
5
5
  import type { DBQuery } from '../query/dbQuery.js';
6
6
  import type { CommonDB, CommonDBSupport } from './common.db.js';
@@ -23,7 +23,7 @@ export declare class BaseCommonDB implements CommonDB {
23
23
  runQuery<ROW extends ObjectWithId>(_q: DBQuery<ROW>): Promise<RunQueryResult<ROW>>;
24
24
  runQueryCount<ROW extends ObjectWithId>(_q: DBQuery<ROW>): Promise<number>;
25
25
  saveBatch<ROW extends ObjectWithId>(_table: string, _rows: ROW[], _opt?: CommonDBSaveOptions<ROW>): Promise<void>;
26
- streamQuery<ROW extends ObjectWithId>(_q: DBQuery<ROW>): ReadableTyped<ROW>;
26
+ streamQuery<ROW extends ObjectWithId>(_q: DBQuery<ROW>): Pipeline<ROW>;
27
27
  deleteByIds(_table: string, _ids: string[], _opt?: CommonDBOptions): Promise<number>;
28
28
  runInTransaction(fn: DBTransactionFn, _opt?: CommonDBTransactionOptions): Promise<void>;
29
29
  createTransaction(_opt?: CommonDBTransactionOptions): Promise<DBTransaction>;
@@ -1,6 +1,6 @@
1
1
  import type { JsonSchemaObject, JsonSchemaRootObject } from '@naturalcycles/js-lib/json-schema';
2
2
  import type { NonNegativeInteger, ObjectWithId, StringMap } from '@naturalcycles/js-lib/types';
3
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
3
+ import type { Pipeline } from '@naturalcycles/nodejs-lib/stream';
4
4
  import type { CommonDBCreateOptions, CommonDBOptions, CommonDBReadOptions, CommonDBSaveOptions, CommonDBStreamOptions, CommonDBTransactionOptions, DBTransaction, DBTransactionFn, RunQueryResult } from '../db.model.js';
5
5
  import type { DBQuery } from '../query/dbQuery.js';
6
6
  export interface CommonDB {
@@ -62,7 +62,7 @@ export interface CommonDB {
62
62
  */
63
63
  runQuery: <ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CommonDBReadOptions) => Promise<RunQueryResult<ROW>>;
64
64
  runQueryCount: <ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CommonDBReadOptions) => Promise<NonNegativeInteger>;
65
- streamQuery: <ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CommonDBStreamOptions) => ReadableTyped<ROW>;
65
+ streamQuery: <ROW extends ObjectWithId>(q: DBQuery<ROW>, opt?: CommonDBStreamOptions) => Pipeline<ROW>;
66
66
  /**
67
67
  * rows can have missing ids only if DB supports auto-generating them (like mysql auto_increment).
68
68
  */
@@ -1,7 +1,7 @@
1
1
  import { type JsonSchemaObject, type JsonSchemaRootObject } from '@naturalcycles/js-lib/json-schema';
2
2
  import type { CommonLogger } from '@naturalcycles/js-lib/log';
3
3
  import { type AnyObjectWithId, type ObjectWithId, type StringMap } from '@naturalcycles/js-lib/types';
4
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream/stream.model.js';
4
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
5
5
  import type { CommonDB, CommonDBSupport } from '../commondb/common.db.js';
6
6
  import { CommonDBType } from '../commondb/common.db.js';
7
7
  import type { CommonDBCreateOptions, CommonDBOptions, CommonDBSaveOptions, CommonDBTransactionOptions, DBOperation, DBTransaction, DBTransactionFn, RunQueryResult } from '../db.model.js';
@@ -60,7 +60,7 @@ export declare class InMemoryDB implements CommonDB {
60
60
  patchByQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, patch: Partial<ROW>): Promise<number>;
61
61
  runQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, _opt?: CommonDBOptions): Promise<RunQueryResult<ROW>>;
62
62
  runQueryCount<ROW extends ObjectWithId>(q: DBQuery<ROW>, _opt?: CommonDBOptions): Promise<number>;
63
- streamQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, _opt?: CommonDBOptions): ReadableTyped<ROW>;
63
+ streamQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, _opt?: CommonDBOptions): Pipeline<ROW>;
64
64
  runInTransaction(fn: DBTransactionFn, opt?: CommonDBTransactionOptions): Promise<void>;
65
65
  createTransaction(opt?: CommonDBTransactionOptions): Promise<DBTransaction>;
66
66
  incrementBatch(table: string, prop: string, incrementMap: StringMap<number>, _opt?: CommonDBOptions): Promise<StringMap<number>>;
@@ -1,9 +1,9 @@
1
- import { Readable } from 'node:stream';
2
1
  import { _isEmptyObject } from '@naturalcycles/js-lib';
3
2
  import { _assert } from '@naturalcycles/js-lib/error/assert.js';
4
3
  import { generateJsonSchemaFromData, } from '@naturalcycles/js-lib/json-schema';
5
4
  import { _deepCopy, _sortObjectDeep } from '@naturalcycles/js-lib/object';
6
5
  import { _stringMapEntries, _stringMapValues, } from '@naturalcycles/js-lib/types';
6
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
7
7
  import { bufferReviver } from '@naturalcycles/nodejs-lib/stream/ndjson/transformJsonParse.js';
8
8
  import { commonDBFullSupport, CommonDBType } from '../commondb/common.db.js';
9
9
  import { queryInMemory } from './queryInMemory.js';
@@ -161,7 +161,7 @@ export class InMemoryDB {
161
161
  }
162
162
  streamQuery(q, _opt) {
163
163
  const table = this.cfg.tablesPrefix + q.table;
164
- return Readable.from(queryInMemory(q, Object.values(this.data[table] || {})));
164
+ return Pipeline.fromArray(queryInMemory(q, Object.values(this.data[table] || {})));
165
165
  }
166
166
  async runInTransaction(fn, opt = {}) {
167
167
  const tx = new InMemoryDBTransaction(this, {
@@ -1,5 +1,5 @@
1
1
  import type { StringMap } from '@naturalcycles/js-lib/types';
2
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
2
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
3
3
  import type { CommonDBCreateOptions } from '../db.model.js';
4
4
  import type { CommonKeyValueDB, IncrementTuple, KeyValueDBTuple } from '../kv/commonKeyValueDB.js';
5
5
  export interface InMemoryKeyValueDBCfg {
@@ -17,9 +17,9 @@ export declare class InMemoryKeyValueDB implements CommonKeyValueDB {
17
17
  deleteByIds(table: string, ids: string[]): Promise<void>;
18
18
  getByIds(table: string, ids: string[]): Promise<KeyValueDBTuple[]>;
19
19
  saveBatch(table: string, entries: KeyValueDBTuple[]): Promise<void>;
20
- streamIds(table: string, limit?: number): ReadableTyped<string>;
21
- streamValues(table: string, limit?: number): ReadableTyped<Buffer>;
22
- streamEntries(table: string, limit?: number): ReadableTyped<KeyValueDBTuple>;
20
+ streamIds(table: string, limit?: number): Pipeline<string>;
21
+ streamValues(table: string, limit?: number): Pipeline<Buffer>;
22
+ streamEntries(table: string, limit?: number): Pipeline<KeyValueDBTuple>;
23
23
  count(table: string): Promise<number>;
24
24
  incrementBatch(table: string, entries: IncrementTuple[]): Promise<IncrementTuple[]>;
25
25
  }
@@ -1,4 +1,4 @@
1
- import { Readable } from 'node:stream';
1
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
2
2
  import { commonKeyValueDBFullSupport } from '../kv/commonKeyValueDB.js';
3
3
  export class InMemoryKeyValueDB {
4
4
  cfg;
@@ -29,13 +29,13 @@ export class InMemoryKeyValueDB {
29
29
  }
30
30
  }
31
31
  streamIds(table, limit) {
32
- return Readable.from(Object.keys(this.data[table] || {}).slice(0, limit));
32
+ return Pipeline.fromArray(Object.keys(this.data[table] || {}).slice(0, limit));
33
33
  }
34
34
  streamValues(table, limit) {
35
- return Readable.from(Object.values(this.data[table] || {}).slice(0, limit));
35
+ return Pipeline.fromArray(Object.values(this.data[table] || {}).slice(0, limit));
36
36
  }
37
37
  streamEntries(table, limit) {
38
- return Readable.from(Object.entries(this.data[table] || {}).slice(0, limit));
38
+ return Pipeline.fromArray(Object.entries(this.data[table] || {}).slice(0, limit));
39
39
  }
40
40
  async count(table) {
41
41
  this.data[table] ||= {};
@@ -15,7 +15,7 @@ const FILTER_FNS = {
15
15
  // But should be careful here..
16
16
  export function queryInMemory(q, rows = []) {
17
17
  // .filter
18
- // eslint-disable-next-line unicorn/no-array-reduce
18
+ // oxlint-disable-next-line unicorn/no-array-reduce
19
19
  rows = q._filters.reduce((rows, filter) => {
20
20
  return rows.filter(row => {
21
21
  const value = _get(row, filter.name);
@@ -31,7 +31,7 @@ export function queryInMemory(q, rows = []) {
31
31
  if (order) {
32
32
  const { name, descending } = order;
33
33
  rows = rows.sort((a, b) => {
34
- // biome-ignore lint/suspicious/noDoubleEquals: ok
34
+ // oxlint-disable-next-line eqeqeq
35
35
  if (a[name] == b[name])
36
36
  return 0;
37
37
  if (descending) {