@naturalcycles/db-lib 10.26.1 → 10.28.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46):
  1. package/dist/adapter/cachedb/cache.db.d.ts +2 -2
  2. package/dist/adapter/cachedb/cache.db.js +7 -7
  3. package/dist/adapter/file/file.db.d.ts +2 -2
  4. package/dist/adapter/file/file.db.js +5 -6
  5. package/dist/commondao/common.dao.d.ts +9 -23
  6. package/dist/commondao/common.dao.js +38 -157
  7. package/dist/commondao/common.dao.model.d.ts +1 -19
  8. package/dist/commondb/base.common.db.d.ts +2 -2
  9. package/dist/commondb/common.db.d.ts +2 -2
  10. package/dist/inmemory/inMemory.db.d.ts +2 -2
  11. package/dist/inmemory/inMemory.db.js +2 -2
  12. package/dist/inmemory/inMemoryKeyValueDB.d.ts +4 -4
  13. package/dist/inmemory/inMemoryKeyValueDB.js +4 -4
  14. package/dist/inmemory/queryInMemory.js +2 -2
  15. package/dist/kv/commonKeyValueDB.d.ts +4 -4
  16. package/dist/kv/commonKeyValueDao.d.ts +5 -5
  17. package/dist/kv/commonKeyValueDao.js +9 -12
  18. package/dist/pipeline/dbPipelineBackup.d.ts +1 -1
  19. package/dist/pipeline/dbPipelineBackup.js +3 -3
  20. package/dist/pipeline/dbPipelineCopy.d.ts +1 -1
  21. package/dist/pipeline/dbPipelineCopy.js +3 -4
  22. package/dist/pipeline/dbPipelineRestore.d.ts +1 -2
  23. package/dist/pipeline/dbPipelineRestore.js +2 -3
  24. package/dist/query/dbQuery.d.ts +6 -12
  25. package/dist/query/dbQuery.js +0 -19
  26. package/dist/testing/commonDBTest.js +2 -2
  27. package/dist/testing/commonDaoTest.js +12 -6
  28. package/package.json +1 -1
  29. package/src/adapter/cachedb/cache.db.ts +8 -9
  30. package/src/adapter/file/file.db.ts +6 -10
  31. package/src/commondao/common.dao.model.ts +1 -26
  32. package/src/commondao/common.dao.ts +50 -208
  33. package/src/commondb/base.common.db.ts +2 -2
  34. package/src/commondb/common.db.ts +2 -2
  35. package/src/inmemory/inMemory.db.ts +3 -7
  36. package/src/inmemory/inMemoryKeyValueDB.ts +7 -8
  37. package/src/inmemory/queryInMemory.ts +2 -2
  38. package/src/kv/commonKeyValueDB.ts +4 -4
  39. package/src/kv/commonKeyValueDao.ts +16 -20
  40. package/src/pipeline/dbPipelineBackup.ts +6 -6
  41. package/src/pipeline/dbPipelineCopy.ts +6 -8
  42. package/src/pipeline/dbPipelineRestore.ts +2 -2
  43. package/src/query/dbQuery.ts +5 -39
  44. package/src/testing/commonDBTest.ts +2 -2
  45. package/src/testing/commonDaoTest.ts +12 -6
  46. package/src/testing/test.model.ts +1 -1
@@ -1,5 +1,5 @@
1
1
  import type { Integer, UnixTimestamp } from '@naturalcycles/js-lib/types';
2
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
2
+ import type { Pipeline } from '@naturalcycles/nodejs-lib/stream';
3
3
  import type { CommonDBCreateOptions } from '../db.model.js';
4
4
  /**
5
5
  * Common interface for Key-Value database implementations.
@@ -28,9 +28,9 @@ export interface CommonKeyValueDB {
28
28
  getByIds: (table: string, ids: string[]) => Promise<KeyValueDBTuple[]>;
29
29
  deleteByIds: (table: string, ids: string[]) => Promise<void>;
30
30
  saveBatch: (table: string, entries: KeyValueDBTuple[], opt?: CommonKeyValueDBSaveBatchOptions) => Promise<void>;
31
- streamIds: (table: string, limit?: number) => ReadableTyped<string>;
32
- streamValues: (table: string, limit?: number) => ReadableTyped<Buffer>;
33
- streamEntries: (table: string, limit?: number) => ReadableTyped<KeyValueDBTuple>;
31
+ streamIds: (table: string, limit?: number) => Pipeline<string>;
32
+ streamValues: (table: string, limit?: number) => Pipeline<Buffer>;
33
+ streamEntries: (table: string, limit?: number) => Pipeline<KeyValueDBTuple>;
34
34
  count: (table: string) => Promise<number>;
35
35
  /**
36
36
  * Perform a batch of Increment operations.
@@ -1,6 +1,6 @@
1
1
  import type { CommonLogger } from '@naturalcycles/js-lib/log';
2
- import type { KeyValueTuple } from '@naturalcycles/js-lib/types';
3
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
2
+ import { type KeyValueTuple } from '@naturalcycles/js-lib/types';
3
+ import type { Pipeline } from '@naturalcycles/nodejs-lib/stream';
4
4
  import type { CommonDaoLogLevel } from '../commondao/common.dao.model.js';
5
5
  import type { CommonDBCreateOptions } from '../db.model.js';
6
6
  import type { CommonKeyValueDB, CommonKeyValueDBSaveBatchOptions, IncrementTuple, KeyValueDBTuple } from './commonKeyValueDB.js';
@@ -49,9 +49,9 @@ export declare class CommonKeyValueDao<K extends string = string, V = Buffer> {
49
49
  saveBatch(entries: KeyValueTuple<K, V>[], opt?: CommonKeyValueDaoSaveOptions): Promise<void>;
50
50
  deleteByIds(ids: K[]): Promise<void>;
51
51
  deleteById(id: K): Promise<void>;
52
- streamIds(limit?: number): ReadableTyped<K>;
53
- streamValues(limit?: number): ReadableTyped<V>;
54
- streamEntries(limit?: number): ReadableTyped<KeyValueTuple<K, V>>;
52
+ streamIds(limit?: number): Pipeline<K>;
53
+ streamValues(limit?: number): Pipeline<V>;
54
+ streamEntries(limit?: number): Pipeline<KeyValueTuple<K, V>>;
55
55
  getAllKeys(limit?: number): Promise<K[]>;
56
56
  getAllValues(limit?: number): Promise<V[]>;
57
57
  getAllEntries(limit?: number): Promise<KeyValueTuple<K, V>[]>;
@@ -1,5 +1,6 @@
1
1
  import { AppError } from '@naturalcycles/js-lib/error/error.util.js';
2
2
  import { pMap } from '@naturalcycles/js-lib/promise/pMap.js';
3
+ import { SKIP } from '@naturalcycles/js-lib/types';
3
4
  import { deflateString, inflateToString } from '@naturalcycles/nodejs-lib/zip';
4
5
  export const commonKeyValueDaoDeflatedJsonTransformer = {
5
6
  valueToBuffer: async (v) => await deflateString(JSON.stringify(v)),
@@ -95,34 +96,30 @@ export class CommonKeyValueDao {
95
96
  if (!transformer) {
96
97
  return this.cfg.db.streamValues(this.cfg.table, limit);
97
98
  }
98
- return this.cfg.db.streamValues(this.cfg.table, limit).flatMap(async (buf) => {
99
+ return this.cfg.db.streamValues(this.cfg.table, limit).map(async (buf) => {
99
100
  try {
100
- return [await transformer.bufferToValue(buf)];
101
+ return await transformer.bufferToValue(buf);
101
102
  }
102
103
  catch (err) {
103
104
  this.cfg.logger.error(err);
104
- return []; // SKIP
105
+ return SKIP;
105
106
  }
106
- }, {
107
- concurrency: 32,
108
- });
107
+ }, { concurrency: 32 });
109
108
  }
110
109
  streamEntries(limit) {
111
110
  const { transformer } = this.cfg;
112
111
  if (!transformer) {
113
112
  return this.cfg.db.streamEntries(this.cfg.table, limit);
114
113
  }
115
- return this.cfg.db.streamEntries(this.cfg.table, limit).flatMap(async ([id, buf]) => {
114
+ return this.cfg.db.streamEntries(this.cfg.table, limit).map(async ([id, buf]) => {
116
115
  try {
117
- return [[id, await transformer.bufferToValue(buf)]];
116
+ return [id, await transformer.bufferToValue(buf)];
118
117
  }
119
118
  catch (err) {
120
119
  this.cfg.logger.error(err);
121
- return []; // SKIP
120
+ return SKIP;
122
121
  }
123
- }, {
124
- concurrency: 32,
125
- });
122
+ }, { concurrency: 32 });
126
123
  }
127
124
  async getAllKeys(limit) {
128
125
  return await this.streamIds(limit).toArray();
@@ -1,6 +1,6 @@
1
1
  import { ErrorMode } from '@naturalcycles/js-lib/error';
2
2
  import type { AsyncMapper, StringMap, UnixTimestamp } from '@naturalcycles/js-lib/types';
3
- import { type TransformLogProgressOptions, type TransformMapOptions } from '@naturalcycles/nodejs-lib/stream';
3
+ import type { TransformLogProgressOptions, TransformMapOptions } from '@naturalcycles/nodejs-lib/stream';
4
4
  import { NDJsonStats } from '@naturalcycles/nodejs-lib/stream';
5
5
  import type { CommonDB } from '../commondb/common.db.js';
6
6
  import { DBQuery } from '../query/dbQuery.js';
@@ -4,7 +4,6 @@ import { pMap } from '@naturalcycles/js-lib/promise/pMap.js';
4
4
  import { _passthroughMapper } from '@naturalcycles/js-lib/types';
5
5
  import { boldWhite, dimWhite, grey, yellow } from '@naturalcycles/nodejs-lib/colors';
6
6
  import { fs2 } from '@naturalcycles/nodejs-lib/fs2';
7
- import { Pipeline, } from '@naturalcycles/nodejs-lib/stream';
8
7
  import { NDJsonStats } from '@naturalcycles/nodejs-lib/stream';
9
8
  import { DBQuery } from '../query/dbQuery.js';
10
9
  /**
@@ -56,7 +55,8 @@ export async function dbPipelineBackup(opt) {
56
55
  await fs2.writeJsonAsync(schemaFilePath, schema, { spaces: 2 });
57
56
  console.log(`>> ${grey(schemaFilePath)} saved (generated from DB)`);
58
57
  }
59
- await Pipeline.from(db.streamQuery(q))
58
+ await db
59
+ .streamQuery(q)
60
60
  .logProgress({
61
61
  ...opt,
62
62
  logEvery: logEveryPerTable[table] ?? opt.logEvery ?? 1000,
@@ -68,7 +68,7 @@ export async function dbPipelineBackup(opt) {
68
68
  metric: table,
69
69
  })
70
70
  .flattenIfNeeded()
71
- .tap(() => rows++)
71
+ .tapSync(() => rows++)
72
72
  .toNDJsonFile(filePath);
73
73
  const { size: sizeBytes } = await fs2.statAsync(filePath);
74
74
  const stats = NDJsonStats.create({
@@ -1,6 +1,6 @@
1
1
  import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js';
2
2
  import type { AsyncMapper, UnixTimestamp } from '@naturalcycles/js-lib/types';
3
- import { type TransformLogProgressOptions, type TransformMapOptions } from '@naturalcycles/nodejs-lib/stream';
3
+ import type { TransformLogProgressOptions, TransformMapOptions } from '@naturalcycles/nodejs-lib/stream';
4
4
  import { NDJsonStats } from '@naturalcycles/nodejs-lib/stream';
5
5
  import type { CommonDB } from '../commondb/common.db.js';
6
6
  import type { CommonDBSaveOptions } from '../db.model.js';
@@ -3,7 +3,6 @@ import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js';
3
3
  import { pMap } from '@naturalcycles/js-lib/promise/pMap.js';
4
4
  import { _passthroughMapper } from '@naturalcycles/js-lib/types';
5
5
  import { boldWhite, dimWhite, grey, yellow } from '@naturalcycles/nodejs-lib/colors';
6
- import { Pipeline, } from '@naturalcycles/nodejs-lib/stream';
7
6
  import { NDJsonStats } from '@naturalcycles/nodejs-lib/stream';
8
7
  import { DBQuery } from '../query/dbQuery.js';
9
8
  /**
@@ -27,10 +26,10 @@ export async function dbPipelineCopy(opt) {
27
26
  }
28
27
  const saveOptions = saveOptionsPerTable[table] || {};
29
28
  const mapper = mapperPerTable[table] || _passthroughMapper;
30
- const stream = dbInput.streamQuery(q);
31
29
  const started = Date.now();
32
30
  let rows = 0;
33
- await Pipeline.from(stream)
31
+ await dbInput
32
+ .streamQuery(q)
34
33
  .logProgress({
35
34
  logEvery: 1000,
36
35
  ...opt,
@@ -42,7 +41,7 @@ export async function dbPipelineCopy(opt) {
42
41
  metric: table,
43
42
  })
44
43
  .flattenIfNeeded()
45
- .tap(() => rows++)
44
+ .tapSync(() => rows++)
46
45
  .chunk(chunkSize)
47
46
  .forEach(async (dbms) => {
48
47
  await dbOutput.saveBatch(table, dbms, saveOptions);
@@ -1,7 +1,6 @@
1
1
  import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js';
2
2
  import type { AsyncMapper, UnixTimestamp } from '@naturalcycles/js-lib/types';
3
- import { type TransformLogProgressOptions, type TransformMapOptions } from '@naturalcycles/nodejs-lib/stream';
4
- import { NDJsonStats } from '@naturalcycles/nodejs-lib/stream';
3
+ import { NDJsonStats, type TransformLogProgressOptions, type TransformMapOptions } from '@naturalcycles/nodejs-lib/stream';
5
4
  import type { CommonDB } from '../commondb/common.db.js';
6
5
  import type { CommonDBSaveOptions } from '../db.model.js';
7
6
  export interface DBPipelineRestoreOptions extends TransformLogProgressOptions {
@@ -6,8 +6,7 @@ import { pMap } from '@naturalcycles/js-lib/promise/pMap.js';
6
6
  import { _passthroughMapper } from '@naturalcycles/js-lib/types';
7
7
  import { boldWhite, dimWhite, grey, yellow } from '@naturalcycles/nodejs-lib/colors';
8
8
  import { fs2 } from '@naturalcycles/nodejs-lib/fs2';
9
- import { Pipeline, } from '@naturalcycles/nodejs-lib/stream';
10
- import { NDJsonStats } from '@naturalcycles/nodejs-lib/stream';
9
+ import { NDJsonStats, Pipeline, } from '@naturalcycles/nodejs-lib/stream';
11
10
  /**
12
11
  * Pipeline from NDJSON files in a folder (optionally gzipped) to CommonDB.
13
12
  * Allows to define a mapper and a predicate to map/filter objects between input and output.
@@ -69,7 +68,7 @@ export async function dbPipelineRestore(opt) {
69
68
  console.log(`<< ${grey(filePath)} ${dimWhite(_hb(sizeBytes))} started...`);
70
69
  await Pipeline.fromNDJsonFile(filePath)
71
70
  .limitSource(limit)
72
- .tap(() => rows++)
71
+ .tapSync(() => rows++)
73
72
  .logProgress({
74
73
  logEvery: 1000,
75
74
  ...opt,
@@ -1,7 +1,7 @@
1
- import type { AsyncIndexedMapper, BaseDBEntity, ObjectWithId } from '@naturalcycles/js-lib/types';
2
- import { Pipeline, type ReadableTyped } from '@naturalcycles/nodejs-lib/stream';
1
+ import type { BaseDBEntity, ObjectWithId } from '@naturalcycles/js-lib/types';
2
+ import type { Pipeline } from '@naturalcycles/nodejs-lib/stream';
3
3
  import type { CommonDao } from '../commondao/common.dao.js';
4
- import type { CommonDaoOptions, CommonDaoReadOptions, CommonDaoStreamDeleteOptions, CommonDaoStreamForEachOptions, CommonDaoStreamOptions } from '../commondao/common.dao.model.js';
4
+ import type { CommonDaoOptions, CommonDaoReadOptions, CommonDaoStreamDeleteOptions, CommonDaoStreamOptions } from '../commondao/common.dao.model.js';
5
5
  import type { RunQueryResult } from '../db.model.js';
6
6
  /**
7
7
  * Modeled after Firestore operators (WhereFilterOp type)
@@ -100,15 +100,9 @@ export declare class RunnableDBQuery<BM extends BaseDBEntity, DBM extends BaseDB
100
100
  runQueryExtendedAsDBM(opt?: CommonDaoReadOptions): Promise<RunQueryResult<DBM>>;
101
101
  runQueryCount(opt?: CommonDaoReadOptions): Promise<number>;
102
102
  patchByQuery(patch: Partial<DBM>, opt?: CommonDaoOptions): Promise<number>;
103
- streamQueryForEach(mapper: AsyncIndexedMapper<BM, void>, opt?: CommonDaoStreamForEachOptions<BM>): Promise<void>;
104
- streamQueryAsDBMForEach(mapper: AsyncIndexedMapper<DBM, void>, opt?: CommonDaoStreamForEachOptions<DBM>): Promise<void>;
105
- streamQuery(opt?: CommonDaoStreamOptions<BM>): ReadableTyped<BM>;
106
- streamQueryAsDBM(opt?: CommonDaoStreamOptions<DBM>): ReadableTyped<DBM>;
107
- pipeline(opt?: CommonDaoStreamOptions<BM>): Pipeline<BM>;
108
- pipelineAsDBM(opt?: CommonDaoStreamOptions<DBM>): Pipeline<DBM>;
103
+ streamQuery(opt?: CommonDaoStreamOptions<BM>): Pipeline<BM>;
104
+ streamQueryAsDBM(opt?: CommonDaoStreamOptions<DBM>): Pipeline<DBM>;
109
105
  queryIds(opt?: CommonDaoReadOptions): Promise<ID[]>;
110
- streamQueryIds(opt?: CommonDaoStreamOptions<ID>): ReadableTyped<ID>;
111
- pipelineIds(opt?: CommonDaoStreamOptions<ID>): Pipeline<ID>;
112
- streamQueryIdsForEach(mapper: AsyncIndexedMapper<ID, void>, opt?: CommonDaoStreamForEachOptions<ID>): Promise<void>;
106
+ streamQueryIds(opt?: CommonDaoStreamOptions<ID>): Pipeline<ID>;
113
107
  deleteByQuery(opt?: CommonDaoStreamDeleteOptions<DBM>): Promise<number>;
114
108
  }
@@ -1,6 +1,5 @@
1
1
  import { _truncate } from '@naturalcycles/js-lib/string/string.util.js';
2
2
  import { _objectAssign } from '@naturalcycles/js-lib/types';
3
- import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
4
3
  export const dbQueryFilterOperatorValues = [
5
4
  '<',
6
5
  '<=',
@@ -176,36 +175,18 @@ export class RunnableDBQuery extends DBQuery {
176
175
  async patchByQuery(patch, opt) {
177
176
  return await this.dao.patchByQuery(this, patch, opt);
178
177
  }
179
- async streamQueryForEach(mapper, opt) {
180
- await this.dao.streamQueryForEach(this, mapper, opt);
181
- }
182
- async streamQueryAsDBMForEach(mapper, opt) {
183
- await this.dao.streamQueryAsDBMForEach(this, mapper, opt);
184
- }
185
178
  streamQuery(opt) {
186
179
  return this.dao.streamQuery(this, opt);
187
180
  }
188
181
  streamQueryAsDBM(opt) {
189
182
  return this.dao.streamQueryAsDBM(this, opt);
190
183
  }
191
- pipeline(opt) {
192
- return Pipeline.from(this.dao.streamQuery(this, opt));
193
- }
194
- pipelineAsDBM(opt) {
195
- return Pipeline.from(this.dao.streamQueryAsDBM(this, opt));
196
- }
197
184
  async queryIds(opt) {
198
185
  return await this.dao.queryIds(this, opt);
199
186
  }
200
187
  streamQueryIds(opt) {
201
188
  return this.dao.streamQueryIds(this, opt);
202
189
  }
203
- pipelineIds(opt) {
204
- return Pipeline.from(this.dao.streamQueryIds(this, opt));
205
- }
206
- async streamQueryIdsForEach(mapper, opt) {
207
- await this.dao.streamQueryIdsForEach(this, mapper, opt);
208
- }
209
190
  async deleteByQuery(opt) {
210
191
  return await this.dao.deleteByQuery(this, opt);
211
192
  }
@@ -258,8 +258,8 @@ export async function runCommonDBTest(db, quirks = {}) {
258
258
  await tx.saveBatch(TEST_TABLE, [{ ...items[0], k1: 5, id: null }]);
259
259
  });
260
260
  }
261
- catch (err_) {
262
- err = err_;
261
+ catch (err2) {
262
+ err = err2;
263
263
  }
264
264
  expect(err).toBeDefined();
265
265
  const { rows } = await db.runQuery(queryAll());
@@ -179,7 +179,10 @@ export async function runCommonDaoTest(db, quirks = {}) {
179
179
  if (support.streaming) {
180
180
  test('streamQueryForEach all', async () => {
181
181
  let rows = [];
182
- await dao.query().streamQueryForEach(bm => void rows.push(bm));
182
+ await dao
183
+ .query()
184
+ .streamQuery()
185
+ .forEachSync(bm => void rows.push(bm));
183
186
  rows = _sortBy(rows, r => r.id);
184
187
  expectMatch(expectedItems, rows, quirks);
185
188
  });
@@ -190,7 +193,10 @@ export async function runCommonDaoTest(db, quirks = {}) {
190
193
  });
191
194
  test('streamQueryIdsForEach all', async () => {
192
195
  let ids = [];
193
- await dao.query().streamQueryIdsForEach(id => void ids.push(id));
196
+ await dao
197
+ .query()
198
+ .streamQueryIds()
199
+ .forEachSync(id => void ids.push(id));
194
200
  ids = ids.sort();
195
201
  expectMatch(expectedItems.map(i => i.id), ids, quirks);
196
202
  });
@@ -303,8 +309,8 @@ export async function runCommonDaoTest(db, quirks = {}) {
303
309
  await tx.save(dao, { ...items[0], k1: 5 }); // it should fail here
304
310
  });
305
311
  }
306
- catch (err_) {
307
- err = err_;
312
+ catch (err2) {
313
+ err = err2;
308
314
  }
309
315
  expect(err).toBeDefined();
310
316
  expect(err).toBeInstanceOf(Error);
@@ -321,8 +327,8 @@ export async function runCommonDaoTest(db, quirks = {}) {
321
327
  await tx.save(dao, { ...items[0], k1: 5 }); // it should fail here
322
328
  await tx.commit();
323
329
  }
324
- catch (err_) {
325
- err = err_;
330
+ catch (err2) {
331
+ err = err2;
326
332
  }
327
333
  expect(err).toBeDefined();
328
334
  expect(err).toBeInstanceOf(Error);
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@naturalcycles/db-lib",
3
3
  "type": "module",
4
- "version": "10.26.1",
4
+ "version": "10.28.0",
5
5
  "dependencies": {
6
6
  "@naturalcycles/js-lib": "^15",
7
7
  "@naturalcycles/nodejs-lib": "^15"
@@ -1,8 +1,7 @@
1
- import { Readable } from 'node:stream'
2
1
  import { _isTruthy } from '@naturalcycles/js-lib'
3
2
  import type { JsonSchemaObject, JsonSchemaRootObject } from '@naturalcycles/js-lib/json-schema'
4
3
  import type { ObjectWithId, StringMap } from '@naturalcycles/js-lib/types'
5
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream'
4
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream'
6
5
  import { BaseCommonDB } from '../../commondb/base.common.db.js'
7
6
  import type { CommonDB, CommonDBSupport } from '../../commondb/common.db.js'
8
7
  import { commonDBFullSupport } from '../../commondb/common.db.js'
@@ -163,7 +162,7 @@ export class CacheDB extends BaseCommonDB implements CommonDB {
163
162
  }
164
163
 
165
164
  // Don't save to cache if it was a projection query
166
- if (!opt.skipCache && !opt.skipCache && !q._selectedFieldNames) {
165
+ if (!opt.skipCache && !this.cfg.skipCache && !q._selectedFieldNames) {
167
166
  const cacheResult = this.cfg.cacheDB.saveBatch(q.table, rows as any, opt)
168
167
  if (this.cfg.awaitCache) await cacheResult
169
168
  }
@@ -201,9 +200,9 @@ export class CacheDB extends BaseCommonDB implements CommonDB {
201
200
  override streamQuery<ROW extends ObjectWithId>(
202
201
  q: DBQuery<ROW>,
203
202
  opt: CacheDBStreamOptions = {},
204
- ): ReadableTyped<ROW> {
203
+ ): Pipeline<ROW> {
205
204
  if (!opt.onlyCache && !this.cfg.onlyCache) {
206
- const stream = this.cfg.downstreamDB.streamQuery<ROW>(q, opt)
205
+ const pipeline = this.cfg.downstreamDB.streamQuery<ROW>(q, opt)
207
206
 
208
207
  // Don't save to cache if it was a projection query
209
208
  if (!opt.skipCache && !this.cfg.skipCache && !q._selectedFieldNames) {
@@ -216,12 +215,12 @@ export class CacheDB extends BaseCommonDB implements CommonDB {
216
215
  // })
217
216
  }
218
217
 
219
- return stream
218
+ return pipeline
220
219
  }
221
220
 
222
- if (opt.skipCache || this.cfg.skipCache) return Readable.from([])
221
+ if (opt.skipCache || this.cfg.skipCache) return Pipeline.fromArray([])
223
222
 
224
- const stream = this.cfg.cacheDB.streamQuery<ROW>(q, opt)
223
+ const pipeline = this.cfg.cacheDB.streamQuery<ROW>(q, opt)
225
224
 
226
225
  // if (this.cfg.logCached) {
227
226
  // let count = 0
@@ -234,7 +233,7 @@ export class CacheDB extends BaseCommonDB implements CommonDB {
234
233
  // })
235
234
  // }
236
235
 
237
- return stream
236
+ return pipeline
238
237
  }
239
238
 
240
239
  override async deleteByQuery<ROW extends ObjectWithId>(
@@ -1,3 +1,4 @@
1
+ import { Readable } from 'node:stream'
1
2
  import { _by, _sortBy } from '@naturalcycles/js-lib/array'
2
3
  import { _since, localTime } from '@naturalcycles/js-lib/datetime'
3
4
  import { _assert } from '@naturalcycles/js-lib/error/assert.js'
@@ -10,8 +11,7 @@ import {
10
11
  type UnixTimestampMillis,
11
12
  } from '@naturalcycles/js-lib/types'
12
13
  import { dimGrey } from '@naturalcycles/nodejs-lib/colors'
13
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream'
14
- import { readableCreate } from '@naturalcycles/nodejs-lib/stream'
14
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream'
15
15
  import { BaseCommonDB } from '../../commondb/base.common.db.js'
16
16
  import type { CommonDB, CommonDBSupport } from '../../commondb/common.db.js'
17
17
  import { commonDBFullSupport } from '../../commondb/common.db.js'
@@ -127,15 +127,11 @@ export class FileDB extends BaseCommonDB implements CommonDB {
127
127
  override streamQuery<ROW extends ObjectWithId>(
128
128
  q: DBQuery<ROW>,
129
129
  opt?: CommonDBStreamOptions,
130
- ): ReadableTyped<ROW> {
131
- const readable = readableCreate<ROW>()
132
-
133
- void this.runQuery(q, opt).then(({ rows }) => {
134
- rows.forEach(r => readable.push(r))
135
- readable.push(null) // done
130
+ ): Pipeline<ROW> {
131
+ return Pipeline.fromAsyncReadable(async () => {
132
+ const { rows } = await this.runQuery(q, opt)
133
+ return Readable.from(rows)
136
134
  })
137
-
138
- return readable
139
135
  }
140
136
 
141
137
  override async deleteByQuery<ROW extends ObjectWithId>(
@@ -2,10 +2,7 @@ import type { ValidationFunction } from '@naturalcycles/js-lib'
2
2
  import type { AppError, ErrorMode } from '@naturalcycles/js-lib/error'
3
3
  import type { CommonLogger } from '@naturalcycles/js-lib/log'
4
4
  import type { BaseDBEntity, UnixTimestamp } from '@naturalcycles/js-lib/types'
5
- import type {
6
- TransformLogProgressOptions,
7
- TransformMapOptions,
8
- } from '@naturalcycles/nodejs-lib/stream'
5
+ import type { TransformLogProgressOptions } from '@naturalcycles/nodejs-lib/stream'
9
6
  import type { CommonDB } from '../commondb/common.db.js'
10
7
  import type { CommonDBCreateOptions, CommonDBOptions, CommonDBSaveOptions } from '../db.model.js'
11
8
 
@@ -70,17 +67,6 @@ export interface CommonDaoHooks<
70
67
  */
71
68
  beforeSave?: (dbm: DBM) => void
72
69
 
73
- /**
74
- * Called in:
75
- * - dbmToBM (applied before DBM becomes BM)
76
- * - anyToDBM
77
- *
78
- * Hook only allows to apply anonymization to DBM (not to BM).
79
- * It still applies to BM "transitively", during dbmToBM
80
- * (e.g after loaded from the Database).
81
- */
82
- anonymize: (dbm: DBM) => DBM
83
-
84
70
  /**
85
71
  * If hook is defined - allows to prevent or modify the error thrown.
86
72
  * Return `false` to prevent throwing an error.
@@ -280,13 +266,6 @@ export interface CommonDaoOptions extends CommonDBOptions {
280
266
  */
281
267
  allowMutability?: boolean
282
268
 
283
- /**
284
- * If true - data will be anonymized (by calling a BaseDao.anonymize() hook that you can extend in your Dao implementation).
285
- * Only applicable to loading/querying/streaming_loading operations (n/a for saving).
286
- * There is additional validation applied AFTER Anonymization, so your anonymization implementation should keep the object valid.
287
- */
288
- anonymize?: boolean
289
-
290
269
  /**
291
270
  * Allows to override the Table that this Dao is connected to, only in the context of this call.
292
271
  *
@@ -363,10 +342,6 @@ export interface CommonDaoStreamSaveOptions<DBM extends BaseDBEntity>
363
342
  extends CommonDaoSaveBatchOptions<DBM>,
364
343
  CommonDaoStreamOptions<DBM> {}
365
344
 
366
- export interface CommonDaoStreamForEachOptions<IN>
367
- extends CommonDaoStreamOptions<IN>,
368
- TransformMapOptions<IN, any> {}
369
-
370
345
  export interface CommonDaoStreamOptions<IN>
371
346
  extends CommonDaoReadOptions,
372
347
  TransformLogProgressOptions<IN> {