@naturalcycles/db-lib 10.26.1 → 10.27.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/dist/adapter/cachedb/cache.db.d.ts +2 -2
  2. package/dist/adapter/cachedb/cache.db.js +6 -6
  3. package/dist/adapter/file/file.db.d.ts +2 -2
  4. package/dist/adapter/file/file.db.js +5 -6
  5. package/dist/commondao/common.dao.d.ts +7 -21
  6. package/dist/commondao/common.dao.js +36 -153
  7. package/dist/commondao/common.dao.model.d.ts +1 -19
  8. package/dist/commondb/base.common.db.d.ts +2 -2
  9. package/dist/commondb/common.db.d.ts +2 -2
  10. package/dist/inmemory/inMemory.db.d.ts +2 -2
  11. package/dist/inmemory/inMemory.db.js +2 -2
  12. package/dist/inmemory/inMemoryKeyValueDB.d.ts +4 -4
  13. package/dist/inmemory/inMemoryKeyValueDB.js +4 -4
  14. package/dist/kv/commonKeyValueDB.d.ts +4 -4
  15. package/dist/kv/commonKeyValueDao.d.ts +5 -5
  16. package/dist/kv/commonKeyValueDao.js +9 -12
  17. package/dist/pipeline/dbPipelineBackup.d.ts +1 -1
  18. package/dist/pipeline/dbPipelineBackup.js +3 -3
  19. package/dist/pipeline/dbPipelineCopy.d.ts +1 -1
  20. package/dist/pipeline/dbPipelineCopy.js +3 -4
  21. package/dist/pipeline/dbPipelineRestore.js +1 -1
  22. package/dist/query/dbQuery.d.ts +6 -12
  23. package/dist/query/dbQuery.js +0 -19
  24. package/dist/testing/commonDaoTest.js +8 -2
  25. package/package.json +2 -2
  26. package/src/adapter/cachedb/cache.db.ts +7 -8
  27. package/src/adapter/file/file.db.ts +6 -10
  28. package/src/commondao/common.dao.model.ts +1 -26
  29. package/src/commondao/common.dao.ts +45 -205
  30. package/src/commondb/base.common.db.ts +2 -2
  31. package/src/commondb/common.db.ts +2 -2
  32. package/src/inmemory/inMemory.db.ts +3 -7
  33. package/src/inmemory/inMemoryKeyValueDB.ts +7 -8
  34. package/src/kv/commonKeyValueDB.ts +4 -4
  35. package/src/kv/commonKeyValueDao.ts +16 -20
  36. package/src/pipeline/dbPipelineBackup.ts +6 -6
  37. package/src/pipeline/dbPipelineCopy.ts +6 -8
  38. package/src/pipeline/dbPipelineRestore.ts +1 -1
  39. package/src/query/dbQuery.ts +5 -39
  40. package/src/testing/commonDaoTest.ts +8 -2
@@ -13,7 +13,6 @@ import { pMap } from '@naturalcycles/js-lib/promise/pMap.js'
13
13
  import {
14
14
  _stringMapEntries,
15
15
  _stringMapValues,
16
- type AsyncIndexedMapper,
17
16
  type BaseDBEntity,
18
17
  type NonNegativeInteger,
19
18
  type ObjectWithId,
@@ -22,17 +21,11 @@ import {
22
21
  } from '@naturalcycles/js-lib/types'
23
22
  import { _passthroughPredicate, _typeCast } from '@naturalcycles/js-lib/types'
24
23
  import { stringId } from '@naturalcycles/nodejs-lib'
25
- import {
26
- Pipeline,
27
- type ReadableTyped,
28
- transformFlatten,
29
- transformMapSync,
30
- } from '@naturalcycles/nodejs-lib/stream'
24
+ import { type Pipeline, transformFlatten } from '@naturalcycles/nodejs-lib/stream'
31
25
  import {
32
26
  transformChunk,
33
27
  transformLogProgress,
34
28
  transformMap,
35
- transformNoOp,
36
29
  } from '@naturalcycles/nodejs-lib/stream'
37
30
  import { DBLibError } from '../cnst.js'
38
31
  import type {
@@ -53,7 +46,6 @@ import type {
53
46
  CommonDaoSaveBatchOptions,
54
47
  CommonDaoSaveOptions,
55
48
  CommonDaoStreamDeleteOptions,
56
- CommonDaoStreamForEachOptions,
57
49
  CommonDaoStreamOptions,
58
50
  CommonDaoStreamSaveOptions,
59
51
  } from './common.dao.model.js'
@@ -84,7 +76,6 @@ export class CommonDao<
84
76
  hooks: {
85
77
  parseNaturalId: () => ({}),
86
78
  beforeCreate: bm => bm as BM,
87
- anonymize: dbm => dbm,
88
79
  onValidationError: err => err,
89
80
  ...cfg.hooks,
90
81
  } satisfies Partial<CommonDaoHooks<BM, DBM, ID>>,
@@ -240,153 +231,34 @@ export class CommonDao<
240
231
  return await this.cfg.db.runQueryCount(q, opt)
241
232
  }
242
233
 
243
- async streamQueryForEach(
244
- q: DBQuery<DBM>,
245
- mapper: AsyncIndexedMapper<BM, void>,
246
- opt: CommonDaoStreamForEachOptions<BM> = {},
247
- ): Promise<void> {
248
- this.validateQueryIndexes(q) // throws if query uses `excludeFromIndexes` property
249
- q.table = opt.table || q.table
250
- opt.skipValidation = opt.skipValidation !== false // default true
251
- opt.errorMode ||= ErrorMode.SUPPRESS
234
+ streamQueryAsDBM(q: DBQuery<DBM>, opt: CommonDaoStreamOptions<DBM> = {}): Pipeline<DBM> {
235
+ const pipeline = this.streamQueryRaw(q, opt)
252
236
 
253
237
  const isPartialQuery = !!q._selectedFieldNames
238
+ if (isPartialQuery) return pipeline
254
239
 
255
- await Pipeline.from(this.cfg.db.streamQuery<DBM>(q, opt))
256
- .map(
257
- async dbm => {
258
- if (isPartialQuery) return dbm as any
259
- return await this.dbmToBM(dbm, opt)
260
- },
261
- {
262
- errorMode: opt.errorMode,
263
- },
264
- )
265
- .map(mapper, {
266
- ...opt,
267
- predicate: _passthroughPredicate, // to be able to logProgress
268
- })
269
- // LogProgress should be AFTER the mapper, to be able to report correct stats
270
- .logProgress({
271
- metric: q.table,
272
- ...opt,
273
- })
274
- .run()
275
- }
276
-
277
- async streamQueryAsDBMForEach(
278
- q: DBQuery<DBM>,
279
- mapper: AsyncIndexedMapper<DBM, void>,
280
- opt: CommonDaoStreamForEachOptions<DBM> = {},
281
- ): Promise<void> {
282
- this.validateQueryIndexes(q) // throws if query uses `excludeFromIndexes` property
283
- q.table = opt.table || q.table
284
- opt.skipValidation = opt.skipValidation !== false // default true
240
+ opt.skipValidation ??= true
285
241
  opt.errorMode ||= ErrorMode.SUPPRESS
286
242
 
287
- const isPartialQuery = !!q._selectedFieldNames
288
-
289
- await Pipeline.from(this.cfg.db.streamQuery<any>(q, opt))
290
- .mapSync(
291
- dbm => {
292
- if (isPartialQuery) return dbm
293
- return this.anyToDBM(dbm, opt)
294
- },
295
- {
296
- errorMode: opt.errorMode,
297
- },
298
- )
299
- .map(mapper, {
300
- ...opt,
301
- predicate: _passthroughPredicate, // to be able to logProgress
302
- })
303
- .logProgress({
304
- metric: q.table,
305
- ...opt,
306
- })
307
- .run()
243
+ return pipeline.mapSync(dbm => this.anyToDBM(dbm, opt), { errorMode: opt.errorMode })
308
244
  }
309
245
 
310
- /**
311
- * Stream as Readable, to be able to .pipe() it further with support of backpressure.
312
- */
313
- streamQueryAsDBM(q: DBQuery<DBM>, opt: CommonDaoStreamOptions<DBM> = {}): ReadableTyped<DBM> {
314
- this.validateQueryIndexes(q) // throws if query uses `excludeFromIndexes` property
315
- q.table = opt.table || q.table
316
- opt.skipValidation = opt.skipValidation !== false // default true
317
- opt.errorMode ||= ErrorMode.SUPPRESS
246
+ streamQuery(q: DBQuery<DBM>, opt: CommonDaoStreamOptions<BM> = {}): Pipeline<BM> {
247
+ const pipeline = this.streamQueryRaw(q, opt)
318
248
 
319
249
  const isPartialQuery = !!q._selectedFieldNames
250
+ if (isPartialQuery) return pipeline as any as Pipeline<BM>
320
251
 
321
- const stream = this.cfg.db.streamQuery<DBM>(q, opt)
322
- if (isPartialQuery) return stream
323
-
324
- return (
325
- stream
326
- // the commented out line was causing RangeError: Maximum call stack size exceeded
327
- // .on('error', err => stream.emit('error', err))
328
- .pipe(
329
- transformMapSync<any, DBM>(
330
- dbm => {
331
- return this.anyToDBM(dbm, opt)
332
- },
333
- {
334
- errorMode: opt.errorMode,
335
- },
336
- ),
337
- )
338
- )
252
+ opt.skipValidation ??= true
253
+ opt.errorMode ||= ErrorMode.SUPPRESS
254
+
255
+ return pipeline.map(async dbm => await this.dbmToBM(dbm, opt), { errorMode: opt.errorMode })
339
256
  }
340
257
 
341
- /**
342
- * Stream as Readable, to be able to .pipe() it further with support of backpressure.
343
- *
344
- * Please note that this stream is currently not async-iteration friendly, because of
345
- * `through2` usage.
346
- * Will be migrated/fixed at some point in the future.
347
- *
348
- * You can do `.pipe(transformNoOp)` to make it "valid again".
349
- */
350
- streamQuery(q: DBQuery<DBM>, opt: CommonDaoStreamOptions<BM> = {}): ReadableTyped<BM> {
258
+ private streamQueryRaw(q: DBQuery<DBM>, opt: CommonDaoStreamOptions<any> = {}): Pipeline<DBM> {
351
259
  this.validateQueryIndexes(q) // throws if query uses `excludeFromIndexes` property
352
260
  q.table = opt.table || q.table
353
- opt.skipValidation = opt.skipValidation !== false // default true
354
- opt.errorMode ||= ErrorMode.SUPPRESS
355
-
356
- const stream = this.cfg.db.streamQuery<DBM>(q, opt)
357
- const isPartialQuery = !!q._selectedFieldNames
358
- if (isPartialQuery) return stream as any
359
-
360
- // This almost works, but hard to implement `errorMode: THROW_AGGREGATED` in this case
361
- // return stream.flatMap(async (dbm: DBM) => {
362
- // return [await this.dbmToBM(dbm, opt)] satisfies BM[]
363
- // }, {
364
- // concurrency: 16,
365
- // })
366
-
367
- return (
368
- stream
369
- // optimization: 1 validation is enough
370
- // .pipe(transformMap<any, DBM>(dbm => this.anyToDBM(dbm, opt), safeOpt))
371
- // .pipe(transformMap<DBM, BM>(dbm => this.dbmToBM(dbm, opt), safeOpt))
372
- // the commented out line was causing RangeError: Maximum call stack size exceeded
373
- // .on('error', err => stream.emit('error', err))
374
- .pipe(
375
- transformMap<DBM, BM>(
376
- async dbm => {
377
- return await this.dbmToBM(dbm, opt)
378
- },
379
- {
380
- errorMode: opt.errorMode,
381
- },
382
- ),
383
- )
384
- // this can make the stream async-iteration-friendly
385
- // but not applying it now for perf reasons
386
- // UPD: applying, to be compliant with `.toArray()`, etc.
387
- .on('error', err => stream.destroy(err))
388
- .pipe(transformNoOp())
389
- )
261
+ return this.cfg.db.streamQuery<DBM>(q, opt)
390
262
  }
391
263
 
392
264
  async queryIds(q: DBQuery<DBM>, opt: CommonDaoReadOptions = {}): Promise<ID[]> {
@@ -396,49 +268,12 @@ export class CommonDao<
396
268
  return rows.map(r => r.id as ID)
397
269
  }
398
270
 
399
- streamQueryIds(q: DBQuery<DBM>, opt: CommonDaoStreamOptions<ID> = {}): ReadableTyped<ID> {
271
+ streamQueryIds(q: DBQuery<DBM>, opt: CommonDaoStreamOptions<ID> = {}): Pipeline<ID> {
400
272
  this.validateQueryIndexes(q) // throws if query uses `excludeFromIndexes` property
401
273
  q.table = opt.table || q.table
402
274
  opt.errorMode ||= ErrorMode.SUPPRESS
403
275
 
404
- // Experimental: using `.map()`
405
- const stream: ReadableTyped<ID> = this.cfg.db
406
- .streamQuery<DBM>(q.select(['id']), opt)
407
- // .on('error', err => stream.emit('error', err))
408
- .map((r: ObjectWithId) => r.id as ID)
409
-
410
- // const stream: ReadableTyped<string> = this.cfg.db
411
- // .streamQuery<DBM>(q.select(['id']), opt)
412
- // .on('error', err => stream.emit('error', err))
413
- // .pipe(
414
- // transformMapSimple<DBM, string>(r => r.id, {
415
- // errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
416
- // }),
417
- // )
418
-
419
- return stream
420
- }
421
-
422
- async streamQueryIdsForEach(
423
- q: DBQuery<DBM>,
424
- mapper: AsyncIndexedMapper<ID, void>,
425
- opt: CommonDaoStreamForEachOptions<ID> = {},
426
- ): Promise<void> {
427
- this.validateQueryIndexes(q) // throws if query uses `excludeFromIndexes` property
428
- q.table = opt.table || q.table
429
- opt.errorMode ||= ErrorMode.SUPPRESS
430
-
431
- await Pipeline.from(this.cfg.db.streamQuery<DBM>(q.select(['id']), opt).map(r => r.id as ID))
432
- .map(mapper, {
433
- ...opt,
434
- predicate: _passthroughPredicate,
435
- })
436
- // LogProgress should be AFTER the mapper, to be able to report correct stats
437
- .logProgress({
438
- metric: q.table,
439
- ...opt,
440
- })
441
- .run()
276
+ return this.cfg.db.streamQuery(q.select(['id']), opt).mapSync((r: ObjectWithId) => r.id as ID)
442
277
  }
443
278
 
444
279
  /**
@@ -791,7 +626,9 @@ export class CommonDao<
791
626
  if (opt.chunkSize) {
792
627
  const { chunkSize, chunkConcurrency = 8 } = opt
793
628
 
794
- await Pipeline.from(this.cfg.db.streamQuery<DBM>(q.select(['id']), opt).map(r => r.id))
629
+ await this.cfg.db
630
+ .streamQuery<DBM>(q.select(['id']), opt)
631
+ .mapSync(r => r.id)
795
632
  .chunk(chunkSize)
796
633
  .map(
797
634
  async ids => {
@@ -876,11 +713,7 @@ export class CommonDao<
876
713
 
877
714
  // optimization: no need to run full joi DBM validation, cause BM validation will be run
878
715
  // const dbm = this.anyToDBM(_dbm, opt)
879
- let dbm: DBM = { ..._dbm, ...this.cfg.hooks!.parseNaturalId!(_dbm.id as ID) }
880
-
881
- if (opt.anonymize) {
882
- dbm = this.cfg.hooks!.anonymize!(dbm)
883
- }
716
+ const dbm: DBM = { ..._dbm, ...this.cfg.hooks!.parseNaturalId!(_dbm.id as ID) }
884
717
 
885
718
  // DBM > BM
886
719
  const bm = ((await this.cfg.hooks!.beforeDBMToBM?.(dbm)) || dbm) as Partial<BM>
@@ -916,7 +749,7 @@ export class CommonDao<
916
749
 
917
750
  anyToDBM(dbm: undefined, opt?: CommonDaoOptions): null
918
751
  anyToDBM(dbm?: any, opt?: CommonDaoOptions): DBM
919
- anyToDBM(dbm?: DBM, opt: CommonDaoOptions = {}): DBM | null {
752
+ anyToDBM(dbm?: DBM, _opt: CommonDaoOptions = {}): DBM | null {
920
753
  if (!dbm) return null
921
754
 
922
755
  // this shouldn't be happening on load! but should on save!
@@ -924,12 +757,6 @@ export class CommonDao<
924
757
 
925
758
  dbm = { ...dbm, ...this.cfg.hooks!.parseNaturalId!(dbm.id as ID) }
926
759
 
927
- // todo: is this the right place?
928
- // todo: is anyToDBM even needed?
929
- if (opt.anonymize) {
930
- dbm = this.cfg.hooks!.anonymize!(dbm)
931
- }
932
-
933
760
  // Validate/convert DBM
934
761
  // return this.validateAndConvert(dbm, this.cfg.dbmSchema, DBModelType.DBM, opt)
935
762
  return dbm
@@ -1253,17 +1080,30 @@ export class CommonDao<
1253
1080
  * Throws if query uses a property that is in `excludeFromIndexes` list.
1254
1081
  */
1255
1082
  private validateQueryIndexes(q: DBQuery<DBM>): void {
1256
- const { excludeFromIndexes } = this.cfg
1257
- if (!excludeFromIndexes) return
1083
+ const { excludeFromIndexes, indexes } = this.cfg
1258
1084
 
1259
- for (const f of q._filters) {
1260
- _assert(
1261
- !excludeFromIndexes.includes(f.name),
1262
- `cannot query on non-indexed property: ${this.cfg.table}.${f.name as string}`,
1263
- {
1264
- query: q.pretty(),
1265
- },
1266
- )
1085
+ if (excludeFromIndexes) {
1086
+ for (const f of q._filters) {
1087
+ _assert(
1088
+ !excludeFromIndexes.includes(f.name),
1089
+ `cannot query on non-indexed property: ${this.cfg.table}.${f.name as string}`,
1090
+ {
1091
+ query: q.pretty(),
1092
+ },
1093
+ )
1094
+ }
1095
+ }
1096
+
1097
+ if (indexes) {
1098
+ for (const f of q._filters) {
1099
+ _assert(
1100
+ f.name === 'id' || indexes.includes(f.name),
1101
+ `cannot query on non-indexed property: ${this.cfg.table}.${f.name as string}`,
1102
+ {
1103
+ query: q.pretty(),
1104
+ },
1105
+ )
1106
+ }
1267
1107
  }
1268
1108
  }
1269
1109
  }
@@ -1,6 +1,6 @@
1
1
  import type { JsonSchemaObject, JsonSchemaRootObject } from '@naturalcycles/js-lib/json-schema'
2
2
  import type { ObjectWithId, StringMap } from '@naturalcycles/js-lib/types'
3
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream'
3
+ import type { Pipeline } from '@naturalcycles/nodejs-lib/stream'
4
4
  import type {
5
5
  CommonDBOptions,
6
6
  CommonDBReadOptions,
@@ -86,7 +86,7 @@ export class BaseCommonDB implements CommonDB {
86
86
  throw new Error('saveBatch is not implemented')
87
87
  }
88
88
 
89
- streamQuery<ROW extends ObjectWithId>(_q: DBQuery<ROW>): ReadableTyped<ROW> {
89
+ streamQuery<ROW extends ObjectWithId>(_q: DBQuery<ROW>): Pipeline<ROW> {
90
90
  throw new Error('streamQuery is not implemented')
91
91
  }
92
92
 
@@ -1,6 +1,6 @@
1
1
  import type { JsonSchemaObject, JsonSchemaRootObject } from '@naturalcycles/js-lib/json-schema'
2
2
  import type { NonNegativeInteger, ObjectWithId, StringMap } from '@naturalcycles/js-lib/types'
3
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream'
3
+ import type { Pipeline } from '@naturalcycles/nodejs-lib/stream'
4
4
  import type {
5
5
  CommonDBCreateOptions,
6
6
  CommonDBOptions,
@@ -105,7 +105,7 @@ export interface CommonDB {
105
105
  streamQuery: <ROW extends ObjectWithId>(
106
106
  q: DBQuery<ROW>,
107
107
  opt?: CommonDBStreamOptions,
108
- ) => ReadableTyped<ROW>
108
+ ) => Pipeline<ROW>
109
109
 
110
110
  // SAVE
111
111
  /**
@@ -1,4 +1,3 @@
1
- import { Readable } from 'node:stream'
2
1
  import { _isEmptyObject } from '@naturalcycles/js-lib'
3
2
  import { _assert } from '@naturalcycles/js-lib/error/assert.js'
4
3
  import {
@@ -15,8 +14,8 @@ import {
15
14
  type ObjectWithId,
16
15
  type StringMap,
17
16
  } from '@naturalcycles/js-lib/types'
17
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream'
18
18
  import { bufferReviver } from '@naturalcycles/nodejs-lib/stream/ndjson/transformJsonParse.js'
19
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream/stream.model.js'
20
19
  import type { CommonDB, CommonDBSupport } from '../commondb/common.db.js'
21
20
  import { commonDBFullSupport, CommonDBType } from '../commondb/common.db.js'
22
21
  import type {
@@ -278,12 +277,9 @@ export class InMemoryDB implements CommonDB {
278
277
  return queryInMemory<any>(q, Object.values(this.data[table] || {})).length
279
278
  }
280
279
 
281
- streamQuery<ROW extends ObjectWithId>(
282
- q: DBQuery<ROW>,
283
- _opt?: CommonDBOptions,
284
- ): ReadableTyped<ROW> {
280
+ streamQuery<ROW extends ObjectWithId>(q: DBQuery<ROW>, _opt?: CommonDBOptions): Pipeline<ROW> {
285
281
  const table = this.cfg.tablesPrefix + q.table
286
- return Readable.from(queryInMemory(q, Object.values(this.data[table] || {}) as ROW[]))
282
+ return Pipeline.fromArray(queryInMemory(q, Object.values(this.data[table] || {}) as ROW[]))
287
283
  }
288
284
 
289
285
  async runInTransaction(fn: DBTransactionFn, opt: CommonDBTransactionOptions = {}): Promise<void> {
@@ -1,6 +1,5 @@
1
- import { Readable } from 'node:stream'
2
1
  import type { StringMap } from '@naturalcycles/js-lib/types'
3
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream'
2
+ import { Pipeline } from '@naturalcycles/nodejs-lib/stream'
4
3
  import type { CommonDBCreateOptions } from '../db.model.js'
5
4
  import type { CommonKeyValueDB, IncrementTuple, KeyValueDBTuple } from '../kv/commonKeyValueDB.js'
6
5
  import { commonKeyValueDBFullSupport } from '../kv/commonKeyValueDB.js'
@@ -40,16 +39,16 @@ export class InMemoryKeyValueDB implements CommonKeyValueDB {
40
39
  }
41
40
  }
42
41
 
43
- streamIds(table: string, limit?: number): ReadableTyped<string> {
44
- return Readable.from(Object.keys(this.data[table] || {}).slice(0, limit))
42
+ streamIds(table: string, limit?: number): Pipeline<string> {
43
+ return Pipeline.fromArray(Object.keys(this.data[table] || {}).slice(0, limit))
45
44
  }
46
45
 
47
- streamValues(table: string, limit?: number): ReadableTyped<Buffer> {
48
- return Readable.from(Object.values(this.data[table] || {}).slice(0, limit))
46
+ streamValues(table: string, limit?: number): Pipeline<Buffer> {
47
+ return Pipeline.fromArray(Object.values(this.data[table] || {}).slice(0, limit))
49
48
  }
50
49
 
51
- streamEntries(table: string, limit?: number): ReadableTyped<KeyValueDBTuple> {
52
- return Readable.from(Object.entries(this.data[table] || {}).slice(0, limit))
50
+ streamEntries(table: string, limit?: number): Pipeline<KeyValueDBTuple> {
51
+ return Pipeline.fromArray(Object.entries(this.data[table] || {}).slice(0, limit))
53
52
  }
54
53
 
55
54
  async count(table: string): Promise<number> {
@@ -1,5 +1,5 @@
1
1
  import type { Integer, UnixTimestamp } from '@naturalcycles/js-lib/types'
2
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream'
2
+ import type { Pipeline } from '@naturalcycles/nodejs-lib/stream'
3
3
  import type { CommonDBCreateOptions } from '../db.model.js'
4
4
 
5
5
  /**
@@ -39,9 +39,9 @@ export interface CommonKeyValueDB {
39
39
  opt?: CommonKeyValueDBSaveBatchOptions,
40
40
  ) => Promise<void>
41
41
 
42
- streamIds: (table: string, limit?: number) => ReadableTyped<string>
43
- streamValues: (table: string, limit?: number) => ReadableTyped<Buffer>
44
- streamEntries: (table: string, limit?: number) => ReadableTyped<KeyValueDBTuple>
42
+ streamIds: (table: string, limit?: number) => Pipeline<string>
43
+ streamValues: (table: string, limit?: number) => Pipeline<Buffer>
44
+ streamEntries: (table: string, limit?: number) => Pipeline<KeyValueDBTuple>
45
45
 
46
46
  count: (table: string) => Promise<number>
47
47
 
@@ -1,8 +1,8 @@
1
1
  import { AppError } from '@naturalcycles/js-lib/error/error.util.js'
2
2
  import type { CommonLogger } from '@naturalcycles/js-lib/log'
3
3
  import { pMap } from '@naturalcycles/js-lib/promise/pMap.js'
4
- import type { KeyValueTuple } from '@naturalcycles/js-lib/types'
5
- import type { ReadableTyped } from '@naturalcycles/nodejs-lib/stream'
4
+ import { type KeyValueTuple, SKIP } from '@naturalcycles/js-lib/types'
5
+ import type { Pipeline } from '@naturalcycles/nodejs-lib/stream'
6
6
  import { deflateString, inflateToString } from '@naturalcycles/nodejs-lib/zip'
7
7
  import type { CommonDaoLogLevel } from '../commondao/common.dao.model.js'
8
8
  import type { CommonDBCreateOptions } from '../db.model.js'
@@ -159,51 +159,47 @@ export class CommonKeyValueDao<K extends string = string, V = Buffer> {
159
159
  await this.cfg.db.deleteByIds(this.cfg.table, [id])
160
160
  }
161
161
 
162
- streamIds(limit?: number): ReadableTyped<K> {
163
- return this.cfg.db.streamIds(this.cfg.table, limit) as ReadableTyped<K>
162
+ streamIds(limit?: number): Pipeline<K> {
163
+ return this.cfg.db.streamIds(this.cfg.table, limit) as Pipeline<K>
164
164
  }
165
165
 
166
- streamValues(limit?: number): ReadableTyped<V> {
166
+ streamValues(limit?: number): Pipeline<V> {
167
167
  const { transformer } = this.cfg
168
168
 
169
169
  if (!transformer) {
170
- return this.cfg.db.streamValues(this.cfg.table, limit) as ReadableTyped<V>
170
+ return this.cfg.db.streamValues(this.cfg.table, limit) as Pipeline<V>
171
171
  }
172
172
 
173
- return this.cfg.db.streamValues(this.cfg.table, limit).flatMap(
173
+ return this.cfg.db.streamValues(this.cfg.table, limit).map(
174
174
  async buf => {
175
175
  try {
176
- return [await transformer.bufferToValue(buf)]
176
+ return await transformer.bufferToValue(buf)
177
177
  } catch (err) {
178
178
  this.cfg.logger.error(err)
179
- return [] // SKIP
179
+ return SKIP
180
180
  }
181
181
  },
182
- {
183
- concurrency: 32,
184
- },
182
+ { concurrency: 32 },
185
183
  )
186
184
  }
187
185
 
188
- streamEntries(limit?: number): ReadableTyped<KeyValueTuple<K, V>> {
186
+ streamEntries(limit?: number): Pipeline<KeyValueTuple<K, V>> {
189
187
  const { transformer } = this.cfg
190
188
 
191
189
  if (!transformer) {
192
- return this.cfg.db.streamEntries(this.cfg.table, limit) as ReadableTyped<KeyValueTuple<K, V>>
190
+ return this.cfg.db.streamEntries(this.cfg.table, limit) as Pipeline<KeyValueTuple<K, V>>
193
191
  }
194
192
 
195
- return this.cfg.db.streamEntries(this.cfg.table, limit).flatMap(
193
+ return this.cfg.db.streamEntries(this.cfg.table, limit).map(
196
194
  async ([id, buf]) => {
197
195
  try {
198
- return [[id as K, await transformer.bufferToValue(buf)]]
196
+ return [id as K, await transformer.bufferToValue(buf)]
199
197
  } catch (err) {
200
198
  this.cfg.logger.error(err)
201
- return [] // SKIP
199
+ return SKIP
202
200
  }
203
201
  },
204
- {
205
- concurrency: 32,
206
- },
202
+ { concurrency: 32 },
207
203
  )
208
204
  }
209
205
 
@@ -5,10 +5,9 @@ import type { AsyncMapper, StringMap, UnixTimestamp } from '@naturalcycles/js-li
5
5
  import { _passthroughMapper } from '@naturalcycles/js-lib/types'
6
6
  import { boldWhite, dimWhite, grey, yellow } from '@naturalcycles/nodejs-lib/colors'
7
7
  import { fs2 } from '@naturalcycles/nodejs-lib/fs2'
8
- import {
9
- Pipeline,
10
- type TransformLogProgressOptions,
11
- type TransformMapOptions,
8
+ import type {
9
+ TransformLogProgressOptions,
10
+ TransformMapOptions,
12
11
  } from '@naturalcycles/nodejs-lib/stream'
13
12
  import { NDJsonStats } from '@naturalcycles/nodejs-lib/stream'
14
13
  import type { CommonDB } from '../commondb/common.db.js'
@@ -205,7 +204,8 @@ export async function dbPipelineBackup(opt: DBPipelineBackupOptions): Promise<ND
205
204
  console.log(`>> ${grey(schemaFilePath)} saved (generated from DB)`)
206
205
  }
207
206
 
208
- await Pipeline.from(db.streamQuery(q))
207
+ await db
208
+ .streamQuery(q)
209
209
  .logProgress({
210
210
  ...opt,
211
211
  logEvery: logEveryPerTable[table] ?? opt.logEvery ?? 1000,
@@ -217,7 +217,7 @@ export async function dbPipelineBackup(opt: DBPipelineBackupOptions): Promise<ND
217
217
  metric: table,
218
218
  })
219
219
  .flattenIfNeeded()
220
- .tap(() => rows++)
220
+ .tapSync(() => rows++)
221
221
  .toNDJsonFile(filePath)
222
222
 
223
223
  const { size: sizeBytes } = await fs2.statAsync(filePath)
@@ -4,10 +4,9 @@ import { pMap } from '@naturalcycles/js-lib/promise/pMap.js'
4
4
  import type { AsyncMapper, BaseDBEntity, UnixTimestamp } from '@naturalcycles/js-lib/types'
5
5
  import { _passthroughMapper } from '@naturalcycles/js-lib/types'
6
6
  import { boldWhite, dimWhite, grey, yellow } from '@naturalcycles/nodejs-lib/colors'
7
- import {
8
- Pipeline,
9
- type TransformLogProgressOptions,
10
- type TransformMapOptions,
7
+ import type {
8
+ TransformLogProgressOptions,
9
+ TransformMapOptions,
11
10
  } from '@naturalcycles/nodejs-lib/stream'
12
11
  import { NDJsonStats } from '@naturalcycles/nodejs-lib/stream'
13
12
  import type { CommonDB } from '../commondb/common.db.js'
@@ -123,12 +122,11 @@ export async function dbPipelineCopy(opt: DBPipelineCopyOptions): Promise<NDJson
123
122
  const saveOptions: CommonDBSaveOptions<any> = saveOptionsPerTable[table] || {}
124
123
  const mapper = mapperPerTable[table] || _passthroughMapper
125
124
 
126
- const stream = dbInput.streamQuery(q)
127
-
128
125
  const started = Date.now()
129
126
  let rows = 0
130
127
 
131
- await Pipeline.from(stream)
128
+ await dbInput
129
+ .streamQuery(q)
132
130
  .logProgress({
133
131
  logEvery: 1000,
134
132
  ...opt,
@@ -140,7 +138,7 @@ export async function dbPipelineCopy(opt: DBPipelineCopyOptions): Promise<NDJson
140
138
  metric: table,
141
139
  })
142
140
  .flattenIfNeeded()
143
- .tap(() => rows++)
141
+ .tapSync(() => rows++)
144
142
  .chunk(chunkSize)
145
143
  .forEach(async dbms => {
146
144
  await dbOutput.saveBatch(table, dbms, saveOptions)
@@ -187,7 +187,7 @@ export async function dbPipelineRestore(opt: DBPipelineRestoreOptions): Promise<
187
187
 
188
188
  await Pipeline.fromNDJsonFile<BaseDBEntity>(filePath)
189
189
  .limitSource(limit)
190
- .tap(() => rows++)
190
+ .tapSync(() => rows++)
191
191
  .logProgress({
192
192
  logEvery: 1000,
193
193
  ...opt,