@naturalcycles/db-lib 8.43.4 → 8.44.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,5 @@
1
1
  /// <reference types="node" />
2
- import { Readable } from 'stream';
2
+ import { Readable } from 'node:stream';
3
3
  import { JsonSchemaObject, JsonSchemaRootObject, ObjectWithId } from '@naturalcycles/js-lib';
4
4
  import { BaseCommonDB } from '../../base.common.db';
5
5
  import { CommonDB } from '../../common.db';
@@ -1,7 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.CacheDB = void 0;
4
- const stream_1 = require("stream");
4
+ const node_stream_1 = require("node:stream");
5
5
  const base_common_db_1 = require("../../base.common.db");
6
6
  /**
7
7
  * CommonDB implementation that proxies requests to downstream CommonDB
@@ -160,7 +160,7 @@ class CacheDB extends base_common_db_1.BaseCommonDB {
160
160
  return stream;
161
161
  }
162
162
  if (opt.skipCache || this.cfg.skipCache)
163
- return stream_1.Readable.from([]);
163
+ return node_stream_1.Readable.from([]);
164
164
  const stream = this.cfg.cacheDB.streamQuery(q, opt);
165
165
  // if (this.cfg.logCached) {
166
166
  // let count = 0
@@ -1,8 +1,8 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.LocalFilePersistencePlugin = void 0;
4
- const stream_1 = require("stream");
5
- const zlib_1 = require("zlib");
4
+ const node_stream_1 = require("node:stream");
5
+ const node_zlib_1 = require("node:zlib");
6
6
  const js_lib_1 = require("@naturalcycles/js-lib");
7
7
  const nodejs_lib_1 = require("@naturalcycles/nodejs-lib");
8
8
  const fs = require("fs-extra");
@@ -29,7 +29,7 @@ class LocalFilePersistencePlugin {
29
29
  const filePath = `${this.cfg.storagePath}/${table}.${ext}`;
30
30
  if (!(await fs.pathExists(filePath)))
31
31
  return [];
32
- const transformUnzip = this.cfg.gzip ? [(0, zlib_1.createUnzip)()] : [];
32
+ const transformUnzip = this.cfg.gzip ? [(0, node_zlib_1.createUnzip)()] : [];
33
33
  const rows = [];
34
34
  await (0, nodejs_lib_1._pipeline)([
35
35
  fs.createReadStream(filePath),
@@ -47,9 +47,9 @@ class LocalFilePersistencePlugin {
47
47
  await fs.ensureDir(this.cfg.storagePath);
48
48
  const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}`;
49
49
  const filePath = `${this.cfg.storagePath}/${table}.${ext}`;
50
- const transformZip = this.cfg.gzip ? [(0, zlib_1.createGzip)()] : [];
50
+ const transformZip = this.cfg.gzip ? [(0, node_zlib_1.createGzip)()] : [];
51
51
  await (0, nodejs_lib_1._pipeline)([
52
- stream_1.Readable.from(rows),
52
+ node_stream_1.Readable.from(rows),
53
53
  (0, nodejs_lib_1.transformToNDJson)(),
54
54
  ...transformZip,
55
55
  fs.createWriteStream(filePath),
@@ -1,8 +1,8 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.InMemoryDB = void 0;
4
- const stream_1 = require("stream");
5
- const zlib_1 = require("zlib");
4
+ const node_stream_1 = require("node:stream");
5
+ const node_zlib_1 = require("node:zlib");
6
6
  const js_lib_1 = require("@naturalcycles/js-lib");
7
7
  const nodejs_lib_1 = require("@naturalcycles/nodejs-lib");
8
8
  const colors_1 = require("@naturalcycles/nodejs-lib/dist/colors");
@@ -58,25 +58,22 @@ class InMemoryDB {
58
58
  };
59
59
  }
60
60
  async createTable(_table, _schema, opt = {}) {
61
- var _a;
62
61
  const table = this.cfg.tablesPrefix + _table;
63
62
  if (opt.dropIfExists) {
64
63
  this.data[table] = {};
65
64
  }
66
65
  else {
67
- (_a = this.data)[table] || (_a[table] = {});
66
+ this.data[table] ||= {};
68
67
  }
69
68
  }
70
69
  async getByIds(_table, ids, _opt) {
71
- var _a;
72
70
  const table = this.cfg.tablesPrefix + _table;
73
- (_a = this.data)[table] || (_a[table] = {});
71
+ this.data[table] ||= {};
74
72
  return ids.map(id => this.data[table][id]).filter(Boolean);
75
73
  }
76
74
  async saveBatch(_table, rows, opt = {}) {
77
- var _a;
78
75
  const table = this.cfg.tablesPrefix + _table;
79
- (_a = this.data)[table] || (_a[table] = {});
76
+ this.data[table] ||= {};
80
77
  rows.forEach(r => {
81
78
  if (!r.id) {
82
79
  this.cfg.logger.warn({ rows });
@@ -122,7 +119,7 @@ class InMemoryDB {
122
119
  }
123
120
  streamQuery(q, _opt) {
124
121
  const table = this.cfg.tablesPrefix + q.table;
125
- return stream_1.Readable.from((0, __1.queryInMemory)(q, Object.values(this.data[table] || {})));
122
+ return node_stream_1.Readable.from((0, __1.queryInMemory)(q, Object.values(this.data[table] || {})));
126
123
  }
127
124
  async commitTransaction(tx, opt) {
128
125
  const backup = (0, js_lib_1._deepCopy)(this.data);
@@ -156,7 +153,7 @@ class InMemoryDB {
156
153
  const { persistentStoragePath, persistZip } = this.cfg;
157
154
  const started = Date.now();
158
155
  await fs.emptyDir(persistentStoragePath);
159
- const transformZip = persistZip ? [(0, zlib_1.createGzip)()] : [];
156
+ const transformZip = persistZip ? [(0, node_zlib_1.createGzip)()] : [];
160
157
  let tables = 0;
161
158
  // infinite concurrency for now
162
159
  await (0, js_lib_1.pMap)(Object.keys(this.data), async (table) => {
@@ -166,7 +163,7 @@ class InMemoryDB {
166
163
  tables++;
167
164
  const fname = `${persistentStoragePath}/${table}.ndjson${persistZip ? '.gz' : ''}`;
168
165
  await (0, nodejs_lib_1._pipeline)([
169
- stream_1.Readable.from(rows),
166
+ node_stream_1.Readable.from(rows),
170
167
  (0, nodejs_lib_1.transformToNDJson)(),
171
168
  ...transformZip,
172
169
  fs.createWriteStream(fname),
@@ -190,7 +187,7 @@ class InMemoryDB {
190
187
  await (0, js_lib_1.pMap)(files, async (file) => {
191
188
  const fname = `${persistentStoragePath}/${file}`;
192
189
  const table = file.split('.ndjson')[0];
193
- const transformUnzip = file.endsWith('.gz') ? [(0, zlib_1.createUnzip)()] : [];
190
+ const transformUnzip = file.endsWith('.gz') ? [(0, node_zlib_1.createUnzip)()] : [];
194
191
  const rows = [];
195
192
  await (0, nodejs_lib_1._pipeline)([
196
193
  fs.createReadStream(fname),
@@ -1,7 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.InMemoryKeyValueDB = void 0;
4
- const stream_1 = require("stream");
4
+ const node_stream_1 = require("node:stream");
5
5
  class InMemoryKeyValueDB {
6
6
  constructor(cfg = {}) {
7
7
  this.cfg = cfg;
@@ -11,32 +11,28 @@ class InMemoryKeyValueDB {
11
11
  async ping() { }
12
12
  async createTable(_table, _opt) { }
13
13
  async deleteByIds(table, ids) {
14
- var _a;
15
- (_a = this.data)[table] || (_a[table] = {});
14
+ this.data[table] ||= {};
16
15
  ids.forEach(id => delete this.data[table][id]);
17
16
  }
18
17
  async getByIds(table, ids) {
19
- var _a;
20
- (_a = this.data)[table] || (_a[table] = {});
18
+ this.data[table] ||= {};
21
19
  return ids.map(id => [id, this.data[table][id]]).filter(e => e[1]);
22
20
  }
23
21
  async saveBatch(table, entries) {
24
- var _a;
25
- (_a = this.data)[table] || (_a[table] = {});
22
+ this.data[table] ||= {};
26
23
  entries.forEach(([id, buf]) => (this.data[table][id] = buf));
27
24
  }
28
25
  streamIds(table, limit) {
29
- return stream_1.Readable.from(Object.keys(this.data[table] || {}).slice(0, limit));
26
+ return node_stream_1.Readable.from(Object.keys(this.data[table] || {}).slice(0, limit));
30
27
  }
31
28
  streamValues(table, limit) {
32
- return stream_1.Readable.from(Object.values(this.data[table] || {}).slice(0, limit));
29
+ return node_stream_1.Readable.from(Object.values(this.data[table] || {}).slice(0, limit));
33
30
  }
34
31
  streamEntries(table, limit) {
35
- return stream_1.Readable.from(Object.entries(this.data[table] || {}).slice(0, limit));
32
+ return node_stream_1.Readable.from(Object.entries(this.data[table] || {}).slice(0, limit));
36
33
  }
37
34
  async count(table) {
38
- var _a;
39
- (_a = this.data)[table] || (_a[table] = {});
35
+ this.data[table] ||= {};
40
36
  return Object.keys(this.data[table]).length;
41
37
  }
42
38
  }
@@ -1,7 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.BaseCommonDB = void 0;
4
- const stream_1 = require("stream");
4
+ const node_stream_1 = require("node:stream");
5
5
  const dbTransaction_util_1 = require("./transaction/dbTransaction.util");
6
6
  /**
7
7
  * No-op implementation of CommonDB interface.
@@ -39,7 +39,7 @@ class BaseCommonDB {
39
39
  }
40
40
  async saveBatch(_table, _rows, _opt) { }
41
41
  streamQuery(_q) {
42
- return stream_1.Readable.from([]);
42
+ return node_stream_1.Readable.from([]);
43
43
  }
44
44
  /**
45
45
  * Naive implementation.
@@ -20,7 +20,6 @@ const isCI = !!process.env['CI'];
20
20
  */
21
21
  class CommonDao {
22
22
  constructor(cfg) {
23
- var _a;
24
23
  this.cfg = cfg;
25
24
  this.tx = {
26
25
  save: async (bm, opt = {}) => {
@@ -91,10 +90,10 @@ class CommonDao {
91
90
  };
92
91
  if (this.cfg.createId) {
93
92
  (0, js_lib_1._assert)(this.cfg.idType === 'string', 'db-lib: automatic generation of non-string ids is not supported');
94
- (_a = this.cfg.hooks).createId || (_a.createId = () => (0, nodejs_lib_1.stringId)());
93
+ this.cfg.hooks.createRandomId ||= () => (0, nodejs_lib_1.stringId)();
95
94
  }
96
95
  else {
97
- delete this.cfg.hooks.createId;
96
+ delete this.cfg.hooks.createRandomId;
98
97
  }
99
98
  }
100
99
  // CREATE
@@ -333,7 +332,7 @@ class CommonDao {
333
332
  q.table = opt.table || q.table;
334
333
  opt.skipValidation = opt.skipValidation !== false; // default true
335
334
  opt.skipConversion = opt.skipConversion !== false; // default true
336
- opt.errorMode || (opt.errorMode = js_lib_1.ErrorMode.SUPPRESS);
335
+ opt.errorMode ||= js_lib_1.ErrorMode.SUPPRESS;
337
336
  const partialQuery = !!q._selectedFieldNames;
338
337
  const op = `streamQueryForEach(${q.pretty()})`;
339
338
  const started = this.logStarted(op, q.table, true);
@@ -367,7 +366,7 @@ class CommonDao {
367
366
  q.table = opt.table || q.table;
368
367
  opt.skipValidation = opt.skipValidation !== false; // default true
369
368
  opt.skipConversion = opt.skipConversion !== false; // default true
370
- opt.errorMode || (opt.errorMode = js_lib_1.ErrorMode.SUPPRESS);
369
+ opt.errorMode ||= js_lib_1.ErrorMode.SUPPRESS;
371
370
  const partialQuery = !!q._selectedFieldNames;
372
371
  const op = `streamQueryAsDBMForEach(${q.pretty()})`;
373
372
  const started = this.logStarted(op, q.table, true);
@@ -402,12 +401,14 @@ class CommonDao {
402
401
  q.table = opt.table || q.table;
403
402
  opt.skipValidation = opt.skipValidation !== false; // default true
404
403
  opt.skipConversion = opt.skipConversion !== false; // default true
405
- opt.errorMode || (opt.errorMode = js_lib_1.ErrorMode.SUPPRESS);
404
+ opt.errorMode ||= js_lib_1.ErrorMode.SUPPRESS;
406
405
  const partialQuery = !!q._selectedFieldNames;
407
406
  const stream = this.cfg.db.streamQuery(q, opt);
408
407
  if (partialQuery || opt.raw)
409
408
  return stream;
410
- return stream.pipe((0, nodejs_lib_1.transformMapSimple)(dbm => this.anyToDBM(dbm, opt), {
409
+ return stream
410
+ .on('error', err => stream.emit('error', err))
411
+ .pipe((0, nodejs_lib_1.transformMapSimple)(dbm => this.anyToDBM(dbm, opt), {
411
412
  errorMode: js_lib_1.ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
412
413
  }));
413
414
  }
@@ -424,7 +425,7 @@ class CommonDao {
424
425
  q.table = opt.table || q.table;
425
426
  opt.skipValidation = opt.skipValidation !== false; // default true
426
427
  opt.skipConversion = opt.skipConversion !== false; // default true
427
- opt.errorMode || (opt.errorMode = js_lib_1.ErrorMode.SUPPRESS);
428
+ opt.errorMode ||= js_lib_1.ErrorMode.SUPPRESS;
428
429
  const stream = this.cfg.db.streamQuery(q, opt);
429
430
  const partialQuery = !!q._selectedFieldNames;
430
431
  if (partialQuery || opt.raw)
@@ -433,6 +434,7 @@ class CommonDao {
433
434
  // optimization: 1 validation is enough
434
435
  // .pipe(transformMap<any, DBM>(dbm => this.anyToDBM(dbm, opt), safeOpt))
435
436
  // .pipe(transformMap<DBM, Saved<BM>>(dbm => this.dbmToBM(dbm, opt), safeOpt))
437
+ .on('error', err => stream.emit('error', err))
436
438
  .pipe((0, nodejs_lib_1.transformMap)(async (dbm) => await this.dbmToBM(dbm, opt), {
437
439
  errorMode: js_lib_1.ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
438
440
  }))
@@ -448,14 +450,18 @@ class CommonDao {
448
450
  }
449
451
  streamQueryIds(q, opt = {}) {
450
452
  q.table = opt.table || q.table;
451
- opt.errorMode || (opt.errorMode = js_lib_1.ErrorMode.SUPPRESS);
452
- return this.cfg.db.streamQuery(q.select(['id']), opt).pipe((0, nodejs_lib_1.transformMapSimple)(objectWithId => objectWithId.id, {
453
+ opt.errorMode ||= js_lib_1.ErrorMode.SUPPRESS;
454
+ const stream = this.cfg.db
455
+ .streamQuery(q.select(['id']), opt)
456
+ .on('error', err => stream.emit('error', err))
457
+ .pipe((0, nodejs_lib_1.transformMapSimple)(objectWithId => objectWithId.id, {
453
458
  errorMode: js_lib_1.ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
454
459
  }));
460
+ return stream;
455
461
  }
456
462
  async streamQueryIdsForEach(q, mapper, opt = {}) {
457
463
  q.table = opt.table || q.table;
458
- opt.errorMode || (opt.errorMode = js_lib_1.ErrorMode.SUPPRESS);
464
+ opt.errorMode ||= js_lib_1.ErrorMode.SUPPRESS;
459
465
  const op = `streamQueryIdsForEach(${q.pretty()})`;
460
466
  const started = this.logStarted(op, q.table, true);
461
467
  let count = 0;
@@ -479,12 +485,13 @@ class CommonDao {
479
485
  }
480
486
  }
481
487
  assignIdCreatedUpdated(obj, opt = {}) {
482
- var _a;
483
488
  const now = Math.floor(Date.now() / 1000);
484
- obj.id || (obj.id = this.cfg.hooks.createId?.(obj));
489
+ if (this.cfg.createId) {
490
+ obj.id ||= this.cfg.hooks.createNaturalId?.(obj) || this.cfg.hooks.createRandomId();
491
+ }
485
492
  if (this.cfg.created) {
486
493
  ;
487
- (_a = obj)['created'] || (_a['created'] = obj['updated'] || now);
494
+ obj['created'] ||= obj['updated'] || now;
488
495
  }
489
496
  if (this.cfg.updated) {
490
497
  ;
@@ -3,7 +3,12 @@ import { AjvSchema, AjvValidationError, JoiValidationError, ObjectSchemaTyped, T
3
3
  import { CommonDB } from '../common.db';
4
4
  import { CommonDBCreateOptions, CommonDBOptions, CommonDBSaveOptions } from '../db.model';
5
5
  export interface CommonDaoHooks<BM extends Partial<ObjectWithId<ID>>, DBM extends ObjectWithId<ID>, TM, ID extends string | number> {
6
- createId: (obj: DBM | BM) => ID;
6
+ createRandomId: () => ID;
7
+ /**
8
+ * createNaturalId hook is called (tried) first.
9
+ * If it doesn't exist - createRandomId is called.
10
+ */
11
+ createNaturalId: (obj: DBM | BM) => ID;
7
12
  parseNaturalId: (id: ID) => Partial<DBM>;
8
13
  beforeCreate: (bm: Partial<BM>) => Partial<BM>;
9
14
  beforeDBMValidate: (dbm: Partial<DBM>) => Partial<DBM>;
@@ -210,4 +215,4 @@ export interface CommonDaoStreamOptions extends CommonDaoOptions {
210
215
  */
211
216
  errorMode?: ErrorMode;
212
217
  }
213
- export declare type CommonDaoCreateOptions = CommonDBCreateOptions;
218
+ export type CommonDaoCreateOptions = CommonDBCreateOptions;
@@ -7,7 +7,7 @@ import { ObjectWithId } from '@naturalcycles/js-lib';
7
7
  *
8
8
  * Default is Upsert.
9
9
  */
10
- export declare type CommonDBSaveMethod = 'upsert' | 'insert' | 'update';
10
+ export type CommonDBSaveMethod = 'upsert' | 'insert' | 'update';
11
11
  export interface CommonDBOptions {
12
12
  }
13
13
  /**
@@ -27,7 +27,7 @@ export interface CommonDBSaveOptions<ROW extends Partial<ObjectWithId> = any> ex
27
27
  */
28
28
  assignGeneratedIds?: boolean;
29
29
  }
30
- export declare type CommonDBStreamOptions = CommonDBOptions;
30
+ export type CommonDBStreamOptions = CommonDBOptions;
31
31
  export interface CommonDBCreateOptions extends CommonDBOptions {
32
32
  /**
33
33
  * Caution! If set to true - will actually DROP the table!
@@ -40,7 +40,7 @@ export interface RunQueryResult<T> {
40
40
  rows: T[];
41
41
  endCursor?: string;
42
42
  }
43
- export declare type DBOperation = DBSaveBatchOperation | DBDeleteByIdsOperation;
43
+ export type DBOperation = DBSaveBatchOperation | DBDeleteByIdsOperation;
44
44
  export interface DBSaveBatchOperation<ROW extends Partial<ObjectWithId> = any> {
45
45
  type: 'saveBatch';
46
46
  table: string;
@@ -1,7 +1,7 @@
1
1
  /// <reference types="node" />
2
2
  import { ReadableTyped } from '@naturalcycles/nodejs-lib';
3
3
  import { CommonDBCreateOptions } from '../db.model';
4
- export declare type KeyValueDBTuple = [key: string, value: Buffer];
4
+ export type KeyValueDBTuple = [key: string, value: Buffer];
5
5
  /**
6
6
  * Common interface for Key-Value database implementations.
7
7
  *
@@ -103,17 +103,25 @@ class CommonKeyValueDao {
103
103
  }
104
104
  // todo: consider it when readableMap supports `errorMode: SUPPRESS`
105
105
  // readableMap(this.cfg.db.streamValues(this.cfg.table, limit), async buf => await this.cfg.hooks!.mapBufferToValue(buf))
106
- return this.cfg.db.streamValues(this.cfg.table, limit).pipe((0, nodejs_lib_1.transformMap)(async (buf) => await this.cfg.hooks.mapBufferToValue(buf), {
106
+ const stream = this.cfg.db
107
+ .streamValues(this.cfg.table, limit)
108
+ .on('error', err => stream.emit('error', err))
109
+ .pipe((0, nodejs_lib_1.transformMap)(async (buf) => await this.cfg.hooks.mapBufferToValue(buf), {
107
110
  errorMode: js_lib_1.ErrorMode.SUPPRESS, // cause .pipe cannot propagate errors
108
111
  }));
112
+ return stream;
109
113
  }
110
114
  streamEntries(limit) {
111
115
  if (!this.cfg.hooks?.mapBufferToValue) {
112
116
  return this.cfg.db.streamEntries(this.cfg.table, limit);
113
117
  }
114
- return this.cfg.db.streamEntries(this.cfg.table, limit).pipe((0, nodejs_lib_1.transformMap)(async ([id, buf]) => [id, await this.cfg.hooks.mapBufferToValue(buf)], {
118
+ const stream = this.cfg.db
119
+ .streamEntries(this.cfg.table, limit)
120
+ .on('error', err => stream.emit('error', err))
121
+ .pipe((0, nodejs_lib_1.transformMap)(async ([id, buf]) => [id, await this.cfg.hooks.mapBufferToValue(buf)], {
115
122
  errorMode: js_lib_1.ErrorMode.SUPPRESS, // cause .pipe cannot propagate errors
116
123
  }));
124
+ return stream;
117
125
  }
118
126
  }
119
127
  exports.CommonKeyValueDao = CommonKeyValueDao;
@@ -1,5 +1,5 @@
1
1
  /// <reference types="node" />
2
- import { ZlibOptions } from 'zlib';
2
+ import { ZlibOptions } from 'node:zlib';
3
3
  import { AsyncMapper, ErrorMode } from '@naturalcycles/js-lib';
4
4
  import { NDJsonStats, TransformLogProgressOptions, TransformMapOptions } from '@naturalcycles/nodejs-lib';
5
5
  import { CommonDB } from '../common.db';
@@ -1,7 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.dbPipelineBackup = void 0;
4
- const zlib_1 = require("zlib");
4
+ const node_zlib_1 = require("node:zlib");
5
5
  const js_lib_1 = require("@naturalcycles/js-lib");
6
6
  const nodejs_lib_1 = require("@naturalcycles/nodejs-lib");
7
7
  const colors_1 = require("@naturalcycles/nodejs-lib/dist/colors");
@@ -65,7 +65,7 @@ async function dbPipelineBackup(opt) {
65
65
  rows++;
66
66
  }),
67
67
  (0, nodejs_lib_1.transformToNDJson)({ strict, sortObjects }),
68
- ...(gzip ? [(0, zlib_1.createGzip)(zlibOptions)] : []),
68
+ ...(gzip ? [(0, node_zlib_1.createGzip)(zlibOptions)] : []),
69
69
  fs.createWriteStream(filePath),
70
70
  ]);
71
71
  const { size: sizeBytes } = await fs.stat(filePath);
@@ -1,7 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.dbPipelineRestore = void 0;
4
- const zlib_1 = require("zlib");
4
+ const node_zlib_1 = require("node:zlib");
5
5
  const js_lib_1 = require("@naturalcycles/js-lib");
6
6
  const nodejs_lib_1 = require("@naturalcycles/nodejs-lib");
7
7
  const colors_1 = require("@naturalcycles/nodejs-lib/dist/colors");
@@ -68,7 +68,7 @@ async function dbPipelineRestore(opt) {
68
68
  console.log(`<< ${(0, colors_1.grey)(filePath)} ${(0, colors_1.dimWhite)((0, js_lib_1._hb)(sizeBytes))} started...`);
69
69
  await (0, nodejs_lib_1._pipeline)([
70
70
  fs.createReadStream(filePath),
71
- ...(gzip ? [(0, zlib_1.createUnzip)()] : []),
71
+ ...(gzip ? [(0, node_zlib_1.createUnzip)()] : []),
72
72
  (0, nodejs_lib_1.transformSplit)(),
73
73
  (0, nodejs_lib_1.transformJsonParse)({ strict }),
74
74
  (0, nodejs_lib_1.transformTap)(() => rows++),
@@ -22,7 +22,7 @@ import { RunQueryResult } from '../db.model';
22
22
  *
23
23
  * You may also look at queryInMemory() for its implementation (it implements all those).
24
24
  */
25
- export declare type DBQueryFilterOperator = '<' | '<=' | '==' | '!=' | '>=' | '>' | 'in' | 'not-in' | 'array-contains' | 'array-contains-any';
25
+ export type DBQueryFilterOperator = '<' | '<=' | '==' | '!=' | '>=' | '>' | 'in' | 'not-in' | 'array-contains' | 'array-contains-any';
26
26
  export declare const dbQueryFilterOperatorValues: DBQueryFilterOperator[];
27
27
  export interface DBQueryFilter<ROW extends ObjectWithId = AnyObjectWithId> {
28
28
  name: keyof ROW;
@@ -7,7 +7,7 @@ export interface CommonTimeSeriesDaoCfg {
7
7
  * Second number: value
8
8
  * null in the second position means "absence of value" (may exist in _RAW table)
9
9
  */
10
- export declare type TimeSeriesDataPoint = [number, number | null];
10
+ export type TimeSeriesDataPoint = [number, number | null];
11
11
  export interface TimeSeriesRow {
12
12
  id: number;
13
13
  ts: number;
package/package.json CHANGED
@@ -6,7 +6,7 @@
6
6
  "dependencies": {
7
7
  "@naturalcycles/js-lib": "^14.0.0",
8
8
  "@naturalcycles/nodejs-lib": "^12.0.0",
9
- "fs-extra": "^10.0.0"
9
+ "fs-extra": "^11.1.0"
10
10
  },
11
11
  "devDependencies": {
12
12
  "@naturalcycles/bench-lib": "^1.0.0",
@@ -41,7 +41,7 @@
41
41
  "engines": {
42
42
  "node": ">=14.15"
43
43
  },
44
- "version": "8.43.4",
44
+ "version": "8.44.0",
45
45
  "description": "Lowest Common Denominator API to supported Databases",
46
46
  "keywords": [
47
47
  "db",
@@ -1,4 +1,4 @@
1
- import { Readable } from 'stream'
1
+ import { Readable } from 'node:stream'
2
2
  import {
3
3
  JsonSchemaObject,
4
4
  JsonSchemaRootObject,
@@ -1,5 +1,5 @@
1
- import { Readable } from 'stream'
2
- import { createGzip, createUnzip } from 'zlib'
1
+ import { Readable } from 'node:stream'
2
+ import { createGzip, createUnzip } from 'node:zlib'
3
3
  import { pMap, ObjectWithId } from '@naturalcycles/js-lib'
4
4
  import {
5
5
  transformJsonParse,
@@ -1,5 +1,5 @@
1
- import { Readable } from 'stream'
2
- import { createGzip, createUnzip } from 'zlib'
1
+ import { Readable } from 'node:stream'
2
+ import { createGzip, createUnzip } from 'node:zlib'
3
3
  import {
4
4
  generateJsonSchemaFromData,
5
5
  JsonSchemaObject,
@@ -1,4 +1,4 @@
1
- import { Readable } from 'stream'
1
+ import { Readable } from 'node:stream'
2
2
  import { StringMap } from '@naturalcycles/js-lib'
3
3
  import { ReadableTyped } from '@naturalcycles/nodejs-lib'
4
4
  import { CommonDBCreateOptions } from '../../db.model'
@@ -1,4 +1,4 @@
1
- import { Readable } from 'stream'
1
+ import { Readable } from 'node:stream'
2
2
  import { JsonSchemaObject, JsonSchemaRootObject, ObjectWithId } from '@naturalcycles/js-lib'
3
3
  import { ReadableTyped } from '@naturalcycles/nodejs-lib'
4
4
  import { CommonDB } from './common.db'
@@ -16,7 +16,12 @@ export interface CommonDaoHooks<
16
16
  TM,
17
17
  ID extends string | number,
18
18
  > {
19
- createId: (obj: DBM | BM) => ID
19
+ createRandomId: () => ID
20
+ /**
21
+ * createNaturalId hook is called (tried) first.
22
+ * If it doesn't exist - createRandomId is called.
23
+ */
24
+ createNaturalId: (obj: DBM | BM) => ID
20
25
  parseNaturalId: (id: ID) => Partial<DBM>
21
26
  beforeCreate: (bm: Partial<BM>) => Partial<BM>
22
27
  beforeDBMValidate: (dbm: Partial<DBM>) => Partial<DBM>
@@ -106,9 +106,9 @@ export class CommonDao<
106
106
  'db-lib: automatic generation of non-string ids is not supported',
107
107
  )
108
108
 
109
- this.cfg.hooks!.createId ||= () => stringId() as ID
109
+ this.cfg.hooks!.createRandomId ||= () => stringId() as ID
110
110
  } else {
111
- delete this.cfg.hooks!.createId
111
+ delete this.cfg.hooks!.createRandomId
112
112
  }
113
113
  }
114
114
 
@@ -498,11 +498,13 @@ export class CommonDao<
498
498
  const stream = this.cfg.db.streamQuery<DBM>(q, opt)
499
499
  if (partialQuery || opt.raw) return stream
500
500
 
501
- return stream.pipe(
502
- transformMapSimple<any, DBM>(dbm => this.anyToDBM(dbm, opt), {
503
- errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
504
- }),
505
- )
501
+ return stream
502
+ .on('error', err => stream.emit('error', err))
503
+ .pipe(
504
+ transformMapSimple<any, DBM>(dbm => this.anyToDBM(dbm, opt), {
505
+ errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
506
+ }),
507
+ )
506
508
  }
507
509
 
508
510
  /**
@@ -529,6 +531,7 @@ export class CommonDao<
529
531
  // optimization: 1 validation is enough
530
532
  // .pipe(transformMap<any, DBM>(dbm => this.anyToDBM(dbm, opt), safeOpt))
531
533
  // .pipe(transformMap<DBM, Saved<BM>>(dbm => this.dbmToBM(dbm, opt), safeOpt))
534
+ .on('error', err => stream.emit('error', err))
532
535
  .pipe(
533
536
  transformMap<DBM, Saved<BM>>(async dbm => await this.dbmToBM(dbm, opt), {
534
537
  errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
@@ -550,11 +553,16 @@ export class CommonDao<
550
553
  q.table = opt.table || q.table
551
554
  opt.errorMode ||= ErrorMode.SUPPRESS
552
555
 
553
- return this.cfg.db.streamQuery<DBM>(q.select(['id']), opt).pipe(
554
- transformMapSimple<DBM, ID>(objectWithId => objectWithId.id, {
555
- errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
556
- }),
557
- )
556
+ const stream: ReadableTyped<ID> = this.cfg.db
557
+ .streamQuery<DBM>(q.select(['id']), opt)
558
+ .on('error', err => stream.emit('error', err))
559
+ .pipe(
560
+ transformMapSimple<DBM, ID>(objectWithId => objectWithId.id, {
561
+ errorMode: ErrorMode.SUPPRESS, // cause .pipe() cannot propagate errors
562
+ }),
563
+ )
564
+
565
+ return stream
558
566
  }
559
567
 
560
568
  async streamQueryIdsForEach(
@@ -600,7 +608,9 @@ export class CommonDao<
600
608
  assignIdCreatedUpdated(obj: DBM | BM | Unsaved<BM>, opt: CommonDaoOptions = {}): DBM | Saved<BM> {
601
609
  const now = Math.floor(Date.now() / 1000)
602
610
 
603
- obj.id ||= this.cfg.hooks!.createId?.(obj as BM)
611
+ if (this.cfg.createId) {
612
+ obj.id ||= this.cfg.hooks!.createNaturalId?.(obj as BM) || this.cfg.hooks!.createRandomId!()
613
+ }
604
614
 
605
615
  if (this.cfg.created) {
606
616
  ;(obj as any)['created'] ||= (obj as any)['updated'] || now
@@ -163,11 +163,16 @@ export class CommonKeyValueDao<T> {
163
163
 
164
164
  // todo: consider it when readableMap supports `errorMode: SUPPRESS`
165
165
  // readableMap(this.cfg.db.streamValues(this.cfg.table, limit), async buf => await this.cfg.hooks!.mapBufferToValue(buf))
166
- return this.cfg.db.streamValues(this.cfg.table, limit).pipe(
167
- transformMap(async buf => await this.cfg.hooks!.mapBufferToValue!(buf), {
168
- errorMode: ErrorMode.SUPPRESS, // cause .pipe cannot propagate errors
169
- }),
170
- )
166
+ const stream: ReadableTyped<Buffer> = this.cfg.db
167
+ .streamValues(this.cfg.table, limit)
168
+ .on('error', err => stream.emit('error', err))
169
+ .pipe(
170
+ transformMap(async buf => await this.cfg.hooks!.mapBufferToValue!(buf), {
171
+ errorMode: ErrorMode.SUPPRESS, // cause .pipe cannot propagate errors
172
+ }),
173
+ )
174
+
175
+ return stream
171
176
  }
172
177
 
173
178
  streamEntries(limit?: number): ReadableTyped<KeyValueTuple<string, T>> {
@@ -175,10 +180,15 @@ export class CommonKeyValueDao<T> {
175
180
  return this.cfg.db.streamEntries(this.cfg.table, limit)
176
181
  }
177
182
 
178
- return this.cfg.db.streamEntries(this.cfg.table, limit).pipe(
179
- transformMap(async ([id, buf]) => [id, await this.cfg.hooks!.mapBufferToValue!(buf)], {
180
- errorMode: ErrorMode.SUPPRESS, // cause .pipe cannot propagate errors
181
- }),
182
- )
183
+ const stream: ReadableTyped<KeyValueTuple<string, T>> = this.cfg.db
184
+ .streamEntries(this.cfg.table, limit)
185
+ .on('error', err => stream.emit('error', err))
186
+ .pipe(
187
+ transformMap(async ([id, buf]) => [id, await this.cfg.hooks!.mapBufferToValue!(buf)], {
188
+ errorMode: ErrorMode.SUPPRESS, // cause .pipe cannot propagate errors
189
+ }),
190
+ )
191
+
192
+ return stream
183
193
  }
184
194
  }
@@ -1,4 +1,4 @@
1
- import { createGzip, ZlibOptions } from 'zlib'
1
+ import { createGzip, ZlibOptions } from 'node:zlib'
2
2
  import {
3
3
  AppError,
4
4
  AsyncMapper,
@@ -1,4 +1,4 @@
1
- import { createUnzip } from 'zlib'
1
+ import { createUnzip } from 'node:zlib'
2
2
  import {
3
3
  AsyncMapper,
4
4
  ErrorMode,