@naturalcycles/db-lib 10.42.0 → 10.42.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/adapter/file/file.db.d.ts +1 -1
  2. package/dist/adapter/file/file.db.js +1 -1
  3. package/dist/adapter/file/localFile.persistence.plugin.js +1 -1
  4. package/dist/cnst.js +2 -1
  5. package/dist/commondao/common.dao.d.ts +69 -7
  6. package/dist/commondao/common.dao.js +202 -66
  7. package/dist/commondao/common.dao.model.d.ts +6 -5
  8. package/dist/commondao/common.dao.model.js +2 -1
  9. package/dist/commondb/common.db.js +2 -1
  10. package/dist/db.model.js +4 -2
  11. package/dist/inmemory/inMemory.db.d.ts +1 -1
  12. package/dist/inmemory/inMemory.db.js +1 -1
  13. package/dist/inmemory/inMemoryKeyValueDB.d.ts +2 -2
  14. package/dist/kv/commonKeyValueDao.d.ts +1 -1
  15. package/dist/kv/commonKeyValueDao.js +7 -7
  16. package/dist/pipeline/dbPipelineBackup.js +1 -1
  17. package/dist/pipeline/dbPipelineCopy.js +2 -2
  18. package/dist/pipeline/dbPipelineRestore.d.ts +2 -1
  19. package/dist/pipeline/dbPipelineRestore.js +4 -4
  20. package/dist/testing/commonDBTest.js +3 -3
  21. package/dist/testing/commonDaoTest.js +4 -4
  22. package/dist/testing/test.model.d.ts +1 -1
  23. package/dist/timeseries/commonTimeSeriesDao.js +1 -1
  24. package/dist/validation/index.d.ts +0 -2
  25. package/dist/validation/index.js +2 -2
  26. package/package.json +2 -1
  27. package/src/adapter/file/file.db.ts +2 -5
  28. package/src/commondao/common.dao.model.ts +6 -5
  29. package/src/commondao/common.dao.ts +208 -68
  30. package/src/inmemory/inMemory.db.ts +2 -7
  31. package/src/kv/commonKeyValueDao.ts +2 -1
  32. package/src/pipeline/dbPipelineRestore.ts +4 -5
  33. package/src/testing/test.model.ts +2 -1
  34. package/src/validation/index.ts +4 -10
@@ -168,14 +168,15 @@ export interface CommonDaoCfg<BM extends BaseDBEntity, DBM extends BaseDBEntity
168
168
  */
169
169
  patchInTransaction?: boolean;
170
170
  /**
171
- * When specified, the listed properties will be compressed under a `data` property in the DBM.
172
- * If DBM already has a `data` property and you don't add it to the list, an error will be thrown.
173
- *
174
- * When specified with an empty `keys` list, then compression will be skipped, but all previously compressed data
175
- * will be decompressed, so the Dao can still work.
171
+ * When specified, the listed properties will be compressed into the `__compressed` property.
176
172
  *
177
173
  * Compression happens after the `beforeBMToDBM` hook and before the DBM is saved to the database.
178
174
  * Decompression happens after the DBM is loaded from the database and before the `beforeDBMToBM` hook.
175
+ *
176
+ * To migrate away from compression:
177
+ * 1. Remove this config (or set `keys` to empty array)
178
+ * 2. Add `beforeDBMToBM: CommonDao.decompressLegacyRow` to your hooks to decompress legacy data on read
179
+ * 3. Once all data has been naturally rewritten without compression, remove the hook
179
180
  */
180
181
  compress?: {
181
182
  keys: (keyof DBM)[];
@@ -1,4 +1,5 @@
1
- export var CommonDaoLogLevel;
1
+ export { CommonDaoLogLevel };
2
+ var CommonDaoLogLevel;
2
3
  (function (CommonDaoLogLevel) {
3
4
  /**
4
5
  * Same as undefined
@@ -1,4 +1,5 @@
1
- export var CommonDBType;
1
+ export { CommonDBType };
2
+ var CommonDBType;
2
3
  (function (CommonDBType) {
3
4
  CommonDBType["document"] = "document";
4
5
  CommonDBType["relational"] = "relational";
package/dist/db.model.js CHANGED
@@ -1,9 +1,11 @@
1
- export var DBRelation;
1
+ export { DBRelation };
2
+ var DBRelation;
2
3
  (function (DBRelation) {
3
4
  DBRelation["ONE_TO_ONE"] = "ONE_TO_ONE";
4
5
  DBRelation["ONE_TO_MANY"] = "ONE_TO_MANY";
5
6
  })(DBRelation || (DBRelation = {}));
6
- export var DBModelType;
7
+ export { DBModelType };
8
+ var DBModelType;
7
9
  (function (DBModelType) {
8
10
  DBModelType["DBM"] = "DBM";
9
11
  DBModelType["BM"] = "BM";
@@ -1,5 +1,5 @@
1
1
  import type { CommonLogger } from '@naturalcycles/js-lib/log';
2
- import { type AnyObjectWithId, type ObjectWithId, type StringMap } from '@naturalcycles/js-lib/types';
2
+ import type { AnyObjectWithId, ObjectWithId, StringMap } from '@naturalcycles/js-lib/types';
3
3
  import type { JsonSchema } from '@naturalcycles/nodejs-lib/ajv';
4
4
  import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
5
5
  import type { CommonDB, CommonDBSupport } from '../commondb/common.db.js';
@@ -1,7 +1,7 @@
1
1
  import { _isEmptyObject } from '@naturalcycles/js-lib';
2
2
  import { _assert } from '@naturalcycles/js-lib/error/assert.js';
3
3
  import { _deepCopy, _sortObjectDeep } from '@naturalcycles/js-lib/object';
4
- import { _stringMapEntries, _stringMapValues, } from '@naturalcycles/js-lib/types';
4
+ import { _stringMapEntries, _stringMapValues } from '@naturalcycles/js-lib/types';
5
5
  import { generateJsonSchemaFromData } from '@naturalcycles/nodejs-lib/ajv';
6
6
  import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
7
7
  import { bufferReviver } from '@naturalcycles/nodejs-lib/stream/ndjson/transformJsonParse.js';
@@ -8,8 +8,8 @@ export declare class InMemoryKeyValueDB implements CommonKeyValueDB {
8
8
  cfg: InMemoryKeyValueDBCfg;
9
9
  constructor(cfg?: InMemoryKeyValueDBCfg);
10
10
  support: {
11
- count?: boolean;
12
- increment?: boolean;
11
+ count?: boolean | undefined;
12
+ increment?: boolean | undefined;
13
13
  };
14
14
  data: StringMap<StringMap<any>>;
15
15
  ping(): Promise<void>;
@@ -1,5 +1,5 @@
1
1
  import type { CommonLogger } from '@naturalcycles/js-lib/log';
2
- import { type Integer, type KeyValueTuple } from '@naturalcycles/js-lib/types';
2
+ import type { Integer, KeyValueTuple } from '@naturalcycles/js-lib/types';
3
3
  import type { Pipeline } from '@naturalcycles/nodejs-lib/stream';
4
4
  import type { CommonDaoLogLevel } from '../commondao/common.dao.model.js';
5
5
  import type { CommonDBCreateOptions } from '../db.model.js';
@@ -7,14 +7,14 @@ import { decompressZstdOrInflateToString, deflateString, inflateToString, zstdCo
7
7
  */
8
8
  export function commonKeyValueDaoDeflatedJsonTransformer() {
9
9
  return {
10
- valueToBuffer: async (v) => await deflateString(JSON.stringify(v)),
11
- bufferToValue: async (buf) => JSON.parse(await inflateToString(buf)),
10
+ valueToBuffer: async v => await deflateString(JSON.stringify(v)),
11
+ bufferToValue: async buf => JSON.parse(await inflateToString(buf)),
12
12
  };
13
13
  }
14
14
  export function commonKeyValueDaoZstdJsonTransformer(level) {
15
15
  return {
16
- valueToBuffer: async (v) => await zstdCompress(JSON.stringify(v), level),
17
- bufferToValue: async (buf) => JSON.parse(await zstdDecompressToString(buf)),
16
+ valueToBuffer: async v => await zstdCompress(JSON.stringify(v), level),
17
+ bufferToValue: async buf => JSON.parse(await zstdDecompressToString(buf)),
18
18
  };
19
19
  }
20
20
  /**
@@ -23,8 +23,8 @@ export function commonKeyValueDaoZstdJsonTransformer(level) {
23
23
  */
24
24
  export function commonKeyValueDaoCompressedTransformer() {
25
25
  return {
26
- valueToBuffer: async (v) => await zstdCompress(JSON.stringify(v)),
27
- bufferToValue: async (buf) => JSON.parse(await decompressZstdOrInflateToString(buf)),
26
+ valueToBuffer: async v => await zstdCompress(JSON.stringify(v)),
27
+ bufferToValue: async buf => JSON.parse(await decompressZstdOrInflateToString(buf)),
28
28
  };
29
29
  }
30
30
  // todo: logging
@@ -117,7 +117,7 @@ export class CommonKeyValueDao {
117
117
  if (!transformer) {
118
118
  return this.cfg.db.streamValues(this.cfg.table, limit);
119
119
  }
120
- return this.cfg.db.streamValues(this.cfg.table, limit).map(async (buf) => {
120
+ return this.cfg.db.streamValues(this.cfg.table, limit).map(async buf => {
121
121
  try {
122
122
  return await transformer.bufferToValue(buf);
123
123
  }
@@ -23,7 +23,7 @@ export async function dbPipelineBackup(opt) {
23
23
  tables ||= await db.getTables();
24
24
  console.log(`${yellow(tables.length)} ${boldWhite('table(s)')}:\n` + tables.join('\n'));
25
25
  const statsPerTable = {};
26
- await pMap(tables, async (table) => {
26
+ await pMap(tables, async table => {
27
27
  let q = DBQuery.create(table).limit(limit);
28
28
  const sinceUpdated = opt.sinceUpdatedPerTable?.[table] ?? opt.sinceUpdated;
29
29
  if (sinceUpdated) {
@@ -19,7 +19,7 @@ export async function dbPipelineCopy(opt) {
19
19
  tables ||= await dbInput.getTables();
20
20
  console.log(`${yellow(tables.length)} ${boldWhite('table(s)')}:\n` + tables.join('\n'));
21
21
  const statsPerTable = {};
22
- await pMap(tables, async (table) => {
22
+ await pMap(tables, async table => {
23
23
  let q = DBQuery.create(table).limit(limit);
24
24
  if (sinceUpdated) {
25
25
  q = q.filter('updated', '>=', sinceUpdated);
@@ -43,7 +43,7 @@ export async function dbPipelineCopy(opt) {
43
43
  .flattenIfNeeded()
44
44
  .tapSync(() => rows++)
45
45
  .chunk(chunkSize)
46
- .forEach(async (dbms) => {
46
+ .forEach(async dbms => {
47
47
  await dbOutput.saveBatch(table, dbms, saveOptions);
48
48
  });
49
49
  const stats = NDJsonStats.create({
@@ -1,6 +1,7 @@
1
1
  import { ErrorMode } from '@naturalcycles/js-lib/error/errorMode.js';
2
2
  import type { AsyncMapper, UnixTimestamp } from '@naturalcycles/js-lib/types';
3
- import { NDJsonStats, type TransformLogProgressOptions, type TransformMapOptions } from '@naturalcycles/nodejs-lib/stream';
3
+ import { NDJsonStats } from '@naturalcycles/nodejs-lib/stream';
4
+ import type { TransformLogProgressOptions, TransformMapOptions } from '@naturalcycles/nodejs-lib/stream';
4
5
  import type { CommonDB } from '../commondb/common.db.js';
5
6
  import type { CommonDBSaveOptions } from '../db.model.js';
6
7
  export interface DBPipelineRestoreOptions extends TransformLogProgressOptions {
@@ -6,7 +6,7 @@ import { pMap } from '@naturalcycles/js-lib/promise/pMap.js';
6
6
  import { _passthroughMapper } from '@naturalcycles/js-lib/types';
7
7
  import { boldWhite, dimWhite, grey, yellow } from '@naturalcycles/nodejs-lib/colors';
8
8
  import { fs2 } from '@naturalcycles/nodejs-lib/fs2';
9
- import { NDJsonStats, Pipeline, } from '@naturalcycles/nodejs-lib/stream';
9
+ import { NDJsonStats, Pipeline } from '@naturalcycles/nodejs-lib/stream';
10
10
  /**
11
11
  * Pipeline from NDJSON files in a folder (optionally gzipped) to CommonDB.
12
12
  * Allows to define a mapper and a predicate to map/filter objects between input and output.
@@ -48,7 +48,7 @@ export async function dbPipelineRestore(opt) {
48
48
  console.log(`${yellow(tables.length)} ${boldWhite('table(s)')}:\n`, sizeStrByTable);
49
49
  // const schemaByTable: Record<string, CommonSchema> = {}
50
50
  if (recreateTables) {
51
- await pMap(tables, async (table) => {
51
+ await pMap(tables, async table => {
52
52
  const schemaFilePath = `${inputDirPath}/${table}.schema.json`;
53
53
  if (!fs2.pathExists(schemaFilePath)) {
54
54
  console.warn(`${schemaFilePath} does not exist!`);
@@ -58,7 +58,7 @@ export async function dbPipelineRestore(opt) {
58
58
  await db.createTable(table, schema, { dropIfExists: true });
59
59
  });
60
60
  }
61
- await pMap(tables, async (table) => {
61
+ await pMap(tables, async table => {
62
62
  const zst = tablesToCompress.has(table);
63
63
  const filePath = `${inputDirPath}/${table}.ndjson` + (zst ? '.zst' : '');
64
64
  const saveOptions = saveOptionsPerTable[table] || {};
@@ -82,7 +82,7 @@ export async function dbPipelineRestore(opt) {
82
82
  })
83
83
  .flattenIfNeeded()
84
84
  .chunk(chunkSize)
85
- .forEach(async (dbms) => {
85
+ .forEach(async dbms => {
86
86
  await db.saveBatch(table, dbms, saveOptions);
87
87
  });
88
88
  const stats = NDJsonStats.create({
@@ -169,7 +169,7 @@ export async function runCommonDBTest(db, quirks = {}) {
169
169
  const tables = await db.getTables();
170
170
  // console.log({ tables })
171
171
  if (support.tableSchemas) {
172
- await pMap(tables, async (table) => {
172
+ await pMap(tables, async table => {
173
173
  const schema = await db.getTableSchema(table);
174
174
  // console.log(schema)
175
175
  expect(schema.$id).toBe(`${table}.schema.json`);
@@ -227,7 +227,7 @@ export async function runCommonDBTest(db, quirks = {}) {
227
227
  // save item3 with k1: k1_mod
228
228
  // delete item2
229
229
  // remaining: item1, item3_with_k1_mod
230
- await db.runInTransaction(async (tx) => {
230
+ await db.runInTransaction(async tx => {
231
231
  await tx.saveBatch(TEST_TABLE, items);
232
232
  await tx.saveBatch(TEST_TABLE, [{ ...items[2], k1: 'k1_mod' }]);
233
233
  await tx.deleteByIds(TEST_TABLE, [items[1].id]);
@@ -252,7 +252,7 @@ export async function runCommonDBTest(db, quirks = {}) {
252
252
  const expected = await prepare();
253
253
  let err;
254
254
  try {
255
- await db.runInTransaction(async (tx) => {
255
+ await db.runInTransaction(async tx => {
256
256
  await tx.deleteByIds(TEST_TABLE, [items[2].id]);
257
257
  // It should fail on id == null
258
258
  await tx.saveBatch(TEST_TABLE, [{ ...items[0], k1: 5, id: null }]);
@@ -243,7 +243,7 @@ export async function runCommonDaoTest(db, quirks = {}) {
243
243
  await dao.query().deleteByQuery();
244
244
  // Test that id, created, updated are created
245
245
  const now = localTime.nowUnix();
246
- await dao.runInTransaction(async (tx) => {
246
+ await dao.runInTransaction(async tx => {
247
247
  const row = _omit(item1, ['id', 'created', 'updated']);
248
248
  await tx.save(dao, row);
249
249
  });
@@ -252,14 +252,14 @@ export async function runCommonDaoTest(db, quirks = {}) {
252
252
  expect(loaded[0].id).toBeDefined();
253
253
  expect(loaded[0].created).toBeGreaterThanOrEqual(now);
254
254
  expect(loaded[0].updated).toBe(loaded[0].created);
255
- await dao.runInTransaction(async (tx) => {
255
+ await dao.runInTransaction(async tx => {
256
256
  await tx.deleteById(dao, loaded[0].id);
257
257
  });
258
258
  // saveBatch [item1, 2, 3]
259
259
  // save item3 with k1: k1_mod
260
260
  // delete item2
261
261
  // remaining: item1, item3_with_k1_mod
262
- await dao.runInTransaction(async (tx) => {
262
+ await dao.runInTransaction(async tx => {
263
263
  await tx.saveBatch(dao, items);
264
264
  await tx.save(dao, { ...items[2], k1: 'k1_mod' });
265
265
  await tx.deleteById(dao, items[1].id);
@@ -304,7 +304,7 @@ export async function runCommonDaoTest(db, quirks = {}) {
304
304
  const expected = await prepare();
305
305
  let err;
306
306
  try {
307
- await dao.runInTransaction(async (tx) => {
307
+ await dao.runInTransaction(async tx => {
308
308
  await tx.deleteById(dao, items[2].id);
309
309
  await tx.save(dao, { ...items[0], k1: 5 }); // it should fail here
310
310
  });
@@ -1,5 +1,5 @@
1
1
  import type { BaseDBEntity } from '@naturalcycles/js-lib/types';
2
- import { type JsonSchemaObjectBuilder } from '@naturalcycles/nodejs-lib/ajv';
2
+ import type { JsonSchemaObjectBuilder } from '@naturalcycles/nodejs-lib/ajv';
3
3
  export declare const TEST_TABLE = "TEST_TABLE";
4
4
  export declare const TEST_TABLE_2 = "TEST_TABLE_2";
5
5
  export interface TestItemBM extends BaseDBEntity {
@@ -40,7 +40,7 @@ export class CommonTimeSeriesDao {
40
40
  async commitTransaction(ops) {
41
41
  if (!ops.length)
42
42
  return;
43
- await this.cfg.db.runInTransaction(async (tx) => {
43
+ await this.cfg.db.runInTransaction(async tx => {
44
44
  for (const op of ops) {
45
45
  const rows = op.dataPoints.map(([ts, v]) => ({
46
46
  id: String(ts), // Convert Number id into String id, as per CommonDB
@@ -1,5 +1,3 @@
1
- import type { ObjectWithId } from '@naturalcycles/js-lib/types';
2
- import { type JsonSchemaObjectBuilder } from '@naturalcycles/nodejs-lib/ajv';
3
1
  import type { CommonDBOptions } from '../db.model.js';
4
2
  export declare const commonDBOptionsSchema: () => JsonSchemaObjectBuilder<CommonDBOptions, CommonDBOptions>;
5
3
  export declare const commonDBSaveOptionsSchema: <ROW extends ObjectWithId>() => any;
@@ -1,5 +1,5 @@
1
- import { j, JsonSchemaAnyBuilder, } from '@naturalcycles/nodejs-lib/ajv';
2
- import { dbQueryFilterOperatorValues, } from '../query/dbQuery.js';
1
+ import { j, JsonSchemaAnyBuilder } from '@naturalcycles/nodejs-lib/ajv';
2
+ import { dbQueryFilterOperatorValues } from '../query/dbQuery.js';
3
3
  // oxlint-disable typescript/explicit-function-return-type
4
4
  // DBTransaction schema - validates presence without deep validation
5
5
  const dbTransactionSchema = j.object.any().castAs();
package/package.json CHANGED
@@ -1,12 +1,13 @@
1
1
  {
2
2
  "name": "@naturalcycles/db-lib",
3
3
  "type": "module",
4
- "version": "10.42.0",
4
+ "version": "10.42.2",
5
5
  "dependencies": {
6
6
  "@naturalcycles/js-lib": "^15",
7
7
  "@naturalcycles/nodejs-lib": "^15"
8
8
  },
9
9
  "devDependencies": {
10
+ "@typescript/native-preview": "7.0.0-dev.20260201.1",
10
11
  "@naturalcycles/dev-lib": "18.4.2"
11
12
  },
12
13
  "files": [
@@ -3,11 +3,8 @@ import { _by, _sortBy } from '@naturalcycles/js-lib/array'
3
3
  import { _since, localTime } from '@naturalcycles/js-lib/datetime'
4
4
  import { _assert } from '@naturalcycles/js-lib/error/assert.js'
5
5
  import { _deepEquals, _filterUndefinedValues, _sortObjectDeep } from '@naturalcycles/js-lib/object'
6
- import {
7
- _stringMapValues,
8
- type ObjectWithId,
9
- type UnixTimestampMillis,
10
- } from '@naturalcycles/js-lib/types'
6
+ import { _stringMapValues } from '@naturalcycles/js-lib/types'
7
+ import type { ObjectWithId, UnixTimestampMillis } from '@naturalcycles/js-lib/types'
11
8
  import type { JsonSchema } from '@naturalcycles/nodejs-lib/ajv'
12
9
  import { generateJsonSchemaFromData } from '@naturalcycles/nodejs-lib/ajv'
13
10
  import { dimGrey } from '@naturalcycles/nodejs-lib/colors'
@@ -207,14 +207,15 @@ export interface CommonDaoCfg<
207
207
  patchInTransaction?: boolean
208
208
 
209
209
  /**
210
- * When specified, the listed properties will be compressed under a `data` property in the DBM.
211
- * If DBM already has a `data` property and you don't add it to the list, an error will be thrown.
212
- *
213
- * When specified with an empty `keys` list, then compression will be skipped, but all previously compressed data
214
- * will be decompressed, so the Dao can still work.
210
+ * When specified, the listed properties will be compressed into the `__compressed` property.
215
211
  *
216
212
  * Compression happens after the `beforeBMToDBM` hook and before the DBM is saved to the database.
217
213
  * Decompression happens after the DBM is loaded from the database and before the `beforeDBMToBM` hook.
214
+ *
215
+ * To migrate away from compression:
216
+ * 1. Remove this config (or set `keys` to empty array)
217
+ * 2. Add `beforeDBMToBM: CommonDao.decompressLegacyRow` to your hooks to decompress legacy data on read
218
+ * 3. Once all data has been naturally rewritten without compression, remove the hook
218
219
  */
219
220
  compress?: {
220
221
  keys: (keyof DBM)[]