@naturalcycles/db-lib 8.57.0 → 8.58.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  /// <reference types="node" />
2
2
  import { ZlibOptions } from 'node:zlib';
3
- import { AsyncMapper, ErrorMode } from '@naturalcycles/js-lib';
3
+ import { AsyncMapper, ErrorMode, UnixTimestampNumber, StringMap } from '@naturalcycles/js-lib';
4
4
  import { NDJsonStats, TransformLogProgressOptions, TransformMapOptions } from '@naturalcycles/nodejs-lib';
5
5
  import { CommonDB } from '../common.db';
6
6
  export interface DBPipelineBackupOptions extends TransformLogProgressOptions {
@@ -33,10 +33,13 @@ export interface DBPipelineBackupOptions extends TransformLogProgressOptions {
33
33
  limit?: number;
34
34
  /**
35
35
  * If set - will do "incremental backup" (not full), only for entities that updated >= `sinceUpdated`
36
- *
37
- * @default undefined
38
36
  */
39
- sinceUpdated?: number;
37
+ sinceUpdated?: UnixTimestampNumber;
38
+ /**
39
+ * Maps each table to a `sinceUpdated` timestamp, or `undefined`.
40
+ * If set - will do "incremental backup" (not full), only for entities that updated >= `sinceUpdated` (on a per-table basis)
41
+ */
42
+ sinceUpdatedPerTable?: StringMap<UnixTimestampNumber>;
40
43
  /**
41
44
  * Directory path to store dumped files. Will create `${tableName}.ndjson` (or .ndjson.gz if gzip=true) files.
42
45
  * All parent directories will be created.
@@ -63,7 +66,7 @@ export interface DBPipelineBackupOptions extends TransformLogProgressOptions {
63
66
  * @default `{}`
64
67
  * Default mappers will be "passthroughMapper" (pass all data as-is).
65
68
  */
66
- mapperPerTable?: Record<string, AsyncMapper>;
69
+ mapperPerTable?: StringMap<AsyncMapper>;
67
70
  /**
68
71
  * You can alter default `transformMapOptions` here.
69
72
  *
@@ -18,17 +18,21 @@ const index_1 = require("../index");
18
18
  * Optionally you can provide mapperPerTable and @param transformMapOptions (one for all mappers) - it will run for each table.
19
19
  */
20
20
  async function dbPipelineBackup(opt) {
21
- const { db, concurrency = 16, limit = 0, sinceUpdated, outputDirPath, protectFromOverwrite = false, zlibOptions, mapperPerTable = {}, transformMapOptions, errorMode = js_lib_1.ErrorMode.SUPPRESS, emitSchemaFromDB = false, sortObjects = false, } = opt;
21
+ const { db, concurrency = 16, limit = 0, outputDirPath, protectFromOverwrite = false, zlibOptions, mapperPerTable = {}, transformMapOptions, errorMode = js_lib_1.ErrorMode.SUPPRESS, emitSchemaFromDB = false, sortObjects = false, } = opt;
22
22
  const strict = errorMode !== js_lib_1.ErrorMode.SUPPRESS;
23
23
  const gzip = opt.gzip !== false; // default to true
24
24
  let { tables } = opt;
25
- const sinceUpdatedStr = sinceUpdated ? ' since ' + (0, nodejs_lib_1.grey)((0, js_lib_1.localTime)(sinceUpdated).toPretty()) : '';
26
- console.log(`>> ${(0, nodejs_lib_1.dimWhite)('dbPipelineBackup')} started in ${(0, nodejs_lib_1.grey)(outputDirPath)}...${sinceUpdatedStr}`);
25
+ console.log(`>> ${(0, nodejs_lib_1.dimWhite)('dbPipelineBackup')} started in ${(0, nodejs_lib_1.grey)(outputDirPath)}...`);
27
26
  (0, nodejs_lib_1._ensureDirSync)(outputDirPath);
28
27
  tables ||= await db.getTables();
29
28
  console.log(`${(0, nodejs_lib_1.yellow)(tables.length)} ${(0, nodejs_lib_1.boldWhite)('table(s)')}:\n` + tables.join('\n'));
30
29
  const statsPerTable = {};
31
30
  await (0, js_lib_1.pMap)(tables, async (table) => {
31
+ const sinceUpdated = opt.sinceUpdatedPerTable?.[table] || opt.sinceUpdated;
32
+ const sinceUpdatedStr = sinceUpdated
33
+ ? ' since ' + (0, nodejs_lib_1.grey)((0, js_lib_1.localTime)(sinceUpdated).toPretty())
34
+ : '';
35
+ console.log(`>> ${(0, nodejs_lib_1.grey)(table)}${sinceUpdatedStr}`);
32
36
  let q = index_1.DBQuery.create(table).limit(limit);
33
37
  if (sinceUpdated) {
34
38
  q = q.filter('updated', '>=', sinceUpdated);
package/package.json CHANGED
@@ -40,7 +40,7 @@
40
40
  "engines": {
41
41
  "node": ">=18.12"
42
42
  },
43
- "version": "8.57.0",
43
+ "version": "8.58.0",
44
44
  "description": "Lowest Common Denominator API to supported Databases",
45
45
  "keywords": [
46
46
  "db",
@@ -8,6 +8,8 @@ import {
8
8
  pMap,
9
9
  _passthroughMapper,
10
10
  localTime,
11
+ UnixTimestampNumber,
12
+ StringMap,
11
13
  } from '@naturalcycles/js-lib'
12
14
  import {
13
15
  NDJsonStats,
@@ -65,10 +67,14 @@ export interface DBPipelineBackupOptions extends TransformLogProgressOptions {
65
67
 
66
68
  /**
67
69
  * If set - will do "incremental backup" (not full), only for entities that updated >= `sinceUpdated`
68
- *
69
- * @default undefined
70
70
  */
71
- sinceUpdated?: number
71
+ sinceUpdated?: UnixTimestampNumber
72
+
73
+ /**
74
+ * Maps each table to a `sinceUpdated` timestamp, or `undefined`.
75
+ * If set - will do "incremental backup" (not full), only for entities that updated >= `sinceUpdated` (on a per-table basis)
76
+ */
77
+ sinceUpdatedPerTable?: StringMap<UnixTimestampNumber>
72
78
 
73
79
  /**
74
80
  * Directory path to store dumped files. Will create `${tableName}.ndjson` (or .ndjson.gz if gzip=true) files.
@@ -100,7 +106,7 @@ export interface DBPipelineBackupOptions extends TransformLogProgressOptions {
100
106
  * @default `{}`
101
107
  * Default mappers will be "passthroughMapper" (pass all data as-is).
102
108
  */
103
- mapperPerTable?: Record<string, AsyncMapper>
109
+ mapperPerTable?: StringMap<AsyncMapper>
104
110
 
105
111
  /**
106
112
  * You can alter default `transformMapOptions` here.
@@ -143,7 +149,6 @@ export async function dbPipelineBackup(opt: DBPipelineBackupOptions): Promise<ND
143
149
  db,
144
150
  concurrency = 16,
145
151
  limit = 0,
146
- sinceUpdated,
147
152
  outputDirPath,
148
153
  protectFromOverwrite = false,
149
154
  zlibOptions,
@@ -158,11 +163,7 @@ export async function dbPipelineBackup(opt: DBPipelineBackupOptions): Promise<ND
158
163
 
159
164
  let { tables } = opt
160
165
 
161
- const sinceUpdatedStr = sinceUpdated ? ' since ' + grey(localTime(sinceUpdated).toPretty()) : ''
162
-
163
- console.log(
164
- `>> ${dimWhite('dbPipelineBackup')} started in ${grey(outputDirPath)}...${sinceUpdatedStr}`,
165
- )
166
+ console.log(`>> ${dimWhite('dbPipelineBackup')} started in ${grey(outputDirPath)}...`)
166
167
 
167
168
  _ensureDirSync(outputDirPath)
168
169
 
@@ -175,6 +176,14 @@ export async function dbPipelineBackup(opt: DBPipelineBackupOptions): Promise<ND
175
176
  await pMap(
176
177
  tables,
177
178
  async table => {
179
+ const sinceUpdated = opt.sinceUpdatedPerTable?.[table] || opt.sinceUpdated
180
+
181
+ const sinceUpdatedStr = sinceUpdated
182
+ ? ' since ' + grey(localTime(sinceUpdated).toPretty())
183
+ : ''
184
+
185
+ console.log(`>> ${grey(table)}${sinceUpdatedStr}`)
186
+
178
187
  let q = DBQuery.create(table).limit(limit)
179
188
 
180
189
  if (sinceUpdated) {