@naturalcycles/db-lib 10.35.0 → 10.36.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapter/file/localFile.persistence.plugin.d.ts +1 -1
- package/dist/adapter/file/localFile.persistence.plugin.js +3 -3
- package/dist/kv/commonKeyValueDao.d.ts +1 -0
- package/dist/kv/commonKeyValueDao.js +5 -1
- package/dist/pipeline/dbPipelineBackup.d.ts +2 -1
- package/dist/pipeline/dbPipelineBackup.js +2 -3
- package/dist/pipeline/dbPipelineRestore.d.ts +1 -1
- package/dist/pipeline/dbPipelineRestore.js +9 -9
- package/package.json +1 -1
- package/src/adapter/cachedb/cache.db.model.ts +1 -2
- package/src/adapter/file/localFile.persistence.plugin.ts +4 -4
- package/src/commondao/common.dao.model.ts +16 -14
- package/src/kv/commonKeyValueDao.ts +11 -1
- package/src/pipeline/dbPipelineBackup.ts +4 -3
- package/src/pipeline/dbPipelineRestore.ts +9 -9
|
@@ -8,7 +8,7 @@ export class LocalFilePersistencePlugin {
|
|
|
8
8
|
constructor(cfg = {}) {
|
|
9
9
|
this.cfg = {
|
|
10
10
|
storagePath: './tmp/localdb',
|
|
11
|
-
|
|
11
|
+
zst: true,
|
|
12
12
|
...cfg,
|
|
13
13
|
};
|
|
14
14
|
}
|
|
@@ -21,7 +21,7 @@ export class LocalFilePersistencePlugin {
|
|
|
21
21
|
}
|
|
22
22
|
async loadFile(table) {
|
|
23
23
|
await fs2.ensureDirAsync(this.cfg.storagePath);
|
|
24
|
-
const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}`;
|
|
24
|
+
const ext = `ndjson${this.cfg.zst ? '.zst' : ''}`;
|
|
25
25
|
const filePath = `${this.cfg.storagePath}/${table}.${ext}`;
|
|
26
26
|
if (!(await fs2.pathExistsAsync(filePath)))
|
|
27
27
|
return [];
|
|
@@ -32,7 +32,7 @@ export class LocalFilePersistencePlugin {
|
|
|
32
32
|
}
|
|
33
33
|
async saveFile(table, rows) {
|
|
34
34
|
await fs2.ensureDirAsync(this.cfg.storagePath);
|
|
35
|
-
const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}`;
|
|
35
|
+
const ext = `ndjson${this.cfg.zst ? '.zst' : ''}`;
|
|
36
36
|
const filePath = `${this.cfg.storagePath}/${table}.${ext}`;
|
|
37
37
|
await Pipeline.fromArray(rows).toNDJsonFile(filePath);
|
|
38
38
|
}
|
|
@@ -32,6 +32,7 @@ export interface CommonKeyValueDaoTransformer<V> {
|
|
|
32
32
|
bufferToValue: (buf: Buffer) => Promise<V>;
|
|
33
33
|
}
|
|
34
34
|
export declare const commonKeyValueDaoDeflatedJsonTransformer: CommonKeyValueDaoTransformer<any>;
|
|
35
|
+
export declare const commonKeyValueDaoZstdJsonTransformer: CommonKeyValueDaoTransformer<any>;
|
|
35
36
|
export declare class CommonKeyValueDao<K extends string = string, V = Buffer> {
|
|
36
37
|
constructor(cfg: CommonKeyValueDaoCfg<V>);
|
|
37
38
|
cfg: CommonKeyValueDaoCfg<V> & {
|
|
@@ -1,11 +1,15 @@
|
|
|
1
1
|
import { AppError } from '@naturalcycles/js-lib/error/error.util.js';
|
|
2
2
|
import { pMap } from '@naturalcycles/js-lib/promise/pMap.js';
|
|
3
3
|
import { SKIP } from '@naturalcycles/js-lib/types';
|
|
4
|
-
import { deflateString, inflateToString } from '@naturalcycles/nodejs-lib/zip';
|
|
4
|
+
import { deflateString, inflateToString, zstdCompress, zstdDecompressToString, } from '@naturalcycles/nodejs-lib/zip';
|
|
5
5
|
export const commonKeyValueDaoDeflatedJsonTransformer = {
|
|
6
6
|
valueToBuffer: async (v) => await deflateString(JSON.stringify(v)),
|
|
7
7
|
bufferToValue: async (buf) => JSON.parse(await inflateToString(buf)),
|
|
8
8
|
};
|
|
9
|
+
export const commonKeyValueDaoZstdJsonTransformer = {
|
|
10
|
+
valueToBuffer: async (v) => await zstdCompress(JSON.stringify(v)),
|
|
11
|
+
bufferToValue: async (buf) => JSON.parse(await zstdDecompressToString(buf)),
|
|
12
|
+
};
|
|
9
13
|
// todo: logging
|
|
10
14
|
// todo: readonly
|
|
11
15
|
export class CommonKeyValueDao {
|
|
@@ -60,9 +60,10 @@ export interface DBPipelineBackupOptions extends TransformLogProgressOptions {
|
|
|
60
60
|
*/
|
|
61
61
|
protectFromOverwrite?: boolean;
|
|
62
62
|
/**
|
|
63
|
+
* Compress as .zst
|
|
63
64
|
* @default true
|
|
64
65
|
*/
|
|
65
|
-
|
|
66
|
+
zst?: boolean;
|
|
66
67
|
/**
|
|
67
68
|
* Only applicable if `gzip` is enabled
|
|
68
69
|
* Currently not available.
|
|
@@ -16,8 +16,7 @@ import { DBQuery } from '../query/dbQuery.js';
|
|
|
16
16
|
* Optionally you can provide mapperPerTable and @param transformMapOptions (one for all mappers) - it will run for each table.
|
|
17
17
|
*/
|
|
18
18
|
export async function dbPipelineBackup(opt) {
|
|
19
|
-
const { db, concurrency = 16, limit = 0, outputDirPath, protectFromOverwrite = false, mapperPerTable = {}, queryPerTable = {}, logEveryPerTable = {}, transformMapOptions, errorMode = ErrorMode.SUPPRESS, emitSchemaFromDB = false, } = opt;
|
|
20
|
-
const gzip = opt.gzip !== false; // default to true
|
|
19
|
+
const { db, concurrency = 16, limit = 0, outputDirPath, protectFromOverwrite = false, mapperPerTable = {}, queryPerTable = {}, logEveryPerTable = {}, transformMapOptions, errorMode = ErrorMode.SUPPRESS, emitSchemaFromDB = false, zst = true, } = opt;
|
|
21
20
|
let { tables } = opt;
|
|
22
21
|
console.log(`>> ${dimWhite('dbPipelineBackup')} started in ${grey(outputDirPath)}...`);
|
|
23
22
|
fs2.ensureDir(outputDirPath);
|
|
@@ -41,7 +40,7 @@ export async function dbPipelineBackup(opt) {
|
|
|
41
40
|
: '';
|
|
42
41
|
console.log(`>> ${grey(table)}${sinceUpdatedStr}`);
|
|
43
42
|
}
|
|
44
|
-
const filePath = `${outputDirPath}/${table}.ndjson` + (gzip ? '.gz' : '');
|
|
43
|
+
const filePath = `${outputDirPath}/${table}.ndjson` + (zst ? '.zst' : '');
|
|
45
44
|
const schemaFilePath = `${outputDirPath}/${table}.schema.json`;
|
|
46
45
|
if (protectFromOverwrite && fs2.pathExists(filePath)) {
|
|
47
46
|
throw new AppError(`dbPipelineBackup: output file exists: ${filePath}`);
|
|
@@ -9,7 +9,7 @@ export interface DBPipelineRestoreOptions extends TransformLogProgressOptions {
|
|
|
9
9
|
*/
|
|
10
10
|
db: CommonDB;
|
|
11
11
|
/**
|
|
12
|
-
* Directory path to store dumped files. Will create `${tableName}.ndjson` (or .ndjson.gz if gzip=true) files.
|
|
12
|
+
* Directory path to store dumped files. Will create `${tableName}.ndjson` (or .ndjson.zst if zst=true) files.
|
|
13
13
|
* All parent directories will be created.
|
|
14
14
|
*/
|
|
15
15
|
inputDirPath: string;
|
|
@@ -20,19 +20,19 @@ export async function dbPipelineRestore(opt) {
|
|
|
20
20
|
const sinceUpdatedStr = sinceUpdated ? ' since ' + grey(localTime(sinceUpdated).toPretty()) : '';
|
|
21
21
|
console.log(`>> ${dimWhite('dbPipelineRestore')} started in ${grey(inputDirPath)}...${sinceUpdatedStr}`);
|
|
22
22
|
fs2.ensureDir(inputDirPath);
|
|
23
|
-
const
|
|
23
|
+
const tablesToCompress = new Set();
|
|
24
24
|
const sizeByTable = {};
|
|
25
25
|
const statsPerTable = {};
|
|
26
26
|
const tables = [];
|
|
27
27
|
fs2.readdir(inputDirPath).forEach(f => {
|
|
28
28
|
let table;
|
|
29
|
-
let
|
|
29
|
+
let zst = false;
|
|
30
30
|
if (f.endsWith('.ndjson')) {
|
|
31
31
|
table = f.slice(0, f.length - '.ndjson'.length);
|
|
32
32
|
}
|
|
33
|
-
else if (f.endsWith('.ndjson.gz')) {
|
|
34
|
-
table = f.slice(0, f.length - '.ndjson.gz'.length);
|
|
35
|
-
|
|
33
|
+
else if (f.endsWith('.ndjson.zst')) {
|
|
34
|
+
table = f.slice(0, f.length - '.ndjson.zst'.length);
|
|
35
|
+
zst = true;
|
|
36
36
|
}
|
|
37
37
|
else {
|
|
38
38
|
return;
|
|
@@ -40,8 +40,8 @@ export async function dbPipelineRestore(opt) {
|
|
|
40
40
|
if (onlyTables && !onlyTables.has(table))
|
|
41
41
|
return; // skip table
|
|
42
42
|
tables.push(table);
|
|
43
|
-
if (
|
|
44
|
-
|
|
43
|
+
if (zst)
|
|
44
|
+
tablesToCompress.add(table);
|
|
45
45
|
sizeByTable[table] = fs2.stat(`${inputDirPath}/${f}`).size;
|
|
46
46
|
});
|
|
47
47
|
const sizeStrByTable = _mapValues(sizeByTable, (_k, b) => _hb(b));
|
|
@@ -59,8 +59,8 @@ export async function dbPipelineRestore(opt) {
|
|
|
59
59
|
});
|
|
60
60
|
}
|
|
61
61
|
await pMap(tables, async (table) => {
|
|
62
|
-
const
|
|
63
|
-
const filePath = `${inputDirPath}/${table}.ndjson` + (
|
|
62
|
+
const zst = tablesToCompress.has(table);
|
|
63
|
+
const filePath = `${inputDirPath}/${table}.ndjson` + (zst ? '.zst' : '');
|
|
64
64
|
const saveOptions = saveOptionsPerTable[table] || {};
|
|
65
65
|
const started = Date.now();
|
|
66
66
|
let rows = 0;
|
package/package.json
CHANGED
|
@@ -64,8 +64,7 @@ export interface CacheDBOptions extends CommonDBOptions {
|
|
|
64
64
|
}
|
|
65
65
|
|
|
66
66
|
export interface CacheDBSaveOptions<ROW extends ObjectWithId>
|
|
67
|
-
extends CacheDBOptions,
|
|
68
|
-
CommonDBSaveOptions<ROW> {}
|
|
67
|
+
extends CacheDBOptions, CommonDBSaveOptions<ROW> {}
|
|
69
68
|
|
|
70
69
|
export interface CacheDBStreamOptions extends CacheDBOptions, CommonDBStreamOptions {}
|
|
71
70
|
|
|
@@ -14,7 +14,7 @@ export interface LocalFilePersistencePluginCfg {
|
|
|
14
14
|
/**
|
|
15
15
|
* @default true
|
|
16
16
|
*/
|
|
17
|
-
|
|
17
|
+
zst: boolean
|
|
18
18
|
}
|
|
19
19
|
|
|
20
20
|
/**
|
|
@@ -24,7 +24,7 @@ export class LocalFilePersistencePlugin implements FileDBPersistencePlugin {
|
|
|
24
24
|
constructor(cfg: Partial<LocalFilePersistencePluginCfg> = {}) {
|
|
25
25
|
this.cfg = {
|
|
26
26
|
storagePath: './tmp/localdb',
|
|
27
|
-
|
|
27
|
+
zst: true,
|
|
28
28
|
...cfg,
|
|
29
29
|
}
|
|
30
30
|
}
|
|
@@ -41,7 +41,7 @@ export class LocalFilePersistencePlugin implements FileDBPersistencePlugin {
|
|
|
41
41
|
|
|
42
42
|
async loadFile<ROW extends ObjectWithId>(table: string): Promise<ROW[]> {
|
|
43
43
|
await fs2.ensureDirAsync(this.cfg.storagePath)
|
|
44
|
-
const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}`
|
|
44
|
+
const ext = `ndjson${this.cfg.zst ? '.zst' : ''}`
|
|
45
45
|
const filePath = `${this.cfg.storagePath}/${table}.${ext}`
|
|
46
46
|
|
|
47
47
|
if (!(await fs2.pathExistsAsync(filePath))) return []
|
|
@@ -55,7 +55,7 @@ export class LocalFilePersistencePlugin implements FileDBPersistencePlugin {
|
|
|
55
55
|
|
|
56
56
|
async saveFile<ROW extends ObjectWithId>(table: string, rows: ROW[]): Promise<void> {
|
|
57
57
|
await fs2.ensureDirAsync(this.cfg.storagePath)
|
|
58
|
-
const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}`
|
|
58
|
+
const ext = `ndjson${this.cfg.zst ? '.zst' : ''}`
|
|
59
59
|
const filePath = `${this.cfg.storagePath}/${table}.${ext}`
|
|
60
60
|
|
|
61
61
|
await Pipeline.fromArray(rows).toNDJsonFile(filePath)
|
|
@@ -282,8 +282,10 @@ export interface CommonDaoReadOptions extends CommonDaoOptions {
|
|
|
282
282
|
readAt?: UnixTimestamp
|
|
283
283
|
}
|
|
284
284
|
|
|
285
|
-
export interface CommonDaoSaveOptions<BM extends BaseDBEntity, DBM extends BaseDBEntity>
|
|
286
|
-
extends CommonDaoSaveBatchOptions<DBM> {
|
|
285
|
+
export interface CommonDaoSaveOptions<
|
|
286
|
+
BM extends BaseDBEntity,
|
|
287
|
+
DBM extends BaseDBEntity,
|
|
288
|
+
> extends CommonDaoSaveBatchOptions<DBM> {
|
|
287
289
|
/**
|
|
288
290
|
* If provided - a check will be made.
|
|
289
291
|
* If the object for saving equals to the object passed to `skipIfEquals` - save operation will be skipped.
|
|
@@ -295,8 +297,9 @@ export interface CommonDaoSaveOptions<BM extends BaseDBEntity, DBM extends BaseD
|
|
|
295
297
|
skipIfEquals?: BM
|
|
296
298
|
}
|
|
297
299
|
|
|
298
|
-
export interface CommonDaoPatchByIdOptions<DBM extends BaseDBEntity>
|
|
299
|
-
extends CommonDaoSaveBatchOptions<DBM> {
|
|
300
|
+
export interface CommonDaoPatchByIdOptions<
|
|
301
|
+
DBM extends BaseDBEntity,
|
|
302
|
+
> extends CommonDaoSaveBatchOptions<DBM> {
|
|
300
303
|
/**
|
|
301
304
|
* Defaults to false.
|
|
302
305
|
* With false, if the row doesn't exist - it will throw an error.
|
|
@@ -307,8 +310,9 @@ export interface CommonDaoPatchByIdOptions<DBM extends BaseDBEntity>
|
|
|
307
310
|
createIfMissing?: boolean
|
|
308
311
|
}
|
|
309
312
|
|
|
310
|
-
export interface CommonDaoPatchOptions<DBM extends BaseDBEntity>
|
|
311
|
-
extends CommonDaoSaveBatchOptions<DBM> {
|
|
313
|
+
export interface CommonDaoPatchOptions<
|
|
314
|
+
DBM extends BaseDBEntity,
|
|
315
|
+
> extends CommonDaoSaveBatchOptions<DBM> {
|
|
312
316
|
/**
|
|
313
317
|
* If true - patch will skip loading from DB, and will just optimistically patch passed object.
|
|
314
318
|
*
|
|
@@ -321,8 +325,7 @@ export interface CommonDaoPatchOptions<DBM extends BaseDBEntity>
|
|
|
321
325
|
* All properties default to undefined.
|
|
322
326
|
*/
|
|
323
327
|
export interface CommonDaoSaveBatchOptions<DBM extends BaseDBEntity>
|
|
324
|
-
extends CommonDaoOptions,
|
|
325
|
-
CommonDBSaveOptions<DBM> {
|
|
328
|
+
extends CommonDaoOptions, CommonDBSaveOptions<DBM> {
|
|
326
329
|
/**
|
|
327
330
|
* @default false
|
|
328
331
|
*
|
|
@@ -335,16 +338,15 @@ export interface CommonDaoSaveBatchOptions<DBM extends BaseDBEntity>
|
|
|
335
338
|
// ensureUniqueId?: boolean // feature is currently disabled
|
|
336
339
|
}
|
|
337
340
|
|
|
338
|
-
export interface CommonDaoStreamDeleteOptions<DBM extends BaseDBEntity>
|
|
339
|
-
extends CommonDaoStreamOptions<DBM> {}
|
|
341
|
+
export interface CommonDaoStreamDeleteOptions<
|
|
342
|
+
DBM extends BaseDBEntity,
|
|
343
|
+
> extends CommonDaoStreamOptions<DBM> {}
|
|
340
344
|
|
|
341
345
|
export interface CommonDaoStreamSaveOptions<DBM extends BaseDBEntity>
|
|
342
|
-
extends CommonDaoSaveBatchOptions<DBM>,
|
|
343
|
-
CommonDaoStreamOptions<DBM> {}
|
|
346
|
+
extends CommonDaoSaveBatchOptions<DBM>, CommonDaoStreamOptions<DBM> {}
|
|
344
347
|
|
|
345
348
|
export interface CommonDaoStreamOptions<IN>
|
|
346
|
-
extends CommonDaoReadOptions,
|
|
347
|
-
TransformLogProgressOptions<IN> {
|
|
349
|
+
extends CommonDaoReadOptions, TransformLogProgressOptions<IN> {
|
|
348
350
|
/**
|
|
349
351
|
* @default true (for streams)
|
|
350
352
|
*/
|
|
@@ -3,7 +3,12 @@ import type { CommonLogger } from '@naturalcycles/js-lib/log'
|
|
|
3
3
|
import { pMap } from '@naturalcycles/js-lib/promise/pMap.js'
|
|
4
4
|
import { type KeyValueTuple, SKIP } from '@naturalcycles/js-lib/types'
|
|
5
5
|
import type { Pipeline } from '@naturalcycles/nodejs-lib/stream'
|
|
6
|
-
import { deflateString, inflateToString } from '@naturalcycles/nodejs-lib/zip'
|
|
6
|
+
import {
|
|
7
|
+
deflateString,
|
|
8
|
+
inflateToString,
|
|
9
|
+
zstdCompress,
|
|
10
|
+
zstdDecompressToString,
|
|
11
|
+
} from '@naturalcycles/nodejs-lib/zip'
|
|
7
12
|
import type { CommonDaoLogLevel } from '../commondao/common.dao.model.js'
|
|
8
13
|
import type { CommonDBCreateOptions } from '../db.model.js'
|
|
9
14
|
import type {
|
|
@@ -54,6 +59,11 @@ export const commonKeyValueDaoDeflatedJsonTransformer: CommonKeyValueDaoTransfor
|
|
|
54
59
|
bufferToValue: async buf => JSON.parse(await inflateToString(buf)),
|
|
55
60
|
}
|
|
56
61
|
|
|
62
|
+
export const commonKeyValueDaoZstdJsonTransformer: CommonKeyValueDaoTransformer<any> = {
|
|
63
|
+
valueToBuffer: async v => await zstdCompress(JSON.stringify(v)),
|
|
64
|
+
bufferToValue: async buf => JSON.parse(await zstdDecompressToString(buf)),
|
|
65
|
+
}
|
|
66
|
+
|
|
57
67
|
// todo: logging
|
|
58
68
|
// todo: readonly
|
|
59
69
|
|
|
@@ -79,9 +79,10 @@ export interface DBPipelineBackupOptions extends TransformLogProgressOptions {
|
|
|
79
79
|
protectFromOverwrite?: boolean
|
|
80
80
|
|
|
81
81
|
/**
|
|
82
|
+
* Compress as .zst
|
|
82
83
|
* @default true
|
|
83
84
|
*/
|
|
84
|
-
|
|
85
|
+
zst?: boolean
|
|
85
86
|
|
|
86
87
|
/**
|
|
87
88
|
* Only applicable if `gzip` is enabled
|
|
@@ -147,8 +148,8 @@ export async function dbPipelineBackup(opt: DBPipelineBackupOptions): Promise<ND
|
|
|
147
148
|
transformMapOptions,
|
|
148
149
|
errorMode = ErrorMode.SUPPRESS,
|
|
149
150
|
emitSchemaFromDB = false,
|
|
151
|
+
zst = true,
|
|
150
152
|
} = opt
|
|
151
|
-
const gzip = opt.gzip !== false // default to true
|
|
152
153
|
|
|
153
154
|
let { tables } = opt
|
|
154
155
|
|
|
@@ -184,7 +185,7 @@ export async function dbPipelineBackup(opt: DBPipelineBackupOptions): Promise<ND
|
|
|
184
185
|
console.log(`>> ${grey(table)}${sinceUpdatedStr}`)
|
|
185
186
|
}
|
|
186
187
|
|
|
187
|
-
const filePath = `${outputDirPath}/${table}.ndjson` + (gzip ? '.gz' : '')
|
|
188
|
+
const filePath = `${outputDirPath}/${table}.ndjson` + (zst ? '.zst' : '')
|
|
188
189
|
const schemaFilePath = `${outputDirPath}/${table}.schema.json`
|
|
189
190
|
|
|
190
191
|
if (protectFromOverwrite && fs2.pathExists(filePath)) {
|
|
@@ -24,7 +24,7 @@ export interface DBPipelineRestoreOptions extends TransformLogProgressOptions {
|
|
|
24
24
|
db: CommonDB
|
|
25
25
|
|
|
26
26
|
/**
|
|
27
|
-
* Directory path to store dumped files. Will create `${tableName}.ndjson` (or .ndjson.gz if gzip=true) files.
|
|
27
|
+
* Directory path to store dumped files. Will create `${tableName}.ndjson` (or .ndjson.zst if zst=true) files.
|
|
28
28
|
* All parent directories will be created.
|
|
29
29
|
*/
|
|
30
30
|
inputDirPath: string
|
|
@@ -128,19 +128,19 @@ export async function dbPipelineRestore(opt: DBPipelineRestoreOptions): Promise<
|
|
|
128
128
|
|
|
129
129
|
fs2.ensureDir(inputDirPath)
|
|
130
130
|
|
|
131
|
-
const
|
|
131
|
+
const tablesToCompress = new Set<string>()
|
|
132
132
|
const sizeByTable: Record<string, number> = {}
|
|
133
133
|
const statsPerTable: Record<string, NDJsonStats> = {}
|
|
134
134
|
const tables: string[] = []
|
|
135
135
|
fs2.readdir(inputDirPath).forEach(f => {
|
|
136
136
|
let table: string
|
|
137
|
-
let
|
|
137
|
+
let zst = false
|
|
138
138
|
|
|
139
139
|
if (f.endsWith('.ndjson')) {
|
|
140
140
|
table = f.slice(0, f.length - '.ndjson'.length)
|
|
141
|
-
} else if (f.endsWith('.ndjson.gz')) {
|
|
142
|
-
table = f.slice(0, f.length - '.ndjson.gz'.length)
|
|
143
|
-
|
|
141
|
+
} else if (f.endsWith('.ndjson.zst')) {
|
|
142
|
+
table = f.slice(0, f.length - '.ndjson.zst'.length)
|
|
143
|
+
zst = true
|
|
144
144
|
} else {
|
|
145
145
|
return
|
|
146
146
|
}
|
|
@@ -148,7 +148,7 @@ export async function dbPipelineRestore(opt: DBPipelineRestoreOptions): Promise<
|
|
|
148
148
|
if (onlyTables && !onlyTables.has(table)) return // skip table
|
|
149
149
|
|
|
150
150
|
tables.push(table)
|
|
151
|
-
if (
|
|
151
|
+
if (zst) tablesToCompress.add(table)
|
|
152
152
|
sizeByTable[table] = fs2.stat(`${inputDirPath}/${f}`).size
|
|
153
153
|
})
|
|
154
154
|
|
|
@@ -174,8 +174,8 @@ export async function dbPipelineRestore(opt: DBPipelineRestoreOptions): Promise<
|
|
|
174
174
|
await pMap(
|
|
175
175
|
tables,
|
|
176
176
|
async table => {
|
|
177
|
-
const
|
|
178
|
-
const filePath = `${inputDirPath}/${table}.ndjson` + (
|
|
177
|
+
const zst = tablesToCompress.has(table)
|
|
178
|
+
const filePath = `${inputDirPath}/${table}.ndjson` + (zst ? '.zst' : '')
|
|
179
179
|
const saveOptions: CommonDBSaveOptions<any> = saveOptionsPerTable[table] || {}
|
|
180
180
|
|
|
181
181
|
const started = Date.now()
|