@naturalcycles/nodejs-lib 15.1.0 → 15.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/slack-this.js +1 -1
- package/dist/{util/exec2.js → exec2.js} +1 -1
- package/dist/fs/fs2.d.ts +0 -10
- package/dist/fs/fs2.js +1 -85
- package/dist/fs/yaml2.d.ts +11 -0
- package/dist/fs/yaml2.js +29 -0
- package/dist/index.d.ts +0 -4
- package/dist/index.js +0 -4
- package/dist/script/runScript.d.ts +0 -1
- package/dist/script/runScript.js +4 -4
- package/dist/stream/index.d.ts +2 -0
- package/dist/stream/index.js +2 -0
- package/dist/stream/ndjson/createReadStreamAsNDJSON.d.ts +19 -0
- package/dist/stream/ndjson/createReadStreamAsNDJSON.js +38 -0
- package/dist/stream/ndjson/createWriteStreamAsNDJSON.d.ts +11 -0
- package/dist/stream/ndjson/createWriteStreamAsNDJSON.js +27 -0
- package/dist/stream/ndjson/ndjsonMap.d.ts +1 -1
- package/dist/stream/ndjson/ndjsonMap.js +3 -5
- package/dist/stream/ndjson/ndjsonStreamForEach.d.ts +1 -1
- package/dist/stream/ndjson/ndjsonStreamForEach.js +2 -2
- package/dist/stream/sizeStack.js +1 -1
- package/dist/util/env.util.d.ts +0 -1
- package/dist/util/env.util.js +0 -1
- package/dist/util/git2.js +2 -3
- package/dist/{util → zip}/zip.util.js +0 -1
- package/package.json +12 -5
- package/src/bin/slack-this.ts +1 -1
- package/src/{util/exec2.ts → exec2.ts} +1 -1
- package/src/fs/fs2.ts +1 -101
- package/src/fs/yaml2.ts +37 -0
- package/src/index.ts +0 -4
- package/src/script/runScript.ts +6 -4
- package/src/stream/index.ts +2 -0
- package/src/stream/ndjson/createReadStreamAsNDJSON.ts +46 -0
- package/src/stream/ndjson/createWriteStreamAsNDJSON.ts +30 -0
- package/src/stream/ndjson/ndjsonMap.ts +10 -6
- package/src/stream/ndjson/ndjsonStreamForEach.ts +6 -3
- package/src/stream/sizeStack.ts +1 -1
- package/src/util/env.util.ts +0 -1
- package/src/util/git2.ts +2 -3
- package/src/{util → zip}/zip.util.ts +0 -2
- package/dist/fs/index.d.ts +0 -3
- package/dist/fs/index.js +0 -3
- package/dist/jwt/index.d.ts +0 -1
- package/dist/jwt/index.js +0 -1
- package/dist/yargs/index.d.ts +0 -1
- package/dist/yargs/index.js +0 -1
- package/src/fs/index.ts +0 -3
- package/src/jwt/index.ts +0 -1
- package/src/yargs/index.ts +0 -1
- /package/dist/{util → cache}/lruMemoCache.d.ts +0 -0
- /package/dist/{util → cache}/lruMemoCache.js +0 -0
- /package/dist/{util/exec2.d.ts → exec2.d.ts} +0 -0
- /package/dist/{util → zip}/zip.util.d.ts +0 -0
- /package/src/{util → cache}/lruMemoCache.ts +0 -0
package/dist/bin/slack-this.js
CHANGED
@@ -1,7 +1,7 @@
 #!/usr/bin/env node
 import { SlackService } from '../index.js';
 import { runScript } from '../script/runScript.js';
-import { _yargs } from '../yargs/
+import { _yargs } from '../yargs/yargs.util.js';
 runScript(async () => {
 const { channel, msg, username, emoji, webhook: webhookUrl, } = _yargs().options({
 channel: {
package/dist/{util/exec2.js → exec2.js}
CHANGED
@@ -1,7 +1,7 @@
 import { execSync, spawn, spawnSync } from 'node:child_process';
 import { _substringAfterLast, } from '@naturalcycles/js-lib';
 import { _since, AppError } from '@naturalcycles/js-lib';
-import { dimGrey, dimRed, hasColors, white } from '
+import { dimGrey, dimRed, hasColors, white } from './colors/colors.js';
 /**
 * Set of utility functions to work with Spawn / Exec.
 *
package/dist/fs/fs2.d.ts
CHANGED
@@ -1,7 +1,5 @@
 import type { RmOptions } from 'node:fs';
 import fs from 'node:fs';
-import type { DumpOptions } from 'js-yaml';
-import type { ReadableTyped, TransformTyped } from '../stream/stream.model.js';
 /**
 * fs2 conveniently groups filesystem functions together.
 * Supposed to be almost a drop-in replacement for these things together:
@@ -23,20 +21,14 @@ declare class FS2 {
 readBufferAsync(filePath: string): Promise<Buffer>;
 readJson<T = unknown>(filePath: string): T;
 readJsonAsync<T = unknown>(filePath: string): Promise<T>;
-readYaml<T = unknown>(filePath: string): T;
-readYamlAsync<T = unknown>(filePath: string): Promise<T>;
 writeFile(filePath: string, data: string | Buffer): void;
 writeFileAsync(filePath: string, data: string | Buffer): Promise<void>;
 writeJson(filePath: string, data: any, opt?: JsonOptions): void;
 writeJsonAsync(filePath: string, data: any, opt?: JsonOptions): Promise<void>;
-writeYaml(filePath: string, data: any, opt?: DumpOptions): void;
-writeYamlAsync(filePath: string, data: any, opt?: DumpOptions): Promise<void>;
 appendFile(filePath: string, data: string | Buffer): void;
 appendFileAsync(filePath: string, data: string | Buffer): Promise<void>;
 outputJson(filePath: string, data: any, opt?: JsonOptions): void;
 outputJsonAsync(filePath: string, data: any, opt?: JsonOptions): Promise<void>;
-outputYaml(filePath: string, data: any, opt?: DumpOptions): void;
-outputYamlAsync(filePath: string, data: any, opt?: DumpOptions): Promise<void>;
 outputFile(filePath: string, data: string | Buffer): void;
 outputFileAsync(filePath: string, data: string | Buffer): Promise<void>;
 pathExists(filePath: string): boolean;
@@ -80,8 +72,6 @@ declare class FS2 {
 readdirAsync: typeof fs.promises.readdir;
 createWriteStream: typeof fs.createWriteStream;
 createReadStream: typeof fs.createReadStream;
-createReadStreamAsNDJSON<ROW = any>(inputPath: string): ReadableTyped<ROW>;
-createWriteStreamAsNDJSON(outputPath: string): TransformTyped<any, any>[];
 }
 export declare const fs2: FS2;
 export interface JsonOptions {
package/dist/fs/fs2.js
CHANGED
@@ -16,11 +16,7 @@ Credit to: fs-extra (https://github.com/jprichardson/node-fs-extra)
 import fs from 'node:fs';
 import fsp from 'node:fs/promises';
 import path from 'node:path';
-import {
-import { _isTruthy, _jsonParse } from '@naturalcycles/js-lib';
-import yaml from 'js-yaml';
-import { transformToNDJson } from '../stream/ndjson/transformToNDJson.js';
-import { transformSplitOnNewline } from '../stream/transform/transformSplit.js';
+import { _jsonParse } from '@naturalcycles/js-lib';
 /**
 * fs2 conveniently groups filesystem functions together.
 * Supposed to be almost a drop-in replacement for these things together:
@@ -60,12 +56,6 @@ class FS2 {
 // eslint-disable-next-line @typescript-eslint/return-await
 return _jsonParse(str);
 }
-readYaml(filePath) {
-return yaml.load(fs.readFileSync(filePath, 'utf8'));
-}
-async readYamlAsync(filePath) {
-return yaml.load(await fsp.readFile(filePath, 'utf8'));
-}
 writeFile(filePath, data) {
 fs.writeFileSync(filePath, data);
 }
@@ -80,14 +70,6 @@ class FS2 {
 const str = stringify(data, opt);
 await fsp.writeFile(filePath, str);
 }
-writeYaml(filePath, data, opt) {
-const str = yaml.dump(data, opt);
-fs.writeFileSync(filePath, str);
-}
-async writeYamlAsync(filePath, data, opt) {
-const str = yaml.dump(data, opt);
-await fsp.writeFile(filePath, str);
-}
 appendFile(filePath, data) {
 fs.appendFileSync(filePath, data);
 }
@@ -102,14 +84,6 @@ class FS2 {
 const str = stringify(data, opt);
 await this.outputFileAsync(filePath, str);
 }
-outputYaml(filePath, data, opt) {
-const str = yaml.dump(data, opt);
-this.outputFile(filePath, str);
-}
-async outputYamlAsync(filePath, data, opt) {
-const str = yaml.dump(data, opt);
-await this.outputFileAsync(filePath, str);
-}
 outputFile(filePath, data) {
 const dirPath = path.dirname(filePath);
 if (!fs.existsSync(dirPath)) {
@@ -289,64 +263,6 @@ class FS2 {
 readdirAsync = fsp.readdir;
 createWriteStream = fs.createWriteStream;
 createReadStream = fs.createReadStream;
-/*
-Returns a Readable of [already parsed] NDJSON objects.
-
-Replaces a list of operations:
-- requireFileToExist(inputPath)
-- fs.createReadStream
-- createUnzip (only if path ends with '.gz')
-- transformSplitOnNewline
-- transformJsonParse
-
-To add a Limit or Offset: just add .take() or .drop(), example:
-
-_pipeline([
-fs2.createReadStreamAsNDJSON().take(100),
-transformX(),
-])
-*/
-createReadStreamAsNDJSON(inputPath) {
-this.requireFileToExist(inputPath);
-let stream = fs
-.createReadStream(inputPath, {
-highWaterMark: 64 * 1024, // no observed speedup
-})
-.on('error', err => stream.emit('error', err));
-if (inputPath.endsWith('.gz')) {
-stream = stream.pipe(createUnzip({
-chunkSize: 64 * 1024, // speedup from ~3200 to 3800 rps!
-}));
-}
-return stream.pipe(transformSplitOnNewline()).map(line => JSON.parse(line));
-// For some crazy reason .map is much faster than transformJsonParse!
-// ~5000 vs ~4000 rps !!!
-// .on('error', err => stream.emit('error', err))
-// .pipe(transformJsonParse<ROW>())
-}
-/*
-Returns an array of Transforms, so that you can ...destructure them at
-the end of the _pipeline.
-
-Replaces a list of operations:
-- transformToNDJson
-- createGzip (only if path ends with '.gz')
-- fs.createWriteStream
-*/
-createWriteStreamAsNDJSON(outputPath) {
-this.ensureFile(outputPath);
-return [
-transformToNDJson(),
-outputPath.endsWith('.gz')
-? createGzip({
-// chunkSize: 64 * 1024, // no observed speedup
-})
-: undefined,
-fs.createWriteStream(outputPath, {
-// highWaterMark: 64 * 1024, // no observed speedup
-}),
-].filter(_isTruthy);
-}
 }
 export const fs2 = new FS2();
 function stringify(data, opt) {
package/dist/fs/yaml2.d.ts
ADDED
@@ -0,0 +1,11 @@
+import type { DumpOptions } from 'js-yaml';
+declare class Yaml2 {
+readYaml<T = unknown>(filePath: string): T;
+readYamlAsync<T = unknown>(filePath: string): Promise<T>;
+writeYaml(filePath: string, data: any, opt?: DumpOptions): void;
+writeYamlAsync(filePath: string, data: any, opt?: DumpOptions): Promise<void>;
+outputYaml(filePath: string, data: any, opt?: DumpOptions): void;
+outputYamlAsync(filePath: string, data: any, opt?: DumpOptions): Promise<void>;
+}
+export declare const yaml2: Yaml2;
+export {};
package/dist/fs/yaml2.js
ADDED
@@ -0,0 +1,29 @@
+import fs from 'node:fs';
+import fsp from 'node:fs/promises';
+import yaml from 'js-yaml';
+import { fs2 } from './fs2.js';
+class Yaml2 {
+readYaml(filePath) {
+return yaml.load(fs.readFileSync(filePath, 'utf8'));
+}
+async readYamlAsync(filePath) {
+return yaml.load(await fsp.readFile(filePath, 'utf8'));
+}
+writeYaml(filePath, data, opt) {
+const str = yaml.dump(data, opt);
+fs.writeFileSync(filePath, str);
+}
+async writeYamlAsync(filePath, data, opt) {
+const str = yaml.dump(data, opt);
+await fsp.writeFile(filePath, str);
+}
+outputYaml(filePath, data, opt) {
+const str = yaml.dump(data, opt);
+fs2.outputFile(filePath, str);
+}
+async outputYamlAsync(filePath, data, opt) {
+const str = yaml.dump(data, opt);
+await fs2.outputFileAsync(filePath, str);
+}
+}
+export const yaml2 = new Yaml2();
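The YAML helpers removed from fs2 above now live on this standalone yaml2 object. A minimal usage sketch, assuming the "./yaml2" subpath export declared further down in package.json resolves as published (the config.yml path is only an illustration):

import { yaml2 } from '@naturalcycles/nodejs-lib/yaml2'

// Read and parse a YAML file; the generic parameter is the caller's expected shape
const config = yaml2.readYaml<{ port: number }>('./config.yml')

// outputYaml writes via fs2.outputFile, which creates missing parent directories first
yaml2.outputYaml('./tmp/config-copy.yml', config)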
package/dist/index.d.ts
CHANGED
@@ -1,5 +1,4 @@
 export * from './buffer/buffer.util.js';
-export * from './colors/colors.js';
 export * from './diff/tableDiff.js';
 export * from './infra/process.util.js';
 export * from './log/log.util.js';
@@ -14,7 +13,4 @@ export * from './slack/slack.service.model.js';
 export * from './string/inspect.js';
 export * from './util/buildInfo.util.js';
 export * from './util/env.util.js';
-export * from './util/exec2.js';
 export * from './util/git2.js';
-export * from './util/lruMemoCache.js';
-export * from './util/zip.util.js';
package/dist/index.js
CHANGED
@@ -1,5 +1,4 @@
 export * from './buffer/buffer.util.js';
-export * from './colors/colors.js';
 export * from './diff/tableDiff.js';
 export * from './infra/process.util.js';
 export * from './log/log.util.js';
@@ -14,7 +13,4 @@ export * from './slack/slack.service.model.js';
 export * from './string/inspect.js';
 export * from './util/buildInfo.util.js';
 export * from './util/env.util.js';
-export * from './util/exec2.js';
 export * from './util/git2.js';
-export * from './util/lruMemoCache.js';
-export * from './util/zip.util.js';
package/dist/script/runScript.js
CHANGED
@@ -1,7 +1,5 @@
-import 'dotenv/config';
 import os from 'node:os';
 import { pDelay, setGlobalStringifyFunction } from '@naturalcycles/js-lib';
-import { dimGrey } from '../colors/colors.js';
 import { inspectStringifyFn } from '../string/inspect.js';
 const { DEBUG_RUN_SCRIPT } = process.env;
 /**
@@ -23,7 +21,6 @@ const { DEBUG_RUN_SCRIPT } = process.env;
 * Set env DEBUG_RUN_SCRIPT for extra debugging.
 */
 export function runScript(fn, opt = {}) {
-checkAndlogEnvironment();
 setGlobalStringifyFunction(inspectStringifyFn);
 const { logger = console, noExit, registerUncaughtExceptionHandlers = true } = opt;
 if (registerUncaughtExceptionHandlers || DEBUG_RUN_SCRIPT) {
@@ -42,6 +39,8 @@ export function runScript(fn, opt = {}) {
 const timeout = setTimeout(() => { }, 10000000);
 void (async () => {
 try {
+await import('dotenv/config');
+await checkAndlogEnvironment();
 await fn();
 await pDelay(); // to ensure all async operations are completed
 if (DEBUG_RUN_SCRIPT)
@@ -62,7 +61,8 @@ export function runScript(fn, opt = {}) {
 }
 })();
 }
-function checkAndlogEnvironment() {
+async function checkAndlogEnvironment() {
+const { dimGrey } = await import('../colors/colors.js');
 const { platform, arch, versions: { node }, env: { CPU_LIMIT, NODE_OPTIONS, TZ }, } = process;
 const cpuLimit = Number(CPU_LIMIT) || undefined;
 const availableParallelism = os.availableParallelism?.();
package/dist/stream/index.d.ts
CHANGED
package/dist/stream/index.js
CHANGED
package/dist/stream/ndjson/createReadStreamAsNDJSON.d.ts
ADDED
@@ -0,0 +1,19 @@
+import type { ReadableTyped } from '../stream.model.js';
+/**
+Returns a Readable of [already parsed] NDJSON objects.
+
+Replaces a list of operations:
+- requireFileToExist(inputPath)
+- fs.createReadStream
+- createUnzip (only if path ends with '.gz')
+- transformSplitOnNewline
+- transformJsonParse
+
+To add a Limit or Offset: just add .take() or .drop(), example:
+
+_pipeline([
+fs2.createReadStreamAsNDJSON().take(100),
+transformX(),
+])
+*/
+export declare function createReadStreamAsNDJSON<ROW = any>(inputPath: string): ReadableTyped<ROW>;
package/dist/stream/ndjson/createReadStreamAsNDJSON.js
ADDED
@@ -0,0 +1,38 @@
+import { createUnzip } from 'node:zlib';
+import { fs2 } from '../../fs/fs2.js';
+import { transformSplitOnNewline } from '../transform/transformSplit.js';
+/**
+Returns a Readable of [already parsed] NDJSON objects.
+
+Replaces a list of operations:
+- requireFileToExist(inputPath)
+- fs.createReadStream
+- createUnzip (only if path ends with '.gz')
+- transformSplitOnNewline
+- transformJsonParse
+
+To add a Limit or Offset: just add .take() or .drop(), example:
+
+_pipeline([
+fs2.createReadStreamAsNDJSON().take(100),
+transformX(),
+])
+*/
+export function createReadStreamAsNDJSON(inputPath) {
+fs2.requireFileToExist(inputPath);
+let stream = fs2
+.createReadStream(inputPath, {
+highWaterMark: 64 * 1024, // no observed speedup
+})
+.on('error', err => stream.emit('error', err));
+if (inputPath.endsWith('.gz')) {
+stream = stream.pipe(createUnzip({
+chunkSize: 64 * 1024, // speedup from ~3200 to 3800 rps!
+}));
+}
+return stream.pipe(transformSplitOnNewline()).map(line => JSON.parse(line));
+// For some crazy reason .map is much faster than transformJsonParse!
+// ~5000 vs ~4000 rps !!!
+// .on('error', err => stream.emit('error', err))
+// .pipe(transformJsonParse<ROW>())
+}
package/dist/stream/ndjson/createWriteStreamAsNDJSON.d.ts
ADDED
@@ -0,0 +1,11 @@
+import type { TransformTyped } from '../stream.model.js';
+/**
+Returns an array of Transforms, so that you can ...destructure them at
+the end of the _pipeline.
+
+Replaces a list of operations:
+- transformToNDJson
+- createGzip (only if path ends with '.gz')
+- fs.createWriteStream
+*/
+export declare function createWriteStreamAsNDJSON(outputPath: string): TransformTyped<any, any>[];
package/dist/stream/ndjson/createWriteStreamAsNDJSON.js
ADDED
@@ -0,0 +1,27 @@
+import { createGzip } from 'node:zlib';
+import { _isTruthy } from '@naturalcycles/js-lib';
+import { fs2 } from '../../fs/fs2.js';
+import { transformToNDJson } from './transformToNDJson.js';
+/**
+Returns an array of Transforms, so that you can ...destructure them at
+the end of the _pipeline.
+
+Replaces a list of operations:
+- transformToNDJson
+- createGzip (only if path ends with '.gz')
+- fs.createWriteStream
+*/
+export function createWriteStreamAsNDJSON(outputPath) {
+fs2.ensureFile(outputPath);
+return [
+transformToNDJson(),
+outputPath.endsWith('.gz')
+? createGzip({
+// chunkSize: 64 * 1024, // no observed speedup
+})
+: undefined,
+fs2.createWriteStream(outputPath, {
+// highWaterMark: 64 * 1024, // no observed speedup
+}),
+].filter(_isTruthy);
+}
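Together these two helpers bracket a _pipeline. A minimal round-trip sketch, assuming the "./stream" subpath export exposes them (as the ndjsonMap.js diff below implies) and using an illustrative rows.ndjson.gz path:

import { Readable } from 'node:stream'
import {
  _pipeline,
  createReadStreamAsNDJSON,
  createWriteStreamAsNDJSON,
} from '@naturalcycles/nodejs-lib/stream'

// Write: objects -> NDJSON lines -> gzip (triggered by the .gz suffix) -> file
await _pipeline([
  Readable.from([{ id: 1 }, { id: 2 }]),
  ...createWriteStreamAsNDJSON('./rows.ndjson.gz'),
])

// Read back: the returned Readable yields already-parsed row objects
for await (const row of createReadStreamAsNDJSON<{ id: number }>('./rows.ndjson.gz')) {
  console.log(row.id)
}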
package/dist/stream/ndjson/ndjsonMap.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import type { AbortableAsyncMapper } from '@naturalcycles/js-lib';
-import type
+import { type TransformLogProgressOptions, type TransformMapOptions } from '../index.js';
 export interface NDJSONMapOptions<IN = any, OUT = IN> extends TransformMapOptions<IN, OUT>, TransformLogProgressOptions<IN> {
 inputFilePath: string;
 outputFilePath: string;
package/dist/stream/ndjson/ndjsonMap.js
CHANGED
@@ -1,5 +1,5 @@
 import { ErrorMode } from '@naturalcycles/js-lib';
-import {
+import { createReadStreamAsNDJSON, createWriteStreamAsNDJSON, } from '../index.js';
 import { _pipeline, transformLimit, transformLogProgress, transformMap } from '../index.js';
 /**
 * Unzips input file automatically, if it ends with `.gz`.
@@ -11,9 +11,7 @@ export async function ndjsonMap(mapper, opt) {
 inputFilePath,
 outputFilePath,
 });
-const readable =
-.createReadStreamAsNDJSON(inputFilePath)
-.take(limitInput || Number.POSITIVE_INFINITY);
+const readable = createReadStreamAsNDJSON(inputFilePath).take(limitInput || Number.POSITIVE_INFINITY);
 await _pipeline([
 readable,
 transformLogProgress({ metric: 'read', ...opt }),
@@ -24,6 +22,6 @@ export async function ndjsonMap(mapper, opt) {
 }),
 transformLimit({ limit: limitOutput, sourceReadable: readable }),
 transformLogProgress({ metric: 'saved', logEvery: logEveryOutput }),
-...
+...createWriteStreamAsNDJSON(outputFilePath),
 ]);
 }
package/dist/stream/ndjson/ndjsonStreamForEach.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import type { AbortableAsyncMapper } from '@naturalcycles/js-lib';
-import type
+import { type TransformLogProgressOptions, type TransformMapOptions } from '../index.js';
 export interface NDJSONStreamForEachOptions<IN = any> extends TransformMapOptions<IN, void>, TransformLogProgressOptions<IN> {
 inputFilePath: string;
 }
package/dist/stream/ndjson/ndjsonStreamForEach.js
CHANGED
@@ -1,12 +1,12 @@
 import { ErrorMode } from '@naturalcycles/js-lib';
-import {
+import { createReadStreamAsNDJSON, } from '../index.js';
 import { _pipeline, transformLogProgress, transformMap, writableVoid } from '../index.js';
 /**
 * Convenience function to `forEach` through an ndjson file.
 */
 export async function ndjsonStreamForEach(mapper, opt) {
 await _pipeline([
-
+createReadStreamAsNDJSON(opt.inputFilePath),
 transformMap(mapper, {
 errorMode: ErrorMode.THROW_AGGREGATED,
 ...opt,
package/dist/stream/sizeStack.js
CHANGED
@@ -1,6 +1,6 @@
 import { _hb, NumberStack } from '@naturalcycles/js-lib';
 import { yellow } from '../colors/colors.js';
-import { gzipBuffer } from '../
+import { gzipBuffer } from '../zip/zip.util.js';
 export class SizeStack extends NumberStack {
 name;
 constructor(name, size) {
package/dist/util/env.util.d.ts
CHANGED
package/dist/util/env.util.js
CHANGED
package/dist/util/git2.js
CHANGED
@@ -1,7 +1,6 @@
 import { execSync } from 'node:child_process';
 import { basename } from 'node:path';
-import {
-import { exec2 } from './exec2.js';
+import { exec2 } from '../exec2.js';
 /**
 * Set of utility functions to work with git.
 */
@@ -34,7 +33,7 @@ class Git2 {
 const cmd = `git commit -a --no-verify -m "${msg}"`;
 // const cmd = `git`
 // const args = ['commit', '-a', '--no-verify', '-m', msg]
-console.log(
+console.log(cmd);
 try {
 execSync(cmd, {
 stdio: 'inherit',

package/dist/{util → zip}/zip.util.js
CHANGED
@@ -4,7 +4,6 @@ const deflate = promisify(zlib.deflate.bind(zlib));
 const inflate = promisify(zlib.inflate.bind(zlib));
 const gzip = promisify(zlib.gzip.bind(zlib));
 const gunzip = promisify(zlib.gunzip.bind(zlib));
-// string > compressed buffer
 /**
 * deflateBuffer uses `deflate`.
 * It's 9 bytes shorter than `gzip`.
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
 "name": "@naturalcycles/nodejs-lib",
 "type": "module",
-"version": "15.
+"version": "15.2.0",
 "dependencies": {
 "@naturalcycles/js-lib": "^15",
 "@types/js-yaml": "^4",
@@ -27,14 +27,21 @@
 },
 "exports": {
 ".": "./dist/index.js",
+"./lruMemoCache": "./dist/cache/lruMemoCache.js",
+"./colors": "./dist/colors/colors.js",
 "./csv": "./dist/csv/index.js",
-"./
+"./exec2": "./dist/exec2.js",
+"./fs2": "./dist/fs/fs2.js",
+"./env": "./dist/fs/json2env.js",
+"./kpy": "./dist/fs/kpy.js",
+"./yaml2": "./dist/fs/yaml2.js",
 "./glob": "./dist/glob/index.js",
-"./jwt": "./dist/jwt/
+"./jwt": "./dist/jwt/jwt.service.js",
 "./stream": "./dist/stream/index.js",
-"./yargs": "./dist/yargs/
+"./yargs": "./dist/yargs/yargs.util.js",
 "./ajv": "./dist/validation/ajv/index.js",
-"./joi": "./dist/validation/joi/index.js"
+"./joi": "./dist/validation/joi/index.js",
+"./zip": "./dist/zip/zip.util.js"
 },
 "bin": {
 "kpy": "dist/bin/kpy.js",
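The root barrel shrinks (colors, exec2, lruMemoCache and zip.util leave ./dist/index.js, see the index.ts/index.js diffs) while the exports map above gains dedicated subpaths. A sketch of how imports shift, assuming the new subpaths resolve as declared:

// Before (15.1.0): pulled from the root barrel export
// import { dimGrey, exec2, gzipBuffer } from '@naturalcycles/nodejs-lib'

// After (15.2.0): deep imports via the new subpath exports
import { dimGrey } from '@naturalcycles/nodejs-lib/colors'
import { exec2 } from '@naturalcycles/nodejs-lib/exec2'
import { gzipBuffer } from '@naturalcycles/nodejs-lib/zip'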
package/src/bin/slack-this.ts
CHANGED
package/src/{util/exec2.ts → exec2.ts}
CHANGED
@@ -6,7 +6,7 @@ import {
 type UnixTimestampMillis,
 } from '@naturalcycles/js-lib'
 import { _since, AppError } from '@naturalcycles/js-lib'
-import { dimGrey, dimRed, hasColors, white } from '
+import { dimGrey, dimRed, hasColors, white } from './colors/colors.js'

 /**
 * Set of utility functions to work with Spawn / Exec.
package/src/fs/fs2.ts
CHANGED
@@ -18,13 +18,7 @@ import type { RmOptions, Stats } from 'node:fs'
 import fs from 'node:fs'
 import fsp from 'node:fs/promises'
 import path from 'node:path'
-import {
-import { _isTruthy, _jsonParse } from '@naturalcycles/js-lib'
-import type { DumpOptions } from 'js-yaml'
-import yaml from 'js-yaml'
-import { transformToNDJson } from '../stream/ndjson/transformToNDJson.js'
-import type { ReadableTyped, TransformTyped } from '../stream/stream.model.js'
-import { transformSplitOnNewline } from '../stream/transform/transformSplit.js'
+import { _jsonParse } from '@naturalcycles/js-lib'

 /**
 * fs2 conveniently groups filesystem functions together.
@@ -72,14 +66,6 @@ class FS2 {
 return _jsonParse(str)
 }

-readYaml<T = unknown>(filePath: string): T {
-return yaml.load(fs.readFileSync(filePath, 'utf8')) as T
-}
-
-async readYamlAsync<T = unknown>(filePath: string): Promise<T> {
-return yaml.load(await fsp.readFile(filePath, 'utf8')) as T
-}
-
 writeFile(filePath: string, data: string | Buffer): void {
 fs.writeFileSync(filePath, data)
 }
@@ -98,16 +84,6 @@ class FS2 {
 await fsp.writeFile(filePath, str)
 }

-writeYaml(filePath: string, data: any, opt?: DumpOptions): void {
-const str = yaml.dump(data, opt)
-fs.writeFileSync(filePath, str)
-}
-
-async writeYamlAsync(filePath: string, data: any, opt?: DumpOptions): Promise<void> {
-const str = yaml.dump(data, opt)
-await fsp.writeFile(filePath, str)
-}
-
 appendFile(filePath: string, data: string | Buffer): void {
 fs.appendFileSync(filePath, data)
 }
@@ -126,16 +102,6 @@ class FS2 {
 await this.outputFileAsync(filePath, str)
 }

-outputYaml(filePath: string, data: any, opt?: DumpOptions): void {
-const str = yaml.dump(data, opt)
-this.outputFile(filePath, str)
-}
-
-async outputYamlAsync(filePath: string, data: any, opt?: DumpOptions): Promise<void> {
-const str = yaml.dump(data, opt)
-await this.outputFileAsync(filePath, str)
-}
-
 outputFile(filePath: string, data: string | Buffer): void {
 const dirPath = path.dirname(filePath)
 if (!fs.existsSync(dirPath)) {
@@ -336,72 +302,6 @@ class FS2 {
 readdirAsync = fsp.readdir
 createWriteStream = fs.createWriteStream
 createReadStream = fs.createReadStream
-
-/*
-Returns a Readable of [already parsed] NDJSON objects.
-
-Replaces a list of operations:
-- requireFileToExist(inputPath)
-- fs.createReadStream
-- createUnzip (only if path ends with '.gz')
-- transformSplitOnNewline
-- transformJsonParse
-
-To add a Limit or Offset: just add .take() or .drop(), example:
-
-_pipeline([
-fs2.createReadStreamAsNDJSON().take(100),
-transformX(),
-])
-*/
-createReadStreamAsNDJSON<ROW = any>(inputPath: string): ReadableTyped<ROW> {
-this.requireFileToExist(inputPath)
-
-let stream: ReadableTyped<ROW> = fs
-.createReadStream(inputPath, {
-highWaterMark: 64 * 1024, // no observed speedup
-})
-.on('error', err => stream.emit('error', err))
-
-if (inputPath.endsWith('.gz')) {
-stream = stream.pipe(
-createUnzip({
-chunkSize: 64 * 1024, // speedup from ~3200 to 3800 rps!
-}),
-)
-}
-
-return stream.pipe(transformSplitOnNewline()).map(line => JSON.parse(line))
-// For some crazy reason .map is much faster than transformJsonParse!
-// ~5000 vs ~4000 rps !!!
-// .on('error', err => stream.emit('error', err))
-// .pipe(transformJsonParse<ROW>())
-}
-
-/*
-Returns an array of Transforms, so that you can ...destructure them at
-the end of the _pipeline.
-
-Replaces a list of operations:
-- transformToNDJson
-- createGzip (only if path ends with '.gz')
-- fs.createWriteStream
-*/
-createWriteStreamAsNDJSON(outputPath: string): TransformTyped<any, any>[] {
-this.ensureFile(outputPath)
-
-return [
-transformToNDJson(),
-outputPath.endsWith('.gz')
-? createGzip({
-// chunkSize: 64 * 1024, // no observed speedup
-})
-: undefined,
-fs.createWriteStream(outputPath, {
-// highWaterMark: 64 * 1024, // no observed speedup
-}),
-].filter(_isTruthy) as TransformTyped<any, any>[]
-}
 }

 export const fs2 = new FS2()
package/src/fs/yaml2.ts
ADDED
@@ -0,0 +1,37 @@
+import fs from 'node:fs'
+import fsp from 'node:fs/promises'
+import type { DumpOptions } from 'js-yaml'
+import yaml from 'js-yaml'
+import { fs2 } from './fs2.js'
+
+class Yaml2 {
+readYaml<T = unknown>(filePath: string): T {
+return yaml.load(fs.readFileSync(filePath, 'utf8')) as T
+}
+
+async readYamlAsync<T = unknown>(filePath: string): Promise<T> {
+return yaml.load(await fsp.readFile(filePath, 'utf8')) as T
+}
+
+writeYaml(filePath: string, data: any, opt?: DumpOptions): void {
+const str = yaml.dump(data, opt)
+fs.writeFileSync(filePath, str)
+}
+
+async writeYamlAsync(filePath: string, data: any, opt?: DumpOptions): Promise<void> {
+const str = yaml.dump(data, opt)
+await fsp.writeFile(filePath, str)
+}
+
+outputYaml(filePath: string, data: any, opt?: DumpOptions): void {
+const str = yaml.dump(data, opt)
+fs2.outputFile(filePath, str)
+}
+
+async outputYamlAsync(filePath: string, data: any, opt?: DumpOptions): Promise<void> {
+const str = yaml.dump(data, opt)
+await fs2.outputFileAsync(filePath, str)
+}
+}
+
+export const yaml2 = new Yaml2()
package/src/index.ts
CHANGED
@@ -1,5 +1,4 @@
 export * from './buffer/buffer.util.js'
-export * from './colors/colors.js'
 export * from './diff/tableDiff.js'
 export * from './infra/process.util.js'
 export * from './log/log.util.js'
@@ -14,7 +13,4 @@ export * from './slack/slack.service.model.js'
 export * from './string/inspect.js'
 export * from './util/buildInfo.util.js'
 export * from './util/env.util.js'
-export * from './util/exec2.js'
 export * from './util/git2.js'
-export * from './util/lruMemoCache.js'
-export * from './util/zip.util.js'
package/src/script/runScript.ts
CHANGED
@@ -1,8 +1,6 @@
-import 'dotenv/config'
 import os from 'node:os'
 import type { AnyObject, CommonLogger } from '@naturalcycles/js-lib'
 import { pDelay, setGlobalStringifyFunction } from '@naturalcycles/js-lib'
-import { dimGrey } from '../colors/colors.js'
 import { inspectStringifyFn } from '../string/inspect.js'

 export interface RunScriptOptions {
@@ -46,7 +44,6 @@ const { DEBUG_RUN_SCRIPT } = process.env
 * Set env DEBUG_RUN_SCRIPT for extra debugging.
 */
 export function runScript(fn: (...args: any[]) => any, opt: RunScriptOptions = {}): void {
-checkAndlogEnvironment()
 setGlobalStringifyFunction(inspectStringifyFn)

 const { logger = console, noExit, registerUncaughtExceptionHandlers = true } = opt
@@ -70,6 +67,9 @@ export function runScript(fn: (...args: any[]) => any, opt: RunScriptOptions = {

 void (async () => {
 try {
+await import('dotenv/config')
+await checkAndlogEnvironment()
+
 await fn()

 await pDelay() // to ensure all async operations are completed
@@ -91,7 +91,9 @@ export function runScript(fn: (...args: any[]) => any, opt: RunScriptOptions = {
 })()
 }

-function checkAndlogEnvironment(): void {
+async function checkAndlogEnvironment(): Promise<void> {
+const { dimGrey } = await import('../colors/colors.js')
+
 const {
 platform,
 arch,
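dotenv/config and the colors module are no longer loaded when the module is imported; both are now awaited lazily inside runScript, so .env is read only once the script actually starts. The calling pattern is unchanged; a short sketch, assuming runScript stays re-exported from the package root as in previous versions, with an illustrative MY_ENV_VAR:

import { runScript } from '@naturalcycles/nodejs-lib'

runScript(async () => {
  // by this point the lazy `await import('dotenv/config')` has already run
  console.log(process.env['MY_ENV_VAR'])
})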
package/src/stream/index.ts
CHANGED
package/src/stream/ndjson/createReadStreamAsNDJSON.ts
ADDED
@@ -0,0 +1,46 @@
+import { createUnzip } from 'node:zlib'
+import { fs2 } from '../../fs/fs2.js'
+import type { ReadableTyped } from '../stream.model.js'
+import { transformSplitOnNewline } from '../transform/transformSplit.js'
+
+/**
+Returns a Readable of [already parsed] NDJSON objects.
+
+Replaces a list of operations:
+- requireFileToExist(inputPath)
+- fs.createReadStream
+- createUnzip (only if path ends with '.gz')
+- transformSplitOnNewline
+- transformJsonParse
+
+To add a Limit or Offset: just add .take() or .drop(), example:
+
+_pipeline([
+fs2.createReadStreamAsNDJSON().take(100),
+transformX(),
+])
+*/
+
+export function createReadStreamAsNDJSON<ROW = any>(inputPath: string): ReadableTyped<ROW> {
+fs2.requireFileToExist(inputPath)
+
+let stream: ReadableTyped<ROW> = fs2
+.createReadStream(inputPath, {
+highWaterMark: 64 * 1024, // no observed speedup
+})
+.on('error', err => stream.emit('error', err))
+
+if (inputPath.endsWith('.gz')) {
+stream = stream.pipe(
+createUnzip({
+chunkSize: 64 * 1024, // speedup from ~3200 to 3800 rps!
+}),
+)
+}
+
+return stream.pipe(transformSplitOnNewline()).map(line => JSON.parse(line))
+// For some crazy reason .map is much faster than transformJsonParse!
+// ~5000 vs ~4000 rps !!!
+// .on('error', err => stream.emit('error', err))
+// .pipe(transformJsonParse<ROW>())
+}
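As the doc comment notes, limits and offsets come from the standard Readable iterator helpers (.take() / .drop()) rather than extra options. A short sketch, assuming an existing data.ndjson.gz file and that transformLogProgress/writableVoid from the stream barrel serve as the rest of the pipeline:

import {
  _pipeline,
  createReadStreamAsNDJSON,
  transformLogProgress,
  writableVoid,
} from '@naturalcycles/nodejs-lib/stream'

await _pipeline([
  // skip the first 1000 rows, then read the next 100
  createReadStreamAsNDJSON<{ id: number }>('./data.ndjson.gz').drop(1000).take(100),
  transformLogProgress({ metric: 'read' }),
  writableVoid(),
])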
package/src/stream/ndjson/createWriteStreamAsNDJSON.ts
ADDED
@@ -0,0 +1,30 @@
+import { createGzip } from 'node:zlib'
+import { _isTruthy } from '@naturalcycles/js-lib'
+import { fs2 } from '../../fs/fs2.js'
+import type { TransformTyped } from '../stream.model.js'
+import { transformToNDJson } from './transformToNDJson.js'
+
+/**
+Returns an array of Transforms, so that you can ...destructure them at
+the end of the _pipeline.
+
+Replaces a list of operations:
+- transformToNDJson
+- createGzip (only if path ends with '.gz')
+- fs.createWriteStream
+*/
+export function createWriteStreamAsNDJSON(outputPath: string): TransformTyped<any, any>[] {
+fs2.ensureFile(outputPath)
+
+return [
+transformToNDJson(),
+outputPath.endsWith('.gz')
+? createGzip({
+// chunkSize: 64 * 1024, // no observed speedup
+})
+: undefined,
+fs2.createWriteStream(outputPath, {
+// highWaterMark: 64 * 1024, // no observed speedup
+}),
+].filter(_isTruthy) as TransformTyped<any, any>[]
+}
package/src/stream/ndjson/ndjsonMap.ts
CHANGED
@@ -1,7 +1,11 @@
 import type { AbortableAsyncMapper } from '@naturalcycles/js-lib'
 import { ErrorMode } from '@naturalcycles/js-lib'
-import {
-
+import {
+createReadStreamAsNDJSON,
+createWriteStreamAsNDJSON,
+type TransformLogProgressOptions,
+type TransformMapOptions,
+} from '../index.js'
 import { _pipeline, transformLimit, transformLogProgress, transformMap } from '../index.js'

 export interface NDJSONMapOptions<IN = any, OUT = IN>
@@ -41,9 +45,9 @@ export async function ndjsonMap<IN = any, OUT = any>(
 outputFilePath,
 })

-const readable =
-.
-
+const readable = createReadStreamAsNDJSON(inputFilePath).take(
+limitInput || Number.POSITIVE_INFINITY,
+)

 await _pipeline([
 readable,
@@ -55,6 +59,6 @@ export async function ndjsonMap<IN = any, OUT = any>(
 }),
 transformLimit({ limit: limitOutput, sourceReadable: readable }),
 transformLogProgress({ metric: 'saved', logEvery: logEveryOutput }),
-...
+...createWriteStreamAsNDJSON(outputFilePath),
 ])
 }
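ndjsonMap composes the pieces above: it streams one NDJSON file, maps every row, and writes the result to another, unzipping/zipping automatically on .gz suffixes. A minimal sketch with illustrative paths, assuming it is exported from the same "./stream" entry point:

import { ndjsonMap } from '@naturalcycles/nodejs-lib/stream'

interface Row { id: number }

// increment every id and write the mapped rows to the output file
await ndjsonMap<Row, Row>(async row => ({ ...row, id: row.id + 1 }), {
  inputFilePath: './input.ndjson.gz',
  outputFilePath: './output.ndjson.gz',
})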
package/src/stream/ndjson/ndjsonStreamForEach.ts
CHANGED
@@ -1,7 +1,10 @@
 import type { AbortableAsyncMapper } from '@naturalcycles/js-lib'
 import { ErrorMode } from '@naturalcycles/js-lib'
-import {
-
+import {
+createReadStreamAsNDJSON,
+type TransformLogProgressOptions,
+type TransformMapOptions,
+} from '../index.js'
 import { _pipeline, transformLogProgress, transformMap, writableVoid } from '../index.js'

 export interface NDJSONStreamForEachOptions<IN = any>
@@ -18,7 +21,7 @@ export async function ndjsonStreamForEach<T>(
 opt: NDJSONStreamForEachOptions<T>,
 ): Promise<void> {
 await _pipeline([
-
+createReadStreamAsNDJSON(opt.inputFilePath),
 transformMap<T, any>(mapper, {
 errorMode: ErrorMode.THROW_AGGREGATED,
 ...opt,
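ndjsonStreamForEach is the read-only counterpart: the same read pipeline, but rows end in writableVoid instead of an output file. A short sketch under the same export assumption:

import { ndjsonStreamForEach } from '@naturalcycles/nodejs-lib/stream'

await ndjsonStreamForEach<{ id: number }>(
  async row => {
    console.log(row.id) // side effects only, nothing is written back
  },
  { inputFilePath: './input.ndjson.gz' },
)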
package/src/stream/sizeStack.ts
CHANGED
@@ -1,7 +1,7 @@
 import type { CommonLogger } from '@naturalcycles/js-lib'
 import { _hb, NumberStack } from '@naturalcycles/js-lib'
 import { yellow } from '../colors/colors.js'
-import { gzipBuffer } from '../
+import { gzipBuffer } from '../zip/zip.util.js'

 export class SizeStack extends NumberStack {
 constructor(
package/src/util/env.util.ts
CHANGED
package/src/util/git2.ts
CHANGED
@@ -1,8 +1,7 @@
 import { execSync } from 'node:child_process'
 import { basename } from 'node:path'
 import type { UnixTimestamp } from '@naturalcycles/js-lib'
-import {
-import { exec2 } from './exec2.js'
+import { exec2 } from '../exec2.js'

 /**
 * Set of utility functions to work with git.
@@ -38,7 +37,7 @@ class Git2 {
 const cmd = `git commit -a --no-verify -m "${msg}"`
 // const cmd = `git`
 // const args = ['commit', '-a', '--no-verify', '-m', msg]
-console.log(
+console.log(cmd)

 try {
 execSync(cmd, {

package/src/{util → zip}/zip.util.ts
CHANGED
@@ -7,8 +7,6 @@ const inflate = promisify(zlib.inflate.bind(zlib))
 const gzip = promisify(zlib.gzip.bind(zlib))
 const gunzip = promisify(zlib.gunzip.bind(zlib))

-// string > compressed buffer
-
 /**
 * deflateBuffer uses `deflate`.
 * It's 9 bytes shorter than `gzip`.
package/dist/fs/index.d.ts
DELETED
package/dist/fs/index.js
DELETED
package/dist/jwt/index.d.ts
DELETED
@@ -1 +0,0 @@
-export * from './jwt.service.js';
package/dist/jwt/index.js
DELETED
@@ -1 +0,0 @@
-export * from './jwt.service.js';
package/dist/yargs/index.d.ts
DELETED
@@ -1 +0,0 @@
-export * from './yargs.util.js';
package/dist/yargs/index.js
DELETED
@@ -1 +0,0 @@
-export * from './yargs.util.js';
package/src/fs/index.ts
DELETED
package/src/jwt/index.ts
DELETED
@@ -1 +0,0 @@
-export * from './jwt.service.js'
package/src/yargs/index.ts
DELETED
@@ -1 +0,0 @@
-export * from './yargs.util.js'
Files renamed without content changes:
- package/dist/{util → cache}/lruMemoCache.d.ts
- package/dist/{util → cache}/lruMemoCache.js
- package/dist/{util/exec2.d.ts → exec2.d.ts}
- package/dist/{util → zip}/zip.util.d.ts
- package/src/{util → cache}/lruMemoCache.ts