@reliverse/relifso 1.4.0 → 1.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSES +16 -0
- package/README.md +48 -17
- package/bin/impl/bun.d.ts +5 -28
- package/bin/impl/bun.js +2 -126
- package/bin/impl/copy.js +8 -7
- package/bin/impl/create.d.ts +34 -0
- package/bin/impl/create.js +54 -0
- package/bin/impl/dive.d.ts +10 -0
- package/bin/impl/dive.js +89 -0
- package/bin/impl/empty.d.ts +28 -0
- package/bin/impl/empty.js +75 -0
- package/bin/impl/extras.d.ts +22 -2
- package/bin/impl/extras.js +68 -3
- package/bin/impl/json-utils.d.ts +30 -0
- package/bin/impl/json-utils.js +46 -0
- package/bin/impl/output-file.d.ts +3 -2
- package/bin/impl/output-json.d.ts +7 -2
- package/bin/impl/output-json.js +73 -11
- package/bin/impl/read-file.d.ts +11 -0
- package/bin/impl/read-file.js +82 -4
- package/bin/impl/read-json.d.ts +6 -0
- package/bin/impl/read-json.js +133 -21
- package/bin/impl/stats.d.ts +31 -0
- package/bin/impl/stats.js +141 -0
- package/bin/impl/write-file.d.ts +19 -8
- package/bin/impl/write-file.js +218 -9
- package/bin/impl/write-json.d.ts +13 -2
- package/bin/impl/write-json.js +46 -7
- package/bin/mod.d.ts +84 -36
- package/bin/mod.js +108 -39
- package/bin/utils/json/helpers/JSONRepairError.d.ts +4 -0
- package/bin/utils/json/helpers/JSONRepairError.js +7 -0
- package/bin/utils/json/helpers/JsonSchemaError.d.ts +6 -0
- package/bin/utils/json/helpers/JsonSchemaError.js +6 -0
- package/bin/utils/json/helpers/stringUtils.d.ts +64 -0
- package/bin/utils/json/helpers/stringUtils.js +87 -0
- package/bin/utils/json/regular/jsonc.d.ts +45 -0
- package/bin/utils/json/regular/jsonc.js +88 -0
- package/bin/utils/json/regular/jsonrepair.d.ts +17 -0
- package/bin/utils/json/regular/jsonrepair.js +576 -0
- package/bin/utils/json/regular/validate.d.ts +22 -0
- package/bin/utils/json/regular/validate.js +52 -0
- package/bin/utils/json/stream/JsonStreamError.d.ts +6 -0
- package/bin/utils/json/stream/JsonStreamError.js +6 -0
- package/bin/utils/json/stream/buffer/InputBuffer.d.ts +13 -0
- package/bin/utils/json/stream/buffer/InputBuffer.js +68 -0
- package/bin/utils/json/stream/buffer/OutputBuffer.d.ts +17 -0
- package/bin/utils/json/stream/buffer/OutputBuffer.js +101 -0
- package/bin/utils/json/stream/core.d.ts +10 -0
- package/bin/utils/json/stream/core.js +695 -0
- package/bin/utils/json/stream/jsonl.d.ts +21 -0
- package/bin/utils/json/stream/jsonl.js +55 -0
- package/bin/utils/json/stream/parser.d.ts +14 -0
- package/bin/utils/json/stream/parser.js +81 -0
- package/bin/utils/json/stream/stack.d.ts +19 -0
- package/bin/utils/json/stream/stack.js +43 -0
- package/bin/utils/json/stream/stream.d.ts +6 -0
- package/bin/utils/json/stream/stream.js +30 -0
- package/bin/utils/json/stream/writer.d.ts +14 -0
- package/bin/utils/json/stream/writer.js +44 -0
- package/package.json +3 -2
- package/bin/impl/create-file.d.ts +0 -2
- package/bin/impl/create-file.js +0 -21
- package/bin/impl/dive-async.d.ts +0 -11
- package/bin/impl/dive-async.js +0 -88
- package/bin/impl/empty-dir.d.ts +0 -2
- package/bin/impl/empty-dir.js +0 -24
- package/bin/impl/file-utils.d.ts +0 -20
- package/bin/impl/file-utils.js +0 -63
- /package/bin/{impl/logger.d.ts → utils/log.d.ts} +0 -0
- /package/bin/{impl/logger.js → utils/log.js} +0 -0
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import { Transform } from "node:stream";
|
|
2
|
+
import { JsonStreamError } from "./JsonStreamError.js";
|
|
3
|
+
/**
 * Creates a transform stream that parses newline-delimited JSON (JSONL).
 *
 * Writable side: raw text chunks. Readable side: one parsed JS value per
 * non-empty line (object mode). A trailing line that is not terminated by a
 * newline is parsed on flush instead of being silently dropped.
 *
 * @param options - `chunkSize`: max characters buffered while waiting for a
 *   newline (default 1 MiB); `reviver`: JSON.parse-style reviver.
 * @returns A Transform stream emitting parsed values.
 */
export function createJsonlParser(options = {}) {
  const {
    chunkSize = 1024 * 1024,
    // 1MB default chunk size
    reviver
  } = options;
  let buffer = "";
  // Parses one non-empty JSONL line and pushes the result onto `stream`.
  const parseLine = (stream, line) => {
    try {
      stream.push(JSON.parse(line, reviver));
    } catch (error) {
      const parseError = error;
      throw new JsonStreamError(`Failed to parse JSONL line: ${parseError.message}`);
    }
  };
  return new Transform({
    // The readable side emits parsed objects, so it must be in object mode;
    // push() of a plain object throws in the default byte mode.
    readableObjectMode: true,
    transform(chunk, _encoding, callback) {
      try {
        buffer += chunk.toString();
        const lines = buffer.split("\n");
        // Keep the (possibly incomplete) final segment for the next chunk.
        buffer = lines.pop() || "";
        for (const line of lines) {
          if (line.trim()) {
            parseLine(this, line);
          }
        }
        if (buffer.length > chunkSize) {
          throw new JsonStreamError("Chunk size exceeded without finding complete JSONL line");
        }
        callback();
      } catch (error) {
        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
      }
    },
    flush(callback) {
      // End of input: a final line without a trailing newline is still valid
      // JSONL, so parse it rather than discarding it.
      try {
        if (buffer.trim()) {
          parseLine(this, buffer);
        }
        buffer = "";
        callback();
      } catch (error) {
        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
      }
    }
  });
}
|
|
38
|
+
/**
 * Creates a transform stream that serializes values to newline-delimited
 * JSON (JSONL): one `JSON.stringify`d line per written value.
 *
 * @param options - `replacer`: JSON.stringify-style replacer.
 * @returns A Transform stream accepting arbitrary values (object mode) and
 *   emitting newline-terminated JSON strings.
 */
export function createJsonlWriter(options = {}) {
  const { replacer } = options;
  return new Transform({
    // Callers write plain JS values, so the writable side must be in object
    // mode; otherwise write({...}) is rejected as an invalid chunk type.
    writableObjectMode: true,
    transform(chunk, _encoding, callback) {
      try {
        const jsonStr = JSON.stringify(chunk, replacer);
        if (jsonStr === void 0) {
          // JSON.stringify yields undefined for undefined/functions/symbols.
          throw new JsonStreamError("Failed to stringify JSON object");
        }
        this.push(`${jsonStr}\n`);
        callback();
      } catch (error) {
        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
      }
    }
  });
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { Transform } from "node:stream";
|
|
2
|
+
interface ParserOptions {
|
|
3
|
+
chunkSize?: number;
|
|
4
|
+
maxDepth?: number;
|
|
5
|
+
reviver?: (key: string, value: unknown) => unknown;
|
|
6
|
+
}
|
|
7
|
+
/**
|
|
8
|
+
* Creates a transform stream that parses JSON data chunk by chunk.
|
|
9
|
+
*
|
|
10
|
+
* @param options - Parser options
|
|
11
|
+
* @returns A transform stream that emits parsed JSON objects
|
|
12
|
+
*/
|
|
13
|
+
export declare function createJsonStreamParser(options?: ParserOptions): Transform;
|
|
14
|
+
export {};
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import { Transform } from "node:stream";
|
|
2
|
+
import { JsonStreamError } from "./JsonStreamError.js";
|
|
3
|
+
/**
 * Creates a transform stream that extracts and parses consecutive JSON
 * objects (`{...}`) from a text stream.
 *
 * Writable side: raw text chunks. Readable side: parsed objects (object
 * mode). Text before the first `{` of each object is discarded. Only
 * brace-delimited objects are recognized — top-level arrays/scalars are not.
 *
 * @param options - `chunkSize`: max characters buffered while waiting for a
 *   complete object (default 1 MiB); `maxDepth`: max brace nesting (default
 *   100); `reviver`: JSON.parse-style reviver.
 * @returns A Transform stream emitting parsed objects.
 */
export function createJsonStreamParser(options = {}) {
  const {
    chunkSize = 1024 * 1024,
    // 1MB default chunk size
    maxDepth = 100,
    reviver
  } = options;
  let buffer = "";
  let depth = 0;
  let inString = false;
  let escapeNext = false;
  return new Transform({
    // The readable side emits parsed objects, so it must be in object mode;
    // push() of a plain object throws in the default byte mode.
    readableObjectMode: true,
    transform(chunk, _encoding, callback) {
      try {
        const text = chunk.toString();
        buffer += text;
        // Repeatedly carve the next balanced {...} span out of the buffer.
        while (buffer.length > 0) {
          const start = buffer.indexOf("{");
          if (start === -1) {
            // No object can begin in the buffered text; drop it.
            buffer = "";
            break;
          }
          buffer = buffer.slice(start);
          depth = 0;
          inString = false;
          escapeNext = false;
          let end = -1;
          // Brace-counting scanner that ignores braces inside string literals
          // and honours backslash escapes.
          for (let i = 0; i < buffer.length; i++) {
            const char = buffer[i];
            if (escapeNext) {
              escapeNext = false;
              continue;
            }
            if (char === "\\") {
              escapeNext = true;
              continue;
            }
            if (char === '"' && !escapeNext) {
              inString = !inString;
              continue;
            }
            if (!inString) {
              if (char === "{") {
                depth++;
                if (depth > maxDepth) {
                  throw new JsonStreamError(`Maximum nesting depth of ${maxDepth} exceeded`);
                }
              } else if (char === "}") {
                depth--;
                if (depth === 0) {
                  end = i + 1;
                  break;
                }
              }
            }
          }
          if (end === -1) {
            // Object is incomplete: wait for more input unless the buffer has
            // grown past the configured limit.
            if (buffer.length > chunkSize) {
              throw new JsonStreamError("Chunk size exceeded without finding complete JSON object");
            }
            break;
          }
          const jsonStr = buffer.slice(0, end);
          try {
            const parsed = JSON.parse(jsonStr, reviver);
            this.push(parsed);
          } catch (error) {
            const parseError = error;
            throw new JsonStreamError(`Failed to parse JSON: ${parseError.message}`);
          }
          buffer = buffer.slice(end);
        }
        callback();
      } catch (error) {
        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
      }
    }
  });
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/** Cursor positions the streaming JSON machinery distinguishes while walking a document. */
export declare enum Caret {
    beforeValue = "beforeValue",
    afterValue = "afterValue",
    beforeKey = "beforeKey"
}
/** Kinds of containers that can sit on the parse stack. */
export declare enum StackType {
    root = "root",
    object = "object",
    array = "array",
    ndJson = "ndJson",
    // NOTE(review): member name and string value differ ("functionCall" vs
    // "dataType") — confirm this mismatch is intentional.
    functionCall = "dataType"
}
/**
 * Creates a mutable parse-state stack tracking the current container type
 * (top of stack) and caret position. All mutators return `true`, which is
 * convenient when chained inside boolean expressions.
 */
export declare function createStack(): {
    readonly type: StackType | undefined;
    readonly caret: Caret;
    pop(): true;
    push(type: StackType, newCaret: Caret): true;
    update(newCaret: Caret): true;
};
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
// Caret: cursor positions distinguished while walking a JSON document.
export var Caret = /* @__PURE__ */ (() => {
  const members = {
    beforeValue: "beforeValue",
    afterValue: "afterValue",
    beforeKey: "beforeKey"
  };
  return members;
})();
// StackType: kinds of containers that can sit on the parse stack.
// (The "functionCall" member intentionally carries the value "dataType".)
export var StackType = /* @__PURE__ */ (() => {
  const members = {
    root: "root",
    object: "object",
    array: "array",
    ndJson: "ndJson",
    functionCall: "dataType"
  };
  return members;
})();
/**
 * Creates a mutable parse-state stack.
 *
 * `type` reflects the container currently on top of the stack and `caret`
 * the current cursor position. Every mutator returns `true` so calls can be
 * chained inside boolean expressions.
 */
export function createStack() {
  const frames = ["root"];
  let position = "beforeValue";
  return {
    get type() {
      // Top of the stack, or undefined once the stack has been emptied.
      return frames[frames.length - 1];
    },
    get caret() {
      return position;
    },
    pop() {
      frames.pop();
      position = "afterValue";
      return true;
    },
    push(type, newCaret) {
      frames.push(type);
      position = newCaret;
      return true;
    },
    update(newCaret) {
      position = newCaret;
      return true;
    }
  };
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { Transform } from "node:stream";
|
|
2
|
+
import { jsonrepairCore } from "./core.js";
|
|
3
|
+
/**
 * Creates a transform stream that repairs malformed JSON text on the fly by
 * piping each chunk through `jsonrepairCore` and re-emitting the repaired
 * output on the readable side.
 *
 * @param options - Optional `bufferSize`/`chunkSize` forwarded to the core.
 * @returns A Transform stream of repaired JSON text.
 */
export function jsonrepairTransform(options) {
  const repair = jsonrepairCore({
    // `transform` is assigned below; the core invokes onData only while
    // processing input, after the assignment has run.
    onData: (chunk) => transform.push(chunk),
    bufferSize: options?.bufferSize,
    chunkSize: options?.chunkSize
  });
  const transform = new Transform({
    transform(chunk, _encoding, callback) {
      try {
        repair.transform(chunk.toString());
        callback();
      } catch (err) {
        // Report failure through the callback rather than emit("error") +
        // callback(): the old pattern signalled success to the pipeline
        // after erroring, which can double-report and keep data flowing
        // through a broken stream.
        callback(err instanceof Error ? err : new Error(String(err)));
      }
    },
    flush(callback) {
      try {
        repair.flush();
        callback();
      } catch (err) {
        callback(err instanceof Error ? err : new Error(String(err)));
      }
    }
  });
  return transform;
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { Transform } from "node:stream";
|
|
2
|
+
interface WriterOptions {
|
|
3
|
+
chunkSize?: number;
|
|
4
|
+
replacer?: (key: string, value: unknown) => unknown | (number | string)[] | null;
|
|
5
|
+
spaces?: string | number;
|
|
6
|
+
}
|
|
7
|
+
/**
|
|
8
|
+
* Creates a transform stream that writes JSON data chunk by chunk.
|
|
9
|
+
*
|
|
10
|
+
* @param options - Writer options
|
|
11
|
+
* @returns A transform stream that accepts objects and outputs JSON strings
|
|
12
|
+
*/
|
|
13
|
+
export declare function createJsonStreamWriter(options?: WriterOptions): Transform;
|
|
14
|
+
export {};
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { Transform } from "node:stream";
|
|
2
|
+
import { JsonStreamError } from "./JsonStreamError.js";
|
|
3
|
+
/**
 * Creates a transform stream that serializes written values to JSON text,
 * buffering output and flushing it in chunks of roughly `chunkSize`.
 *
 * Successive values are separated by commas. NOTE(review): no surrounding
 * `[`/`]` brackets are emitted, so the combined output of 2+ values is not
 * itself valid JSON — presumably a wrapper adds them; confirm with callers.
 *
 * @param options - `chunkSize`: flush threshold in characters (default
 *   1 MiB); `replacer`/`spaces`: forwarded to JSON.stringify.
 * @returns A Transform stream accepting values (object mode) and emitting
 *   JSON text.
 */
export function createJsonStreamWriter(options = {}) {
  const {
    chunkSize = 1024 * 1024,
    // 1MB default chunk size
    replacer,
    spaces
  } = options;
  let isFirst = true;
  let buffer = "";
  return new Transform({
    // Callers write plain JS values, so the writable side must be in object
    // mode; otherwise write({...}) is rejected as an invalid chunk type.
    writableObjectMode: true,
    transform(chunk, _encoding, callback) {
      try {
        const jsonStr = JSON.stringify(chunk, replacer, spaces);
        if (jsonStr === void 0) {
          // JSON.stringify yields undefined for undefined/functions/symbols.
          throw new JsonStreamError("Failed to stringify JSON object");
        }
        if (!isFirst) {
          buffer += ",";
        }
        isFirst = false;
        buffer += jsonStr;
        if (buffer.length >= chunkSize) {
          this.push(buffer);
          buffer = "";
        }
        callback();
      } catch (error) {
        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
      }
    },
    flush(callback) {
      // Emit whatever is still buffered when the writable side ends.
      try {
        if (buffer.length > 0) {
          this.push(buffer);
        }
        callback();
      } catch (error) {
        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
      }
    }
  });
}
|
package/package.json
CHANGED
|
@@ -5,7 +5,7 @@
|
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"name": "@reliverse/relifso",
|
|
7
7
|
"type": "module",
|
|
8
|
-
"version": "1.4.
|
|
8
|
+
"version": "1.4.1",
|
|
9
9
|
"keywords": [
|
|
10
10
|
"fs",
|
|
11
11
|
"file",
|
|
@@ -38,7 +38,8 @@
|
|
|
38
38
|
"bin",
|
|
39
39
|
"package.json",
|
|
40
40
|
"README.md",
|
|
41
|
-
"LICENSE"
|
|
41
|
+
"LICENSE",
|
|
42
|
+
"LICENSES"
|
|
42
43
|
],
|
|
43
44
|
"main": "./bin/mod.js",
|
|
44
45
|
"module": "./bin/mod.js",
|
package/bin/impl/create-file.js
DELETED
|
@@ -1,21 +0,0 @@
|
|
|
1
|
-
import { existsSync } from "node:fs";
|
|
2
|
-
import { stat } from "node:fs/promises";
|
|
3
|
-
import { writeFileSync } from "./write-file.js";
|
|
4
|
-
import { writeFile } from "./write-file.js";
|
|
5
|
-
/**
 * Synchronously creates `file` with `content` unless it already exists.
 * Existing files are left untouched.
 */
export function createFileSync(file, content = "") {
  if (!existsSync(file)) {
    return writeFileSync(file, content);
  }
}
/**
 * Async variant: creates `file` with `content` unless it already exists.
 * Existence is probed via stat(); any stat failure other than ENOENT is
 * rethrown unchanged.
 */
export async function createFile(file, content = "") {
  let missing = false;
  try {
    await stat(file);
  } catch (error) {
    if (error.code !== "ENOENT") {
      throw error;
    }
    missing = true;
  }
  if (missing) {
    return writeFile(file, content);
  }
}
|
package/bin/impl/dive-async.d.ts
DELETED
|
@@ -1,11 +0,0 @@
|
|
|
1
|
-
import type { Stats } from "node:fs";
import type { DiveOptions } from "./dive.js";
/**
 * Recursively dives into a directory and yields files and directories.
 * @param directory - The directory to dive into.
 * @param action - An optional callback function to execute for each file or directory.
 * @param options - An optional object containing options for the dive.
 * @returns A Promise that resolves to an array of file paths if no action is provided, or void if an action is provided.
 */
export declare function dive(directory: string, action: (file: string, stat: Stats) => void | Promise<void>, options?: DiveOptions): Promise<void>;
/** Overload without an action: collects and returns the matching paths. */
export declare function dive(directory: string, options?: DiveOptions): Promise<string[]>;
|
package/bin/impl/dive-async.js
DELETED
|
@@ -1,88 +0,0 @@
|
|
|
1
|
-
import { readdir as nodeReaddirInternal, stat as nodeStatInternal } from "node:fs/promises";
|
|
2
|
-
import { join as pathJoin } from "node:path";
|
|
3
|
-
import { isBun, getStatsBun } from "./bun.js";
|
|
4
|
-
// Async-generator worker behind dive(): walks `currentPath` and yields
// { file, stat } entries, honouring depth/ignore/visibility options.
// Unreadable directories and unstat-able entries are skipped silently.
async function* _diveWorker(currentPath, options, currentDepth) {
  const maxDepth = options.depth ?? Number.POSITIVE_INFINITY;
  if (currentDepth > maxDepth) {
    return;
  }
  let entries;
  try {
    entries = await nodeReaddirInternal(currentPath, { withFileTypes: true });
  } catch (_err) {
    // Directory unreadable (permissions, vanished, …): skip it silently.
    return;
  }
  for (const entry of entries) {
    const entryPath = pathJoin(currentPath, entry.name);
    // Dotfiles are hidden unless options.all is set.
    if (!(options.all ?? false) && entry.name.startsWith(".")) {
      continue;
    }
    if (options.ignore) {
      // Array form matches substrings of the entry NAME; RegExp form is
      // tested against the full PATH — note the asymmetry.
      if (Array.isArray(options.ignore) && options.ignore.some((pattern) => entry.name.includes(pattern))) {
        continue;
      }
      if (options.ignore instanceof RegExp && options.ignore.test(entryPath)) {
        continue;
      }
    }
    let entryStat;
    try {
      if (isBun) {
        // Prefer the Bun-native stat; fall back to node:fs on any failure.
        try {
          entryStat = await getStatsBun(entryPath);
        } catch (_error) {
          entryStat = await nodeStatInternal(entryPath);
        }
      } else {
        entryStat = await nodeStatInternal(entryPath);
      }
    } catch (_err) {
      // Entry vanished or is unstat-able: skip it.
      continue;
    }
    if (entry.isDirectory()) {
      if (options.directories ?? false) {
        yield { file: entryPath, stat: entryStat };
      }
      if (options.recursive ?? true) {
        if (currentDepth < maxDepth) {
          yield* _diveWorker(entryPath, options, currentDepth + 1);
        }
      }
    } else if (entry.isFile()) {
      if (options.files ?? true) {
        yield { file: entryPath, stat: entryStat };
      }
      // NOTE(review): entries that are neither plain file nor directory
      // (symlinks, sockets, FIFOs) are never yielded — confirm intended.
    }
  }
}
// dive(directory, action?, options?): walks `directory` recursively.
// With an `action` callback, invokes it per entry and resolves to void;
// without one, resolves to the array of matching paths.
export async function dive(directory, actionOrOptions, optionsOnly) {
  let action;
  let options;
  // Disambiguate the overloads: second argument may be the action callback
  // or the options object.
  if (typeof actionOrOptions === "function") {
    action = actionOrOptions;
    options = optionsOnly;
  } else {
    options = actionOrOptions;
  }
  const currentOptions = {
    recursive: true,
    files: true,
    directories: false,
    all: false,
    depth: Number.POSITIVE_INFINITY,
    ...options
    // User options override defaults
  };
  if (action) {
    for await (const { file, stat: entryStat } of _diveWorker(directory, currentOptions, 0)) {
      await action(file, entryStat);
    }
    return;
  } else {
    const results = [];
    for await (const { file } of _diveWorker(directory, currentOptions, 0)) {
      results.push(file);
    }
    return results;
  }
}
|
package/bin/impl/empty-dir.d.ts
DELETED
package/bin/impl/empty-dir.js
DELETED
|
@@ -1,24 +0,0 @@
|
|
|
1
|
-
import { existsSync, readdirSync, rmSync } from "node:fs";
|
|
2
|
-
import { readdir, rm, stat } from "node:fs/promises";
|
|
3
|
-
import path from "node:path";
|
|
4
|
-
/**
 * Synchronously removes every entry inside `dir` (files and subtrees).
 * The directory itself is kept; a missing directory is a no-op.
 */
export function emptyDirSync(dir) {
  if (!existsSync(dir)) {
    return;
  }
  const entries = readdirSync(dir);
  for (const entry of entries) {
    rmSync(path.resolve(dir, entry), { recursive: true, force: true });
  }
}
/**
 * Async variant: removes every entry inside `dir`, keeping the directory.
 * A missing directory is a no-op; other stat failures are rethrown.
 */
export async function emptyDir(dir) {
  try {
    await stat(dir);
  } catch (error) {
    if (error.code === "ENOENT") {
      return;
    }
    throw error;
  }
  const entries = await readdir(dir);
  for (const entry of entries) {
    await rm(path.resolve(dir, entry), { recursive: true, force: true });
  }
}
|
package/bin/impl/file-utils.d.ts
DELETED
|
@@ -1,20 +0,0 @@
|
|
|
1
|
-
/** Reads a file as text (default utf8); a Buffer is returned when encoding is null. */
export declare function readText(filePath: string, options?: BufferEncoding | {
    encoding?: BufferEncoding | null;
    flag?: string;
}): Promise<string | Buffer<ArrayBufferLike>>;
/** Synchronous variant of readText. */
export declare function readTextSync(filePath: string, options?: BufferEncoding | {
    encoding?: BufferEncoding | null;
    flag?: string;
}): string | Buffer<ArrayBufferLike>;
/** Reads a file and splits its contents on \r\n or \n into lines. */
export declare function readLines(filePath: string, options?: BufferEncoding | {
    encoding?: BufferEncoding | null;
    flag?: string;
}): Promise<string[]>;
/** Synchronous variant of readLines. */
export declare function readLinesSync(filePath: string, options?: BufferEncoding | {
    encoding?: BufferEncoding | null;
    flag?: string;
}): string[];
/** True if the path exists and is a directory; false on ENOENT/ENOTDIR. */
export declare function isDirectory(filePath: string): Promise<boolean>;
/** Synchronous variant of isDirectory. */
export declare function isDirectorySync(filePath: string): boolean;
/** True if the path is a symbolic link (lstat, link itself not followed); false on ENOENT. */
export declare function isSymlink(filePath: string): Promise<boolean>;
/** Synchronous variant of isSymlink. */
export declare function isSymlinkSync(filePath: string): boolean;
|
package/bin/impl/file-utils.js
DELETED
|
@@ -1,63 +0,0 @@
|
|
|
1
|
-
import { statSync, lstatSync } from "node:fs";
|
|
2
|
-
import { stat, lstat } from "node:fs/promises";
|
|
3
|
-
import { readFile, readFileSync } from "./read-file.js";
|
|
4
|
-
/** Reads a file as text (default utf8); thin wrapper over readFile. */
export async function readText(filePath, options = "utf8") {
  return readFile(filePath, options);
}
/** Synchronous variant of readText. */
export function readTextSync(filePath, options = "utf8") {
  return readFileSync(filePath, options);
}
// Normalizes a string-or-object options argument to the object form.
const normalizeReadOptions = (options) =>
  typeof options === "string" ? { encoding: options } : options;
/** Reads a file and splits its contents on \r\n or \n into lines. */
export async function readLines(filePath, options = { encoding: "utf8" }) {
  const raw = await readFile(filePath, { ...normalizeReadOptions(options), encoding: null });
  return raw.toString().split(/\r?\n/);
}
/** Synchronous variant of readLines. */
export function readLinesSync(filePath, options = { encoding: "utf8" }) {
  const raw = readFileSync(filePath, { ...normalizeReadOptions(options), encoding: null });
  return raw.toString().split(/\r?\n/);
}
/** True if the path exists and is a directory; false on ENOENT/ENOTDIR. */
export async function isDirectory(filePath) {
  try {
    return (await stat(filePath)).isDirectory();
  } catch (error) {
    if (error.code === "ENOENT" || error.code === "ENOTDIR") {
      return false;
    }
    throw error;
  }
}
/** Synchronous variant of isDirectory. */
export function isDirectorySync(filePath) {
  try {
    return statSync(filePath).isDirectory();
  } catch (error) {
    if (error.code === "ENOENT" || error.code === "ENOTDIR") {
      return false;
    }
    throw error;
  }
}
/** True if the path is a symbolic link (lstat — the link itself); false on ENOENT. */
export async function isSymlink(filePath) {
  try {
    return (await lstat(filePath)).isSymbolicLink();
  } catch (error) {
    if (error.code === "ENOENT") {
      return false;
    }
    throw error;
  }
}
/** Synchronous variant of isSymlink. */
export function isSymlinkSync(filePath) {
  try {
    return lstatSync(filePath).isSymbolicLink();
  } catch (error) {
    if (error.code === "ENOENT") {
      return false;
    }
    throw error;
  }
}
|
|
File without changes
|
|
File without changes
|