@reliverse/relifso 1.3.1 → 1.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSES +16 -0
- package/README.md +106 -20
- package/bin/impl/bun.d.ts +11 -0
- package/bin/impl/bun.js +23 -0
- package/bin/impl/{node/copy.d.ts → copy.d.ts} +15 -3
- package/bin/impl/copy.js +229 -0
- package/bin/impl/create.d.ts +34 -0
- package/bin/impl/create.js +54 -0
- package/bin/impl/{node/dive.d.ts → dive.d.ts} +10 -0
- package/bin/impl/dive.js +145 -0
- package/bin/impl/empty.d.ts +28 -0
- package/bin/impl/empty.js +75 -0
- package/bin/impl/extras.d.ts +35 -0
- package/bin/impl/extras.js +112 -0
- package/bin/impl/json-utils.d.ts +30 -0
- package/bin/impl/json-utils.js +46 -0
- package/bin/impl/{node/mkdirs.js → mkdirs.js} +10 -3
- package/bin/impl/{node/move.d.ts → move.d.ts} +10 -0
- package/bin/impl/move.js +140 -0
- package/bin/impl/{node/output-file.d.ts → output-file.d.ts} +3 -2
- package/bin/impl/{node/output-json.d.ts → output-json.d.ts} +7 -2
- package/bin/impl/output-json.js +77 -0
- package/bin/impl/read-file.d.ts +31 -0
- package/bin/impl/read-file.js +165 -0
- package/bin/impl/{node/read-json.d.ts → read-json.d.ts} +9 -0
- package/bin/impl/read-json.js +241 -0
- package/bin/impl/stats.d.ts +31 -0
- package/bin/impl/stats.js +141 -0
- package/bin/impl/write-file.d.ts +31 -0
- package/bin/impl/write-file.js +257 -0
- package/bin/impl/write-json.d.ts +41 -0
- package/bin/impl/write-json.js +135 -0
- package/bin/mod.d.ts +89 -57
- package/bin/mod.js +131 -164
- package/bin/utils/json/helpers/JSONRepairError.d.ts +4 -0
- package/bin/utils/json/helpers/JSONRepairError.js +7 -0
- package/bin/utils/json/helpers/JsonSchemaError.d.ts +6 -0
- package/bin/utils/json/helpers/JsonSchemaError.js +6 -0
- package/bin/utils/json/helpers/stringUtils.d.ts +64 -0
- package/bin/utils/json/helpers/stringUtils.js +87 -0
- package/bin/utils/json/regular/jsonc.d.ts +45 -0
- package/bin/utils/json/regular/jsonc.js +88 -0
- package/bin/utils/json/regular/jsonrepair.d.ts +17 -0
- package/bin/utils/json/regular/jsonrepair.js +576 -0
- package/bin/utils/json/regular/validate.d.ts +22 -0
- package/bin/utils/json/regular/validate.js +52 -0
- package/bin/utils/json/stream/JsonStreamError.d.ts +6 -0
- package/bin/utils/json/stream/JsonStreamError.js +6 -0
- package/bin/utils/json/stream/buffer/InputBuffer.d.ts +13 -0
- package/bin/utils/json/stream/buffer/InputBuffer.js +68 -0
- package/bin/utils/json/stream/buffer/OutputBuffer.d.ts +17 -0
- package/bin/utils/json/stream/buffer/OutputBuffer.js +101 -0
- package/bin/utils/json/stream/core.d.ts +10 -0
- package/bin/utils/json/stream/core.js +695 -0
- package/bin/utils/json/stream/jsonl.d.ts +21 -0
- package/bin/utils/json/stream/jsonl.js +55 -0
- package/bin/utils/json/stream/parser.d.ts +14 -0
- package/bin/utils/json/stream/parser.js +81 -0
- package/bin/utils/json/stream/stack.d.ts +19 -0
- package/bin/utils/json/stream/stack.js +43 -0
- package/bin/utils/json/stream/stream.d.ts +6 -0
- package/bin/utils/json/stream/stream.js +30 -0
- package/bin/utils/json/stream/writer.d.ts +14 -0
- package/bin/utils/json/stream/writer.js +44 -0
- package/bin/utils/log.d.ts +1 -0
- package/bin/utils/log.js +7 -0
- package/package.json +4 -3
- package/bin/impl/node/copy.js +0 -94
- package/bin/impl/node/create-file.d.ts +0 -2
- package/bin/impl/node/create-file.js +0 -21
- package/bin/impl/node/dive.js +0 -56
- package/bin/impl/node/empty-dir.d.ts +0 -2
- package/bin/impl/node/empty-dir.js +0 -24
- package/bin/impl/node/move.js +0 -93
- package/bin/impl/node/output-json.js +0 -15
- package/bin/impl/node/read-file.d.ts +0 -30
- package/bin/impl/node/read-file.js +0 -30
- package/bin/impl/node/read-json.js +0 -50
- package/bin/impl/node/write-file.d.ts +0 -20
- package/bin/impl/node/write-file.js +0 -23
- package/bin/impl/node/write-json.d.ts +0 -28
- package/bin/impl/node/write-json.js +0 -22
- package/bin/impl/utils/additional.d.ts +0 -15
- package/bin/impl/utils/additional.js +0 -47
- /package/bin/impl/{node/mkdirs.d.ts → mkdirs.d.ts} +0 -0
- /package/bin/impl/{node/output-file.js → output-file.js} +0 -0
- /package/bin/impl/{node/path-exists.d.ts → path-exists.d.ts} +0 -0
- /package/bin/impl/{node/path-exists.js → path-exists.js} +0 -0
- /package/bin/impl/{node/remove.d.ts → remove.d.ts} +0 -0
- /package/bin/impl/{node/remove.js → remove.js} +0 -0
package/bin/utils/json/stream/jsonl.js
ADDED

@@ -0,0 +1,55 @@
+import { Transform } from "node:stream";
+import { JsonStreamError } from "./JsonStreamError.js";
+export function createJsonlParser(options = {}) {
+  const {
+    chunkSize = 1024 * 1024,
+    // 1MB default chunk size
+    reviver
+  } = options;
+  let buffer = "";
+  return new Transform({
+    transform(chunk, _encoding, callback) {
+      try {
+        const text = chunk.toString();
+        buffer += text;
+        const lines = buffer.split("\n");
+        buffer = lines.pop() || "";
+        for (const line of lines) {
+          if (line.trim()) {
+            try {
+              const parsed = JSON.parse(line, reviver);
+              this.push(parsed);
+            } catch (error) {
+              const parseError = error;
+              throw new JsonStreamError(`Failed to parse JSONL line: ${parseError.message}`);
+            }
+          }
+        }
+        if (buffer.length > chunkSize) {
+          throw new JsonStreamError("Chunk size exceeded without finding complete JSONL line");
+        }
+        callback();
+      } catch (error) {
+        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
+      }
+    }
+  });
+}
+export function createJsonlWriter(options = {}) {
+  const { replacer } = options;
+  return new Transform({
+    transform(chunk, _encoding, callback) {
+      try {
+        const jsonStr = JSON.stringify(chunk, replacer);
+        if (jsonStr === void 0) {
+          throw new JsonStreamError("Failed to stringify JSON object");
+        }
+        this.push(`${jsonStr}
+`);
+        callback();
+      } catch (error) {
+        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
+      }
+    }
+  });
+}
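A minimal usage sketch for the new JSONL parser (the relative import path and `records.jsonl` are illustrative; note that as published the Transform is not constructed with `readableObjectMode`, so consuming the pushed objects may need adaptation):

```js
import { createReadStream } from "node:fs";
import { createJsonlParser } from "./jsonl.js"; // illustrative path

// Each complete, non-empty line is JSON.parse'd and pushed downstream;
// a trailing partial line stays buffered until the next chunk arrives.
createReadStream("records.jsonl", "utf8")
  .pipe(createJsonlParser())
  .on("data", (record) => console.log(record))
  .on("error", (err) => console.error(err.message));
```
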
package/bin/utils/json/stream/parser.d.ts
ADDED

@@ -0,0 +1,14 @@
+import { Transform } from "node:stream";
+interface ParserOptions {
+  chunkSize?: number;
+  maxDepth?: number;
+  reviver?: (key: string, value: unknown) => unknown;
+}
+/**
+ * Creates a transform stream that parses JSON data chunk by chunk.
+ *
+ * @param options - Parser options
+ * @returns A transform stream that emits parsed JSON objects
+ */
+export declare function createJsonStreamParser(options?: ParserOptions): Transform;
+export {};
package/bin/utils/json/stream/parser.js
ADDED

@@ -0,0 +1,81 @@
+import { Transform } from "node:stream";
+import { JsonStreamError } from "./JsonStreamError.js";
+export function createJsonStreamParser(options = {}) {
+  const {
+    chunkSize = 1024 * 1024,
+    // 1MB default chunk size
+    maxDepth = 100,
+    reviver
+  } = options;
+  let buffer = "";
+  let depth = 0;
+  let inString = false;
+  let escapeNext = false;
+  return new Transform({
+    transform(chunk, _encoding, callback) {
+      try {
+        const text = chunk.toString();
+        buffer += text;
+        while (buffer.length > 0) {
+          const start = buffer.indexOf("{");
+          if (start === -1) {
+            buffer = "";
+            break;
+          }
+          buffer = buffer.slice(start);
+          depth = 0;
+          inString = false;
+          escapeNext = false;
+          let end = -1;
+          for (let i = 0; i < buffer.length; i++) {
+            const char = buffer[i];
+            if (escapeNext) {
+              escapeNext = false;
+              continue;
+            }
+            if (char === "\\") {
+              escapeNext = true;
+              continue;
+            }
+            if (char === '"' && !escapeNext) {
+              inString = !inString;
+              continue;
+            }
+            if (!inString) {
+              if (char === "{") {
+                depth++;
+                if (depth > maxDepth) {
+                  throw new JsonStreamError(`Maximum nesting depth of ${maxDepth} exceeded`);
+                }
+              } else if (char === "}") {
+                depth--;
+                if (depth === 0) {
+                  end = i + 1;
+                  break;
+                }
+              }
+            }
+          }
+          if (end === -1) {
+            if (buffer.length > chunkSize) {
+              throw new JsonStreamError("Chunk size exceeded without finding complete JSON object");
+            }
+            break;
+          }
+          const jsonStr = buffer.slice(0, end);
+          try {
+            const parsed = JSON.parse(jsonStr, reviver);
+            this.push(parsed);
+          } catch (error) {
+            const parseError = error;
+            throw new JsonStreamError(`Failed to parse JSON: ${parseError.message}`);
+          }
+          buffer = buffer.slice(end);
+        }
+        callback();
+      } catch (error) {
+        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
+      }
+    }
+  });
+}
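To illustrate the brace-balancing scan above, a sketch (import path illustrative; the same object-mode caveat as the JSONL parser applies): the parser skips to the first `{`, tracks string and escape state, and emits each object once its braces balance, even when an object spans chunk boundaries. Because the scan keys on `indexOf("{")`, top-level arrays and primitives are silently discarded.

```js
import { Readable } from "node:stream";
import { createJsonStreamParser } from "./parser.js"; // illustrative path

// Two concatenated objects, split at an arbitrary point across chunks.
Readable.from(['{"a":1}{"b":{"', 'c":2}}'])
  .pipe(createJsonStreamParser({ maxDepth: 10 }))
  .on("data", (obj) => console.log(obj)) // { a: 1 } then { b: { c: 2 } }
  .on("error", (err) => console.error(err.message));
```
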
package/bin/utils/json/stream/stack.d.ts
ADDED

@@ -0,0 +1,19 @@
+export declare enum Caret {
+  beforeValue = "beforeValue",
+  afterValue = "afterValue",
+  beforeKey = "beforeKey"
+}
+export declare enum StackType {
+  root = "root",
+  object = "object",
+  array = "array",
+  ndJson = "ndJson",
+  functionCall = "dataType"
+}
+export declare function createStack(): {
+  readonly type: StackType | undefined;
+  readonly caret: Caret;
+  pop(): true;
+  push(type: StackType, newCaret: Caret): true;
+  update(newCaret: Caret): true;
+};
package/bin/utils/json/stream/stack.js
ADDED

@@ -0,0 +1,43 @@
+export var Caret = /* @__PURE__ */ ((Caret2) => {
+  Caret2["beforeValue"] = "beforeValue";
+  Caret2["afterValue"] = "afterValue";
+  Caret2["beforeKey"] = "beforeKey";
+  return Caret2;
+})(Caret || {});
+export var StackType = /* @__PURE__ */ ((StackType2) => {
+  StackType2["root"] = "root";
+  StackType2["object"] = "object";
+  StackType2["array"] = "array";
+  StackType2["ndJson"] = "ndJson";
+  StackType2["functionCall"] = "dataType";
+  return StackType2;
+})(StackType || {});
+export function createStack() {
+  const stack = ["root" /* root */];
+  let caret = "beforeValue" /* beforeValue */;
+  return {
+    get type() {
+      return last(stack);
+    },
+    get caret() {
+      return caret;
+    },
+    pop() {
+      stack.pop();
+      caret = "afterValue" /* afterValue */;
+      return true;
+    },
+    push(type, newCaret) {
+      stack.push(type);
+      caret = newCaret;
+      return true;
+    },
+    update(newCaret) {
+      caret = newCaret;
+      return true;
+    }
+  };
+}
+function last(array) {
+  return array[array.length - 1];
+}
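A short sketch of how this stack tracks parse position; the values in the comments follow directly from the code above (import path illustrative):

```js
import { Caret, StackType, createStack } from "./stack.js";

const stack = createStack();
console.log(stack.type, stack.caret); // "root" "beforeValue"

stack.push(StackType.object, Caret.beforeKey); // entered "{"
stack.update(Caret.beforeValue);               // consumed a key and ":"
stack.pop();                                   // closed "}"
console.log(stack.type, stack.caret);          // "root" "afterValue"
```
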
package/bin/utils/json/stream/stream.js
ADDED

@@ -0,0 +1,30 @@
+import { Transform } from "node:stream";
+import { jsonrepairCore } from "./core.js";
+export function jsonrepairTransform(options) {
+  const repair = jsonrepairCore({
+    onData: (chunk) => transform.push(chunk),
+    bufferSize: options?.bufferSize,
+    chunkSize: options?.chunkSize
+  });
+  const transform = new Transform({
+    transform(chunk, _encoding, callback) {
+      try {
+        repair.transform(chunk.toString());
+      } catch (err) {
+        this.emit("error", err);
+      } finally {
+        callback();
+      }
+    },
+    flush(callback) {
+      try {
+        repair.flush();
+      } catch (err) {
+        this.emit("error", err);
+      } finally {
+        callback();
+      }
+    }
+  });
+  return transform;
+}
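A sketch of piping malformed JSON through the transform (input string illustrative; what gets repaired depends on `jsonrepairCore` in the added core.js, which handles string input and output, so no object-mode concerns here):

```js
import { Readable } from "node:stream";
import { jsonrepairTransform } from "./stream.js"; // illustrative path

// Single quotes and a trailing comma are the kind of damage
// a jsonrepair pass is designed to fix while streaming.
Readable.from(["{'name': 'relifso', }"])
  .pipe(jsonrepairTransform({ bufferSize: 65536, chunkSize: 65536 }))
  .on("data", (chunk) => process.stdout.write(chunk.toString()))
  .on("error", (err) => console.error("repair failed:", err.message));
```
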
package/bin/utils/json/stream/writer.d.ts
ADDED

@@ -0,0 +1,14 @@
+import { Transform } from "node:stream";
+interface WriterOptions {
+  chunkSize?: number;
+  replacer?: (key: string, value: unknown) => unknown | (number | string)[] | null;
+  spaces?: string | number;
+}
+/**
+ * Creates a transform stream that writes JSON data chunk by chunk.
+ *
+ * @param options - Writer options
+ * @returns A transform stream that accepts objects and outputs JSON strings
+ */
+export declare function createJsonStreamWriter(options?: WriterOptions): Transform;
+export {};
package/bin/utils/json/stream/writer.js
ADDED

@@ -0,0 +1,44 @@
+import { Transform } from "node:stream";
+import { JsonStreamError } from "./JsonStreamError.js";
+export function createJsonStreamWriter(options = {}) {
+  const {
+    chunkSize = 1024 * 1024,
+    // 1MB default chunk size
+    replacer,
+    spaces
+  } = options;
+  let isFirst = true;
+  let buffer = "";
+  return new Transform({
+    transform(chunk, _encoding, callback) {
+      try {
+        const jsonStr = JSON.stringify(chunk, replacer, spaces);
+        if (jsonStr === void 0) {
+          throw new JsonStreamError("Failed to stringify JSON object");
+        }
+        if (!isFirst) {
+          buffer += ",";
+        }
+        isFirst = false;
+        buffer += jsonStr;
+        if (buffer.length >= chunkSize) {
+          this.push(buffer);
+          buffer = "";
+        }
+        callback();
+      } catch (error) {
+        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
+      }
+    },
+    flush(callback) {
+      try {
+        if (buffer.length > 0) {
+          this.push(buffer);
+        }
+        callback();
+      } catch (error) {
+        callback(error instanceof Error ? error : new JsonStreamError(String(error)));
+      }
+    }
+  });
+}
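The writer comma-joins each stringified value and flushes in `chunkSize` batches; it emits no surrounding brackets, so a caller building a JSON array adds them. A sketch of the intended call pattern (import path illustrative; as published the Transform's writable side is not in object mode, so plain-object input may need adaptation):

```js
import { Readable } from "node:stream";
import { createJsonStreamWriter } from "./writer.js"; // illustrative path

process.stdout.write("[");
Readable.from([{ id: 1 }, { id: 2 }])
  .pipe(createJsonStreamWriter({ spaces: 0 }))
  .on("data", (chunk) => process.stdout.write(chunk.toString()))
  .on("end", () => process.stdout.write("]\n")); // prints [{"id":1},{"id":2}]
```
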
package/bin/utils/log.d.ts
ADDED

@@ -0,0 +1 @@
+export declare const logInternal: (msg: string | (() => string)) => void;

package/bin/utils/log.js
ADDED
package/package.json
CHANGED
@@ -1,11 +1,11 @@
 {
   "dependencies": {},
-  "description": "@reliverse/relifso is a modern filesystem toolkit
+  "description": "@reliverse/relifso is a modern node and bun filesystem toolkit. drop-in replacement for `node:fs` and `fs-extra` — powered by native promises, built with es modules, and packed with dx-focused and bun-aware utilities.",
   "homepage": "https://docs.reliverse.org",
   "license": "MIT",
   "name": "@reliverse/relifso",
   "type": "module",
-  "version": "1.3.1",
+  "version": "1.4.1",
   "keywords": [
     "fs",
     "file",

@@ -38,7 +38,8 @@
     "bin",
     "package.json",
     "README.md",
-    "LICENSE"
+    "LICENSE",
+    "LICENSES"
   ],
   "main": "./bin/mod.js",
   "module": "./bin/mod.js",

package/bin/impl/node/copy.js
DELETED
@@ -1,94 +0,0 @@
-import { copyFileSync, statSync, constants as fsConstants, readdirSync, rmSync } from "node:fs";
-import {
-  stat as statAsync,
-  copyFile as copyFileAsync,
-  constants as fsConstantsAsync,
-  readdir,
-  rm
-} from "node:fs/promises";
-import { dirname, join as joinPath, basename as basenamePath } from "node:path";
-import { mkdirsSync } from "./mkdirs.js";
-import { mkdirs } from "./mkdirs.js";
-export function copySync(src, dest, options = {}) {
-  const { overwrite = options.clobber || false, preserveTimestamps = false } = options;
-  const srcStat = statSync(src, { throwIfNoEntry: true });
-  if (!srcStat) {
-  }
-  let destFinal = dest;
-  const destStat = statSync(dest, { throwIfNoEntry: false });
-  if (!srcStat.isDirectory() && destStat?.isDirectory()) {
-    destFinal = joinPath(dest, basenamePath(src));
-  }
-  const destExists = statSync(destFinal, { throwIfNoEntry: false });
-  if (destExists && !overwrite) {
-    throw new Error(`Destination ${destFinal} already exists and overwrite is false.`);
-  }
-  const destDir = dirname(destFinal);
-  mkdirsSync(destDir);
-  if (srcStat.isDirectory()) {
-    if (overwrite && destExists) {
-      rmSync(destFinal, { recursive: true, force: true });
-    }
-    mkdirsSync(destFinal);
-    const entries = readdirSync(src);
-    for (const entry of entries) {
-      const srcEntry = joinPath(src, entry);
-      const destEntry = joinPath(destFinal, entry);
-      copySync(srcEntry, destEntry, options);
-    }
-  } else {
-    if (overwrite && destExists) {
-      rmSync(destFinal, { force: true });
-    }
-    copyFileSync(src, destFinal, preserveTimestamps ? fsConstants.COPYFILE_FICLONE : 0);
-    if (preserveTimestamps) {
-      console.warn("preserveTimestamps: utimesSync is not implemented for the moment.");
-    }
-  }
-}
-export async function copy(src, dest, options = {}) {
-  const { overwrite = options.clobber || false, preserveTimestamps = false } = options;
-  const srcStat = await statAsync(src).catch((e) => {
-    if (e.code === "ENOENT") return null;
-    throw e;
-  });
-  if (!srcStat) {
-  }
-  let destFinal = dest;
-  const destStat = await statAsync(dest).catch((e) => {
-    if (e.code === "ENOENT") return null;
-    throw e;
-  });
-  if (!srcStat?.isDirectory() && destStat?.isDirectory()) {
-    destFinal = joinPath(dest, basenamePath(src));
-  }
-  const destExists = await statAsync(destFinal).catch((e) => {
-    if (e.code === "ENOENT") return null;
-    throw e;
-  });
-  if (destExists && !overwrite) {
-    throw new Error(`Destination ${destFinal} already exists and overwrite is false.`);
-  }
-  const destDir = dirname(destFinal);
-  await mkdirs(destDir);
-  if (srcStat?.isDirectory()) {
-    if (overwrite && destExists) {
-      await rm(destFinal, { recursive: true, force: true });
-    }
-    await mkdirs(destFinal);
-    const entries = await readdir(src);
-    for (const entry of entries) {
-      const srcEntry = joinPath(src, entry);
-      const destEntry = joinPath(destFinal, entry);
-      await copy(srcEntry, destEntry, options);
-    }
-  } else {
-    if (overwrite && destExists) {
-      await rm(destFinal, { force: true });
-    }
-    await copyFileAsync(src, destFinal, preserveTimestamps ? fsConstantsAsync.COPYFILE_FICLONE : 0);
-    if (preserveTimestamps) {
-      console.warn("preserveTimestamps: utimes is not implemented for the moment.");
-    }
-  }
-}
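This node-only implementation is superseded by the consolidated `package/bin/impl/copy.js` (+229 in the file list above). The option handling visible in the deleted code suggests the following call pattern; a sketch, assuming `copy`/`copySync` remain re-exported from the package root:

```js
import { copy, copySync } from "@reliverse/relifso"; // assumes root re-export

// overwrite (or the legacy clobber alias) gated replacing an existing destination.
await copy("src-assets", "dist/assets", { overwrite: true });

// In the removed implementation, copying a file onto an existing
// directory path appended the source basename.
copySync("notes.txt", "archive");
```
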
package/bin/impl/node/create-file.js
DELETED

@@ -1,21 +0,0 @@
-import { existsSync } from "node:fs";
-import { stat } from "node:fs/promises";
-import { writeFileSync } from "./write-file.js";
-import { writeFile } from "./write-file.js";
-export function createFileSync(file, content = "") {
-  if (existsSync(file)) {
-    return;
-  }
-  return writeFileSync(file, content);
-}
-export async function createFile(file, content = "") {
-  try {
-    await stat(file);
-    return;
-  } catch (error) {
-    if (error.code === "ENOENT") {
-      return writeFile(file, content);
-    }
-    throw error;
-  }
-}
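For reference, both removed helpers were no-ops when the file already existed; the replacement lives in the new `bin/impl/create.js`. A sketch of the call pattern, assuming the pair stays exported from the package root:

```js
import { createFile, createFileSync } from "@reliverse/relifso"; // assumes root re-export

await createFile(".keep");              // creates an empty file if absent
createFileSync("config.txt", "k=v\n");  // existing files are left untouched
```
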
package/bin/impl/node/dive.js
DELETED
@@ -1,56 +0,0 @@
-import { readdirSync, statSync } from "node:fs";
-import { join } from "node:path";
-export function* diveSync(dir, callbackOrOptions, options) {
-  const currentDepth = 0;
-  let actualOptions = {
-    recursive: true,
-    files: true,
-    directories: false
-    // fs-extra's dive by default only yields files
-  };
-  let callback;
-  if (typeof callbackOrOptions === "function") {
-    callback = callbackOrOptions;
-    if (options) {
-      actualOptions = { ...actualOptions, ...options };
-    }
-  } else if (typeof callbackOrOptions === "object") {
-    actualOptions = { ...actualOptions, ...callbackOrOptions };
-  }
-  function* walk(currentPath, depth) {
-    if (actualOptions.depth !== void 0 && depth > actualOptions.depth) {
-      return;
-    }
-    const entries = readdirSync(currentPath, { withFileTypes: true });
-    for (const entry of entries) {
-      const entryPath = join(currentPath, entry.name);
-      if (!actualOptions.all && entry.name.startsWith(".")) {
-        continue;
-      }
-      if (actualOptions.ignore) {
-        if (Array.isArray(actualOptions.ignore) && actualOptions.ignore.includes(entry.name)) {
-          continue;
-        }
-        if (actualOptions.ignore instanceof RegExp && actualOptions.ignore.test(entryPath)) {
-          continue;
-        }
-      }
-      const stat = statSync(entryPath);
-      if (entry.isFile()) {
-        if (actualOptions.files) {
-          if (callback) callback(entryPath, stat);
-          yield entryPath;
-        }
-      } else if (entry.isDirectory()) {
-        if (actualOptions.directories) {
-          if (callback) callback(entryPath, stat);
-          yield entryPath;
-        }
-        if (actualOptions.recursive) {
-          yield* walk(entryPath, depth + 1);
-        }
-      }
-    }
-  }
-  yield* walk(dir, currentDepth);
-}
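The removed `diveSync` doubled as a generator and a callback walker, with the expanded version in the new `bin/impl/dive.js`. A sketch of both styles based on the deleted signature (directory names illustrative; assumes the root re-export):

```js
import { diveSync } from "@reliverse/relifso"; // assumes root re-export

// Generator style: yields file paths depth-first, skipping dotfiles
// unless { all: true } is set.
for (const file of diveSync("src", { ignore: ["node_modules"] })) {
  console.log(file);
}

// Callback style: fires (path, stats) per entry; iterating the
// generator is what drives the walk and the callbacks.
[...diveSync("src", (file, stat) => console.log(file, stat.size))];
```
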
package/bin/impl/node/empty-dir.js
DELETED

@@ -1,24 +0,0 @@
-import { existsSync, readdirSync, rmSync } from "node:fs";
-import { readdir, rm, stat } from "node:fs/promises";
-import path from "node:path";
-export function emptyDirSync(dir) {
-  if (!existsSync(dir)) {
-    return;
-  }
-  for (const file of readdirSync(dir)) {
-    rmSync(path.resolve(dir, file), { recursive: true, force: true });
-  }
-}
-export async function emptyDir(dir) {
-  try {
-    await stat(dir);
-  } catch (error) {
-    if (error.code === "ENOENT") {
-      return;
-    }
-    throw error;
-  }
-  for (const file of await readdir(dir)) {
-    await rm(path.resolve(dir, file), { recursive: true, force: true });
-  }
-}
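Call pattern for the removed pair, now superseded by the consolidated `bin/impl/empty.js`; per the deleted code, a missing directory is treated as already empty. A sketch, assuming the root re-export:

```js
import { emptyDir, emptyDirSync } from "@reliverse/relifso"; // assumes root re-export

await emptyDir("tmp/cache");    // removes contents, keeps the directory itself
emptyDirSync("does-not-exist"); // no-op rather than an error
```
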
package/bin/impl/node/move.js
DELETED
@@ -1,93 +0,0 @@
-import { renameSync, statSync, unlinkSync, copyFileSync } from "node:fs";
-import { rename, stat, unlink, copyFile } from "node:fs/promises";
-import { dirname, basename, join as joinPath } from "node:path";
-import { mkdirsSync } from "./mkdirs.js";
-import { mkdirs } from "./mkdirs.js";
-export function moveSync(src, dest, options = {}) {
-  let overwrite;
-  if (options.overwrite !== void 0) {
-    overwrite = options.overwrite;
-  } else if (options.clobber !== void 0) {
-    console.warn(
-      "Warning: The 'clobber' option in moveSync is deprecated and will be removed in a future version. Please use 'overwrite' instead."
-    );
-    overwrite = options.clobber;
-  } else {
-    overwrite = false;
-  }
-  const srcStat = statSync(src, { throwIfNoEntry: true });
-  if (!srcStat) {
-  }
-  let destFinal = dest;
-  const destStat = statSync(dest, { throwIfNoEntry: false });
-  if (destStat?.isDirectory()) {
-    destFinal = joinPath(dest, basename(src));
-  }
-  if (statSync(destFinal, { throwIfNoEntry: false }) && !overwrite) {
-    throw new Error(`Destination ${destFinal} already exists and overwrite is false.`);
-  }
-  const destDir = dirname(destFinal);
-  mkdirsSync(destDir);
-  try {
-    renameSync(src, destFinal);
-  } catch (err) {
-    if (err.code === "EXDEV") {
-      copyFileSync(src, destFinal);
-      unlinkSync(src);
-    } else if (err.code === "EISDIR" || err.code === "EPERM") {
-      copyFileSync(src, destFinal);
-      unlinkSync(src);
-    } else {
-      throw err;
-    }
-  }
-}
-export async function move(src, dest, options = {}) {
-  let overwrite;
-  if (options.overwrite !== void 0) {
-    overwrite = options.overwrite;
-  } else if (options.clobber !== void 0) {
-    console.warn(
-      "Warning: The 'clobber' option in move is deprecated and will be removed in a future version. Please use 'overwrite' instead."
-    );
-    overwrite = options.clobber;
-  } else {
-    overwrite = false;
-  }
-  const srcStat = await stat(src).catch((e) => {
-    if (e.code === "ENOENT") return null;
-    throw e;
-  });
-  if (!srcStat) {
-  }
-  let destFinal = dest;
-  const destStat = await stat(dest).catch((e) => {
-    if (e.code === "ENOENT") return null;
-    throw e;
-  });
-  if (destStat?.isDirectory()) {
-    destFinal = joinPath(dest, basename(src));
-  }
-  const destFinalStat = await stat(destFinal).catch((e) => {
-    if (e.code === "ENOENT") return null;
-    throw e;
-  });
-  if (destFinalStat && !overwrite) {
-    throw new Error(`Destination ${destFinal} already exists and overwrite is false.`);
-  }
-  const destDir = dirname(destFinal);
-  await mkdirs(destDir);
-  try {
-    await rename(src, destFinal);
-  } catch (err) {
-    if (err.code === "EXDEV") {
-      await copyFile(src, destFinal);
-      await unlink(src);
-    } else if (err.code === "EISDIR" || err.code === "EPERM") {
-      await copyFile(src, destFinal);
-      await unlink(src);
-    } else {
-      throw err;
-    }
-  }
-}
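A sketch of the removed `move` semantics, now superseded by the consolidated `bin/impl/move.js`: `overwrite` gates clobbering, `clobber` is accepted with a deprecation warning, and cross-device renames (EXDEV) fall back to copy-then-unlink. Assumes the root re-export:

```js
import { move, moveSync } from "@reliverse/relifso"; // assumes root re-export

// Throws if dest exists and overwrite is false (the default).
await move("build/app.js", "dist/app.js", { overwrite: true });

// Moving onto an existing directory path appends the source basename.
moveSync("report.pdf", "archive");
```
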
package/bin/impl/node/output-json.js
DELETED

@@ -1,15 +0,0 @@
-import { dirname } from "node:path";
-import { mkdirsSync } from "./mkdirs.js";
-import { mkdirs } from "./mkdirs.js";
-import { writeJsonSync } from "./write-json.js";
-import { writeJson } from "./write-json.js";
-export function outputJsonSync(file, data, options) {
-  const dir = dirname(file);
-  mkdirsSync(dir);
-  writeJsonSync(file, data, options);
-}
-export async function outputJson(file, data, options) {
-  const dir = dirname(file);
-  await mkdirs(dir);
-  await writeJson(file, data, options);
-}
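The removed pair simply ensured the parent directory before delegating to `writeJson`; the replacement is the new `bin/impl/output-json.js`. A call-pattern sketch, assuming the root re-export; the `{ spaces: 2 }` option follows fs-extra conventions and is illustrative:

```js
import { outputJson, outputJsonSync } from "@reliverse/relifso"; // assumes root re-export

// Parent directories are created as needed before the JSON is written.
await outputJson("reports/2024/summary.json", { ok: true }, { spaces: 2 });
outputJsonSync("cache/index.json", []);
```
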