@entergreat/file-utils 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +120 -0
- package/package.json +32 -0
- package/src/index.js +175 -0
package/README.md
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
# @entergreat/file-utils
|
|
2
|
+
|
|
3
|
+
File utilities for reading, writing, converting and managing files.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install @entergreat/file-utils
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Usage
|
|
12
|
+
|
|
13
|
+
```js
|
|
14
|
+
import FileUtils from "@entergreat/file-utils";
|
|
15
|
+
|
|
16
|
+
const fu = new FileUtils("/path/to/base/directory");
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
## API
|
|
20
|
+
|
|
21
|
+
### Constructor
|
|
22
|
+
|
|
23
|
+
```js
|
|
24
|
+
new FileUtils(baseDir)
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
Creates a new FileUtils instance. All relative paths will be resolved from `baseDir`. Defaults to `process.cwd()`.
|
|
28
|
+
|
|
29
|
+
### Methods
|
|
30
|
+
|
|
31
|
+
#### `getFileNamesFromFolder(folderPath)`
|
|
32
|
+
|
|
33
|
+
Returns an array of file names in the specified folder.
|
|
34
|
+
|
|
35
|
+
```js
|
|
36
|
+
const files = await fu.getFileNamesFromFolder("data");
|
|
37
|
+
// ["file1.json", "file2.json"]
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
#### `saveNewFile(text, filename, folderPath)`
|
|
41
|
+
|
|
42
|
+
Saves text content to a file. Creates the folder if it doesn't exist.
|
|
43
|
+
|
|
44
|
+
```js
|
|
45
|
+
const filePath = await fu.saveNewFile("Hello world", "greeting.txt", "output");
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
#### `getFileContent(filename, folderPath)`
|
|
49
|
+
|
|
50
|
+
Reads and returns the content of a file.
|
|
51
|
+
|
|
52
|
+
```js
|
|
53
|
+
const content = await fu.getFileContent("config.json", "settings");
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
#### `deleteFile(fileName, folderPath)`
|
|
57
|
+
|
|
58
|
+
Deletes a single file.
|
|
59
|
+
|
|
60
|
+
```js
|
|
61
|
+
await fu.deleteFile("temp.txt", "cache");
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
#### `deleteDirectory(folderPath)`
|
|
65
|
+
|
|
66
|
+
Recursively deletes a directory and all its contents. Returns `1` if deleted, `0` if the directory didn't exist.
|
|
67
|
+
|
|
68
|
+
```js
|
|
69
|
+
await fu.deleteDirectory("temp");
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
#### `convertJsonToCsv(folderPath, outputFolder)`
|
|
73
|
+
|
|
74
|
+
Converts all JSON files in a folder to CSV format. Handles nested objects by flattening them with dot notation.
|
|
75
|
+
|
|
76
|
+
```js
|
|
77
|
+
const csvFiles = await fu.convertJsonToCsv("json-data", "csv-output");
|
|
78
|
+
// ["/path/to/csv-output/file1.csv", "/path/to/csv-output/file2.csv"]
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
#### `zipFolder(folderPath, zipFileName, outputFolder)`
|
|
82
|
+
|
|
83
|
+
Compresses a folder into a ZIP file with maximum compression.
|
|
84
|
+
|
|
85
|
+
```js
|
|
86
|
+
const zipPath = await fu.zipFolder("data", "backup.zip", "archives");
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
#### `combineJsonFiles(folderPath)`
|
|
90
|
+
|
|
91
|
+
Combines all JSON files in a folder into a single array. Each item gets a `_sourceFile` property indicating which file it came from.
|
|
92
|
+
|
|
93
|
+
```js
|
|
94
|
+
const combined = await fu.combineJsonFiles("json-data");
|
|
95
|
+
// [{ name: "item1", _sourceFile: "file1" }, { name: "item2", _sourceFile: "file2" }]
|
|
96
|
+
```
|
|
97
|
+
|
|
98
|
+
#### `saveAsJsonAndCsv(data, fileName, folderPath)`
|
|
99
|
+
|
|
100
|
+
Saves an array of objects as both JSON and CSV files.
|
|
101
|
+
|
|
102
|
+
```js
|
|
103
|
+
const data = [{ name: "Alice", age: 30 }, { name: "Bob", age: 25 }];
|
|
104
|
+
const { jsonPath, csvPath } = await fu.saveAsJsonAndCsv(data, "users", "output");
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
### Static Methods
|
|
108
|
+
|
|
109
|
+
#### `FileUtils.extractMainDomainName(url)`
|
|
110
|
+
|
|
111
|
+
Extracts the main domain name from a URL.
|
|
112
|
+
|
|
113
|
+
```js
|
|
114
|
+
const domain = await FileUtils.extractMainDomainName("https://www.example.com/page");
|
|
115
|
+
// "example"
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
## License
|
|
119
|
+
|
|
120
|
+
UNLICENSED - Private package for EnterGreat internal use.
|
package/package.json
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@entergreat/file-utils",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "File utilities for reading, writing, converting and managing files",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "src/index.js",
|
|
7
|
+
"exports": {
|
|
8
|
+
".": "./src/index.js"
|
|
9
|
+
},
|
|
10
|
+
"files": [
|
|
11
|
+
"src"
|
|
12
|
+
],
|
|
13
|
+
"keywords": [
|
|
14
|
+
"file",
|
|
15
|
+
"utils",
|
|
16
|
+
"json",
|
|
17
|
+
"csv",
|
|
18
|
+
"zip"
|
|
19
|
+
],
|
|
20
|
+
"author": "EnterGreat",
|
|
21
|
+
"license": "UNLICENSED",
|
|
22
|
+
"repository": {
|
|
23
|
+
"type": "git",
|
|
24
|
+
"url": "git+https://github.com/entergreat/file-utils.git"
|
|
25
|
+
},
|
|
26
|
+
"engines": {
|
|
27
|
+
"node": ">=18.0.0"
|
|
28
|
+
},
|
|
29
|
+
"dependencies": {
|
|
30
|
+
"archiver": "^7.0.1"
|
|
31
|
+
}
|
|
32
|
+
}
|
package/src/index.js
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
import { readdir, mkdir, writeFile, readFile, access, stat, unlink, rm } from "fs/promises";
|
|
2
|
+
import { createWriteStream } from "fs";
|
|
3
|
+
import archiver from "archiver";
|
|
4
|
+
import path from "path";
|
|
5
|
+
|
|
6
|
+
class FileUtils {
  /**
   * File utilities rooted at a base directory.
   * @param {string} [baseDir=process.cwd()] - Directory that relative folder paths resolve against.
   */
  constructor(baseDir = process.cwd()) {
    this.baseDir = baseDir;
  }

  /**
   * Resolves a folder path against the base directory.
   * Absolute paths are returned unchanged.
   * @param {string} folder - Relative or absolute folder path.
   * @returns {string} Resolved path.
   */
  resolvePath(folder) {
    return path.isAbsolute(folder) ? folder : path.join(this.baseDir, folder);
  }

  /**
   * Extracts the main domain name from a URL, e.g. "https://www.example.com/page" -> "example".
   * NOTE: no public-suffix list is used, so multi-part TLDs like "example.co.uk" yield "co".
   * @param {string} url - URL or bare hostname; a scheme is prepended if missing.
   * @returns {Promise<string>} Main domain label, or "unknown" when it cannot be determined.
   */
  static async extractMainDomainName(url) {
    // Guard: a non-string/blank input previously re-threw from inside the catch
    // block (url.trim() ran again on e.g. null); return the fallback instead.
    if (typeof url !== "string" || !url.trim()) {
      return "unknown";
    }
    try {
      let cleanUrl = url.trim();
      if (!cleanUrl.startsWith("http://") && !cleanUrl.startsWith("https://")) {
        cleanUrl = "https://" + cleanUrl;
      }
      let hostname = new URL(cleanUrl).hostname;
      if (hostname.startsWith("www.")) {
        hostname = hostname.substring(4);
      }
      const parts = hostname.split(".");
      // Second-to-last label is the registrable name for simple TLDs.
      return parts.length >= 2 ? parts[parts.length - 2] : parts[0];
    } catch {
      // URL constructor rejected the input; fall back to naive string parsing.
      const cleaned = url.trim().replace(/^https?:\/\//, "").replace(/^www\./, "");
      return cleaned.split("/")[0].split(".")[0] || "unknown";
    }
  }

  /**
   * Lists the entry names in a folder.
   * @param {string} folderPath - Folder, relative to baseDir or absolute.
   * @returns {Promise<string[]>} Entry names (files and subdirectories, as returned by readdir).
   */
  async getFileNamesFromFolder(folderPath) {
    return readdir(this.resolvePath(folderPath));
  }

  /**
   * Saves text content to a file, creating the folder if needed.
   * @param {string} text - Content to write.
   * @param {string} filename - Target file name.
   * @param {string} [folderPath="files"] - Destination folder.
   * @returns {Promise<string>} Absolute path of the written file.
   */
  async saveNewFile(text, filename, folderPath = "files") {
    const filesDir = this.resolvePath(folderPath);
    await mkdir(filesDir, { recursive: true });
    const filePath = path.join(filesDir, filename);
    await writeFile(filePath, text, "utf8");
    return filePath;
  }

  /**
   * Reads a file's content as UTF-8 text.
   * @param {string} filename - File name.
   * @param {string} [folderPath="files"] - Folder containing the file.
   * @returns {Promise<string>} File content.
   */
  async getFileContent(filename, folderPath = "files") {
    return readFile(path.join(this.resolvePath(folderPath), filename), "utf8");
  }

  /**
   * Deletes a single regular file.
   * @param {string} fileName - File name.
   * @param {string} folderPath - Folder containing the file.
   * @throws ENOENT (from stat) when the file does not exist.
   */
  async deleteFile(fileName, folderPath) {
    const filePath = path.join(this.resolvePath(folderPath), fileName);
    // Only unlink regular files; directories are skipped (use deleteDirectory).
    if ((await stat(filePath)).isFile()) {
      await unlink(filePath);
    }
  }

  /**
   * Recursively deletes a directory and its contents.
   * @param {string} [folderPath="files"] - Directory to remove.
   * @returns {Promise<number>} 1 if deleted, 0 if it did not exist.
   */
  async deleteDirectory(folderPath = "files") {
    const filesDir = this.resolvePath(folderPath);
    try {
      await access(filesDir);
      await rm(filesDir, { recursive: true, force: true });
      return 1;
    } catch {
      return 0;
    }
  }

  /**
   * Converts every *.json file in a folder to CSV, flattening nested objects
   * with dot notation. Files whose JSON parses to an empty array are skipped.
   * @param {string} folderPath - Folder containing the JSON files.
   * @param {string} [outputFolder=folderPath] - Destination folder for the CSVs.
   * @returns {Promise<string[]>} Paths of the written CSV files.
   * @throws Error when the folder contains no JSON files.
   */
  async convertJsonToCsv(folderPath, outputFolder = folderPath) {
    const sourceDir = this.resolvePath(folderPath);
    const outputDir = this.resolvePath(outputFolder);
    const files = (await readdir(sourceDir)).filter((f) => f.endsWith(".json"));

    if (!files.length) {
      throw new Error(`No JSON files in ${folderPath}`);
    }

    await mkdir(outputDir, { recursive: true });

    return (
      await Promise.all(
        files.map(async (f) => {
          // [].concat normalizes a single top-level object into a one-item array.
          const arr = [].concat(JSON.parse(await readFile(path.join(sourceDir, f), "utf8")));
          if (!arr.length) return null;

          // Header = union of flattened keys across all rows.
          const keys = [...new Set(arr.flatMap((o) => Object.keys(FileUtils._flatten(o))))];
          const csv = [
            keys.join(","),
            ...arr.map((o) => {
              const flat = FileUtils._flatten(o);
              return keys.map((k) => FileUtils._escape(flat[k] ?? "")).join(",");
            }),
          ].join("\n");

          // Strip only the trailing ".json" — String.replace would hit the FIRST
          // occurrence and mangle names like "my.json-data.json".
          const csvPath = path.join(outputDir, `${f.slice(0, -".json".length)}.csv`);
          await writeFile(csvPath, csv, "utf8");
          return csvPath;
        })
      )
    ).filter(Boolean);
  }

  /**
   * Compresses a folder into a ZIP file with maximum compression (zlib level 9).
   * @param {string} folderPath - Folder to compress.
   * @param {string} [zipFileName="export.zip"] - Name of the archive.
   * @param {string} [outputFolder="files"] - Destination folder for the archive.
   * @returns {Promise<string>} Path of the written ZIP file.
   */
  async zipFolder(folderPath, zipFileName = "export.zip", outputFolder = "files") {
    const sourceDir = this.resolvePath(folderPath);
    const zipPath = path.join(this.resolvePath(outputFolder), zipFileName);
    await mkdir(path.dirname(zipPath), { recursive: true });

    return new Promise((resolve, reject) => {
      const output = createWriteStream(zipPath);
      const archive = archiver("zip", { zlib: { level: 9 } });

      // Resolve when the output stream is fully flushed, not when finalize() returns.
      output.on("close", () => resolve(zipPath));
      archive.on("error", reject);
      archive.pipe(output);
      archive.directory(sourceDir, false);
      archive.finalize();
    });
  }

  /**
   * Combines every *.json file in a folder into one array. Each item is tagged
   * (mutated in place) with a `_sourceFile` property: the file name without extension.
   * @param {string} folderPath - Folder containing the JSON files.
   * @returns {Promise<object[]>} Combined items from all files.
   */
  async combineJsonFiles(folderPath) {
    const sourceDir = this.resolvePath(folderPath);
    const files = (await readdir(sourceDir)).filter((f) => f.endsWith(".json"));
    const combined = [];

    for (const file of files) {
      const arr = [].concat(JSON.parse(await readFile(path.join(sourceDir, file), "utf8")));
      // Strip only the trailing ".json" — String.replace would hit the FIRST
      // occurrence and corrupt names like "my.json-data.json".
      const fileName = file.slice(0, -".json".length);
      arr.forEach((item) => (item._sourceFile = fileName));
      combined.push(...arr);
    }

    return combined;
  }

  /**
   * Saves an array of objects as both JSON and CSV files.
   * @param {object[]} data - Non-empty array of records.
   * @param {string} fileName - Base file name (without extension).
   * @param {string} folderPath - Destination folder.
   * @returns {Promise<{jsonPath: string, csvPath: string}>} Paths of both files.
   * @throws Error when data is not a non-empty array.
   */
  async saveAsJsonAndCsv(data, fileName, folderPath) {
    if (!Array.isArray(data) || !data.length) {
      throw new Error("Data must be a non-empty array");
    }

    const filesDir = this.resolvePath(folderPath);
    await mkdir(filesDir, { recursive: true });

    await writeFile(path.join(filesDir, `${fileName}.json`), JSON.stringify(data, null, 2), "utf8");

    // Header = union of keys across ALL rows; taking data[0] alone silently
    // dropped columns that only appear in later rows.
    const keys = [...new Set(data.flatMap((row) => Object.keys(row)))];
    const csv = [
      keys.join(","),
      ...data.map((row) => keys.map((k) => FileUtils._escape(row[k] ?? "")).join(",")),
    ].join("\n");

    await writeFile(path.join(filesDir, `${fileName}.csv`), csv, "utf8");

    return {
      jsonPath: path.join(filesDir, `${fileName}.json`),
      csvPath: path.join(filesDir, `${fileName}.csv`),
    };
  }

  /**
   * Flattens a nested object into dot-notation keys. Arrays are JSON-stringified
   * leaves; Dates are ISO-string leaves (recursing into them produced nothing);
   * null/undefined become "".
   * @param {object} obj - Object to flatten.
   * @param {string} [prefix=""] - Key prefix for recursion.
   * @returns {object} Flat key/value map.
   */
  static _flatten(obj, prefix = "") {
    return Object.entries(obj).reduce((acc, [k, v]) => {
      const key = prefix ? `${prefix}.${k}` : k;
      if (v && typeof v === "object" && !Array.isArray(v) && !(v instanceof Date)) {
        // Mutating the accumulator avoids the O(n^2) cost of re-spreading it per key.
        Object.assign(acc, FileUtils._flatten(v, key));
      } else if (Array.isArray(v)) {
        acc[key] = JSON.stringify(v);
      } else if (v instanceof Date) {
        acc[key] = v.toISOString();
      } else {
        acc[key] = v ?? "";
      }
      return acc;
    }, {});
  }

  /**
   * Escapes a value for CSV output (RFC 4180): quote when it contains a
   * comma, quote, CR, or LF; double any embedded quotes.
   * @param {*} val - Value to escape; stringified first.
   * @returns {string} CSV-safe field.
   */
  static _escape(val) {
    const s = String(val);
    return /[",\r\n]/.test(s) ? `"${s.replace(/"/g, '""')}"` : s;
  }
}
|
|
173
|
+
// Expose both a default and a named export so either import style works:
//   import FileUtils from "@entergreat/file-utils";
//   import { FileUtils } from "@entergreat/file-utils";
export default FileUtils;
export { FileUtils };
|