@reliverse/helpers 2.2.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/impl/clear-logger-internals.d.ts +17 -0
- package/dist/impl/clear-logger-internals.js +109 -0
- package/dist/impl/replace-exports.d.ts +13 -0
- package/dist/impl/replace-exports.js +155 -0
- package/dist/impl/terminal-helpers.d.ts +20 -0
- package/dist/impl/terminal-helpers.js +48 -0
- package/dist/impl/write.d.ts +5 -0
- package/dist/impl/write.js +23 -0
- package/dist/mod.d.ts +4 -0
- package/dist/mod.js +17 -0
- package/package.json +26 -0

package/dist/impl/clear-logger-internals.d.ts
ADDED
@@ -0,0 +1,17 @@
+interface ClearLoggerInternalsOptions {
+    packages: Array<{
+        name: string;
+        outputDir: string;
+        path: string;
+    }>;
+    ignorePackages?: string | string[];
+    verbose?: boolean;
+    onLog?: (message: string) => void;
+}
+interface ClearResult {
+    updated: number;
+    skipped: number;
+    files: string[];
+}
+export declare function clearLoggerInternalsInPackages(options: ClearLoggerInternalsOptions): Promise<ClearResult>;
+export {};
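
For reference, a minimal usage sketch of the declared API as re-exported from the package root; the package names and directory layout passed in the packages array are hypothetical:

    import { clearLoggerInternalsInPackages } from "@reliverse/helpers";

    // Hypothetical monorepo entries; outputDir is the built "dist" folder to scan.
    const result = await clearLoggerInternalsInPackages({
      packages: [
        { name: "@acme/core", outputDir: "packages/core/dist", path: "packages/core" },
      ],
      ignorePackages: "@acme/*-internal", // single pattern or array; "*" acts as a wildcard
      verbose: true,
      onLog: (message) => console.log(message),
    });
    console.log(`updated ${result.updated}, skipped ${result.skipped}`, result.files);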

package/dist/impl/clear-logger-internals.js
ADDED
@@ -0,0 +1,109 @@
+import { existsSync, readFileSync, writeFileSync } from "node:fs";
+import { readdirRecursive } from "@reliverse/relifso";
+const patternCache = /* @__PURE__ */ new Map();
+function matchesPattern(str, pattern) {
+  if (pattern.includes("*")) {
+    let regex = patternCache.get(pattern);
+    if (!regex) {
+      regex = new RegExp(`^${pattern.replace(/\*/g, ".*")}$`);
+      patternCache.set(pattern, regex);
+    }
+    return regex.test(str);
+  }
+  return str === pattern;
+}
+function shouldIgnorePackage(packageName, ignorePackages) {
+  const patterns = typeof ignorePackages === "string" ? [ignorePackages] : ignorePackages;
+  return patterns.some((pattern) => matchesPattern(packageName, pattern));
+}
+const LOGGER_INTERNAL_REGEX = /logger\.internal\s*\(/;
+const LOG_INTERNAL_REGEX = /logInternal\s*\(/;
+function clearLoggerInternalsInFile(filePath) {
+  const content = readFileSync(filePath, "utf-8");
+  const lines = content.split("\n");
+  const filteredLines = [];
+  let hasChanges = false;
+  for (const line of lines) {
+    if (LOGGER_INTERNAL_REGEX.test(line) || LOG_INTERNAL_REGEX.test(line)) {
+      hasChanges = true;
+      continue;
+    }
+    filteredLines.push(line);
+  }
+  if (hasChanges) {
+    const updated = filteredLines.join("\n");
+    writeFileSync(filePath, updated, "utf-8");
+    return true;
+  }
+  return false;
+}
+export async function clearLoggerInternalsInPackages(options) {
+  const { packages, ignorePackages, verbose = false, onLog } = options;
+  const log = (message) => {
+    if (verbose && onLog) {
+      onLog(message);
+    }
+  };
+  if (verbose) {
+    log(`
+\u{1F9F9} Clearing logger internals from ${packages.length} package(s)...`);
+  }
+  let updatedCount = 0;
+  let skippedCount = 0;
+  const processedFiles = [];
+  for (const pkg of packages) {
+    log(` Processing package: ${pkg.name}`);
+    if (ignorePackages && shouldIgnorePackage(pkg.name, ignorePackages)) {
+      log(` \u23ED\uFE0F Skipping ${pkg.name} (ignored by pattern)`);
+      skippedCount++;
+      continue;
+    }
+    const outputDir = pkg.outputDir;
+    if (!existsSync(outputDir)) {
+      log(
+        ` \u23ED\uFE0F Skipping ${pkg.name} (dist directory not found: ${outputDir})`
+      );
+      skippedCount++;
+      continue;
+    }
+    log(` \u{1F4C1} Scanning dist directory: ${outputDir}`);
+    let jsFiles = [];
+    try {
+      jsFiles = await readdirRecursive(outputDir, {
+        extensions: ["js"]
+      });
+      log(` \u{1F4C4} Found ${jsFiles.length} .js file(s)`);
+    } catch (error) {
+      log(` \u26A0\uFE0F Error reading dist directory for ${pkg.name}: ${error}`);
+      skippedCount++;
+      continue;
+    }
+    let packageUpdatedCount = 0;
+    for (const filePath of jsFiles) {
+      try {
+        if (clearLoggerInternalsInFile(filePath)) {
+          updatedCount++;
+          packageUpdatedCount++;
+          processedFiles.push(filePath);
+          log(` \u2713 Updated: ${filePath}`);
+        }
+      } catch (error) {
+        log(` \u26A0\uFE0F Error processing ${filePath}: ${error}`);
+      }
+    }
+    if (packageUpdatedCount === 0 && jsFiles.length > 0) {
+      log(` \u2139\uFE0F No logger internals found in ${pkg.name}`);
+    }
+  }
+  if (verbose) {
+    log(
+      `
+Summary: Updated ${updatedCount} file(s), skipped ${skippedCount} package(s)`
+    );
+  }
+  return {
+    updated: updatedCount,
+    skipped: skippedCount,
+    files: processedFiles
+  };
+}

package/dist/impl/replace-exports.d.ts
ADDED
@@ -0,0 +1,13 @@
+interface ReplaceExportsOptions {
+    direction?: "ts-to-js" | "js-to-ts";
+    cwd?: string;
+    ignorePackages?: string | string[];
+    verbose?: boolean;
+}
+interface ReplaceResult {
+    updated: number;
+    skipped: number;
+    files: string[];
+}
+export declare function replaceExportsInPackages(options?: ReplaceExportsOptions): Promise<ReplaceResult>;
+export {};
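
A brief usage sketch for this function, assuming it is run from a monorepo root; every option is optional and falls back to the defaults used in the implementation below:

    import { replaceExportsInPackages } from "@reliverse/helpers";

    // Rewrite "./src/*.ts" export targets to "./dist/*.js" and "./dist/*.d.ts"
    // in every package.json found under cwd (node_modules is skipped).
    const result = await replaceExportsInPackages({
      direction: "ts-to-js",
      cwd: process.cwd(),
      ignorePackages: ["@acme/docs"], // hypothetical package to leave untouched
    });
    console.log(`rewrote ${result.updated} of ${result.files.length} package.json file(s)`);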

package/dist/impl/replace-exports.js
ADDED
@@ -0,0 +1,155 @@
+import { readFileSync, writeFileSync } from "node:fs";
+import { Glob } from "bun";
+const patternCache = /* @__PURE__ */ new Map();
+function matchesPattern(str, pattern) {
+  if (pattern.includes("*")) {
+    let regex = patternCache.get(pattern);
+    if (!regex) {
+      regex = new RegExp(`^${pattern.replace(/\*/g, ".*")}$`);
+      patternCache.set(pattern, regex);
+    }
+    return regex.test(str);
+  }
+  return str === pattern;
+}
+function shouldIgnorePackage(packageName, ignorePackages) {
+  const patterns = typeof ignorePackages === "string" ? [ignorePackages] : ignorePackages;
+  return patterns.some((pattern) => matchesPattern(packageName, pattern));
+}
+const DEFAULT_PATTERN_TS_TO_JS = /"default":\s*"\.\/src\/([^"]+)\.ts"/g;
+const TYPES_PATTERN_TS_TO_JS = /"types":\s*"\.\/src\/([^"]+)\.ts"/g;
+const ROOT_JS_PATTERN_TS_TO_JS = /"default":\s*"\.\/([^"]+\.js)"/g;
+const ROOT_DTS_PATTERN_TS_TO_JS = /"types":\s*"\.\/([^"]+\.d\.ts)"/g;
+const DIST_JS_PATTERN_JS_TO_TS = /"default":\s*"\.\/dist\/([^"]+)\.js"/g;
+const DIST_DTS_PATTERN_JS_TO_TS = /"types":\s*"\.\/dist\/([^"]+)\.d\.ts"/g;
+const ROOT_JS_PATTERN_JS_TO_TS = /"default":\s*"\.\/([^"]+\.js)"/g;
+const ROOT_DTS_PATTERN_JS_TO_TS = /"types":\s*"\.\/([^"]+\.d\.ts)"/g;
+function replaceInPackageJson(filePath, direction) {
+  const content = readFileSync(filePath, "utf-8");
+  if (direction === "ts-to-js") {
+    let updated = content;
+    let hasChanges = false;
+    DEFAULT_PATTERN_TS_TO_JS.lastIndex = 0;
+    if (DEFAULT_PATTERN_TS_TO_JS.test(content)) {
+      DEFAULT_PATTERN_TS_TO_JS.lastIndex = 0;
+      updated = updated.replace(
+        DEFAULT_PATTERN_TS_TO_JS,
+        '"default": "./dist/$1.js"'
+      );
+      hasChanges = true;
+    }
+    TYPES_PATTERN_TS_TO_JS.lastIndex = 0;
+    if (TYPES_PATTERN_TS_TO_JS.test(content)) {
+      TYPES_PATTERN_TS_TO_JS.lastIndex = 0;
+      updated = updated.replace(
+        TYPES_PATTERN_TS_TO_JS,
+        '"types": "./dist/$1.d.ts"'
+      );
+      hasChanges = true;
+    }
+    ROOT_JS_PATTERN_TS_TO_JS.lastIndex = 0;
+    updated = updated.replace(ROOT_JS_PATTERN_TS_TO_JS, (match, fileName) => {
+      if (!fileName.startsWith("dist/")) {
+        hasChanges = true;
+        return `"default": "./dist/${fileName}"`;
+      }
+      return match;
+    });
+    ROOT_DTS_PATTERN_TS_TO_JS.lastIndex = 0;
+    updated = updated.replace(ROOT_DTS_PATTERN_TS_TO_JS, (match, fileName) => {
+      if (!fileName.startsWith("dist/")) {
+        hasChanges = true;
+        return `"types": "./dist/${fileName}"`;
+      }
+      return match;
+    });
+    if (hasChanges) {
+      writeFileSync(filePath, updated, "utf-8");
+      return true;
+    }
+  } else {
+    let updated = content;
+    let hasChanges = false;
+    DIST_JS_PATTERN_JS_TO_TS.lastIndex = 0;
+    if (DIST_JS_PATTERN_JS_TO_TS.test(content)) {
+      DIST_JS_PATTERN_JS_TO_TS.lastIndex = 0;
+      updated = updated.replace(
+        DIST_JS_PATTERN_JS_TO_TS,
+        '"default": "./src/$1.ts"'
+      );
+      hasChanges = true;
+    }
+    DIST_DTS_PATTERN_JS_TO_TS.lastIndex = 0;
+    if (DIST_DTS_PATTERN_JS_TO_TS.test(content)) {
+      DIST_DTS_PATTERN_JS_TO_TS.lastIndex = 0;
+      updated = updated.replace(
+        DIST_DTS_PATTERN_JS_TO_TS,
+        '"types": "./src/$1.ts"'
+      );
+      hasChanges = true;
+    }
+    ROOT_JS_PATTERN_JS_TO_TS.lastIndex = 0;
+    updated = updated.replace(ROOT_JS_PATTERN_JS_TO_TS, (match, fileName) => {
+      if (!fileName.startsWith("src/") && !fileName.startsWith("dist/")) {
+        const baseName = fileName.replace(/\.js$/, "");
+        hasChanges = true;
+        return `"default": "./src/${baseName}.ts"`;
+      }
+      return match;
+    });
+    ROOT_DTS_PATTERN_JS_TO_TS.lastIndex = 0;
+    updated = updated.replace(ROOT_DTS_PATTERN_JS_TO_TS, (match, fileName) => {
+      if (!fileName.startsWith("src/") && !fileName.startsWith("dist/")) {
+        const baseName = fileName.replace(/\.d\.ts$/, "");
+        hasChanges = true;
+        return `"types": "./src/${baseName}.ts"`;
+      }
+      return match;
+    });
+    if (hasChanges) {
+      writeFileSync(filePath, updated, "utf-8");
+      return true;
+    }
+  }
+  return false;
+}
+export async function replaceExportsInPackages(options = {}) {
+  const {
+    direction = "ts-to-js",
+    cwd = process.cwd(),
+    ignorePackages
+  } = options;
+  const glob = new Glob("**/package.json");
+  const packageJsonFiles = [];
+  for await (const file of glob.scan({ cwd, onlyFiles: true })) {
+    if (!file.includes("node_modules/")) {
+      packageJsonFiles.push(file);
+    }
+  }
+  let filteredFiles = packageJsonFiles;
+  if (ignorePackages) {
+    filteredFiles = [];
+    for (const file of packageJsonFiles) {
+      try {
+        const content = readFileSync(file, "utf-8");
+        const pkg = JSON.parse(content);
+        if (!pkg?.name || !shouldIgnorePackage(pkg.name, ignorePackages)) {
+          filteredFiles.push(file);
+        }
+      } catch {
+        filteredFiles.push(file);
+      }
+    }
+  }
+  let updatedCount = 0;
+  for (const file of filteredFiles) {
+    if (replaceInPackageJson(file, direction)) {
+      updatedCount++;
+    }
+  }
+  return {
+    updated: updatedCount,
+    skipped: filteredFiles.length - updatedCount,
+    files: filteredFiles
+  };
+}
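
To make the "ts-to-js" rewrite above concrete, here is the same replacement applied to a sample exports entry; the file name is illustrative, while the pattern and replacement string are the ones defined in the implementation:

    // Illustration only: one "default" entry run through the ts-to-js pattern.
    const sample = '"default": "./src/mod.ts"';
    const rewritten = sample.replace(
      /"default":\s*"\.\/src\/([^"]+)\.ts"/g,
      '"default": "./dist/$1.js"'
    );
    console.log(rewritten); // "default": "./dist/mod.js"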

package/dist/impl/terminal-helpers.d.ts
ADDED
@@ -0,0 +1,20 @@
+export declare function ensuredir(dir: string): Promise<void>;
+export declare const handleError: (error: unknown) => string;
+/**
+ * Changes the current working directory to the specified path.
+ * Logs a warning if the target directory does not exist.
+ */
+export declare function cd(dir: string): Promise<void>;
+/**
+ * Returns the current working directory.
+ */
+export declare function pwd(): void;
+/**
+ * Removes a file or directory (recursively, if it's a directory).
+ * Logs an error if removal fails.
+ */
+export declare function rm(target: string): Promise<void>;
+/**
+ * Returns the current working directory.
+ */
+export declare function getCurrentWorkingDirectory(useCache?: boolean): string;
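
A short sketch of how these helpers combine; the directory and file names are placeholders:

    import { cd, ensuredir, getCurrentWorkingDirectory, pwd, rm } from "@reliverse/helpers";

    await ensuredir("tmp/scratch");                 // create the directory if it is missing
    pwd();                                          // log the current working directory
    console.log(getCurrentWorkingDirectory(false)); // bypass the cached cwd value
    await cd("tmp/scratch");                        // chdir into it and log the new cwd
    await rm("old-output");                         // remove a file or directory recursively; no error if absent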

package/dist/impl/terminal-helpers.js
ADDED
@@ -0,0 +1,48 @@
+import fs from "node:fs/promises";
+import { normalize } from "node:path";
+import { cwd } from "node:process";
+export async function ensuredir(dir) {
+  await fs.mkdir(dir, { recursive: true });
+}
+export const handleError = (error) => error instanceof Error ? error.message : "Unknown error";
+export async function cd(dir) {
+  try {
+    await ensuredir(dir);
+    await fs.access(dir);
+    process.chdir(dir);
+    console.log(`Changed directory to: ${process.cwd()}`);
+  } catch (error) {
+    console.warn(`Directory does not exist: ${dir}`, handleError(error));
+  }
+}
+export function pwd() {
+  const cwd2 = getCurrentWorkingDirectory();
+  console.log(`Current working directory: ${cwd2}`);
+}
+export async function rm(target) {
+  try {
+    await fs.rm(target, { recursive: true, force: true });
+    console.log(`Removed: ${target}`);
+  } catch (error) {
+    console.error(`Failed to remove: ${target}`, handleError(error));
+  }
+}
+let cachedCWD = null;
+export function getCurrentWorkingDirectory(useCache = true) {
+  if (useCache && cachedCWD) {
+    return cachedCWD;
+  }
+  try {
+    const currentDirectory = normalize(cwd());
+    if (useCache) {
+      cachedCWD = currentDirectory;
+    }
+    return currentDirectory;
+  } catch (error) {
+    console.error(
+      "Error getting current working directory:",
+      handleError(error)
+    );
+    throw error;
+  }
+}

package/dist/impl/write.d.ts
ADDED
@@ -0,0 +1,5 @@
+export declare const writeLine: (text: string) => void;
+export declare const writeError: (text: string) => void;
+export declare const writeErrorLines: (lines: string[]) => void;
+export declare const writeJsonFile: (path: string, data: unknown) => Promise<void>;
+export declare const writeTextFile: (path: string, content: string) => Promise<void>;
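
These helpers write through Bun.write, so they assume the Bun runtime; a minimal sketch with placeholder paths:

    import { writeError, writeErrorLines, writeJsonFile, writeLine } from "@reliverse/helpers";

    writeLine("build finished");                          // newline-terminated write to stdout
    writeError("something went wrong");                   // newline-terminated write to stderr
    writeErrorLines(["first problem", "second problem"]); // joined with "\n", sent to stderr
    await writeJsonFile("out/report.json", { ok: true }); // pretty-printed JSON plus trailing newline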

package/dist/impl/write.js
ADDED
@@ -0,0 +1,23 @@
+const textEncoder = new TextEncoder();
+export const writeLine = (text) => {
+  const encoded = textEncoder.encode(`${text}
+`);
+  Bun.write(Bun.stdout, encoded);
+};
+export const writeError = (text) => {
+  const encoded = textEncoder.encode(`${text}
+`);
+  Bun.write(Bun.stderr, encoded);
+};
+export const writeErrorLines = (lines) => {
+  const encoded = textEncoder.encode(`${lines.join("\n")}
+`);
+  Bun.write(Bun.stderr, encoded);
+};
+export const writeJsonFile = async (path, data) => {
+  await Bun.write(path, `${JSON.stringify(data, null, 2)}
+`);
+};
+export const writeTextFile = async (path, content) => {
+  await Bun.write(path, content);
+};
package/dist/mod.d.ts
ADDED
@@ -0,0 +1,4 @@
+export { clearLoggerInternalsInPackages } from "./impl/clear-logger-internals.js";
+export { replaceExportsInPackages } from "./impl/replace-exports.js";
+export { cd, ensuredir, getCurrentWorkingDirectory, handleError, pwd, rm, } from "./impl/terminal-helpers.js";
+export { writeError, writeErrorLines, writeJsonFile, writeLine, writeTextFile, } from "./impl/write.js";
package/dist/mod.js
ADDED
@@ -0,0 +1,17 @@
+export { clearLoggerInternalsInPackages } from "./impl/clear-logger-internals.js";
+export { replaceExportsInPackages } from "./impl/replace-exports.js";
+export {
+  cd,
+  ensuredir,
+  getCurrentWorkingDirectory,
+  handleError,
+  pwd,
+  rm
+} from "./impl/terminal-helpers.js";
+export {
+  writeError,
+  writeErrorLines,
+  writeJsonFile,
+  writeLine,
+  writeTextFile
+} from "./impl/write.js";
package/package.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "name": "@reliverse/helpers",
+  "version": "2.2.7",
+  "private": false,
+  "type": "module",
+  "description": "@reliverse/helpers is a collection of helper functions for the dler monorepo.",
+  "exports": {
+    ".": {
+      "types": "./dist/mod.d.ts",
+      "default": "./dist/mod.js"
+    }
+  },
+  "publishConfig": {
+    "access": "public"
+  },
+  "dependencies": {
+    "@reliverse/relifso": "2.2.7",
+    "@reliverse/matcha": "2.2.7",
+    "@reliverse/typerso": "2.2.7"
+  },
+  "files": [
+    "dist",
+    "package.json"
+  ],
+  "license": "MIT"
+}