@entur/function-tools 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,56 @@
+ import { filterMap } from '../utils/array.js';
+
+ // import type { ExportAllDeclaration, ExportNamedDeclaration, SourceLocation } from "@babel/types"
+ // import {
+ // isExportAllDeclaration,
+ // isExportNamedDeclaration,
+ // isExportSpecifier,
+ // isIdentifier,
+ // } from "@babel/types"
+ function getProxyExports(module) {
+ return module.body.flatMap((node)=>{
+ switch(node.type){
+ case "ExportNamedDeclaration":
+ return getProxyDetails(node);
+ case "ExportAllDeclaration":
+ return getPassthroughProxyDetails(node);
+ default:
+ return [];
+ }
+ });
+ }
+ function getProxyDetails(node) {
+ if (!node.source?.value) return [];
+ const specifier = node.source.value;
+ // Case: export { default as sampleFile, firstName as givenName } from './sample-file';
+ return filterMap(node.specifiers, (specifierItem)=>{
+ switch(specifierItem.type){
+ case "ExportSpecifier":
+ {
+ const name = specifierItem.orig.value;
+ const alias = specifierItem.exported?.value;
+ return alias && name !== alias ? {
+ name,
+ alias,
+ specifier,
+ node
+ } : {
+ name,
+ specifier,
+ node
+ };
+ }
+ }
+ });
+ }
+ function getPassthroughProxyDetails(node) {
+ const specifier = node.source.value;
+ // Case: export * from './sample-file';
+ return {
+ specifier,
+ passthrough: true,
+ node
+ };
+ }
+
+ export { getProxyExports };
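A minimal usage sketch for getProxyExports above. The orig/exported specifier shape (and the commented-out @babel/types imports) suggests the module argument is an @swc/core AST, so the sketch assumes an SWC parse; the import path is hypothetical because this hunk's file header is not shown in the diff.

    import { parseSync } from '@swc/core';
    // Hypothetical path; the diff does not name this hunk's file.
    import { getProxyExports } from './importExports/proxyExports.js';

    // Parse a barrel file mirroring the cases in the comments above.
    const module = parseSync(
        "export { firstName as givenName } from './sample-file';\nexport * from './other';",
        { syntax: 'ecmascript' }
    );

    // Yields one named proxy entry and one passthrough entry, roughly:
    // [
    //   { name: 'firstName', alias: 'givenName', specifier: './sample-file', node: ... },
    //   { specifier: './other', passthrough: true, node: ... }
    // ]
    console.log(getProxyExports(module));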
@@ -0,0 +1,31 @@
+ import { ResolverFactory } from 'oxc-resolver';
+ import { uniq } from '../utils/array.js';
+ import { asyncFilterMap } from '../utils/async.js';
+ import { createCache, isBuiltinImport } from './utils.js';
+
+ function createResolver({ extensions, ...options } = {}) {
+ const resolvers = extensions?.length ? extensions.map((it)=>new ResolverFactory({
+ ...options,
+ extensions: [
+ it
+ ]
+ })) : [
+ new ResolverFactory(options)
+ ];
+ return createCache(async (basedir, importValue)=>{
+ if (isBuiltinImport(importValue)) {
+ return [];
+ }
+ const resolutions = await asyncFilterMap(resolvers, async (resolver)=>{
+ const { error, path } = await resolver.async(basedir, importValue);
+ if (error) return;
+ return path;
+ });
+ if (resolutions.length === 0) {
+ throw new Error("Module not found");
+ }
+ return uniq(resolutions);
+ });
+ }
+
+ export { createResolver };
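A usage sketch for createResolver above: it builds one oxc-resolver ResolverFactory per configured extension and memoizes lookups per basedir:specifier key. The import path and the example paths are hypothetical since the hunk's file header is not shown.

    // Hypothetical path; the diff does not name this hunk's file.
    import { createResolver } from './importExports/resolver.js';

    // One ResolverFactory per extension, so '.ts' and '.js' candidates are tried independently.
    const resolve = createResolver({ extensions: ['.ts', '.js'] });

    // Built-ins short-circuit to []; anything unresolvable throws "Module not found".
    console.log(await resolve('/repo/src', 'node:path'));      // []
    console.log(await resolve('/repo/src', './utils/array'));  // e.g. ['/repo/src/utils/array.ts']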
@@ -0,0 +1,53 @@
+ import { dirname } from 'node:path';
+ import { partition } from '../utils/array.js';
+ import { asyncFlatMap } from '../utils/async.js';
+ import { isBuiltinImport } from './utils.js';
+
+ function createResolveImport(resolver, getExports, getProxyExports) {
+ const walk = async (directory, specifier, currentName)=>{
+ const resolved = await resolver(directory, specifier);
+ return asyncFlatMap(resolved, async (resolvedPath)=>{
+ if (resolvedPath.includes("/node_modules/")) {
+ return {
+ name: currentName,
+ resolvedPath
+ };
+ }
+ const exports$1 = await getExports(resolvedPath);
+ const matchingExport = exports$1.find(({ name })=>name === currentName);
+ if (matchingExport) {
+ return {
+ name: matchingExport.name,
+ resolvedPath
+ };
+ }
+ const proxyExports = await getProxyExports(resolvedPath);
+ const [namedProxyExports, passthroughProxyExports] = partition(proxyExports, (proxyExport)=>"name" in proxyExport);
+ const foundNamedProxyExport = namedProxyExports.find(({ name, alias })=>(alias ?? name) === currentName);
+ if (foundNamedProxyExport) {
+ return walk(dirname(resolvedPath), foundNamedProxyExport.specifier, foundNamedProxyExport.name);
+ }
+ return asyncFlatMap(passthroughProxyExports, (it)=>walk(dirname(resolvedPath), it.specifier, currentName));
+ });
+ };
+ return async ({ name, specifier, node }, initialDirectory)=>{
+ if (!name || isBuiltinImport(specifier)) return [];
+ const found = await walk(initialDirectory, specifier, name);
+ if (found.length === 0) {
+ throw new Error(`No corresponding exports found of ${name}`);
+ }
+ return found.map((it)=>it.name !== name ? {
+ ...it,
+ alias: name,
+ specifier,
+ node
+ } : {
+ ...it,
+ name,
+ specifier,
+ node
+ });
+ };
+ }
+
+ export { createResolveImport };
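To illustrate the walk above with purely in-memory stand-ins (the import path, paths and fixture helpers below are all hypothetical), a named import is traced through a barrel re-export down to the file that actually declares it:

    // Hypothetical path and fixtures, for illustration only.
    import { createResolveImport } from './importExports/resolveImport.js';

    const resolver = async (dir, spec)=>spec === './barrel' ? ['/src/barrel.js'] : ['/src/impl.js'];
    const getExports = async (path)=>path === '/src/impl.js' ? [{ name: 'doWork' }] : [];
    const getProxyExports = async (path)=>path === '/src/barrel.js'
        ? [{ name: 'doWork', specifier: './impl', node: {} }]
        : [];

    const resolveImport = createResolveImport(resolver, getExports, getProxyExports);

    // `import { doWork } from './barrel'` is followed through the barrel to /src/impl.js.
    console.log(await resolveImport({ name: 'doWork', specifier: './barrel', node: {} }, '/src'));
    // [{ name: 'doWork', resolvedPath: '/src/impl.js', specifier: './barrel', node: {} }]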
@@ -0,0 +1,41 @@
+ function traverse(node, visitors) {
+ const traverseNode = (currentNode)=>{
+ if (isMemberExpression(currentNode)) {
+ visitors.visitMemberExpression?.(currentNode);
+ } else if (isTsQualifiedName(currentNode)) {
+ visitors.visitTsQualifiedName?.(currentNode);
+ }
+ for (const value of Object.values(currentNode)){
+ if (isNode(value)) {
+ traverseNode(value);
+ }
+ if (Array.isArray(value)) {
+ for (const item of value){
+ if (isNode(item)) {
+ traverseNode(item);
+ } else if (isArgument(item)) {
+ traverseNode(item.expression);
+ }
+ }
+ }
+ }
+ };
+ traverseNode(node);
+ }
+ function isMemberExpression(node) {
+ return node.type === "MemberExpression";
+ }
+ function isTsQualifiedName(node) {
+ return node.type === "TsQualifiedName";
+ }
+ function isNode(value) {
+ return isObject(value) && "type" in value && typeof value.type === "string";
+ }
+ function isArgument(value) {
+ return isObject(value) && "expression" in value && isObject(value.expression);
+ }
+ function isObject(value) {
+ return typeof value === "object" && value !== null;
+ }
+
+ export { traverse };
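traverse only dispatches on two node types and otherwise walks every object-valued property, so it works on any plain AST-shaped object. A small sketch (hypothetical import path, hand-built node):

    // Hypothetical path; the diff does not name this hunk's file.
    import { traverse } from './traverse.js';

    // A hand-built fragment shaped like `foo.bar`.
    const ast = {
        type: 'ExpressionStatement',
        expression: {
            type: 'MemberExpression',
            object: { type: 'Identifier', value: 'foo' },
            property: { type: 'Identifier', value: 'bar' }
        }
    };

    traverse(ast, {
        visitMemberExpression: (node)=>console.log('member access on', node.object.value) // "foo"
    });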
@@ -0,0 +1,76 @@
+ import { dirname } from 'node:path';
+ import { minimatch } from 'minimatch';
+ import { filterMap } from '../utils/array.js';
+ import { asyncReduce } from '../utils/async.js';
+
+ function createReadFileDependencies(getImports, getExports, resolveImport) {
+ return async (filePath)=>{
+ const exports$1 = await getExports(filePath);
+ const { imports, missing } = await asyncReduce(await getImports(filePath), async (acc, it)=>{
+ try {
+ const resolvedImports = await resolveImport(it, dirname(filePath));
+ acc.imports.push(...resolvedImports);
+ } catch (error) {
+ acc.missing.push({
+ ...it,
+ message: error instanceof Error ? error.message : `Unknown error`
+ });
+ }
+ return acc;
+ }, {
+ imports: [],
+ missing: []
+ });
+ return {
+ filePath,
+ imports,
+ exports: exports$1,
+ missing
+ };
+ };
+ }
+ function createIsImported(fileDependecies) {
+ const table = new Set();
+ const getKey = (filePath, name)=>`${filePath}:${name}`;
+ const getAliasKey = (filePath, name)=>`${filePath}:alias:${name}`;
+ // Build a set for quick lookup, where key is combined using path and name
+ fileDependecies.forEach(({ imports })=>{
+ imports.forEach(({ resolvedPath, name, alias })=>{
+ table.add(getKey(resolvedPath, name));
+ if (alias) {
+ table.add(getAliasKey(resolvedPath, alias));
+ }
+ });
+ });
+ return (filePath, name)=>table.has(getKey(filePath, name)) || table.has(getAliasKey(filePath, name));
+ }
+ function extractUnusedExports(fileDependencies, ignorePatterns) {
+ const isImported = createIsImported(fileDependencies);
+ const unusedExportEntries = filterMap(fileDependencies, ({ filePath, exports: exports$1 })=>{
+ const filteredExports = exports$1.filter(({ name })=>!isImported(filePath, name));
+ if (filteredExports.length === 0) return;
+ return [
+ filePath,
+ filteredExports
+ ];
+ });
+ const missingImportsEntries = filterMap(fileDependencies, ({ filePath, missing })=>{
+ if (missing.length === 0) return;
+ return [
+ filePath,
+ missing
+ ];
+ });
+ if (!ignorePatterns?.length) {
+ return {
+ missingImports: new Map(missingImportsEntries),
+ unusedExports: new Map(unusedExportEntries)
+ };
+ }
+ return {
+ missingImports: new Map(missingImportsEntries),
+ unusedExports: new Map(unusedExportEntries.filter(([filePath])=>ignorePatterns.every((ignorePattern)=>!minimatch(filePath, ignorePattern))))
+ };
+ }
+
+ export { createReadFileDependencies, extractUnusedExports };
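extractUnusedExports only needs the { filePath, imports, exports, missing } records produced by createReadFileDependencies, so it can be exercised with in-memory data (import path and file records below are hypothetical):

    // Hypothetical path and fixture records, for illustration only.
    import { extractUnusedExports } from './importExports/fileDependencies.js';

    const fileDependencies = [
        {
            filePath: '/src/a.js',
            imports: [{ resolvedPath: '/src/b.js', name: 'used' }],
            exports: [],
            missing: []
        },
        {
            filePath: '/src/b.js',
            imports: [],
            exports: [{ name: 'used' }, { name: 'neverImported' }],
            missing: []
        }
    ];

    const { unusedExports, missingImports } = extractUnusedExports(fileDependencies);
    console.log(unusedExports);  // Map { '/src/b.js' => [ { name: 'neverImported' } ] }
    console.log(missingImports); // Map {}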
@@ -0,0 +1,18 @@
+ import { builtinModules } from 'node:module';
+
+ function isBuiltinImport(specifier) {
+ return specifier.startsWith("node:") || builtinModules.includes(specifier);
+ }
+ function createCache(fn) {
+ const cache = new Map();
+ return (...args)=>{
+ const key = args.map((it)=>String(it)).join(":");
+ const cached = cache.get(key);
+ if (cached !== undefined) return cached;
+ const result = fn(...args);
+ cache.set(key, result);
+ return result;
+ };
+ }
+
+ export { createCache, isBuiltinImport };
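createCache keys the memo on the arguments joined as strings with ':', and stores whatever fn returns, so promises are cached too. A short sketch (hypothetical import path):

    // Hypothetical path; the diff does not name this hunk's file.
    import { createCache } from './importExports/utils.js';

    let hits = 0;
    const slowDouble = createCache(async (n)=>{
        hits += 1;
        return n * 2;
    });

    // The second call returns the cached promise without re-running the function.
    console.log(await slowDouble(21), await slowDouble(21), hits); // 42 42 1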
package/lib/index.d.ts ADDED
@@ -0,0 +1,2 @@
+ export { Export, getExports } from './importExports/exports.js';
+ export { Import, getImports } from './importExports/imports.js';
package/lib/index.js ADDED
@@ -0,0 +1,2 @@
+ export { getExports } from './importExports/exports.js';
+ export { getImports } from './importExports/imports.js';
@@ -0,0 +1,44 @@
+ function uniq(array) {
+ return [
+ ...new Set(array)
+ ];
+ }
+ function filterMap(arr, mapper) {
+ return arr.reduce((acc, item, index, array)=>{
+ const b = mapper(item, index, array);
+ if (b !== undefined) {
+ acc.push(b);
+ }
+ return acc;
+ }, []);
+ }
+ function groupBy(array, iteratee) {
+ return array.reduce((map, item, index, array_)=>{
+ const key = iteratee(item, index, array_);
+ const existingGroup = map.get(key);
+ if (existingGroup) {
+ existingGroup.push(item);
+ } else {
+ map.set(key, [
+ item
+ ]);
+ }
+ return map;
+ }, new Map());
+ }
+ function partition(array, predicate) {
+ return array.reduce((result, value, index, arr)=>{
+ const [include, exclude] = result;
+ if (predicate(value, index, arr)) {
+ include.push(value);
+ } else {
+ exclude.push(value);
+ }
+ return result;
+ }, [
+ [],
+ []
+ ]);
+ }
+
+ export { filterMap, groupBy, partition, uniq };
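The array helpers are lodash-style utilities: filterMap drops undefined results, groupBy returns a Map, partition returns an [included, excluded] pair. A quick sketch of their shapes (the relative import path mirrors the '../utils/array.js' specifiers used by the other hunks):

    import { filterMap, groupBy, partition, uniq } from './utils/array.js';

    console.log(uniq([1, 1, 2]));                                        // [1, 2]
    console.log(filterMap([1, 2, 3], (n)=>n % 2 ? n * 10 : undefined));  // [10, 30]
    console.log(groupBy(['ant', 'bee', 'ape'], (w)=>w[0]));              // Map { 'a' => ['ant', 'ape'], 'b' => ['bee'] }
    console.log(partition([1, 2, 3, 4], (n)=>n % 2 === 0));              // [[2, 4], [1, 3]]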
@@ -0,0 +1,34 @@
+ function asyncMap(array, iteratee) {
+ return Promise.all(array.map(iteratee));
+ }
+ async function asyncFilterMap(arr, mapper) {
+ const mapped = await asyncMap(arr, mapper);
+ return mapped.filter((it)=>it !== undefined);
+ }
+ async function asyncFlatMap(array, iteratee) {
+ const result = await Promise.all(array.map(iteratee));
+ return result.flat();
+ }
+ async function asyncReduce(array, iteratee, initialValue) {
+ return array.reduce(async (queue, item, index)=>{
+ const accumulator = await queue;
+ return iteratee(accumulator, item, index, array);
+ }, Promise.resolve(initialValue));
+ }
+ function groupByAsync(array, iteratee) {
+ return array.reduce(async (queue, item, index, array_)=>{
+ const map = await queue;
+ const key = await iteratee(item, index, array_);
+ const existingGroup = map.get(key);
+ if (existingGroup) {
+ existingGroup.push(item);
+ } else {
+ map.set(key, [
+ item
+ ]);
+ }
+ return map;
+ }, Promise.resolve(new Map()));
+ }
+ 
+ export { asyncFilterMap, asyncFlatMap, asyncMap, asyncReduce, groupByAsync };
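asyncMap, asyncFilterMap and asyncFlatMap fan out with Promise.all, while asyncReduce and groupByAsync thread the accumulator through a promise chain, so their iteratees run sequentially. A small sketch (import path mirrors the '../utils/async.js' specifiers used elsewhere in the diff):

    import { asyncFilterMap, asyncReduce } from './utils/async.js';

    // Runs in parallel and drops undefined results.
    console.log(await asyncFilterMap([1, 2, 3], async (n)=>(n > 1 ? n : undefined))); // [2, 3]

    // Runs sequentially, awaiting the accumulator between steps.
    console.log(await asyncReduce([1, 2, 3], async (sum, n)=>sum + n, 0)); // 6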
@@ -0,0 +1,77 @@
+ import { fileURLToPath } from 'node:url';
+ import { nodeResolve } from '@rollup/plugin-node-resolve';
+ import swc from '@rollup/plugin-swc';
+ import { rollup, watch } from 'rollup';
+
+ async function bundle(entryFile, outputDir, { packagesToInline } = {}) {
+ const bundle1 = await rollup({
+ input: fileURLToPath(entryFile),
+ plugins: plugins(packagesToInline),
+ treeshake: "smallest"
+ });
+ return bundle1.write({
+ ...outputOptions(),
+ sourcemap: true,
+ dir: fileURLToPath(outputDir)
+ });
+ }
+ async function bundleAndWatch(entryFile, outputDir, { packagesToInline, onBundleEnd }) {
+ const watchOptions = {
+ plugins: plugins(packagesToInline),
+ input: fileURLToPath(entryFile),
+ treeshake: "smallest",
+ output: {
+ ...outputOptions(),
+ sourcemap: true,
+ dir: fileURLToPath(outputDir)
+ },
+ watch: {
+ exclude: [
+ fileURLToPath(outputDir)
+ ]
+ }
+ };
+ const watcher = watch(watchOptions);
+ watcher.on("event", async (event)=>{
+ switch(event.code){
+ case "BUNDLE_START":
+ {
+ console.log("🏗️ Start building...");
+ return;
+ }
+ case "BUNDLE_END":
+ {
+ const output = await event.result.generate(outputOptions());
+ console.log(`🏠 Finished build in ${event.duration}ms`);
+ await onBundleEnd?.(output);
+ return event.result.close();
+ }
+ case "ERROR":
+ {
+ console.log("🛑 Encountered an error while bundling");
+ console.error(event.error);
+ return event.result?.close();
+ }
+ }
+ });
+ }
+ const plugins = (workspacePackages)=>[
+ swc({
+ swc: {
+ jsc: {
+ preserveAllComments: true
+ }
+ }
+ }),
+ nodeResolve({
+ resolveOnly: (moduleId)=>moduleId.startsWith(".") || workspacePackages?.some((it)=>moduleId.startsWith(it)) || moduleId.startsWith("@entur-private/") || false
+ })
+ ];
+ const outputOptions = ()=>({
+ preserveModules: true,
+ dir: "lib",
+ format: "esm",
+ entryFileNames: ({ isEntry })=>isEntry ? "index.js" : "[hash].js"
+ });
+
+ export { bundle, bundleAndWatch };
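bundle and bundleAndWatch take file URLs (both go through fileURLToPath), compile with SWC, restrict node-resolve to relative ids, the listed packagesToInline prefixes and @entur-private/, and emit preserved ES modules. A hedged usage sketch; the import path and the entry/output URLs are made up for illustration:

    // Hypothetical path; the diff does not name this hunk's file.
    import { bundle } from './utils/bundle.js';

    // Entry point and output directory as file URLs relative to this script.
    await bundle(
        new URL('./src/index.js', import.meta.url),
        new URL('./lib/', import.meta.url),
        { packagesToInline: ['@entur/'] } // only these prefixes (plus relative and @entur-private/ ids) are inlined
    );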
@@ -0,0 +1,159 @@
+ import { builtinModules } from 'node:module';
+ import { dirname } from 'node:path';
+ import { coerce, satisfies } from 'semver';
+ import { groupBy } from './array.js';
+ import { groupByAsync, asyncMap } from './async.js';
+ import { spawnAsync } from './exec.js';
+ import { readPackageJSON, packageUp } from './packageJSON.js';
+
+ const alwaysIncludePackageNames = [
+ "@google-cloud/functions-framework"
+ ];
+ async function calculateDependencies(outputs) {
+ const outputGroupedByPackage = await groupByAsync(outputs.filter((it)=>Boolean(it.type === "chunk" && it.facadeModuleId && it.imports.length > 0)), async (output)=>{
+ const key = output.facadeModuleId && await packageUp({
+ cwd: output.facadeModuleId
+ });
+ if (!key) {
+ throw new Error(`No package.json found for ${output.facadeModuleId}`);
+ }
+ return key;
+ });
+ const filterDependencies = (lookup, dependencies)=>{
+ if (!dependencies) return;
+ const entries = Object.entries(dependencies).filter(([key])=>lookup.has(key));
+ return Object.fromEntries(entries);
+ };
+ const entries = await asyncMap([
+ ...outputGroupedByPackage
+ ], async ([packageJSONPath, outputs])=>{
+ const packageJSON = await readPackageJSON(packageJSONPath);
+ const importedPackageNames = new Set(outputs.flatMap((it)=>it.imports).filter((it)=>!isBuiltinImport(it)).map(specifierToPackageName));
+ if (alwaysIncludePackageNames?.length) {
+ alwaysIncludePackageNames.forEach((it)=>{
+ importedPackageNames.add(it);
+ });
+ }
+ return [
+ dirname(packageJSONPath),
+ {
+ ...packageJSON,
+ dependencies: filterDependencies(importedPackageNames, packageJSON.dependencies),
+ devDependencies: filterDependencies(importedPackageNames, packageJSON.devDependencies),
+ optionalDependencies: filterDependencies(importedPackageNames, packageJSON.optionalDependencies)
+ }
+ ];
+ });
+ return new Map(entries);
+ }
+ function specifierToPackageName(specifier) {
+ if (specifier.startsWith("@")) {
+ return specifier.split("/", 2).join("/");
+ }
+ return specifier.split("/", 1).join("/");
+ }
+ function isBuiltinImport(specifier) {
+ return builtinModules.includes(specifier) || specifier.startsWith("node:");
+ }
+ function harmonizeDependencies(initialDependecies) {
+ const entries = [
+ ...initialDependecies.values()
+ ].flatMap((it)=>[
+ ...Object.entries(it.dependencies ?? {}),
+ ...Object.entries(it.devDependencies ?? {}),
+ ...Object.entries(it.peerDependencies ?? {}),
+ ...Object.entries(it.optionalDependencies ?? {})
+ ]);
+ const harmonizedSemvers = new Map([
+ ...groupBy(entries, ([packageName])=>packageName)
+ ].map(([packageName, entriesForPackageName])=>{
+ const firstVersion = entriesForPackageName[0][1];
+ if (entriesForPackageName.length === 1) {
+ return [
+ packageName,
+ firstVersion
+ ];
+ }
+ if (entriesForPackageName.every(([, semver])=>semver === firstVersion)) {
+ return [
+ packageName,
+ firstVersion
+ ];
+ }
+ const satisfiedVersion = entriesForPackageName.reduce((acc, [, semver])=>{
+ if (acc === semver) {
+ return acc;
+ }
+ const semVerOrNull = coerce(semver);
+ if (semVerOrNull !== null && satisfies(semVerOrNull, acc)) {
+ console.log(`${packageName} ${semver} over ${acc}`);
+ return semver;
+ }
+ if (semVerOrNull !== null && satisfies(semVerOrNull, semver)) {
+ console.log(`${packageName} ${acc} over ${semver}`);
+ return acc;
+ }
+ throw new Error(`Unable to find determine correct version of ${packageName} between ${semver} and ${acc}`);
+ }, firstVersion);
+ return [
+ packageName,
+ satisfiedVersion
+ ];
+ }));
+ const harmonizeSemver = (dependencies)=>{
+ if (!dependencies) return;
+ const entries = Object.entries(dependencies).map(([packageName, initialSemver])=>{
+ const semver = harmonizedSemvers.get(packageName);
+ return [
+ packageName,
+ semver ?? initialSemver
+ ];
+ });
+ return Object.fromEntries(entries);
+ };
+ return new Map([
+ ...initialDependecies
+ ].map(([packageName, packageJSON])=>{
+ return [
+ packageName,
+ {
+ ...packageJSON,
+ dependencies: harmonizeSemver(packageJSON.dependencies),
+ devDependencies: harmonizeSemver(packageJSON.devDependencies),
+ peerDependencies: harmonizeSemver(packageJSON.peerDependencies)
+ }
+ ];
+ }));
+ }
+ // optionalDependencies: harmonizeSemver(packageJSON.optionalDependencies),
+ async function flattenDependencies(dependencies) {
+ const listOfDependencies = [
+ ...dependencies.values()
+ ].map((it)=>it.dependencies);
+ const listOfDevDependencies = [
+ ...dependencies.values()
+ ].map((it)=>it.devDependencies);
+ const listOfPeerDependencies = [
+ ...dependencies.values()
+ ].map((it)=>it.peerDependencies);
+ const listOfOptionalDependencies = [
+ ...dependencies.values()
+ ].map((it)=>it.optionalDependencies);
+ return {
+ dependencies: Object.assign({}, ...listOfDependencies),
+ devDependencies: Object.assign({}, ...listOfDevDependencies),
+ peerDependencies: Object.assign({}, ...listOfPeerDependencies),
+ optionalDependencies: Object.assign({}, ...listOfOptionalDependencies)
+ };
+ }
+ async function linkDependencies(workingDir) {
+ await spawnAsync("pnpm", [
+ "install",
+ "--ignore-workspace",
+ "--prefer-offline"
+ ], {
+ cwd: workingDir
+ });
+ }
+
+ export { calculateDependencies, flattenDependencies, harmonizeDependencies, linkDependencies };
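harmonizeDependencies picks, for each package name, whichever declared range's coerced version satisfies the other (throwing when neither does) and rewrites every package.json entry to that single range; flattenDependencies then merges the per-package maps into one manifest-shaped object. It can be tried on an in-memory Map (import path, directories and versions below are made up):

    // Hypothetical path and fixture data, for illustration only.
    import { harmonizeDependencies, flattenDependencies } from './utils/dependencies.js';

    const perPackage = new Map([
        ['/repo/fn-a', { name: 'fn-a', dependencies: { semver: '^7.5.0' } }],
        ['/repo/fn-b', { name: 'fn-b', dependencies: { semver: '^7.6.2' } }]
    ]);

    // coerce('^7.6.2') → 7.6.2, which satisfies ^7.5.0, so both entries end up on ^7.6.2.
    const harmonized = harmonizeDependencies(perPackage);
    console.log(harmonized.get('/repo/fn-a').dependencies); // { semver: '^7.6.2' }

    // Merge the harmonized per-package manifests into one dependency object.
    console.log((await flattenDependencies(harmonized)).dependencies); // { semver: '^7.6.2' }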
@@ -0,0 +1,20 @@
+ import { spawn } from 'node:child_process';
+
+ function spawnAsync(command, args, options) {
+ return new Promise((resolve, reject)=>{
+ const run = spawn(command, args, {
+ ...options,
+ stdio: "inherit"
+ });
+ run.on("exit", (exitCode)=>{
+ if (exitCode === 0) {
+ resolve();
+ } else {
+ const commandWithArguments = args.length > 0 ? `${command} ${args.join(" ")}` : command;
+ reject(new Error(`an error occurred while executing "${commandWithArguments}" in folder ${options?.cwd}`));
+ }
+ });
+ });
+ }
+
+ export { spawnAsync };
@@ -0,0 +1,18 @@
+ import { writeFile } from 'node:fs/promises';
+ import { readJSON } from './fs.js';
+
+ function getFirebaseJSON(file) {
+ return readJSON(file);
+ }
+ async function writeFirebaseJSON(codebase, { functions }, outputDir) {
+ const newFirebaseJSON = {
+ functions: {
+ ...functions,
+ codebase,
+ source: "."
+ }
+ };
+ await writeFile(new URL("firebase.json", outputDir), JSON.stringify(newFirebaseJSON, undefined, 4));
+ }
+
+ export { getFirebaseJSON, writeFirebaseJSON };
@@ -0,0 +1,20 @@
+ import { rm, mkdir, readFile, writeFile } from 'node:fs/promises';
+
+ async function cleanDir(dir) {
+ await rm(dir, {
+ recursive: true,
+ force: true
+ });
+ await mkdir(dir, {
+ recursive: true
+ });
+ }
+ async function readJSON(file) {
+ const fileContent = await readFile(file, "utf-8");
+ return JSON.parse(fileContent);
+ }
+ async function writeJSON(file, data) {
+ await writeFile(file, JSON.stringify(data, undefined, 4));
+ }
+
+ export { cleanDir, readJSON, writeJSON };
@@ -0,0 +1,21 @@
+ import process from 'node:process';
+ import { findUp } from 'find-up-simple';
+ import { readJSON, writeJSON } from './fs.js';
+
+ function readPackageJSON(file) {
+ return readJSON(file);
+ }
+ async function writePackageJSON(file, packageJSON) {
+ await writeJSON(file, packageJSON);
+ }
+ function getPackageName(name) {
+ const match = name.match(/@[a-z-]+\/([a-z-]+)$/);
+ return match?.[1] ?? name;
+ }
+ async function packageUp({ cwd = process.cwd() } = {}) {
+ return findUp("package.json", {
+ cwd
+ });
+ }
+
+ export { getPackageName, packageUp, readPackageJSON, writePackageJSON };
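getPackageName strips a single scope segment via the regex above; a couple of illustrative calls (import path mirrors the './packageJSON.js' specifier used elsewhere in the diff):

    import { getPackageName } from './utils/packageJSON.js';

    console.log(getPackageName('@entur/function-tools')); // 'function-tools'
    console.log(getPackageName('plain-package'));         // 'plain-package' (no scope, so the name is returned as-is)
    console.log(getPackageName('@scope1/pkg'));           // '@scope1/pkg' (digits fall outside [a-z-], so the regex misses)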