@entur/function-tools 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/bin/enturFunctions.js +21 -0
- package/lib/commands/build.js +38 -0
- package/lib/commands/deploy.js +91 -0
- package/lib/commands/start.js +90 -0
- package/lib/commands/unusedExports.js +168 -0
- package/lib/commands/utils.js +73 -0
- package/lib/importExports/exports.d.ts +10 -0
- package/lib/importExports/exports.js +88 -0
- package/lib/importExports/imports.d.ts +11 -0
- package/lib/importExports/imports.js +74 -0
- package/lib/importExports/proxy.js +56 -0
- package/lib/importExports/resolve.js +31 -0
- package/lib/importExports/resolveImports.js +53 -0
- package/lib/importExports/traverse.js +41 -0
- package/lib/importExports/unused.js +76 -0
- package/lib/importExports/utils.js +18 -0
- package/lib/index.d.ts +2 -0
- package/lib/index.js +2 -0
- package/lib/utils/array.js +44 -0
- package/lib/utils/async.js +34 -0
- package/lib/utils/bundle.js +77 -0
- package/lib/utils/dependencies.js +159 -0
- package/lib/utils/exec.js +20 -0
- package/lib/utils/firebase.js +18 -0
- package/lib/utils/fs.js +20 -0
- package/lib/utils/packageJSON.js +21 -0
- package/lib/utils/workspace.js +20 -0
- package/package.json +27 -0
package/bin/enturFunctions.js
ADDED
@@ -0,0 +1,21 @@
import { Command } from 'commander';
import { registerBuild } from '../lib/commands/build.js';
import { registerDeploy } from '../lib/commands/deploy.js';
import { registerStart } from '../lib/commands/start.js';
import { registerUnusedExports } from '../lib/commands/unusedExports.js';

const program = new Command();
program.name("entur-firebase").description("A multi-command CLI built with commander").version("0.0.1").option("-v, --verbose", "Enable verbose output");
registerBuild(program);
registerDeploy(program);
registerStart(program);
registerUnusedExports(program);
// Show help when no subcommand is passed
if (!process.argv.slice(2).length) {
    program.outputHelp();
    process.exit(0);
}
program.parseAsync(process.argv).catch((error)=>{
    console.error(error?.message || error);
    process.exit(1);
});
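
For orientation, a minimal hedged sketch (the arguments are hypothetical, not part of the package) of how commander routes a subcommand and camel-cases its options, which is the mechanism this entry point relies on:

import { Command } from 'commander';

const program = new Command();
program
    .command('build')
    .option('-o, --output-dir <dir>', 'Output directory')
    .action((options) => {
        // For the argv below, commander exposes the flag as options.outputDir === 'dist'
        console.log(options.outputDir);
    });

// Simulates `entur-firebase build -o dist`; the first two argv entries are the usual node/script placeholders
await program.parseAsync(['node', 'entur-firebase', 'build', '-o', 'dist']);
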
package/lib/commands/build.js
ADDED
@@ -0,0 +1,38 @@
import { bundle } from '../utils/bundle.js';
import { flattenDependencies, harmonizeDependencies, calculateDependencies } from '../utils/dependencies.js';
import { cleanDir } from '../utils/fs.js';
import { readPackageJSON } from '../utils/packageJSON.js';
import { getWorkspacePackageNames } from '../utils/workspace.js';
import { createContext } from './utils.js';

function registerBuild(program) {
    program.command("build").description("Build the project").option("-o, --output-dir <dir>", "Output directory").action(async (options)=>{
        try {
            const { packageRoot, packageJSON, outputDir, pnpmWorkspaceYAML } = await createContext(options);
            const { name, exports: exports$1 } = await readPackageJSON(packageJSON);
            console.log("🧹 Cleaning dist folder");
            await cleanDir(outputDir);
            console.log(`🔨 Building ${name}`);
            const workspacePackages = await getWorkspacePackageNames(pnpmWorkspaceYAML);
            const entryFile = new URL(exports$1?.["."] ?? "./index.js", packageRoot);
            await build(entryFile, outputDir, workspacePackages);
        } catch (error) {
            console.error(error);
            process.exit(1);
        }
    });
}
async function build(entryFile, outputDir, packagesToInline) {
    const bundleOutputDir = new URL("lib", outputDir);
    const { output } = await bundle(entryFile, bundleOutputDir, {
        packagesToInline
    });
    const dependencies = await flattenDependencies(harmonizeDependencies(await calculateDependencies(output)));
    // const { contentHash } = await pack(functionGroupDir, distDir)
    return {
        main: `./lib/${output[0].fileName}`,
        dependencies
    };
}

export { registerBuild };
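
A small hedged sketch (paths and the exports field are hypothetical) of how the entry file URL above is derived from the package's exports map:

// Hypothetical package root (a directory file: URL, as produced by createContext)
const packageRoot = new URL('file:///repo/functions/');
// Hypothetical "exports" field from package.json
const exportsField = { '.': './src/index.ts' };

const entryFile = new URL(exportsField['.'] ?? './index.js', packageRoot);
console.log(entryFile.href); // -> file:///repo/functions/src/index.ts
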
package/lib/commands/deploy.js
ADDED
@@ -0,0 +1,91 @@
import { writeFile } from 'node:fs/promises';
import { bundle } from '../utils/bundle.js';
import { flattenDependencies, harmonizeDependencies, calculateDependencies, linkDependencies } from '../utils/dependencies.js';
import { spawnAsync } from '../utils/exec.js';
import { getFirebaseJSON, writeFirebaseJSON } from '../utils/firebase.js';
import { cleanDir, writeJSON } from '../utils/fs.js';
import { readPackageJSON, getPackageName, writePackageJSON } from '../utils/packageJSON.js';
import { getWorkspacePackageNames } from '../utils/workspace.js';
import { createContext } from './utils.js';

function registerDeploy(program) {
    program.command("deploy").description("Deploy the project").option("-o, --output-dir <dir>", "Output directory").option("-P, --project <project id or alias>", "Project id or alias").action(async (options)=>{
        try {
            const context = await createContext(options);
            await deploy(context);
        } catch (error) {
            console.error(error);
            process.exit(1);
        }
    });
}
async function deploy({ projectAlias, projectId, packageJSON, packageRoot, pnpmWorkspaceYAML, projectRoot, outputDir }) {
    const { name, version, exports: exports$1 } = await readPackageJSON(packageJSON);
    const workspacePackages = await getWorkspacePackageNames(pnpmWorkspaceYAML);
    const entryFile = new URL(exports$1?.["."] ?? "./index.js", packageRoot);
    console.log("🧹 Cleaning dist folder");
    await cleanDir(outputDir);
    console.log(`🔨 Building ${name}`);
    const { main, dependencies } = await build(entryFile, outputDir, workspacePackages);
    console.log("⛷️ Prepare deploy");
    const firebaseJSON = await getFirebaseJSON(new URL("firebase.json", projectRoot));
    await prepareDeploy(getPackageName(name), projectAlias, projectId, {
        name,
        type: "module",
        version,
        main,
        ...dependencies
    }, firebaseJSON, outputDir);
    // contentHash,
    console.log(`🚢 Deploying to ${projectAlias} (${projectId})`);
    await deployToFirebase(outputDir, projectId);
}
async function build(entryFile, outputDir, packagesToInline) {
    const bundleOutputDir = new URL("lib", outputDir);
    const { output } = await bundle(entryFile, bundleOutputDir, {
        packagesToInline
    });
    const dependencies = await flattenDependencies(harmonizeDependencies(await calculateDependencies(output)));
    // const { contentHash } = await pack(functionGroupDir, distDir)
    return {
        main: `./lib/${output[0].fileName}`,
        dependencies
    };
}
async function prepareDeploy(codebase, projectAlias, projectId, packageJSON, firebaseJSON, outputDir) {
    await Promise.all([
        writeFirebaseJSON(codebase, firebaseJSON, outputDir),
        createFirebaseRC(projectAlias, projectId, outputDir),
        writePackageJSON(new URL("./package.json", outputDir), packageJSON),
        createDotenv(codebase, projectAlias, outputDir)
    ]);
    await linkDependencies(outputDir);
}
async function createFirebaseRC(projectAlias, projectId, outputDir) {
    await writeJSON(new URL("./.firebaserc", outputDir), {
        projects: {
            [projectAlias]: projectId
        }
    });
}
function deployToFirebase(workingDir, projectId, extraArgs = []) {
    return spawnAsync("firebase", [
        "deploy",
        "--only",
        "functions,firestore",
        "-P",
        projectId,
        ...extraArgs
    ], {
        cwd: workingDir
    });
}
async function createDotenv(codebase, projectAlias, outputDir) {
    const envFile = `
NODE_OPTIONS='--enable-source-maps'
FUNCTION_CODEBASE='${codebase}'
ENTUR_PROJECT_ALIAS='${projectAlias}'`;
    await writeFile(new URL("./.env", outputDir), envFile.trim());
}

export { registerDeploy };
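
To illustrate what prepareDeploy leaves in the output directory, a hedged sketch with hypothetical values (alias, id, and codebase name are invented):

const projectAlias = 'dev';          // hypothetical alias from .firebaserc
const projectId = 'entur-dev-123';   // hypothetical project id
const codebase = 'functions';        // hypothetical codebase name, i.e. getPackageName(name)

// .firebaserc content written by createFirebaseRC
console.log(JSON.stringify({ projects: { [projectAlias]: projectId } }));

// .env content written by createDotenv
console.log(`
NODE_OPTIONS='--enable-source-maps'
FUNCTION_CODEBASE='${codebase}'
ENTUR_PROJECT_ALIAS='${projectAlias}'`.trim());

// deployToFirebase then spawns, from the output directory:
//   firebase deploy --only functions,firestore -P entur-dev-123
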
package/lib/commands/start.js
ADDED
@@ -0,0 +1,90 @@
import { bundleAndWatch } from '../utils/bundle.js';
import { flattenDependencies, harmonizeDependencies, calculateDependencies, linkDependencies } from '../utils/dependencies.js';
import { spawnAsync } from '../utils/exec.js';
import { getFirebaseJSON, writeFirebaseJSON } from '../utils/firebase.js';
import { cleanDir, writeJSON } from '../utils/fs.js';
import { readPackageJSON, getPackageName, writePackageJSON } from '../utils/packageJSON.js';
import { getWorkspacePackageNames } from '../utils/workspace.js';
import { createContext } from './utils.js';

function registerStart(program) {
    program.command("start").description("Start function emulator").option("-o, --output-dir <dir>", "Output directory").option("-P, --project <project id or alias>", "Project id or alias").action(async (options)=>{
        try {
            const context = await createContext(options);
            await start(context);
        } catch (error) {
            console.error(error);
            process.exit(1);
        }
    });
}
async function start({ packageRoot, packageJSON, projectAlias, projectId, projectRoot, pnpmWorkspaceYAML, outputDir }) {
    const { name, version, exports: exports$1 } = await readPackageJSON(packageJSON);
    const codebase = getPackageName(name);
    const packagesToInline = await getWorkspacePackageNames(pnpmWorkspaceYAML);
    const entryFile = new URL(exports$1?.["."] ?? "./index.js", packageRoot);
    console.log("🧹 Cleaning dist folder");
    await cleanDir(outputDir);
    console.log(`🔨 Building ${name}`);
    const bundleOutputDir = new URL("lib", outputDir);
    let firstRun = true;
    const onBundleEnd = async ({ output })=>{
        const main = `./lib/${output[0].fileName}`;
        const dependencies = await flattenDependencies(harmonizeDependencies(await calculateDependencies(output)));
        // const { contentHash } = await pack(functionGroupDir, distDir)
        const firebaseJSON = await getFirebaseJSON(new URL("firebase.json", projectRoot));
        await prepareStart(codebase, projectAlias, projectId, {
            name,
            type: "module",
            version,
            main,
            ...dependencies
        }, firebaseJSON, outputDir);
        if (firstRun) {
            firstRun = false;
            runEmulator(codebase, projectAlias, projectId, outputDir, []).catch((error)=>{
                console.error(error);
                process.exit(1);
            });
        }
    };
    await bundleAndWatch(entryFile, bundleOutputDir, {
        packagesToInline,
        onBundleEnd
    });
}
async function prepareStart(codebase, projectAlias, projectId, packageJSON, firebaseJSON, outputDir) {
    await Promise.all([
        writeFirebaseJSON(codebase, firebaseJSON, outputDir),
        createFirebaseRC(projectAlias, projectId, outputDir),
        writePackageJSON(new URL("./package.json", outputDir), packageJSON)
    ]);
    await linkDependencies(outputDir);
}
async function createFirebaseRC(projectAlias, projectId, outputDir) {
    await writeJSON(new URL("./.firebaserc", outputDir), {
        projects: {
            [projectAlias]: projectId
        }
    });
}
function runEmulator(codebase, projectAlias, projectId, workingDir, extraArgs) {
    return spawnAsync("firebase", [
        "emulators:start",
        "--only",
        "functions",
        "-P",
        projectId,
        ...extraArgs
    ], {
        cwd: workingDir,
        env: {
            ...process.env,
            FUNCTION_CODEBASE: codebase,
            ENTUR_PROJECT_ALIAS: projectAlias,
            NODE_OPTIONS: process.env.NODE_OPTIONS ? `${process.env.NODE_OPTIONS} --enable-source-maps` : "--enable-source-maps"
        }
    });
}

export { registerStart };
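
A brief restatement of the watch loop above, as read from this file (no new behaviour):

// 1. bundleAndWatch (defined in ../utils/bundle.js, not shown in this diff view) invokes onBundleEnd after each bundle.
// 2. Each bundle rewrites package.json, firebase.json and .firebaserc in the output dir and relinks dependencies.
// 3. Only the first successful bundle spawns `firebase emulators:start --only functions -P <projectId>`,
//    guarded by the firstRun flag, so later rebuilds reuse the already running emulator.
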
package/lib/commands/unusedExports.js
ADDED
@@ -0,0 +1,168 @@
import { glob, readFile } from 'node:fs/promises';
import { join, relative } from 'node:path';
import { parse } from '@swc/core';
import { EnforceExtension } from 'oxc-resolver';
import { getExports } from '../importExports/exports.js';
import { getImports } from '../importExports/imports.js';
import { getProxyExports } from '../importExports/proxy.js';
import { createResolver } from '../importExports/resolve.js';
import { createResolveImport } from '../importExports/resolveImports.js';
import { createReadFileDependencies, extractUnusedExports } from '../importExports/unused.js';
import { createCache } from '../importExports/utils.js';
import { createContext } from './utils.js';

function registerUnusedExports(program) {
    program.command("unused-exports").description("Check for unused exports").action(async ()=>{
        try {
            const context = await createContext();
            await unusedExports([
                "**/*.+(ts|tsx)"
            ], [], {
                extensions: [
                    ".ts"
                ]
            }, context);
        } catch (error) {
            console.error(error);
            process.exit(1);
        }
    });
}
async function unusedExports(patterns, ignorePatterns, resolverOptions, { cwd }) {
    const resolver = createResolver({
        conditionNames: [
            "import",
            "node",
            "default"
        ],
        enforceExtension: EnforceExtension.Auto,
        ...resolverOptions
    });
    const resolveImport = createResolveImport(resolver, getFileExports, getFileProxyExports);
    const readFileDependencies = createReadFileDependencies(getFileImports, getFileExports, resolveImport);
    const exclude = [
        "**/node_modules",
        "tools/createPackage/template",
        "tools/dependency/mocks"
    ];
    const fileDependencies = [];
    for await (const filePath of glob(patterns, {
        cwd,
        exclude
    })){
        fileDependencies.push(await readFileDependencies(join(cwd, filePath)));
    }
    const { unusedExports, missingImports } = extractUnusedExports(fileDependencies, ignorePatterns.map((it)=>join(cwd, it)));
    if (unusedExports.size > 0) {
        console.log("Exports not used in any files:\n");
        await print(unusedExports, {
            cwd
        });
    }
    if (unusedExports.size > 0 && missingImports.size > 0) {
        console.log();
    }
    if (missingImports.size > 0) {
        console.log("Unresolvable import paths:\n");
        await print(missingImports, {
            cwd
        });
    }
    if (unusedExports.size > 0 || missingImports.size > 0) {
        process.exitCode = 1;
    }
}
const parseFile = createCache(async (filePath)=>{
    let options;
    if (filePath.endsWith(".ts")) {
        options = {
            syntax: "typescript"
        };
    } else if (filePath.endsWith(".tsx")) {
        options = {
            syntax: "typescript",
            tsx: true
        };
    } else if (filePath.endsWith(".js")) {
        options = {
            syntax: "ecmascript"
        };
    } else if (filePath.endsWith(".jsx")) {
        options = {
            syntax: "ecmascript",
            jsx: true
        };
    } else {
        return {
            type: "Module",
            span: {
                start: 0,
                end: 17,
                ctxt: 2
            },
            interpreter: "",
            body: [
                {
                    type: "ExportDefaultExpression",
                    span: {
                        start: 0,
                        end: 17,
                        ctxt: 2
                    },
                    expression: {
                        type: "ObjectExpression",
                        span: {
                            start: 15,
                            end: 17,
                            ctxt: 2
                        },
                        properties: []
                    }
                }
            ]
        };
    }
    const content = await readFile(filePath, "utf-8");
    return parse(content, options);
});
const getFileImports = createCache(async (filePath)=>{
    const module = await parseFile(filePath);
    return getImports(module);
});
const getFileExports = createCache(async (filePath)=>{
    const module = await parseFile(filePath);
    return getExports(module);
});
const getFileProxyExports = createCache(async (filePath)=>{
    const module = await parseFile(filePath);
    return getProxyExports(module);
});
async function print(printMap, { limit = 50, cwd = process.cwd() } = {}) {
    let count = 0;
    for (const [filePath, items] of printMap){
        count++;
        if (count > limit) {
            break;
        }
        const relativePath = relative(cwd, filePath);
        console.log(highlight(relativePath));
        for (const item of items){
            if ("specifier" in item) {
                console.log(` - ${item.specifier}`);
            } else {
                console.log(` - ${item.name}`);
            }
        }
        console.log("");
    }
    if (printMap.size > limit) {
        console.log("");
        console.log(`Showing ${limit} of ${printMap.size} affected files`);
        console.log("");
    }
}
function highlight(message) {
    return `\u001B[33m${message}\u001B[0m`;
}

export { registerUnusedExports };
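
A hedged sketch (file paths and names are invented) of the Map shapes print() consumes and how entries are rendered:

// Hypothetical result shapes, mirroring what extractUnusedExports returns
const unusedExports = new Map([
    ['/repo/packages/shared/src/date.ts', [{ name: 'formatDuration' }]]
]);
const missingImports = new Map([
    ['/repo/packages/app/src/main.ts', [{ specifier: './missingModule.js' }]]
]);

// print() shows each file path (in yellow, via highlight) followed by one " - <name>" line per
// unused export, or " - <specifier>" for an unresolvable import path; at most `limit` (default 50)
// files are listed, and process.exitCode is set to 1 whenever either map is non-empty.
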
package/lib/commands/utils.js
ADDED
@@ -0,0 +1,73 @@
import { pathToFileURL } from 'node:url';
import { findUp } from 'find-up-simple';
import { readJSON } from '../utils/fs.js';

async function createContext({ outputDir = "dist", project } = {}) {
    const cwd = process.cwd();
    const [pnpmWorkspacePath, packageJSONPath, firebaseRCPath] = await Promise.all([
        findUp("pnpm-workspace.yaml", {
            cwd
        }),
        findUp("package.json", {
            cwd
        }),
        findUp(".firebaserc", {
            cwd
        })
    ]);
    if (!pnpmWorkspacePath) {
        throw new Error("Unable to find pnpm-workspace.yaml");
    }
    if (!packageJSONPath) {
        throw new Error("Unable to find package.json");
    }
    if (!firebaseRCPath) {
        throw new Error("Unable to find .firebaserc");
    }
    const { projects } = await readJSON(firebaseRCPath);
    const { projectAlias, projectId } = getProjectAliasAndId(projects, project);
    const outputDirUrl = new URL(`./${outputDir}/`, `${pathToFileURL(cwd)}/`);
    const pnpmWorkspaceYAML = pathToFileURL(pnpmWorkspacePath);
    const packageJSONUrl = pathToFileURL(packageJSONPath);
    return {
        pnpmWorkspaceYAML,
        packageJSON: packageJSONUrl,
        outputDir: outputDirUrl,
        packageRoot: new URL("./", packageJSONUrl),
        projectRoot: new URL("./", pnpmWorkspaceYAML),
        projectAlias,
        projectId,
        cwd
    };
}
function getProjectAliasAndId({ default: defaultProjectId, ...projects }, project) {
    if (!project) {
        const defaultProjectEntry = Object.entries(projects).find(([, value])=>value === defaultProjectId);
        if (!defaultProjectEntry) {
            throw new Error("No default project found in .firebaserc");
        }
        const [projectAlias, projectId] = defaultProjectEntry;
        return {
            projectAlias,
            projectId
        };
    }
    if (projects[project]) {
        return {
            projectAlias: project,
            projectId: projects[project]
        };
    }
    const projectEntries = Object.entries(projects);
    const matchingProjectEntry = projectEntries.find(([, value])=>value === project);
    if (matchingProjectEntry) {
        const [projectAlias, projectId] = matchingProjectEntry;
        return {
            projectAlias,
            projectId
        };
    }
    throw new Error(`No project with alias or id ${project} found in .firebaserc`);
}

export { createContext };
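
To make the alias/id resolution concrete, a hedged sketch with a hypothetical .firebaserc projects map:

// Hypothetical "projects" map read from .firebaserc
const projects = {
    default: 'entur-dev-123', // the id that the "default" key points at
    dev: 'entur-dev-123',
    prod: 'entur-prod-456'
};

// getProjectAliasAndId(projects)                   -> { projectAlias: 'dev',  projectId: 'entur-dev-123' }  (alias whose id matches "default")
// getProjectAliasAndId(projects, 'prod')           -> { projectAlias: 'prod', projectId: 'entur-prod-456' }  (matched as an alias)
// getProjectAliasAndId(projects, 'entur-prod-456') -> { projectAlias: 'prod', projectId: 'entur-prod-456' }  (matched as an id)
// Anything else throws "No project with alias or id ... found in .firebaserc".
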
package/lib/importExports/exports.js
ADDED
@@ -0,0 +1,88 @@
import { filterMap } from '../utils/array.js';

function getExports(module) {
    return module.body.flatMap((node)=>{
        switch(node.type){
            case "ExportDeclaration":
                return getExportDetails(node);
            case "ExportNamedDeclaration":
                return getNamedExportDetails(node);
            case "ExportDefaultDeclaration":
            case "ExportDefaultExpression":
                return getExportDefaultDetails(node);
            default:
                return [];
        }
    });
}
function getExportDetails(node) {
    switch(node.declaration.type){
        case "VariableDeclaration":
            {
                return node.declaration.declarations.flatMap((declaration)=>{
                    switch(declaration.id.type){
                        case "Identifier":
                            return {
                                name: declaration.id.value,
                                node
                            };
                        case "ObjectPattern":
                            {
                                return filterMap(declaration.id.properties, (property)=>{
                                    if (property.type !== "AssignmentPatternProperty") return;
                                    return {
                                        name: property.key.value,
                                        node
                                    };
                                });
                            }
                        default:
                            {
                                return [];
                            }
                    }
                });
            }
        case "ClassDeclaration":
        case "FunctionDeclaration":
            {
                return {
                    name: node.declaration.identifier.value,
                    node
                };
            }
        case "TsEnumDeclaration":
        case "TsInterfaceDeclaration":
        case "TsTypeAliasDeclaration":
            {
                return {
                    name: node.declaration.id.value,
                    node
                };
            }
        default:
            {
                return [];
            }
    }
}
function getNamedExportDetails(node) {
    if (node.source) return [];
    return filterMap(node.specifiers, (specifier)=>{
        if (specifier.type !== "ExportSpecifier") return;
        return {
            name: specifier.orig.value,
            node: node
        };
    });
}
function getExportDefaultDetails(node) {
    return [
        {
            name: "default",
            node
        }
    ];
}

export { getExports };
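
A hedged usage sketch (the source text is hypothetical; the relative import path is the in-package one used by unusedExports.js) showing the names getExports extracts from an SWC-parsed module:

import { parse } from '@swc/core';
import { getExports } from '../importExports/exports.js';

const ast = await parse([
    "export const a = 1;",
    "export function run() {}",
    "export type Options = { id: string };",
    "export { helper };",
    "export default {};"
].join('\n'), { syntax: 'typescript' });

// Each entry pairs an exported name with the AST node it came from
console.log(getExports(ast).map((it) => it.name));
// -> [ 'a', 'run', 'Options', 'helper', 'default' ]
// Re-exports with a source (`export { x } from './x.js'`) are skipped by getNamedExportDetails
// and are presumably handled by getProxyExports from proxy.js instead.
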
package/lib/importExports/imports.js
ADDED
@@ -0,0 +1,74 @@
import { filterMap } from '../utils/array.js';
import { traverse } from './traverse.js';

function getImports(module) {
    return module.body.flatMap((node)=>{
        switch(node.type){
            case "ImportDeclaration":
                {
                    return getImportDetails(node, module);
                }
            default:
                {
                    return [];
                }
        }
    });
}
function getImportDetails(node, module) {
    const specifier = node.source.value;
    if (node.specifiers.length === 0) {
        return {
            specifier,
            node
        };
    }
    return filterMap(node.specifiers, (it)=>{
        switch(it.type){
            case "ImportDefaultSpecifier":
                {
                    return {
                        name: "default",
                        specifier,
                        node
                    };
                }
            case "ImportSpecifier":
                {
                    return {
                        name: it.imported?.value ?? it.local.value,
                        specifier,
                        node
                    };
                }
            case "ImportNamespaceSpecifier":
                {
                    const namespace = it.local.value;
                    const names = new Set();
                    traverse(module, {
                        visitMemberExpression (node) {
                            if (node.object.type === "Identifier" && node.object.value === namespace && node.property.type === "Identifier") {
                                names.add(node.property.value);
                            }
                        },
                        visitTsQualifiedName (node) {
                            if (node.left.type === "Identifier" && node.left.value === namespace && node.right.type === "Identifier") {
                                names.add(node.right.value);
                            }
                        }
                    });
                    return [
                        ...names
                    ].map((name)=>({
                        name,
                        specifier,
                        node
                    }));
                }
            default:
                console.log(it);
        }
    }).flat();
}

export { getImports };
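
And a matching hedged sketch (hypothetical source text, same in-package import path convention) of what getImports reports, including namespace usages collected via traverse:

import { parse } from '@swc/core';
import { getImports } from '../importExports/imports.js';

const ast = await parse([
    "import def from './a.js';",
    "import { x as y } from './b.js';",
    "import * as ns from './c.js';",
    "import './side-effect.js';",
    "const v = ns.value;"
].join('\n'), { syntax: 'typescript' });

console.log(getImports(ast).map(({ name, specifier }) => ({ name, specifier })));
// -> [
//      { name: 'default', specifier: './a.js' },
//      { name: 'x',       specifier: './b.js' },          // it.imported ('x') wins over it.local ('y')
//      { name: 'value',   specifier: './c.js' },          // ns.* member access found via traverse()
//      { name: undefined, specifier: './side-effect.js' } // bare import: specifier only
//    ]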