@powerhousedao/ph-cli 6.0.0-dev.17 → 6.0.0-dev.171
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/assign-env-vars-CzHgn5ax.mjs +15 -0
- package/dist/assign-env-vars-CzHgn5ax.mjs.map +1 -0
- package/dist/auth-BeA5gDPQ.mjs +23 -0
- package/dist/auth-BeA5gDPQ.mjs.map +1 -0
- package/dist/build-BflSHYLP.mjs +33 -0
- package/dist/build-BflSHYLP.mjs.map +1 -0
- package/dist/cli.d.mts +1 -0
- package/dist/cli.mjs +745 -0
- package/dist/cli.mjs.map +1 -0
- package/dist/connect-build-CrnmJlav.mjs +21 -0
- package/dist/connect-build-CrnmJlav.mjs.map +1 -0
- package/dist/connect-preview-4Xe7Lm1V.mjs +27 -0
- package/dist/connect-preview-4Xe7Lm1V.mjs.map +1 -0
- package/dist/connect-studio-38_NrT_a.mjs +28 -0
- package/dist/connect-studio-38_NrT_a.mjs.map +1 -0
- package/dist/connect-studio-DuH6WcoA.mjs +3 -0
- package/dist/generate-CMQGYRrW.mjs +2 -0
- package/dist/generate-Dq80G8n4.mjs +58 -0
- package/dist/generate-Dq80G8n4.mjs.map +1 -0
- package/dist/init-BsmDWu9-.mjs +124 -0
- package/dist/init-BsmDWu9-.mjs.map +1 -0
- package/dist/inspect-Dl8KWl3u.mjs +45 -0
- package/dist/inspect-Dl8KWl3u.mjs.map +1 -0
- package/dist/migrate-CMNF8puQ.mjs +245 -0
- package/dist/migrate-CMNF8puQ.mjs.map +1 -0
- package/dist/scripts/generate-commands-docs.ts +45 -0
- package/dist/switchboard-Bht39Myv.mjs +2 -0
- package/dist/switchboard-DBqOSS0c.mjs +72 -0
- package/dist/switchboard-DBqOSS0c.mjs.map +1 -0
- package/dist/switchboard-migrate-1lOCPmX0.mjs +50 -0
- package/dist/switchboard-migrate-1lOCPmX0.mjs.map +1 -0
- package/dist/utils-DbFSkp_Q.mjs +161 -0
- package/dist/utils-DbFSkp_Q.mjs.map +1 -0
- package/dist/vetra-CuBxTrw7.mjs +360 -0
- package/dist/vetra-CuBxTrw7.mjs.map +1 -0
- package/package.json +38 -33
- package/dist/scripts/generate-commands-md.d.ts +0 -2
- package/dist/scripts/generate-commands-md.d.ts.map +0 -1
- package/dist/scripts/generate-commands-md.js +0 -72
- package/dist/scripts/generate-commands-md.js.map +0 -1
- package/dist/scripts/generate-commands-md.ts +0 -84
- package/dist/scripts/generate-version.d.ts +0 -2
- package/dist/scripts/generate-version.d.ts.map +0 -1
- package/dist/scripts/generate-version.js +0 -13
- package/dist/scripts/generate-version.js.map +0 -1
- package/dist/scripts/generate-version.ts +0 -22
- package/dist/src/cli.d.ts +0 -3
- package/dist/src/cli.d.ts.map +0 -1
- package/dist/src/cli.js +0 -42
- package/dist/src/cli.js.map +0 -1
- package/dist/src/commands/access-token.d.ts +0 -9
- package/dist/src/commands/access-token.d.ts.map +0 -1
- package/dist/src/commands/access-token.js +0 -110
- package/dist/src/commands/access-token.js.map +0 -1
- package/dist/src/commands/connect.d.ts +0 -19
- package/dist/src/commands/connect.d.ts.map +0 -1
- package/dist/src/commands/connect.js +0 -85
- package/dist/src/commands/connect.js.map +0 -1
- package/dist/src/commands/generate.d.ts +0 -9
- package/dist/src/commands/generate.d.ts.map +0 -1
- package/dist/src/commands/generate.js +0 -41
- package/dist/src/commands/generate.js.map +0 -1
- package/dist/src/commands/help.d.ts +0 -3
- package/dist/src/commands/help.d.ts.map +0 -1
- package/dist/src/commands/help.js +0 -9
- package/dist/src/commands/help.js.map +0 -1
- package/dist/src/commands/index.d.ts +0 -14
- package/dist/src/commands/index.d.ts.map +0 -1
- package/dist/src/commands/index.js +0 -14
- package/dist/src/commands/index.js.map +0 -1
- package/dist/src/commands/inspect.d.ts +0 -6
- package/dist/src/commands/inspect.d.ts.map +0 -1
- package/dist/src/commands/inspect.js +0 -21
- package/dist/src/commands/inspect.js.map +0 -1
- package/dist/src/commands/install.d.ts +0 -15
- package/dist/src/commands/install.d.ts.map +0 -1
- package/dist/src/commands/install.js +0 -127
- package/dist/src/commands/install.js.map +0 -1
- package/dist/src/commands/list.d.ts +0 -9
- package/dist/src/commands/list.d.ts.map +0 -1
- package/dist/src/commands/list.js +0 -36
- package/dist/src/commands/list.js.map +0 -1
- package/dist/src/commands/login.d.ts +0 -12
- package/dist/src/commands/login.d.ts.map +0 -1
- package/dist/src/commands/login.js +0 -208
- package/dist/src/commands/login.js.map +0 -1
- package/dist/src/commands/migrate.d.ts +0 -10
- package/dist/src/commands/migrate.d.ts.map +0 -1
- package/dist/src/commands/migrate.js +0 -12
- package/dist/src/commands/migrate.js.map +0 -1
- package/dist/src/commands/register-commands.d.ts +0 -5
- package/dist/src/commands/register-commands.d.ts.map +0 -1
- package/dist/src/commands/register-commands.js +0 -32
- package/dist/src/commands/register-commands.js.map +0 -1
- package/dist/src/commands/service.d.ts +0 -5
- package/dist/src/commands/service.d.ts.map +0 -1
- package/dist/src/commands/service.js +0 -67
- package/dist/src/commands/service.js.map +0 -1
- package/dist/src/commands/switchboard.d.ts +0 -9
- package/dist/src/commands/switchboard.d.ts.map +0 -1
- package/dist/src/commands/switchboard.js +0 -78
- package/dist/src/commands/switchboard.js.map +0 -1
- package/dist/src/commands/uninstall.d.ts +0 -15
- package/dist/src/commands/uninstall.d.ts.map +0 -1
- package/dist/src/commands/uninstall.js +0 -120
- package/dist/src/commands/uninstall.js.map +0 -1
- package/dist/src/commands/vetra.d.ts +0 -11
- package/dist/src/commands/vetra.d.ts.map +0 -1
- package/dist/src/commands/vetra.js +0 -35
- package/dist/src/commands/vetra.js.map +0 -1
- package/dist/src/help.d.ts +0 -65
- package/dist/src/help.d.ts.map +0 -1
- package/dist/src/help.js +0 -770
- package/dist/src/help.js.map +0 -1
- package/dist/src/index.d.ts +0 -5
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/index.js +0 -5
- package/dist/src/index.js.map +0 -1
- package/dist/src/services/auth.d.ts +0 -69
- package/dist/src/services/auth.d.ts.map +0 -1
- package/dist/src/services/auth.js +0 -171
- package/dist/src/services/auth.js.map +0 -1
- package/dist/src/services/connect.d.ts +0 -2
- package/dist/src/services/connect.d.ts.map +0 -1
- package/dist/src/services/connect.js +0 -2
- package/dist/src/services/connect.js.map +0 -1
- package/dist/src/services/generate.d.ts +0 -30
- package/dist/src/services/generate.d.ts.map +0 -1
- package/dist/src/services/generate.js +0 -106
- package/dist/src/services/generate.js.map +0 -1
- package/dist/src/services/inspect.d.ts +0 -5
- package/dist/src/services/inspect.d.ts.map +0 -1
- package/dist/src/services/inspect.js +0 -49
- package/dist/src/services/inspect.js.map +0 -1
- package/dist/src/services/migrate.d.ts +0 -7
- package/dist/src/services/migrate.d.ts.map +0 -1
- package/dist/src/services/migrate.js +0 -289
- package/dist/src/services/migrate.js.map +0 -1
- package/dist/src/services/switchboard-migrate.d.ts +0 -7
- package/dist/src/services/switchboard-migrate.d.ts.map +0 -1
- package/dist/src/services/switchboard-migrate.js +0 -60
- package/dist/src/services/switchboard-migrate.js.map +0 -1
- package/dist/src/services/switchboard.d.ts +0 -54
- package/dist/src/services/switchboard.d.ts.map +0 -1
- package/dist/src/services/switchboard.js +0 -79
- package/dist/src/services/switchboard.js.map +0 -1
- package/dist/src/services/vetra.d.ts +0 -15
- package/dist/src/services/vetra.d.ts.map +0 -1
- package/dist/src/services/vetra.js +0 -176
- package/dist/src/services/vetra.js.map +0 -1
- package/dist/src/types.d.ts +0 -2
- package/dist/src/types.d.ts.map +0 -1
- package/dist/src/types.js +0 -2
- package/dist/src/types.js.map +0 -1
- package/dist/src/utils/configure-vetra-github-url.d.ts +0 -12
- package/dist/src/utils/configure-vetra-github-url.d.ts.map +0 -1
- package/dist/src/utils/configure-vetra-github-url.js +0 -230
- package/dist/src/utils/configure-vetra-github-url.js.map +0 -1
- package/dist/src/utils.d.ts +0 -116
- package/dist/src/utils.d.ts.map +0 -1
- package/dist/src/utils.js +0 -261
- package/dist/src/utils.js.map +0 -1
- package/dist/src/version.d.ts +0 -2
- package/dist/src/version.d.ts.map +0 -1
- package/dist/src/version.js +0 -3
- package/dist/src/version.js.map +0 -1
- package/dist/test/utils.test.d.ts +0 -2
- package/dist/test/utils.test.d.ts.map +0 -1
- package/dist/test/utils.test.js +0 -132
- package/dist/test/utils.test.js.map +0 -1
- package/dist/tsconfig.tsbuildinfo +0 -1
- package/dist/vitest.config.d.ts +0 -3
- package/dist/vitest.config.d.ts.map +0 -1
- package/dist/vitest.config.js +0 -7
- package/dist/vitest.config.js.map +0 -1
|
@@ -0,0 +1,245 @@
|
|
|
1
|
+
import { t as startGenerate } from "./generate-Dq80G8n4.mjs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import { existsSync, readdirSync } from "node:fs";
|
|
4
|
+
import { connectEntrypointTemplate, dockerfileTemplate, indexHtmlTemplate, indexTsTemplate, nginxConfTemplate, packageJsonExportsTemplate, packageJsonScriptsTemplate, switchboardEntrypointTemplate, syncAndPublishWorkflowTemplate, tsConfigTemplate } from "@powerhousedao/codegen/templates";
|
|
5
|
+
import { mkdir, readdir, stat, writeFile } from "node:fs/promises";
|
|
6
|
+
import { readPackage } from "read-pkg";
|
|
7
|
+
import { Project, SyntaxKind } from "ts-morph";
|
|
8
|
+
import { writePackage } from "write-package";
|
|
9
|
+
//#region src/services/migrate.ts
|
|
10
|
+
/**
 * Entry point for the migrate command: brings an existing project up to the
 * current boilerplate. Config files are rewritten first, code is regenerated
 * for all document models and editors, and only then is a ts-morph project
 * loaded to run codemods — so the codemods see the regenerated files.
 *
 * @param _args migration CLI args (unused in this body)
 */
async function startMigrate(_args) {
  await migratePackageJson();
  await migrateTsConfig();
  await migrateIndexHtml();
  await migrateCIFiles();
  await runGenerateOnAllDocumentModels();
  await runGenerateOnAllEditors();
  // Codemod pass: project loaded with verbatimModuleSyntax off.
  // NOTE(review): presumably disabled so import forms can be rewritten freely — confirm.
  const project = new Project({
    tsConfigFilePath: path.resolve("tsconfig.json"),
    compilerOptions: { verbatimModuleSyntax: false }
  });
  deleteLegacyEditorDirIndexFiles(project);
  migrateEditorFiles(project);
  migrateRootIndex(project);
  removeZDotSchemaUsage(project);
  removeCreatorsUsage(project);
  removeUtilsDefaultExportUsage(project);
  fixImports(project);
}
|
|
29
|
+
/** Ensure that the project package.json has the correct scripts and exports. */
|
|
30
|
+
/**
 * Ensure that the project package.json has the correct scripts and exports.
 * Existing entries are kept; template entries win on key collisions.
 */
async function migratePackageJson() {
  const packageJson = await readPackage();
  const currentExports = packageJson.exports;
  // `exports` may legally be a string or an array; only the object form can be merged.
  const isMergeableExports =
    !!currentExports &&
    !Array.isArray(currentExports) &&
    typeof currentExports !== "string";
  packageJson.scripts = {
    ...packageJson.scripts,
    ...packageJsonScriptsTemplate
  };
  packageJson.exports = {
    ...(isMergeableExports ? currentExports : {}),
    ...packageJsonExportsTemplate
  };
  await writePackage(packageJson);
}
|
|
46
|
+
/** Ensure that the project index.html matches the boilerplate index.html. */
|
|
47
|
+
/** Ensure that the project index.html matches the boilerplate index.html. */
async function migrateIndexHtml() {
  const indexHtmlPath = path.join(process.cwd(), "index.html");
  await writeFile(indexHtmlPath, indexHtmlTemplate);
}
|
|
50
|
+
/** Ensure that the project tsconfig.json matches the boilerplate tsconfig.json. */
|
|
51
|
+
/** Ensure that the project tsconfig.json matches the boilerplate tsconfig.json. */
async function migrateTsConfig() {
  const tsConfigPath = path.join(process.cwd(), "tsconfig.json");
  await writeFile(tsConfigPath, tsConfigTemplate);
}
|
|
54
|
+
/** Check if a file exists */
|
|
55
|
+
/**
 * Check if a file exists.
 * @param {string} filePath - path to probe
 * @returns {Promise<boolean>} true when `stat` succeeds, false on any error
 */
async function fileExists(filePath) {
  return stat(filePath).then(
    () => true,
    () => false
  );
}
|
|
63
|
+
/** Write a file with optional warning if it already exists */
|
|
64
|
+
/**
 * Write `content` to `filePath`, logging a warning first when an existing
 * file is about to be overwritten.
 */
async function writeFileWithWarning(filePath, content) {
  const alreadyExists = await fileExists(filePath);
  if (alreadyExists) {
    console.warn(`Warning: Overwriting existing file: ${filePath}`);
  }
  await writeFile(filePath, content);
}
|
|
68
|
+
/** Add CI/CD workflow and Docker files to the project. */
|
|
69
|
+
/** Add CI/CD workflow and Docker files to the project. */
async function migrateCIFiles() {
  const cwd = process.cwd();
  try {
    // Make sure target directories exist before writing into them.
    await mkdir(path.join(cwd, ".github/workflows"), { recursive: true });
    await mkdir(path.join(cwd, "docker"), { recursive: true });
    // [relative path, template] pairs, written sequentially in this order.
    const ciFiles = [
      [".github/workflows/sync-and-publish.yml", syncAndPublishWorkflowTemplate],
      ["Dockerfile", dockerfileTemplate],
      ["docker/nginx.conf", nginxConfTemplate],
      ["docker/connect-entrypoint.sh", connectEntrypointTemplate],
      ["docker/switchboard-entrypoint.sh", switchboardEntrypointTemplate]
    ];
    for (const [relativePath, template] of ciFiles) {
      await writeFileWithWarning(path.join(cwd, relativePath), template);
    }
  } catch (error) {
    console.error("Error migrating CI files:", error);
    throw error;
  }
}
|
|
84
|
+
/** Ensure that the project index.ts file uses the new exports for editors and document models */
|
|
85
|
+
/** Ensure that the project index.ts file uses the new exports for editors and document models */
function migrateRootIndex(project) {
  const indexPath = path.join(process.cwd(), "index.ts");
  // Reuse the existing index.ts when present, otherwise create a fresh one.
  const source =
    project.getSourceFile(indexPath) ?? project.createSourceFile(indexPath);
  source.replaceWithText(indexTsTemplate);
  project.saveSync();
}
|
|
92
|
+
/** Ensure that the project's editor.tsx files use default exports for lazy loading */
|
|
93
|
+
/** Ensure that the project's editor.tsx files use default exports for lazy loading */
function migrateEditorFiles(project) {
  const editorsPath = path.join(process.cwd(), "editors");
  const editorDirNames = readdirSync(editorsPath, { withFileTypes: true })
    .filter((entry) => entry.isDirectory())
    .map((entry) => entry.name);
  for (const dirName of editorDirNames) {
    const editorFile = project.getSourceFile(
      path.join(editorsPath, dirName, "editor.tsx")
    );
    if (!editorFile) continue;
    // String replace hits only the first occurrence — one Editor export per file.
    const updatedText = editorFile
      .getFullText()
      .replace("export function Editor", "export default function Editor");
    editorFile.replaceWithText(updatedText);
    project.saveSync();
  }
}
|
|
105
|
+
/** Delete the legacy index files in editor directories which are now replaced by module.ts files */
|
|
106
|
+
/** Delete the legacy index files in editor directories which are now replaced by module.ts files */
function deleteLegacyEditorDirIndexFiles(project) {
  const editorsPath = path.join(process.cwd(), "editors");
  const entries = readdirSync(editorsPath, { withFileTypes: true });
  for (const entry of entries) {
    if (!entry.isDirectory()) continue;
    const legacyIndex = project.getSourceFile(
      path.join(editorsPath, entry.name, "index.ts")
    );
    if (!legacyIndex) continue;
    legacyIndex.delete();
    project.saveSync();
  }
}
|
|
117
|
+
/** Remove usage of the `z` re-export of document model schemas which caused naming conflicts */
|
|
118
|
+
/**
 * Remove usage of the `z` re-export of document model schemas which caused
 * naming conflicts.
 *
 * Skips files outside the project, the generated zod.ts, and any file that
 * imports the real zod `z` (whose `z.` usage is legitimate).
 */
function removeZDotSchemaUsage(project) {
  const sourceFiles = project.getSourceFiles();
  for (const sourceFile of sourceFiles) {
    const filePath = sourceFile.getFilePath();
    if (!filePath.includes(process.cwd())) continue;
    if (filePath.includes("zod.ts")) continue;
    const text = sourceFile.getFullText();
    if (/import\s+(?:\{\s*z\s*\}|z)\s+from\s+['"]zod['"]/.test(text)) continue;
    // FIX: anchor on a word boundary so only the standalone identifier `z` is
    // stripped. The previous /z\./g also mangled any identifier merely ending
    // in "z" (e.g. `baz.prop` became `baprop`).
    const withoutZDot = text.replace(/\bz\./g, "");
    sourceFile.replaceWithText(withoutZDot);
    project.saveSync();
  }
}
|
|
131
|
+
/** Remove usage of the `creators` as an aliased full module export which is no longer needed */
|
|
132
|
+
/** Remove usage of the `creators` as an aliased full module export which is no longer needed. */
// NOTE(review): `getStatements()` returns top-level statement nodes, which are
// never of kind `PropertyAccessExpression` (that kind only appears nested
// inside expressions) — this filter chain therefore looks like it always
// yields an empty list, making the codemod a no-op. Confirm against the
// ts-morph API and a fixture before relying on this pass.
function removeCreatorsUsage(project) {
  const sourceFiles = project.getSourceFiles();
  for (const sourceFile of sourceFiles) {
    // Only touch files inside the project being migrated.
    if (!sourceFile.getFilePath().includes(process.cwd())) continue;
    const creatorsInvocations = sourceFile.getStatements().filter((statement) => statement.getKind() === SyntaxKind.PropertyAccessExpression).filter((statement) => statement.getText().includes("creators."));
    for (const creatorInvocation of creatorsInvocations) {
      // Strip the `creators.` prefix, keeping the member name that follows it.
      const withoutCreators = creatorInvocation.getText().replace(/creators\./g, "");
      creatorInvocation.replaceWithText(withoutCreators);
      project.saveSync();
    }
  }
}
|
|
144
|
+
/** Remove usage of the `utils` import which is no longer exported as a default import */
|
|
145
|
+
/** Remove usage of the `utils` import which is no longer exported as a default import */
function removeUtilsDefaultExportUsage(project) {
  for (const sourceFile of project.getSourceFiles()) {
    if (!sourceFile.getFilePath().includes(process.cwd())) continue;
    // Text-based match: any default import of the form `import utils ...`.
    const utilsImport = sourceFile
      .getImportDeclarations()
      .find((declaration) => declaration.getText().includes("import utils"));
    if (!utilsImport) continue;
    utilsImport.remove();
    project.saveSync();
  }
}
|
|
156
|
+
/** Fix missing imports in the project */
|
|
157
|
+
/** Fix missing imports in the project */
function fixImports(project) {
  for (const sourceFile of project.getSourceFiles()) {
    if (!sourceFile.getFilePath().includes(process.cwd())) continue;
    // Let the TS language service add any resolvable missing imports…
    sourceFile.fixMissingImports(undefined, {
      importModuleSpecifierPreference: "project-relative",
      autoImportSpecifierExcludeRegexes: ["document-model", "document-drive"],
      importModuleSpecifierEnding: "js",
      preferTypeOnlyAutoImports: false
    });
    // …then drop identifiers the earlier codemods left unused.
    sourceFile.fixUnusedIdentifiers();
    project.saveSync();
  }
}
|
|
171
|
+
/** Run the generate command on all document models */
|
|
172
|
+
/** Run the generate command on all document models. */
// NOTE(review): passes an empty args object — presumably startGenerate treats
// this as "regenerate everything"; confirm against startGenerate's handling.
async function runGenerateOnAllDocumentModels() {
  await startGenerate({});
}
|
|
175
|
+
/** Run the generate command on all editors */
|
|
176
|
+
/** Run the generate command on all editors */
async function runGenerateOnAllEditors() {
  const editorsPath = path.join(process.cwd(), "editors");
  const entries = await readdir(editorsPath, { withFileTypes: true });
  const editorDirNames = entries
    .filter((entry) => entry.isDirectory())
    .map((entry) => entry.name);
  for (const dirName of editorDirNames) {
    const moduleFilePath = path.join(editorsPath, dirName, "module.ts");
    const indexFilePath = path.join(editorsPath, dirName, "index.ts");
    const hasModuleFile = existsSync(moduleFilePath);
    const hasIndexFile = existsSync(indexFilePath);
    if (!hasModuleFile && !hasIndexFile) continue;
    // Prefer the new module.ts; fall back to the legacy index.ts.
    const moduleInfo = extractEditorModuleInfo(
      hasModuleFile ? moduleFilePath : indexFilePath
    );
    const { id, name, documentTypes, isApp } = moduleInfo;
    if (!name) throw new Error(`Editor ${dirName} is missing name`);
    if (!id) throw new Error(`Editor ${dirName} is missing id`);
    if (isApp) {
      const configFilePath = path.join(editorsPath, dirName, "config.ts");
      const allowedDocumentTypes = existsSync(configFilePath)
        ? extractAllowedDocumentTypes(configFilePath)
        : undefined;
      await startGenerate({
        appName: name,
        appId: id,
        appDirName: dirName,
        allowedDocumentTypes
      });
    } else {
      await startGenerate({
        editorName: name,
        editorId: id,
        editorDirName: dirName,
        documentType: documentTypes?.[0]
      });
    }
  }
}
|
|
204
|
+
/** Extract the name, id, document types, and whether the editor is a app from the editor module */
|
|
205
|
+
/** Extract the name, id, document types, and whether the editor is a app from the editor module */
function extractEditorModuleInfo(filePath) {
  // A fresh ts-morph project is created per call.
  const project = new Project({
    tsConfigFilePath: path.resolve("tsconfig.json"),
    compilerOptions: { verbatimModuleSyntax: false }
  });
  const sourceFile = project.getSourceFileOrThrow(filePath);
  const moduleObject = getVariableDeclarationByTypeName(
    sourceFile,
    "EditorModule"
  )?.getInitializerIfKind(SyntaxKind.ObjectLiteralExpression);
  const documentTypes = getObjectProperty(
    moduleObject,
    "documentTypes",
    SyntaxKind.ArrayLiteralExpression
  )
    ?.getElements()
    .map((element) => element.getText().replace(/["']/g, ""));
  const config = getObjectProperty(
    moduleObject,
    "config",
    SyntaxKind.ObjectLiteralExpression
  );
  const id = getStringLiteralValue(
    getObjectProperty(config, "id", SyntaxKind.StringLiteral)
  );
  const name = getStringLiteralValue(
    getObjectProperty(config, "name", SyntaxKind.StringLiteral)
  );
  // Apps are editors whose documentTypes include the document-drive type.
  const isApp = documentTypes?.includes("powerhouse/document-drive");
  return { id, name, documentTypes, isApp };
}
|
|
219
|
+
/** Extract the allowed document types from the app config */
|
|
220
|
+
/** Extract the allowed document types from the app config */
function extractAllowedDocumentTypes(filePath) {
  const project = new Project({
    tsConfigFilePath: path.resolve("tsconfig.json"),
    compilerOptions: { verbatimModuleSyntax: false }
  });
  const sourceFile = project.getSourceFile(filePath);
  if (!sourceFile) return undefined;
  const configObject = getVariableDeclarationByTypeName(
    sourceFile,
    "PHAppConfig"
  )?.getInitializerIfKind(SyntaxKind.ObjectLiteralExpression);
  if (!configObject) return undefined;
  const allowedTypesNode = getObjectProperty(
    configObject,
    "allowedDocumentTypes",
    SyntaxKind.ArrayLiteralExpression
  );
  return getArrayLiteralExpressionElementsText(allowedTypesNode);
}
|
|
230
|
+
/**
 * Find the first variable declaration whose resolved type text contains
 * `typeName` (e.g. "EditorModule"); undefined when none matches.
 */
function getVariableDeclarationByTypeName(sourceFile, typeName) {
  const declarations = sourceFile.getVariableDeclarations();
  for (const declaration of declarations) {
    const typeText = declaration.getType().getText();
    if (typeText.includes(typeName)) return declaration;
  }
  return undefined;
}
|
|
233
|
+
/**
 * Return the text of a string literal node with its quote characters
 * stripped; undefined when no node is given.
 */
function getStringLiteralValue(stringLiteral) {
  if (stringLiteral == null) return undefined;
  const raw = stringLiteral.getText();
  return raw.replace(/["']/g, "");
}
|
|
236
|
+
/**
 * Look up `propertyName` on an object literal and return its value node when
 * that node's kind matches `propertyType`; undefined otherwise.
 */
function getObjectProperty(object, propertyName, propertyType) {
  const assignment = object
    ?.getProperty(propertyName)
    ?.asKind(SyntaxKind.PropertyAssignment);
  if (!assignment) return undefined;
  const valueNode = assignment
    .getChildren()
    .find((child) => child.getKind() === propertyType);
  return valueNode?.asKind(propertyType);
}
|
|
239
|
+
/**
 * Return the elements of an array literal as quote-stripped strings;
 * undefined when no node is given.
 */
function getArrayLiteralExpressionElementsText(arrayLiteralExpression) {
  if (arrayLiteralExpression == null) return undefined;
  return arrayLiteralExpression
    .getElements()
    .map((element) => element.getText().replace(/["']/g, ""));
}
|
|
242
|
+
//#endregion
|
|
243
|
+
export { startMigrate };
|
|
244
|
+
|
|
245
|
+
//# sourceMappingURL=migrate-CMNF8puQ.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migrate-CMNF8puQ.mjs","names":[],"sources":["../src/services/migrate.ts"],"sourcesContent":["import {\n connectEntrypointTemplate,\n dockerfileTemplate,\n indexHtmlTemplate,\n indexTsTemplate,\n nginxConfTemplate,\n packageJsonExportsTemplate,\n packageJsonScriptsTemplate,\n switchboardEntrypointTemplate,\n syncAndPublishWorkflowTemplate,\n tsConfigTemplate,\n} from \"@powerhousedao/codegen/templates\";\nimport { existsSync, readdirSync } from \"node:fs\";\nimport { mkdir, readdir, stat, writeFile } from \"node:fs/promises\";\nimport path from \"path\";\nimport { readPackage } from \"read-pkg\";\nimport type {\n ArrayLiteralExpression,\n ObjectLiteralExpression,\n SourceFile,\n StringLiteral,\n} from \"ts-morph\";\nimport { Project, SyntaxKind } from \"ts-morph\";\nimport { writePackage } from \"write-package\";\nimport type { GenerateArgs, MigrateArgs } from \"../types.js\";\nimport { startGenerate } from \"./generate.js\";\n\nexport async function startMigrate(_args: MigrateArgs) {\n await migratePackageJson();\n await migrateTsConfig();\n await migrateIndexHtml();\n await migrateCIFiles();\n await runGenerateOnAllDocumentModels();\n await runGenerateOnAllEditors();\n const project = new Project({\n tsConfigFilePath: path.resolve(\"tsconfig.json\"),\n compilerOptions: {\n verbatimModuleSyntax: false,\n },\n });\n deleteLegacyEditorDirIndexFiles(project);\n migrateEditorFiles(project);\n migrateRootIndex(project);\n removeZDotSchemaUsage(project);\n removeCreatorsUsage(project);\n removeUtilsDefaultExportUsage(project);\n fixImports(project);\n}\n\n/** Ensure that the project package.json has the correct scripts and exports. */\nasync function migratePackageJson() {\n const packageJson = await readPackage();\n const existingScripts = packageJson.scripts;\n const existingExports =\n !!packageJson.exports &&\n !Array.isArray(packageJson.exports) &&\n typeof packageJson.exports !== \"string\"\n ? 
packageJson.exports\n : {};\n const newScripts = {\n ...existingScripts,\n ...packageJsonScriptsTemplate,\n };\n const newExports = {\n ...existingExports,\n ...packageJsonExportsTemplate,\n };\n packageJson.scripts = newScripts;\n packageJson.exports = newExports;\n await writePackage(packageJson);\n}\n\n/** Ensure that the project index.html matches the boilerplate index.html. */\nasync function migrateIndexHtml() {\n const indexHtmlPath = path.join(process.cwd(), \"index.html\");\n await writeFile(indexHtmlPath, indexHtmlTemplate);\n}\n\n/** Ensure that the project tsconfig.json matches the boilerplate tsconfig.json. */\nasync function migrateTsConfig() {\n const tsConfigPath = path.join(process.cwd(), \"tsconfig.json\");\n await writeFile(tsConfigPath, tsConfigTemplate);\n}\n\n/** Check if a file exists */\nasync function fileExists(filePath: string): Promise<boolean> {\n try {\n await stat(filePath);\n return true;\n } catch {\n return false;\n }\n}\n\n/** Write a file with optional warning if it already exists */\nasync function writeFileWithWarning(\n filePath: string,\n content: string,\n): Promise<void> {\n const exists = await fileExists(filePath);\n if (exists) {\n console.warn(`Warning: Overwriting existing file: ${filePath}`);\n }\n await writeFile(filePath, content);\n}\n\n/** Add CI/CD workflow and Docker files to the project. 
*/\nasync function migrateCIFiles() {\n const cwd = process.cwd();\n\n try {\n // Create directories if they don't exist\n await mkdir(path.join(cwd, \".github/workflows\"), { recursive: true });\n await mkdir(path.join(cwd, \"docker\"), { recursive: true });\n\n // Write CI/CD workflow\n await writeFileWithWarning(\n path.join(cwd, \".github/workflows/sync-and-publish.yml\"),\n syncAndPublishWorkflowTemplate,\n );\n\n // Write Docker files\n await writeFileWithWarning(\n path.join(cwd, \"Dockerfile\"),\n dockerfileTemplate,\n );\n await writeFileWithWarning(\n path.join(cwd, \"docker/nginx.conf\"),\n nginxConfTemplate,\n );\n await writeFileWithWarning(\n path.join(cwd, \"docker/connect-entrypoint.sh\"),\n connectEntrypointTemplate,\n );\n await writeFileWithWarning(\n path.join(cwd, \"docker/switchboard-entrypoint.sh\"),\n switchboardEntrypointTemplate,\n );\n } catch (error) {\n console.error(\"Error migrating CI files:\", error);\n throw error;\n }\n}\n\n/** Ensure that the project index.ts file uses the new exports for editors and document models */\nfunction migrateRootIndex(project: Project) {\n const indexPath = path.join(process.cwd(), \"index.ts\");\n let source = project.getSourceFile(indexPath);\n if (!source) {\n source = project.createSourceFile(indexPath);\n }\n source.replaceWithText(indexTsTemplate);\n project.saveSync();\n}\n\n/** Ensure that the project's editor.tsx files use default exports for lazy loading */\nfunction migrateEditorFiles(project: Project) {\n const editorsPath = path.join(process.cwd(), \"editors\");\n const dirs = readdirSync(editorsPath, { withFileTypes: true })\n .filter((entry) => entry.isDirectory())\n .map((entry) => entry.name);\n for (const dir of dirs) {\n const editorFilePath = path.join(editorsPath, dir, \"editor.tsx\");\n const source = project.getSourceFile(editorFilePath);\n if (!source) continue;\n const text = source.getFullText();\n const replaceNamedExportWithDefaultExport = text.replace(\n \"export function 
Editor\",\n \"export default function Editor\",\n );\n source.replaceWithText(replaceNamedExportWithDefaultExport);\n project.saveSync();\n }\n}\n\n/** Delete the legacy index files in editor directories which are now replaced by module.ts files */\nfunction deleteLegacyEditorDirIndexFiles(project: Project) {\n const editorsPath = path.join(process.cwd(), \"editors\");\n const dirs = readdirSync(editorsPath, { withFileTypes: true })\n .filter((entry) => entry.isDirectory())\n .map((entry) => entry.name);\n for (const dir of dirs) {\n const indexFilePath = path.join(editorsPath, dir, \"index.ts\");\n const source = project.getSourceFile(indexFilePath);\n if (!source) continue;\n source.delete();\n project.saveSync();\n }\n}\n\n/** Remove usage of the `z` re-export of document model schemas which caused naming conflicts */\nfunction removeZDotSchemaUsage(project: Project) {\n const sourceFiles = project.getSourceFiles();\n for (const sourceFile of sourceFiles) {\n const path = sourceFile.getFilePath();\n if (!path.includes(process.cwd())) continue;\n if (path.includes(\"zod.ts\")) continue;\n const text = sourceFile.getFullText();\n if (/import\\s+(?:\\{\\s*z\\s*\\}|z)\\s+from\\s+['\"]zod['\"]/.test(text)) continue;\n const withoutZDot = text.replace(/z\\./g, \"\");\n sourceFile.replaceWithText(withoutZDot);\n project.saveSync();\n }\n}\n\n/** Remove usage of the `creators` as an aliased full module export which is no longer needed */\nfunction removeCreatorsUsage(project: Project) {\n const sourceFiles = project.getSourceFiles();\n for (const sourceFile of sourceFiles) {\n const path = sourceFile.getFilePath();\n if (!path.includes(process.cwd())) continue;\n const creatorsInvocations = sourceFile\n .getStatements()\n .filter(\n (statement) =>\n statement.getKind() === SyntaxKind.PropertyAccessExpression,\n )\n .filter((statement) => statement.getText().includes(\"creators.\"));\n for (const creatorInvocation of creatorsInvocations) {\n const withoutCreators = 
creatorInvocation\n .getText()\n .replace(/creators\\./g, \"\");\n creatorInvocation.replaceWithText(withoutCreators);\n project.saveSync();\n }\n }\n}\n\n/** Remove usage of the `utils` import which is no longer exported as a default import */\nfunction removeUtilsDefaultExportUsage(project: Project) {\n const sourceFiles = project.getSourceFiles();\n for (const sourceFile of sourceFiles) {\n const path = sourceFile.getFilePath();\n if (!path.includes(process.cwd())) continue;\n const statement = sourceFile\n .getImportDeclarations()\n .find((importDeclaration) =>\n importDeclaration.getText().includes(\"import utils\"),\n );\n if (statement) {\n statement.remove();\n project.saveSync();\n }\n }\n}\n\n/** Fix missing imports in the project */\nfunction fixImports(project: Project) {\n const sourceFiles = project.getSourceFiles();\n for (const sourceFile of sourceFiles) {\n const path = sourceFile.getFilePath();\n if (!path.includes(process.cwd())) continue;\n sourceFile.fixMissingImports(undefined, {\n importModuleSpecifierPreference: \"project-relative\",\n autoImportSpecifierExcludeRegexes: [\"document-model\", \"document-drive\"],\n importModuleSpecifierEnding: \"js\",\n preferTypeOnlyAutoImports: false,\n });\n sourceFile.fixUnusedIdentifiers();\n\n project.saveSync();\n }\n}\n\n/** Run the generate command on all document models */\nasync function runGenerateOnAllDocumentModels() {\n await startGenerate({} as GenerateArgs);\n}\n\n/** Run the generate command on all editors */\nasync function runGenerateOnAllEditors() {\n const editorsPath = path.join(process.cwd(), \"editors\");\n const dirs = (await readdir(editorsPath, { withFileTypes: true }))\n .filter((entry) => entry.isDirectory())\n .map((entry) => entry.name);\n for (const dir of dirs) {\n const moduleFilePath = path.join(editorsPath, dir, \"module.ts\");\n const indexFilePath = path.join(editorsPath, dir, \"index.ts\");\n const hasModuleFile = existsSync(moduleFilePath);\n const hasIndexFile = 
existsSync(indexFilePath);\n if (!hasModuleFile && !hasIndexFile) {\n continue;\n }\n const filePathToUse = hasModuleFile ? moduleFilePath : indexFilePath;\n const { id, name, documentTypes, isApp } =\n extractEditorModuleInfo(filePathToUse);\n\n if (!name) {\n throw new Error(`Editor ${dir} is missing name`);\n }\n if (!id) {\n throw new Error(`Editor ${dir} is missing id`);\n }\n if (isApp) {\n const configFilePath = path.join(editorsPath, dir, \"config.ts\");\n const hasConfigFile = existsSync(configFilePath);\n const allowedDocumentTypes = hasConfigFile\n ? extractAllowedDocumentTypes(configFilePath)\n : undefined;\n const args = {\n appName: name,\n appId: id,\n appDirName: dir,\n allowedDocumentTypes,\n } as GenerateArgs;\n await startGenerate(args);\n } else {\n const args = {\n editorName: name,\n editorId: id,\n editorDirName: dir,\n documentType: documentTypes?.[0],\n } as GenerateArgs;\n await startGenerate(args);\n }\n }\n}\n\n/** Extract the name, id, document types, and whether the editor is a app from the editor module */\nfunction extractEditorModuleInfo(filePath: string) {\n const project = new Project({\n tsConfigFilePath: path.resolve(\"tsconfig.json\"),\n compilerOptions: {\n verbatimModuleSyntax: false,\n },\n });\n const sourceFile = project.getSourceFileOrThrow(filePath);\n const moduleDeclaration = getVariableDeclarationByTypeName(\n sourceFile,\n \"EditorModule\",\n );\n\n const variable = moduleDeclaration?.getInitializerIfKind(\n SyntaxKind.ObjectLiteralExpression,\n );\n const documentTypes = getObjectProperty(\n variable,\n \"documentTypes\",\n SyntaxKind.ArrayLiteralExpression,\n )\n ?.getElements()\n .map((element) => element.getText())\n .map((text) => text.replace(/[\"']/g, \"\"));\n\n const configProperty = getObjectProperty(\n variable,\n \"config\",\n SyntaxKind.ObjectLiteralExpression,\n );\n\n const id = getStringLiteralValue(\n getObjectProperty(configProperty, \"id\", SyntaxKind.StringLiteral),\n );\n\n const name = 
getStringLiteralValue(\n getObjectProperty(configProperty, \"name\", SyntaxKind.StringLiteral),\n );\n const isApp = documentTypes?.includes(\"powerhouse/document-drive\");\n return { id, name, documentTypes, isApp };\n}\n\n/** Extract the allowed document types from the app config */\nfunction extractAllowedDocumentTypes(filePath: string) {\n const project = new Project({\n tsConfigFilePath: path.resolve(\"tsconfig.json\"),\n compilerOptions: {\n verbatimModuleSyntax: false,\n },\n });\n const sourceFile = project.getSourceFile(filePath);\n if (!sourceFile) return;\n const configVariableDeclaration = getVariableDeclarationByTypeName(\n sourceFile,\n \"PHAppConfig\",\n );\n const configVariable = configVariableDeclaration?.getInitializerIfKind(\n SyntaxKind.ObjectLiteralExpression,\n );\n if (!configVariable) return;\n const allowedDocumentTypes = getArrayLiteralExpressionElementsText(\n getObjectProperty(\n configVariable,\n \"allowedDocumentTypes\",\n SyntaxKind.ArrayLiteralExpression,\n ),\n );\n return allowedDocumentTypes;\n}\n\nfunction getVariableDeclarationByTypeName(\n sourceFile: SourceFile,\n typeName: string,\n) {\n const variableDeclarations = sourceFile.getVariableDeclarations();\n return variableDeclarations.find((declaration) =>\n declaration.getType().getText().includes(typeName),\n );\n}\n\nfunction getStringLiteralValue(stringLiteral: StringLiteral | undefined) {\n return stringLiteral?.getText().replace(/[\"']/g, \"\");\n}\n\nfunction getObjectProperty<T extends SyntaxKind>(\n object: ObjectLiteralExpression | undefined,\n propertyName: string,\n propertyType: T,\n) {\n return object\n ?.getProperty(propertyName)\n ?.asKind(SyntaxKind.PropertyAssignment)\n ?.getChildren()\n .find((child) => child.getKind() === propertyType)\n ?.asKind(propertyType);\n}\n\nfunction getArrayLiteralExpressionElementsText(\n arrayLiteralExpression: ArrayLiteralExpression | undefined,\n) {\n return arrayLiteralExpression\n ?.getElements()\n .map((element) => 
element.getText())\n .map((text) => text.replace(/[\"']/g, \"\"));\n}\n"],"mappings":";;;;;;;;;AA2BA,eAAsB,aAAa,OAAoB;AACrD,OAAM,oBAAoB;AAC1B,OAAM,iBAAiB;AACvB,OAAM,kBAAkB;AACxB,OAAM,gBAAgB;AACtB,OAAM,gCAAgC;AACtC,OAAM,yBAAyB;CAC/B,MAAM,UAAU,IAAI,QAAQ;EAC1B,kBAAkB,KAAK,QAAQ,gBAAgB;EAC/C,iBAAiB,EACf,sBAAsB,OACvB;EACF,CAAC;AACF,iCAAgC,QAAQ;AACxC,oBAAmB,QAAQ;AAC3B,kBAAiB,QAAQ;AACzB,uBAAsB,QAAQ;AAC9B,qBAAoB,QAAQ;AAC5B,+BAA8B,QAAQ;AACtC,YAAW,QAAQ;;;AAIrB,eAAe,qBAAqB;CAClC,MAAM,cAAc,MAAM,aAAa;CACvC,MAAM,kBAAkB,YAAY;CACpC,MAAM,kBACJ,CAAC,CAAC,YAAY,WACd,CAAC,MAAM,QAAQ,YAAY,QAAQ,IACnC,OAAO,YAAY,YAAY,WAC3B,YAAY,UACZ,EAAE;CACR,MAAM,aAAa;EACjB,GAAG;EACH,GAAG;EACJ;CACD,MAAM,aAAa;EACjB,GAAG;EACH,GAAG;EACJ;AACD,aAAY,UAAU;AACtB,aAAY,UAAU;AACtB,OAAM,aAAa,YAAY;;;AAIjC,eAAe,mBAAmB;AAEhC,OAAM,UADgB,KAAK,KAAK,QAAQ,KAAK,EAAE,aAAa,EAC7B,kBAAkB;;;AAInD,eAAe,kBAAkB;AAE/B,OAAM,UADe,KAAK,KAAK,QAAQ,KAAK,EAAE,gBAAgB,EAChC,iBAAiB;;;AAIjD,eAAe,WAAW,UAAoC;AAC5D,KAAI;AACF,QAAM,KAAK,SAAS;AACpB,SAAO;SACD;AACN,SAAO;;;;AAKX,eAAe,qBACb,UACA,SACe;AAEf,KADe,MAAM,WAAW,SAAS,CAEvC,SAAQ,KAAK,uCAAuC,WAAW;AAEjE,OAAM,UAAU,UAAU,QAAQ;;;AAIpC,eAAe,iBAAiB;CAC9B,MAAM,MAAM,QAAQ,KAAK;AAEzB,KAAI;AAEF,QAAM,MAAM,KAAK,KAAK,KAAK,oBAAoB,EAAE,EAAE,WAAW,MAAM,CAAC;AACrE,QAAM,MAAM,KAAK,KAAK,KAAK,SAAS,EAAE,EAAE,WAAW,MAAM,CAAC;AAG1D,QAAM,qBACJ,KAAK,KAAK,KAAK,yCAAyC,EACxD,+BACD;AAGD,QAAM,qBACJ,KAAK,KAAK,KAAK,aAAa,EAC5B,mBACD;AACD,QAAM,qBACJ,KAAK,KAAK,KAAK,oBAAoB,EACnC,kBACD;AACD,QAAM,qBACJ,KAAK,KAAK,KAAK,+BAA+B,EAC9C,0BACD;AACD,QAAM,qBACJ,KAAK,KAAK,KAAK,mCAAmC,EAClD,8BACD;UACM,OAAO;AACd,UAAQ,MAAM,6BAA6B,MAAM;AACjD,QAAM;;;;AAKV,SAAS,iBAAiB,SAAkB;CAC1C,MAAM,YAAY,KAAK,KAAK,QAAQ,KAAK,EAAE,WAAW;CACtD,IAAI,SAAS,QAAQ,cAAc,UAAU;AAC7C,KAAI,CAAC,OACH,UAAS,QAAQ,iBAAiB,UAAU;AAE9C,QAAO,gBAAgB,gBAAgB;AACvC,SAAQ,UAAU;;;AAIpB,SAAS,mBAAmB,SAAkB;CAC5C,MAAM,cAAc,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU;CACvD,MAAM,OAAO,YAAY,aAAa,EAAE,eAAe,MAAM,CAAC,CAC3D,QAAQ,UAAU,MAAM,aAAa,CAAC,CACtC,KAAK,UAAU,MAAM,KAAK;AAC7B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,iBAAiB,KAAK,KAAK,aAAa
,KAAK,aAAa;EAChE,MAAM,SAAS,QAAQ,cAAc,eAAe;AACpD,MAAI,CAAC,OAAQ;EAEb,MAAM,sCADO,OAAO,aAAa,CACgB,QAC/C,0BACA,iCACD;AACD,SAAO,gBAAgB,oCAAoC;AAC3D,UAAQ,UAAU;;;;AAKtB,SAAS,gCAAgC,SAAkB;CACzD,MAAM,cAAc,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU;CACvD,MAAM,OAAO,YAAY,aAAa,EAAE,eAAe,MAAM,CAAC,CAC3D,QAAQ,UAAU,MAAM,aAAa,CAAC,CACtC,KAAK,UAAU,MAAM,KAAK;AAC7B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,gBAAgB,KAAK,KAAK,aAAa,KAAK,WAAW;EAC7D,MAAM,SAAS,QAAQ,cAAc,cAAc;AACnD,MAAI,CAAC,OAAQ;AACb,SAAO,QAAQ;AACf,UAAQ,UAAU;;;;AAKtB,SAAS,sBAAsB,SAAkB;CAC/C,MAAM,cAAc,QAAQ,gBAAgB;AAC5C,MAAK,MAAM,cAAc,aAAa;EACpC,MAAM,OAAO,WAAW,aAAa;AACrC,MAAI,CAAC,KAAK,SAAS,QAAQ,KAAK,CAAC,CAAE;AACnC,MAAI,KAAK,SAAS,SAAS,CAAE;EAC7B,MAAM,OAAO,WAAW,aAAa;AACrC,MAAI,kDAAkD,KAAK,KAAK,CAAE;EAClE,MAAM,cAAc,KAAK,QAAQ,QAAQ,GAAG;AAC5C,aAAW,gBAAgB,YAAY;AACvC,UAAQ,UAAU;;;;AAKtB,SAAS,oBAAoB,SAAkB;CAC7C,MAAM,cAAc,QAAQ,gBAAgB;AAC5C,MAAK,MAAM,cAAc,aAAa;AAEpC,MAAI,CADS,WAAW,aAAa,CAC3B,SAAS,QAAQ,KAAK,CAAC,CAAE;EACnC,MAAM,sBAAsB,WACzB,eAAe,CACf,QACE,cACC,UAAU,SAAS,KAAK,WAAW,yBACtC,CACA,QAAQ,cAAc,UAAU,SAAS,CAAC,SAAS,YAAY,CAAC;AACnE,OAAK,MAAM,qBAAqB,qBAAqB;GACnD,MAAM,kBAAkB,kBACrB,SAAS,CACT,QAAQ,eAAe,GAAG;AAC7B,qBAAkB,gBAAgB,gBAAgB;AAClD,WAAQ,UAAU;;;;;AAMxB,SAAS,8BAA8B,SAAkB;CACvD,MAAM,cAAc,QAAQ,gBAAgB;AAC5C,MAAK,MAAM,cAAc,aAAa;AAEpC,MAAI,CADS,WAAW,aAAa,CAC3B,SAAS,QAAQ,KAAK,CAAC,CAAE;EACnC,MAAM,YAAY,WACf,uBAAuB,CACvB,MAAM,sBACL,kBAAkB,SAAS,CAAC,SAAS,eAAe,CACrD;AACH,MAAI,WAAW;AACb,aAAU,QAAQ;AAClB,WAAQ,UAAU;;;;;AAMxB,SAAS,WAAW,SAAkB;CACpC,MAAM,cAAc,QAAQ,gBAAgB;AAC5C,MAAK,MAAM,cAAc,aAAa;AAEpC,MAAI,CADS,WAAW,aAAa,CAC3B,SAAS,QAAQ,KAAK,CAAC,CAAE;AACnC,aAAW,kBAAkB,KAAA,GAAW;GACtC,iCAAiC;GACjC,mCAAmC,CAAC,kBAAkB,iBAAiB;GACvE,6BAA6B;GAC7B,2BAA2B;GAC5B,CAAC;AACF,aAAW,sBAAsB;AAEjC,UAAQ,UAAU;;;;AAKtB,eAAe,iCAAiC;AAC9C,OAAM,cAAc,EAAE,CAAiB;;;AAIzC,eAAe,0BAA0B;CACvC,MAAM,cAAc,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU;CACvD,MAAM,QAAQ,MAAM,QAAQ,aAAa,EAAE,eAAe,MAAM,CAAC,EAC9D,QAAQ,UAAU,MAAM,aAAa,CAAC,CACtC,KAAK,UAAU,MAAM,KAAK;AAC7B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,iBAAiB,KAAK,KAAK,aAAa,K
AAK,YAAY;EAC/D,MAAM,gBAAgB,KAAK,KAAK,aAAa,KAAK,WAAW;EAC7D,MAAM,gBAAgB,WAAW,eAAe;EAChD,MAAM,eAAe,WAAW,cAAc;AAC9C,MAAI,CAAC,iBAAiB,CAAC,aACrB;EAGF,MAAM,EAAE,IAAI,MAAM,eAAe,UAC/B,wBAFoB,gBAAgB,iBAAiB,cAEf;AAExC,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,UAAU,IAAI,kBAAkB;AAElD,MAAI,CAAC,GACH,OAAM,IAAI,MAAM,UAAU,IAAI,gBAAgB;AAEhD,MAAI,OAAO;GACT,MAAM,iBAAiB,KAAK,KAAK,aAAa,KAAK,YAAY;AAW/D,SAAM,cANO;IACX,SAAS;IACT,OAAO;IACP,YAAY;IACZ,sBARoB,WAAW,eAAe,GAE5C,4BAA4B,eAAe,GAC3C,KAAA;IAMH,CACwB;QAQzB,OAAM,cANO;GACX,YAAY;GACZ,UAAU;GACV,eAAe;GACf,cAAc,gBAAgB;GAC/B,CACwB;;;;AAM/B,SAAS,wBAAwB,UAAkB;CAajD,MAAM,WALoB,iCAPV,IAAI,QAAQ;EAC1B,kBAAkB,KAAK,QAAQ,gBAAgB;EAC/C,iBAAiB,EACf,sBAAsB,OACvB;EACF,CAAC,CACyB,qBAAqB,SAAS,EAGvD,eACD,EAEmC,qBAClC,WAAW,wBACZ;CACD,MAAM,gBAAgB,kBACpB,UACA,iBACA,WAAW,uBACZ,EACG,aAAa,CACd,KAAK,YAAY,QAAQ,SAAS,CAAC,CACnC,KAAK,SAAS,KAAK,QAAQ,SAAS,GAAG,CAAC;CAE3C,MAAM,iBAAiB,kBACrB,UACA,UACA,WAAW,wBACZ;AAUD,QAAO;EAAE,IARE,sBACT,kBAAkB,gBAAgB,MAAM,WAAW,cAAc,CAClE;EAMY,MAJA,sBACX,kBAAkB,gBAAgB,QAAQ,WAAW,cAAc,CACpE;EAEkB;EAAe,OADpB,eAAe,SAAS,4BAA4B;EACzB;;;AAI3C,SAAS,4BAA4B,UAAkB;CAOrD,MAAM,aANU,IAAI,QAAQ;EAC1B,kBAAkB,KAAK,QAAQ,gBAAgB;EAC/C,iBAAiB,EACf,sBAAsB,OACvB;EACF,CAAC,CACyB,cAAc,SAAS;AAClD,KAAI,CAAC,WAAY;CAKjB,MAAM,iBAJ4B,iCAChC,YACA,cACD,EACiD,qBAChD,WAAW,wBACZ;AACD,KAAI,CAAC,eAAgB;AAQrB,QAP6B,sCAC3B,kBACE,gBACA,wBACA,WAAW,uBACZ,CACF;;AAIH,SAAS,iCACP,YACA,UACA;AAEA,QAD6B,WAAW,yBAAyB,CACrC,MAAM,gBAChC,YAAY,SAAS,CAAC,SAAS,CAAC,SAAS,SAAS,CACnD;;AAGH,SAAS,sBAAsB,eAA0C;AACvE,QAAO,eAAe,SAAS,CAAC,QAAQ,SAAS,GAAG;;AAGtD,SAAS,kBACP,QACA,cACA,cACA;AACA,QAAO,QACH,YAAY,aAAa,EACzB,OAAO,WAAW,mBAAmB,EACrC,aAAa,CACd,MAAM,UAAU,MAAM,SAAS,KAAK,aAAa,EAChD,OAAO,aAAa;;AAG1B,SAAS,sCACP,wBACA;AACA,QAAO,wBACH,aAAa,CACd,KAAK,YAAY,QAAQ,SAAS,CAAC,CACnC,KAAK,SAAS,KAAK,QAAQ,SAAS,GAAG,CAAC"}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { writeCliDocsMarkdownFile } from "@powerhousedao/codegen/file-builders";
|
|
2
|
+
import { accessToken } from "../src/commands/access-token.js";
|
|
3
|
+
import { build, connect, preview, studio } from "../src/commands/connect.js";
|
|
4
|
+
import { generate } from "../src/commands/generate.js";
|
|
5
|
+
import { inspect } from "../src/commands/inspect.js";
|
|
6
|
+
import { install } from "../src/commands/install.js";
|
|
7
|
+
import { list } from "../src/commands/list.js";
|
|
8
|
+
import { login } from "../src/commands/login.js";
|
|
9
|
+
import { migrate } from "../src/commands/migrate.js";
|
|
10
|
+
import { phCli } from "../src/commands/ph-cli.js";
|
|
11
|
+
import { switchboard } from "../src/commands/switchboard.js";
|
|
12
|
+
import { uninstall } from "../src/commands/uninstall.js";
|
|
13
|
+
import { vetra } from "../src/commands/vetra.js";
|
|
14
|
+
|
|
15
|
+
const commands = [
|
|
16
|
+
{ name: "generate", command: generate },
|
|
17
|
+
{ name: "vetra", command: vetra },
|
|
18
|
+
{ name: "connect", command: connect },
|
|
19
|
+
{ name: "connect studio", command: studio },
|
|
20
|
+
{ name: "connect build", command: build },
|
|
21
|
+
{ name: "connect preview", command: preview },
|
|
22
|
+
{ name: "access token", command: accessToken },
|
|
23
|
+
{ name: "inspect", command: inspect },
|
|
24
|
+
{ name: "list", command: list },
|
|
25
|
+
{ name: "migrate", command: migrate },
|
|
26
|
+
{ name: "switchboard", command: switchboard },
|
|
27
|
+
{ name: "login", command: login },
|
|
28
|
+
{ name: "install", command: install },
|
|
29
|
+
{ name: "uninstall", command: uninstall },
|
|
30
|
+
];
|
|
31
|
+
|
|
32
|
+
const cliDescription = phCli.description ?? "";
|
|
33
|
+
|
|
34
|
+
async function main() {
|
|
35
|
+
await writeCliDocsMarkdownFile({
|
|
36
|
+
filePath: "COMMANDS.md",
|
|
37
|
+
docsTitle: `Powerhouse CLI Commands (${process.env.WORKSPACE_VERSION || process.env.npm_package_version})`,
|
|
38
|
+
docsIntroduction:
|
|
39
|
+
"This document provides detailed information about the available commands in the Powerhouse CLI.",
|
|
40
|
+
cliDescription,
|
|
41
|
+
entries: commands,
|
|
42
|
+
});
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
await main();
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { startSwitchboard } from "@powerhousedao/switchboard/server";
|
|
3
|
+
//#region src/services/switchboard.ts
|
|
4
|
+
const defaultSwitchboardOptions = {
|
|
5
|
+
port: 4001,
|
|
6
|
+
dbPath: path.join(process.cwd(), ".ph/read-model.db"),
|
|
7
|
+
drive: {
|
|
8
|
+
id: "powerhouse",
|
|
9
|
+
slug: "powerhouse",
|
|
10
|
+
global: {
|
|
11
|
+
name: "Powerhouse",
|
|
12
|
+
icon: "https://ipfs.io/ipfs/QmcaTDBYn8X2psGaXe7iQ6qd8q6oqHLgxvMX9yXf7f9uP7"
|
|
13
|
+
},
|
|
14
|
+
local: {
|
|
15
|
+
availableOffline: true,
|
|
16
|
+
listeners: [],
|
|
17
|
+
sharingType: "public",
|
|
18
|
+
triggers: []
|
|
19
|
+
}
|
|
20
|
+
},
|
|
21
|
+
mcp: true
|
|
22
|
+
};
|
|
23
|
+
function getDefaultVetraSwitchboardOptions(vetraDriveId) {
|
|
24
|
+
return {
|
|
25
|
+
port: 4001,
|
|
26
|
+
dbPath: path.join(process.cwd(), ".ph/read-model.db"),
|
|
27
|
+
drive: {
|
|
28
|
+
id: vetraDriveId,
|
|
29
|
+
slug: vetraDriveId,
|
|
30
|
+
global: {
|
|
31
|
+
name: "Vetra",
|
|
32
|
+
icon: "https://azure-elderly-tortoise-212.mypinata.cloud/ipfs/bafkreibf2xokjqqtomqjd2w2xxmmhvogq4262csevclxh6sbrjgmjfre5u"
|
|
33
|
+
},
|
|
34
|
+
preferredEditor: "vetra-drive-app",
|
|
35
|
+
local: {
|
|
36
|
+
availableOffline: true,
|
|
37
|
+
listeners: [],
|
|
38
|
+
sharingType: "public",
|
|
39
|
+
triggers: []
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
};
|
|
43
|
+
}
|
|
44
|
+
async function startSwitchboard$1(options, logger) {
|
|
45
|
+
const { packages: packagesString, remoteDrives, useVetraDrive, vetraDriveId, useIdentity, keypairPath, requireIdentity, ...serverOptions } = options;
|
|
46
|
+
const defaultOptions = useVetraDrive ? getDefaultVetraSwitchboardOptions(vetraDriveId) : defaultSwitchboardOptions;
|
|
47
|
+
const identity = useIdentity || keypairPath || requireIdentity ? {
|
|
48
|
+
keypairPath,
|
|
49
|
+
requireExisting: requireIdentity
|
|
50
|
+
} : void 0;
|
|
51
|
+
const packages = packagesString?.split(",");
|
|
52
|
+
return await startSwitchboard(remoteDrives.length > 0 ? {
|
|
53
|
+
...defaultOptions,
|
|
54
|
+
drive: void 0,
|
|
55
|
+
...serverOptions,
|
|
56
|
+
remoteDrives,
|
|
57
|
+
identity,
|
|
58
|
+
packages,
|
|
59
|
+
logger
|
|
60
|
+
} : {
|
|
61
|
+
...defaultOptions,
|
|
62
|
+
...serverOptions,
|
|
63
|
+
remoteDrives,
|
|
64
|
+
identity,
|
|
65
|
+
packages,
|
|
66
|
+
logger
|
|
67
|
+
});
|
|
68
|
+
}
|
|
69
|
+
//#endregion
|
|
70
|
+
export { startSwitchboard$1 as n, defaultSwitchboardOptions as t };
|
|
71
|
+
|
|
72
|
+
//# sourceMappingURL=switchboard-DBqOSS0c.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"switchboard-DBqOSS0c.mjs","names":["startSwitchboard","startSwitchboardServer"],"sources":["../src/services/switchboard.ts"],"sourcesContent":["import type {\n IdentityOptions,\n StartServerOptions,\n} from \"@powerhousedao/switchboard/server\";\nimport { startSwitchboard as startSwitchboardServer } from \"@powerhousedao/switchboard/server\";\nimport type { ILogger } from \"document-model\";\nimport path from \"node:path\";\nimport type { SwitchboardArgs } from \"../types.js\";\n\nexport const defaultSwitchboardOptions = {\n port: 4001,\n dbPath: path.join(process.cwd(), \".ph/read-model.db\"),\n drive: {\n id: \"powerhouse\",\n slug: \"powerhouse\",\n global: {\n name: \"Powerhouse\",\n icon: \"https://ipfs.io/ipfs/QmcaTDBYn8X2psGaXe7iQ6qd8q6oqHLgxvMX9yXf7f9uP7\",\n },\n local: {\n availableOffline: true,\n listeners: [],\n sharingType: \"public\",\n triggers: [],\n },\n },\n mcp: true,\n} satisfies StartServerOptions;\n\nfunction getDefaultVetraSwitchboardOptions(\n vetraDriveId: string,\n): Partial<StartServerOptions> {\n return {\n port: 4001,\n dbPath: path.join(process.cwd(), \".ph/read-model.db\"),\n drive: {\n id: vetraDriveId,\n slug: vetraDriveId,\n global: {\n name: \"Vetra\",\n icon: \"https://azure-elderly-tortoise-212.mypinata.cloud/ipfs/bafkreibf2xokjqqtomqjd2w2xxmmhvogq4262csevclxh6sbrjgmjfre5u\",\n },\n preferredEditor: \"vetra-drive-app\",\n local: {\n availableOffline: true,\n listeners: [],\n sharingType: \"public\",\n triggers: [],\n },\n },\n };\n}\n\nexport async function startSwitchboard(\n options: SwitchboardArgs,\n logger?: ILogger,\n) {\n const {\n packages: packagesString,\n remoteDrives,\n useVetraDrive,\n vetraDriveId,\n useIdentity,\n keypairPath,\n requireIdentity,\n ...serverOptions\n } = options;\n\n // Choose the appropriate default configuration\n const defaultOptions = useVetraDrive\n ? 
getDefaultVetraSwitchboardOptions(vetraDriveId)\n : defaultSwitchboardOptions;\n\n // Build identity options if enabled\n const identity: IdentityOptions | undefined =\n useIdentity || keypairPath || requireIdentity\n ? {\n keypairPath,\n requireExisting: requireIdentity,\n }\n : undefined;\n\n const packages = packagesString?.split(\",\");\n\n // Only include the default drive if no remote drives are provided\n const finalOptions =\n remoteDrives.length > 0\n ? {\n ...defaultOptions,\n drive: undefined, // Don't create default drive when syncing with remote\n ...serverOptions,\n remoteDrives,\n identity,\n packages,\n logger,\n }\n : {\n ...defaultOptions,\n ...serverOptions,\n remoteDrives,\n identity,\n packages,\n logger,\n };\n\n const reactor = await startSwitchboardServer(finalOptions);\n\n return reactor;\n}\n"],"mappings":";;;AASA,MAAa,4BAA4B;CACvC,MAAM;CACN,QAAQ,KAAK,KAAK,QAAQ,KAAK,EAAE,oBAAoB;CACrD,OAAO;EACL,IAAI;EACJ,MAAM;EACN,QAAQ;GACN,MAAM;GACN,MAAM;GACP;EACD,OAAO;GACL,kBAAkB;GAClB,WAAW,EAAE;GACb,aAAa;GACb,UAAU,EAAE;GACb;EACF;CACD,KAAK;CACN;AAED,SAAS,kCACP,cAC6B;AAC7B,QAAO;EACL,MAAM;EACN,QAAQ,KAAK,KAAK,QAAQ,KAAK,EAAE,oBAAoB;EACrD,OAAO;GACL,IAAI;GACJ,MAAM;GACN,QAAQ;IACN,MAAM;IACN,MAAM;IACP;GACD,iBAAiB;GACjB,OAAO;IACL,kBAAkB;IAClB,WAAW,EAAE;IACb,aAAa;IACb,UAAU,EAAE;IACb;GACF;EACF;;AAGH,eAAsBA,mBACpB,SACA,QACA;CACA,MAAM,EACJ,UAAU,gBACV,cACA,eACA,cACA,aACA,aACA,iBACA,GAAG,kBACD;CAGJ,MAAM,iBAAiB,gBACnB,kCAAkC,aAAa,GAC/C;CAGJ,MAAM,WACJ,eAAe,eAAe,kBAC1B;EACE;EACA,iBAAiB;EAClB,GACD,KAAA;CAEN,MAAM,WAAW,gBAAgB,MAAM,IAAI;AAyB3C,QAFgB,MAAMC,iBAnBpB,aAAa,SAAS,IAClB;EACE,GAAG;EACH,OAAO,KAAA;EACP,GAAG;EACH;EACA;EACA;EACA;EACD,GACD;EACE,GAAG;EACH,GAAG;EACH;EACA;EACA;EACA;EACD,CAEmD"}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { Kysely, PostgresDialect } from "kysely";
|
|
2
|
+
import { Pool } from "pg";
|
|
3
|
+
import { REACTOR_SCHEMA, getMigrationStatus, runMigrations } from "@powerhousedao/reactor";
|
|
4
|
+
//#region src/services/switchboard-migrate.ts
|
|
5
|
+
function isPostgresUrl(url) {
|
|
6
|
+
return url.startsWith("postgresql://") || url.startsWith("postgres://");
|
|
7
|
+
}
|
|
8
|
+
async function runSwitchboardMigrations(options) {
|
|
9
|
+
const dbPath = options.dbPath ?? process.env.PH_REACTOR_DATABASE_URL ?? process.env.DATABASE_URL;
|
|
10
|
+
if (!dbPath || !isPostgresUrl(dbPath)) {
|
|
11
|
+
console.log("No PostgreSQL URL configured. Skipping migrations.");
|
|
12
|
+
console.log("(PGlite migrations are handled automatically on startup)");
|
|
13
|
+
return;
|
|
14
|
+
}
|
|
15
|
+
console.log(`Database: ${dbPath}`);
|
|
16
|
+
const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({ connectionString: dbPath }) }) });
|
|
17
|
+
try {
|
|
18
|
+
if (options.statusOnly) {
|
|
19
|
+
console.log("\nChecking migration status...");
|
|
20
|
+
const migrations = await getMigrationStatus(db, REACTOR_SCHEMA);
|
|
21
|
+
console.log("\nMigration Status:");
|
|
22
|
+
console.log("=================");
|
|
23
|
+
for (const migration of migrations) {
|
|
24
|
+
const status = migration.executedAt ? `[OK] Executed at ${migration.executedAt.toISOString()}` : "[--] Pending";
|
|
25
|
+
console.log(`${status} - ${migration.name}`);
|
|
26
|
+
}
|
|
27
|
+
} else {
|
|
28
|
+
console.log("\nRunning migrations...");
|
|
29
|
+
const result = await runMigrations(db, REACTOR_SCHEMA);
|
|
30
|
+
if (!result.success) {
|
|
31
|
+
console.error("Migration failed:", result.error?.message);
|
|
32
|
+
process.exit(1);
|
|
33
|
+
}
|
|
34
|
+
if (result.migrationsExecuted.length === 0) console.log("No migrations to run - database is up to date");
|
|
35
|
+
else {
|
|
36
|
+
console.log(`Successfully executed ${result.migrationsExecuted.length} migration(s):`);
|
|
37
|
+
for (const name of result.migrationsExecuted) console.log(` - ${name}`);
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
} catch (error) {
|
|
41
|
+
console.error("Error:", error instanceof Error ? error.message : String(error));
|
|
42
|
+
process.exit(1);
|
|
43
|
+
} finally {
|
|
44
|
+
await db.destroy();
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
//#endregion
|
|
48
|
+
export { runSwitchboardMigrations };
|
|
49
|
+
|
|
50
|
+
//# sourceMappingURL=switchboard-migrate-1lOCPmX0.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"switchboard-migrate-1lOCPmX0.mjs","names":[],"sources":["../src/services/switchboard-migrate.ts"],"sourcesContent":["import { Kysely, PostgresDialect } from \"kysely\";\nimport { Pool } from \"pg\";\nimport {\n runMigrations,\n getMigrationStatus,\n REACTOR_SCHEMA,\n} from \"@powerhousedao/reactor\";\n\ninterface MigrationOptions {\n dbPath?: string;\n statusOnly?: boolean;\n}\n\nfunction isPostgresUrl(url: string): boolean {\n return url.startsWith(\"postgresql://\") || url.startsWith(\"postgres://\");\n}\n\nexport async function runSwitchboardMigrations(\n options: MigrationOptions,\n): Promise<void> {\n const dbPath =\n options.dbPath ??\n process.env.PH_REACTOR_DATABASE_URL ??\n process.env.DATABASE_URL;\n\n if (!dbPath || !isPostgresUrl(dbPath)) {\n console.log(\"No PostgreSQL URL configured. Skipping migrations.\");\n console.log(\"(PGlite migrations are handled automatically on startup)\");\n return;\n }\n\n console.log(`Database: ${dbPath}`);\n\n const pool = new Pool({ connectionString: dbPath });\n\n const db = new Kysely<any>({\n dialect: new PostgresDialect({ pool }),\n });\n\n try {\n if (options.statusOnly) {\n console.log(\"\\nChecking migration status...\");\n const migrations = await getMigrationStatus(db, REACTOR_SCHEMA);\n\n console.log(\"\\nMigration Status:\");\n console.log(\"=================\");\n\n for (const migration of migrations) {\n const status = migration.executedAt\n ? 
`[OK] Executed at ${migration.executedAt.toISOString()}`\n : \"[--] Pending\";\n console.log(`${status} - ${migration.name}`);\n }\n } else {\n console.log(\"\\nRunning migrations...\");\n const result = await runMigrations(db, REACTOR_SCHEMA);\n\n if (!result.success) {\n console.error(\"Migration failed:\", result.error?.message);\n process.exit(1);\n }\n\n if (result.migrationsExecuted.length === 0) {\n console.log(\"No migrations to run - database is up to date\");\n } else {\n console.log(\n `Successfully executed ${result.migrationsExecuted.length} migration(s):`,\n );\n for (const name of result.migrationsExecuted) {\n console.log(` - ${name}`);\n }\n }\n }\n } catch (error) {\n console.error(\n \"Error:\",\n error instanceof Error ? error.message : String(error),\n );\n process.exit(1);\n } finally {\n await db.destroy();\n }\n}\n"],"mappings":";;;;AAaA,SAAS,cAAc,KAAsB;AAC3C,QAAO,IAAI,WAAW,gBAAgB,IAAI,IAAI,WAAW,cAAc;;AAGzE,eAAsB,yBACpB,SACe;CACf,MAAM,SACJ,QAAQ,UACR,QAAQ,IAAI,2BACZ,QAAQ,IAAI;AAEd,KAAI,CAAC,UAAU,CAAC,cAAc,OAAO,EAAE;AACrC,UAAQ,IAAI,qDAAqD;AACjE,UAAQ,IAAI,2DAA2D;AACvE;;AAGF,SAAQ,IAAI,aAAa,SAAS;CAIlC,MAAM,KAAK,IAAI,OAAY,EACzB,SAAS,IAAI,gBAAgB,EAAE,MAHpB,IAAI,KAAK,EAAE,kBAAkB,QAAQ,CAAC,EAGZ,CAAC,EACvC,CAAC;AAEF,KAAI;AACF,MAAI,QAAQ,YAAY;AACtB,WAAQ,IAAI,iCAAiC;GAC7C,MAAM,aAAa,MAAM,mBAAmB,IAAI,eAAe;AAE/D,WAAQ,IAAI,sBAAsB;AAClC,WAAQ,IAAI,oBAAoB;AAEhC,QAAK,MAAM,aAAa,YAAY;IAClC,MAAM,SAAS,UAAU,aACrB,oBAAoB,UAAU,WAAW,aAAa,KACtD;AACJ,YAAQ,IAAI,GAAG,OAAO,KAAK,UAAU,OAAO;;SAEzC;AACL,WAAQ,IAAI,0BAA0B;GACtC,MAAM,SAAS,MAAM,cAAc,IAAI,eAAe;AAEtD,OAAI,CAAC,OAAO,SAAS;AACnB,YAAQ,MAAM,qBAAqB,OAAO,OAAO,QAAQ;AACzD,YAAQ,KAAK,EAAE;;AAGjB,OAAI,OAAO,mBAAmB,WAAW,EACvC,SAAQ,IAAI,gDAAgD;QACvD;AACL,YAAQ,IACN,yBAAyB,OAAO,mBAAmB,OAAO,gBAC3D;AACD,SAAK,MAAM,QAAQ,OAAO,mBACxB,SAAQ,IAAI,OAAO,OAAO;;;UAIzB,OAAO;AACd,UAAQ,MACN,UACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,CACvD;AACD,UAAQ,KAAK,EAAE;WACP;AACR,QAAM,GAAG,SAAS"}
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
import path, { dirname } from "node:path";
|
|
2
|
+
import crypto from "node:crypto";
|
|
3
|
+
import fs from "node:fs";
|
|
4
|
+
import { homedir } from "node:os";
|
|
5
|
+
//#region src/utils.ts
|
|
6
|
+
const POWERHOUSE_CONFIG_FILE = "powerhouse.config.json";
|
|
7
|
+
const POWERHOUSE_GLOBAL_DIR = path.join(homedir(), ".ph");
|
|
8
|
+
const packageManagers = {
|
|
9
|
+
bun: {
|
|
10
|
+
globalPathRegexp: /[\\/].bun[\\/]/,
|
|
11
|
+
installCommand: "bun add {{dependency}}",
|
|
12
|
+
uninstallCommand: "bun remove {{dependency}}",
|
|
13
|
+
workspaceOption: "",
|
|
14
|
+
lockfile: "bun.lock",
|
|
15
|
+
updateCommand: "bun update {{dependency}}",
|
|
16
|
+
buildAffected: "bun run build:affected"
|
|
17
|
+
},
|
|
18
|
+
pnpm: {
|
|
19
|
+
globalPathRegexp: /[\\/]pnpm[\\/]/,
|
|
20
|
+
installCommand: "pnpm add {{dependency}}",
|
|
21
|
+
uninstallCommand: "pnpm remove {{dependency}}",
|
|
22
|
+
workspaceOption: "--workspace-root",
|
|
23
|
+
lockfile: "pnpm-lock.yaml",
|
|
24
|
+
updateCommand: "pnpm update {{dependency}}",
|
|
25
|
+
buildAffected: "pnpm run build:affected"
|
|
26
|
+
},
|
|
27
|
+
yarn: {
|
|
28
|
+
globalPathRegexp: /[\\/]yarn[\\/]/,
|
|
29
|
+
installCommand: "yarn add {{dependency}}",
|
|
30
|
+
uninstallCommand: "yarn remove {{dependency}}",
|
|
31
|
+
workspaceOption: "-W",
|
|
32
|
+
lockfile: "yarn.lock",
|
|
33
|
+
updateCommand: "yarn upgrade {{dependency}}",
|
|
34
|
+
buildAffected: "yarn run build:affected"
|
|
35
|
+
},
|
|
36
|
+
npm: {
|
|
37
|
+
installCommand: "npm install {{dependency}}",
|
|
38
|
+
uninstallCommand: "npm uninstall {{dependency}}",
|
|
39
|
+
workspaceOption: "",
|
|
40
|
+
lockfile: "package-lock.json",
|
|
41
|
+
updateCommand: "npm update {{dependency}} --save",
|
|
42
|
+
buildAffected: "npm run build:affected"
|
|
43
|
+
}
|
|
44
|
+
};
|
|
45
|
+
function defaultPathValidation() {
|
|
46
|
+
return true;
|
|
47
|
+
}
|
|
48
|
+
function isPowerhouseProject(dir) {
|
|
49
|
+
const powerhouseConfigPath = path.join(dir, POWERHOUSE_CONFIG_FILE);
|
|
50
|
+
return fs.existsSync(powerhouseConfigPath);
|
|
51
|
+
}
|
|
52
|
+
function findNodeProjectRoot(dir, pathValidation = defaultPathValidation) {
|
|
53
|
+
const packageJsonPath = path.join(dir, "package.json");
|
|
54
|
+
if (fs.existsSync(packageJsonPath) && pathValidation(dir)) return dir;
|
|
55
|
+
const parentDir = dirname(dir);
|
|
56
|
+
if (parentDir === dir) return null;
|
|
57
|
+
return findNodeProjectRoot(parentDir, pathValidation);
|
|
58
|
+
}
|
|
59
|
+
function getProjectInfo(debug) {
|
|
60
|
+
const currentPath = process.cwd();
|
|
61
|
+
if (debug) console.log(">>> currentPath", currentPath);
|
|
62
|
+
const projectPath = findNodeProjectRoot(currentPath, isPowerhouseProject);
|
|
63
|
+
if (!projectPath) return {
|
|
64
|
+
isGlobal: true,
|
|
65
|
+
path: POWERHOUSE_GLOBAL_DIR,
|
|
66
|
+
packageManager: getPackageManagerFromLockfile(POWERHOUSE_GLOBAL_DIR)
|
|
67
|
+
};
|
|
68
|
+
return {
|
|
69
|
+
isGlobal: false,
|
|
70
|
+
path: projectPath,
|
|
71
|
+
packageManager: getPackageManagerFromLockfile(projectPath)
|
|
72
|
+
};
|
|
73
|
+
}
|
|
74
|
+
/**
|
|
75
|
+
* Generates a unique drive ID based on the project path.
|
|
76
|
+
* The same project path will always generate the same ID.
|
|
77
|
+
* @param name - The name prefix for the drive ID (e.g., "vetra", "powerhouse")
|
|
78
|
+
* @returns A unique drive ID in the format "{name}-{hash}"
|
|
79
|
+
*/
|
|
80
|
+
function generateProjectDriveId(name) {
|
|
81
|
+
const projectInfo = getProjectInfo();
|
|
82
|
+
return `${name}-${crypto.createHash("sha256").update(projectInfo.path).digest("hex").substring(0, 8)}`;
|
|
83
|
+
}
|
|
84
|
+
function getPackageManagerFromLockfile(dir) {
|
|
85
|
+
if (fs.existsSync(path.join(dir, packageManagers.pnpm.lockfile))) return "pnpm";
|
|
86
|
+
else if (fs.existsSync(path.join(dir, packageManagers.yarn.lockfile))) return "yarn";
|
|
87
|
+
else if (fs.existsSync(path.join(dir, packageManagers.bun.lockfile))) return "bun";
|
|
88
|
+
return "npm";
|
|
89
|
+
}
|
|
90
|
+
function updatePackagesArray(currentPackages = [], dependencies, task = "install") {
|
|
91
|
+
const isInstall = task === "install";
|
|
92
|
+
const mappedPackages = dependencies.map((dep) => ({
|
|
93
|
+
packageName: dep.name,
|
|
94
|
+
version: dep.version,
|
|
95
|
+
provider: "registry"
|
|
96
|
+
}));
|
|
97
|
+
if (isInstall) return [...currentPackages.filter((pkg) => !dependencies.find((dep) => dep.name === pkg.packageName)), ...mappedPackages];
|
|
98
|
+
return currentPackages.filter((pkg) => !dependencies.map((dep) => dep.name).includes(pkg.packageName));
|
|
99
|
+
}
|
|
100
|
+
function updateConfigFile(dependencies, projectPath, task = "install") {
|
|
101
|
+
const configPath = path.join(projectPath, POWERHOUSE_CONFIG_FILE);
|
|
102
|
+
if (!fs.existsSync(configPath)) throw new Error(`powerhouse.config.json file not found. projectPath: ${projectPath}`);
|
|
103
|
+
const config = JSON.parse(fs.readFileSync(configPath, "utf-8"));
|
|
104
|
+
const updatedConfig = {
|
|
105
|
+
...config,
|
|
106
|
+
packages: updatePackagesArray(config.packages, dependencies, task)
|
|
107
|
+
};
|
|
108
|
+
fs.writeFileSync(configPath, JSON.stringify(updatedConfig, null, 2));
|
|
109
|
+
}
|
|
110
|
+
/**
|
|
111
|
+
* Updates the styles.css file to include imports for newly installed packages
|
|
112
|
+
* @param dependencies - Array of dependencies that were installed
|
|
113
|
+
* @param projectPath - Path to the project root
|
|
114
|
+
*/
|
|
115
|
+
function updateStylesFile(dependencies, projectPath) {
|
|
116
|
+
const stylesPath = path.join(projectPath, "style.css");
|
|
117
|
+
if (!fs.existsSync(stylesPath)) {
|
|
118
|
+
console.warn("⚠️ Warning: style.css file not found in project root");
|
|
119
|
+
return;
|
|
120
|
+
}
|
|
121
|
+
const currentStyles = fs.readFileSync(stylesPath, "utf-8");
|
|
122
|
+
let updatedStyles = currentStyles;
|
|
123
|
+
for (const dep of dependencies) {
|
|
124
|
+
const cssPath = `./node_modules/${dep.name}/dist/style.css`;
|
|
125
|
+
const fullCssPath = path.join(projectPath, cssPath);
|
|
126
|
+
const importStatement = `@import '${cssPath}';`;
|
|
127
|
+
if (!fs.existsSync(fullCssPath)) {
|
|
128
|
+
console.warn(`⚠️ Warning: CSS file not found at ${cssPath}`);
|
|
129
|
+
continue;
|
|
130
|
+
}
|
|
131
|
+
if (currentStyles.includes(importStatement)) continue;
|
|
132
|
+
const importLines = currentStyles.split("\n").filter((line) => line.trim().startsWith("@import"));
|
|
133
|
+
const lastImport = importLines[importLines.length - 1];
|
|
134
|
+
if (lastImport) updatedStyles = currentStyles.replace(lastImport, `${lastImport}\n${importStatement}`);
|
|
135
|
+
else updatedStyles = `${importStatement}\n${currentStyles}`;
|
|
136
|
+
}
|
|
137
|
+
if (updatedStyles !== currentStyles) fs.writeFileSync(stylesPath, updatedStyles);
|
|
138
|
+
}
|
|
139
|
+
/**
|
|
140
|
+
* Removes CSS imports for uninstalled packages from styles.css
|
|
141
|
+
*/
|
|
142
|
+
function removeStylesImports(dependencies, projectPath) {
|
|
143
|
+
const stylesPath = path.join(projectPath, "style.css");
|
|
144
|
+
if (!fs.existsSync(stylesPath)) {
|
|
145
|
+
console.warn("⚠️ Warning: style.css file not found in project root");
|
|
146
|
+
return;
|
|
147
|
+
}
|
|
148
|
+
const currentStyles = fs.readFileSync(stylesPath, "utf-8");
|
|
149
|
+
let updatedStyles = currentStyles;
|
|
150
|
+
for (const dep of dependencies) {
|
|
151
|
+
const importStatement = `@import '${`./node_modules/${dep.name}/dist/style.css`}';`;
|
|
152
|
+
const lines = updatedStyles.split("\n");
|
|
153
|
+
const filteredLines = lines.filter((line) => !line.trim().includes(importStatement));
|
|
154
|
+
if (filteredLines.length !== lines.length) updatedStyles = filteredLines.join("\n");
|
|
155
|
+
}
|
|
156
|
+
if (updatedStyles !== currentStyles) fs.writeFileSync(stylesPath, updatedStyles);
|
|
157
|
+
}
|
|
158
|
+
//#endregion
|
|
159
|
+
export { updateStylesFile as a, updateConfigFile as i, getProjectInfo as n, removeStylesImports as r, generateProjectDriveId as t };
|
|
160
|
+
|
|
161
|
+
//# sourceMappingURL=utils-DbFSkp_Q.mjs.map
|