@powerhousedao/ph-cli 6.0.0-dev.16 → 6.0.0-dev.160
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/assign-env-vars-CzHgn5ax.mjs +15 -0
- package/dist/assign-env-vars-CzHgn5ax.mjs.map +1 -0
- package/dist/auth-BeA5gDPQ.mjs +23 -0
- package/dist/auth-BeA5gDPQ.mjs.map +1 -0
- package/dist/build-BflSHYLP.mjs +33 -0
- package/dist/build-BflSHYLP.mjs.map +1 -0
- package/dist/cli.d.mts +1 -0
- package/dist/cli.mjs +731 -0
- package/dist/cli.mjs.map +1 -0
- package/dist/connect-build-CrnmJlav.mjs +21 -0
- package/dist/connect-build-CrnmJlav.mjs.map +1 -0
- package/dist/connect-preview-4Xe7Lm1V.mjs +27 -0
- package/dist/connect-preview-4Xe7Lm1V.mjs.map +1 -0
- package/dist/connect-studio-38_NrT_a.mjs +28 -0
- package/dist/connect-studio-38_NrT_a.mjs.map +1 -0
- package/dist/connect-studio-DuH6WcoA.mjs +3 -0
- package/dist/generate-CMQGYRrW.mjs +2 -0
- package/dist/generate-Dq80G8n4.mjs +58 -0
- package/dist/generate-Dq80G8n4.mjs.map +1 -0
- package/dist/inspect-nPp5CN8a.mjs +45 -0
- package/dist/inspect-nPp5CN8a.mjs.map +1 -0
- package/dist/migrate-DzLTX506.mjs +245 -0
- package/dist/migrate-DzLTX506.mjs.map +1 -0
- package/dist/scripts/generate-commands-docs.ts +45 -0
- package/dist/switchboard-BclxuF4j.mjs +72 -0
- package/dist/switchboard-BclxuF4j.mjs.map +1 -0
- package/dist/switchboard-DXF1APL1.mjs +2 -0
- package/dist/switchboard-migrate-Cwx-8MnF.mjs +50 -0
- package/dist/switchboard-migrate-Cwx-8MnF.mjs.map +1 -0
- package/dist/utils-DbFSkp_Q.mjs +161 -0
- package/dist/utils-DbFSkp_Q.mjs.map +1 -0
- package/dist/vetra-CjawXTZJ.mjs +360 -0
- package/dist/vetra-CjawXTZJ.mjs.map +1 -0
- package/package.json +34 -33
- package/dist/scripts/generate-commands-md.d.ts +0 -2
- package/dist/scripts/generate-commands-md.d.ts.map +0 -1
- package/dist/scripts/generate-commands-md.js +0 -72
- package/dist/scripts/generate-commands-md.js.map +0 -1
- package/dist/scripts/generate-commands-md.ts +0 -84
- package/dist/scripts/generate-version.d.ts +0 -2
- package/dist/scripts/generate-version.d.ts.map +0 -1
- package/dist/scripts/generate-version.js +0 -13
- package/dist/scripts/generate-version.js.map +0 -1
- package/dist/scripts/generate-version.ts +0 -22
- package/dist/src/cli.d.ts +0 -3
- package/dist/src/cli.d.ts.map +0 -1
- package/dist/src/cli.js +0 -42
- package/dist/src/cli.js.map +0 -1
- package/dist/src/commands/access-token.d.ts +0 -9
- package/dist/src/commands/access-token.d.ts.map +0 -1
- package/dist/src/commands/access-token.js +0 -110
- package/dist/src/commands/access-token.js.map +0 -1
- package/dist/src/commands/connect.d.ts +0 -19
- package/dist/src/commands/connect.d.ts.map +0 -1
- package/dist/src/commands/connect.js +0 -85
- package/dist/src/commands/connect.js.map +0 -1
- package/dist/src/commands/generate.d.ts +0 -9
- package/dist/src/commands/generate.d.ts.map +0 -1
- package/dist/src/commands/generate.js +0 -41
- package/dist/src/commands/generate.js.map +0 -1
- package/dist/src/commands/help.d.ts +0 -3
- package/dist/src/commands/help.d.ts.map +0 -1
- package/dist/src/commands/help.js +0 -9
- package/dist/src/commands/help.js.map +0 -1
- package/dist/src/commands/index.d.ts +0 -14
- package/dist/src/commands/index.d.ts.map +0 -1
- package/dist/src/commands/index.js +0 -14
- package/dist/src/commands/index.js.map +0 -1
- package/dist/src/commands/inspect.d.ts +0 -6
- package/dist/src/commands/inspect.d.ts.map +0 -1
- package/dist/src/commands/inspect.js +0 -21
- package/dist/src/commands/inspect.js.map +0 -1
- package/dist/src/commands/install.d.ts +0 -15
- package/dist/src/commands/install.d.ts.map +0 -1
- package/dist/src/commands/install.js +0 -127
- package/dist/src/commands/install.js.map +0 -1
- package/dist/src/commands/list.d.ts +0 -9
- package/dist/src/commands/list.d.ts.map +0 -1
- package/dist/src/commands/list.js +0 -36
- package/dist/src/commands/list.js.map +0 -1
- package/dist/src/commands/login.d.ts +0 -12
- package/dist/src/commands/login.d.ts.map +0 -1
- package/dist/src/commands/login.js +0 -208
- package/dist/src/commands/login.js.map +0 -1
- package/dist/src/commands/migrate.d.ts +0 -10
- package/dist/src/commands/migrate.d.ts.map +0 -1
- package/dist/src/commands/migrate.js +0 -12
- package/dist/src/commands/migrate.js.map +0 -1
- package/dist/src/commands/register-commands.d.ts +0 -5
- package/dist/src/commands/register-commands.d.ts.map +0 -1
- package/dist/src/commands/register-commands.js +0 -32
- package/dist/src/commands/register-commands.js.map +0 -1
- package/dist/src/commands/service.d.ts +0 -5
- package/dist/src/commands/service.d.ts.map +0 -1
- package/dist/src/commands/service.js +0 -67
- package/dist/src/commands/service.js.map +0 -1
- package/dist/src/commands/switchboard.d.ts +0 -9
- package/dist/src/commands/switchboard.d.ts.map +0 -1
- package/dist/src/commands/switchboard.js +0 -78
- package/dist/src/commands/switchboard.js.map +0 -1
- package/dist/src/commands/uninstall.d.ts +0 -15
- package/dist/src/commands/uninstall.d.ts.map +0 -1
- package/dist/src/commands/uninstall.js +0 -120
- package/dist/src/commands/uninstall.js.map +0 -1
- package/dist/src/commands/vetra.d.ts +0 -11
- package/dist/src/commands/vetra.d.ts.map +0 -1
- package/dist/src/commands/vetra.js +0 -35
- package/dist/src/commands/vetra.js.map +0 -1
- package/dist/src/help.d.ts +0 -65
- package/dist/src/help.d.ts.map +0 -1
- package/dist/src/help.js +0 -770
- package/dist/src/help.js.map +0 -1
- package/dist/src/index.d.ts +0 -5
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/index.js +0 -5
- package/dist/src/index.js.map +0 -1
- package/dist/src/services/auth.d.ts +0 -69
- package/dist/src/services/auth.d.ts.map +0 -1
- package/dist/src/services/auth.js +0 -171
- package/dist/src/services/auth.js.map +0 -1
- package/dist/src/services/connect.d.ts +0 -2
- package/dist/src/services/connect.d.ts.map +0 -1
- package/dist/src/services/connect.js +0 -2
- package/dist/src/services/connect.js.map +0 -1
- package/dist/src/services/generate.d.ts +0 -30
- package/dist/src/services/generate.d.ts.map +0 -1
- package/dist/src/services/generate.js +0 -106
- package/dist/src/services/generate.js.map +0 -1
- package/dist/src/services/inspect.d.ts +0 -5
- package/dist/src/services/inspect.d.ts.map +0 -1
- package/dist/src/services/inspect.js +0 -49
- package/dist/src/services/inspect.js.map +0 -1
- package/dist/src/services/migrate.d.ts +0 -7
- package/dist/src/services/migrate.d.ts.map +0 -1
- package/dist/src/services/migrate.js +0 -289
- package/dist/src/services/migrate.js.map +0 -1
- package/dist/src/services/switchboard-migrate.d.ts +0 -7
- package/dist/src/services/switchboard-migrate.d.ts.map +0 -1
- package/dist/src/services/switchboard-migrate.js +0 -60
- package/dist/src/services/switchboard-migrate.js.map +0 -1
- package/dist/src/services/switchboard.d.ts +0 -54
- package/dist/src/services/switchboard.d.ts.map +0 -1
- package/dist/src/services/switchboard.js +0 -79
- package/dist/src/services/switchboard.js.map +0 -1
- package/dist/src/services/vetra.d.ts +0 -15
- package/dist/src/services/vetra.d.ts.map +0 -1
- package/dist/src/services/vetra.js +0 -176
- package/dist/src/services/vetra.js.map +0 -1
- package/dist/src/types.d.ts +0 -2
- package/dist/src/types.d.ts.map +0 -1
- package/dist/src/types.js +0 -2
- package/dist/src/types.js.map +0 -1
- package/dist/src/utils/configure-vetra-github-url.d.ts +0 -12
- package/dist/src/utils/configure-vetra-github-url.d.ts.map +0 -1
- package/dist/src/utils/configure-vetra-github-url.js +0 -230
- package/dist/src/utils/configure-vetra-github-url.js.map +0 -1
- package/dist/src/utils.d.ts +0 -116
- package/dist/src/utils.d.ts.map +0 -1
- package/dist/src/utils.js +0 -261
- package/dist/src/utils.js.map +0 -1
- package/dist/src/version.d.ts +0 -2
- package/dist/src/version.d.ts.map +0 -1
- package/dist/src/version.js +0 -3
- package/dist/src/version.js.map +0 -1
- package/dist/test/utils.test.d.ts +0 -2
- package/dist/test/utils.test.d.ts.map +0 -1
- package/dist/test/utils.test.js +0 -132
- package/dist/test/utils.test.js.map +0 -1
- package/dist/tsconfig.tsbuildinfo +0 -1
- package/dist/vitest.config.d.ts +0 -3
- package/dist/vitest.config.d.ts.map +0 -1
- package/dist/vitest.config.js +0 -7
- package/dist/vitest.config.js.map +0 -1
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migrate-DzLTX506.mjs","names":[],"sources":["../src/services/migrate.ts"],"sourcesContent":["import {\n connectEntrypointTemplate,\n dockerfileTemplate,\n indexHtmlTemplate,\n indexTsTemplate,\n nginxConfTemplate,\n packageJsonExportsTemplate,\n packageJsonScriptsTemplate,\n switchboardEntrypointTemplate,\n syncAndPublishWorkflowTemplate,\n tsConfigTemplate,\n} from \"@powerhousedao/codegen/templates\";\nimport { existsSync, readdirSync } from \"node:fs\";\nimport { mkdir, readdir, stat, writeFile } from \"node:fs/promises\";\nimport path from \"path\";\nimport { readPackage } from \"read-pkg\";\nimport type {\n ArrayLiteralExpression,\n ObjectLiteralExpression,\n SourceFile,\n StringLiteral,\n} from \"ts-morph\";\nimport { Project, SyntaxKind } from \"ts-morph\";\nimport { writePackage } from \"write-package\";\nimport type { GenerateArgs, MigrateArgs } from \"../types.js\";\nimport { startGenerate } from \"./generate.js\";\n\nexport async function startMigrate(_args: MigrateArgs) {\n await migratePackageJson();\n await migrateTsConfig();\n await migrateIndexHtml();\n await migrateCIFiles();\n await runGenerateOnAllDocumentModels();\n await runGenerateOnAllEditors();\n const project = new Project({\n tsConfigFilePath: path.resolve(\"tsconfig.json\"),\n compilerOptions: {\n verbatimModuleSyntax: false,\n },\n });\n deleteLegacyEditorDirIndexFiles(project);\n migrateEditorFiles(project);\n migrateRootIndex(project);\n removeZDotSchemaUsage(project);\n removeCreatorsUsage(project);\n removeUtilsDefaultExportUsage(project);\n fixImports(project);\n}\n\n/** Ensure that the project package.json has the correct scripts and exports. */\nasync function migratePackageJson() {\n const packageJson = await readPackage();\n const existingScripts = packageJson.scripts;\n const existingExports =\n !!packageJson.exports &&\n !Array.isArray(packageJson.exports) &&\n typeof packageJson.exports !== \"string\"\n ? 
packageJson.exports\n : {};\n const newScripts = {\n ...existingScripts,\n ...packageJsonScriptsTemplate,\n };\n const newExports = {\n ...existingExports,\n ...packageJsonExportsTemplate,\n };\n packageJson.scripts = newScripts;\n packageJson.exports = newExports;\n await writePackage(packageJson);\n}\n\n/** Ensure that the project index.html matches the boilerplate index.html. */\nasync function migrateIndexHtml() {\n const indexHtmlPath = path.join(process.cwd(), \"index.html\");\n await writeFile(indexHtmlPath, indexHtmlTemplate);\n}\n\n/** Ensure that the project tsconfig.json matches the boilerplate tsconfig.json. */\nasync function migrateTsConfig() {\n const tsConfigPath = path.join(process.cwd(), \"tsconfig.json\");\n await writeFile(tsConfigPath, tsConfigTemplate);\n}\n\n/** Check if a file exists */\nasync function fileExists(filePath: string): Promise<boolean> {\n try {\n await stat(filePath);\n return true;\n } catch {\n return false;\n }\n}\n\n/** Write a file with optional warning if it already exists */\nasync function writeFileWithWarning(\n filePath: string,\n content: string,\n): Promise<void> {\n const exists = await fileExists(filePath);\n if (exists) {\n console.warn(`Warning: Overwriting existing file: ${filePath}`);\n }\n await writeFile(filePath, content);\n}\n\n/** Add CI/CD workflow and Docker files to the project. 
*/\nasync function migrateCIFiles() {\n const cwd = process.cwd();\n\n try {\n // Create directories if they don't exist\n await mkdir(path.join(cwd, \".github/workflows\"), { recursive: true });\n await mkdir(path.join(cwd, \"docker\"), { recursive: true });\n\n // Write CI/CD workflow\n await writeFileWithWarning(\n path.join(cwd, \".github/workflows/sync-and-publish.yml\"),\n syncAndPublishWorkflowTemplate,\n );\n\n // Write Docker files\n await writeFileWithWarning(\n path.join(cwd, \"Dockerfile\"),\n dockerfileTemplate,\n );\n await writeFileWithWarning(\n path.join(cwd, \"docker/nginx.conf\"),\n nginxConfTemplate,\n );\n await writeFileWithWarning(\n path.join(cwd, \"docker/connect-entrypoint.sh\"),\n connectEntrypointTemplate,\n );\n await writeFileWithWarning(\n path.join(cwd, \"docker/switchboard-entrypoint.sh\"),\n switchboardEntrypointTemplate,\n );\n } catch (error) {\n console.error(\"Error migrating CI files:\", error);\n throw error;\n }\n}\n\n/** Ensure that the project index.ts file uses the new exports for editors and document models */\nfunction migrateRootIndex(project: Project) {\n const indexPath = path.join(process.cwd(), \"index.ts\");\n let source = project.getSourceFile(indexPath);\n if (!source) {\n source = project.createSourceFile(indexPath);\n }\n source.replaceWithText(indexTsTemplate);\n project.saveSync();\n}\n\n/** Ensure that the project's editor.tsx files use default exports for lazy loading */\nfunction migrateEditorFiles(project: Project) {\n const editorsPath = path.join(process.cwd(), \"editors\");\n const dirs = readdirSync(editorsPath, { withFileTypes: true })\n .filter((entry) => entry.isDirectory())\n .map((entry) => entry.name);\n for (const dir of dirs) {\n const editorFilePath = path.join(editorsPath, dir, \"editor.tsx\");\n const source = project.getSourceFile(editorFilePath);\n if (!source) continue;\n const text = source.getFullText();\n const replaceNamedExportWithDefaultExport = text.replace(\n \"export function 
Editor\",\n \"export default function Editor\",\n );\n source.replaceWithText(replaceNamedExportWithDefaultExport);\n project.saveSync();\n }\n}\n\n/** Delete the legacy index files in editor directories which are now replaced by module.ts files */\nfunction deleteLegacyEditorDirIndexFiles(project: Project) {\n const editorsPath = path.join(process.cwd(), \"editors\");\n const dirs = readdirSync(editorsPath, { withFileTypes: true })\n .filter((entry) => entry.isDirectory())\n .map((entry) => entry.name);\n for (const dir of dirs) {\n const indexFilePath = path.join(editorsPath, dir, \"index.ts\");\n const source = project.getSourceFile(indexFilePath);\n if (!source) continue;\n source.delete();\n project.saveSync();\n }\n}\n\n/** Remove usage of the `z` re-export of document model schemas which caused naming conflicts */\nfunction removeZDotSchemaUsage(project: Project) {\n const sourceFiles = project.getSourceFiles();\n for (const sourceFile of sourceFiles) {\n const path = sourceFile.getFilePath();\n if (!path.includes(process.cwd())) continue;\n if (path.includes(\"zod.ts\")) continue;\n const text = sourceFile.getFullText();\n if (/import\\s+(?:\\{\\s*z\\s*\\}|z)\\s+from\\s+['\"]zod['\"]/.test(text)) continue;\n const withoutZDot = text.replace(/z\\./g, \"\");\n sourceFile.replaceWithText(withoutZDot);\n project.saveSync();\n }\n}\n\n/** Remove usage of the `creators` as an aliased full module export which is no longer needed */\nfunction removeCreatorsUsage(project: Project) {\n const sourceFiles = project.getSourceFiles();\n for (const sourceFile of sourceFiles) {\n const path = sourceFile.getFilePath();\n if (!path.includes(process.cwd())) continue;\n const creatorsInvocations = sourceFile\n .getStatements()\n .filter(\n (statement) =>\n statement.getKind() === SyntaxKind.PropertyAccessExpression,\n )\n .filter((statement) => statement.getText().includes(\"creators.\"));\n for (const creatorInvocation of creatorsInvocations) {\n const withoutCreators = 
creatorInvocation\n .getText()\n .replace(/creators\\./g, \"\");\n creatorInvocation.replaceWithText(withoutCreators);\n project.saveSync();\n }\n }\n}\n\n/** Remove usage of the `utils` import which is no longer exported as a default import */\nfunction removeUtilsDefaultExportUsage(project: Project) {\n const sourceFiles = project.getSourceFiles();\n for (const sourceFile of sourceFiles) {\n const path = sourceFile.getFilePath();\n if (!path.includes(process.cwd())) continue;\n const statement = sourceFile\n .getImportDeclarations()\n .find((importDeclaration) =>\n importDeclaration.getText().includes(\"import utils\"),\n );\n if (statement) {\n statement.remove();\n project.saveSync();\n }\n }\n}\n\n/** Fix missing imports in the project */\nfunction fixImports(project: Project) {\n const sourceFiles = project.getSourceFiles();\n for (const sourceFile of sourceFiles) {\n const path = sourceFile.getFilePath();\n if (!path.includes(process.cwd())) continue;\n sourceFile.fixMissingImports(undefined, {\n importModuleSpecifierPreference: \"project-relative\",\n autoImportSpecifierExcludeRegexes: [\"document-model\", \"document-drive\"],\n importModuleSpecifierEnding: \"js\",\n preferTypeOnlyAutoImports: false,\n });\n sourceFile.fixUnusedIdentifiers();\n\n project.saveSync();\n }\n}\n\n/** Run the generate command on all document models */\nasync function runGenerateOnAllDocumentModels() {\n await startGenerate({} as GenerateArgs);\n}\n\n/** Run the generate command on all editors */\nasync function runGenerateOnAllEditors() {\n const editorsPath = path.join(process.cwd(), \"editors\");\n const dirs = (await readdir(editorsPath, { withFileTypes: true }))\n .filter((entry) => entry.isDirectory())\n .map((entry) => entry.name);\n for (const dir of dirs) {\n const moduleFilePath = path.join(editorsPath, dir, \"module.ts\");\n const indexFilePath = path.join(editorsPath, dir, \"index.ts\");\n const hasModuleFile = existsSync(moduleFilePath);\n const hasIndexFile = 
existsSync(indexFilePath);\n if (!hasModuleFile && !hasIndexFile) {\n continue;\n }\n const filePathToUse = hasModuleFile ? moduleFilePath : indexFilePath;\n const { id, name, documentTypes, isApp } =\n extractEditorModuleInfo(filePathToUse);\n\n if (!name) {\n throw new Error(`Editor ${dir} is missing name`);\n }\n if (!id) {\n throw new Error(`Editor ${dir} is missing id`);\n }\n if (isApp) {\n const configFilePath = path.join(editorsPath, dir, \"config.ts\");\n const hasConfigFile = existsSync(configFilePath);\n const allowedDocumentTypes = hasConfigFile\n ? extractAllowedDocumentTypes(configFilePath)\n : undefined;\n const args = {\n appName: name,\n appId: id,\n appDirName: dir,\n allowedDocumentTypes,\n } as GenerateArgs;\n await startGenerate(args);\n } else {\n const args = {\n editorName: name,\n editorId: id,\n editorDirName: dir,\n documentType: documentTypes?.[0],\n } as GenerateArgs;\n await startGenerate(args);\n }\n }\n}\n\n/** Extract the name, id, document types, and whether the editor is a app from the editor module */\nfunction extractEditorModuleInfo(filePath: string) {\n const project = new Project({\n tsConfigFilePath: path.resolve(\"tsconfig.json\"),\n compilerOptions: {\n verbatimModuleSyntax: false,\n },\n });\n const sourceFile = project.getSourceFileOrThrow(filePath);\n const moduleDeclaration = getVariableDeclarationByTypeName(\n sourceFile,\n \"EditorModule\",\n );\n\n const variable = moduleDeclaration?.getInitializerIfKind(\n SyntaxKind.ObjectLiteralExpression,\n );\n const documentTypes = getObjectProperty(\n variable,\n \"documentTypes\",\n SyntaxKind.ArrayLiteralExpression,\n )\n ?.getElements()\n .map((element) => element.getText())\n .map((text) => text.replace(/[\"']/g, \"\"));\n\n const configProperty = getObjectProperty(\n variable,\n \"config\",\n SyntaxKind.ObjectLiteralExpression,\n );\n\n const id = getStringLiteralValue(\n getObjectProperty(configProperty, \"id\", SyntaxKind.StringLiteral),\n );\n\n const name = 
getStringLiteralValue(\n getObjectProperty(configProperty, \"name\", SyntaxKind.StringLiteral),\n );\n const isApp = documentTypes?.includes(\"powerhouse/document-drive\");\n return { id, name, documentTypes, isApp };\n}\n\n/** Extract the allowed document types from the app config */\nfunction extractAllowedDocumentTypes(filePath: string) {\n const project = new Project({\n tsConfigFilePath: path.resolve(\"tsconfig.json\"),\n compilerOptions: {\n verbatimModuleSyntax: false,\n },\n });\n const sourceFile = project.getSourceFile(filePath);\n if (!sourceFile) return;\n const configVariableDeclaration = getVariableDeclarationByTypeName(\n sourceFile,\n \"PHAppConfig\",\n );\n const configVariable = configVariableDeclaration?.getInitializerIfKind(\n SyntaxKind.ObjectLiteralExpression,\n );\n if (!configVariable) return;\n const allowedDocumentTypes = getArrayLiteralExpressionElementsText(\n getObjectProperty(\n configVariable,\n \"allowedDocumentTypes\",\n SyntaxKind.ArrayLiteralExpression,\n ),\n );\n return allowedDocumentTypes;\n}\n\nfunction getVariableDeclarationByTypeName(\n sourceFile: SourceFile,\n typeName: string,\n) {\n const variableDeclarations = sourceFile.getVariableDeclarations();\n return variableDeclarations.find((declaration) =>\n declaration.getType().getText().includes(typeName),\n );\n}\n\nfunction getStringLiteralValue(stringLiteral: StringLiteral | undefined) {\n return stringLiteral?.getText().replace(/[\"']/g, \"\");\n}\n\nfunction getObjectProperty<T extends SyntaxKind>(\n object: ObjectLiteralExpression | undefined,\n propertyName: string,\n propertyType: T,\n) {\n return object\n ?.getProperty(propertyName)\n ?.asKind(SyntaxKind.PropertyAssignment)\n ?.getChildren()\n .find((child) => child.getKind() === propertyType)\n ?.asKind(propertyType);\n}\n\nfunction getArrayLiteralExpressionElementsText(\n arrayLiteralExpression: ArrayLiteralExpression | undefined,\n) {\n return arrayLiteralExpression\n ?.getElements()\n .map((element) => 
element.getText())\n .map((text) => text.replace(/[\"']/g, \"\"));\n}\n"],"mappings":";;;;;;;;;AA2BA,eAAsB,aAAa,OAAoB;AACrD,OAAM,oBAAoB;AAC1B,OAAM,iBAAiB;AACvB,OAAM,kBAAkB;AACxB,OAAM,gBAAgB;AACtB,OAAM,gCAAgC;AACtC,OAAM,yBAAyB;CAC/B,MAAM,UAAU,IAAI,QAAQ;EAC1B,kBAAkB,KAAK,QAAQ,gBAAgB;EAC/C,iBAAiB,EACf,sBAAsB,OACvB;EACF,CAAC;AACF,iCAAgC,QAAQ;AACxC,oBAAmB,QAAQ;AAC3B,kBAAiB,QAAQ;AACzB,uBAAsB,QAAQ;AAC9B,qBAAoB,QAAQ;AAC5B,+BAA8B,QAAQ;AACtC,YAAW,QAAQ;;;AAIrB,eAAe,qBAAqB;CAClC,MAAM,cAAc,MAAM,aAAa;CACvC,MAAM,kBAAkB,YAAY;CACpC,MAAM,kBACJ,CAAC,CAAC,YAAY,WACd,CAAC,MAAM,QAAQ,YAAY,QAAQ,IACnC,OAAO,YAAY,YAAY,WAC3B,YAAY,UACZ,EAAE;CACR,MAAM,aAAa;EACjB,GAAG;EACH,GAAG;EACJ;CACD,MAAM,aAAa;EACjB,GAAG;EACH,GAAG;EACJ;AACD,aAAY,UAAU;AACtB,aAAY,UAAU;AACtB,OAAM,aAAa,YAAY;;;AAIjC,eAAe,mBAAmB;AAEhC,OAAM,UADgB,KAAK,KAAK,QAAQ,KAAK,EAAE,aAAa,EAC7B,kBAAkB;;;AAInD,eAAe,kBAAkB;AAE/B,OAAM,UADe,KAAK,KAAK,QAAQ,KAAK,EAAE,gBAAgB,EAChC,iBAAiB;;;AAIjD,eAAe,WAAW,UAAoC;AAC5D,KAAI;AACF,QAAM,KAAK,SAAS;AACpB,SAAO;SACD;AACN,SAAO;;;;AAKX,eAAe,qBACb,UACA,SACe;AAEf,KADe,MAAM,WAAW,SAAS,CAEvC,SAAQ,KAAK,uCAAuC,WAAW;AAEjE,OAAM,UAAU,UAAU,QAAQ;;;AAIpC,eAAe,iBAAiB;CAC9B,MAAM,MAAM,QAAQ,KAAK;AAEzB,KAAI;AAEF,QAAM,MAAM,KAAK,KAAK,KAAK,oBAAoB,EAAE,EAAE,WAAW,MAAM,CAAC;AACrE,QAAM,MAAM,KAAK,KAAK,KAAK,SAAS,EAAE,EAAE,WAAW,MAAM,CAAC;AAG1D,QAAM,qBACJ,KAAK,KAAK,KAAK,yCAAyC,EACxD,+BACD;AAGD,QAAM,qBACJ,KAAK,KAAK,KAAK,aAAa,EAC5B,mBACD;AACD,QAAM,qBACJ,KAAK,KAAK,KAAK,oBAAoB,EACnC,kBACD;AACD,QAAM,qBACJ,KAAK,KAAK,KAAK,+BAA+B,EAC9C,0BACD;AACD,QAAM,qBACJ,KAAK,KAAK,KAAK,mCAAmC,EAClD,8BACD;UACM,OAAO;AACd,UAAQ,MAAM,6BAA6B,MAAM;AACjD,QAAM;;;;AAKV,SAAS,iBAAiB,SAAkB;CAC1C,MAAM,YAAY,KAAK,KAAK,QAAQ,KAAK,EAAE,WAAW;CACtD,IAAI,SAAS,QAAQ,cAAc,UAAU;AAC7C,KAAI,CAAC,OACH,UAAS,QAAQ,iBAAiB,UAAU;AAE9C,QAAO,gBAAgB,gBAAgB;AACvC,SAAQ,UAAU;;;AAIpB,SAAS,mBAAmB,SAAkB;CAC5C,MAAM,cAAc,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU;CACvD,MAAM,OAAO,YAAY,aAAa,EAAE,eAAe,MAAM,CAAC,CAC3D,QAAQ,UAAU,MAAM,aAAa,CAAC,CACtC,KAAK,UAAU,MAAM,KAAK;AAC7B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,iBAAiB,KAAK,KAAK,aAAa
,KAAK,aAAa;EAChE,MAAM,SAAS,QAAQ,cAAc,eAAe;AACpD,MAAI,CAAC,OAAQ;EAEb,MAAM,sCADO,OAAO,aAAa,CACgB,QAC/C,0BACA,iCACD;AACD,SAAO,gBAAgB,oCAAoC;AAC3D,UAAQ,UAAU;;;;AAKtB,SAAS,gCAAgC,SAAkB;CACzD,MAAM,cAAc,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU;CACvD,MAAM,OAAO,YAAY,aAAa,EAAE,eAAe,MAAM,CAAC,CAC3D,QAAQ,UAAU,MAAM,aAAa,CAAC,CACtC,KAAK,UAAU,MAAM,KAAK;AAC7B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,gBAAgB,KAAK,KAAK,aAAa,KAAK,WAAW;EAC7D,MAAM,SAAS,QAAQ,cAAc,cAAc;AACnD,MAAI,CAAC,OAAQ;AACb,SAAO,QAAQ;AACf,UAAQ,UAAU;;;;AAKtB,SAAS,sBAAsB,SAAkB;CAC/C,MAAM,cAAc,QAAQ,gBAAgB;AAC5C,MAAK,MAAM,cAAc,aAAa;EACpC,MAAM,OAAO,WAAW,aAAa;AACrC,MAAI,CAAC,KAAK,SAAS,QAAQ,KAAK,CAAC,CAAE;AACnC,MAAI,KAAK,SAAS,SAAS,CAAE;EAC7B,MAAM,OAAO,WAAW,aAAa;AACrC,MAAI,kDAAkD,KAAK,KAAK,CAAE;EAClE,MAAM,cAAc,KAAK,QAAQ,QAAQ,GAAG;AAC5C,aAAW,gBAAgB,YAAY;AACvC,UAAQ,UAAU;;;;AAKtB,SAAS,oBAAoB,SAAkB;CAC7C,MAAM,cAAc,QAAQ,gBAAgB;AAC5C,MAAK,MAAM,cAAc,aAAa;AAEpC,MAAI,CADS,WAAW,aAAa,CAC3B,SAAS,QAAQ,KAAK,CAAC,CAAE;EACnC,MAAM,sBAAsB,WACzB,eAAe,CACf,QACE,cACC,UAAU,SAAS,KAAK,WAAW,yBACtC,CACA,QAAQ,cAAc,UAAU,SAAS,CAAC,SAAS,YAAY,CAAC;AACnE,OAAK,MAAM,qBAAqB,qBAAqB;GACnD,MAAM,kBAAkB,kBACrB,SAAS,CACT,QAAQ,eAAe,GAAG;AAC7B,qBAAkB,gBAAgB,gBAAgB;AAClD,WAAQ,UAAU;;;;;AAMxB,SAAS,8BAA8B,SAAkB;CACvD,MAAM,cAAc,QAAQ,gBAAgB;AAC5C,MAAK,MAAM,cAAc,aAAa;AAEpC,MAAI,CADS,WAAW,aAAa,CAC3B,SAAS,QAAQ,KAAK,CAAC,CAAE;EACnC,MAAM,YAAY,WACf,uBAAuB,CACvB,MAAM,sBACL,kBAAkB,SAAS,CAAC,SAAS,eAAe,CACrD;AACH,MAAI,WAAW;AACb,aAAU,QAAQ;AAClB,WAAQ,UAAU;;;;;AAMxB,SAAS,WAAW,SAAkB;CACpC,MAAM,cAAc,QAAQ,gBAAgB;AAC5C,MAAK,MAAM,cAAc,aAAa;AAEpC,MAAI,CADS,WAAW,aAAa,CAC3B,SAAS,QAAQ,KAAK,CAAC,CAAE;AACnC,aAAW,kBAAkB,KAAA,GAAW;GACtC,iCAAiC;GACjC,mCAAmC,CAAC,kBAAkB,iBAAiB;GACvE,6BAA6B;GAC7B,2BAA2B;GAC5B,CAAC;AACF,aAAW,sBAAsB;AAEjC,UAAQ,UAAU;;;;AAKtB,eAAe,iCAAiC;AAC9C,OAAM,cAAc,EAAE,CAAiB;;;AAIzC,eAAe,0BAA0B;CACvC,MAAM,cAAc,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU;CACvD,MAAM,QAAQ,MAAM,QAAQ,aAAa,EAAE,eAAe,MAAM,CAAC,EAC9D,QAAQ,UAAU,MAAM,aAAa,CAAC,CACtC,KAAK,UAAU,MAAM,KAAK;AAC7B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,iBAAiB,KAAK,KAAK,aAAa,K
AAK,YAAY;EAC/D,MAAM,gBAAgB,KAAK,KAAK,aAAa,KAAK,WAAW;EAC7D,MAAM,gBAAgB,WAAW,eAAe;EAChD,MAAM,eAAe,WAAW,cAAc;AAC9C,MAAI,CAAC,iBAAiB,CAAC,aACrB;EAGF,MAAM,EAAE,IAAI,MAAM,eAAe,UAC/B,wBAFoB,gBAAgB,iBAAiB,cAEf;AAExC,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,UAAU,IAAI,kBAAkB;AAElD,MAAI,CAAC,GACH,OAAM,IAAI,MAAM,UAAU,IAAI,gBAAgB;AAEhD,MAAI,OAAO;GACT,MAAM,iBAAiB,KAAK,KAAK,aAAa,KAAK,YAAY;AAW/D,SAAM,cANO;IACX,SAAS;IACT,OAAO;IACP,YAAY;IACZ,sBARoB,WAAW,eAAe,GAE5C,4BAA4B,eAAe,GAC3C,KAAA;IAMH,CACwB;QAQzB,OAAM,cANO;GACX,YAAY;GACZ,UAAU;GACV,eAAe;GACf,cAAc,gBAAgB;GAC/B,CACwB;;;;AAM/B,SAAS,wBAAwB,UAAkB;CAajD,MAAM,WALoB,iCAPV,IAAI,QAAQ;EAC1B,kBAAkB,KAAK,QAAQ,gBAAgB;EAC/C,iBAAiB,EACf,sBAAsB,OACvB;EACF,CAAC,CACyB,qBAAqB,SAAS,EAGvD,eACD,EAEmC,qBAClC,WAAW,wBACZ;CACD,MAAM,gBAAgB,kBACpB,UACA,iBACA,WAAW,uBACZ,EACG,aAAa,CACd,KAAK,YAAY,QAAQ,SAAS,CAAC,CACnC,KAAK,SAAS,KAAK,QAAQ,SAAS,GAAG,CAAC;CAE3C,MAAM,iBAAiB,kBACrB,UACA,UACA,WAAW,wBACZ;AAUD,QAAO;EAAE,IARE,sBACT,kBAAkB,gBAAgB,MAAM,WAAW,cAAc,CAClE;EAMY,MAJA,sBACX,kBAAkB,gBAAgB,QAAQ,WAAW,cAAc,CACpE;EAEkB;EAAe,OADpB,eAAe,SAAS,4BAA4B;EACzB;;;AAI3C,SAAS,4BAA4B,UAAkB;CAOrD,MAAM,aANU,IAAI,QAAQ;EAC1B,kBAAkB,KAAK,QAAQ,gBAAgB;EAC/C,iBAAiB,EACf,sBAAsB,OACvB;EACF,CAAC,CACyB,cAAc,SAAS;AAClD,KAAI,CAAC,WAAY;CAKjB,MAAM,iBAJ4B,iCAChC,YACA,cACD,EACiD,qBAChD,WAAW,wBACZ;AACD,KAAI,CAAC,eAAgB;AAQrB,QAP6B,sCAC3B,kBACE,gBACA,wBACA,WAAW,uBACZ,CACF;;AAIH,SAAS,iCACP,YACA,UACA;AAEA,QAD6B,WAAW,yBAAyB,CACrC,MAAM,gBAChC,YAAY,SAAS,CAAC,SAAS,CAAC,SAAS,SAAS,CACnD;;AAGH,SAAS,sBAAsB,eAA0C;AACvE,QAAO,eAAe,SAAS,CAAC,QAAQ,SAAS,GAAG;;AAGtD,SAAS,kBACP,QACA,cACA,cACA;AACA,QAAO,QACH,YAAY,aAAa,EACzB,OAAO,WAAW,mBAAmB,EACrC,aAAa,CACd,MAAM,UAAU,MAAM,SAAS,KAAK,aAAa,EAChD,OAAO,aAAa;;AAG1B,SAAS,sCACP,wBACA;AACA,QAAO,wBACH,aAAa,CACd,KAAK,YAAY,QAAQ,SAAS,CAAC,CACnC,KAAK,SAAS,KAAK,QAAQ,SAAS,GAAG,CAAC"}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { writeCliDocsMarkdownFile } from "@powerhousedao/codegen/file-builders";
import { accessToken } from "../src/commands/access-token.js";
import { build, connect, preview, studio } from "../src/commands/connect.js";
import { generate } from "../src/commands/generate.js";
import { inspect } from "../src/commands/inspect.js";
import { install } from "../src/commands/install.js";
import { list } from "../src/commands/list.js";
import { login } from "../src/commands/login.js";
import { migrate } from "../src/commands/migrate.js";
import { phCli } from "../src/commands/ph-cli.js";
import { switchboard } from "../src/commands/switchboard.js";
import { uninstall } from "../src/commands/uninstall.js";
import { vetra } from "../src/commands/vetra.js";

// Ordered list of CLI commands to document, paired with their display names.
// The order here is the order the sections appear in COMMANDS.md.
const commands = [
  ["generate", generate],
  ["vetra", vetra],
  ["connect", connect],
  ["connect studio", studio],
  ["connect build", build],
  ["connect preview", preview],
  ["access token", accessToken],
  ["inspect", inspect],
  ["list", list],
  ["migrate", migrate],
  ["switchboard", switchboard],
  ["login", login],
  ["install", install],
  ["uninstall", uninstall],
].map(([name, command]) => ({ name, command }));

// Top-level CLI description; fall back to "" when the command has none.
const cliDescription = phCli.description ?? "";

/** Write COMMANDS.md describing every registered CLI command. */
async function main() {
  // Prefer the workspace version (set in CI); fall back to the version
  // npm injects for `npm run` invocations.
  const version =
    process.env.WORKSPACE_VERSION || process.env.npm_package_version;
  await writeCliDocsMarkdownFile({
    filePath: "COMMANDS.md",
    docsTitle: `Powerhouse CLI Commands (${version})`,
    docsIntroduction:
      "This document provides detailed information about the available commands in the Powerhouse CLI.",
    cliDescription,
    entries: commands,
  });
}

await main();
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { startSwitchboard } from "@powerhousedao/switchboard/server";
|
|
3
|
+
//#region src/services/switchboard.ts
|
|
4
|
+
// Baseline server options for a local Switchboard instance: port 4001, a
// read-model database under .ph/, the stock "powerhouse" drive, and MCP
// support enabled. `startSwitchboard$1` layers CLI flags on top of this.
const defaultSwitchboardOptions = {
	port: 4001,
	dbPath: path.join(process.cwd(), ".ph", "read-model.db"),
	drive: {
		id: "powerhouse",
		slug: "powerhouse",
		// Metadata shared with every peer of the drive.
		global: {
			name: "Powerhouse",
			icon: "https://ipfs.io/ipfs/QmcaTDBYn8X2psGaXe7iQ6qd8q6oqHLgxvMX9yXf7f9uP7",
		},
		// Local-only replication settings for this node.
		local: {
			availableOffline: true,
			listeners: [],
			sharingType: "public",
			triggers: [],
		},
	},
	mcp: true,
};
|
|
23
|
+
/**
 * Build the default Switchboard options for a Vetra-backed drive.
 *
 * Mirrors `defaultSwitchboardOptions` (same port and db path) but uses the
 * supplied drive id as both id and slug, Vetra branding, and the
 * "vetra-drive-app" editor.
 *
 * @param {string} vetraDriveId - Drive identifier, also used as the slug.
 * @returns {object} Partial server options for `startSwitchboard`.
 */
function getDefaultVetraSwitchboardOptions(vetraDriveId) {
	const vetraDrive = {
		id: vetraDriveId,
		slug: vetraDriveId,
		global: {
			name: "Vetra",
			icon: "https://azure-elderly-tortoise-212.mypinata.cloud/ipfs/bafkreibf2xokjqqtomqjd2w2xxmmhvogq4262csevclxh6sbrjgmjfre5u",
		},
		// Open Vetra drives in the dedicated drive app by default.
		preferredEditor: "vetra-drive-app",
		local: {
			availableOffline: true,
			listeners: [],
			sharingType: "public",
			triggers: [],
		},
	};
	return {
		port: 4001,
		dbPath: path.join(process.cwd(), ".ph", "read-model.db"),
		drive: vetraDrive,
	};
}
|
|
44
|
+
/**
 * Start a Switchboard server from CLI options.
 *
 * CLI-only flags are destructured off `options`; everything left in
 * `serverOptions` is forwarded verbatim and overrides the chosen defaults.
 *
 * @param {object} options - Parsed CLI arguments (SwitchboardArgs).
 * @param {object} [logger] - Optional logger forwarded to the server.
 * @returns {Promise<*>} Whatever `startSwitchboard` resolves with.
 */
async function startSwitchboard$1(options, logger) {
	const {
		packages: packagesString,
		remoteDrives,
		useVetraDrive,
		vetraDriveId,
		useIdentity,
		keypairPath,
		requireIdentity,
		...serverOptions
	} = options;
	// Vetra mode swaps in the Vetra drive defaults; otherwise use the stock
	// "powerhouse" configuration.
	const defaults = useVetraDrive
		? getDefaultVetraSwitchboardOptions(vetraDriveId)
		: defaultSwitchboardOptions;
	// Only build an identity config when some identity-related flag was given;
	// otherwise leave it undefined so the server skips identity setup.
	let identity;
	if (useIdentity || keypairPath || requireIdentity) {
		identity = {
			keypairPath,
			requireExisting: requireIdentity,
		};
	}
	// `--packages` arrives as a single comma-separated string, if at all.
	const packages = packagesString?.split(",");
	// When remote drives are configured, clear the default local drive so the
	// server attaches to the remote ones (explicit serverOptions still win,
	// since they are spread after the cleared value).
	const base =
		remoteDrives.length > 0
			? { ...defaults, drive: void 0, ...serverOptions }
			: { ...defaults, ...serverOptions };
	return await startSwitchboard({
		...base,
		remoteDrives,
		identity,
		packages,
		logger,
	});
}
|
|
69
|
+
//#endregion
|
|
70
|
+
export { startSwitchboard$1 as n, defaultSwitchboardOptions as t };
|
|
71
|
+
|
|
72
|
+
//# sourceMappingURL=switchboard-BclxuF4j.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"switchboard-BclxuF4j.mjs","names":["startSwitchboard","startSwitchboardServer"],"sources":["../src/services/switchboard.ts"],"sourcesContent":["import type {\n IdentityOptions,\n StartServerOptions,\n} from \"@powerhousedao/switchboard/server\";\nimport { startSwitchboard as startSwitchboardServer } from \"@powerhousedao/switchboard/server\";\nimport type { ILogger } from \"document-model\";\nimport path from \"node:path\";\nimport type { SwitchboardArgs } from \"../types.js\";\n\nexport const defaultSwitchboardOptions = {\n port: 4001,\n dbPath: path.join(process.cwd(), \".ph/read-model.db\"),\n drive: {\n id: \"powerhouse\",\n slug: \"powerhouse\",\n global: {\n name: \"Powerhouse\",\n icon: \"https://ipfs.io/ipfs/QmcaTDBYn8X2psGaXe7iQ6qd8q6oqHLgxvMX9yXf7f9uP7\",\n },\n local: {\n availableOffline: true,\n listeners: [],\n sharingType: \"public\",\n triggers: [],\n },\n },\n mcp: true,\n} satisfies StartServerOptions;\n\nfunction getDefaultVetraSwitchboardOptions(\n vetraDriveId: string,\n): Partial<StartServerOptions> {\n return {\n port: 4001,\n dbPath: path.join(process.cwd(), \".ph/read-model.db\"),\n drive: {\n id: vetraDriveId,\n slug: vetraDriveId,\n global: {\n name: \"Vetra\",\n icon: \"https://azure-elderly-tortoise-212.mypinata.cloud/ipfs/bafkreibf2xokjqqtomqjd2w2xxmmhvogq4262csevclxh6sbrjgmjfre5u\",\n },\n preferredEditor: \"vetra-drive-app\",\n local: {\n availableOffline: true,\n listeners: [],\n sharingType: \"public\",\n triggers: [],\n },\n },\n };\n}\n\nexport async function startSwitchboard(\n options: SwitchboardArgs,\n logger?: ILogger,\n) {\n const {\n packages: packagesString,\n remoteDrives,\n useVetraDrive,\n vetraDriveId,\n useIdentity,\n keypairPath,\n requireIdentity,\n ...serverOptions\n } = options;\n\n // Choose the appropriate default configuration\n const defaultOptions = useVetraDrive\n ? 
getDefaultVetraSwitchboardOptions(vetraDriveId)\n : defaultSwitchboardOptions;\n\n // Build identity options if enabled\n const identity: IdentityOptions | undefined =\n useIdentity || keypairPath || requireIdentity\n ? {\n keypairPath,\n requireExisting: requireIdentity,\n }\n : undefined;\n\n const packages = packagesString?.split(\",\");\n\n // Only include the default drive if no remote drives are provided\n const finalOptions =\n remoteDrives.length > 0\n ? {\n ...defaultOptions,\n drive: undefined, // Don't create default drive when syncing with remote\n ...serverOptions,\n remoteDrives,\n identity,\n packages,\n logger,\n }\n : {\n ...defaultOptions,\n ...serverOptions,\n remoteDrives,\n identity,\n packages,\n logger,\n };\n\n const reactor = await startSwitchboardServer(finalOptions);\n\n return reactor;\n}\n"],"mappings":";;;AASA,MAAa,4BAA4B;CACvC,MAAM;CACN,QAAQ,KAAK,KAAK,QAAQ,KAAK,EAAE,oBAAoB;CACrD,OAAO;EACL,IAAI;EACJ,MAAM;EACN,QAAQ;GACN,MAAM;GACN,MAAM;GACP;EACD,OAAO;GACL,kBAAkB;GAClB,WAAW,EAAE;GACb,aAAa;GACb,UAAU,EAAE;GACb;EACF;CACD,KAAK;CACN;AAED,SAAS,kCACP,cAC6B;AAC7B,QAAO;EACL,MAAM;EACN,QAAQ,KAAK,KAAK,QAAQ,KAAK,EAAE,oBAAoB;EACrD,OAAO;GACL,IAAI;GACJ,MAAM;GACN,QAAQ;IACN,MAAM;IACN,MAAM;IACP;GACD,iBAAiB;GACjB,OAAO;IACL,kBAAkB;IAClB,WAAW,EAAE;IACb,aAAa;IACb,UAAU,EAAE;IACb;GACF;EACF;;AAGH,eAAsBA,mBACpB,SACA,QACA;CACA,MAAM,EACJ,UAAU,gBACV,cACA,eACA,cACA,aACA,aACA,iBACA,GAAG,kBACD;CAGJ,MAAM,iBAAiB,gBACnB,kCAAkC,aAAa,GAC/C;CAGJ,MAAM,WACJ,eAAe,eAAe,kBAC1B;EACE;EACA,iBAAiB;EAClB,GACD,KAAA;CAEN,MAAM,WAAW,gBAAgB,MAAM,IAAI;AAyB3C,QAFgB,MAAMC,iBAnBpB,aAAa,SAAS,IAClB;EACE,GAAG;EACH,OAAO,KAAA;EACP,GAAG;EACH;EACA;EACA;EACA;EACD,GACD;EACE,GAAG;EACH,GAAG;EACH;EACA;EACA;EACA;EACD,CAEmD"}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { Kysely, PostgresDialect } from "kysely";
|
|
2
|
+
import { Pool } from "pg";
|
|
3
|
+
import { REACTOR_SCHEMA, getMigrationStatus, runMigrations } from "@powerhousedao/reactor";
|
|
4
|
+
//#region src/services/switchboard-migrate.ts
|
|
5
|
+
/**
 * Returns true when the connection string uses a PostgreSQL scheme.
 * @param url - Database connection string.
 */
function isPostgresUrl(url) {
  const schemes = ["postgresql://", "postgres://"];
  return schemes.some((scheme) => url.startsWith(scheme));
}
|
|
8
|
+
/**
 * Runs (or reports the status of) the reactor's database migrations.
 *
 * Connection-string resolution order: options.dbPath, then
 * PH_REACTOR_DATABASE_URL, then DATABASE_URL. Anything that is not a
 * postgres://, postgresql:// URL is skipped with an informational message
 * (per the log text, PGlite migrations happen automatically on startup).
 *
 * @param options - { dbPath?: string, statusOnly?: boolean }; when
 *   statusOnly is set, migrations are listed but not executed.
 * Exits the process with code 1 on migration failure or unexpected errors.
 */
async function runSwitchboardMigrations(options) {
  const dbPath = options.dbPath ?? process.env.PH_REACTOR_DATABASE_URL ?? process.env.DATABASE_URL;
  if (!dbPath || !isPostgresUrl(dbPath)) {
    console.log("No PostgreSQL URL configured. Skipping migrations.");
    console.log("(PGlite migrations are handled automatically on startup)");
    return;
  }
  console.log(`Database: ${dbPath}`);
  // Kysely instance backed by a pg connection pool on the resolved URL.
  const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({ connectionString: dbPath }) }) });
  try {
    if (options.statusOnly) {
      // Report-only mode: list each migration and whether it has run.
      console.log("\nChecking migration status...");
      const migrations = await getMigrationStatus(db, REACTOR_SCHEMA);
      console.log("\nMigration Status:");
      console.log("=================");
      for (const migration of migrations) {
        const status = migration.executedAt ? `[OK] Executed at ${migration.executedAt.toISOString()}` : "[--] Pending";
        console.log(`${status} - ${migration.name}`);
      }
    } else {
      console.log("\nRunning migrations...");
      const result = await runMigrations(db, REACTOR_SCHEMA);
      if (!result.success) {
        console.error("Migration failed:", result.error?.message);
        // NOTE(review): process.exit terminates immediately, so the
        // finally block (db.destroy) presumably never runs on this path —
        // acceptable since the process is ending, but confirm intent.
        process.exit(1);
      }
      if (result.migrationsExecuted.length === 0) console.log("No migrations to run - database is up to date");
      else {
        console.log(`Successfully executed ${result.migrationsExecuted.length} migration(s):`);
        for (const name of result.migrationsExecuted) console.log(` - ${name}`);
      }
    }
  } catch (error) {
    console.error("Error:", error instanceof Error ? error.message : String(error));
    process.exit(1);
  } finally {
    // Release the connection pool on the normal and caught-error paths.
    await db.destroy();
  }
}
|
|
47
|
+
//#endregion
|
|
48
|
+
export { runSwitchboardMigrations };
|
|
49
|
+
|
|
50
|
+
//# sourceMappingURL=switchboard-migrate-Cwx-8MnF.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"switchboard-migrate-Cwx-8MnF.mjs","names":[],"sources":["../src/services/switchboard-migrate.ts"],"sourcesContent":["import { Kysely, PostgresDialect } from \"kysely\";\nimport { Pool } from \"pg\";\nimport {\n runMigrations,\n getMigrationStatus,\n REACTOR_SCHEMA,\n} from \"@powerhousedao/reactor\";\n\ninterface MigrationOptions {\n dbPath?: string;\n statusOnly?: boolean;\n}\n\nfunction isPostgresUrl(url: string): boolean {\n return url.startsWith(\"postgresql://\") || url.startsWith(\"postgres://\");\n}\n\nexport async function runSwitchboardMigrations(\n options: MigrationOptions,\n): Promise<void> {\n const dbPath =\n options.dbPath ??\n process.env.PH_REACTOR_DATABASE_URL ??\n process.env.DATABASE_URL;\n\n if (!dbPath || !isPostgresUrl(dbPath)) {\n console.log(\"No PostgreSQL URL configured. Skipping migrations.\");\n console.log(\"(PGlite migrations are handled automatically on startup)\");\n return;\n }\n\n console.log(`Database: ${dbPath}`);\n\n const pool = new Pool({ connectionString: dbPath });\n\n const db = new Kysely<any>({\n dialect: new PostgresDialect({ pool }),\n });\n\n try {\n if (options.statusOnly) {\n console.log(\"\\nChecking migration status...\");\n const migrations = await getMigrationStatus(db, REACTOR_SCHEMA);\n\n console.log(\"\\nMigration Status:\");\n console.log(\"=================\");\n\n for (const migration of migrations) {\n const status = migration.executedAt\n ? 
`[OK] Executed at ${migration.executedAt.toISOString()}`\n : \"[--] Pending\";\n console.log(`${status} - ${migration.name}`);\n }\n } else {\n console.log(\"\\nRunning migrations...\");\n const result = await runMigrations(db, REACTOR_SCHEMA);\n\n if (!result.success) {\n console.error(\"Migration failed:\", result.error?.message);\n process.exit(1);\n }\n\n if (result.migrationsExecuted.length === 0) {\n console.log(\"No migrations to run - database is up to date\");\n } else {\n console.log(\n `Successfully executed ${result.migrationsExecuted.length} migration(s):`,\n );\n for (const name of result.migrationsExecuted) {\n console.log(` - ${name}`);\n }\n }\n }\n } catch (error) {\n console.error(\n \"Error:\",\n error instanceof Error ? error.message : String(error),\n );\n process.exit(1);\n } finally {\n await db.destroy();\n }\n}\n"],"mappings":";;;;AAaA,SAAS,cAAc,KAAsB;AAC3C,QAAO,IAAI,WAAW,gBAAgB,IAAI,IAAI,WAAW,cAAc;;AAGzE,eAAsB,yBACpB,SACe;CACf,MAAM,SACJ,QAAQ,UACR,QAAQ,IAAI,2BACZ,QAAQ,IAAI;AAEd,KAAI,CAAC,UAAU,CAAC,cAAc,OAAO,EAAE;AACrC,UAAQ,IAAI,qDAAqD;AACjE,UAAQ,IAAI,2DAA2D;AACvE;;AAGF,SAAQ,IAAI,aAAa,SAAS;CAIlC,MAAM,KAAK,IAAI,OAAY,EACzB,SAAS,IAAI,gBAAgB,EAAE,MAHpB,IAAI,KAAK,EAAE,kBAAkB,QAAQ,CAAC,EAGZ,CAAC,EACvC,CAAC;AAEF,KAAI;AACF,MAAI,QAAQ,YAAY;AACtB,WAAQ,IAAI,iCAAiC;GAC7C,MAAM,aAAa,MAAM,mBAAmB,IAAI,eAAe;AAE/D,WAAQ,IAAI,sBAAsB;AAClC,WAAQ,IAAI,oBAAoB;AAEhC,QAAK,MAAM,aAAa,YAAY;IAClC,MAAM,SAAS,UAAU,aACrB,oBAAoB,UAAU,WAAW,aAAa,KACtD;AACJ,YAAQ,IAAI,GAAG,OAAO,KAAK,UAAU,OAAO;;SAEzC;AACL,WAAQ,IAAI,0BAA0B;GACtC,MAAM,SAAS,MAAM,cAAc,IAAI,eAAe;AAEtD,OAAI,CAAC,OAAO,SAAS;AACnB,YAAQ,MAAM,qBAAqB,OAAO,OAAO,QAAQ;AACzD,YAAQ,KAAK,EAAE;;AAGjB,OAAI,OAAO,mBAAmB,WAAW,EACvC,SAAQ,IAAI,gDAAgD;QACvD;AACL,YAAQ,IACN,yBAAyB,OAAO,mBAAmB,OAAO,gBAC3D;AACD,SAAK,MAAM,QAAQ,OAAO,mBACxB,SAAQ,IAAI,OAAO,OAAO;;;UAIzB,OAAO;AACd,UAAQ,MACN,UACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,CACvD;AACD,UAAQ,KAAK,EAAE;WACP;AACR,QAAM,GAAG,SAAS"}
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
import path, { dirname } from "node:path";
|
|
2
|
+
import crypto from "node:crypto";
|
|
3
|
+
import fs from "node:fs";
|
|
4
|
+
import { homedir } from "node:os";
|
|
5
|
+
//#region src/utils.ts
|
|
6
|
+
// Name of the per-project Powerhouse configuration file.
const POWERHOUSE_CONFIG_FILE = "powerhouse.config.json";
// Global fallback directory (~/.ph) used when no project root is found.
const POWERHOUSE_GLOBAL_DIR = path.join(homedir(), ".ph");
|
|
8
|
+
// Per-package-manager metadata used to build shell commands.
// The "{{dependency}}" placeholder in each command template is substituted
// with the actual package spec by the caller.
// NOTE(review): globalPathRegexp (absent for npm) appears to identify a
// manager-owned global install from a filesystem path — confirm with callers.
const packageManagers = {
  bun: {
    globalPathRegexp: /[\\/].bun[\\/]/,
    installCommand: "bun add {{dependency}}",
    uninstallCommand: "bun remove {{dependency}}",
    workspaceOption: "",
    lockfile: "bun.lock",
    updateCommand: "bun update {{dependency}}",
    buildAffected: "bun run build:affected"
  },
  pnpm: {
    globalPathRegexp: /[\\/]pnpm[\\/]/,
    installCommand: "pnpm add {{dependency}}",
    uninstallCommand: "pnpm remove {{dependency}}",
    workspaceOption: "--workspace-root",
    lockfile: "pnpm-lock.yaml",
    updateCommand: "pnpm update {{dependency}}",
    buildAffected: "pnpm run build:affected"
  },
  yarn: {
    globalPathRegexp: /[\\/]yarn[\\/]/,
    installCommand: "yarn add {{dependency}}",
    uninstallCommand: "yarn remove {{dependency}}",
    workspaceOption: "-W",
    lockfile: "yarn.lock",
    updateCommand: "yarn upgrade {{dependency}}",
    buildAffected: "yarn run build:affected"
  },
  npm: {
    installCommand: "npm install {{dependency}}",
    uninstallCommand: "npm uninstall {{dependency}}",
    workspaceOption: "",
    lockfile: "package-lock.json",
    updateCommand: "npm update {{dependency}} --save",
    buildAffected: "npm run build:affected"
  }
};
|
|
45
|
+
/**
 * Permissive path validator: accepts every directory.
 * Used as the default predicate for findNodeProjectRoot.
 */
function defaultPathValidation() {
  // No constraint — any candidate directory is valid.
  return true;
}
|
|
48
|
+
/**
 * Reports whether `dir` is a Powerhouse project root, i.e. contains
 * a powerhouse.config.json file.
 * @param dir - Directory to check.
 */
function isPowerhouseProject(dir) {
  return fs.existsSync(path.join(dir, POWERHOUSE_CONFIG_FILE));
}
|
|
52
|
+
/**
 * Walks up from `dir` looking for the nearest directory that contains a
 * package.json AND satisfies `pathValidation`.
 * @param dir - Directory to start from.
 * @param pathValidation - Predicate a candidate directory must pass.
 * @returns The matching directory, or null once the filesystem root is hit.
 */
function findNodeProjectRoot(dir, pathValidation = defaultPathValidation) {
  let current = dir;
  for (;;) {
    const hasPackageJson = fs.existsSync(path.join(current, "package.json"));
    if (hasPackageJson && pathValidation(current)) {
      return current;
    }
    const parent = dirname(current);
    // dirname of the root is the root itself — nothing left to search.
    if (parent === current) {
      return null;
    }
    current = parent;
  }
}
|
|
59
|
+
/**
 * Resolves which project the CLI is operating on.
 * Searches upward from the working directory for a Powerhouse project
 * root; falls back to the global ~/.ph directory when none is found.
 * @param debug - When truthy, logs the starting path.
 * @returns { isGlobal, path, packageManager }
 */
function getProjectInfo(debug) {
  const cwd = process.cwd();
  if (debug) {
    console.log(">>> currentPath", cwd);
  }
  const projectPath = findNodeProjectRoot(cwd, isPowerhouseProject);
  const isGlobal = !projectPath;
  const resolvedPath = isGlobal ? POWERHOUSE_GLOBAL_DIR : projectPath;
  return {
    isGlobal,
    path: resolvedPath,
    packageManager: getPackageManagerFromLockfile(resolvedPath)
  };
}
|
|
74
|
+
/**
 * Generates a deterministic drive ID derived from the project path.
 * The same project path always yields the same ID.
 * @param name - Prefix for the drive ID (e.g., "vetra", "powerhouse")
 * @returns ID of the form "{name}-{first 8 hex chars of sha256(projectPath)}"
 */
function generateProjectDriveId(name) {
  const { path: projectPath } = getProjectInfo();
  const digest = crypto.createHash("sha256").update(projectPath).digest("hex");
  const shortHash = digest.slice(0, 8);
  return `${name}-${shortHash}`;
}
|
|
84
|
+
/**
 * Infers the package manager from which lockfile exists in `dir`.
 * Checked in precedence order pnpm, yarn, bun; defaults to npm when
 * no known lockfile is present.
 * @param dir - Directory to inspect.
 */
function getPackageManagerFromLockfile(dir) {
  const precedence = ["pnpm", "yarn", "bun"];
  for (const manager of precedence) {
    if (fs.existsSync(path.join(dir, packageManagers[manager].lockfile))) {
      return manager;
    }
  }
  return "npm";
}
|
|
90
|
+
/**
 * Computes the updated `packages` array for powerhouse.config.json.
 *
 * @param currentPackages - Existing package entries (defaults to []).
 * @param dependencies - Array of { name, version } being (un)installed.
 * @param task - "install" adds/overwrites entries; "uninstall" removes them.
 * @returns A new array; inputs are never mutated.
 */
function updatePackagesArray(currentPackages = [], dependencies, task = "install") {
  // Build the name lookup once instead of scanning `dependencies` for every
  // existing package (the original ran find()/map().includes() inside the
  // filter callbacks — O(n*m) and re-allocating per element).
  const dependencyNames = new Set(dependencies.map((dep) => dep.name));
  if (task === "install") {
    const mappedPackages = dependencies.map((dep) => ({
      packageName: dep.name,
      version: dep.version,
      provider: "registry"
    }));
    // Drop any existing entry being (re)installed, then append the fresh
    // entries so new versions overwrite old ones.
    const kept = currentPackages.filter((pkg) => !dependencyNames.has(pkg.packageName));
    return [...kept, ...mappedPackages];
  }
  // Uninstall: keep only packages that are not being removed.
  return currentPackages.filter((pkg) => !dependencyNames.has(pkg.packageName));
}
|
|
100
|
+
/**
 * Rewrites powerhouse.config.json in `projectPath`, applying the given
 * install/uninstall to its `packages` array.
 * @param dependencies - Array of { name, version } being (un)installed.
 * @param projectPath - Project root containing the config file.
 * @param task - "install" or "uninstall" (defaults to "install").
 * @throws Error when the config file does not exist.
 */
function updateConfigFile(dependencies, projectPath, task = "install") {
  const configPath = path.join(projectPath, POWERHOUSE_CONFIG_FILE);
  if (!fs.existsSync(configPath)) {
    throw new Error(`powerhouse.config.json file not found. projectPath: ${projectPath}`);
  }
  const config = JSON.parse(fs.readFileSync(configPath, "utf-8"));
  config.packages = updatePackagesArray(config.packages, dependencies, task);
  fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
}
|
|
110
|
+
/**
 * Updates the project's style.css to include imports for newly installed
 * packages. Each import is placed after the last existing @import line
 * (or at the top when none exist). Skips packages whose dist/style.css
 * is missing, and imports that are already present.
 * @param dependencies - Array of { name, version } that were installed.
 * @param projectPath - Path to the project root.
 */
function updateStylesFile(dependencies, projectPath) {
  const stylesPath = path.join(projectPath, "style.css");
  if (!fs.existsSync(stylesPath)) {
    console.warn("⚠️ Warning: style.css file not found in project root");
    return;
  }
  const currentStyles = fs.readFileSync(stylesPath, "utf-8");
  let updatedStyles = currentStyles;
  for (const dep of dependencies) {
    const cssPath = `./node_modules/${dep.name}/dist/style.css`;
    const fullCssPath = path.join(projectPath, cssPath);
    const importStatement = `@import '${cssPath}';`;
    if (!fs.existsSync(fullCssPath)) {
      console.warn(`⚠️ Warning: CSS file not found at ${cssPath}`);
      continue;
    }
    // BUG FIX: operate on `updatedStyles` (the original read and replaced
    // `currentStyles`, so when several packages were installed at once
    // each iteration discarded the previous one's import and only the
    // last survived).
    if (updatedStyles.includes(importStatement)) continue;
    const importLines = updatedStyles.split("\n").filter((line) => line.trim().startsWith("@import"));
    const lastImport = importLines[importLines.length - 1];
    if (lastImport) {
      // Insert the new import right after the last existing @import line.
      updatedStyles = updatedStyles.replace(lastImport, `${lastImport}\n${importStatement}`);
    } else {
      // No imports yet: prepend at the top of the file.
      updatedStyles = `${importStatement}\n${updatedStyles}`;
    }
  }
  // Only write if changes were made.
  if (updatedStyles !== currentStyles) fs.writeFileSync(stylesPath, updatedStyles);
}
|
|
139
|
+
/**
 * Removes CSS @import lines for uninstalled packages from the project's
 * style.css. Warns and returns when style.css is missing; only writes
 * the file when at least one line was removed.
 * @param dependencies - Array of { name, version } that were uninstalled.
 * @param projectPath - Path to the project root.
 */
function removeStylesImports(dependencies, projectPath) {
  const stylesPath = path.join(projectPath, "style.css");
  if (!fs.existsSync(stylesPath)) {
    console.warn("⚠️ Warning: style.css file not found in project root");
    return;
  }
  const currentStyles = fs.readFileSync(stylesPath, "utf-8");
  // Precompute the exact import statement expected for each removed package.
  const removedImports = dependencies.map(
    (dep) => `@import '${`./node_modules/${dep.name}/dist/style.css`}';`
  );
  let updatedStyles = currentStyles;
  for (const importStatement of removedImports) {
    const remaining = updatedStyles
      .split("\n")
      .filter((line) => !line.trim().includes(importStatement));
    updatedStyles = remaining.join("\n");
  }
  if (updatedStyles !== currentStyles) fs.writeFileSync(stylesPath, updatedStyles);
}
|
|
158
|
+
//#endregion
|
|
159
|
+
export { updateStylesFile as a, updateConfigFile as i, getProjectInfo as n, removeStylesImports as r, generateProjectDriveId as t };
|
|
160
|
+
|
|
161
|
+
//# sourceMappingURL=utils-DbFSkp_Q.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"utils-DbFSkp_Q.mjs","names":[],"sources":["../src/utils.ts"],"sourcesContent":["import type { PowerhouseConfig } from \"@powerhousedao/config\";\nimport crypto from \"node:crypto\";\nimport fs from \"node:fs\";\nimport { homedir } from \"node:os\";\nimport path, { dirname } from \"node:path\";\nexport const POWERHOUSE_CONFIG_FILE = \"powerhouse.config.json\";\nexport const POWERHOUSE_GLOBAL_DIR = path.join(homedir(), \".ph\");\nexport const SUPPORTED_PACKAGE_MANAGERS = [\"npm\", \"yarn\", \"pnpm\", \"bun\"];\n\nexport const packageManagers = {\n bun: {\n globalPathRegexp: /[\\\\/].bun[\\\\/]/,\n installCommand: \"bun add {{dependency}}\",\n uninstallCommand: \"bun remove {{dependency}}\",\n workspaceOption: \"\",\n lockfile: \"bun.lock\",\n updateCommand: \"bun update {{dependency}}\",\n buildAffected: \"bun run build:affected\",\n },\n pnpm: {\n globalPathRegexp: /[\\\\/]pnpm[\\\\/]/,\n installCommand: \"pnpm add {{dependency}}\",\n uninstallCommand: \"pnpm remove {{dependency}}\",\n workspaceOption: \"--workspace-root\",\n lockfile: \"pnpm-lock.yaml\",\n updateCommand: \"pnpm update {{dependency}}\",\n buildAffected: \"pnpm run build:affected\",\n },\n yarn: {\n globalPathRegexp: /[\\\\/]yarn[\\\\/]/,\n installCommand: \"yarn add {{dependency}}\",\n uninstallCommand: \"yarn remove {{dependency}}\",\n workspaceOption: \"-W\",\n lockfile: \"yarn.lock\",\n updateCommand: \"yarn upgrade {{dependency}}\",\n buildAffected: \"yarn run build:affected\",\n },\n npm: {\n installCommand: \"npm install {{dependency}}\",\n uninstallCommand: \"npm uninstall {{dependency}}\",\n workspaceOption: \"\",\n lockfile: \"package-lock.json\",\n updateCommand: \"npm update {{dependency}} --save\",\n buildAffected: \"npm run build:affected\",\n },\n};\n\ntype PathValidation = (dir: string) => boolean;\n\nexport type PackageManager = \"npm\" | \"yarn\" | \"pnpm\" | \"bun\";\n\nexport type ProjectInfo = {\n isGlobal: boolean;\n path: string;\n packageManager: 
PackageManager;\n};\n\nexport function defaultPathValidation() {\n return true;\n}\n\nexport function isPowerhouseProject(dir: string) {\n const powerhouseConfigPath = path.join(dir, POWERHOUSE_CONFIG_FILE);\n\n return fs.existsSync(powerhouseConfigPath);\n}\n\nexport function findNodeProjectRoot(\n dir: string,\n pathValidation: PathValidation = defaultPathValidation,\n) {\n const packageJsonPath = path.join(dir, \"package.json\");\n\n if (fs.existsSync(packageJsonPath) && pathValidation(dir)) {\n return dir;\n }\n\n const parentDir = dirname(dir);\n\n if (parentDir === dir) {\n return null;\n }\n\n return findNodeProjectRoot(parentDir, pathValidation);\n}\n\nexport function getProjectInfo(debug?: boolean): ProjectInfo {\n const currentPath = process.cwd();\n\n if (debug) {\n console.log(\">>> currentPath\", currentPath);\n }\n\n const projectPath = findNodeProjectRoot(currentPath, isPowerhouseProject);\n\n if (!projectPath) {\n return {\n isGlobal: true,\n path: POWERHOUSE_GLOBAL_DIR,\n packageManager: getPackageManagerFromLockfile(POWERHOUSE_GLOBAL_DIR),\n };\n }\n\n return {\n isGlobal: false,\n path: projectPath,\n packageManager: getPackageManagerFromLockfile(projectPath),\n };\n}\n\n/**\n * Generates a unique drive ID based on the project path.\n * The same project path will always generate the same ID.\n * @param name - The name prefix for the drive ID (e.g., \"vetra\", \"powerhouse\")\n * @returns A unique drive ID in the format \"{name}-{hash}\"\n */\nexport function generateProjectDriveId(name: string): string {\n const projectInfo = getProjectInfo();\n const hash = crypto\n .createHash(\"sha256\")\n .update(projectInfo.path)\n .digest(\"hex\");\n const shortHash = hash.substring(0, 8);\n return `${name}-${shortHash}`;\n}\n\nexport function getPackageManagerFromLockfile(dir: string): PackageManager {\n if (fs.existsSync(path.join(dir, packageManagers.pnpm.lockfile))) {\n return \"pnpm\";\n } else if (fs.existsSync(path.join(dir, 
packageManagers.yarn.lockfile))) {\n return \"yarn\";\n } else if (fs.existsSync(path.join(dir, packageManagers.bun.lockfile))) {\n return \"bun\";\n }\n\n return \"npm\";\n}\n\nexport function updatePackagesArray(\n currentPackages: PowerhouseConfig[\"packages\"] = [],\n dependencies: { name: string; version: string | undefined }[],\n task: \"install\" | \"uninstall\" = \"install\",\n): PowerhouseConfig[\"packages\"] {\n const isInstall = task === \"install\";\n const mappedPackages = dependencies.map((dep) => ({\n packageName: dep.name,\n version: dep.version,\n provider: \"registry\" as const,\n }));\n\n if (isInstall) {\n // Overwrite existing package if version is different\n const filteredPackages = currentPackages.filter(\n (pkg) => !dependencies.find((dep) => dep.name === pkg.packageName),\n );\n return [...filteredPackages, ...mappedPackages];\n }\n\n return currentPackages.filter(\n (pkg) => !dependencies.map((dep) => dep.name).includes(pkg.packageName),\n );\n}\n\n// Modify updateConfigFile to use the new function\nexport function updateConfigFile(\n dependencies: { name: string; version: string | undefined }[],\n projectPath: string,\n task: \"install\" | \"uninstall\" = \"install\",\n) {\n const configPath = path.join(projectPath, POWERHOUSE_CONFIG_FILE);\n\n if (!fs.existsSync(configPath)) {\n throw new Error(\n `powerhouse.config.json file not found. 
projectPath: ${projectPath}`,\n );\n }\n\n const config = JSON.parse(\n fs.readFileSync(configPath, \"utf-8\"),\n ) as PowerhouseConfig;\n\n const updatedConfig: PowerhouseConfig = {\n ...config,\n packages: updatePackagesArray(config.packages, dependencies, task),\n };\n\n fs.writeFileSync(configPath, JSON.stringify(updatedConfig, null, 2));\n}\n\n/**\n * Recursively searches for a specific file by traversing up the directory tree.\n * Starting from the given path, it checks each parent directory until it finds\n * the target file or reaches the root directory.\n *\n * @param startPath - The absolute path of the directory to start searching from\n * @param targetFile - The name of the file to search for (e.g., 'package.json', 'pnpm-workspace.yaml')\n * @returns The absolute path of the directory containing the target file, or null if not found\n *\n * @example\n * // Find the workspace root directory\n * const workspaceRoot = findContainerDirectory('/path/to/project/src', 'pnpm-workspace.yaml');\n *\n * // Find the nearest package.json\n * const packageDir = findContainerDirectory('/path/to/project/src/components', 'package.json');\n */\nexport const findContainerDirectory = (\n startPath: string,\n targetFile: string,\n): string | null => {\n const filePath = path.join(startPath, targetFile);\n\n if (fs.existsSync(filePath)) {\n return startPath;\n }\n\n const parentDir = path.dirname(startPath);\n\n //reached the root directory and haven't found the file\n if (parentDir === startPath) {\n return null;\n }\n\n return findContainerDirectory(parentDir, targetFile);\n};\n\n/**\n * Updates the styles.css file to include imports for newly installed packages\n * @param dependencies - Array of dependencies that were installed\n * @param projectPath - Path to the project root\n */\nexport function updateStylesFile(\n dependencies: { name: string; version: string | undefined }[],\n projectPath: string,\n) {\n const stylesPath = path.join(projectPath, \"style.css\");\n\n 
// Check if styles.css exists\n if (!fs.existsSync(stylesPath)) {\n console.warn(\"⚠️ Warning: style.css file not found in project root\");\n return;\n }\n\n const currentStyles = fs.readFileSync(stylesPath, \"utf-8\");\n let updatedStyles = currentStyles;\n\n for (const dep of dependencies) {\n const cssPath = `./node_modules/${dep.name}/dist/style.css`;\n const fullCssPath = path.join(projectPath, cssPath);\n const importStatement = `@import '${cssPath}';`;\n\n // Check if the CSS file exists\n if (!fs.existsSync(fullCssPath)) {\n console.warn(`⚠️ Warning: CSS file not found at ${cssPath}`);\n continue;\n }\n\n // Check if import already exists\n if (currentStyles.includes(importStatement)) {\n continue;\n }\n\n // Find the last @import statement\n const importLines = currentStyles\n .split(\"\\n\")\n .filter((line) => line.trim().startsWith(\"@import\"));\n const lastImport = importLines[importLines.length - 1];\n\n if (lastImport) {\n // Insert new import after the last existing import\n updatedStyles = currentStyles.replace(\n lastImport,\n `${lastImport}\\n${importStatement}`,\n );\n } else {\n // If no imports exist, add at the top of the file\n updatedStyles = `${importStatement}\\n${currentStyles}`;\n }\n }\n\n // Only write if changes were made\n if (updatedStyles !== currentStyles) {\n fs.writeFileSync(stylesPath, updatedStyles);\n }\n}\n\n/**\n * Removes CSS imports for uninstalled packages from styles.css\n */\nexport function removeStylesImports(\n dependencies: { name: string; version: string | undefined }[],\n projectPath: string,\n) {\n const stylesPath = path.join(projectPath, \"style.css\");\n\n // Check if styles.css exists\n if (!fs.existsSync(stylesPath)) {\n console.warn(\"⚠️ Warning: style.css file not found in project root\");\n return;\n }\n\n const currentStyles = fs.readFileSync(stylesPath, \"utf-8\");\n let updatedStyles = currentStyles;\n\n for (const dep of dependencies) {\n const cssPath = 
`./node_modules/${dep.name}/dist/style.css`;\n const importStatement = `@import '${cssPath}';`;\n\n // Remove the import line if it exists\n const lines = updatedStyles.split(\"\\n\");\n const filteredLines = lines.filter(\n (line) => !line.trim().includes(importStatement),\n );\n\n if (filteredLines.length !== lines.length) {\n updatedStyles = filteredLines.join(\"\\n\");\n }\n }\n\n // Only write if changes were made\n if (updatedStyles !== currentStyles) {\n fs.writeFileSync(stylesPath, updatedStyles);\n }\n}\n"],"mappings":";;;;;AAKA,MAAa,yBAAyB;AACtC,MAAa,wBAAwB,KAAK,KAAK,SAAS,EAAE,MAAM;AAGhE,MAAa,kBAAkB;CAC7B,KAAK;EACH,kBAAkB;EAClB,gBAAgB;EAChB,kBAAkB;EAClB,iBAAiB;EACjB,UAAU;EACV,eAAe;EACf,eAAe;EAChB;CACD,MAAM;EACJ,kBAAkB;EAClB,gBAAgB;EAChB,kBAAkB;EAClB,iBAAiB;EACjB,UAAU;EACV,eAAe;EACf,eAAe;EAChB;CACD,MAAM;EACJ,kBAAkB;EAClB,gBAAgB;EAChB,kBAAkB;EAClB,iBAAiB;EACjB,UAAU;EACV,eAAe;EACf,eAAe;EAChB;CACD,KAAK;EACH,gBAAgB;EAChB,kBAAkB;EAClB,iBAAiB;EACjB,UAAU;EACV,eAAe;EACf,eAAe;EAChB;CACF;AAYD,SAAgB,wBAAwB;AACtC,QAAO;;AAGT,SAAgB,oBAAoB,KAAa;CAC/C,MAAM,uBAAuB,KAAK,KAAK,KAAK,uBAAuB;AAEnE,QAAO,GAAG,WAAW,qBAAqB;;AAG5C,SAAgB,oBACd,KACA,iBAAiC,uBACjC;CACA,MAAM,kBAAkB,KAAK,KAAK,KAAK,eAAe;AAEtD,KAAI,GAAG,WAAW,gBAAgB,IAAI,eAAe,IAAI,CACvD,QAAO;CAGT,MAAM,YAAY,QAAQ,IAAI;AAE9B,KAAI,cAAc,IAChB,QAAO;AAGT,QAAO,oBAAoB,WAAW,eAAe;;AAGvD,SAAgB,eAAe,OAA8B;CAC3D,MAAM,cAAc,QAAQ,KAAK;AAEjC,KAAI,MACF,SAAQ,IAAI,mBAAmB,YAAY;CAG7C,MAAM,cAAc,oBAAoB,aAAa,oBAAoB;AAEzE,KAAI,CAAC,YACH,QAAO;EACL,UAAU;EACV,MAAM;EACN,gBAAgB,8BAA8B,sBAAsB;EACrE;AAGH,QAAO;EACL,UAAU;EACV,MAAM;EACN,gBAAgB,8BAA8B,YAAY;EAC3D;;;;;;;;AASH,SAAgB,uBAAuB,MAAsB;CAC3D,MAAM,cAAc,gBAAgB;AAMpC,QAAO,GAAG,KAAK,GALF,OACV,WAAW,SAAS,CACpB,OAAO,YAAY,KAAK,CACxB,OAAO,MAAM,CACO,UAAU,GAAG,EAAE;;AAIxC,SAAgB,8BAA8B,KAA6B;AACzE,KAAI,GAAG,WAAW,KAAK,KAAK,KAAK,gBAAgB,KAAK,SAAS,CAAC,CAC9D,QAAO;UACE,GAAG,WAAW,KAAK,KAAK,KAAK,gBAAgB,KAAK,SAAS,CAAC,CACrE,QAAO;UACE,GAAG,WAAW,KAAK,KAAK,KAAK,gBAAgB,IAAI,SAAS,CAAC,CACpE,QAAO;AAGT,QAAO;;AAGT,SAAgB,oBACd,kBAAgD,
EAAE,EAClD,cACA,OAAgC,WACF;CAC9B,MAAM,YAAY,SAAS;CAC3B,MAAM,iBAAiB,aAAa,KAAK,SAAS;EAChD,aAAa,IAAI;EACjB,SAAS,IAAI;EACb,UAAU;EACX,EAAE;AAEH,KAAI,UAKF,QAAO,CAAC,GAHiB,gBAAgB,QACtC,QAAQ,CAAC,aAAa,MAAM,QAAQ,IAAI,SAAS,IAAI,YAAY,CACnE,EAC4B,GAAG,eAAe;AAGjD,QAAO,gBAAgB,QACpB,QAAQ,CAAC,aAAa,KAAK,QAAQ,IAAI,KAAK,CAAC,SAAS,IAAI,YAAY,CACxE;;AAIH,SAAgB,iBACd,cACA,aACA,OAAgC,WAChC;CACA,MAAM,aAAa,KAAK,KAAK,aAAa,uBAAuB;AAEjE,KAAI,CAAC,GAAG,WAAW,WAAW,CAC5B,OAAM,IAAI,MACR,uDAAuD,cACxD;CAGH,MAAM,SAAS,KAAK,MAClB,GAAG,aAAa,YAAY,QAAQ,CACrC;CAED,MAAM,gBAAkC;EACtC,GAAG;EACH,UAAU,oBAAoB,OAAO,UAAU,cAAc,KAAK;EACnE;AAED,IAAG,cAAc,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;;;;;;AA4CtE,SAAgB,iBACd,cACA,aACA;CACA,MAAM,aAAa,KAAK,KAAK,aAAa,YAAY;AAGtD,KAAI,CAAC,GAAG,WAAW,WAAW,EAAE;AAC9B,UAAQ,KAAK,uDAAuD;AACpE;;CAGF,MAAM,gBAAgB,GAAG,aAAa,YAAY,QAAQ;CAC1D,IAAI,gBAAgB;AAEpB,MAAK,MAAM,OAAO,cAAc;EAC9B,MAAM,UAAU,kBAAkB,IAAI,KAAK;EAC3C,MAAM,cAAc,KAAK,KAAK,aAAa,QAAQ;EACnD,MAAM,kBAAkB,YAAY,QAAQ;AAG5C,MAAI,CAAC,GAAG,WAAW,YAAY,EAAE;AAC/B,WAAQ,KAAK,qCAAqC,UAAU;AAC5D;;AAIF,MAAI,cAAc,SAAS,gBAAgB,CACzC;EAIF,MAAM,cAAc,cACjB,MAAM,KAAK,CACX,QAAQ,SAAS,KAAK,MAAM,CAAC,WAAW,UAAU,CAAC;EACtD,MAAM,aAAa,YAAY,YAAY,SAAS;AAEpD,MAAI,WAEF,iBAAgB,cAAc,QAC5B,YACA,GAAG,WAAW,IAAI,kBACnB;MAGD,iBAAgB,GAAG,gBAAgB,IAAI;;AAK3C,KAAI,kBAAkB,cACpB,IAAG,cAAc,YAAY,cAAc;;;;;AAO/C,SAAgB,oBACd,cACA,aACA;CACA,MAAM,aAAa,KAAK,KAAK,aAAa,YAAY;AAGtD,KAAI,CAAC,GAAG,WAAW,WAAW,EAAE;AAC9B,UAAQ,KAAK,uDAAuD;AACpE;;CAGF,MAAM,gBAAgB,GAAG,aAAa,YAAY,QAAQ;CAC1D,IAAI,gBAAgB;AAEpB,MAAK,MAAM,OAAO,cAAc;EAE9B,MAAM,kBAAkB,YADR,kBAAkB,IAAI,KAAK,iBACC;EAG5C,MAAM,QAAQ,cAAc,MAAM,KAAK;EACvC,MAAM,gBAAgB,MAAM,QACzB,SAAS,CAAC,KAAK,MAAM,CAAC,SAAS,gBAAgB,CACjD;AAED,MAAI,cAAc,WAAW,MAAM,OACjC,iBAAgB,cAAc,KAAK,KAAK;;AAK5C,KAAI,kBAAkB,cACpB,IAAG,cAAc,YAAY,cAAc"}
|