@powerhousedao/ph-cli 6.0.2-staging.1 → 6.0.2-staging.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/assign-env-vars-B-oPplBd.mjs +15 -0
- package/dist/assign-env-vars-B-oPplBd.mjs.map +1 -0
- package/dist/auth-mwX4ugZt.mjs +15 -0
- package/dist/auth-mwX4ugZt.mjs.map +1 -0
- package/dist/build-Fq1pNRsl.mjs +33 -0
- package/dist/build-Fq1pNRsl.mjs.map +1 -0
- package/dist/cli-CYgcGGKv.mjs +140 -0
- package/dist/cli-CYgcGGKv.mjs.map +1 -0
- package/dist/cli.d.mts +1 -0
- package/dist/cli.mjs +860 -0
- package/dist/cli.mjs.map +1 -0
- package/dist/connect-build-DUiJQGfx.mjs +35 -0
- package/dist/connect-build-DUiJQGfx.mjs.map +1 -0
- package/dist/connect-preview-hSphy1QP.mjs +27 -0
- package/dist/connect-preview-hSphy1QP.mjs.map +1 -0
- package/dist/connect-studio-CFH0kGW1.mjs +28 -0
- package/dist/connect-studio-CFH0kGW1.mjs.map +1 -0
- package/dist/connect-studio-CV-T5IfA.mjs +3 -0
- package/dist/generate-all-yTycRkd3.mjs +10 -0
- package/dist/generate-all-yTycRkd3.mjs.map +1 -0
- package/dist/generate-app-hSK1SlOn.mjs +31 -0
- package/dist/generate-app-hSK1SlOn.mjs.map +1 -0
- package/dist/generate-document-model-ZowVACXL.mjs +17 -0
- package/dist/generate-document-model-ZowVACXL.mjs.map +1 -0
- package/dist/generate-editor-C53z-03Q.mjs +31 -0
- package/dist/generate-editor-C53z-03Q.mjs.map +1 -0
- package/dist/generate-processor-BS4F98d6.mjs +22 -0
- package/dist/generate-processor-BS4F98d6.mjs.map +1 -0
- package/dist/generate-subgraph-D1cYT9GQ.mjs +20 -0
- package/dist/generate-subgraph-D1cYT9GQ.mjs.map +1 -0
- package/dist/init-BgEVXxOR.mjs +124 -0
- package/dist/init-BgEVXxOR.mjs.map +1 -0
- package/dist/inspect-DRvimHBb.mjs +45 -0
- package/dist/inspect-DRvimHBb.mjs.map +1 -0
- package/dist/logout-BpJEO2Ij.mjs +230 -0
- package/dist/logout-BpJEO2Ij.mjs.map +1 -0
- package/dist/migrate-CSaC4bDh.mjs +11 -0
- package/dist/migrate-CSaC4bDh.mjs.map +1 -0
- package/dist/scripts/generate-commands-docs.ts +14 -0
- package/dist/switchboard-Bl2llp5U.mjs +72 -0
- package/dist/switchboard-Bl2llp5U.mjs.map +1 -0
- package/dist/switchboard-DUlj8v2F.mjs +2 -0
- package/dist/switchboard-migrate-BiQJfIrc.mjs +50 -0
- package/dist/switchboard-migrate-BiQJfIrc.mjs.map +1 -0
- package/dist/utils-C6_gv4nB.mjs +162 -0
- package/dist/utils-C6_gv4nB.mjs.map +1 -0
- package/dist/vetra-D2zbrNY1.mjs +420 -0
- package/dist/vetra-D2zbrNY1.mjs.map +1 -0
- package/package.json +35 -26
- package/dist/bundle.d.ts +0 -2
- package/dist/bundle.d.ts.map +0 -1
- package/dist/scripts/generate-commands-docs.d.ts +0 -2
- package/dist/scripts/generate-commands-docs.d.ts.map +0 -1
- package/dist/src/cli.d.ts +0 -3
- package/dist/src/cli.d.ts.map +0 -1
- package/dist/src/cli.js +0 -395510
- package/dist/src/commands/access-token.d.ts +0 -14
- package/dist/src/commands/access-token.d.ts.map +0 -1
- package/dist/src/commands/connect.d.ts +0 -249
- package/dist/src/commands/connect.d.ts.map +0 -1
- package/dist/src/commands/generate.d.ts +0 -62
- package/dist/src/commands/generate.d.ts.map +0 -1
- package/dist/src/commands/index.d.ts +0 -13
- package/dist/src/commands/index.d.ts.map +0 -1
- package/dist/src/commands/inspect.d.ts +0 -12
- package/dist/src/commands/inspect.d.ts.map +0 -1
- package/dist/src/commands/install.d.ts +0 -22
- package/dist/src/commands/install.d.ts.map +0 -1
- package/dist/src/commands/list.d.ts +0 -10
- package/dist/src/commands/list.d.ts.map +0 -1
- package/dist/src/commands/login.d.ts +0 -24
- package/dist/src/commands/login.d.ts.map +0 -1
- package/dist/src/commands/logout.d.ts +0 -6
- package/dist/src/commands/logout.d.ts.map +0 -1
- package/dist/src/commands/migrate.d.ts +0 -12
- package/dist/src/commands/migrate.d.ts.map +0 -1
- package/dist/src/commands/ph-cli-commands.d.ts +0 -419
- package/dist/src/commands/ph-cli-commands.d.ts.map +0 -1
- package/dist/src/commands/ph-cli-help.d.ts +0 -400
- package/dist/src/commands/ph-cli-help.d.ts.map +0 -1
- package/dist/src/commands/ph-cli.d.ts +0 -506
- package/dist/src/commands/ph-cli.d.ts.map +0 -1
- package/dist/src/commands/service.d.ts +0 -12
- package/dist/src/commands/service.d.ts.map +0 -1
- package/dist/src/commands/switchboard.d.ts +0 -46
- package/dist/src/commands/switchboard.d.ts.map +0 -1
- package/dist/src/commands/uninstall.d.ts +0 -22
- package/dist/src/commands/uninstall.d.ts.map +0 -1
- package/dist/src/commands/vetra.d.ts +0 -64
- package/dist/src/commands/vetra.d.ts.map +0 -1
- package/dist/src/get-version.d.ts +0 -2
- package/dist/src/get-version.d.ts.map +0 -1
- package/dist/src/help.d.ts +0 -65
- package/dist/src/help.d.ts.map +0 -1
- package/dist/src/services/auth.d.ts +0 -21
- package/dist/src/services/auth.d.ts.map +0 -1
- package/dist/src/services/connect-build.d.ts +0 -3
- package/dist/src/services/connect-build.d.ts.map +0 -1
- package/dist/src/services/connect-preview.d.ts +0 -3
- package/dist/src/services/connect-preview.d.ts.map +0 -1
- package/dist/src/services/connect-studio.d.ts +0 -4
- package/dist/src/services/connect-studio.d.ts.map +0 -1
- package/dist/src/services/generate.d.ts +0 -3
- package/dist/src/services/generate.d.ts.map +0 -1
- package/dist/src/services/inspect.d.ts +0 -3
- package/dist/src/services/inspect.d.ts.map +0 -1
- package/dist/src/services/migrate.d.ts +0 -3
- package/dist/src/services/migrate.d.ts.map +0 -1
- package/dist/src/services/switchboard-migrate.d.ts +0 -7
- package/dist/src/services/switchboard-migrate.d.ts.map +0 -1
- package/dist/src/services/switchboard.d.ts +0 -23
- package/dist/src/services/switchboard.d.ts.map +0 -1
- package/dist/src/services/vetra.d.ts +0 -3
- package/dist/src/services/vetra.d.ts.map +0 -1
- package/dist/src/types.d.ts +0 -28
- package/dist/src/types.d.ts.map +0 -1
- package/dist/src/utils/assign-env-vars.d.ts +0 -3
- package/dist/src/utils/assign-env-vars.d.ts.map +0 -1
- package/dist/src/utils/configure-vetra-github-url.d.ts +0 -12
- package/dist/src/utils/configure-vetra-github-url.d.ts.map +0 -1
- package/dist/src/utils/constants.d.ts +0 -2
- package/dist/src/utils/constants.d.ts.map +0 -1
- package/dist/src/utils/resolve-connect-dirs.d.ts +0 -5
- package/dist/src/utils/resolve-connect-dirs.d.ts.map +0 -1
- package/dist/src/utils.d.ts +0 -112
- package/dist/src/utils.d.ts.map +0 -1
- package/dist/tsconfig.tsbuildinfo +0 -1
- package/dist/vitest.config.d.ts +0 -3
- package/dist/vitest.config.d.ts.map +0 -1
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
import { DEFAULT_EXPIRY_SECONDS, accessTokenArgs, getPowerhouseProjectInfo, listArgs, loginArgs } from "@powerhousedao/shared/clis";
|
|
2
|
+
import { command } from "cmd-ts";
|
|
3
|
+
import { getConfig as getConfig$1 } from "@powerhousedao/config/node";
|
|
4
|
+
//#region src/get-version.ts
|
|
5
|
+
function getVersion() {
|
|
6
|
+
return process.env.WORKSPACE_VERSION || process.env.npm_package_version || "unknown";
|
|
7
|
+
}
|
|
8
|
+
//#endregion
|
|
9
|
+
//#region src/commands/access-token.ts
|
|
10
|
+
const accessToken = command({
|
|
11
|
+
name: "access-token",
|
|
12
|
+
description: `
|
|
13
|
+
The access-token command generates a bearer token for API authentication. This token
|
|
14
|
+
can be used to authenticate requests to Powerhouse APIs like reactor-api (Switchboard).
|
|
15
|
+
|
|
16
|
+
This command:
|
|
17
|
+
1. Uses your CLI's cryptographic identity (DID) to sign a verifiable credential
|
|
18
|
+
2. Creates a JWT bearer token with configurable expiration
|
|
19
|
+
3. Outputs the token to stdout (info to stderr) for easy piping
|
|
20
|
+
|
|
21
|
+
Prerequisites:
|
|
22
|
+
You must have a cryptographic identity. Run 'ph login' first to:
|
|
23
|
+
- Generate a keypair (stored in .ph/.keypair.json)
|
|
24
|
+
- Optionally link your Ethereum address (stored in .ph/.renown.json)
|
|
25
|
+
|
|
26
|
+
Token Details:
|
|
27
|
+
The generated token is a JWT (JSON Web Token) containing:
|
|
28
|
+
- Issuer (iss): Your CLI's DID (did:key:...)
|
|
29
|
+
- Subject (sub): Your CLI's DID
|
|
30
|
+
- Credential Subject: Chain ID, network ID, and address (if authenticated)
|
|
31
|
+
- Expiration (exp): Based on --expiry option
|
|
32
|
+
- Audience (aud): If --audience is specified
|
|
33
|
+
|
|
34
|
+
Output:
|
|
35
|
+
- Token information (DID, address, expiry) is printed to stderr
|
|
36
|
+
- The token itself is printed to stdout for easy piping/copying
|
|
37
|
+
|
|
38
|
+
This allows you to use the command in scripts:
|
|
39
|
+
TOKEN=$(ph access-token)
|
|
40
|
+
curl -H "Authorization: Bearer $TOKEN" http://localhost:4001/graphql
|
|
41
|
+
|
|
42
|
+
Usage with APIs:
|
|
43
|
+
Generate token and use with curl
|
|
44
|
+
TOKEN=$(ph access-token --expiry 1d)
|
|
45
|
+
curl -X POST http://localhost:4001/graphql \\
|
|
46
|
+
-H "Content-Type: application/json" \\
|
|
47
|
+
-H "Authorization: Bearer $TOKEN" \\
|
|
48
|
+
-d '{"query": "{ drives { id name } }"}'
|
|
49
|
+
|
|
50
|
+
Export as environment variable
|
|
51
|
+
export PH_ACCESS_TOKEN=$(ph access-token)
|
|
52
|
+
|
|
53
|
+
Notes:
|
|
54
|
+
- Tokens are self-signed using your CLI's private key
|
|
55
|
+
- No network request is made; tokens are generated locally
|
|
56
|
+
- The recipient API must trust your CLI's DID to accept the token
|
|
57
|
+
- For reactor-api, ensure AUTH_ENABLED=true to require authentication
|
|
58
|
+
`,
|
|
59
|
+
args: accessTokenArgs,
|
|
60
|
+
handler: async (args) => {
|
|
61
|
+
if (args.debug) console.log(args);
|
|
62
|
+
const { generateAccessToken, parseExpiry, formatExpiry } = await import("@renown/sdk/node");
|
|
63
|
+
const { getRenown } = await import("./auth-mwX4ugZt.mjs");
|
|
64
|
+
const renown = await getRenown();
|
|
65
|
+
let expiresIn = DEFAULT_EXPIRY_SECONDS;
|
|
66
|
+
if (args.expiry) expiresIn = parseExpiry(args.expiry);
|
|
67
|
+
const result = await generateAccessToken(renown, {
|
|
68
|
+
expiresIn,
|
|
69
|
+
aud: args.audience
|
|
70
|
+
});
|
|
71
|
+
console.error(`CLI DID: ${result.did}`);
|
|
72
|
+
console.error(`ETH Address: ${result.address}`);
|
|
73
|
+
console.error(`Token expires in: ${formatExpiry(expiresIn)}`);
|
|
74
|
+
console.error("");
|
|
75
|
+
console.log(result.token);
|
|
76
|
+
process.exit(0);
|
|
77
|
+
}
|
|
78
|
+
});
|
|
79
|
+
//#endregion
|
|
80
|
+
//#region src/commands/list.ts
|
|
81
|
+
const list = command({
|
|
82
|
+
name: "list",
|
|
83
|
+
description: `
|
|
84
|
+
The list command displays information about installed Powerhouse packages in your project.
|
|
85
|
+
It reads the powerhouse.config.json file and shows the packages that are currently installed.
|
|
86
|
+
|
|
87
|
+
This command:
|
|
88
|
+
1. Examines your project configuration
|
|
89
|
+
2. Lists all installed Powerhouse packages
|
|
90
|
+
3. Provides a clear overview of your project's dependencies
|
|
91
|
+
4. Helps you manage and track your Powerhouse components
|
|
92
|
+
`,
|
|
93
|
+
aliases: ["l"],
|
|
94
|
+
args: listArgs,
|
|
95
|
+
handler: async (args) => {
|
|
96
|
+
if (args.debug) console.log(args);
|
|
97
|
+
try {
|
|
98
|
+
const projectInfo = await getPowerhouseProjectInfo();
|
|
99
|
+
console.log("\n>>> projectInfo", projectInfo);
|
|
100
|
+
const phConfig = getConfig$1(projectInfo.projectPath + "/powerhouse.config.json");
|
|
101
|
+
if (!phConfig.packages || phConfig.packages.length === 0) {
|
|
102
|
+
console.log("No packages found in the project");
|
|
103
|
+
return;
|
|
104
|
+
}
|
|
105
|
+
console.log("Installed Packages:\n");
|
|
106
|
+
phConfig.packages.forEach((pkg) => {
|
|
107
|
+
console.log(pkg.packageName);
|
|
108
|
+
});
|
|
109
|
+
} catch (e) {
|
|
110
|
+
console.log("No packages found in the project");
|
|
111
|
+
}
|
|
112
|
+
process.exit(0);
|
|
113
|
+
}
|
|
114
|
+
});
|
|
115
|
+
//#endregion
|
|
116
|
+
//#region src/commands/login.ts
|
|
117
|
+
const login = command({
|
|
118
|
+
name: "login",
|
|
119
|
+
description: `
|
|
120
|
+
The login command authenticates you with Renown using your Ethereum wallet. This enables
|
|
121
|
+
the CLI to act on behalf of your Ethereum identity for authenticated operations.
|
|
122
|
+
|
|
123
|
+
This command:
|
|
124
|
+
1. Generates or loads a cryptographic identity (DID) for the CLI
|
|
125
|
+
2. Opens your browser to the Renown authentication page
|
|
126
|
+
3. You authorize the CLI's DID to act on behalf of your Ethereum address
|
|
127
|
+
4. Stores the credentials locally in .ph/.renown.json
|
|
128
|
+
`,
|
|
129
|
+
args: loginArgs,
|
|
130
|
+
handler: async (args) => {
|
|
131
|
+
if (args.debug) console.log(args);
|
|
132
|
+
const { getRenown } = await import("./auth-mwX4ugZt.mjs");
|
|
133
|
+
const renown = await getRenown(args.renownUrl);
|
|
134
|
+
if (args.showDid) {
|
|
135
|
+
console.log(renown.did);
|
|
136
|
+
process.exit(0);
|
|
137
|
+
}
|
|
138
|
+
if (args.status) {
|
|
139
|
+
const { getAuthStatus } = await import("@renown/sdk/node");
|
|
140
|
+
const status = getAuthStatus(renown);
|
|
141
|
+
if (!status.authenticated || !status.address) {
|
|
142
|
+
console.log("Not authenticated with an Ethereum address.");
|
|
143
|
+
console.log("Run \"ph login\" to authenticate.");
|
|
144
|
+
} else {
|
|
145
|
+
console.log("Authenticated");
|
|
146
|
+
console.log(` ETH Address: ${status.address}`);
|
|
147
|
+
console.log(` User DID: ${status.userDid}`);
|
|
148
|
+
console.log(` Chain ID: ${status.chainId}`);
|
|
149
|
+
console.log(` CLI DID: ${status.cliDid}`);
|
|
150
|
+
console.log(` Authenticated at: ${status.authenticatedAt?.toLocaleString()}`);
|
|
151
|
+
console.log(` Renown URL: ${status.baseUrl}`);
|
|
152
|
+
}
|
|
153
|
+
process.exit(0);
|
|
154
|
+
}
|
|
155
|
+
if (args.logout) {
|
|
156
|
+
await handleLogout();
|
|
157
|
+
process.exit(0);
|
|
158
|
+
}
|
|
159
|
+
const { browserLogin } = await import("@renown/sdk/node");
|
|
160
|
+
console.debug("Initializing cryptographic identity...");
|
|
161
|
+
console.log(`CLI DID: ${renown.did}`);
|
|
162
|
+
try {
|
|
163
|
+
const timeoutMs = args.timeout ? args.timeout * 1e3 : void 0;
|
|
164
|
+
const result = await browserLogin(renown, {
|
|
165
|
+
renownUrl: args.renownUrl,
|
|
166
|
+
timeoutMs,
|
|
167
|
+
onLoginUrl: (url, sessionId) => {
|
|
168
|
+
console.log("Opening browser for authentication...");
|
|
169
|
+
console.log(`Session ID: ${sessionId.slice(0, 8)}...`);
|
|
170
|
+
console.log(`Login URL: ${url}`);
|
|
171
|
+
console.log();
|
|
172
|
+
console.log("Waiting for authentication in browser");
|
|
173
|
+
console.log(`(timeout in ${(timeoutMs ?? 3e5) / 1e3} seconds)`);
|
|
174
|
+
console.log();
|
|
175
|
+
console.log("Please connect your wallet and authorize this CLI to act on your behalf.");
|
|
176
|
+
console.log();
|
|
177
|
+
process.stdout.write("Waiting");
|
|
178
|
+
},
|
|
179
|
+
onPollTick: () => process.stdout.write("."),
|
|
180
|
+
onBrowserOpenFailed: (url) => {
|
|
181
|
+
console.error("Failed to open browser automatically.");
|
|
182
|
+
console.log(`Please open this URL manually: ${url}`);
|
|
183
|
+
}
|
|
184
|
+
});
|
|
185
|
+
console.log();
|
|
186
|
+
console.log();
|
|
187
|
+
console.log("Successfully authenticated!");
|
|
188
|
+
console.log(` ETH Address: ${result.user.address}`);
|
|
189
|
+
console.log(` User DID: ${result.user.did}`);
|
|
190
|
+
console.log(` CLI DID: ${result.cliDid}`);
|
|
191
|
+
console.log();
|
|
192
|
+
console.log("The CLI can now act on behalf of your Ethereum identity.");
|
|
193
|
+
} catch (error) {
|
|
194
|
+
console.log();
|
|
195
|
+
throw error;
|
|
196
|
+
}
|
|
197
|
+
process.exit(0);
|
|
198
|
+
}
|
|
199
|
+
});
|
|
200
|
+
async function handleLogout() {
|
|
201
|
+
const { getRenown } = await import("./auth-mwX4ugZt.mjs");
|
|
202
|
+
const renown = await getRenown();
|
|
203
|
+
if (!renown.user) {
|
|
204
|
+
console.log("Not currently authenticated.");
|
|
205
|
+
return;
|
|
206
|
+
}
|
|
207
|
+
try {
|
|
208
|
+
await renown.logout();
|
|
209
|
+
console.log("Successfully logged out.");
|
|
210
|
+
} catch (error) {
|
|
211
|
+
console.error("Failed to clear credentials.");
|
|
212
|
+
console.debug(error);
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
//#endregion
|
|
216
|
+
//#region src/commands/logout.ts
|
|
217
|
+
const logout = command({
|
|
218
|
+
name: "logout",
|
|
219
|
+
description: `
|
|
220
|
+
The logout command removes an existing session created with 'ph login'`,
|
|
221
|
+
args: {},
|
|
222
|
+
handler: async () => {
|
|
223
|
+
await handleLogout();
|
|
224
|
+
process.exit(0);
|
|
225
|
+
}
|
|
226
|
+
});
|
|
227
|
+
//#endregion
|
|
228
|
+
export { getVersion as a, accessToken as i, login as n, list as r, logout as t };
|
|
229
|
+
|
|
230
|
+
//# sourceMappingURL=logout-BpJEO2Ij.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"logout-BpJEO2Ij.mjs","names":["getConfig"],"sources":["../src/get-version.ts","../src/commands/access-token.ts","../src/commands/list.ts","../src/commands/login.ts","../src/commands/logout.ts"],"sourcesContent":["export function getVersion() {\n return (\n process.env.WORKSPACE_VERSION ||\n process.env.npm_package_version ||\n \"unknown\"\n );\n}\n","import {\n accessTokenArgs,\n DEFAULT_EXPIRY_SECONDS,\n} from \"@powerhousedao/shared/clis\";\nimport { command } from \"cmd-ts\";\n\nexport const accessToken = command({\n name: \"access-token\",\n description: `\nThe access-token command generates a bearer token for API authentication. This token\ncan be used to authenticate requests to Powerhouse APIs like reactor-api (Switchboard).\n\nThis command:\n1. Uses your CLI's cryptographic identity (DID) to sign a verifiable credential\n2. Creates a JWT bearer token with configurable expiration\n3. Outputs the token to stdout (info to stderr) for easy piping\n\nPrerequisites:\n You must have a cryptographic identity. 
Run 'ph login' first to:\n - Generate a keypair (stored in .ph/.keypair.json)\n - Optionally link your Ethereum address (stored in .ph/.renown.json)\n\nToken Details:\n The generated token is a JWT (JSON Web Token) containing:\n - Issuer (iss): Your CLI's DID (did:key:...)\n - Subject (sub): Your CLI's DID\n - Credential Subject: Chain ID, network ID, and address (if authenticated)\n - Expiration (exp): Based on --expiry option\n - Audience (aud): If --audience is specified\n\nOutput:\n- Token information (DID, address, expiry) is printed to stderr\n- The token itself is printed to stdout for easy piping/copying\n\nThis allows you to use the command in scripts:\n TOKEN=$(ph access-token)\n curl -H \"Authorization: Bearer $TOKEN\" http://localhost:4001/graphql\n\nUsage with APIs:\n Generate token and use with curl\n TOKEN=$(ph access-token --expiry 1d)\n curl -X POST http://localhost:4001/graphql \\\\\n -H \"Content-Type: application/json\" \\\\\n -H \"Authorization: Bearer $TOKEN\" \\\\\n -d '{\"query\": \"{ drives { id name } }\"}'\n\n Export as environment variable\n export PH_ACCESS_TOKEN=$(ph access-token)\n\nNotes:\n - Tokens are self-signed using your CLI's private key\n - No network request is made; tokens are generated locally\n - The recipient API must trust your CLI's DID to accept the token\n - For reactor-api, ensure AUTH_ENABLED=true to require authentication\n`,\n args: accessTokenArgs,\n handler: async (args) => {\n if (args.debug) {\n console.log(args);\n }\n\n const { generateAccessToken, parseExpiry, formatExpiry } =\n await import(\"@renown/sdk/node\");\n const { getRenown } = await import(\"../services/auth.js\");\n const renown = await getRenown();\n\n let expiresIn = DEFAULT_EXPIRY_SECONDS;\n if (args.expiry) expiresIn = parseExpiry(args.expiry);\n\n const result = await generateAccessToken(renown, {\n expiresIn,\n aud: args.audience,\n });\n\n // Output token info to stderr, token itself to stdout for piping\n console.error(`CLI DID: 
${result.did}`);\n console.error(`ETH Address: ${result.address}`);\n console.error(`Token expires in: ${formatExpiry(expiresIn)}`);\n console.error(\"\");\n\n console.log(result.token);\n process.exit(0);\n },\n});\n","import { getConfig } from \"@powerhousedao/config/node\";\nimport { getPowerhouseProjectInfo, listArgs } from \"@powerhousedao/shared/clis\";\nimport { command } from \"cmd-ts\";\n\nexport const list = command({\n name: \"list\",\n description: `\nThe list command displays information about installed Powerhouse packages in your project.\nIt reads the powerhouse.config.json file and shows the packages that are currently installed.\n\nThis command:\n1. Examines your project configuration\n2. Lists all installed Powerhouse packages\n3. Provides a clear overview of your project's dependencies\n4. Helps you manage and track your Powerhouse components\n`,\n aliases: [\"l\"],\n args: listArgs,\n handler: async (args) => {\n if (args.debug) {\n console.log(args);\n }\n\n try {\n const projectInfo = await getPowerhouseProjectInfo();\n console.log(\"\\n>>> projectInfo\", projectInfo);\n\n const phConfig = getConfig(\n projectInfo.projectPath + \"/powerhouse.config.json\",\n );\n\n if (!phConfig.packages || phConfig.packages.length === 0) {\n console.log(\"No packages found in the project\");\n return;\n }\n\n console.log(\"Installed Packages:\\n\");\n phConfig.packages.forEach((pkg) => {\n console.log(pkg.packageName);\n });\n } catch (e) {\n console.log(\"No packages found in the project\");\n }\n process.exit(0);\n },\n});\n","import { loginArgs } from \"@powerhousedao/shared/clis\";\nimport { command } from \"cmd-ts\";\n\nexport const login = command({\n name: \"login\",\n description: `\nThe login command authenticates you with Renown using your Ethereum wallet. This enables\nthe CLI to act on behalf of your Ethereum identity for authenticated operations.\n\nThis command:\n1. Generates or loads a cryptographic identity (DID) for the CLI\n2. 
Opens your browser to the Renown authentication page\n3. You authorize the CLI's DID to act on behalf of your Ethereum address\n4. Stores the credentials locally in .ph/.renown.json\n `,\n args: loginArgs,\n handler: async (args) => {\n if (args.debug) {\n console.log(args);\n }\n\n const { getRenown } = await import(\"../services/auth.js\");\n const renown = await getRenown(args.renownUrl);\n\n if (args.showDid) {\n console.log(renown.did);\n process.exit(0);\n }\n\n if (args.status) {\n const { getAuthStatus } = await import(\"@renown/sdk/node\");\n const status = getAuthStatus(renown);\n if (!status.authenticated || !status.address) {\n console.log(\"Not authenticated with an Ethereum address.\");\n console.log('Run \"ph login\" to authenticate.');\n } else {\n console.log(\"Authenticated\");\n console.log(` ETH Address: ${status.address}`);\n console.log(` User DID: ${status.userDid}`);\n console.log(` Chain ID: ${status.chainId}`);\n console.log(` CLI DID: ${status.cliDid}`);\n console.log(\n ` Authenticated at: ${status.authenticatedAt?.toLocaleString()}`,\n );\n console.log(` Renown URL: ${status.baseUrl}`);\n }\n process.exit(0);\n }\n\n if (args.logout) {\n await handleLogout();\n process.exit(0);\n }\n\n const { browserLogin } = await import(\"@renown/sdk/node\");\n\n console.debug(\"Initializing cryptographic identity...\");\n console.log(`CLI DID: ${renown.did}`);\n\n try {\n const timeoutMs = args.timeout ? args.timeout * 1000 : undefined;\n\n const result = await browserLogin(renown, {\n renownUrl: args.renownUrl,\n timeoutMs,\n onLoginUrl: (url, sessionId) => {\n console.log(\"Opening browser for authentication...\");\n console.log(`Session ID: ${sessionId.slice(0, 8)}...`);\n console.log(`Login URL: ${url}`);\n console.log();\n console.log(\"Waiting for authentication in browser\");\n console.log(`(timeout in ${(timeoutMs ?? 
300_000) / 1000} seconds)`);\n console.log();\n console.log(\n \"Please connect your wallet and authorize this CLI to act on your behalf.\",\n );\n console.log();\n process.stdout.write(\"Waiting\");\n },\n onPollTick: () => process.stdout.write(\".\"),\n onBrowserOpenFailed: (url) => {\n console.error(\"Failed to open browser automatically.\");\n console.log(`Please open this URL manually: ${url}`);\n },\n });\n\n console.log(); // New line after dots\n console.log();\n console.log(\"Successfully authenticated!\");\n console.log(` ETH Address: ${result.user.address}`);\n console.log(` User DID: ${result.user.did}`);\n console.log(` CLI DID: ${result.cliDid}`);\n console.log();\n console.log(\"The CLI can now act on behalf of your Ethereum identity.\");\n } catch (error) {\n console.log(); // New line after dots\n throw error;\n }\n\n process.exit(0);\n },\n});\n\nexport async function handleLogout() {\n const { getRenown } = await import(\"../services/auth.js\");\n const renown = await getRenown();\n if (!renown.user) {\n console.log(\"Not currently authenticated.\");\n return;\n }\n\n try {\n await renown.logout();\n console.log(\"Successfully logged out.\");\n } catch (error) {\n console.error(\"Failed to clear credentials.\");\n console.debug(error);\n }\n}\n","import { command } from \"cmd-ts\";\nimport { handleLogout } from \"./login.js\";\n\nexport const logout = command({\n name: \"logout\",\n description: `\nThe logout command removes an existing session created with 'ph login'`,\n args: {},\n handler: async () => {\n await handleLogout();\n process.exit(0);\n 
},\n});\n"],"mappings":";;;;AAAA,SAAgB,aAAa;AAC3B,QACE,QAAQ,IAAI,qBACZ,QAAQ,IAAI,uBACZ;;;;ACEJ,MAAa,cAAc,QAAQ;CACjC,MAAM;CACN,aAAa;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA+Cb,MAAM;CACN,SAAS,OAAO,SAAS;AACvB,MAAI,KAAK,MACP,SAAQ,IAAI,KAAK;EAGnB,MAAM,EAAE,qBAAqB,aAAa,iBACxC,MAAM,OAAO;EACf,MAAM,EAAE,cAAc,MAAM,OAAO;EACnC,MAAM,SAAS,MAAM,WAAW;EAEhC,IAAI,YAAY;AAChB,MAAI,KAAK,OAAQ,aAAY,YAAY,KAAK,OAAO;EAErD,MAAM,SAAS,MAAM,oBAAoB,QAAQ;GAC/C;GACA,KAAK,KAAK;GACX,CAAC;AAGF,UAAQ,MAAM,YAAY,OAAO,MAAM;AACvC,UAAQ,MAAM,gBAAgB,OAAO,UAAU;AAC/C,UAAQ,MAAM,qBAAqB,aAAa,UAAU,GAAG;AAC7D,UAAQ,MAAM,GAAG;AAEjB,UAAQ,IAAI,OAAO,MAAM;AACzB,UAAQ,KAAK,EAAE;;CAElB,CAAC;;;AC/EF,MAAa,OAAO,QAAQ;CAC1B,MAAM;CACN,aAAa;;;;;;;;;;CAUb,SAAS,CAAC,IAAI;CACd,MAAM;CACN,SAAS,OAAO,SAAS;AACvB,MAAI,KAAK,MACP,SAAQ,IAAI,KAAK;AAGnB,MAAI;GACF,MAAM,cAAc,MAAM,0BAA0B;AACpD,WAAQ,IAAI,qBAAqB,YAAY;GAE7C,MAAM,WAAWA,YACf,YAAY,cAAc,0BAC3B;AAED,OAAI,CAAC,SAAS,YAAY,SAAS,SAAS,WAAW,GAAG;AACxD,YAAQ,IAAI,mCAAmC;AAC/C;;AAGF,WAAQ,IAAI,wBAAwB;AACpC,YAAS,SAAS,SAAS,QAAQ;AACjC,YAAQ,IAAI,IAAI,YAAY;KAC5B;WACK,GAAG;AACV,WAAQ,IAAI,mCAAmC;;AAEjD,UAAQ,KAAK,EAAE;;CAElB,CAAC;;;AC1CF,MAAa,QAAQ,QAAQ;CAC3B,MAAM;CACN,aAAa;;;;;;;;;;CAUb,MAAM;CACN,SAAS,OAAO,SAAS;AACvB,MAAI,KAAK,MACP,SAAQ,IAAI,KAAK;EAGnB,MAAM,EAAE,cAAc,MAAM,OAAO;EACnC,MAAM,SAAS,MAAM,UAAU,KAAK,UAAU;AAE9C,MAAI,KAAK,SAAS;AAChB,WAAQ,IAAI,OAAO,IAAI;AACvB,WAAQ,KAAK,EAAE;;AAGjB,MAAI,KAAK,QAAQ;GACf,MAAM,EAAE,kBAAkB,MAAM,OAAO;GACvC,MAAM,SAAS,cAAc,OAAO;AACpC,OAAI,CAAC,OAAO,iBAAiB,CAAC,OAAO,SAAS;AAC5C,YAAQ,IAAI,8CAA8C;AAC1D,YAAQ,IAAI,oCAAkC;UACzC;AACL,YAAQ,IAAI,gBAAgB;AAC5B,YAAQ,IAAI,kBAAkB,OAAO,UAAU;AAC/C,YAAQ,IAAI,eAAe,OAAO,UAAU;AAC5C,YAAQ,IAAI,eAAe,OAAO,UAAU;AAC5C,YAAQ,IAAI,cAAc,OAAO,SAAS;AAC1C,YAAQ,IACN,uBAAuB,OAAO,iBAAiB,gBAAgB,GAChE;AACD,YAAQ,IAAI,iBAAiB,OAAO,UAAU;;AAEhD,WAAQ,KAAK,EAAE;;AAGjB,MAAI,KAAK,QAAQ;AACf,SAAM,cAAc;AACpB,WAAQ,KAAK,EAAE;;EAGjB,MAAM,EAAE,iBAAiB,MAAM,OAAO;AAEtC,UAAQ,MAAM,yCAAyC;AACvD,UAAQ,IAAI,YAAY,OAAO,MAAM;AAErC,MAAI;GACF,MAAM,YAAY,KAAK,UAAU,KAAK,UAAU,MAAO,KAAA;GAEvD,M
AAM,SAAS,MAAM,aAAa,QAAQ;IACxC,WAAW,KAAK;IAChB;IACA,aAAa,KAAK,cAAc;AAC9B,aAAQ,IAAI,wCAAwC;AACpD,aAAQ,IAAI,eAAe,UAAU,MAAM,GAAG,EAAE,CAAC,KAAK;AACtD,aAAQ,IAAI,cAAc,MAAM;AAChC,aAAQ,KAAK;AACb,aAAQ,IAAI,wCAAwC;AACpD,aAAQ,IAAI,gBAAgB,aAAa,OAAW,IAAK,WAAW;AACpE,aAAQ,KAAK;AACb,aAAQ,IACN,2EACD;AACD,aAAQ,KAAK;AACb,aAAQ,OAAO,MAAM,UAAU;;IAEjC,kBAAkB,QAAQ,OAAO,MAAM,IAAI;IAC3C,sBAAsB,QAAQ;AAC5B,aAAQ,MAAM,wCAAwC;AACtD,aAAQ,IAAI,kCAAkC,MAAM;;IAEvD,CAAC;AAEF,WAAQ,KAAK;AACb,WAAQ,KAAK;AACb,WAAQ,IAAI,8BAA8B;AAC1C,WAAQ,IAAI,kBAAkB,OAAO,KAAK,UAAU;AACpD,WAAQ,IAAI,eAAe,OAAO,KAAK,MAAM;AAC7C,WAAQ,IAAI,cAAc,OAAO,SAAS;AAC1C,WAAQ,KAAK;AACb,WAAQ,IAAI,2DAA2D;WAChE,OAAO;AACd,WAAQ,KAAK;AACb,SAAM;;AAGR,UAAQ,KAAK,EAAE;;CAElB,CAAC;AAEF,eAAsB,eAAe;CACnC,MAAM,EAAE,cAAc,MAAM,OAAO;CACnC,MAAM,SAAS,MAAM,WAAW;AAChC,KAAI,CAAC,OAAO,MAAM;AAChB,UAAQ,IAAI,+BAA+B;AAC3C;;AAGF,KAAI;AACF,QAAM,OAAO,QAAQ;AACrB,UAAQ,IAAI,2BAA2B;UAChC,OAAO;AACd,UAAQ,MAAM,+BAA+B;AAC7C,UAAQ,MAAM,MAAM;;;;;ACjHxB,MAAa,SAAS,QAAQ;CAC5B,MAAM;CACN,aAAa;;CAEb,MAAM,EAAE;CACR,SAAS,YAAY;AACnB,QAAM,cAAc;AACpB,UAAQ,KAAK,EAAE;;CAElB,CAAC"}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { migrate } from "@powerhousedao/codegen";
|
|
2
|
+
//#region src/services/migrate.ts
|
|
3
|
+
async function startMigrate(args) {
|
|
4
|
+
const { version, debug } = args;
|
|
5
|
+
if (debug) console.log({ args });
|
|
6
|
+
await migrate(version);
|
|
7
|
+
}
|
|
8
|
+
//#endregion
|
|
9
|
+
export { startMigrate };
|
|
10
|
+
|
|
11
|
+
//# sourceMappingURL=migrate-CSaC4bDh.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migrate-CSaC4bDh.mjs","names":[],"sources":["../src/services/migrate.ts"],"sourcesContent":["import { migrate } from \"@powerhousedao/codegen\";\nimport type { MigrateArgs } from \"../types.js\";\n\nexport async function startMigrate(args: MigrateArgs) {\n const { version, debug } = args;\n if (debug) {\n console.log({ args });\n }\n await migrate(version);\n}\n"],"mappings":";;AAGA,eAAsB,aAAa,MAAmB;CACpD,MAAM,EAAE,SAAS,UAAU;AAC3B,KAAI,MACF,SAAQ,IAAI,EAAE,MAAM,CAAC;AAEvB,OAAM,QAAQ,QAAQ"}
|
|
@@ -1,6 +1,13 @@
|
|
|
1
1
|
import { writeCliDocsMarkdownFile } from "@powerhousedao/codegen/file-builders";
|
|
2
2
|
import { accessToken } from "../src/commands/access-token.js";
|
|
3
3
|
import { build, connect, preview, studio } from "../src/commands/connect.js";
|
|
4
|
+
import { generateAllCmd } from "../src/commands/generate-all.js";
|
|
5
|
+
import { generateAppCmd } from "../src/commands/generate-app.js";
|
|
6
|
+
import { generateDocumentModelCmd } from "../src/commands/generate-document-model.js";
|
|
7
|
+
import { generateEditorCmd } from "../src/commands/generate-editor.js";
|
|
8
|
+
import { generateMigrationFileCmd } from "../src/commands/generate-migration-file.js";
|
|
9
|
+
import { generateProcessorCmd } from "../src/commands/generate-processor.js";
|
|
10
|
+
import { generateSubgraphCmd } from "../src/commands/generate-subgraph.js";
|
|
4
11
|
import { generate } from "../src/commands/generate.js";
|
|
5
12
|
import { inspect } from "../src/commands/inspect.js";
|
|
6
13
|
import { install } from "../src/commands/install.js";
|
|
@@ -14,6 +21,13 @@ import { vetra } from "../src/commands/vetra.js";
|
|
|
14
21
|
|
|
15
22
|
const commands = [
|
|
16
23
|
{ name: "generate", command: generate },
|
|
24
|
+
{ name: "all", command: generateAllCmd },
|
|
25
|
+
{ name: "document-model", command: generateDocumentModelCmd },
|
|
26
|
+
{ name: "editor", command: generateEditorCmd },
|
|
27
|
+
{ name: "app", command: generateAppCmd },
|
|
28
|
+
{ name: "processor", command: generateProcessorCmd },
|
|
29
|
+
{ name: "subgraph", command: generateSubgraphCmd },
|
|
30
|
+
{ name: "migration-file", command: generateMigrationFileCmd },
|
|
17
31
|
{ name: "vetra", command: vetra },
|
|
18
32
|
{ name: "connect", command: connect },
|
|
19
33
|
{ name: "connect studio", command: studio },
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { startSwitchboard } from "@powerhousedao/switchboard/server";
|
|
3
|
+
//#region src/services/switchboard.ts
|
|
4
|
+
const defaultSwitchboardOptions = {
|
|
5
|
+
port: 4001,
|
|
6
|
+
dbPath: path.join(process.cwd(), ".ph/read-model.db"),
|
|
7
|
+
drive: {
|
|
8
|
+
id: "powerhouse",
|
|
9
|
+
slug: "powerhouse",
|
|
10
|
+
global: {
|
|
11
|
+
name: "Powerhouse",
|
|
12
|
+
icon: "https://ipfs.io/ipfs/QmcaTDBYn8X2psGaXe7iQ6qd8q6oqHLgxvMX9yXf7f9uP7"
|
|
13
|
+
},
|
|
14
|
+
local: {
|
|
15
|
+
availableOffline: true,
|
|
16
|
+
listeners: [],
|
|
17
|
+
sharingType: "public",
|
|
18
|
+
triggers: []
|
|
19
|
+
}
|
|
20
|
+
},
|
|
21
|
+
mcp: true
|
|
22
|
+
};
|
|
23
|
+
/**
 * Builds the default server options for a Vetra-backed Switchboard.
 * Mirrors defaultSwitchboardOptions (same port and dbPath) except that the
 * drive id/slug come from the given project drive id, the drive is branded
 * "Vetra", and the "vetra-drive-app" editor is preferred.
 * @param vetraDriveId - id (and slug) to use for the local Vetra drive
 * @returns default StartServerOptions for a Vetra drive
 */
function getDefaultVetraSwitchboardOptions(vetraDriveId) {
  // The drive id doubles as its slug.
  const driveIdentity = { id: vetraDriveId, slug: vetraDriveId };
  const globalMeta = {
    name: "Vetra",
    icon: "https://azure-elderly-tortoise-212.mypinata.cloud/ipfs/bafkreibf2xokjqqtomqjd2w2xxmmhvogq4262csevclxh6sbrjgmjfre5u"
  };
  const localMeta = {
    availableOffline: true,
    listeners: [],
    sharingType: "public",
    triggers: []
  };
  return {
    port: 4001,
    dbPath: path.join(process.cwd(), ".ph/read-model.db"),
    drive: {
      ...driveIdentity,
      global: globalMeta,
      preferredEditor: "vetra-drive-app",
      local: localMeta
    }
  };
}
|
|
44
|
+
/**
 * CLI-facing wrapper around the switchboard server's startSwitchboard.
 * Splits CLI arguments into identity options, the package list, and
 * passthrough server options, fills in the project defaults (Vetra or
 * Powerhouse), and boots the server.
 * @param options - CLI switchboard args plus passthrough server options
 * @param logger - optional logger forwarded to the server
 * @returns the started reactor (whatever startSwitchboard resolves to)
 */
async function startSwitchboard$1(options, logger) {
  const {
    packages: packagesString,
    remoteDrives,
    useVetraDrive,
    vetraDriveId,
    useIdentity,
    keypairPath,
    requireIdentity,
    ...serverOptions
  } = options;
  // Pick the Vetra defaults when requested, otherwise the Powerhouse ones.
  const defaults = useVetraDrive
    ? getDefaultVetraSwitchboardOptions(vetraDriveId)
    : defaultSwitchboardOptions;
  // Identity is only configured when at least one identity flag is set.
  const identity = useIdentity || keypairPath || requireIdentity
    ? { keypairPath, requireExisting: requireIdentity }
    : void 0;
  // Comma-separated package list, or undefined when none was given.
  const packages = packagesString?.split(",");
  // When remote drives are configured, suppress the default local drive so
  // the server only syncs with the remotes (serverOptions may still override).
  const baseOptions = { ...defaults };
  if (remoteDrives.length > 0) baseOptions.drive = void 0;
  return await startSwitchboard({
    ...baseOptions,
    ...serverOptions,
    remoteDrives,
    identity,
    packages,
    logger
  });
}
|
|
69
|
+
//#endregion
|
|
70
|
+
export { startSwitchboard$1 as n, defaultSwitchboardOptions as t };
|
|
71
|
+
|
|
72
|
+
//# sourceMappingURL=switchboard-Bl2llp5U.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"switchboard-Bl2llp5U.mjs","names":["startSwitchboard","startSwitchboardServer"],"sources":["../src/services/switchboard.ts"],"sourcesContent":["import type {\n IdentityOptions,\n StartServerOptions,\n} from \"@powerhousedao/switchboard/server\";\nimport { startSwitchboard as startSwitchboardServer } from \"@powerhousedao/switchboard/server\";\nimport type { ILogger } from \"document-model\";\nimport path from \"node:path\";\nimport type { SwitchboardArgs } from \"../types.js\";\n\nexport const defaultSwitchboardOptions = {\n port: 4001,\n dbPath: path.join(process.cwd(), \".ph/read-model.db\"),\n drive: {\n id: \"powerhouse\",\n slug: \"powerhouse\",\n global: {\n name: \"Powerhouse\",\n icon: \"https://ipfs.io/ipfs/QmcaTDBYn8X2psGaXe7iQ6qd8q6oqHLgxvMX9yXf7f9uP7\",\n },\n local: {\n availableOffline: true,\n listeners: [],\n sharingType: \"public\",\n triggers: [],\n },\n },\n mcp: true,\n} satisfies StartServerOptions;\n\nfunction getDefaultVetraSwitchboardOptions(\n vetraDriveId: string,\n): Partial<StartServerOptions> {\n return {\n port: 4001,\n dbPath: path.join(process.cwd(), \".ph/read-model.db\"),\n drive: {\n id: vetraDriveId,\n slug: vetraDriveId,\n global: {\n name: \"Vetra\",\n icon: \"https://azure-elderly-tortoise-212.mypinata.cloud/ipfs/bafkreibf2xokjqqtomqjd2w2xxmmhvogq4262csevclxh6sbrjgmjfre5u\",\n },\n preferredEditor: \"vetra-drive-app\",\n local: {\n availableOffline: true,\n listeners: [],\n sharingType: \"public\",\n triggers: [],\n },\n },\n };\n}\n\nexport async function startSwitchboard(\n options: SwitchboardArgs & {\n strictPort?: boolean;\n processorConfig?: Map<string, unknown>;\n },\n logger?: ILogger,\n) {\n const {\n packages: packagesString,\n remoteDrives,\n useVetraDrive,\n vetraDriveId,\n useIdentity,\n keypairPath,\n requireIdentity,\n ...serverOptions\n } = options;\n\n // Choose the appropriate default configuration\n const defaultOptions = useVetraDrive\n ? 
getDefaultVetraSwitchboardOptions(vetraDriveId)\n : defaultSwitchboardOptions;\n\n // Build identity options if enabled\n const identity: IdentityOptions | undefined =\n useIdentity || keypairPath || requireIdentity\n ? {\n keypairPath,\n requireExisting: requireIdentity,\n }\n : undefined;\n\n const packages = packagesString?.split(\",\");\n\n // Only include the default drive if no remote drives are provided\n const finalOptions =\n remoteDrives.length > 0\n ? {\n ...defaultOptions,\n drive: undefined, // Don't create default drive when syncing with remote\n ...serverOptions,\n remoteDrives,\n identity,\n packages,\n logger,\n }\n : {\n ...defaultOptions,\n ...serverOptions,\n remoteDrives,\n identity,\n packages,\n logger,\n };\n\n const reactor = await startSwitchboardServer(finalOptions);\n\n return reactor;\n}\n"],"mappings":";;;AASA,MAAa,4BAA4B;CACvC,MAAM;CACN,QAAQ,KAAK,KAAK,QAAQ,KAAK,EAAE,oBAAoB;CACrD,OAAO;EACL,IAAI;EACJ,MAAM;EACN,QAAQ;GACN,MAAM;GACN,MAAM;GACP;EACD,OAAO;GACL,kBAAkB;GAClB,WAAW,EAAE;GACb,aAAa;GACb,UAAU,EAAE;GACb;EACF;CACD,KAAK;CACN;AAED,SAAS,kCACP,cAC6B;AAC7B,QAAO;EACL,MAAM;EACN,QAAQ,KAAK,KAAK,QAAQ,KAAK,EAAE,oBAAoB;EACrD,OAAO;GACL,IAAI;GACJ,MAAM;GACN,QAAQ;IACN,MAAM;IACN,MAAM;IACP;GACD,iBAAiB;GACjB,OAAO;IACL,kBAAkB;IAClB,WAAW,EAAE;IACb,aAAa;IACb,UAAU,EAAE;IACb;GACF;EACF;;AAGH,eAAsBA,mBACpB,SAIA,QACA;CACA,MAAM,EACJ,UAAU,gBACV,cACA,eACA,cACA,aACA,aACA,iBACA,GAAG,kBACD;CAGJ,MAAM,iBAAiB,gBACnB,kCAAkC,aAAa,GAC/C;CAGJ,MAAM,WACJ,eAAe,eAAe,kBAC1B;EACE;EACA,iBAAiB;EAClB,GACD,KAAA;CAEN,MAAM,WAAW,gBAAgB,MAAM,IAAI;AAyB3C,QAFgB,MAAMC,iBAnBpB,aAAa,SAAS,IAClB;EACE,GAAG;EACH,OAAO,KAAA;EACP,GAAG;EACH;EACA;EACA;EACA;EACD,GACD;EACE,GAAG;EACH,GAAG;EACH;EACA;EACA;EACA;EACD,CAEmD"}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { Kysely, PostgresDialect } from "kysely";
|
|
2
|
+
import { Pool } from "pg";
|
|
3
|
+
import { REACTOR_SCHEMA, getMigrationStatus, runMigrations } from "@powerhousedao/reactor";
|
|
4
|
+
//#region src/services/switchboard-migrate.ts
|
|
5
|
+
/**
 * True when the given connection string uses a PostgreSQL scheme,
 * i.e. starts with "postgresql://" or "postgres://" (case-sensitive).
 */
function isPostgresUrl(url) {
  return /^postgres(?:ql)?:\/\//.test(url);
}
|
|
8
|
+
/**
 * Runs (or, with statusOnly, reports the status of) the reactor schema
 * migrations against a PostgreSQL database. The connection string is taken
 * from options.dbPath, falling back to the PH_REACTOR_DATABASE_URL and then
 * DATABASE_URL environment variables.
 * No-ops when no PostgreSQL URL is configured (per the log message, PGlite
 * migrations are handled automatically on startup).
 * Exits the process with code 1 on migration failure or unexpected errors.
 * @param options - { dbPath?: string, statusOnly?: boolean }
 */
async function runSwitchboardMigrations(options) {
  // Explicit option wins over the environment variables.
  const dbPath = options.dbPath ?? process.env.PH_REACTOR_DATABASE_URL ?? process.env.DATABASE_URL;
  if (!dbPath || !isPostgresUrl(dbPath)) {
    console.log("No PostgreSQL URL configured. Skipping migrations.");
    console.log("(PGlite migrations are handled automatically on startup)");
    return;
  }
  console.log(`Database: ${dbPath}`);
  const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({ connectionString: dbPath }) }) });
  try {
    if (options.statusOnly) {
      // Report-only mode: list each migration with its execution timestamp.
      console.log("\nChecking migration status...");
      const migrations = await getMigrationStatus(db, REACTOR_SCHEMA);
      console.log("\nMigration Status:");
      console.log("=================");
      for (const migration of migrations) {
        const status = migration.executedAt ? `[OK] Executed at ${migration.executedAt.toISOString()}` : "[--] Pending";
        console.log(`${status} - ${migration.name}`);
      }
    } else {
      console.log("\nRunning migrations...");
      const result = await runMigrations(db, REACTOR_SCHEMA);
      if (!result.success) {
        // Hard failure: report and terminate the CLI process.
        console.error("Migration failed:", result.error?.message);
        process.exit(1);
      }
      if (result.migrationsExecuted.length === 0) console.log("No migrations to run - database is up to date");
      else {
        console.log(`Successfully executed ${result.migrationsExecuted.length} migration(s):`);
        for (const name of result.migrationsExecuted) console.log(` - ${name}`);
      }
    }
  } catch (error) {
    console.error("Error:", error instanceof Error ? error.message : String(error));
    process.exit(1);
  } finally {
    // Always release the connection pool, even on failure paths.
    await db.destroy();
  }
}
|
|
47
|
+
//#endregion
|
|
48
|
+
export { runSwitchboardMigrations };
|
|
49
|
+
|
|
50
|
+
//# sourceMappingURL=switchboard-migrate-BiQJfIrc.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"switchboard-migrate-BiQJfIrc.mjs","names":[],"sources":["../src/services/switchboard-migrate.ts"],"sourcesContent":["import { Kysely, PostgresDialect } from \"kysely\";\nimport { Pool } from \"pg\";\nimport {\n runMigrations,\n getMigrationStatus,\n REACTOR_SCHEMA,\n} from \"@powerhousedao/reactor\";\n\ninterface MigrationOptions {\n dbPath?: string;\n statusOnly?: boolean;\n}\n\nfunction isPostgresUrl(url: string): boolean {\n return url.startsWith(\"postgresql://\") || url.startsWith(\"postgres://\");\n}\n\nexport async function runSwitchboardMigrations(\n options: MigrationOptions,\n): Promise<void> {\n const dbPath =\n options.dbPath ??\n process.env.PH_REACTOR_DATABASE_URL ??\n process.env.DATABASE_URL;\n\n if (!dbPath || !isPostgresUrl(dbPath)) {\n console.log(\"No PostgreSQL URL configured. Skipping migrations.\");\n console.log(\"(PGlite migrations are handled automatically on startup)\");\n return;\n }\n\n console.log(`Database: ${dbPath}`);\n\n const pool = new Pool({ connectionString: dbPath });\n\n const db = new Kysely<any>({\n dialect: new PostgresDialect({ pool }),\n });\n\n try {\n if (options.statusOnly) {\n console.log(\"\\nChecking migration status...\");\n const migrations = await getMigrationStatus(db, REACTOR_SCHEMA);\n\n console.log(\"\\nMigration Status:\");\n console.log(\"=================\");\n\n for (const migration of migrations) {\n const status = migration.executedAt\n ? 
`[OK] Executed at ${migration.executedAt.toISOString()}`\n : \"[--] Pending\";\n console.log(`${status} - ${migration.name}`);\n }\n } else {\n console.log(\"\\nRunning migrations...\");\n const result = await runMigrations(db, REACTOR_SCHEMA);\n\n if (!result.success) {\n console.error(\"Migration failed:\", result.error?.message);\n process.exit(1);\n }\n\n if (result.migrationsExecuted.length === 0) {\n console.log(\"No migrations to run - database is up to date\");\n } else {\n console.log(\n `Successfully executed ${result.migrationsExecuted.length} migration(s):`,\n );\n for (const name of result.migrationsExecuted) {\n console.log(` - ${name}`);\n }\n }\n }\n } catch (error) {\n console.error(\n \"Error:\",\n error instanceof Error ? error.message : String(error),\n );\n process.exit(1);\n } finally {\n await db.destroy();\n }\n}\n"],"mappings":";;;;AAaA,SAAS,cAAc,KAAsB;AAC3C,QAAO,IAAI,WAAW,gBAAgB,IAAI,IAAI,WAAW,cAAc;;AAGzE,eAAsB,yBACpB,SACe;CACf,MAAM,SACJ,QAAQ,UACR,QAAQ,IAAI,2BACZ,QAAQ,IAAI;AAEd,KAAI,CAAC,UAAU,CAAC,cAAc,OAAO,EAAE;AACrC,UAAQ,IAAI,qDAAqD;AACjE,UAAQ,IAAI,2DAA2D;AACvE;;AAGF,SAAQ,IAAI,aAAa,SAAS;CAIlC,MAAM,KAAK,IAAI,OAAY,EACzB,SAAS,IAAI,gBAAgB,EAAE,MAHpB,IAAI,KAAK,EAAE,kBAAkB,QAAQ,CAAC,EAGZ,CAAC,EACvC,CAAC;AAEF,KAAI;AACF,MAAI,QAAQ,YAAY;AACtB,WAAQ,IAAI,iCAAiC;GAC7C,MAAM,aAAa,MAAM,mBAAmB,IAAI,eAAe;AAE/D,WAAQ,IAAI,sBAAsB;AAClC,WAAQ,IAAI,oBAAoB;AAEhC,QAAK,MAAM,aAAa,YAAY;IAClC,MAAM,SAAS,UAAU,aACrB,oBAAoB,UAAU,WAAW,aAAa,KACtD;AACJ,YAAQ,IAAI,GAAG,OAAO,KAAK,UAAU,OAAO;;SAEzC;AACL,WAAQ,IAAI,0BAA0B;GACtC,MAAM,SAAS,MAAM,cAAc,IAAI,eAAe;AAEtD,OAAI,CAAC,OAAO,SAAS;AACnB,YAAQ,MAAM,qBAAqB,OAAO,OAAO,QAAQ;AACzD,YAAQ,KAAK,EAAE;;AAGjB,OAAI,OAAO,mBAAmB,WAAW,EACvC,SAAQ,IAAI,gDAAgD;QACvD;AACL,YAAQ,IACN,yBAAyB,OAAO,mBAAmB,OAAO,gBAC3D;AACD,SAAK,MAAM,QAAQ,OAAO,mBACxB,SAAQ,IAAI,OAAO,OAAO;;;UAIzB,OAAO;AACd,UAAQ,MACN,UACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,CACvD;AACD,UAAQ,KAAK,EAAE;WACP;AACR,QAAM,GAAG,SAAS"}
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
import path, { dirname } from "node:path";
|
|
2
|
+
import crypto from "node:crypto";
|
|
3
|
+
import fs from "node:fs";
|
|
4
|
+
import { homedir } from "node:os";
|
|
5
|
+
//#region src/utils.ts
|
|
6
|
+
// Name of the project-level Powerhouse configuration file.
const POWERHOUSE_CONFIG_FILE = "powerhouse.config.json";
// Global fallback directory (~/.ph) used when no project root is found.
const POWERHOUSE_GLOBAL_DIR = path.join(homedir(), ".ph");
|
|
8
|
+
// Per-package-manager command templates and metadata.
// - globalPathRegexp: matches the manager's directory segment in an install
//   path (npm has none and acts as the fallback)
// - installCommand / uninstallCommand / updateCommand: "{{dependency}}" is
//   substituted with the package spec before the command is run
// - workspaceOption: extra flag for workspace-root installs ("" when n/a)
// - lockfile: used to detect which manager owns a project directory
//   (see getPackageManagerFromLockfile)
const packageManagers = {
  bun: {
    globalPathRegexp: /[\\/].bun[\\/]/,
    installCommand: "bun add {{dependency}}",
    uninstallCommand: "bun remove {{dependency}}",
    workspaceOption: "",
    lockfile: "bun.lock",
    updateCommand: "bun update {{dependency}}",
    buildAffected: "bun run build:affected"
  },
  pnpm: {
    globalPathRegexp: /[\\/]pnpm[\\/]/,
    installCommand: "pnpm add {{dependency}}",
    uninstallCommand: "pnpm remove {{dependency}}",
    workspaceOption: "--workspace-root",
    lockfile: "pnpm-lock.yaml",
    updateCommand: "pnpm update {{dependency}}",
    buildAffected: "pnpm run build:affected"
  },
  yarn: {
    globalPathRegexp: /[\\/]yarn[\\/]/,
    installCommand: "yarn add {{dependency}}",
    uninstallCommand: "yarn remove {{dependency}}",
    workspaceOption: "-W",
    lockfile: "yarn.lock",
    updateCommand: "yarn upgrade {{dependency}}",
    buildAffected: "yarn run build:affected"
  },
  npm: {
    installCommand: "npm install {{dependency}}",
    uninstallCommand: "npm uninstall {{dependency}}",
    workspaceOption: "",
    lockfile: "package-lock.json",
    updateCommand: "npm update {{dependency}} --save",
    buildAffected: "npm run build:affected"
  }
};
|
|
45
|
+
/**
 * Default directory validator for findNodeProjectRoot: accepts every
 * directory, so the search stops at the first package.json found.
 */
function defaultPathValidation() {
  return true;
}
|
|
48
|
+
/**
 * A directory counts as a Powerhouse project when it contains a
 * powerhouse.config.json file.
 * @param dir - absolute path of the directory to check
 * @returns true when the config file exists in dir
 */
function isPowerhouseProject(dir) {
  return fs.existsSync(path.join(dir, POWERHOUSE_CONFIG_FILE));
}
|
|
52
|
+
/**
 * Walks up from dir looking for the nearest directory that both contains a
 * package.json and satisfies pathValidation (e.g. isPowerhouseProject).
 * @param dir - directory to start the search from
 * @param pathValidation - extra predicate a candidate directory must pass
 * @returns the matching directory, or null when the filesystem root is reached
 */
function findNodeProjectRoot(dir, pathValidation = defaultPathValidation) {
  let current = dir;
  for (;;) {
    const hasPackageJson = fs.existsSync(path.join(current, "package.json"));
    if (hasPackageJson && pathValidation(current)) return current;
    const parent = dirname(current);
    // dirname of the root is the root itself — nothing above to search.
    if (parent === current) return null;
    current = parent;
  }
}
|
|
59
|
+
/**
 * Resolves where CLI operations should run: the nearest enclosing Powerhouse
 * project (a directory with both package.json and powerhouse.config.json),
 * or the global ~/.ph directory when none is found.
 * @param debug - when truthy, logs the starting directory
 * @returns { isGlobal, path, packageManager } for the resolved location
 */
function getProjectInfo(debug) {
  const currentPath = process.cwd();
  if (debug) console.log(">>> currentPath", currentPath);
  const projectPath = findNodeProjectRoot(currentPath, isPowerhouseProject);
  const isGlobal = !projectPath;
  // Fall back to the global ~/.ph directory when no project root was found.
  const resolvedPath = isGlobal ? POWERHOUSE_GLOBAL_DIR : projectPath;
  return {
    isGlobal,
    path: resolvedPath,
    packageManager: getPackageManagerFromLockfile(resolvedPath)
  };
}
|
|
74
|
+
/**
 * Generates a unique drive ID based on the project path.
 * The same project path will always generate the same ID.
 * @param name - The name prefix for the drive ID (e.g., "vetra", "powerhouse")
 * @returns A unique drive ID in the format "{name}-{hash}"
 */
function generateProjectDriveId(name) {
  const { path: projectPath } = getProjectInfo();
  // First 8 hex chars of the SHA-256 of the path — deterministic per project.
  const shortHash = crypto
    .createHash("sha256")
    .update(projectPath)
    .digest("hex")
    .slice(0, 8);
  return `${name}-${shortHash}`;
}
|
|
84
|
+
/**
 * Detects the package manager owning a directory from its lockfile.
 * Checked in priority order pnpm > yarn > bun; npm is the fallback when no
 * known lockfile is present.
 * @param dir - directory to inspect
 * @returns "pnpm" | "yarn" | "bun" | "npm"
 */
function getPackageManagerFromLockfile(dir) {
  for (const manager of ["pnpm", "yarn", "bun"]) {
    if (fs.existsSync(path.join(dir, packageManagers[manager].lockfile))) {
      return manager;
    }
  }
  return "npm";
}
|
|
90
|
+
/**
 * Returns an updated packages array for powerhouse.config.json.
 * For "install": entries matching an installed dependency are replaced with
 * fresh { packageName, version, provider } records (appended at the end).
 * For "uninstall": matching entries are simply removed.
 * @param currentPackages - existing packages array (defaults to [])
 * @param dependencies - [{ name, version }] being installed/uninstalled
 * @param task - "install" (default) or "uninstall"
 * @param provider - "registry" (default) or "local"
 * @returns the new packages array (input arrays are not mutated)
 */
function updatePackagesArray(currentPackages = [], dependencies, task = "install", provider = "registry") {
  // Build the name set once instead of scanning dependencies per package
  // (the original did an O(n*m) lookup inside the filter callbacks).
  const dependencyNames = new Set(dependencies.map((dep) => dep.name));
  const remaining = currentPackages.filter((pkg) => !dependencyNames.has(pkg.packageName));
  if (task === "install") {
    const mappedPackages = dependencies.map((dep) => ({
      packageName: dep.name,
      version: dep.version,
      provider
    }));
    return [...remaining, ...mappedPackages];
  }
  return remaining;
}
|
|
100
|
+
/**
 * Rewrites powerhouse.config.json after an install/uninstall: updates the
 * packages array via updatePackagesArray, and on install records the
 * registry URL the first time one is used.
 * @param dependencies - [{ name, version }] being installed/uninstalled
 * @param projectPath - project root containing powerhouse.config.json
 * @param task - "install" (default) or "uninstall"
 * @param provider - "registry" (default) or "local"
 * @param registryUrl - optional registry URL to persist on first install
 * @throws Error when the config file does not exist in projectPath
 */
function updateConfigFile(dependencies, projectPath, task = "install", provider = "registry", registryUrl) {
  const configPath = path.join(projectPath, POWERHOUSE_CONFIG_FILE);
  if (!fs.existsSync(configPath)) {
    throw new Error(`powerhouse.config.json file not found. projectPath: ${projectPath}`);
  }
  const config = JSON.parse(fs.readFileSync(configPath, "utf-8"));
  const updatedConfig = {
    ...config,
    packages: updatePackagesArray(config.packages, dependencies, task, provider)
  };
  // Remember the registry URL only on a non-empty install, and only when
  // the config does not already have one.
  const shouldRecordRegistry =
    task === "install" && Boolean(registryUrl) && !config.packageRegistryUrl && dependencies.length > 0;
  if (shouldRecordRegistry) {
    updatedConfig.packageRegistryUrl = registryUrl;
  }
  fs.writeFileSync(configPath, JSON.stringify(updatedConfig, null, 2));
}
|
|
111
|
+
/**
 * Updates the project's style.css to include imports for newly installed
 * packages. Each dependency's dist/style.css (if present) is appended after
 * the last existing @import line, or prepended when there are none.
 * Warns and skips when style.css or a dependency's CSS file is missing.
 * @param dependencies - Array of { name, version } that were installed
 * @param projectPath - Path to the project root
 */
function updateStylesFile(dependencies, projectPath) {
  const stylesPath = path.join(projectPath, "style.css");
  if (!fs.existsSync(stylesPath)) {
    console.warn("⚠️ Warning: style.css file not found in project root");
    return;
  }
  const currentStyles = fs.readFileSync(stylesPath, "utf-8");
  let updatedStyles = currentStyles;
  for (const dep of dependencies) {
    const cssPath = `./node_modules/${dep.name}/dist/style.css`;
    const fullCssPath = path.join(projectPath, cssPath);
    const importStatement = `@import '${cssPath}';`;
    if (!fs.existsSync(fullCssPath)) {
      console.warn(`⚠️ Warning: CSS file not found at ${cssPath}`);
      continue;
    }
    // BUG FIX: the original read from and replaced into `currentStyles`
    // inside this loop, so each iteration discarded the previous insertion
    // and only the LAST dependency's import survived. Accumulate on
    // `updatedStyles` instead so every dependency is recorded.
    if (updatedStyles.includes(importStatement)) continue;
    const importLines = updatedStyles.split("\n").filter((line) => line.trim().startsWith("@import"));
    const lastImport = importLines[importLines.length - 1];
    updatedStyles = lastImport
      ? updatedStyles.replace(lastImport, `${lastImport}\n${importStatement}`)
      : `${importStatement}\n${updatedStyles}`;
  }
  // Only write when something actually changed.
  if (updatedStyles !== currentStyles) fs.writeFileSync(stylesPath, updatedStyles);
}
|
|
140
|
+
/**
 * Removes CSS imports for uninstalled packages from the project's style.css.
 * For each dependency, any line containing its
 * "@import './node_modules/<name>/dist/style.css';" statement is dropped.
 * Warns and returns when style.css is missing; only writes when the content
 * actually changed.
 * @param dependencies - Array of { name, version } that were uninstalled
 * @param projectPath - Path to the project root
 */
function removeStylesImports(dependencies, projectPath) {
  const stylesPath = path.join(projectPath, "style.css");
  if (!fs.existsSync(stylesPath)) {
    console.warn("⚠️ Warning: style.css file not found in project root");
    return;
  }
  const currentStyles = fs.readFileSync(stylesPath, "utf-8");
  let updatedStyles = currentStyles;
  for (const dep of dependencies) {
    const importStatement = `@import './node_modules/${dep.name}/dist/style.css';`;
    // Drop every line whose trimmed content contains the import statement;
    // the split/join round-trip is lossless when nothing matches.
    updatedStyles = updatedStyles
      .split("\n")
      .filter((line) => !line.trim().includes(importStatement))
      .join("\n");
  }
  if (updatedStyles !== currentStyles) fs.writeFileSync(stylesPath, updatedStyles);
}
|
|
159
|
+
//#endregion
|
|
160
|
+
export { updateStylesFile as a, updateConfigFile as i, getProjectInfo as n, removeStylesImports as r, generateProjectDriveId as t };
|
|
161
|
+
|
|
162
|
+
//# sourceMappingURL=utils-C6_gv4nB.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"utils-C6_gv4nB.mjs","names":[],"sources":["../src/utils.ts"],"sourcesContent":["import type { PowerhouseConfig } from \"@powerhousedao/config\";\nimport crypto from \"node:crypto\";\nimport fs from \"node:fs\";\nimport { homedir } from \"node:os\";\nimport path, { dirname } from \"node:path\";\nexport const POWERHOUSE_CONFIG_FILE = \"powerhouse.config.json\";\nexport const POWERHOUSE_GLOBAL_DIR = path.join(homedir(), \".ph\");\nexport const SUPPORTED_PACKAGE_MANAGERS = [\"npm\", \"yarn\", \"pnpm\", \"bun\"];\n\nexport const packageManagers = {\n bun: {\n globalPathRegexp: /[\\\\/].bun[\\\\/]/,\n installCommand: \"bun add {{dependency}}\",\n uninstallCommand: \"bun remove {{dependency}}\",\n workspaceOption: \"\",\n lockfile: \"bun.lock\",\n updateCommand: \"bun update {{dependency}}\",\n buildAffected: \"bun run build:affected\",\n },\n pnpm: {\n globalPathRegexp: /[\\\\/]pnpm[\\\\/]/,\n installCommand: \"pnpm add {{dependency}}\",\n uninstallCommand: \"pnpm remove {{dependency}}\",\n workspaceOption: \"--workspace-root\",\n lockfile: \"pnpm-lock.yaml\",\n updateCommand: \"pnpm update {{dependency}}\",\n buildAffected: \"pnpm run build:affected\",\n },\n yarn: {\n globalPathRegexp: /[\\\\/]yarn[\\\\/]/,\n installCommand: \"yarn add {{dependency}}\",\n uninstallCommand: \"yarn remove {{dependency}}\",\n workspaceOption: \"-W\",\n lockfile: \"yarn.lock\",\n updateCommand: \"yarn upgrade {{dependency}}\",\n buildAffected: \"yarn run build:affected\",\n },\n npm: {\n installCommand: \"npm install {{dependency}}\",\n uninstallCommand: \"npm uninstall {{dependency}}\",\n workspaceOption: \"\",\n lockfile: \"package-lock.json\",\n updateCommand: \"npm update {{dependency}} --save\",\n buildAffected: \"npm run build:affected\",\n },\n};\n\ntype PathValidation = (dir: string) => boolean;\n\nexport type PackageManager = \"npm\" | \"yarn\" | \"pnpm\" | \"bun\";\n\nexport type ProjectInfo = {\n isGlobal: boolean;\n path: string;\n packageManager: 
PackageManager;\n};\n\nexport function defaultPathValidation() {\n return true;\n}\n\nexport function isPowerhouseProject(dir: string) {\n const powerhouseConfigPath = path.join(dir, POWERHOUSE_CONFIG_FILE);\n\n return fs.existsSync(powerhouseConfigPath);\n}\n\nexport function findNodeProjectRoot(\n dir: string,\n pathValidation: PathValidation = defaultPathValidation,\n) {\n const packageJsonPath = path.join(dir, \"package.json\");\n\n if (fs.existsSync(packageJsonPath) && pathValidation(dir)) {\n return dir;\n }\n\n const parentDir = dirname(dir);\n\n if (parentDir === dir) {\n return null;\n }\n\n return findNodeProjectRoot(parentDir, pathValidation);\n}\n\nexport function getProjectInfo(debug?: boolean): ProjectInfo {\n const currentPath = process.cwd();\n\n if (debug) {\n console.log(\">>> currentPath\", currentPath);\n }\n\n const projectPath = findNodeProjectRoot(currentPath, isPowerhouseProject);\n\n if (!projectPath) {\n return {\n isGlobal: true,\n path: POWERHOUSE_GLOBAL_DIR,\n packageManager: getPackageManagerFromLockfile(POWERHOUSE_GLOBAL_DIR),\n };\n }\n\n return {\n isGlobal: false,\n path: projectPath,\n packageManager: getPackageManagerFromLockfile(projectPath),\n };\n}\n\n/**\n * Generates a unique drive ID based on the project path.\n * The same project path will always generate the same ID.\n * @param name - The name prefix for the drive ID (e.g., \"vetra\", \"powerhouse\")\n * @returns A unique drive ID in the format \"{name}-{hash}\"\n */\nexport function generateProjectDriveId(name: string): string {\n const projectInfo = getProjectInfo();\n const hash = crypto\n .createHash(\"sha256\")\n .update(projectInfo.path)\n .digest(\"hex\");\n const shortHash = hash.substring(0, 8);\n return `${name}-${shortHash}`;\n}\n\nexport function getPackageManagerFromLockfile(dir: string): PackageManager {\n if (fs.existsSync(path.join(dir, packageManagers.pnpm.lockfile))) {\n return \"pnpm\";\n } else if (fs.existsSync(path.join(dir, 
packageManagers.yarn.lockfile))) {\n return \"yarn\";\n } else if (fs.existsSync(path.join(dir, packageManagers.bun.lockfile))) {\n return \"bun\";\n }\n\n return \"npm\";\n}\n\nexport function updatePackagesArray(\n currentPackages: PowerhouseConfig[\"packages\"] = [],\n dependencies: { name: string; version: string | undefined }[],\n task: \"install\" | \"uninstall\" = \"install\",\n provider: \"registry\" | \"local\" = \"registry\",\n): PowerhouseConfig[\"packages\"] {\n const isInstall = task === \"install\";\n const mappedPackages = dependencies.map((dep) => ({\n packageName: dep.name,\n version: dep.version,\n provider,\n }));\n\n if (isInstall) {\n // Overwrite existing package if version is different\n const filteredPackages = currentPackages.filter(\n (pkg) => !dependencies.find((dep) => dep.name === pkg.packageName),\n );\n return [...filteredPackages, ...mappedPackages];\n }\n\n return currentPackages.filter(\n (pkg) => !dependencies.map((dep) => dep.name).includes(pkg.packageName),\n );\n}\n\n// Modify updateConfigFile to use the new function\nexport function updateConfigFile(\n dependencies: { name: string; version: string | undefined }[],\n projectPath: string,\n task: \"install\" | \"uninstall\" = \"install\",\n provider: \"registry\" | \"local\" = \"registry\",\n registryUrl?: string,\n) {\n const configPath = path.join(projectPath, POWERHOUSE_CONFIG_FILE);\n\n if (!fs.existsSync(configPath)) {\n throw new Error(\n `powerhouse.config.json file not found. 
projectPath: ${projectPath}`,\n );\n }\n\n const config = JSON.parse(\n fs.readFileSync(configPath, \"utf-8\"),\n ) as PowerhouseConfig;\n\n const updatedConfig: PowerhouseConfig = {\n ...config,\n packages: updatePackagesArray(\n config.packages,\n dependencies,\n task,\n provider,\n ),\n };\n\n if (\n task === \"install\" &&\n registryUrl &&\n !config.packageRegistryUrl &&\n dependencies.length > 0\n ) {\n updatedConfig.packageRegistryUrl = registryUrl;\n }\n\n fs.writeFileSync(configPath, JSON.stringify(updatedConfig, null, 2));\n}\n\n/**\n * Recursively searches for a specific file by traversing up the directory tree.\n * Starting from the given path, it checks each parent directory until it finds\n * the target file or reaches the root directory.\n *\n * @param startPath - The absolute path of the directory to start searching from\n * @param targetFile - The name of the file to search for (e.g., 'package.json', 'pnpm-workspace.yaml')\n * @returns The absolute path of the directory containing the target file, or null if not found\n *\n * @example\n * // Find the workspace root directory\n * const workspaceRoot = findContainerDirectory('/path/to/project/src', 'pnpm-workspace.yaml');\n *\n * // Find the nearest package.json\n * const packageDir = findContainerDirectory('/path/to/project/src/components', 'package.json');\n */\nexport const findContainerDirectory = (\n startPath: string,\n targetFile: string,\n): string | null => {\n const filePath = path.join(startPath, targetFile);\n\n if (fs.existsSync(filePath)) {\n return startPath;\n }\n\n const parentDir = path.dirname(startPath);\n\n //reached the root directory and haven't found the file\n if (parentDir === startPath) {\n return null;\n }\n\n return findContainerDirectory(parentDir, targetFile);\n};\n\n/**\n * Updates the styles.css file to include imports for newly installed packages\n * @param dependencies - Array of dependencies that were installed\n * @param projectPath - Path to the project root\n 
*/\nexport function updateStylesFile(\n dependencies: { name: string; version: string | undefined }[],\n projectPath: string,\n) {\n const stylesPath = path.join(projectPath, \"style.css\");\n\n // Check if styles.css exists\n if (!fs.existsSync(stylesPath)) {\n console.warn(\"⚠️ Warning: style.css file not found in project root\");\n return;\n }\n\n const currentStyles = fs.readFileSync(stylesPath, \"utf-8\");\n let updatedStyles = currentStyles;\n\n for (const dep of dependencies) {\n const cssPath = `./node_modules/${dep.name}/dist/style.css`;\n const fullCssPath = path.join(projectPath, cssPath);\n const importStatement = `@import '${cssPath}';`;\n\n // Check if the CSS file exists\n if (!fs.existsSync(fullCssPath)) {\n console.warn(`⚠️ Warning: CSS file not found at ${cssPath}`);\n continue;\n }\n\n // Check if import already exists\n if (currentStyles.includes(importStatement)) {\n continue;\n }\n\n // Find the last @import statement\n const importLines = currentStyles\n .split(\"\\n\")\n .filter((line) => line.trim().startsWith(\"@import\"));\n const lastImport = importLines[importLines.length - 1];\n\n if (lastImport) {\n // Insert new import after the last existing import\n updatedStyles = currentStyles.replace(\n lastImport,\n `${lastImport}\\n${importStatement}`,\n );\n } else {\n // If no imports exist, add at the top of the file\n updatedStyles = `${importStatement}\\n${currentStyles}`;\n }\n }\n\n // Only write if changes were made\n if (updatedStyles !== currentStyles) {\n fs.writeFileSync(stylesPath, updatedStyles);\n }\n}\n\n/**\n * Removes CSS imports for uninstalled packages from styles.css\n */\nexport function removeStylesImports(\n dependencies: { name: string; version: string | undefined }[],\n projectPath: string,\n) {\n const stylesPath = path.join(projectPath, \"style.css\");\n\n // Check if styles.css exists\n if (!fs.existsSync(stylesPath)) {\n console.warn(\"⚠️ Warning: style.css file not found in project root\");\n return;\n }\n\n 
const currentStyles = fs.readFileSync(stylesPath, \"utf-8\");\n let updatedStyles = currentStyles;\n\n for (const dep of dependencies) {\n const cssPath = `./node_modules/${dep.name}/dist/style.css`;\n const importStatement = `@import '${cssPath}';`;\n\n // Remove the import line if it exists\n const lines = updatedStyles.split(\"\\n\");\n const filteredLines = lines.filter(\n (line) => !line.trim().includes(importStatement),\n );\n\n if (filteredLines.length !== lines.length) {\n updatedStyles = filteredLines.join(\"\\n\");\n }\n }\n\n // Only write if changes were made\n if (updatedStyles !== currentStyles) {\n fs.writeFileSync(stylesPath, updatedStyles);\n }\n}\n"],"mappings":";;;;;AAKA,MAAa,yBAAyB;AACtC,MAAa,wBAAwB,KAAK,KAAK,SAAS,EAAE,MAAM;AAGhE,MAAa,kBAAkB;CAC7B,KAAK;EACH,kBAAkB;EAClB,gBAAgB;EAChB,kBAAkB;EAClB,iBAAiB;EACjB,UAAU;EACV,eAAe;EACf,eAAe;EAChB;CACD,MAAM;EACJ,kBAAkB;EAClB,gBAAgB;EAChB,kBAAkB;EAClB,iBAAiB;EACjB,UAAU;EACV,eAAe;EACf,eAAe;EAChB;CACD,MAAM;EACJ,kBAAkB;EAClB,gBAAgB;EAChB,kBAAkB;EAClB,iBAAiB;EACjB,UAAU;EACV,eAAe;EACf,eAAe;EAChB;CACD,KAAK;EACH,gBAAgB;EAChB,kBAAkB;EAClB,iBAAiB;EACjB,UAAU;EACV,eAAe;EACf,eAAe;EAChB;CACF;AAYD,SAAgB,wBAAwB;AACtC,QAAO;;AAGT,SAAgB,oBAAoB,KAAa;CAC/C,MAAM,uBAAuB,KAAK,KAAK,KAAK,uBAAuB;AAEnE,QAAO,GAAG,WAAW,qBAAqB;;AAG5C,SAAgB,oBACd,KACA,iBAAiC,uBACjC;CACA,MAAM,kBAAkB,KAAK,KAAK,KAAK,eAAe;AAEtD,KAAI,GAAG,WAAW,gBAAgB,IAAI,eAAe,IAAI,CACvD,QAAO;CAGT,MAAM,YAAY,QAAQ,IAAI;AAE9B,KAAI,cAAc,IAChB,QAAO;AAGT,QAAO,oBAAoB,WAAW,eAAe;;AAGvD,SAAgB,eAAe,OAA8B;CAC3D,MAAM,cAAc,QAAQ,KAAK;AAEjC,KAAI,MACF,SAAQ,IAAI,mBAAmB,YAAY;CAG7C,MAAM,cAAc,oBAAoB,aAAa,oBAAoB;AAEzE,KAAI,CAAC,YACH,QAAO;EACL,UAAU;EACV,MAAM;EACN,gBAAgB,8BAA8B,sBAAsB;EACrE;AAGH,QAAO;EACL,UAAU;EACV,MAAM;EACN,gBAAgB,8BAA8B,YAAY;EAC3D;;;;;;;;AASH,SAAgB,uBAAuB,MAAsB;CAC3D,MAAM,cAAc,gBAAgB;AAMpC,QAAO,GAAG,KAAK,GALF,OACV,WAAW,SAAS,CACpB,OAAO,YAAY,KAAK,CACxB,OAAO,MAAM,CACO,UAAU,GAAG,EAAE;;AAIxC,SAAgB,8BAA8B,KAA6B;AACzE,KAAI,GAAG,WAAW,KAAK,KAAK,KAAK,gBAAgB,KAAK,SAAS,CAAC,CAC9D,QAAO;UACE,G
AAG,WAAW,KAAK,KAAK,KAAK,gBAAgB,KAAK,SAAS,CAAC,CACrE,QAAO;UACE,GAAG,WAAW,KAAK,KAAK,KAAK,gBAAgB,IAAI,SAAS,CAAC,CACpE,QAAO;AAGT,QAAO;;AAGT,SAAgB,oBACd,kBAAgD,EAAE,EAClD,cACA,OAAgC,WAChC,WAAiC,YACH;CAC9B,MAAM,YAAY,SAAS;CAC3B,MAAM,iBAAiB,aAAa,KAAK,SAAS;EAChD,aAAa,IAAI;EACjB,SAAS,IAAI;EACb;EACD,EAAE;AAEH,KAAI,UAKF,QAAO,CAAC,GAHiB,gBAAgB,QACtC,QAAQ,CAAC,aAAa,MAAM,QAAQ,IAAI,SAAS,IAAI,YAAY,CACnE,EAC4B,GAAG,eAAe;AAGjD,QAAO,gBAAgB,QACpB,QAAQ,CAAC,aAAa,KAAK,QAAQ,IAAI,KAAK,CAAC,SAAS,IAAI,YAAY,CACxE;;AAIH,SAAgB,iBACd,cACA,aACA,OAAgC,WAChC,WAAiC,YACjC,aACA;CACA,MAAM,aAAa,KAAK,KAAK,aAAa,uBAAuB;AAEjE,KAAI,CAAC,GAAG,WAAW,WAAW,CAC5B,OAAM,IAAI,MACR,uDAAuD,cACxD;CAGH,MAAM,SAAS,KAAK,MAClB,GAAG,aAAa,YAAY,QAAQ,CACrC;CAED,MAAM,gBAAkC;EACtC,GAAG;EACH,UAAU,oBACR,OAAO,UACP,cACA,MACA,SACD;EACF;AAED,KACE,SAAS,aACT,eACA,CAAC,OAAO,sBACR,aAAa,SAAS,EAEtB,eAAc,qBAAqB;AAGrC,IAAG,cAAc,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;;;;;;AA4CtE,SAAgB,iBACd,cACA,aACA;CACA,MAAM,aAAa,KAAK,KAAK,aAAa,YAAY;AAGtD,KAAI,CAAC,GAAG,WAAW,WAAW,EAAE;AAC9B,UAAQ,KAAK,uDAAuD;AACpE;;CAGF,MAAM,gBAAgB,GAAG,aAAa,YAAY,QAAQ;CAC1D,IAAI,gBAAgB;AAEpB,MAAK,MAAM,OAAO,cAAc;EAC9B,MAAM,UAAU,kBAAkB,IAAI,KAAK;EAC3C,MAAM,cAAc,KAAK,KAAK,aAAa,QAAQ;EACnD,MAAM,kBAAkB,YAAY,QAAQ;AAG5C,MAAI,CAAC,GAAG,WAAW,YAAY,EAAE;AAC/B,WAAQ,KAAK,qCAAqC,UAAU;AAC5D;;AAIF,MAAI,cAAc,SAAS,gBAAgB,CACzC;EAIF,MAAM,cAAc,cACjB,MAAM,KAAK,CACX,QAAQ,SAAS,KAAK,MAAM,CAAC,WAAW,UAAU,CAAC;EACtD,MAAM,aAAa,YAAY,YAAY,SAAS;AAEpD,MAAI,WAEF,iBAAgB,cAAc,QAC5B,YACA,GAAG,WAAW,IAAI,kBACnB;MAGD,iBAAgB,GAAG,gBAAgB,IAAI;;AAK3C,KAAI,kBAAkB,cACpB,IAAG,cAAc,YAAY,cAAc;;;;;AAO/C,SAAgB,oBACd,cACA,aACA;CACA,MAAM,aAAa,KAAK,KAAK,aAAa,YAAY;AAGtD,KAAI,CAAC,GAAG,WAAW,WAAW,EAAE;AAC9B,UAAQ,KAAK,uDAAuD;AACpE;;CAGF,MAAM,gBAAgB,GAAG,aAAa,YAAY,QAAQ;CAC1D,IAAI,gBAAgB;AAEpB,MAAK,MAAM,OAAO,cAAc;EAE9B,MAAM,kBAAkB,YADR,kBAAkB,IAAI,KAAK,iBACC;EAG5C,MAAM,QAAQ,cAAc,MAAM,KAAK;EACvC,MAAM,gBAAgB,MAAM,QACzB,SAAS,CAAC,KAAK,MAAM,CAAC,SAAS,gBAAgB,CACjD;AAED,MAAI,cAAc,WAAW,MAAM,OACjC,iBAAgB,cAAc,KAAK,KAAK;;AAK5C,KAAI,kBAAkB,cACpB,IAA
G,cAAc,YAAY,cAAc"}
|