@geekmidas/cli 0.9.0 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146) hide show
  1. package/README.md +525 -0
  2. package/dist/bundler-DRXCw_YR.mjs +70 -0
  3. package/dist/bundler-DRXCw_YR.mjs.map +1 -0
  4. package/dist/bundler-WsEvH_b2.cjs +71 -0
  5. package/dist/bundler-WsEvH_b2.cjs.map +1 -0
  6. package/dist/{config-CFls09Ey.cjs → config-AmInkU7k.cjs} +10 -8
  7. package/dist/config-AmInkU7k.cjs.map +1 -0
  8. package/dist/{config-Bq72aj8e.mjs → config-DYULeEv8.mjs} +6 -4
  9. package/dist/config-DYULeEv8.mjs.map +1 -0
  10. package/dist/config.cjs +1 -1
  11. package/dist/config.d.cts +2 -1
  12. package/dist/config.d.cts.map +1 -0
  13. package/dist/config.d.mts +2 -1
  14. package/dist/config.d.mts.map +1 -0
  15. package/dist/config.mjs +1 -1
  16. package/dist/encryption-C8H-38Yy.mjs +42 -0
  17. package/dist/encryption-C8H-38Yy.mjs.map +1 -0
  18. package/dist/encryption-Dyf_r1h-.cjs +44 -0
  19. package/dist/encryption-Dyf_r1h-.cjs.map +1 -0
  20. package/dist/index.cjs +2125 -184
  21. package/dist/index.cjs.map +1 -1
  22. package/dist/index.mjs +2143 -197
  23. package/dist/index.mjs.map +1 -1
  24. package/dist/{openapi--vOy9mo4.mjs → openapi-BfFlOBCG.mjs} +812 -49
  25. package/dist/openapi-BfFlOBCG.mjs.map +1 -0
  26. package/dist/{openapi-CHhTPief.cjs → openapi-Bt_1FDpT.cjs} +805 -42
  27. package/dist/openapi-Bt_1FDpT.cjs.map +1 -0
  28. package/dist/{openapi-react-query-o5iMi8tz.cjs → openapi-react-query-B-sNWHFU.cjs} +5 -5
  29. package/dist/openapi-react-query-B-sNWHFU.cjs.map +1 -0
  30. package/dist/{openapi-react-query-CcciaVu5.mjs → openapi-react-query-B6XTeGqS.mjs} +5 -5
  31. package/dist/openapi-react-query-B6XTeGqS.mjs.map +1 -0
  32. package/dist/openapi-react-query.cjs +1 -1
  33. package/dist/openapi-react-query.d.cts.map +1 -0
  34. package/dist/openapi-react-query.d.mts.map +1 -0
  35. package/dist/openapi-react-query.mjs +1 -1
  36. package/dist/openapi.cjs +2 -2
  37. package/dist/openapi.d.cts +1 -1
  38. package/dist/openapi.d.cts.map +1 -0
  39. package/dist/openapi.d.mts +1 -1
  40. package/dist/openapi.d.mts.map +1 -0
  41. package/dist/openapi.mjs +2 -2
  42. package/dist/storage-BUYQJgz7.cjs +4 -0
  43. package/dist/storage-BXoJvmv2.cjs +149 -0
  44. package/dist/storage-BXoJvmv2.cjs.map +1 -0
  45. package/dist/storage-C9PU_30f.mjs +101 -0
  46. package/dist/storage-C9PU_30f.mjs.map +1 -0
  47. package/dist/storage-DLJAYxzJ.mjs +3 -0
  48. package/dist/{types-b-vwGpqc.d.cts → types-BR0M2v_c.d.mts} +100 -1
  49. package/dist/types-BR0M2v_c.d.mts.map +1 -0
  50. package/dist/{types-DXgiA1sF.d.mts → types-BhkZc-vm.d.cts} +100 -1
  51. package/dist/types-BhkZc-vm.d.cts.map +1 -0
  52. package/examples/cron-example.ts +27 -27
  53. package/examples/env.ts +27 -27
  54. package/examples/function-example.ts +31 -31
  55. package/examples/gkm.config.json +20 -20
  56. package/examples/gkm.config.ts +8 -8
  57. package/examples/gkm.minimal.config.json +5 -5
  58. package/examples/gkm.production.config.json +25 -25
  59. package/examples/logger.ts +2 -2
  60. package/package.json +6 -6
  61. package/src/__tests__/EndpointGenerator.hooks.spec.ts +191 -191
  62. package/src/__tests__/config.spec.ts +55 -55
  63. package/src/__tests__/loadEnvFiles.spec.ts +93 -93
  64. package/src/__tests__/normalizeHooksConfig.spec.ts +58 -58
  65. package/src/__tests__/openapi-react-query.spec.ts +497 -497
  66. package/src/__tests__/openapi.spec.ts +428 -428
  67. package/src/__tests__/test-helpers.ts +77 -76
  68. package/src/auth/__tests__/credentials.spec.ts +204 -0
  69. package/src/auth/__tests__/index.spec.ts +168 -0
  70. package/src/auth/credentials.ts +187 -0
  71. package/src/auth/index.ts +226 -0
  72. package/src/build/__tests__/index-new.spec.ts +474 -474
  73. package/src/build/__tests__/manifests.spec.ts +333 -333
  74. package/src/build/bundler.ts +141 -0
  75. package/src/build/endpoint-analyzer.ts +236 -0
  76. package/src/build/handler-templates.ts +1253 -0
  77. package/src/build/index.ts +250 -179
  78. package/src/build/manifests.ts +52 -52
  79. package/src/build/providerResolver.ts +145 -145
  80. package/src/build/types.ts +64 -43
  81. package/src/config.ts +39 -37
  82. package/src/deploy/__tests__/docker.spec.ts +111 -0
  83. package/src/deploy/__tests__/dokploy.spec.ts +245 -0
  84. package/src/deploy/__tests__/init.spec.ts +662 -0
  85. package/src/deploy/docker.ts +128 -0
  86. package/src/deploy/dokploy.ts +204 -0
  87. package/src/deploy/index.ts +136 -0
  88. package/src/deploy/init.ts +484 -0
  89. package/src/deploy/types.ts +48 -0
  90. package/src/dev/__tests__/index.spec.ts +266 -266
  91. package/src/dev/index.ts +647 -593
  92. package/src/docker/__tests__/compose.spec.ts +531 -0
  93. package/src/docker/__tests__/templates.spec.ts +280 -0
  94. package/src/docker/compose.ts +273 -0
  95. package/src/docker/index.ts +230 -0
  96. package/src/docker/templates.ts +446 -0
  97. package/src/generators/CronGenerator.ts +72 -72
  98. package/src/generators/EndpointGenerator.ts +699 -398
  99. package/src/generators/FunctionGenerator.ts +84 -84
  100. package/src/generators/Generator.ts +72 -72
  101. package/src/generators/OpenApiTsGenerator.ts +589 -589
  102. package/src/generators/SubscriberGenerator.ts +124 -124
  103. package/src/generators/__tests__/CronGenerator.spec.ts +433 -433
  104. package/src/generators/__tests__/EndpointGenerator.spec.ts +532 -382
  105. package/src/generators/__tests__/FunctionGenerator.spec.ts +244 -244
  106. package/src/generators/__tests__/SubscriberGenerator.spec.ts +397 -382
  107. package/src/generators/index.ts +4 -4
  108. package/src/index.ts +628 -206
  109. package/src/init/__tests__/generators.spec.ts +334 -334
  110. package/src/init/__tests__/init.spec.ts +332 -332
  111. package/src/init/__tests__/utils.spec.ts +89 -89
  112. package/src/init/generators/config.ts +175 -175
  113. package/src/init/generators/docker.ts +41 -41
  114. package/src/init/generators/env.ts +72 -72
  115. package/src/init/generators/index.ts +1 -1
  116. package/src/init/generators/models.ts +64 -64
  117. package/src/init/generators/monorepo.ts +161 -161
  118. package/src/init/generators/package.ts +71 -71
  119. package/src/init/generators/source.ts +6 -6
  120. package/src/init/index.ts +203 -208
  121. package/src/init/templates/api.ts +115 -115
  122. package/src/init/templates/index.ts +75 -75
  123. package/src/init/templates/minimal.ts +98 -98
  124. package/src/init/templates/serverless.ts +89 -89
  125. package/src/init/templates/worker.ts +98 -98
  126. package/src/init/utils.ts +54 -56
  127. package/src/openapi-react-query.ts +194 -194
  128. package/src/openapi.ts +63 -63
  129. package/src/secrets/__tests__/encryption.spec.ts +226 -0
  130. package/src/secrets/__tests__/generator.spec.ts +319 -0
  131. package/src/secrets/__tests__/index.spec.ts +91 -0
  132. package/src/secrets/__tests__/storage.spec.ts +403 -0
  133. package/src/secrets/encryption.ts +91 -0
  134. package/src/secrets/generator.ts +164 -0
  135. package/src/secrets/index.ts +383 -0
  136. package/src/secrets/storage.ts +134 -0
  137. package/src/secrets/types.ts +53 -0
  138. package/src/types.ts +295 -176
  139. package/tsconfig.json +9 -0
  140. package/tsdown.config.ts +11 -8
  141. package/dist/config-Bq72aj8e.mjs.map +0 -1
  142. package/dist/config-CFls09Ey.cjs.map +0 -1
  143. package/dist/openapi--vOy9mo4.mjs.map +0 -1
  144. package/dist/openapi-CHhTPief.cjs.map +0 -1
  145. package/dist/openapi-react-query-CcciaVu5.mjs.map +0 -1
  146. package/dist/openapi-react-query-o5iMi8tz.cjs.map +0 -1
package/dist/index.mjs CHANGED
@@ -1,13 +1,17 @@
1
1
  #!/usr/bin/env -S npx tsx
2
- import { loadConfig, parseModuleConfig } from "./config-Bq72aj8e.mjs";
3
- import { ConstructGenerator, EndpointGenerator, OPENAPI_OUTPUT_PATH, generateOpenApi, openapiCommand, resolveOpenApiConfig } from "./openapi--vOy9mo4.mjs";
4
- import { generateReactQueryCommand } from "./openapi-react-query-CcciaVu5.mjs";
5
- import { join, relative } from "path";
2
+ import { loadConfig, parseModuleConfig } from "./config-DYULeEv8.mjs";
3
+ import { ConstructGenerator, EndpointGenerator, OPENAPI_OUTPUT_PATH, generateOpenApi, openapiCommand, resolveOpenApiConfig } from "./openapi-BfFlOBCG.mjs";
4
+ import { generateReactQueryCommand } from "./openapi-react-query-B6XTeGqS.mjs";
5
+ import { maskPassword, readStageSecrets, secretsExist, setCustomSecret, writeStageSecrets } from "./storage-C9PU_30f.mjs";
6
+ import { createRequire } from "node:module";
7
+ import { existsSync, mkdirSync } from "node:fs";
8
+ import { dirname, join, parse, relative, resolve } from "node:path";
6
9
  import { Command } from "commander";
7
- import { mkdir, writeFile } from "node:fs/promises";
8
- import { dirname, join as join$1, relative as relative$1, resolve } from "node:path";
10
+ import { stdin, stdout } from "node:process";
11
+ import * as readline from "node:readline/promises";
12
+ import { mkdir, readFile, writeFile } from "node:fs/promises";
13
+ import { homedir } from "node:os";
9
14
  import { execSync, spawn } from "node:child_process";
10
- import { existsSync } from "node:fs";
11
15
  import { createServer } from "node:net";
12
16
  import chokidar from "chokidar";
13
17
  import { config } from "dotenv";
@@ -16,10 +20,15 @@ import { Cron } from "@geekmidas/constructs/crons";
16
20
  import { Function } from "@geekmidas/constructs/functions";
17
21
  import { Subscriber } from "@geekmidas/constructs/subscribers";
18
22
  import prompts from "prompts";
23
+ import { randomBytes } from "node:crypto";
19
24
 
25
+ //#region rolldown:runtime
26
+ var __require = /* @__PURE__ */ createRequire(import.meta.url);
27
+
28
+ //#endregion
20
29
  //#region package.json
21
30
  var name = "@geekmidas/cli";
22
- var version = "0.9.0";
31
+ var version = "0.12.0";
23
32
  var description = "CLI tools for building Lambda handlers, server applications, and generating OpenAPI specs";
24
33
  var private$1 = false;
25
34
  var type = "module";
@@ -99,6 +108,227 @@ var package_default = {
99
108
  peerDependenciesMeta
100
109
  };
101
110
 
111
+ //#endregion
112
+ //#region src/auth/credentials.ts
113
+ /**
114
+ * Get the path to the credentials directory
115
+ */
116
+ function getCredentialsDir(options) {
117
+ const root = options?.root ?? homedir();
118
+ return join(root, ".gkm");
119
+ }
120
+ /**
121
+ * Get the path to the credentials file
122
+ */
123
+ function getCredentialsPath(options) {
124
+ return join(getCredentialsDir(options), "credentials.json");
125
+ }
126
+ /**
127
+ * Ensure the credentials directory exists
128
+ */
129
+ function ensureCredentialsDir(options) {
130
+ const dir = getCredentialsDir(options);
131
+ if (!existsSync(dir)) mkdirSync(dir, {
132
+ recursive: true,
133
+ mode: 448
134
+ });
135
+ }
136
+ /**
137
+ * Read stored credentials from disk
138
+ */
139
+ async function readCredentials(options) {
140
+ const path = getCredentialsPath(options);
141
+ if (!existsSync(path)) return {};
142
+ try {
143
+ const content = await readFile(path, "utf-8");
144
+ return JSON.parse(content);
145
+ } catch {
146
+ return {};
147
+ }
148
+ }
149
+ /**
150
+ * Write credentials to disk
151
+ */
152
+ async function writeCredentials(credentials, options) {
153
+ ensureCredentialsDir(options);
154
+ const path = getCredentialsPath(options);
155
+ await writeFile(path, JSON.stringify(credentials, null, 2), { mode: 384 });
156
+ }
157
+ /**
158
+ * Store Dokploy credentials
159
+ */
160
+ async function storeDokployCredentials(token, endpoint, options) {
161
+ const credentials = await readCredentials(options);
162
+ credentials.dokploy = {
163
+ token,
164
+ endpoint,
165
+ storedAt: (/* @__PURE__ */ new Date()).toISOString()
166
+ };
167
+ await writeCredentials(credentials, options);
168
+ }
169
+ /**
170
+ * Get stored Dokploy credentials
171
+ */
172
+ async function getDokployCredentials(options) {
173
+ const credentials = await readCredentials(options);
174
+ if (!credentials.dokploy) return null;
175
+ return {
176
+ token: credentials.dokploy.token,
177
+ endpoint: credentials.dokploy.endpoint
178
+ };
179
+ }
180
+ /**
181
+ * Remove Dokploy credentials
182
+ */
183
+ async function removeDokployCredentials(options) {
184
+ const credentials = await readCredentials(options);
185
+ if (!credentials.dokploy) return false;
186
+ delete credentials.dokploy;
187
+ await writeCredentials(credentials, options);
188
+ return true;
189
+ }
190
+ /**
191
+ * Get Dokploy API token, checking stored credentials first, then environment
192
+ */
193
+ async function getDokployToken(options) {
194
+ const envToken = process.env.DOKPLOY_API_TOKEN;
195
+ if (envToken) return envToken;
196
+ const stored = await getDokployCredentials(options);
197
+ if (stored) return stored.token;
198
+ return null;
199
+ }
200
+
201
+ //#endregion
202
+ //#region src/auth/index.ts
203
+ const logger$9 = console;
204
+ /**
205
+ * Validate Dokploy token by making a test API call
206
+ */
207
+ async function validateDokployToken(endpoint, token) {
208
+ try {
209
+ const response = await fetch(`${endpoint}/api/project.all`, {
210
+ method: "GET",
211
+ headers: {
212
+ "Content-Type": "application/json",
213
+ Authorization: `Bearer ${token}`
214
+ }
215
+ });
216
+ return response.ok;
217
+ } catch {
218
+ return false;
219
+ }
220
+ }
221
+ /**
222
+ * Prompt for input (handles both TTY and non-TTY)
223
+ */
224
+ async function prompt(message, hidden = false) {
225
+ if (!process.stdin.isTTY) throw new Error("Interactive input required. Please provide --token option.");
226
+ const rl = readline.createInterface({
227
+ input: stdin,
228
+ output: stdout
229
+ });
230
+ try {
231
+ if (hidden) {
232
+ process.stdout.write(message);
233
+ return new Promise((resolve$1) => {
234
+ let value = "";
235
+ const onData = (char) => {
236
+ const c = char.toString();
237
+ if (c === "\n" || c === "\r") {
238
+ process.stdin.removeListener("data", onData);
239
+ process.stdin.setRawMode(false);
240
+ process.stdout.write("\n");
241
+ resolve$1(value);
242
+ } else if (c === "") process.exit(1);
243
+ else if (c === "" || c === "\b") {
244
+ if (value.length > 0) value = value.slice(0, -1);
245
+ } else value += c;
246
+ };
247
+ process.stdin.setRawMode(true);
248
+ process.stdin.resume();
249
+ process.stdin.on("data", onData);
250
+ });
251
+ } else return await rl.question(message);
252
+ } finally {
253
+ rl.close();
254
+ }
255
+ }
256
+ /**
257
+ * Login to a service
258
+ */
259
+ async function loginCommand(options) {
260
+ const { service, token: providedToken, endpoint: providedEndpoint } = options;
261
+ if (service === "dokploy") {
262
+ logger$9.log("\n🔐 Logging in to Dokploy...\n");
263
+ let endpoint = providedEndpoint;
264
+ if (!endpoint) endpoint = await prompt("Dokploy URL (e.g., https://dokploy.example.com): ");
265
+ endpoint = endpoint.replace(/\/$/, "");
266
+ try {
267
+ new URL(endpoint);
268
+ } catch {
269
+ logger$9.error("Invalid URL format");
270
+ process.exit(1);
271
+ }
272
+ let token = providedToken;
273
+ if (!token) {
274
+ logger$9.log(`\nGenerate a token at: ${endpoint}/settings/profile\n`);
275
+ token = await prompt("API Token: ", true);
276
+ }
277
+ if (!token) {
278
+ logger$9.error("Token is required");
279
+ process.exit(1);
280
+ }
281
+ logger$9.log("\nValidating credentials...");
282
+ const isValid = await validateDokployToken(endpoint, token);
283
+ if (!isValid) {
284
+ logger$9.error("\n✗ Invalid credentials. Please check your token and try again.");
285
+ process.exit(1);
286
+ }
287
+ await storeDokployCredentials(token, endpoint);
288
+ logger$9.log("\n✓ Successfully logged in to Dokploy!");
289
+ logger$9.log(` Endpoint: ${endpoint}`);
290
+ logger$9.log(` Credentials stored in: ${getCredentialsPath()}`);
291
+ logger$9.log("\nYou can now use deploy commands without setting DOKPLOY_API_TOKEN.");
292
+ }
293
+ }
294
+ /**
295
+ * Logout from a service
296
+ */
297
+ async function logoutCommand(options) {
298
+ const { service = "dokploy" } = options;
299
+ if (service === "all") {
300
+ const dokployRemoved = await removeDokployCredentials();
301
+ if (dokployRemoved) logger$9.log("\n✓ Logged out from all services");
302
+ else logger$9.log("\nNo stored credentials found");
303
+ return;
304
+ }
305
+ if (service === "dokploy") {
306
+ const removed = await removeDokployCredentials();
307
+ if (removed) logger$9.log("\n✓ Logged out from Dokploy");
308
+ else logger$9.log("\nNo Dokploy credentials found");
309
+ }
310
+ }
311
+ /**
312
+ * Show current login status
313
+ */
314
+ async function whoamiCommand() {
315
+ logger$9.log("\n📋 Current credentials:\n");
316
+ const dokploy = await getDokployCredentials();
317
+ if (dokploy) {
318
+ logger$9.log(" Dokploy:");
319
+ logger$9.log(` Endpoint: ${dokploy.endpoint}`);
320
+ logger$9.log(` Token: ${maskToken(dokploy.token)}`);
321
+ } else logger$9.log(" Dokploy: Not logged in");
322
+ logger$9.log(`\n Credentials file: ${getCredentialsPath()}`);
323
+ }
324
+ /**
325
+ * Mask a token for display
326
+ */
327
+ function maskToken(token) {
328
+ if (token.length <= 8) return "****";
329
+ return `${token.slice(0, 4)}...${token.slice(-4)}`;
330
+ }
331
+
102
332
  //#endregion
103
333
  //#region src/build/providerResolver.ts
104
334
  /**
@@ -175,22 +405,22 @@ function isEnabled(config$1) {
175
405
  var CronGenerator = class extends ConstructGenerator {
176
406
  async build(context, constructs, outputDir, options) {
177
407
  const provider = options?.provider || "aws-lambda";
178
- const logger$3 = console;
408
+ const logger$10 = console;
179
409
  const cronInfos = [];
180
410
  if (constructs.length === 0 || provider !== "aws-lambda") return cronInfos;
181
- const cronsDir = join$1(outputDir, "crons");
411
+ const cronsDir = join(outputDir, "crons");
182
412
  await mkdir(cronsDir, { recursive: true });
183
413
  for (const { key, construct, path } of constructs) {
184
414
  const handlerFile = await this.generateCronHandler(cronsDir, path.relative, key, context);
185
415
  cronInfos.push({
186
416
  name: key,
187
- handler: relative$1(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
417
+ handler: relative(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
188
418
  schedule: construct.schedule || "rate(1 hour)",
189
419
  timeout: construct.timeout,
190
420
  memorySize: construct.memorySize,
191
421
  environment: await construct.getEnvironment()
192
422
  });
193
- logger$3.log(`Generated cron handler: ${key}`);
423
+ logger$10.log(`Generated cron handler: ${key}`);
194
424
  }
195
425
  return cronInfos;
196
426
  }
@@ -199,11 +429,11 @@ var CronGenerator = class extends ConstructGenerator {
199
429
  }
200
430
  async generateCronHandler(outputDir, sourceFile, exportName, context) {
201
431
  const handlerFileName = `${exportName}.ts`;
202
- const handlerPath = join$1(outputDir, handlerFileName);
203
- const relativePath = relative$1(dirname(handlerPath), sourceFile);
432
+ const handlerPath = join(outputDir, handlerFileName);
433
+ const relativePath = relative(dirname(handlerPath), sourceFile);
204
434
  const importPath = relativePath.replace(/\.ts$/, ".js");
205
- const relativeEnvParserPath = relative$1(dirname(handlerPath), context.envParserPath);
206
- const relativeLoggerPath = relative$1(dirname(handlerPath), context.loggerPath);
435
+ const relativeEnvParserPath = relative(dirname(handlerPath), context.envParserPath);
436
+ const relativeLoggerPath = relative(dirname(handlerPath), context.loggerPath);
207
437
  const content = `import { AWSScheduledFunction } from '@geekmidas/constructs/crons';
208
438
  import { ${exportName} } from '${importPath}';
209
439
  import ${context.envParserImportPattern} from '${relativeEnvParserPath}';
@@ -226,31 +456,31 @@ var FunctionGenerator = class extends ConstructGenerator {
226
456
  }
227
457
  async build(context, constructs, outputDir, options) {
228
458
  const provider = options?.provider || "aws-lambda";
229
- const logger$3 = console;
459
+ const logger$10 = console;
230
460
  const functionInfos = [];
231
461
  if (constructs.length === 0 || provider !== "aws-lambda") return functionInfos;
232
- const functionsDir = join$1(outputDir, "functions");
462
+ const functionsDir = join(outputDir, "functions");
233
463
  await mkdir(functionsDir, { recursive: true });
234
464
  for (const { key, construct, path } of constructs) {
235
465
  const handlerFile = await this.generateFunctionHandler(functionsDir, path.relative, key, context);
236
466
  functionInfos.push({
237
467
  name: key,
238
- handler: relative$1(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
468
+ handler: relative(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
239
469
  timeout: construct.timeout,
240
470
  memorySize: construct.memorySize,
241
471
  environment: await construct.getEnvironment()
242
472
  });
243
- logger$3.log(`Generated function handler: ${key}`);
473
+ logger$10.log(`Generated function handler: ${key}`);
244
474
  }
245
475
  return functionInfos;
246
476
  }
247
477
  async generateFunctionHandler(outputDir, sourceFile, exportName, context) {
248
478
  const handlerFileName = `${exportName}.ts`;
249
- const handlerPath = join$1(outputDir, handlerFileName);
250
- const relativePath = relative$1(dirname(handlerPath), sourceFile);
479
+ const handlerPath = join(outputDir, handlerFileName);
480
+ const relativePath = relative(dirname(handlerPath), sourceFile);
251
481
  const importPath = relativePath.replace(/\.ts$/, ".js");
252
- const relativeEnvParserPath = relative$1(dirname(handlerPath), context.envParserPath);
253
- const relativeLoggerPath = relative$1(dirname(handlerPath), context.loggerPath);
482
+ const relativeEnvParserPath = relative(dirname(handlerPath), context.envParserPath);
483
+ const relativeLoggerPath = relative(dirname(handlerPath), context.loggerPath);
254
484
  const content = `import { AWSLambdaFunction } from '@geekmidas/constructs/functions';
255
485
  import { ${exportName} } from '${importPath}';
256
486
  import ${context.envParserImportPattern} from '${relativeEnvParserPath}';
@@ -273,37 +503,37 @@ var SubscriberGenerator = class extends ConstructGenerator {
273
503
  }
274
504
  async build(context, constructs, outputDir, options) {
275
505
  const provider = options?.provider || "aws-lambda";
276
- const logger$3 = console;
506
+ const logger$10 = console;
277
507
  const subscriberInfos = [];
278
508
  if (provider === "server") {
279
509
  await this.generateServerSubscribersFile(outputDir, constructs);
280
- logger$3.log(`Generated server subscribers file with ${constructs.length} subscribers (polling mode)`);
510
+ logger$10.log(`Generated server subscribers file with ${constructs.length} subscribers (polling mode)`);
281
511
  return subscriberInfos;
282
512
  }
283
513
  if (constructs.length === 0) return subscriberInfos;
284
514
  if (provider !== "aws-lambda") return subscriberInfos;
285
- const subscribersDir = join$1(outputDir, "subscribers");
515
+ const subscribersDir = join(outputDir, "subscribers");
286
516
  await mkdir(subscribersDir, { recursive: true });
287
517
  for (const { key, construct, path } of constructs) {
288
518
  const handlerFile = await this.generateSubscriberHandler(subscribersDir, path.relative, key, construct, context);
289
519
  subscriberInfos.push({
290
520
  name: key,
291
- handler: relative$1(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
521
+ handler: relative(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
292
522
  subscribedEvents: construct.subscribedEvents || [],
293
523
  timeout: construct.timeout,
294
524
  memorySize: construct.memorySize,
295
525
  environment: await construct.getEnvironment()
296
526
  });
297
- logger$3.log(`Generated subscriber handler: ${key}`);
527
+ logger$10.log(`Generated subscriber handler: ${key}`);
298
528
  }
299
529
  return subscriberInfos;
300
530
  }
301
531
  async generateSubscriberHandler(outputDir, sourceFile, exportName, _subscriber, context) {
302
532
  const handlerFileName = `${exportName}.ts`;
303
- const handlerPath = join$1(outputDir, handlerFileName);
304
- const relativePath = relative$1(dirname(handlerPath), sourceFile);
533
+ const handlerPath = join(outputDir, handlerFileName);
534
+ const relativePath = relative(dirname(handlerPath), sourceFile);
305
535
  const importPath = relativePath.replace(/\.ts$/, ".js");
306
- const relativeEnvParserPath = relative$1(dirname(handlerPath), context.envParserPath);
536
+ const relativeEnvParserPath = relative(dirname(handlerPath), context.envParserPath);
307
537
  const content = `import { AWSLambdaSubscriber } from '@geekmidas/constructs/aws';
308
538
  import { ${exportName} } from '${importPath}';
309
539
  import ${context.envParserImportPattern} from '${relativeEnvParserPath}';
@@ -318,13 +548,13 @@ export const handler = adapter.handler;
318
548
  async generateServerSubscribersFile(outputDir, subscribers) {
319
549
  await mkdir(outputDir, { recursive: true });
320
550
  const subscribersFileName = "subscribers.ts";
321
- const subscribersPath = join$1(outputDir, subscribersFileName);
551
+ const subscribersPath = join(outputDir, subscribersFileName);
322
552
  const importsByFile = /* @__PURE__ */ new Map();
323
553
  for (const { path, key } of subscribers) {
324
- const relativePath = relative$1(dirname(subscribersPath), path.relative);
554
+ const relativePath = relative(dirname(subscribersPath), path.relative);
325
555
  const importPath = relativePath.replace(/\.ts$/, ".js");
326
556
  if (!importsByFile.has(importPath)) importsByFile.set(importPath, []);
327
- importsByFile.get(importPath).push(key);
557
+ importsByFile.get(importPath)?.push(key);
328
558
  }
329
559
  const imports = Array.from(importsByFile.entries()).map(([importPath, exports$1]) => `import { ${exports$1.join(", ")} } from '${importPath}';`).join("\n");
330
560
  const allExportNames = subscribers.map(({ key }) => key);
@@ -379,7 +609,7 @@ export async function setupSubscribers(
379
609
  return;
380
610
  }
381
611
 
382
- const serviceDiscovery = ServiceDiscovery.getInstance(logger, envParser);
612
+ const serviceDiscovery = ServiceDiscovery.getInstance(envParser);
383
613
 
384
614
  // Create connection once, outside the loop (more efficient)
385
615
  // EventConnectionFactory automatically determines the right connection type
@@ -460,7 +690,7 @@ export async function setupSubscribers(
460
690
 
461
691
  //#endregion
462
692
  //#region src/dev/index.ts
463
- const logger$2 = console;
693
+ const logger$8 = console;
464
694
  /**
465
695
  * Load environment files
466
696
  * @internal Exported for testing
@@ -511,7 +741,7 @@ async function findAvailablePort(preferredPort, maxAttempts = 10) {
511
741
  for (let i = 0; i < maxAttempts; i++) {
512
742
  const port = preferredPort + i;
513
743
  if (await isPortAvailable(port)) return port;
514
- logger$2.log(`⚠️ Port ${port} is in use, trying ${port + 1}...`);
744
+ logger$8.log(`⚠️ Port ${port} is in use, trying ${port + 1}...`);
515
745
  }
516
746
  throw new Error(`Could not find an available port after trying ${maxAttempts} ports starting from ${preferredPort}`);
517
747
  }
@@ -581,33 +811,61 @@ function normalizeHooksConfig(config$1) {
581
811
  const resolvedPath = resolve(process.cwd(), serverPath);
582
812
  return { serverHooksPath: resolvedPath };
583
813
  }
814
+ /**
815
+ * Normalize production configuration
816
+ * @internal Exported for testing
817
+ */
818
+ function normalizeProductionConfig(cliProduction, configProduction) {
819
+ if (!cliProduction) return void 0;
820
+ const config$1 = configProduction ?? {};
821
+ return {
822
+ enabled: true,
823
+ bundle: config$1.bundle ?? true,
824
+ minify: config$1.minify ?? true,
825
+ healthCheck: config$1.healthCheck ?? "/health",
826
+ gracefulShutdown: config$1.gracefulShutdown ?? true,
827
+ external: config$1.external ?? [],
828
+ subscribers: config$1.subscribers ?? "exclude",
829
+ openapi: config$1.openapi ?? false,
830
+ optimizedHandlers: config$1.optimizedHandlers ?? true
831
+ };
832
+ }
833
+ /**
834
+ * Get production config from GkmConfig
835
+ * @internal
836
+ */
837
+ function getProductionConfigFromGkm(config$1) {
838
+ const serverConfig = config$1.providers?.server;
839
+ if (typeof serverConfig === "object") return serverConfig.production;
840
+ return void 0;
841
+ }
584
842
  async function devCommand(options) {
585
843
  const defaultEnv = loadEnvFiles(".env");
586
- if (defaultEnv.loaded.length > 0) logger$2.log(`📦 Loaded env: ${defaultEnv.loaded.join(", ")}`);
844
+ if (defaultEnv.loaded.length > 0) logger$8.log(`📦 Loaded env: ${defaultEnv.loaded.join(", ")}`);
587
845
  const config$1 = await loadConfig();
588
846
  if (config$1.env) {
589
847
  const { loaded, missing } = loadEnvFiles(config$1.env);
590
- if (loaded.length > 0) logger$2.log(`📦 Loaded env: ${loaded.join(", ")}`);
591
- if (missing.length > 0) logger$2.warn(`⚠️ Missing env files: ${missing.join(", ")}`);
848
+ if (loaded.length > 0) logger$8.log(`📦 Loaded env: ${loaded.join(", ")}`);
849
+ if (missing.length > 0) logger$8.warn(`⚠️ Missing env files: ${missing.join(", ")}`);
592
850
  }
593
851
  const resolved = resolveProviders(config$1, { provider: "server" });
594
- logger$2.log("🚀 Starting development server...");
595
- logger$2.log(`Loading routes from: ${config$1.routes}`);
596
- if (config$1.functions) logger$2.log(`Loading functions from: ${config$1.functions}`);
597
- if (config$1.crons) logger$2.log(`Loading crons from: ${config$1.crons}`);
598
- if (config$1.subscribers) logger$2.log(`Loading subscribers from: ${config$1.subscribers}`);
599
- logger$2.log(`Using envParser: ${config$1.envParser}`);
852
+ logger$8.log("🚀 Starting development server...");
853
+ logger$8.log(`Loading routes from: ${config$1.routes}`);
854
+ if (config$1.functions) logger$8.log(`Loading functions from: ${config$1.functions}`);
855
+ if (config$1.crons) logger$8.log(`Loading crons from: ${config$1.crons}`);
856
+ if (config$1.subscribers) logger$8.log(`Loading subscribers from: ${config$1.subscribers}`);
857
+ logger$8.log(`Using envParser: ${config$1.envParser}`);
600
858
  const { path: envParserPath, importPattern: envParserImportPattern } = parseModuleConfig(config$1.envParser, "envParser");
601
859
  const { path: loggerPath, importPattern: loggerImportPattern } = parseModuleConfig(config$1.logger, "logger");
602
860
  const telescope = normalizeTelescopeConfig(config$1.telescope);
603
- if (telescope) logger$2.log(`🔭 Telescope enabled at ${telescope.path}`);
861
+ if (telescope) logger$8.log(`🔭 Telescope enabled at ${telescope.path}`);
604
862
  const studio = normalizeStudioConfig(config$1.studio);
605
- if (studio) logger$2.log(`🗄️ Studio enabled at ${studio.path}`);
863
+ if (studio) logger$8.log(`🗄️ Studio enabled at ${studio.path}`);
606
864
  const hooks = normalizeHooksConfig(config$1.hooks);
607
- if (hooks) logger$2.log(`🪝 Server hooks enabled from ${config$1.hooks?.server}`);
865
+ if (hooks) logger$8.log(`🪝 Server hooks enabled from ${config$1.hooks?.server}`);
608
866
  const openApiConfig = resolveOpenApiConfig(config$1);
609
867
  const enableOpenApi = openApiConfig.enabled || resolved.enableOpenApi;
610
- if (enableOpenApi) logger$2.log(`📄 OpenAPI output: ${OPENAPI_OUTPUT_PATH}`);
868
+ if (enableOpenApi) logger$8.log(`📄 OpenAPI output: ${OPENAPI_OUTPUT_PATH}`);
611
869
  const buildContext = {
612
870
  envParserPath,
613
871
  envParserImportPattern,
@@ -622,9 +880,10 @@ async function devCommand(options) {
622
880
  const runtime = config$1.runtime ?? "node";
623
881
  const devServer = new DevServer(resolved.providers[0], options.port || 3e3, options.portExplicit ?? false, enableOpenApi, telescope, studio, runtime);
624
882
  await devServer.start();
625
- const envParserFile = config$1.envParser.split("#")[0];
626
- const loggerFile = config$1.logger.split("#")[0];
627
- const hooksFile = config$1.hooks?.server?.split("#")[0];
883
+ const envParserFile = config$1.envParser.split("#")[0] ?? config$1.envParser;
884
+ const loggerFile = config$1.logger.split("#")[0] ?? config$1.logger;
885
+ const hooksFileParts = config$1.hooks?.server?.split("#");
886
+ const hooksFile = hooksFileParts?.[0];
628
887
  const watchPatterns = [
629
888
  config$1.routes,
630
889
  ...config$1.functions ? [config$1.functions] : [],
@@ -633,41 +892,44 @@ async function devCommand(options) {
633
892
  envParserFile.endsWith(".ts") ? envParserFile : `${envParserFile}.ts`,
634
893
  loggerFile.endsWith(".ts") ? loggerFile : `${loggerFile}.ts`,
635
894
  ...hooksFile ? [hooksFile.endsWith(".ts") ? hooksFile : `${hooksFile}.ts`] : []
636
- ].flat();
895
+ ].flat().filter((p) => typeof p === "string");
637
896
  const normalizedPatterns = watchPatterns.map((p) => p.startsWith("./") ? p.slice(2) : p);
638
- logger$2.log(`👀 Watching for changes in: ${normalizedPatterns.join(", ")}`);
897
+ logger$8.log(`👀 Watching for changes in: ${normalizedPatterns.join(", ")}`);
639
898
  const resolvedFiles = await fg(normalizedPatterns, {
640
899
  cwd: process.cwd(),
641
900
  absolute: false,
642
901
  onlyFiles: true
643
902
  });
644
- const dirsToWatch = [...new Set(resolvedFiles.map((f) => f.split("/").slice(0, -1).join("/")))];
645
- logger$2.log(`📁 Found ${resolvedFiles.length} files in ${dirsToWatch.length} directories`);
903
+ const dirsToWatch = [...new Set(resolvedFiles.map((f) => {
904
+ const parts = f.split("/");
905
+ return parts.slice(0, -1).join("/");
906
+ }))];
907
+ logger$8.log(`📁 Found ${resolvedFiles.length} files in ${dirsToWatch.length} directories`);
646
908
  const watcher = chokidar.watch([...resolvedFiles, ...dirsToWatch], {
647
- ignored: /(^|[\/\\])\../,
909
+ ignored: /(^|[/\\])\../,
648
910
  persistent: true,
649
911
  ignoreInitial: true,
650
912
  cwd: process.cwd()
651
913
  });
652
914
  watcher.on("ready", () => {
653
- logger$2.log("🔍 File watcher ready");
915
+ logger$8.log("🔍 File watcher ready");
654
916
  });
655
917
  watcher.on("error", (error) => {
656
- logger$2.error("❌ Watcher error:", error);
918
+ logger$8.error("❌ Watcher error:", error);
657
919
  });
658
920
  let rebuildTimeout = null;
659
921
  watcher.on("change", async (path) => {
660
- logger$2.log(`📝 File changed: ${path}`);
922
+ logger$8.log(`📝 File changed: ${path}`);
661
923
  if (rebuildTimeout) clearTimeout(rebuildTimeout);
662
924
  rebuildTimeout = setTimeout(async () => {
663
925
  try {
664
- logger$2.log("🔄 Rebuilding...");
926
+ logger$8.log("🔄 Rebuilding...");
665
927
  await buildServer(config$1, buildContext, resolved.providers[0], enableOpenApi);
666
928
  if (enableOpenApi) await generateOpenApi(config$1, { silent: true });
667
- logger$2.log("✅ Rebuild complete, restarting server...");
929
+ logger$8.log("✅ Rebuild complete, restarting server...");
668
930
  await devServer.restart();
669
931
  } catch (error) {
670
- logger$2.error("❌ Rebuild failed:", error.message);
932
+ logger$8.error("❌ Rebuild failed:", error.message);
671
933
  }
672
934
  }, 300);
673
935
  });
@@ -675,9 +937,9 @@ async function devCommand(options) {
675
937
  const shutdown = () => {
676
938
  if (isShuttingDown) return;
677
939
  isShuttingDown = true;
678
- logger$2.log("\n🛑 Shutting down...");
940
+ logger$8.log("\n🛑 Shutting down...");
679
941
  Promise.all([watcher.close(), devServer.stop()]).catch((err) => {
680
- logger$2.error("Error during shutdown:", err);
942
+ logger$8.error("Error during shutdown:", err);
681
943
  }).finally(() => {
682
944
  process.exit(0);
683
945
  });
@@ -696,7 +958,7 @@ async function buildServer(config$1, context, provider, enableOpenApi) {
696
958
  config$1.crons ? cronGenerator.load(config$1.crons) : [],
697
959
  config$1.subscribers ? subscriberGenerator.load(config$1.subscribers) : []
698
960
  ]);
699
- const outputDir = join$1(process.cwd(), ".gkm", provider);
961
+ const outputDir = join(process.cwd(), ".gkm", provider);
700
962
  await mkdir(outputDir, { recursive: true });
701
963
  await Promise.all([
702
964
  endpointGenerator.build(context, allEndpoints, outputDir, {
@@ -730,11 +992,11 @@ var DevServer = class {
730
992
  this.actualPort = this.requestedPort;
731
993
  } else {
732
994
  this.actualPort = await findAvailablePort(this.requestedPort);
733
- if (this.actualPort !== this.requestedPort) logger$2.log(`ℹ️ Port ${this.requestedPort} was in use, using port ${this.actualPort} instead`);
995
+ if (this.actualPort !== this.requestedPort) logger$8.log(`ℹ️ Port ${this.requestedPort} was in use, using port ${this.actualPort} instead`);
734
996
  }
735
- const serverEntryPath = join$1(process.cwd(), ".gkm", this.provider, "server.ts");
997
+ const serverEntryPath = join(process.cwd(), ".gkm", this.provider, "server.ts");
736
998
  await this.createServerEntry();
737
- logger$2.log(`\n✨ Starting server on port ${this.actualPort}...`);
999
+ logger$8.log(`\n✨ Starting server on port ${this.actualPort}...`);
738
1000
  this.serverProcess = spawn("npx", [
739
1001
  "tsx",
740
1002
  serverEntryPath,
@@ -750,18 +1012,18 @@ var DevServer = class {
750
1012
  });
751
1013
  this.isRunning = true;
752
1014
  this.serverProcess.on("error", (error) => {
753
- logger$2.error("❌ Server error:", error);
1015
+ logger$8.error("❌ Server error:", error);
754
1016
  });
755
1017
  this.serverProcess.on("exit", (code, signal) => {
756
- if (code !== null && code !== 0 && signal !== "SIGTERM") logger$2.error(`❌ Server exited with code ${code}`);
1018
+ if (code !== null && code !== 0 && signal !== "SIGTERM") logger$8.error(`❌ Server exited with code ${code}`);
757
1019
  this.isRunning = false;
758
1020
  });
759
1021
  await new Promise((resolve$1) => setTimeout(resolve$1, 1e3));
760
1022
  if (this.isRunning) {
761
- logger$2.log(`\n🎉 Server running at http://localhost:${this.actualPort}`);
762
- if (this.enableOpenApi) logger$2.log(`📚 API Docs available at http://localhost:${this.actualPort}/__docs`);
763
- if (this.telescope) logger$2.log(`🔭 Telescope available at http://localhost:${this.actualPort}${this.telescope.path}`);
764
- if (this.studio) logger$2.log(`🗄️ Studio available at http://localhost:${this.actualPort}${this.studio.path}`);
1023
+ logger$8.log(`\n🎉 Server running at http://localhost:${this.actualPort}`);
1024
+ if (this.enableOpenApi) logger$8.log(`📚 API Docs available at http://localhost:${this.actualPort}/__docs`);
1025
+ if (this.telescope) logger$8.log(`🔭 Telescope available at http://localhost:${this.actualPort}${this.telescope.path}`);
1026
+ if (this.studio) logger$8.log(`🗄️ Studio available at http://localhost:${this.actualPort}${this.studio.path}`);
765
1027
  }
766
1028
  }
767
1029
  async stop() {
@@ -799,9 +1061,9 @@ var DevServer = class {
799
1061
  }
800
1062
  async createServerEntry() {
801
1063
  const { writeFile: writeFile$1 } = await import("node:fs/promises");
802
- const { relative: relative$2, dirname: dirname$1 } = await import("node:path");
803
- const serverPath = join$1(process.cwd(), ".gkm", this.provider, "server.ts");
804
- const relativeAppPath = relative$2(dirname$1(serverPath), join$1(dirname$1(serverPath), "app.js"));
1064
+ const { relative: relative$1, dirname: dirname$1 } = await import("node:path");
1065
+ const serverPath = join(process.cwd(), ".gkm", this.provider, "server.ts");
1066
+ const relativeAppPath = relative$1(dirname$1(serverPath), join(dirname$1(serverPath), "app.js"));
805
1067
  const serveCode = this.runtime === "bun" ? `Bun.serve({
806
1068
  port,
807
1069
  fetch: app.fetch,
@@ -821,7 +1083,7 @@ var DevServer = class {
821
1083
  * Development server entry point
822
1084
  * This file is auto-generated by 'gkm dev'
823
1085
  */
824
- import { createApp } from './${relativeAppPath.startsWith(".") ? relativeAppPath : "./" + relativeAppPath}';
1086
+ import { createApp } from './${relativeAppPath.startsWith(".") ? relativeAppPath : `./${relativeAppPath}`}';
825
1087
 
826
1088
  const port = process.argv.includes('--port')
827
1089
  ? Number.parseInt(process.argv[process.argv.indexOf('--port') + 1])
@@ -847,7 +1109,7 @@ start({
847
1109
 
848
1110
  //#endregion
849
1111
  //#region src/build/manifests.ts
850
- const logger$1 = console;
1112
+ const logger$7 = console;
851
1113
  async function generateAwsManifest(outputDir, routes, functions, crons, subscribers) {
852
1114
  const manifestDir = join(outputDir, "manifest");
853
1115
  await mkdir(manifestDir, { recursive: true });
@@ -872,8 +1134,8 @@ export type RoutePath = Route['path'];
872
1134
  `;
873
1135
  const manifestPath = join(manifestDir, "aws.ts");
874
1136
  await writeFile(manifestPath, content);
875
- logger$1.log(`Generated AWS manifest with ${awsRoutes.length} routes, ${functions.length} functions, ${crons.length} crons, ${subscribers.length} subscribers`);
876
- logger$1.log(`Manifest: ${relative(process.cwd(), manifestPath)}`);
1137
+ logger$7.log(`Generated AWS manifest with ${awsRoutes.length} routes, ${functions.length} functions, ${crons.length} crons, ${subscribers.length} subscribers`);
1138
+ logger$7.log(`Manifest: ${relative(process.cwd(), manifestPath)}`);
877
1139
  }
878
1140
  async function generateServerManifest(outputDir, appInfo, routes, subscribers) {
879
1141
  const manifestDir = join(outputDir, "manifest");
@@ -904,35 +1166,42 @@ export type RoutePath = Route['path'];
904
1166
  `;
905
1167
  const manifestPath = join(manifestDir, "server.ts");
906
1168
  await writeFile(manifestPath, content);
907
- logger$1.log(`Generated server manifest with ${serverRoutes.length} routes, ${serverSubscribers.length} subscribers`);
908
- logger$1.log(`Manifest: ${relative(process.cwd(), manifestPath)}`);
1169
+ logger$7.log(`Generated server manifest with ${serverRoutes.length} routes, ${serverSubscribers.length} subscribers`);
1170
+ logger$7.log(`Manifest: ${relative(process.cwd(), manifestPath)}`);
909
1171
  }
910
1172
 
911
1173
  //#endregion
912
1174
  //#region src/build/index.ts
913
- const logger = console;
1175
+ const logger$6 = console;
914
1176
  async function buildCommand(options) {
915
1177
  const config$1 = await loadConfig();
916
1178
  const resolved = resolveProviders(config$1, options);
917
- logger.log(`Building with providers: ${resolved.providers.join(", ")}`);
918
- logger.log(`Loading routes from: ${config$1.routes}`);
919
- if (config$1.functions) logger.log(`Loading functions from: ${config$1.functions}`);
920
- if (config$1.crons) logger.log(`Loading crons from: ${config$1.crons}`);
921
- if (config$1.subscribers) logger.log(`Loading subscribers from: ${config$1.subscribers}`);
922
- logger.log(`Using envParser: ${config$1.envParser}`);
1179
+ const productionConfigFromGkm = getProductionConfigFromGkm(config$1);
1180
+ const production = normalizeProductionConfig(options.production ?? false, productionConfigFromGkm);
1181
+ if (production) logger$6.log(`🏭 Building for PRODUCTION`);
1182
+ logger$6.log(`Building with providers: ${resolved.providers.join(", ")}`);
1183
+ logger$6.log(`Loading routes from: ${config$1.routes}`);
1184
+ if (config$1.functions) logger$6.log(`Loading functions from: ${config$1.functions}`);
1185
+ if (config$1.crons) logger$6.log(`Loading crons from: ${config$1.crons}`);
1186
+ if (config$1.subscribers) logger$6.log(`Loading subscribers from: ${config$1.subscribers}`);
1187
+ logger$6.log(`Using envParser: ${config$1.envParser}`);
923
1188
  const { path: envParserPath, importPattern: envParserImportPattern } = parseModuleConfig(config$1.envParser, "envParser");
924
1189
  const { path: loggerPath, importPattern: loggerImportPattern } = parseModuleConfig(config$1.logger, "logger");
925
- const telescope = normalizeTelescopeConfig(config$1.telescope);
926
- if (telescope) logger.log(`🔭 Telescope enabled at ${telescope.path}`);
1190
+ const telescope = production ? void 0 : normalizeTelescopeConfig(config$1.telescope);
1191
+ if (telescope) logger$6.log(`🔭 Telescope enabled at ${telescope.path}`);
1192
+ const studio = production ? void 0 : normalizeStudioConfig(config$1.studio);
1193
+ if (studio) logger$6.log(`🗄️ Studio enabled at ${studio.path}`);
927
1194
  const hooks = normalizeHooksConfig(config$1.hooks);
928
- if (hooks) logger.log(`🪝 Server hooks enabled`);
1195
+ if (hooks) logger$6.log(`🪝 Server hooks enabled`);
929
1196
  const buildContext = {
930
1197
  envParserPath,
931
1198
  envParserImportPattern,
932
1199
  loggerPath,
933
1200
  loggerImportPattern,
934
1201
  telescope,
935
- hooks
1202
+ studio,
1203
+ hooks,
1204
+ production
936
1205
  };
937
1206
  const endpointGenerator = new EndpointGenerator();
938
1207
  const functionGenerator = new FunctionGenerator();
@@ -944,45 +1213,1243 @@ async function buildCommand(options) {
944
1213
  config$1.crons ? cronGenerator.load(config$1.crons) : [],
945
1214
  config$1.subscribers ? subscriberGenerator.load(config$1.subscribers) : []
946
1215
  ]);
947
- logger.log(`Found ${allEndpoints.length} endpoints`);
948
- logger.log(`Found ${allFunctions.length} functions`);
949
- logger.log(`Found ${allCrons.length} crons`);
950
- logger.log(`Found ${allSubscribers.length} subscribers`);
1216
+ logger$6.log(`Found ${allEndpoints.length} endpoints`);
1217
+ logger$6.log(`Found ${allFunctions.length} functions`);
1218
+ logger$6.log(`Found ${allCrons.length} crons`);
1219
+ logger$6.log(`Found ${allSubscribers.length} subscribers`);
951
1220
  if (allEndpoints.length === 0 && allFunctions.length === 0 && allCrons.length === 0 && allSubscribers.length === 0) {
952
- logger.log("No endpoints, functions, crons, or subscribers found to process");
1221
+ logger$6.log("No endpoints, functions, crons, or subscribers found to process");
1222
+ return {};
1223
+ }
1224
+ const rootOutputDir = join(process.cwd(), ".gkm");
1225
+ await mkdir(rootOutputDir, { recursive: true });
1226
+ let result = {};
1227
+ for (const provider of resolved.providers) {
1228
+ const providerResult = await buildForProvider(provider, buildContext, rootOutputDir, endpointGenerator, functionGenerator, cronGenerator, subscriberGenerator, allEndpoints, allFunctions, allCrons, allSubscribers, resolved.enableOpenApi, options.skipBundle ?? false, options.stage);
1229
+ if (providerResult.masterKey) result = providerResult;
1230
+ }
1231
+ return result;
1232
+ }
1233
+ async function buildForProvider(provider, context, rootOutputDir, endpointGenerator, functionGenerator, cronGenerator, subscriberGenerator, endpoints, functions, crons, subscribers, enableOpenApi, skipBundle, stage) {
1234
+ const outputDir = join(process.cwd(), ".gkm", provider);
1235
+ await mkdir(outputDir, { recursive: true });
1236
+ logger$6.log(`\nGenerating handlers for provider: ${provider}`);
1237
+ const [routes, functionInfos, cronInfos, subscriberInfos] = await Promise.all([
1238
+ endpointGenerator.build(context, endpoints, outputDir, {
1239
+ provider,
1240
+ enableOpenApi
1241
+ }),
1242
+ functionGenerator.build(context, functions, outputDir, { provider }),
1243
+ cronGenerator.build(context, crons, outputDir, { provider }),
1244
+ subscriberGenerator.build(context, subscribers, outputDir, { provider })
1245
+ ]);
1246
+ logger$6.log(`Generated ${routes.length} routes, ${functionInfos.length} functions, ${cronInfos.length} crons, ${subscriberInfos.length} subscribers for ${provider}`);
1247
+ if (provider === "server") {
1248
+ const routeMetadata = await Promise.all(endpoints.map(async ({ construct }) => ({
1249
+ path: construct._path,
1250
+ method: construct.method,
1251
+ handler: "",
1252
+ authorizer: construct.authorizer?.name ?? "none"
1253
+ })));
1254
+ const appInfo = {
1255
+ handler: relative(process.cwd(), join(outputDir, "app.ts")),
1256
+ endpoints: relative(process.cwd(), join(outputDir, "endpoints.ts"))
1257
+ };
1258
+ await generateServerManifest(rootOutputDir, appInfo, routeMetadata, subscriberInfos);
1259
+ let masterKey;
1260
+ if (context.production?.bundle && !skipBundle) {
1261
+ logger$6.log(`\n📦 Bundling production server...`);
1262
+ const { bundleServer } = await import("./bundler-DRXCw_YR.mjs");
1263
+ const bundleResult = await bundleServer({
1264
+ entryPoint: join(outputDir, "server.ts"),
1265
+ outputDir: join(outputDir, "dist"),
1266
+ minify: context.production.minify,
1267
+ sourcemap: false,
1268
+ external: context.production.external,
1269
+ stage
1270
+ });
1271
+ masterKey = bundleResult.masterKey;
1272
+ logger$6.log(`✅ Bundle complete: .gkm/server/dist/server.mjs`);
1273
+ if (masterKey) {
1274
+ logger$6.log(`\n🔐 Secrets encrypted for deployment`);
1275
+ logger$6.log(` Deploy with: GKM_MASTER_KEY=${masterKey}`);
1276
+ }
1277
+ }
1278
+ return { masterKey };
1279
+ } else await generateAwsManifest(rootOutputDir, routes, functionInfos, cronInfos, subscriberInfos);
1280
+ return {};
1281
+ }
1282
+
1283
+ //#endregion
1284
+ //#region src/deploy/docker.ts
1285
+ const logger$5 = console;
1286
+ /**
1287
+ * Get the full image reference
1288
+ */
1289
+ function getImageRef(registry, imageName, tag) {
1290
+ if (registry) return `${registry}/${imageName}:${tag}`;
1291
+ return `${imageName}:${tag}`;
1292
+ }
1293
+ /**
1294
+ * Build Docker image
1295
+ */
1296
+ async function buildImage(imageRef) {
1297
+ logger$5.log(`\n🔨 Building Docker image: ${imageRef}`);
1298
+ try {
1299
+ execSync(`DOCKER_BUILDKIT=1 docker build -f .gkm/docker/Dockerfile -t ${imageRef} .`, {
1300
+ cwd: process.cwd(),
1301
+ stdio: "inherit",
1302
+ env: {
1303
+ ...process.env,
1304
+ DOCKER_BUILDKIT: "1"
1305
+ }
1306
+ });
1307
+ logger$5.log(`✅ Image built: ${imageRef}`);
1308
+ } catch (error) {
1309
+ throw new Error(`Failed to build Docker image: ${error instanceof Error ? error.message : "Unknown error"}`);
1310
+ }
1311
+ }
1312
+ /**
1313
+ * Push Docker image to registry
1314
+ */
1315
+ async function pushImage(imageRef) {
1316
+ logger$5.log(`\n☁️ Pushing image: ${imageRef}`);
1317
+ try {
1318
+ execSync(`docker push ${imageRef}`, {
1319
+ cwd: process.cwd(),
1320
+ stdio: "inherit"
1321
+ });
1322
+ logger$5.log(`✅ Image pushed: ${imageRef}`);
1323
+ } catch (error) {
1324
+ throw new Error(`Failed to push Docker image: ${error instanceof Error ? error.message : "Unknown error"}`);
1325
+ }
1326
+ }
1327
+ /**
1328
+ * Deploy using Docker (build and optionally push image)
1329
+ */
1330
+ async function deployDocker(options) {
1331
+ const { stage, tag, skipPush, masterKey, config: config$1 } = options;
1332
+ const imageName = config$1.imageName ?? "app";
1333
+ const imageRef = getImageRef(config$1.registry, imageName, tag);
1334
+ await buildImage(imageRef);
1335
+ if (!skipPush) if (!config$1.registry) logger$5.warn("\n⚠️ No registry configured. Use --skip-push or configure docker.registry in gkm.config.ts");
1336
+ else await pushImage(imageRef);
1337
+ logger$5.log("\n✅ Docker deployment ready!");
1338
+ logger$5.log(`\n📋 Deployment details:`);
1339
+ logger$5.log(` Image: ${imageRef}`);
1340
+ logger$5.log(` Stage: ${stage}`);
1341
+ if (masterKey) {
1342
+ logger$5.log(`\n🔐 Deploy with this environment variable:`);
1343
+ logger$5.log(` GKM_MASTER_KEY=${masterKey}`);
1344
+ logger$5.log("\n Example docker run:");
1345
+ logger$5.log(` docker run -e GKM_MASTER_KEY=${masterKey} ${imageRef}`);
1346
+ }
1347
+ return {
1348
+ imageRef,
1349
+ masterKey
1350
+ };
1351
+ }
1352
+ /**
1353
+ * Resolve Docker deploy config from gkm config
1354
+ */
1355
+ function resolveDockerConfig$1(config$1) {
1356
+ return {
1357
+ registry: config$1.docker?.registry,
1358
+ imageName: config$1.docker?.imageName
1359
+ };
1360
+ }
1361
+
1362
+ //#endregion
1363
+ //#region src/deploy/dokploy.ts
1364
+ const logger$4 = console;
1365
+ /**
1366
+ * Get the Dokploy API token from stored credentials or environment
1367
+ */
1368
+ async function getApiToken$1() {
1369
+ const token = await getDokployToken();
1370
+ if (!token) throw new Error("Dokploy credentials not found.\nRun \"gkm login --service dokploy\" to authenticate, or set DOKPLOY_API_TOKEN.");
1371
+ return token;
1372
+ }
1373
+ /**
1374
+ * Make a request to the Dokploy API
1375
+ */
1376
+ async function dokployRequest$1(endpoint, baseUrl, token, body) {
1377
+ const url = `${baseUrl}/api/${endpoint}`;
1378
+ const response = await fetch(url, {
1379
+ method: "POST",
1380
+ headers: {
1381
+ "Content-Type": "application/json",
1382
+ Authorization: `Bearer ${token}`
1383
+ },
1384
+ body: JSON.stringify(body)
1385
+ });
1386
+ if (!response.ok) {
1387
+ let errorMessage = `Dokploy API error: ${response.status} ${response.statusText}`;
1388
+ try {
1389
+ const errorBody = await response.json();
1390
+ if (errorBody.message) errorMessage = `Dokploy API error: ${errorBody.message}`;
1391
+ if (errorBody.issues?.length) errorMessage += `\n Issues: ${errorBody.issues.map((i) => i.message).join(", ")}`;
1392
+ } catch {}
1393
+ throw new Error(errorMessage);
1394
+ }
1395
+ return response.json();
1396
+ }
1397
+ /**
1398
+ * Update application environment variables
1399
+ */
1400
+ async function updateEnvironment(baseUrl, token, applicationId, envVars) {
1401
+ logger$4.log(" Updating environment variables...");
1402
+ const envString = Object.entries(envVars).map(([key, value]) => `${key}=${value}`).join("\n");
1403
+ await dokployRequest$1("application.update", baseUrl, token, {
1404
+ applicationId,
1405
+ env: envString
1406
+ });
1407
+ logger$4.log(" ✓ Environment variables updated");
1408
+ }
1409
+ /**
1410
+ * Trigger application deployment
1411
+ */
1412
+ async function triggerDeploy(baseUrl, token, applicationId) {
1413
+ logger$4.log(" Triggering deployment...");
1414
+ await dokployRequest$1("application.deploy", baseUrl, token, { applicationId });
1415
+ logger$4.log(" ✓ Deployment triggered");
1416
+ }
1417
+ /**
1418
+ * Deploy to Dokploy
1419
+ */
1420
+ async function deployDokploy(options) {
1421
+ const { stage, imageRef, masterKey, config: config$1 } = options;
1422
+ logger$4.log(`\n🎯 Deploying to Dokploy...`);
1423
+ logger$4.log(` Endpoint: ${config$1.endpoint}`);
1424
+ logger$4.log(` Application: ${config$1.applicationId}`);
1425
+ const token = await getApiToken$1();
1426
+ const envVars = {};
1427
+ if (masterKey) envVars.GKM_MASTER_KEY = masterKey;
1428
+ if (Object.keys(envVars).length > 0) await updateEnvironment(config$1.endpoint, token, config$1.applicationId, envVars);
1429
+ await triggerDeploy(config$1.endpoint, token, config$1.applicationId);
1430
+ logger$4.log("\n✅ Dokploy deployment initiated!");
1431
+ logger$4.log(`\n📋 Deployment details:`);
1432
+ logger$4.log(` Image: ${imageRef}`);
1433
+ logger$4.log(` Stage: ${stage}`);
1434
+ logger$4.log(` Application ID: ${config$1.applicationId}`);
1435
+ if (masterKey) logger$4.log(`\n🔐 GKM_MASTER_KEY has been set in Dokploy environment`);
1436
+ const deploymentUrl = `${config$1.endpoint}/project/${config$1.projectId}`;
1437
+ logger$4.log(`\n🔗 View deployment: ${deploymentUrl}`);
1438
+ return {
1439
+ imageRef,
1440
+ masterKey,
1441
+ url: deploymentUrl
1442
+ };
1443
+ }
1444
+ /**
1445
+ * Validate Dokploy configuration
1446
+ */
1447
+ function validateDokployConfig(config$1) {
1448
+ if (!config$1) return false;
1449
+ const required = [
1450
+ "endpoint",
1451
+ "projectId",
1452
+ "applicationId"
1453
+ ];
1454
+ const missing = required.filter((key) => !config$1[key]);
1455
+ if (missing.length > 0) throw new Error(`Missing Dokploy configuration: ${missing.join(", ")}\nConfigure in gkm.config.ts:
1456
+ providers: {
1457
+ dokploy: {
1458
+ endpoint: 'https://dokploy.example.com',
1459
+ projectId: 'proj_xxx',
1460
+ applicationId: 'app_xxx',
1461
+ },
1462
+ }`);
1463
+ return true;
1464
+ }
1465
+
1466
+ //#endregion
1467
+ //#region src/deploy/index.ts
1468
+ const logger$3 = console;
1469
+ /**
1470
+ * Generate image tag from stage and timestamp
1471
+ */
1472
+ function generateTag(stage) {
1473
+ const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-").slice(0, 19);
1474
+ return `${stage}-${timestamp}`;
1475
+ }
1476
+ /**
1477
+ * Main deploy command
1478
+ */
1479
+ async function deployCommand(options) {
1480
+ const { provider, stage, tag, skipPush, skipBuild } = options;
1481
+ logger$3.log(`\n🚀 Deploying to ${provider}...`);
1482
+ logger$3.log(` Stage: ${stage}`);
1483
+ const config$1 = await loadConfig();
1484
+ const imageTag = tag ?? generateTag(stage);
1485
+ logger$3.log(` Tag: ${imageTag}`);
1486
+ let masterKey;
1487
+ if (!skipBuild) {
1488
+ logger$3.log(`\n📦 Building for production...`);
1489
+ const buildResult = await buildCommand({
1490
+ provider: "server",
1491
+ production: true,
1492
+ stage
1493
+ });
1494
+ masterKey = buildResult.masterKey;
1495
+ } else logger$3.log(`\n⏭️ Skipping build (--skip-build)`);
1496
+ const dockerConfig = resolveDockerConfig$1(config$1);
1497
+ const imageName = dockerConfig.imageName ?? "app";
1498
+ const registry = dockerConfig.registry;
1499
+ const imageRef = registry ? `${registry}/${imageName}:${imageTag}` : `${imageName}:${imageTag}`;
1500
+ let result;
1501
+ switch (provider) {
1502
+ case "docker": {
1503
+ result = await deployDocker({
1504
+ stage,
1505
+ tag: imageTag,
1506
+ skipPush,
1507
+ masterKey,
1508
+ config: dockerConfig
1509
+ });
1510
+ break;
1511
+ }
1512
+ case "dokploy": {
1513
+ const dokployConfigRaw = config$1.providers?.dokploy;
1514
+ if (typeof dokployConfigRaw === "boolean" || !dokployConfigRaw) throw new Error("Dokploy provider requires configuration.\nConfigure in gkm.config.ts:\n providers: {\n dokploy: {\n endpoint: 'https://dokploy.example.com',\n projectId: 'proj_xxx',\n applicationId: 'app_xxx',\n },\n }");
1515
+ validateDokployConfig(dokployConfigRaw);
1516
+ const dokployConfig = dokployConfigRaw;
1517
+ await deployDocker({
1518
+ stage,
1519
+ tag: imageTag,
1520
+ skipPush: false,
1521
+ masterKey,
1522
+ config: {
1523
+ registry: dokployConfig.registry ?? dockerConfig.registry,
1524
+ imageName: dockerConfig.imageName
1525
+ }
1526
+ });
1527
+ result = await deployDokploy({
1528
+ stage,
1529
+ tag: imageTag,
1530
+ imageRef,
1531
+ masterKey,
1532
+ config: dokployConfig
1533
+ });
1534
+ break;
1535
+ }
1536
+ case "aws-lambda": {
1537
+ logger$3.log("\n⚠️ AWS Lambda deployment is not yet implemented.");
1538
+ logger$3.log(" Use SST or AWS CDK for Lambda deployments.");
1539
+ result = {
1540
+ imageRef,
1541
+ masterKey
1542
+ };
1543
+ break;
1544
+ }
1545
+ default: throw new Error(`Unknown deploy provider: ${provider}\nSupported providers: docker, dokploy, aws-lambda`);
1546
+ }
1547
+ logger$3.log("\n✅ Deployment complete!");
1548
+ return result;
1549
+ }
1550
+
1551
+ //#endregion
1552
+ //#region src/deploy/init.ts
1553
+ const logger$2 = console;
1554
+ /**
1555
+ * Get the Dokploy API token from stored credentials or environment
1556
+ */
1557
+ async function getApiToken() {
1558
+ const token = await getDokployToken();
1559
+ if (!token) throw new Error("Dokploy credentials not found.\nRun \"gkm login --service dokploy\" to authenticate, or set DOKPLOY_API_TOKEN.");
1560
+ return token;
1561
+ }
1562
+ /**
1563
+ * Make a request to the Dokploy API
1564
+ */
1565
+ async function dokployRequest(method, endpoint, baseUrl, token, body) {
1566
+ const url = `${baseUrl}/api/${endpoint}`;
1567
+ const response = await fetch(url, {
1568
+ method,
1569
+ headers: {
1570
+ "Content-Type": "application/json",
1571
+ Authorization: `Bearer ${token}`
1572
+ },
1573
+ body: body ? JSON.stringify(body) : void 0
1574
+ });
1575
+ if (!response.ok) {
1576
+ let errorMessage = `Dokploy API error: ${response.status} ${response.statusText}`;
1577
+ try {
1578
+ const errorBody = await response.json();
1579
+ if (errorBody.message) errorMessage = `Dokploy API error: ${errorBody.message}`;
1580
+ } catch {}
1581
+ throw new Error(errorMessage);
1582
+ }
1583
+ const text = await response.text();
1584
+ if (!text) return {};
1585
+ return JSON.parse(text);
1586
+ }
1587
+ /**
1588
+ * Get all projects from Dokploy
1589
+ */
1590
+ async function getProjects(baseUrl, token) {
1591
+ return dokployRequest("GET", "project.all", baseUrl, token);
1592
+ }
1593
+ /**
1594
+ * Create a new project in Dokploy
1595
+ */
1596
+ async function createProject(baseUrl, token, name$1, description$1) {
1597
+ return dokployRequest("POST", "project.create", baseUrl, token, {
1598
+ name: name$1,
1599
+ description: description$1 || `Created by gkm CLI`
1600
+ });
1601
+ }
1602
+ /**
1603
+ * Get project by ID to get environment info
1604
+ */
1605
+ async function getProject(baseUrl, token, projectId) {
1606
+ return dokployRequest("POST", "project.one", baseUrl, token, { projectId });
1607
+ }
1608
+ /**
1609
+ * Create a new application in Dokploy
1610
+ */
1611
+ async function createApplication(baseUrl, token, name$1, projectId) {
1612
+ const project = await getProject(baseUrl, token, projectId);
1613
+ let environmentId;
1614
+ const firstEnv = project.environments?.[0];
1615
+ if (firstEnv) environmentId = firstEnv.environmentId;
1616
+ else {
1617
+ const env = await dokployRequest("POST", "environment.create", baseUrl, token, {
1618
+ projectId,
1619
+ name: "production",
1620
+ description: "Production environment"
1621
+ });
1622
+ environmentId = env.environmentId;
1623
+ }
1624
+ return dokployRequest("POST", "application.create", baseUrl, token, {
1625
+ name: name$1,
1626
+ projectId,
1627
+ environmentId
1628
+ });
1629
+ }
1630
+ /**
1631
+ * Configure application for Docker registry deployment
1632
+ */
1633
+ async function configureApplicationRegistry(baseUrl, token, applicationId, registryId) {
1634
+ await dokployRequest("POST", "application.update", baseUrl, token, {
1635
+ applicationId,
1636
+ registryId
1637
+ });
1638
+ }
1639
+ /**
1640
+ * Get available registries
1641
+ */
1642
+ async function getRegistries(baseUrl, token) {
1643
+ return dokployRequest("GET", "registry.all", baseUrl, token);
1644
+ }
1645
/**
 * Update gkm.config.ts with Dokploy configuration
 *
 * Persists the { endpoint, projectId, applicationId } produced by
 * deployInitCommand into the project's gkm.config.ts. The file is edited as
 * plain text with string/regex replacements — it is NOT parsed as
 * TypeScript, so unusually formatted configs may not match the patterns.
 *
 * @param config$1 - Dokploy settings to persist
 * @param cwd - Directory containing gkm.config.ts (defaults to process.cwd())
 */
async function updateConfig(config$1, cwd = process.cwd()) {
	const configPath = join(cwd, "gkm.config.ts");
	// Config file missing: print a snippet for the user to paste, then stop.
	if (!existsSync(configPath)) {
		logger$2.warn("\n gkm.config.ts not found. Add this configuration manually:\n");
		logger$2.log(`  providers: {`);
		logger$2.log(`    dokploy: {`);
		logger$2.log(`      endpoint: '${config$1.endpoint}',`);
		logger$2.log(`      projectId: '${config$1.projectId}',`);
		logger$2.log(`      applicationId: '${config$1.applicationId}',`);
		logger$2.log(`    },`);
		logger$2.log(`  },`);
		return;
	}
	const content = await readFile(configPath, "utf-8");
	if (content.includes("dokploy:") && content.includes("applicationId:")) {
		logger$2.log("\n Dokploy config already exists in gkm.config.ts");
		logger$2.log("  Updating with new values...");
	}
	let newContent;
	// Three cases: (1) replace an existing dokploy block, (2) insert into an
	// existing providers block, (3) append a providers block before the final
	// closing `})` of the config file.
	// NOTE(review): /dokploy:\s*\{[^}]*\}/ only matches a dokploy object with
	// no nested braces — verify against configs that nest further options.
	if (content.includes("providers:")) if (content.includes("dokploy:")) newContent = content.replace(/dokploy:\s*\{[^}]*\}/, `dokploy: {
      endpoint: '${config$1.endpoint}',
      projectId: '${config$1.projectId}',
      applicationId: '${config$1.applicationId}',
    }`);
	else newContent = content.replace(/providers:\s*\{/, `providers: {
    dokploy: {
      endpoint: '${config$1.endpoint}',
      projectId: '${config$1.projectId}',
      applicationId: '${config$1.applicationId}',
    },`);
	else newContent = content.replace(/}\s*\)\s*;?\s*$/, `
  providers: {
    dokploy: {
      endpoint: '${config$1.endpoint}',
      projectId: '${config$1.projectId}',
      applicationId: '${config$1.applicationId}',
    },
  },
});`);
	// NOTE(review): if none of the replacements matched, newContent equals the
	// original content but success is still logged — confirm this is intended.
	await writeFile(configPath, newContent);
	logger$2.log("\n ✓ Updated gkm.config.ts with Dokploy configuration");
}
1690
/**
 * Initialize Dokploy deployment configuration
 *
 * Resolves the Dokploy endpoint (--endpoint flag or stored credentials),
 * finds or creates the target project, creates the application, optionally
 * wires up a Docker registry, and writes the result into gkm.config.ts.
 *
 * @param options - { projectName, appName, endpoint?, projectId?, registryId? }
 * @returns The resolved { endpoint, projectId, applicationId }
 * @throws Error when no endpoint is provided and none is stored
 */
async function deployInitCommand(options) {
	const { projectName, appName, projectId: existingProjectId, registryId } = options;
	let endpoint = options.endpoint;
	if (!endpoint) {
		const stored = await getDokployCredentials();
		if (stored) endpoint = stored.endpoint;
		else throw new Error("Dokploy endpoint not specified.\nEither run \"gkm login --service dokploy\" first, or provide --endpoint.");
	}
	logger$2.log(`\n🚀 Initializing Dokploy deployment...`);
	logger$2.log(`   Endpoint: ${endpoint}`);
	const token = await getApiToken();
	let projectId;
	if (existingProjectId) {
		projectId = existingProjectId;
		logger$2.log(`\n📁 Using existing project: ${projectId}`);
	} else {
		// Match projects case-insensitively by name; create one if absent.
		logger$2.log(`\n📁 Looking for project: ${projectName}`);
		const projects = await getProjects(endpoint, token);
		const existingProject = projects.find((p) => p.name.toLowerCase() === projectName.toLowerCase());
		if (existingProject) {
			projectId = existingProject.projectId;
			logger$2.log(`   Found existing project: ${projectId}`);
		} else {
			logger$2.log(`   Creating new project...`);
			const project = await createProject(endpoint, token, projectName);
			projectId = project.projectId;
			logger$2.log(`   ✓ Created project: ${projectId}`);
		}
	}
	logger$2.log(`\n📦 Creating application: ${appName}`);
	const application = await createApplication(endpoint, token, appName, projectId);
	logger$2.log(`   ✓ Created application: ${application.applicationId}`);
	if (registryId) {
		logger$2.log(`\n🔧 Configuring registry: ${registryId}`);
		await configureApplicationRegistry(endpoint, token, application.applicationId, registryId);
		logger$2.log(`   ✓ Registry configured`);
	} else try {
		// No registry requested: list what is available as a hint. The empty
		// catch makes this best-effort — registry listing is informational only.
		const registries = await getRegistries(endpoint, token);
		if (registries.length > 0) {
			logger$2.log(`\n📋 Available registries:`);
			for (const reg of registries) logger$2.log(`   - ${reg.registryName}: ${reg.registryUrl} (${reg.registryId})`);
			logger$2.log(`\n   To use a registry, run with --registry-id <id>`);
		}
	} catch {}
	const config$1 = {
		endpoint,
		projectId,
		applicationId: application.applicationId
	};
	// Persist the resolved settings into gkm.config.ts (best-effort edit).
	await updateConfig(config$1);
	logger$2.log(`\n✅ Dokploy deployment initialized!`);
	logger$2.log(`\n📋 Configuration:`);
	logger$2.log(`   Project ID: ${projectId}`);
	logger$2.log(`   Application ID: ${application.applicationId}`);
	logger$2.log(`\n🔗 View in Dokploy: ${endpoint}/project/${projectId}`);
	logger$2.log(`\n📝 Next steps:`);
	logger$2.log(`   1. Initialize secrets: gkm secrets:init --stage production`);
	logger$2.log(`   2. Deploy: gkm deploy --provider dokploy --stage production`);
	return config$1;
}
1753
/**
 * List available Dokploy resources
 *
 * Supports `resource === "projects"` and `resource === "registries"`; any
 * other value prints nothing. Endpoint resolution mirrors deployInitCommand.
 *
 * @param options - { resource, endpoint? }
 * @throws Error when no endpoint is provided and none is stored
 */
async function deployListCommand(options) {
	let endpoint = options.endpoint;
	if (!endpoint) {
		const stored = await getDokployCredentials();
		if (stored) endpoint = stored.endpoint;
		else throw new Error("Dokploy endpoint not specified.\nEither run \"gkm login --service dokploy\" first, or provide --endpoint.");
	}
	const { resource } = options;
	const token = await getApiToken();
	if (resource === "projects") {
		logger$2.log(`\n📁 Projects in ${endpoint}:`);
		const projects = await getProjects(endpoint, token);
		if (projects.length === 0) {
			logger$2.log("   No projects found");
			return;
		}
		for (const project of projects) {
			logger$2.log(`\n  ${project.name} (${project.projectId})`);
			if (project.description) logger$2.log(`    ${project.description}`);
		}
	} else if (resource === "registries") {
		logger$2.log(`\n🐳 Registries in ${endpoint}:`);
		const registries = await getRegistries(endpoint, token);
		if (registries.length === 0) {
			logger$2.log("   No registries configured");
			logger$2.log("   Add a registry in Dokploy: Settings > Docker Registry");
			return;
		}
		for (const registry of registries) {
			logger$2.log(`\n  ${registry.registryName} (${registry.registryId})`);
			logger$2.log(`    URL: ${registry.registryUrl}`);
			logger$2.log(`    Username: ${registry.username}`);
			if (registry.imagePrefix) logger$2.log(`    Prefix: ${registry.imagePrefix}`);
		}
	}
}
1792
+
1793
+ //#endregion
1794
//#region src/docker/compose.ts
/** Default Docker images for services */
const DEFAULT_SERVICE_IMAGES = {
	postgres: "postgres",
	redis: "redis",
	rabbitmq: "rabbitmq"
};
/** Default Docker image versions for services */
const DEFAULT_SERVICE_VERSIONS = {
	postgres: "16-alpine",
	redis: "7-alpine",
	rabbitmq: "3-management-alpine"
};
/** Get the default full image reference for a service */
function getDefaultImage(serviceName) {
	const image = DEFAULT_SERVICE_IMAGES[serviceName];
	const version = DEFAULT_SERVICE_VERSIONS[serviceName];
	return `${image}:${version}`;
}
/** Normalize services config to a consistent format - returns Map of service name to full image reference */
function normalizeServices(services) {
	const normalized = new Map();
	// Array form: every listed service uses its default image.
	if (Array.isArray(services)) {
		for (const svc of services) normalized.set(svc, getDefaultImage(svc));
		return normalized;
	}
	// Object form: `true` means defaults; an object may pin an explicit image
	// or just a version. Anything else (e.g. false) disables the service.
	for (const [svc, cfg] of Object.entries(services)) {
		if (cfg === true) {
			normalized.set(svc, getDefaultImage(svc));
		} else if (cfg && typeof cfg === "object") {
			if (cfg.image) {
				normalized.set(svc, cfg.image);
			} else {
				const version = cfg.version ?? DEFAULT_SERVICE_VERSIONS[svc];
				normalized.set(svc, `${DEFAULT_SERVICE_IMAGES[svc]}:${version}`);
			}
		}
	}
	return normalized;
}
1829
/**
 * Generate docker-compose.yml for production deployment
 *
 * Emits an `api` service plus one service block per configured backing
 * service (postgres/redis/rabbitmq), with named volumes and a shared bridge
 * network. `\${VAR:-default}` placeholders are emitted literally so docker
 * compose resolves them at runtime.
 *
 * @param options - { imageName, registry, port, healthCheckPath, services }
 * @returns The docker-compose.yml contents as a string
 */
function generateDockerCompose(options) {
	const { imageName, registry, port, healthCheckPath, services } = options;
	const serviceMap = normalizeServices(services);
	// Optional registry prefix, overridable at deploy time via $REGISTRY.
	const imageRef = registry ? `\${REGISTRY:-${registry}}/` : "";
	let yaml = `version: '3.8'

services:
  api:
    build:
      context: ../..
      dockerfile: .gkm/docker/Dockerfile
    image: ${imageRef}\${IMAGE_NAME:-${imageName}}:\${TAG:-latest}
    container_name: ${imageName}
    restart: unless-stopped
    ports:
      - "\${PORT:-${port}}:${port}"
    environment:
      - NODE_ENV=production
`;
	// Connection-string env vars are only emitted for configured services.
	if (serviceMap.has("postgres")) yaml += `      - DATABASE_URL=\${DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/app}
`;
	if (serviceMap.has("redis")) yaml += `      - REDIS_URL=\${REDIS_URL:-redis://redis:6379}
`;
	if (serviceMap.has("rabbitmq")) yaml += `      - RABBITMQ_URL=\${RABBITMQ_URL:-amqp://rabbitmq:5672}
`;
	yaml += `    healthcheck:
      test: ["CMD", "wget", "-q", "--spider", "http://localhost:${port}${healthCheckPath}"]
      interval: 30s
      timeout: 3s
      retries: 3
`;
	// The api container waits for every backing service to report healthy.
	if (serviceMap.size > 0) {
		yaml += `    depends_on:
`;
		for (const serviceName of serviceMap.keys()) yaml += `      ${serviceName}:
        condition: service_healthy
`;
	}
	yaml += `    networks:
      - app-network
`;
	const postgresImage = serviceMap.get("postgres");
	if (postgresImage) yaml += `
  postgres:
    image: ${postgresImage}
    container_name: postgres
    restart: unless-stopped
    environment:
      POSTGRES_USER: \${POSTGRES_USER:-postgres}
      POSTGRES_PASSWORD: \${POSTGRES_PASSWORD:-postgres}
      POSTGRES_DB: \${POSTGRES_DB:-app}
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 5s
      timeout: 5s
      retries: 5
    networks:
      - app-network
`;
	const redisImage = serviceMap.get("redis");
	if (redisImage) yaml += `
  redis:
    image: ${redisImage}
    container_name: redis
    restart: unless-stopped
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 5s
      retries: 5
    networks:
      - app-network
`;
	const rabbitmqImage = serviceMap.get("rabbitmq");
	if (rabbitmqImage) yaml += `
  rabbitmq:
    image: ${rabbitmqImage}
    container_name: rabbitmq
    restart: unless-stopped
    environment:
      RABBITMQ_DEFAULT_USER: \${RABBITMQ_USER:-guest}
      RABBITMQ_DEFAULT_PASS: \${RABBITMQ_PASSWORD:-guest}
    ports:
      - "15672:15672" # Management UI
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "-q", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - app-network
`;
	// Named volumes are declared only for services that were emitted above.
	yaml += `
volumes:
`;
	if (serviceMap.has("postgres")) yaml += `  postgres_data:
`;
	if (serviceMap.has("redis")) yaml += `  redis_data:
`;
	if (serviceMap.has("rabbitmq")) yaml += `  rabbitmq_data:
`;
	yaml += `
networks:
  app-network:
    driver: bridge
`;
	return yaml;
}
1946
/**
 * Generate a minimal docker-compose.yml for API only
 *
 * Used when no backing services (postgres/redis/rabbitmq) are configured.
 * `\${VAR:-default}` placeholders are emitted literally so docker compose
 * resolves them at runtime.
 *
 * @param options - { imageName, registry, port, healthCheckPath }
 * @returns The docker-compose.yml contents as a string
 */
function generateMinimalDockerCompose(options) {
	const { imageName, registry, port, healthCheckPath } = options;
	// Optional registry prefix, overridable at deploy time via $REGISTRY.
	const imageRef = registry ? `\${REGISTRY:-${registry}}/` : "";
	return `version: '3.8'

services:
  api:
    build:
      context: ../..
      dockerfile: .gkm/docker/Dockerfile
    image: ${imageRef}\${IMAGE_NAME:-${imageName}}:\${TAG:-latest}
    container_name: ${imageName}
    restart: unless-stopped
    ports:
      - "\${PORT:-${port}}:${port}"
    environment:
      - NODE_ENV=production
    healthcheck:
      test: ["CMD", "wget", "-q", "--spider", "http://localhost:${port}${healthCheckPath}"]
      interval: 30s
      timeout: 3s
      retries: 3
    networks:
      - app-network

networks:
  app-network:
    driver: bridge
`;
}
1979
+
1980
+ //#endregion
1981
+ //#region src/docker/templates.ts
1982
/**
 * Detect package manager from lockfiles
 * Walks up the directory tree to find lockfile (for monorepos)
 *
 * Checks lockfiles in priority order (pnpm, bun, yarn, npm) at each
 * ancestor directory, including the filesystem root; defaults to pnpm.
 */
function detectPackageManager$1(cwd = process.cwd()) {
	const lockfileToPm = new Map([
		["pnpm-lock.yaml", "pnpm"],
		["bun.lockb", "bun"],
		["yarn.lock", "yarn"],
		["package-lock.json", "npm"]
	]);
	const { root } = parse(cwd);
	let current = cwd;
	// Visit cwd and every ancestor up to and including the root.
	for (;;) {
		for (const [lockfile, pm] of lockfileToPm) {
			if (existsSync(join(current, lockfile))) return pm;
		}
		if (current === root) break;
		current = dirname(current);
	}
	return "pnpm";
}
2002
/**
 * Get package manager specific commands and paths
 *
 * Returns the install bootstrap command, lockfile name, fetch/install
 * commands, BuildKit cache mount settings, run prefix, and global-add
 * command for the given package manager.
 */
function getPmConfig(pm) {
	switch (pm) {
		case "pnpm":
			return {
				install: "corepack enable && corepack prepare pnpm@latest --activate",
				lockfile: "pnpm-lock.yaml",
				fetch: "pnpm fetch",
				installCmd: "pnpm install --frozen-lockfile --offline",
				cacheTarget: "/root/.local/share/pnpm/store",
				cacheId: "pnpm",
				run: "pnpm",
				addGlobal: "pnpm add -g"
			};
		case "npm":
			return {
				install: "",
				lockfile: "package-lock.json",
				fetch: "",
				installCmd: "npm ci",
				cacheTarget: "/root/.npm",
				cacheId: "npm",
				run: "npm run",
				addGlobal: "npm install -g"
			};
		case "yarn":
			return {
				install: "corepack enable && corepack prepare yarn@stable --activate",
				lockfile: "yarn.lock",
				fetch: "",
				installCmd: "yarn install --frozen-lockfile",
				cacheTarget: "/root/.yarn/cache",
				cacheId: "yarn",
				run: "yarn",
				addGlobal: "yarn global add"
			};
		case "bun":
			return {
				install: "npm install -g bun",
				lockfile: "bun.lockb",
				fetch: "",
				installCmd: "bun install --frozen-lockfile",
				cacheTarget: "/root/.bun/install/cache",
				cacheId: "bun",
				run: "bun run",
				addGlobal: "bun add -g"
			};
	}
}
2050
/**
 * Generate a multi-stage Dockerfile for building from source
 * Optimized for build speed with:
 * - BuildKit cache mounts for package manager store
 * - pnpm fetch for better layer caching (when using pnpm)
 * - Optional turbo prune for monorepos
 *
 * @param options - { baseImage, port, healthCheckPath, turbo?, turboPackage?, packageManager }
 * @returns The Dockerfile contents as a string
 */
function generateMultiStageDockerfile(options) {
	const { baseImage, port, healthCheckPath, turbo, turboPackage, packageManager } = options;
	// Turbo monorepos get a dedicated prune-based Dockerfile instead.
	if (turbo) return generateTurboDockerfile({
		...options,
		turboPackage: turboPackage ?? "api"
	});
	const pm = getPmConfig(packageManager);
	const installPm = pm.install ? `\n# Install ${packageManager}\nRUN ${pm.install}\n` : "";
	// Only pnpm has a separate fetch step, which caches downloads
	// independently of package.json changes.
	const hasFetch = packageManager === "pnpm";
	const depsStage = hasFetch ? `# Copy lockfile first for better caching
COPY ${pm.lockfile} ./

# Fetch dependencies (downloads to virtual store, cached separately)
RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.fetch}

# Copy package.json after fetch
COPY package.json ./

# Install from cache (fast - no network needed)
RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.installCmd}` : `# Copy package files
COPY package.json ${pm.lockfile} ./

# Install dependencies with cache
RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.installCmd}`;
	return `# syntax=docker/dockerfile:1
# Stage 1: Dependencies
FROM ${baseImage} AS deps

WORKDIR /app
${installPm}
${depsStage}

# Stage 2: Build
FROM deps AS builder

WORKDIR /app

# Copy source (deps already installed)
COPY . .

# Build production server
RUN ${pm.run} gkm build --provider server --production

# Stage 3: Production
FROM ${baseImage} AS runner

WORKDIR /app

# Install tini for proper signal handling as PID 1
RUN apk add --no-cache tini

# Create non-root user
RUN addgroup --system --gid 1001 nodejs && \\
    adduser --system --uid 1001 hono

# Copy bundled server
COPY --from=builder --chown=hono:nodejs /app/.gkm/server/dist/server.mjs ./

# Environment
ENV NODE_ENV=production
ENV PORT=${port}

# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \\
  CMD wget -q --spider http://localhost:${port}${healthCheckPath} || exit 1

# Switch to non-root user
USER hono

EXPOSE ${port}

# Use tini as entrypoint to handle PID 1 responsibilities
ENTRYPOINT ["/sbin/tini", "--"]
CMD ["node", "server.mjs"]
`;
}
2136
/**
 * Generate a Dockerfile optimized for Turbo monorepos
 * Uses turbo prune to create minimal Docker context
 *
 * @param options - { baseImage, port, healthCheckPath, turboPackage, packageManager }
 * @returns The Dockerfile contents as a string
 */
function generateTurboDockerfile(options) {
	const { baseImage, port, healthCheckPath, turboPackage, packageManager } = options;
	const pm = getPmConfig(packageManager);
	const installPm = pm.install ? `RUN ${pm.install}` : "";
	// Only pnpm has a separate fetch step (see generateMultiStageDockerfile).
	const hasFetch = packageManager === "pnpm";
	const depsInstall = hasFetch ? `# Fetch and install from cache
RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.fetch}

RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.installCmd}` : `# Install dependencies with cache
RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.installCmd}`;
	return `# syntax=docker/dockerfile:1
# Stage 1: Prune monorepo
FROM ${baseImage} AS pruner

WORKDIR /app

${installPm}
RUN ${pm.addGlobal} turbo

COPY . .

# Prune to only include necessary packages
RUN turbo prune ${turboPackage} --docker

# Stage 2: Install dependencies
FROM ${baseImage} AS deps

WORKDIR /app

${installPm}

# Copy pruned lockfile and package.jsons
COPY --from=pruner /app/out/${pm.lockfile} ./
COPY --from=pruner /app/out/json/ ./

${depsInstall}

# Stage 3: Build
FROM deps AS builder

WORKDIR /app

# Copy pruned source
COPY --from=pruner /app/out/full/ ./

# Build production server
RUN ${pm.run} gkm build --provider server --production

# Stage 4: Production
FROM ${baseImage} AS runner

WORKDIR /app

RUN apk add --no-cache tini

RUN addgroup --system --gid 1001 nodejs && \\
    adduser --system --uid 1001 hono

COPY --from=builder --chown=hono:nodejs /app/.gkm/server/dist/server.mjs ./

ENV NODE_ENV=production
ENV PORT=${port}

HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \\
  CMD wget -q --spider http://localhost:${port}${healthCheckPath} || exit 1

USER hono

EXPOSE ${port}

ENTRYPOINT ["/sbin/tini", "--"]
CMD ["node", "server.mjs"]
`;
}
2217
/**
 * Generate a slim Dockerfile for pre-built bundles
 *
 * Single-stage image that copies an already-built .gkm/server/dist/server.mjs;
 * callers must ensure the bundle exists before building this image.
 *
 * @param options - { baseImage, port, healthCheckPath }
 * @returns The Dockerfile contents as a string
 */
function generateSlimDockerfile(options) {
	const { baseImage, port, healthCheckPath } = options;
	return `# Slim Dockerfile for pre-built production bundle
FROM ${baseImage}

WORKDIR /app

# Install tini for proper signal handling as PID 1
# Handles SIGTERM propagation and zombie process reaping
RUN apk add --no-cache tini

# Create non-root user
RUN addgroup --system --gid 1001 nodejs && \\
    adduser --system --uid 1001 hono

# Copy pre-built bundle
COPY .gkm/server/dist/server.mjs ./

# Environment
ENV NODE_ENV=production
ENV PORT=${port}

# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \\
  CMD wget -q --spider http://localhost:${port}${healthCheckPath} || exit 1

# Switch to non-root user
USER hono

EXPOSE ${port}

# Use tini as entrypoint to handle PID 1 responsibilities
ENTRYPOINT ["/sbin/tini", "--"]
CMD ["node", "server.mjs"]
`;
}
2256
/**
 * Generate .dockerignore file
 *
 * Written to the project root so the Docker build context excludes
 * dependencies, non-dist build output, tests, docs, and local env files
 * (while keeping .gkm/server/dist for slim builds).
 *
 * @returns The .dockerignore contents as a string
 */
function generateDockerignore() {
	return `# Dependencies
node_modules
.pnpm-store

# Build output (except what we need)
.gkm/aws*
.gkm/server/*.ts
!.gkm/server/dist

# IDE and editor
.idea
.vscode
*.swp
*.swo

# Git
.git
.gitignore

# Logs
*.log
npm-debug.log*
pnpm-debug.log*

# Test files
**/*.test.ts
**/*.spec.ts
**/__tests__
coverage

# Documentation
docs
*.md
!README.md

# Environment files (handle secrets separately)
.env
.env.*
!.env.example

# Docker files (don't copy recursively)
Dockerfile*
docker-compose*
.dockerignore
`;
}
2306
/**
 * Generate docker-entrypoint.sh for custom startup logic
 *
 * The script simply exec's its arguments; the commented block shows where
 * users can add wait-for-dependency logic before the server starts.
 *
 * @returns The shell script contents as a string
 */
function generateDockerEntrypoint() {
	return `#!/bin/sh
set -e

# Run any custom startup scripts here
# Example: wait for database
# until nc -z $DB_HOST $DB_PORT; do
#   echo "Waiting for database..."
#   sleep 1
# done

# Execute the main command
exec "$@"
`;
}
2324
/**
 * Resolve Docker configuration from GkmConfig with defaults
 *
 * Defaults: registry "", imageName from package.json name (npm scope
 * stripped, e.g. "@acme/api" -> "api") falling back to "api",
 * baseImage "node:22-alpine", port 3000.
 *
 * @param config$1 - Loaded gkm config (docker section optional)
 * @returns Fully-populated docker settings object
 */
function resolveDockerConfig(config$1) {
	const docker = config$1.docker ?? {};
	let defaultImageName = "api";
	try {
		// Best-effort: derive the image name from the project's package.json;
		// a missing/unreadable file silently falls back to "api".
		const pkg = __require(`${process.cwd()}/package.json`);
		if (pkg.name) defaultImageName = pkg.name.replace(/^@[^/]+\//, "");
	} catch {}
	return {
		registry: docker.registry ?? "",
		imageName: docker.imageName ?? defaultImageName,
		baseImage: docker.baseImage ?? "node:22-alpine",
		port: docker.port ?? 3e3,
		compose: docker.compose
	};
}
2342
+
2343
+ //#endregion
2344
+ //#region src/docker/index.ts
2345
// Module-local logger for the docker command (plain console).
const logger$1 = console;
/**
 * Docker command implementation
 * Generates Dockerfile, docker-compose.yml, and related files
 *
 * Default: Multi-stage Dockerfile that builds from source inside Docker
 * --slim: Slim Dockerfile that copies pre-built bundle (requires prior build)
 *
 * @param options - { slim?, turbo?, turboPackage?, registry?, tag?, build?, push? }
 * @returns Paths of the generated files
 * @throws Error when --slim is used without a prior production build
 */
async function dockerCommand(options) {
	const config$1 = await loadConfig();
	const dockerConfig = resolveDockerConfig(config$1);
	const serverConfig = typeof config$1.providers?.server === "object" ? config$1.providers.server : void 0;
	const healthCheckPath = serverConfig?.production?.healthCheck ?? "/health";
	const useSlim = options.slim === true;
	// Slim images copy a pre-built bundle, so it must exist up front.
	if (useSlim) {
		const distDir = join(process.cwd(), ".gkm", "server", "dist");
		const hasBuild = existsSync(join(distDir, "server.mjs"));
		if (!hasBuild) throw new Error("Slim Dockerfile requires a pre-built bundle. Run `gkm build --provider server --production` first, or omit --slim to use multi-stage build.");
	}
	const dockerDir = join(process.cwd(), ".gkm", "docker");
	await mkdir(dockerDir, { recursive: true });
	const packageManager = detectPackageManager$1();
	const templateOptions = {
		imageName: dockerConfig.imageName,
		baseImage: dockerConfig.baseImage,
		port: dockerConfig.port,
		healthCheckPath,
		prebuilt: useSlim,
		turbo: options.turbo,
		turboPackage: options.turboPackage ?? dockerConfig.imageName,
		packageManager
	};
	const dockerfile = useSlim ? generateSlimDockerfile(templateOptions) : generateMultiStageDockerfile(templateOptions);
	const dockerMode = useSlim ? "slim" : options.turbo ? "turbo" : "multi-stage";
	const dockerfilePath = join(dockerDir, "Dockerfile");
	await writeFile(dockerfilePath, dockerfile);
	logger$1.log(`Generated: .gkm/docker/Dockerfile (${dockerMode}, ${packageManager})`);
	const composeOptions = {
		imageName: dockerConfig.imageName,
		registry: options.registry ?? dockerConfig.registry,
		port: dockerConfig.port,
		healthCheckPath,
		services: dockerConfig.compose?.services ?? {}
	};
	// Full compose file only when backing services are actually configured.
	const hasServices = Array.isArray(composeOptions.services) ? composeOptions.services.length > 0 : Object.keys(composeOptions.services).length > 0;
	const dockerCompose = hasServices ? generateDockerCompose(composeOptions) : generateMinimalDockerCompose(composeOptions);
	const composePath = join(dockerDir, "docker-compose.yml");
	await writeFile(composePath, dockerCompose);
	logger$1.log("Generated: .gkm/docker/docker-compose.yml");
	const dockerignore = generateDockerignore();
	// .dockerignore must live at the build context root, not in .gkm/docker.
	const dockerignorePath = join(process.cwd(), ".dockerignore");
	await writeFile(dockerignorePath, dockerignore);
	logger$1.log("Generated: .dockerignore (project root)");
	const entrypoint = generateDockerEntrypoint();
	const entrypointPath = join(dockerDir, "docker-entrypoint.sh");
	await writeFile(entrypointPath, entrypoint);
	logger$1.log("Generated: .gkm/docker/docker-entrypoint.sh");
	const result = {
		dockerfile: dockerfilePath,
		dockerCompose: composePath,
		dockerignore: dockerignorePath,
		entrypoint: entrypointPath
	};
	// Optionally build and/or push the image right away.
	if (options.build) await buildDockerImage(dockerConfig.imageName, options);
	if (options.push) await pushDockerImage(dockerConfig.imageName, options);
	return result;
}
2412
/**
 * Build Docker image
 * Uses BuildKit for cache mount support
 *
 * @param imageName - Image name without registry prefix
 * @param options - { tag?, registry? }
 * @throws Error when `docker build` exits non-zero
 */
async function buildDockerImage(imageName, options) {
	const tag = options.tag ?? "latest";
	const registry = options.registry;
	const fullImageName = registry ? `${registry}/${imageName}:${tag}` : `${imageName}:${tag}`;
	logger$1.log(`\n🐳 Building Docker image: ${fullImageName}`);
	try {
		// BuildKit is enabled via `env` below. The previous `DOCKER_BUILDKIT=1`
		// command-string prefix was redundant and POSIX-shell-only (it fails on
		// Windows cmd.exe), so it has been removed.
		execSync(`docker build -f .gkm/docker/Dockerfile -t ${fullImageName} .`, {
			cwd: process.cwd(),
			stdio: "inherit",
			env: {
				...process.env,
				DOCKER_BUILDKIT: "1"
			}
		});
		logger$1.log(`✅ Docker image built: ${fullImageName}`);
	} catch (error) {
		throw new Error(`Failed to build Docker image: ${error instanceof Error ? error.message : "Unknown error"}`);
	}
}
959
- async function buildForProvider(provider, context, rootOutputDir, endpointGenerator, functionGenerator, cronGenerator, subscriberGenerator, endpoints, functions, crons, subscribers, enableOpenApi) {
960
- const outputDir = join$1(process.cwd(), ".gkm", provider);
961
- await mkdir(outputDir, { recursive: true });
962
- logger.log(`\nGenerating handlers for provider: ${provider}`);
963
- const [routes, functionInfos, cronInfos, subscriberInfos] = await Promise.all([
964
- endpointGenerator.build(context, endpoints, outputDir, {
965
- provider,
966
- enableOpenApi
967
- }),
968
- functionGenerator.build(context, functions, outputDir, { provider }),
969
- cronGenerator.build(context, crons, outputDir, { provider }),
970
- subscriberGenerator.build(context, subscribers, outputDir, { provider })
971
- ]);
972
- logger.log(`Generated ${routes.length} routes, ${functionInfos.length} functions, ${cronInfos.length} crons, ${subscriberInfos.length} subscribers for ${provider}`);
973
- if (provider === "server") {
974
- const routeMetadata = await Promise.all(endpoints.map(async ({ construct }) => ({
975
- path: construct._path,
976
- method: construct.method,
977
- handler: "",
978
- authorizer: construct.authorizer?.name ?? "none"
979
- })));
980
- const appInfo = {
981
- handler: relative$1(process.cwd(), join$1(outputDir, "app.ts")),
982
- endpoints: relative$1(process.cwd(), join$1(outputDir, "endpoints.ts"))
983
- };
984
- await generateServerManifest(rootOutputDir, appInfo, routeMetadata, subscriberInfos);
985
- } else await generateAwsManifest(rootOutputDir, routes, functionInfos, cronInfos, subscriberInfos);
2435
/**
 * Push Docker image to registry
 *
 * A registry is mandatory (from --registry or docker.registry in gkm.config.ts).
 */
async function pushDockerImage(imageName, options) {
	const registry = options.registry;
	if (!registry) throw new Error("Registry is required to push Docker image. Use --registry or configure docker.registry in gkm.config.ts");
	const imageRef = `${registry}/${imageName}:${options.tag ?? "latest"}`;
	logger$1.log(`\n🚀 Pushing Docker image: ${imageRef}`);
	try {
		execSync(`docker push ${imageRef}`, {
			cwd: process.cwd(),
			stdio: "inherit"
		});
		logger$1.log(`✅ Docker image pushed: ${imageRef}`);
	} catch (error) {
		const reason = error instanceof Error ? error.message : "Unknown error";
		throw new Error(`Failed to push Docker image: ${reason}`);
	}
}
987
2454
 
988
2455
  //#endregion
@@ -1060,7 +2527,7 @@ export default defineConfig({
1060
2527
  content: gkmConfig
1061
2528
  }, {
1062
2529
  path: "tsconfig.json",
1063
- content: JSON.stringify(tsConfig, null, 2) + "\n"
2530
+ content: `${JSON.stringify(tsConfig, null, 2)}\n`
1064
2531
  }];
1065
2532
  const biomeConfig = {
1066
2533
  $schema: "https://biomejs.dev/schemas/1.9.4/schema.json",
@@ -1134,15 +2601,15 @@ export default defineConfig({
1134
2601
  },
1135
2602
  {
1136
2603
  path: "tsconfig.json",
1137
- content: JSON.stringify(tsConfig, null, 2) + "\n"
2604
+ content: `${JSON.stringify(tsConfig, null, 2)}\n`
1138
2605
  },
1139
2606
  {
1140
2607
  path: "biome.json",
1141
- content: JSON.stringify(biomeConfig, null, 2) + "\n"
2608
+ content: `${JSON.stringify(biomeConfig, null, 2)}\n`
1142
2609
  },
1143
2610
  {
1144
2611
  path: "turbo.json",
1145
- content: JSON.stringify(turboConfig, null, 2) + "\n"
2612
+ content: `${JSON.stringify(turboConfig, null, 2)}\n`
1146
2613
  }
1147
2614
  ];
1148
2615
  }
@@ -1494,11 +2961,11 @@ export type UpdateUser = z.infer<typeof updateUserSchema>;
1494
2961
  return [
1495
2962
  {
1496
2963
  path: "packages/models/package.json",
1497
- content: JSON.stringify(packageJson, null, 2) + "\n"
2964
+ content: `${JSON.stringify(packageJson, null, 2)}\n`
1498
2965
  },
1499
2966
  {
1500
2967
  path: "packages/models/tsconfig.json",
1501
- content: JSON.stringify(tsConfig, null, 2) + "\n"
2968
+ content: `${JSON.stringify(tsConfig, null, 2)}\n`
1502
2969
  },
1503
2970
  {
1504
2971
  path: "packages/models/src/index.ts",
@@ -1664,7 +3131,7 @@ coverage/
1664
3131
  return [
1665
3132
  {
1666
3133
  path: "package.json",
1667
- content: JSON.stringify(rootPackageJson, null, 2) + "\n"
3134
+ content: `${JSON.stringify(rootPackageJson, null, 2)}\n`
1668
3135
  },
1669
3136
  {
1670
3137
  path: "pnpm-workspace.yaml",
@@ -1672,15 +3139,15 @@ coverage/
1672
3139
  },
1673
3140
  {
1674
3141
  path: "tsconfig.json",
1675
- content: JSON.stringify(tsConfig, null, 2) + "\n"
3142
+ content: `${JSON.stringify(tsConfig, null, 2)}\n`
1676
3143
  },
1677
3144
  {
1678
3145
  path: "biome.json",
1679
- content: JSON.stringify(biomeConfig, null, 2) + "\n"
3146
+ content: `${JSON.stringify(biomeConfig, null, 2)}\n`
1680
3147
  },
1681
3148
  {
1682
3149
  path: "turbo.json",
1683
- content: JSON.stringify(turboConfig, null, 2) + "\n"
3150
+ content: `${JSON.stringify(turboConfig, null, 2)}\n`
1684
3151
  },
1685
3152
  {
1686
3153
  path: ".gitignore",
@@ -2401,19 +3868,19 @@ function generatePackageJson(options, template) {
2401
3868
  if (studio) dependencies$1["@geekmidas/studio"] = "workspace:*";
2402
3869
  if (database) {
2403
3870
  dependencies$1["@geekmidas/db"] = "workspace:*";
2404
- dependencies$1["kysely"] = "~0.28.2";
2405
- dependencies$1["pg"] = "~8.16.0";
3871
+ dependencies$1.kysely = "~0.28.2";
3872
+ dependencies$1.pg = "~8.16.0";
2406
3873
  devDependencies$1["@types/pg"] = "~8.15.0";
2407
3874
  }
2408
- dependencies$1["zod"] = "~4.1.0";
3875
+ dependencies$1.zod = "~4.1.0";
2409
3876
  if (monorepo) {
2410
3877
  delete devDependencies$1["@biomejs/biome"];
2411
- delete devDependencies$1["turbo"];
2412
- delete scripts$1["lint"];
2413
- delete scripts$1["fmt"];
3878
+ delete devDependencies$1.turbo;
3879
+ delete scripts$1.lint;
3880
+ delete scripts$1.fmt;
2414
3881
  delete scripts$1["fmt:check"];
2415
3882
  dependencies$1[`@${name$1}/models`] = "workspace:*";
2416
- delete dependencies$1["zod"];
3883
+ delete dependencies$1.zod;
2417
3884
  }
2418
3885
  const sortObject = (obj) => Object.fromEntries(Object.entries(obj).sort(([a], [b]) => a.localeCompare(b)));
2419
3886
  let packageName = name$1;
@@ -2437,7 +3904,7 @@ function generatePackageJson(options, template) {
2437
3904
  };
2438
3905
  return [{
2439
3906
  path: "package.json",
2440
- content: JSON.stringify(packageJson, null, 2) + "\n"
3907
+ content: `${JSON.stringify(packageJson, null, 2)}\n`
2441
3908
  }];
2442
3909
  }
2443
3910
 
@@ -2456,10 +3923,10 @@ function generateSourceFiles(options, template) {
2456
3923
  * Detect the package manager being used based on lockfiles or npm_config_user_agent
2457
3924
  */
2458
3925
  function detectPackageManager(cwd = process.cwd()) {
2459
- if (existsSync(join$1(cwd, "pnpm-lock.yaml"))) return "pnpm";
2460
- if (existsSync(join$1(cwd, "yarn.lock"))) return "yarn";
2461
- if (existsSync(join$1(cwd, "bun.lockb"))) return "bun";
2462
- if (existsSync(join$1(cwd, "package-lock.json"))) return "npm";
3926
+ if (existsSync(join(cwd, "pnpm-lock.yaml"))) return "pnpm";
3927
+ if (existsSync(join(cwd, "yarn.lock"))) return "yarn";
3928
+ if (existsSync(join(cwd, "bun.lockb"))) return "bun";
3929
+ if (existsSync(join(cwd, "package-lock.json"))) return "npm";
2463
3930
  const userAgent = process.env.npm_config_user_agent || "";
2464
3931
  if (userAgent.includes("pnpm")) return "pnpm";
2465
3932
  if (userAgent.includes("yarn")) return "yarn";
@@ -2485,7 +3952,7 @@ function validateProjectName(name$1) {
2485
3952
  * Check if a directory already exists at the target path
2486
3953
  */
2487
3954
  function checkDirectoryExists(name$1, cwd = process.cwd()) {
2488
- const targetPath = join$1(cwd, name$1);
3955
+ const targetPath = join(cwd, name$1);
2489
3956
  if (existsSync(targetPath)) return `Directory "${name$1}" already exists`;
2490
3957
  return true;
2491
3958
  }
@@ -2497,7 +3964,6 @@ function getInstallCommand(pkgManager) {
2497
3964
  case "pnpm": return "pnpm install";
2498
3965
  case "yarn": return "yarn";
2499
3966
  case "bun": return "bun install";
2500
- case "npm":
2501
3967
  default: return "npm install";
2502
3968
  }
2503
3969
  }
@@ -2509,7 +3975,6 @@ function getRunCommand(pkgManager, script) {
2509
3975
  case "pnpm": return `pnpm ${script}`;
2510
3976
  case "yarn": return `yarn ${script}`;
2511
3977
  case "bun": return `bun run ${script}`;
2512
- case "npm":
2513
3978
  default: return `npm run ${script}`;
2514
3979
  }
2515
3980
  }
@@ -2593,21 +4058,12 @@ async function initCommand(projectName, options = {}) {
2593
4058
  }
2594
4059
  ], { onCancel });
2595
4060
  const name$1 = projectName || answers.name;
2596
- if (!name$1) {
2597
- console.error(" Error: Project name is required\n");
2598
- process.exit(1);
2599
- }
4061
+ if (!name$1) process.exit(1);
2600
4062
  if (projectName) {
2601
4063
  const nameValid = validateProjectName(projectName);
2602
- if (nameValid !== true) {
2603
- console.error(` Error: ${nameValid}\n`);
2604
- process.exit(1);
2605
- }
4064
+ if (nameValid !== true) process.exit(1);
2606
4065
  const dirValid = checkDirectoryExists(projectName, cwd);
2607
- if (dirValid !== true) {
2608
- console.error(` Error: ${dirValid}\n`);
2609
- process.exit(1);
2610
- }
4066
+ if (dirValid !== true) process.exit(1);
2611
4067
  }
2612
4068
  const monorepo = options.monorepo ?? (options.yes ? false : answers.monorepo ?? false);
2613
4069
  const database = options.yes ? true : answers.database ?? true;
@@ -2622,12 +4078,12 @@ async function initCommand(projectName, options = {}) {
2622
4078
  monorepo,
2623
4079
  apiPath: monorepo ? options.apiPath ?? answers.apiPath ?? "apps/api" : ""
2624
4080
  };
2625
- const targetDir = join$1(cwd, name$1);
4081
+ const targetDir = join(cwd, name$1);
2626
4082
  const template = getTemplate(templateOptions.template);
2627
4083
  const isMonorepo = templateOptions.monorepo;
2628
4084
  const apiPath = templateOptions.apiPath;
2629
4085
  await mkdir(targetDir, { recursive: true });
2630
- const appDir = isMonorepo ? join$1(targetDir, apiPath) : targetDir;
4086
+ const appDir = isMonorepo ? join(targetDir, apiPath) : targetDir;
2631
4087
  if (isMonorepo) await mkdir(appDir, { recursive: true });
2632
4088
  const appFiles = [
2633
4089
  ...generatePackageJson(templateOptions, template),
@@ -2638,13 +4094,13 @@ async function initCommand(projectName, options = {}) {
2638
4094
  ];
2639
4095
  const rootFiles = [...generateMonorepoFiles(templateOptions, template), ...generateModelsPackage(templateOptions)];
2640
4096
  for (const { path, content } of rootFiles) {
2641
- const fullPath = join$1(targetDir, path);
4097
+ const fullPath = join(targetDir, path);
2642
4098
  await mkdir(dirname(fullPath), { recursive: true });
2643
4099
  await writeFile(fullPath, content);
2644
4100
  }
2645
4101
  for (const { path, content } of appFiles) {
2646
- const fullPath = join$1(appDir, path);
2647
- const displayPath = isMonorepo ? `${apiPath}/${path}` : path;
4102
+ const fullPath = join(appDir, path);
4103
+ const _displayPath = isMonorepo ? `${apiPath}/${path}` : path;
2648
4104
  await mkdir(dirname(fullPath), { recursive: true });
2649
4105
  await writeFile(fullPath, content);
2650
4106
  }
@@ -2654,9 +4110,7 @@ async function initCommand(projectName, options = {}) {
2654
4110
  cwd: targetDir,
2655
4111
  stdio: "inherit"
2656
4112
  });
2657
- } catch {
2658
- console.error("\n Warning: Failed to install dependencies.");
2659
- }
4113
+ } catch {}
2660
4114
  try {
2661
4115
  execSync("npx @biomejs/biome format --write --unsafe .", {
2662
4116
  cwd: targetDir,
@@ -2664,7 +4118,310 @@ async function initCommand(projectName, options = {}) {
2664
4118
  });
2665
4119
  } catch {}
2666
4120
  }
2667
- const devCommand$1 = getRunCommand(pkgManager, "dev");
4121
+ const _devCommand = getRunCommand(pkgManager, "dev");
4122
+ }
4123
+
4124
+ //#endregion
4125
+ //#region src/secrets/generator.ts
4126
+ /**
4127
+ * Generate a secure random password using URL-safe base64 characters.
4128
+ * @param length Password length (default: 32)
4129
+ */
4130
+ function generateSecurePassword(length = 32) {
4131
+ return randomBytes(Math.ceil(length * 3 / 4)).toString("base64url").slice(0, length);
4132
+ }
4133
+ /** Default service configurations */
4134
+ const SERVICE_DEFAULTS = {
4135
+ postgres: {
4136
+ host: "postgres",
4137
+ port: 5432,
4138
+ username: "app",
4139
+ database: "app"
4140
+ },
4141
+ redis: {
4142
+ host: "redis",
4143
+ port: 6379,
4144
+ username: "default"
4145
+ },
4146
+ rabbitmq: {
4147
+ host: "rabbitmq",
4148
+ port: 5672,
4149
+ username: "app",
4150
+ vhost: "/"
4151
+ }
4152
+ };
4153
+ /**
4154
+ * Generate credentials for a specific service.
4155
+ */
4156
+ function generateServiceCredentials(service) {
4157
+ const defaults = SERVICE_DEFAULTS[service];
4158
+ return {
4159
+ ...defaults,
4160
+ password: generateSecurePassword()
4161
+ };
4162
+ }
4163
+ /**
4164
+ * Generate credentials for multiple services.
4165
+ */
4166
+ function generateServicesCredentials(services) {
4167
+ const result = {};
4168
+ for (const service of services) result[service] = generateServiceCredentials(service);
4169
+ return result;
4170
+ }
4171
+ /**
4172
+ * Generate connection URL for PostgreSQL.
4173
+ */
4174
+ function generatePostgresUrl(creds) {
4175
+ const { username, password, host, port, database } = creds;
4176
+ return `postgresql://${username}:${encodeURIComponent(password)}@${host}:${port}/${database}`;
4177
+ }
4178
+ /**
4179
+ * Generate connection URL for Redis.
4180
+ */
4181
+ function generateRedisUrl(creds) {
4182
+ const { password, host, port } = creds;
4183
+ return `redis://:${encodeURIComponent(password)}@${host}:${port}`;
4184
+ }
4185
+ /**
4186
+ * Generate connection URL for RabbitMQ.
4187
+ */
4188
+ function generateRabbitmqUrl(creds) {
4189
+ const { username, password, host, port, vhost } = creds;
4190
+ const encodedVhost = encodeURIComponent(vhost ?? "/");
4191
+ return `amqp://${username}:${encodeURIComponent(password)}@${host}:${port}/${encodedVhost}`;
4192
+ }
4193
+ /**
4194
+ * Generate connection URLs from service credentials.
4195
+ */
4196
+ function generateConnectionUrls(services) {
4197
+ const urls = {};
4198
+ if (services.postgres) urls.DATABASE_URL = generatePostgresUrl(services.postgres);
4199
+ if (services.redis) urls.REDIS_URL = generateRedisUrl(services.redis);
4200
+ if (services.rabbitmq) urls.RABBITMQ_URL = generateRabbitmqUrl(services.rabbitmq);
4201
+ return urls;
4202
+ }
4203
+ /**
4204
+ * Create a new StageSecrets object with generated credentials.
4205
+ */
4206
+ function createStageSecrets(stage, services) {
4207
+ const now = (/* @__PURE__ */ new Date()).toISOString();
4208
+ const serviceCredentials = generateServicesCredentials(services);
4209
+ const urls = generateConnectionUrls(serviceCredentials);
4210
+ return {
4211
+ stage,
4212
+ createdAt: now,
4213
+ updatedAt: now,
4214
+ services: serviceCredentials,
4215
+ urls,
4216
+ custom: {}
4217
+ };
4218
+ }
4219
+ /**
4220
+ * Rotate password for a specific service.
4221
+ */
4222
+ function rotateServicePassword(secrets, service) {
4223
+ const currentCreds = secrets.services[service];
4224
+ if (!currentCreds) throw new Error(`Service "${service}" not configured in secrets`);
4225
+ const newCreds = {
4226
+ ...currentCreds,
4227
+ password: generateSecurePassword()
4228
+ };
4229
+ const newServices = {
4230
+ ...secrets.services,
4231
+ [service]: newCreds
4232
+ };
4233
+ return {
4234
+ ...secrets,
4235
+ updatedAt: (/* @__PURE__ */ new Date()).toISOString(),
4236
+ services: newServices,
4237
+ urls: generateConnectionUrls(newServices)
4238
+ };
4239
+ }
4240
+
4241
+ //#endregion
4242
+ //#region src/secrets/index.ts
4243
+ const logger = console;
4244
+ /**
4245
+ * Extract service names from compose config.
4246
+ */
4247
+ function getServicesFromConfig(services) {
4248
+ if (!services) return [];
4249
+ if (Array.isArray(services)) return services;
4250
+ return Object.entries(services).filter(([, config$1]) => config$1).map(([name$1]) => name$1);
4251
+ }
4252
+ /**
4253
+ * Initialize secrets for a stage.
4254
+ * Generates secure random passwords for configured services.
4255
+ */
4256
+ async function secretsInitCommand(options) {
4257
+ const { stage, force } = options;
4258
+ if (!force && secretsExist(stage)) {
4259
+ logger.error(`Secrets already exist for stage "${stage}". Use --force to overwrite.`);
4260
+ process.exit(1);
4261
+ }
4262
+ const config$1 = await loadConfig();
4263
+ const services = getServicesFromConfig(config$1.docker?.compose?.services);
4264
+ if (services.length === 0) logger.warn("No services configured in docker.compose.services. Creating secrets with empty services.");
4265
+ const secrets = createStageSecrets(stage, services);
4266
+ await writeStageSecrets(secrets);
4267
+ logger.log(`\n✓ Secrets initialized for stage "${stage}"`);
4268
+ logger.log(` Location: .gkm/secrets/${stage}.json`);
4269
+ logger.log("\n Generated credentials for:");
4270
+ for (const service of services) logger.log(` - ${service}`);
4271
+ if (secrets.urls.DATABASE_URL) logger.log(`\n DATABASE_URL: ${maskUrl(secrets.urls.DATABASE_URL)}`);
4272
+ if (secrets.urls.REDIS_URL) logger.log(` REDIS_URL: ${maskUrl(secrets.urls.REDIS_URL)}`);
4273
+ if (secrets.urls.RABBITMQ_URL) logger.log(` RABBITMQ_URL: ${maskUrl(secrets.urls.RABBITMQ_URL)}`);
4274
+ logger.log(`\n Use "gkm secrets:show --stage ${stage}" to view secrets`);
4275
+ logger.log(" Use \"gkm secrets:set <KEY> <VALUE> --stage " + stage + "\" to add custom secrets");
4276
+ }
4277
+ /**
4278
+ * Read all data from stdin.
4279
+ */
4280
+ async function readStdin() {
4281
+ const chunks = [];
4282
+ for await (const chunk of process.stdin) chunks.push(chunk);
4283
+ return Buffer.concat(chunks).toString("utf-8").trim();
4284
+ }
4285
+ /**
4286
+ * Set a custom secret.
4287
+ * If value is not provided, reads from stdin.
4288
+ */
4289
+ async function secretsSetCommand(key, value, options) {
4290
+ const { stage } = options;
4291
+ let secretValue = value;
4292
+ if (!secretValue) {
4293
+ if (process.stdin.isTTY) {
4294
+ logger.error("No value provided. Use: gkm secrets:set KEY VALUE --stage <stage>");
4295
+ logger.error("Or pipe from stdin: echo \"value\" | gkm secrets:set KEY --stage <stage>");
4296
+ process.exit(1);
4297
+ }
4298
+ secretValue = await readStdin();
4299
+ if (!secretValue) {
4300
+ logger.error("No value received from stdin");
4301
+ process.exit(1);
4302
+ }
4303
+ }
4304
+ try {
4305
+ await setCustomSecret(stage, key, secretValue);
4306
+ logger.log(`\n✓ Secret "${key}" set for stage "${stage}"`);
4307
+ } catch (error) {
4308
+ logger.error(error instanceof Error ? error.message : "Failed to set secret");
4309
+ process.exit(1);
4310
+ }
4311
+ }
4312
+ /**
4313
+ * Show secrets for a stage.
4314
+ */
4315
+ async function secretsShowCommand(options) {
4316
+ const { stage, reveal } = options;
4317
+ const secrets = await readStageSecrets(stage);
4318
+ if (!secrets) {
4319
+ logger.error(`No secrets found for stage "${stage}". Run "gkm secrets:init --stage ${stage}" first.`);
4320
+ process.exit(1);
4321
+ }
4322
+ logger.log(`\nSecrets for stage "${stage}":`);
4323
+ logger.log(` Created: ${secrets.createdAt}`);
4324
+ logger.log(` Updated: ${secrets.updatedAt}`);
4325
+ logger.log("\nService Credentials:");
4326
+ for (const [service, creds] of Object.entries(secrets.services)) if (creds) {
4327
+ logger.log(`\n ${service}:`);
4328
+ logger.log(` host: ${creds.host}`);
4329
+ logger.log(` port: ${creds.port}`);
4330
+ logger.log(` username: ${creds.username}`);
4331
+ logger.log(` password: ${reveal ? creds.password : maskPassword(creds.password)}`);
4332
+ if (creds.database) logger.log(` database: ${creds.database}`);
4333
+ if (creds.vhost) logger.log(` vhost: ${creds.vhost}`);
4334
+ }
4335
+ logger.log("\nConnection URLs:");
4336
+ if (secrets.urls.DATABASE_URL) logger.log(` DATABASE_URL: ${reveal ? secrets.urls.DATABASE_URL : maskUrl(secrets.urls.DATABASE_URL)}`);
4337
+ if (secrets.urls.REDIS_URL) logger.log(` REDIS_URL: ${reveal ? secrets.urls.REDIS_URL : maskUrl(secrets.urls.REDIS_URL)}`);
4338
+ if (secrets.urls.RABBITMQ_URL) logger.log(` RABBITMQ_URL: ${reveal ? secrets.urls.RABBITMQ_URL : maskUrl(secrets.urls.RABBITMQ_URL)}`);
4339
+ const customKeys = Object.keys(secrets.custom);
4340
+ if (customKeys.length > 0) {
4341
+ logger.log("\nCustom Secrets:");
4342
+ for (const [key, value] of Object.entries(secrets.custom)) logger.log(` ${key}: ${reveal ? value : maskPassword(value)}`);
4343
+ }
4344
+ if (!reveal) logger.log("\nUse --reveal to show actual values");
4345
+ }
4346
+ /**
4347
+ * Rotate passwords for services.
4348
+ */
4349
+ async function secretsRotateCommand(options) {
4350
+ const { stage, service } = options;
4351
+ const secrets = await readStageSecrets(stage);
4352
+ if (!secrets) {
4353
+ logger.error(`No secrets found for stage "${stage}". Run "gkm secrets:init --stage ${stage}" first.`);
4354
+ process.exit(1);
4355
+ }
4356
+ if (service) {
4357
+ if (!secrets.services[service]) {
4358
+ logger.error(`Service "${service}" not configured in stage "${stage}"`);
4359
+ process.exit(1);
4360
+ }
4361
+ const updated = rotateServicePassword(secrets, service);
4362
+ await writeStageSecrets(updated);
4363
+ logger.log(`\n✓ Password rotated for ${service} in stage "${stage}"`);
4364
+ } else {
4365
+ let updated = secrets;
4366
+ const services = Object.keys(secrets.services);
4367
+ for (const svc of services) updated = rotateServicePassword(updated, svc);
4368
+ await writeStageSecrets(updated);
4369
+ logger.log(`\n✓ Passwords rotated for all services in stage "${stage}": ${services.join(", ")}`);
4370
+ }
4371
+ logger.log(`\nUse "gkm secrets:show --stage ${stage}" to view new values`);
4372
+ }
4373
+ /**
4374
+ * Import secrets from a JSON file.
4375
+ */
4376
+ async function secretsImportCommand(file, options) {
4377
+ const { stage, merge = true } = options;
4378
+ if (!existsSync(file)) {
4379
+ logger.error(`File not found: ${file}`);
4380
+ process.exit(1);
4381
+ }
4382
+ let importedSecrets;
4383
+ try {
4384
+ const content = await readFile(file, "utf-8");
4385
+ importedSecrets = JSON.parse(content);
4386
+ if (typeof importedSecrets !== "object" || importedSecrets === null) throw new Error("JSON must be an object");
4387
+ for (const [key, value] of Object.entries(importedSecrets)) if (typeof value !== "string") throw new Error(`Value for "${key}" must be a string, got ${typeof value}`);
4388
+ } catch (error) {
4389
+ logger.error(`Failed to parse JSON file: ${error instanceof Error ? error.message : "Invalid JSON"}`);
4390
+ process.exit(1);
4391
+ }
4392
+ const secrets = await readStageSecrets(stage);
4393
+ if (!secrets) {
4394
+ logger.error(`No secrets found for stage "${stage}". Run "gkm secrets:init --stage ${stage}" first.`);
4395
+ process.exit(1);
4396
+ }
4397
+ const updatedCustom = merge ? {
4398
+ ...secrets.custom,
4399
+ ...importedSecrets
4400
+ } : importedSecrets;
4401
+ const updated = {
4402
+ ...secrets,
4403
+ updatedAt: (/* @__PURE__ */ new Date()).toISOString(),
4404
+ custom: updatedCustom
4405
+ };
4406
+ await writeStageSecrets(updated);
4407
+ const importedCount = Object.keys(importedSecrets).length;
4408
+ const totalCount = Object.keys(updatedCustom).length;
4409
+ logger.log(`\n✓ Imported ${importedCount} secrets for stage "${stage}"`);
4410
+ if (merge && totalCount > importedCount) logger.log(` Total custom secrets: ${totalCount}`);
4411
+ logger.log("\n Imported keys:");
4412
+ for (const key of Object.keys(importedSecrets)) logger.log(` - ${key}`);
4413
+ }
4414
+ /**
4415
+ * Mask password in a URL for display.
4416
+ */
4417
+ function maskUrl(url) {
4418
+ try {
4419
+ const parsed = new URL(url);
4420
+ if (parsed.password) parsed.password = maskPassword(parsed.password);
4421
+ return parsed.toString();
4422
+ } catch {
4423
+ return url;
4424
+ }
2668
4425
  }
2669
4426
 
2670
4427
  //#endregion
@@ -2676,34 +4433,39 @@ program.command("init").description("Scaffold a new project").argument("[name]",
2676
4433
  const globalOptions = program.opts();
2677
4434
  if (globalOptions.cwd) process.chdir(globalOptions.cwd);
2678
4435
  await initCommand(name$1, options);
2679
- } catch (error) {
2680
- console.error("Init failed:", error.message);
4436
+ } catch (_error) {
2681
4437
  process.exit(1);
2682
4438
  }
2683
4439
  });
2684
- program.command("build").description("Build handlers from endpoints, functions, and crons").option("--provider <provider>", "Target provider for generated handlers (aws, server)").option("--providers <providers>", "[DEPRECATED] Use --provider instead. Target providers for generated handlers (comma-separated)").option("--enable-openapi", "Enable OpenAPI documentation generation for server builds").action(async (options) => {
4440
+ program.command("build").description("Build handlers from endpoints, functions, and crons").option("--provider <provider>", "Target provider for generated handlers (aws, server)").option("--providers <providers>", "[DEPRECATED] Use --provider instead. Target providers for generated handlers (comma-separated)").option("--enable-openapi", "Enable OpenAPI documentation generation for server builds").option("--production", "Build for production (no dev tools, bundled output)").option("--skip-bundle", "Skip bundling step in production build").option("--stage <stage>", "Inject encrypted secrets for deployment stage").action(async (options) => {
2685
4441
  try {
2686
4442
  const globalOptions = program.opts();
2687
4443
  if (globalOptions.cwd) process.chdir(globalOptions.cwd);
2688
4444
  if (options.provider) {
2689
- if (!["aws", "server"].includes(options.provider)) {
2690
- console.error(`Invalid provider: ${options.provider}. Must be 'aws' or 'server'.`);
2691
- process.exit(1);
2692
- }
4445
+ if (!["aws", "server"].includes(options.provider)) process.exit(1);
2693
4446
  await buildCommand({
2694
4447
  provider: options.provider,
2695
- enableOpenApi: options.enableOpenapi || false
4448
+ enableOpenApi: options.enableOpenapi || false,
4449
+ production: options.production || false,
4450
+ skipBundle: options.skipBundle || false,
4451
+ stage: options.stage
2696
4452
  });
2697
4453
  } else if (options.providers) {
2698
- console.warn("⚠️ --providers flag is deprecated. Use --provider instead.");
2699
4454
  const providerList = [...new Set(options.providers.split(",").map((p) => p.trim()))];
2700
4455
  await buildCommand({
2701
4456
  providers: providerList,
2702
- enableOpenApi: options.enableOpenapi || false
4457
+ enableOpenApi: options.enableOpenapi || false,
4458
+ production: options.production || false,
4459
+ skipBundle: options.skipBundle || false,
4460
+ stage: options.stage
2703
4461
  });
2704
- } else await buildCommand({ enableOpenApi: options.enableOpenapi || false });
2705
- } catch (error) {
2706
- console.error("Build failed:", error.message);
4462
+ } else await buildCommand({
4463
+ enableOpenApi: options.enableOpenapi || false,
4464
+ production: options.production || false,
4465
+ skipBundle: options.skipBundle || false,
4466
+ stage: options.stage
4467
+ });
4468
+ } catch (_error) {
2707
4469
  process.exit(1);
2708
4470
  }
2709
4471
  });
@@ -2712,12 +4474,11 @@ program.command("dev").description("Start development server with automatic relo
2712
4474
  const globalOptions = program.opts();
2713
4475
  if (globalOptions.cwd) process.chdir(globalOptions.cwd);
2714
4476
  await devCommand({
2715
- port: options.port ? Number.parseInt(options.port) : 3e3,
4477
+ port: options.port ? Number.parseInt(options.port, 10) : 3e3,
2716
4478
  portExplicit: !!options.port,
2717
4479
  enableOpenApi: options.enableOpenapi ?? true
2718
4480
  });
2719
- } catch (error) {
2720
- console.error("Dev server failed:", error.message);
4481
+ } catch (_error) {
2721
4482
  process.exit(1);
2722
4483
  }
2723
4484
  });
@@ -2741,8 +4502,7 @@ program.command("openapi").description("Generate OpenAPI specification from endp
2741
4502
  const globalOptions = program.opts();
2742
4503
  if (globalOptions.cwd) process.chdir(globalOptions.cwd);
2743
4504
  await openapiCommand({});
2744
- } catch (error) {
2745
- console.error("OpenAPI generation failed:", error.message);
4505
+ } catch (_error) {
2746
4506
  process.exit(1);
2747
4507
  }
2748
4508
  });
@@ -2751,8 +4511,194 @@ program.command("generate:react-query").description("Generate React Query hooks
2751
4511
  const globalOptions = program.opts();
2752
4512
  if (globalOptions.cwd) process.chdir(globalOptions.cwd);
2753
4513
  await generateReactQueryCommand(options);
4514
+ } catch (_error) {
4515
+ process.exit(1);
4516
+ }
4517
+ });
4518
+ program.command("docker").description("Generate Docker deployment files").option("--build", "Build Docker image after generating files").option("--push", "Push image to registry after building").option("--tag <tag>", "Image tag", "latest").option("--registry <registry>", "Container registry URL").option("--slim", "Use slim Dockerfile (assumes pre-built bundle exists)").option("--turbo", "Use turbo prune for monorepo optimization").option("--turbo-package <name>", "Package name for turbo prune").action(async (options) => {
4519
+ try {
4520
+ const globalOptions = program.opts();
4521
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4522
+ await dockerCommand(options);
4523
+ } catch (_error) {
4524
+ process.exit(1);
4525
+ }
4526
+ });
4527
+ program.command("prepack").description("Generate Docker files for production deployment").option("--build", "Build Docker image after generating files").option("--push", "Push image to registry after building").option("--tag <tag>", "Image tag", "latest").option("--registry <registry>", "Container registry URL").option("--slim", "Build locally first, then use slim Dockerfile").option("--skip-bundle", "Skip bundling step (only with --slim)").option("--turbo", "Use turbo prune for monorepo optimization").option("--turbo-package <name>", "Package name for turbo prune").action(async (options) => {
4528
+ try {
4529
+ const globalOptions = program.opts();
4530
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4531
+ if (options.slim) await buildCommand({
4532
+ provider: "server",
4533
+ production: true,
4534
+ skipBundle: options.skipBundle
4535
+ });
4536
+ await dockerCommand({
4537
+ build: options.build,
4538
+ push: options.push,
4539
+ tag: options.tag,
4540
+ registry: options.registry,
4541
+ slim: options.slim,
4542
+ turbo: options.turbo,
4543
+ turboPackage: options.turboPackage
4544
+ });
4545
+ if (options.slim) {}
4546
+ if (options.build) {
4547
+ const tag = options.tag ?? "latest";
4548
+ const registry = options.registry;
4549
+ const _imageRef = registry ? `${registry}/api:${tag}` : `api:${tag}`;
4550
+ }
4551
+ } catch (_error) {
4552
+ process.exit(1);
4553
+ }
4554
+ });
4555
+ program.command("secrets:init").description("Initialize secrets for a deployment stage").requiredOption("--stage <stage>", "Stage name (e.g., production, staging)").option("--force", "Overwrite existing secrets").action(async (options) => {
4556
+ try {
4557
+ const globalOptions = program.opts();
4558
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4559
+ await secretsInitCommand(options);
4560
+ } catch (_error) {
4561
+ process.exit(1);
4562
+ }
4563
+ });
4564
+ program.command("secrets:set").description("Set a custom secret for a stage").argument("<key>", "Secret key (e.g., API_KEY)").argument("[value]", "Secret value (reads from stdin if omitted)").requiredOption("--stage <stage>", "Stage name").action(async (key, value, options) => {
4565
+ try {
4566
+ const globalOptions = program.opts();
4567
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4568
+ await secretsSetCommand(key, value, options);
4569
+ } catch (_error) {
4570
+ process.exit(1);
4571
+ }
4572
+ });
4573
+ program.command("secrets:show").description("Show secrets for a stage").requiredOption("--stage <stage>", "Stage name").option("--reveal", "Show actual secret values (not masked)").action(async (options) => {
4574
+ try {
4575
+ const globalOptions = program.opts();
4576
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4577
+ await secretsShowCommand(options);
4578
+ } catch (_error) {
4579
+ process.exit(1);
4580
+ }
4581
+ });
4582
+ program.command("secrets:rotate").description("Rotate service passwords").requiredOption("--stage <stage>", "Stage name").option("--service <service>", "Specific service to rotate (postgres, redis, rabbitmq)").action(async (options) => {
4583
+ try {
4584
+ const globalOptions = program.opts();
4585
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4586
+ await secretsRotateCommand(options);
4587
+ } catch (_error) {
4588
+ process.exit(1);
4589
+ }
4590
+ });
4591
+ program.command("secrets:import").description("Import secrets from a JSON file").argument("<file>", "JSON file path (e.g., secrets.json)").requiredOption("--stage <stage>", "Stage name").option("--no-merge", "Replace all custom secrets instead of merging").action(async (file, options) => {
4592
+ try {
4593
+ const globalOptions = program.opts();
4594
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4595
+ await secretsImportCommand(file, options);
4596
+ } catch (_error) {
4597
+ process.exit(1);
4598
+ }
4599
+ });
4600
+ program.command("deploy").description("Deploy application to a provider").requiredOption("--provider <provider>", "Deploy provider (docker, dokploy, aws-lambda)").requiredOption("--stage <stage>", "Deployment stage (e.g., production, staging)").option("--tag <tag>", "Image tag (default: stage-timestamp)").option("--skip-push", "Skip pushing image to registry").option("--skip-build", "Skip build step (use existing build)").action(async (options) => {
4601
+ try {
4602
+ const globalOptions = program.opts();
4603
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4604
+ const validProviders = [
4605
+ "docker",
4606
+ "dokploy",
4607
+ "aws-lambda"
4608
+ ];
4609
+ if (!validProviders.includes(options.provider)) {
4610
+ console.error(`Invalid provider: ${options.provider}\nValid providers: ${validProviders.join(", ")}`);
4611
+ process.exit(1);
4612
+ }
4613
+ await deployCommand({
4614
+ provider: options.provider,
4615
+ stage: options.stage,
4616
+ tag: options.tag,
4617
+ skipPush: options.skipPush,
4618
+ skipBuild: options.skipBuild
4619
+ });
4620
+ } catch (_error) {
4621
+ process.exit(1);
4622
+ }
4623
+ });
4624
+ program.command("deploy:init").description("Initialize Dokploy deployment (create project and application)").option("--endpoint <url>", "Dokploy server URL (uses stored credentials if logged in)").requiredOption("--project <name>", "Project name (creates if not exists)").requiredOption("--app <name>", "Application name").option("--project-id <id>", "Use existing project ID instead of creating").option("--registry-id <id>", "Configure registry for the application").action(async (options) => {
4625
+ try {
4626
+ const globalOptions = program.opts();
4627
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4628
+ await deployInitCommand({
4629
+ endpoint: options.endpoint,
4630
+ projectName: options.project,
4631
+ appName: options.app,
4632
+ projectId: options.projectId,
4633
+ registryId: options.registryId
4634
+ });
4635
+ } catch (error) {
4636
+ console.error(error instanceof Error ? error.message : "Failed to initialize deployment");
4637
+ process.exit(1);
4638
+ }
4639
+ });
4640
+ program.command("deploy:list").description("List Dokploy resources (projects, registries)").option("--endpoint <url>", "Dokploy server URL (uses stored credentials if logged in)").option("--projects", "List projects").option("--registries", "List registries").action(async (options) => {
4641
+ try {
4642
+ const globalOptions = program.opts();
4643
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4644
+ if (options.projects) await deployListCommand({
4645
+ endpoint: options.endpoint,
4646
+ resource: "projects"
4647
+ });
4648
+ if (options.registries) await deployListCommand({
4649
+ endpoint: options.endpoint,
4650
+ resource: "registries"
4651
+ });
4652
+ if (!options.projects && !options.registries) {
4653
+ await deployListCommand({
4654
+ endpoint: options.endpoint,
4655
+ resource: "projects"
4656
+ });
4657
+ await deployListCommand({
4658
+ endpoint: options.endpoint,
4659
+ resource: "registries"
4660
+ });
4661
+ }
4662
+ } catch (error) {
4663
+ console.error(error instanceof Error ? error.message : "Failed to list resources");
4664
+ process.exit(1);
4665
+ }
4666
+ });
4667
+ program.command("login").description("Authenticate with a deployment service").option("--service <service>", "Service to login to (dokploy)", "dokploy").option("--token <token>", "API token (will prompt if not provided)").option("--endpoint <url>", "Service endpoint URL").action(async (options) => {
4668
+ try {
4669
+ const globalOptions = program.opts();
4670
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4671
+ if (options.service !== "dokploy") {
4672
+ console.error(`Unknown service: ${options.service}. Supported: dokploy`);
4673
+ process.exit(1);
4674
+ }
4675
+ await loginCommand({
4676
+ service: options.service,
4677
+ token: options.token,
4678
+ endpoint: options.endpoint
4679
+ });
4680
+ } catch (error) {
4681
+ console.error(error instanceof Error ? error.message : "Failed to login");
4682
+ process.exit(1);
4683
+ }
4684
+ });
4685
+ program.command("logout").description("Remove stored credentials").option("--service <service>", "Service to logout from (dokploy, all)", "dokploy").action(async (options) => {
4686
+ try {
4687
+ const globalOptions = program.opts();
4688
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4689
+ await logoutCommand({ service: options.service });
4690
+ } catch (error) {
4691
+ console.error(error instanceof Error ? error.message : "Failed to logout");
4692
+ process.exit(1);
4693
+ }
4694
+ });
4695
+ program.command("whoami").description("Show current authentication status").action(async () => {
4696
+ try {
4697
+ const globalOptions = program.opts();
4698
+ if (globalOptions.cwd) process.chdir(globalOptions.cwd);
4699
+ await whoamiCommand();
2754
4700
  } catch (error) {
2755
- console.error("React Query generation failed:", error.message);
4701
+ console.error(error instanceof Error ? error.message : "Failed to get status");
2756
4702
  process.exit(1);
2757
4703
  }
2758
4704
  });