@geekmidas/cli 0.10.0 → 0.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146) hide show
  1. package/README.md +525 -0
  2. package/dist/bundler-B1qy9b-j.cjs +112 -0
  3. package/dist/bundler-B1qy9b-j.cjs.map +1 -0
  4. package/dist/bundler-DskIqW2t.mjs +111 -0
  5. package/dist/bundler-DskIqW2t.mjs.map +1 -0
  6. package/dist/{config-C9aXOHBe.cjs → config-AmInkU7k.cjs} +8 -8
  7. package/dist/config-AmInkU7k.cjs.map +1 -0
  8. package/dist/{config-BrkUalUh.mjs → config-DYULeEv8.mjs} +3 -3
  9. package/dist/config-DYULeEv8.mjs.map +1 -0
  10. package/dist/config.cjs +1 -1
  11. package/dist/config.d.cts +1 -1
  12. package/dist/config.d.mts +1 -1
  13. package/dist/config.mjs +1 -1
  14. package/dist/encryption-C8H-38Yy.mjs +42 -0
  15. package/dist/encryption-C8H-38Yy.mjs.map +1 -0
  16. package/dist/encryption-Dyf_r1h-.cjs +44 -0
  17. package/dist/encryption-Dyf_r1h-.cjs.map +1 -0
  18. package/dist/index.cjs +2123 -179
  19. package/dist/index.cjs.map +1 -1
  20. package/dist/index.mjs +2141 -192
  21. package/dist/index.mjs.map +1 -1
  22. package/dist/{openapi-CZLI4QTr.mjs → openapi-BfFlOBCG.mjs} +801 -38
  23. package/dist/openapi-BfFlOBCG.mjs.map +1 -0
  24. package/dist/{openapi-BeHLKcwP.cjs → openapi-Bt_1FDpT.cjs} +794 -31
  25. package/dist/openapi-Bt_1FDpT.cjs.map +1 -0
  26. package/dist/{openapi-react-query-o5iMi8tz.cjs → openapi-react-query-B-sNWHFU.cjs} +5 -5
  27. package/dist/openapi-react-query-B-sNWHFU.cjs.map +1 -0
  28. package/dist/{openapi-react-query-CcciaVu5.mjs → openapi-react-query-B6XTeGqS.mjs} +5 -5
  29. package/dist/openapi-react-query-B6XTeGqS.mjs.map +1 -0
  30. package/dist/openapi-react-query.cjs +1 -1
  31. package/dist/openapi-react-query.d.cts.map +1 -1
  32. package/dist/openapi-react-query.d.mts.map +1 -1
  33. package/dist/openapi-react-query.mjs +1 -1
  34. package/dist/openapi.cjs +2 -2
  35. package/dist/openapi.d.cts +1 -1
  36. package/dist/openapi.d.cts.map +1 -1
  37. package/dist/openapi.d.mts +1 -1
  38. package/dist/openapi.d.mts.map +1 -1
  39. package/dist/openapi.mjs +2 -2
  40. package/dist/storage-BOOpAF8N.cjs +5 -0
  41. package/dist/storage-Bj1E26lU.cjs +187 -0
  42. package/dist/storage-Bj1E26lU.cjs.map +1 -0
  43. package/dist/storage-kSxTjkNb.mjs +133 -0
  44. package/dist/storage-kSxTjkNb.mjs.map +1 -0
  45. package/dist/storage-tgZSUnKl.mjs +3 -0
  46. package/dist/{types-b-vwGpqc.d.cts → types-BR0M2v_c.d.mts} +100 -1
  47. package/dist/types-BR0M2v_c.d.mts.map +1 -0
  48. package/dist/{types-DXgiA1sF.d.mts → types-BhkZc-vm.d.cts} +100 -1
  49. package/dist/types-BhkZc-vm.d.cts.map +1 -0
  50. package/examples/cron-example.ts +27 -27
  51. package/examples/env.ts +27 -27
  52. package/examples/function-example.ts +31 -31
  53. package/examples/gkm.config.json +20 -20
  54. package/examples/gkm.config.ts +8 -8
  55. package/examples/gkm.minimal.config.json +5 -5
  56. package/examples/gkm.production.config.json +25 -25
  57. package/examples/logger.ts +2 -2
  58. package/package.json +6 -6
  59. package/src/__tests__/EndpointGenerator.hooks.spec.ts +191 -191
  60. package/src/__tests__/config.spec.ts +55 -55
  61. package/src/__tests__/loadEnvFiles.spec.ts +93 -93
  62. package/src/__tests__/normalizeHooksConfig.spec.ts +58 -58
  63. package/src/__tests__/openapi-react-query.spec.ts +497 -497
  64. package/src/__tests__/openapi.spec.ts +428 -428
  65. package/src/__tests__/test-helpers.ts +76 -76
  66. package/src/auth/__tests__/credentials.spec.ts +204 -0
  67. package/src/auth/__tests__/index.spec.ts +168 -0
  68. package/src/auth/credentials.ts +187 -0
  69. package/src/auth/index.ts +226 -0
  70. package/src/build/__tests__/bundler.spec.ts +444 -0
  71. package/src/build/__tests__/index-new.spec.ts +474 -474
  72. package/src/build/__tests__/manifests.spec.ts +333 -333
  73. package/src/build/bundler.ts +210 -0
  74. package/src/build/endpoint-analyzer.ts +236 -0
  75. package/src/build/handler-templates.ts +1253 -0
  76. package/src/build/index.ts +260 -179
  77. package/src/build/manifests.ts +52 -52
  78. package/src/build/providerResolver.ts +145 -145
  79. package/src/build/types.ts +64 -43
  80. package/src/config.ts +39 -39
  81. package/src/deploy/__tests__/docker.spec.ts +111 -0
  82. package/src/deploy/__tests__/dokploy.spec.ts +245 -0
  83. package/src/deploy/__tests__/init.spec.ts +662 -0
  84. package/src/deploy/docker.ts +128 -0
  85. package/src/deploy/dokploy.ts +204 -0
  86. package/src/deploy/index.ts +136 -0
  87. package/src/deploy/init.ts +484 -0
  88. package/src/deploy/types.ts +48 -0
  89. package/src/dev/__tests__/index.spec.ts +266 -266
  90. package/src/dev/index.ts +647 -601
  91. package/src/docker/__tests__/compose.spec.ts +531 -0
  92. package/src/docker/__tests__/templates.spec.ts +280 -0
  93. package/src/docker/compose.ts +273 -0
  94. package/src/docker/index.ts +230 -0
  95. package/src/docker/templates.ts +446 -0
  96. package/src/generators/CronGenerator.ts +72 -72
  97. package/src/generators/EndpointGenerator.ts +699 -398
  98. package/src/generators/FunctionGenerator.ts +84 -84
  99. package/src/generators/Generator.ts +72 -72
  100. package/src/generators/OpenApiTsGenerator.ts +577 -577
  101. package/src/generators/SubscriberGenerator.ts +124 -124
  102. package/src/generators/__tests__/CronGenerator.spec.ts +433 -433
  103. package/src/generators/__tests__/EndpointGenerator.spec.ts +532 -382
  104. package/src/generators/__tests__/FunctionGenerator.spec.ts +244 -244
  105. package/src/generators/__tests__/SubscriberGenerator.spec.ts +397 -382
  106. package/src/generators/index.ts +4 -4
  107. package/src/index.ts +623 -201
  108. package/src/init/__tests__/generators.spec.ts +334 -334
  109. package/src/init/__tests__/init.spec.ts +332 -332
  110. package/src/init/__tests__/utils.spec.ts +89 -89
  111. package/src/init/generators/config.ts +175 -175
  112. package/src/init/generators/docker.ts +41 -41
  113. package/src/init/generators/env.ts +72 -72
  114. package/src/init/generators/index.ts +1 -1
  115. package/src/init/generators/models.ts +64 -64
  116. package/src/init/generators/monorepo.ts +161 -161
  117. package/src/init/generators/package.ts +71 -71
  118. package/src/init/generators/source.ts +6 -6
  119. package/src/init/index.ts +203 -208
  120. package/src/init/templates/api.ts +115 -115
  121. package/src/init/templates/index.ts +75 -75
  122. package/src/init/templates/minimal.ts +98 -98
  123. package/src/init/templates/serverless.ts +89 -89
  124. package/src/init/templates/worker.ts +98 -98
  125. package/src/init/utils.ts +54 -56
  126. package/src/openapi-react-query.ts +194 -194
  127. package/src/openapi.ts +63 -63
  128. package/src/secrets/__tests__/encryption.spec.ts +226 -0
  129. package/src/secrets/__tests__/generator.spec.ts +319 -0
  130. package/src/secrets/__tests__/index.spec.ts +91 -0
  131. package/src/secrets/__tests__/storage.spec.ts +611 -0
  132. package/src/secrets/encryption.ts +91 -0
  133. package/src/secrets/generator.ts +164 -0
  134. package/src/secrets/index.ts +383 -0
  135. package/src/secrets/storage.ts +192 -0
  136. package/src/secrets/types.ts +53 -0
  137. package/src/types.ts +295 -176
  138. package/tsdown.config.ts +11 -8
  139. package/dist/config-BrkUalUh.mjs.map +0 -1
  140. package/dist/config-C9aXOHBe.cjs.map +0 -1
  141. package/dist/openapi-BeHLKcwP.cjs.map +0 -1
  142. package/dist/openapi-CZLI4QTr.mjs.map +0 -1
  143. package/dist/openapi-react-query-CcciaVu5.mjs.map +0 -1
  144. package/dist/openapi-react-query-o5iMi8tz.cjs.map +0 -1
  145. package/dist/types-DXgiA1sF.d.mts.map +0 -1
  146. package/dist/types-b-vwGpqc.d.cts.map +0 -1
package/dist/index.cjs CHANGED
@@ -1,14 +1,17 @@
1
1
  #!/usr/bin/env -S npx tsx
2
2
  const require_chunk = require('./chunk-CUT6urMc.cjs');
3
- const require_config = require('./config-C9aXOHBe.cjs');
4
- const require_openapi = require('./openapi-BeHLKcwP.cjs');
5
- const require_openapi_react_query = require('./openapi-react-query-o5iMi8tz.cjs');
6
- const path = require_chunk.__toESM(require("path"));
3
+ const require_config = require('./config-AmInkU7k.cjs');
4
+ const require_openapi = require('./openapi-Bt_1FDpT.cjs');
5
+ const require_openapi_react_query = require('./openapi-react-query-B-sNWHFU.cjs');
6
+ const require_storage = require('./storage-Bj1E26lU.cjs');
7
+ const node_fs = require_chunk.__toESM(require("node:fs"));
8
+ const node_path = require_chunk.__toESM(require("node:path"));
7
9
  const commander = require_chunk.__toESM(require("commander"));
10
+ const node_process = require_chunk.__toESM(require("node:process"));
11
+ const node_readline_promises = require_chunk.__toESM(require("node:readline/promises"));
8
12
  const node_fs_promises = require_chunk.__toESM(require("node:fs/promises"));
9
- const node_path = require_chunk.__toESM(require("node:path"));
13
+ const node_os = require_chunk.__toESM(require("node:os"));
10
14
  const node_child_process = require_chunk.__toESM(require("node:child_process"));
11
- const node_fs = require_chunk.__toESM(require("node:fs"));
12
15
  const node_net = require_chunk.__toESM(require("node:net"));
13
16
  const chokidar = require_chunk.__toESM(require("chokidar"));
14
17
  const dotenv = require_chunk.__toESM(require("dotenv"));
@@ -17,10 +20,11 @@ const __geekmidas_constructs_crons = require_chunk.__toESM(require("@geekmidas/c
17
20
  const __geekmidas_constructs_functions = require_chunk.__toESM(require("@geekmidas/constructs/functions"));
18
21
  const __geekmidas_constructs_subscribers = require_chunk.__toESM(require("@geekmidas/constructs/subscribers"));
19
22
  const prompts = require_chunk.__toESM(require("prompts"));
23
+ const node_crypto = require_chunk.__toESM(require("node:crypto"));
20
24
 
21
25
  //#region package.json
22
26
  var name = "@geekmidas/cli";
23
- var version = "0.10.0";
27
+ var version = "0.13.0";
24
28
  var description = "CLI tools for building Lambda handlers, server applications, and generating OpenAPI specs";
25
29
  var private$1 = false;
26
30
  var type = "module";
@@ -100,6 +104,227 @@ var package_default = {
100
104
  peerDependenciesMeta
101
105
  };
102
106
 
107
+ //#endregion
108
+ //#region src/auth/credentials.ts
109
+ /**
110
+ * Get the path to the credentials directory
111
+ */
112
+ function getCredentialsDir(options) {
113
+ const root = options?.root ?? (0, node_os.homedir)();
114
+ return (0, node_path.join)(root, ".gkm");
115
+ }
116
+ /**
117
+ * Get the path to the credentials file
118
+ */
119
+ function getCredentialsPath(options) {
120
+ return (0, node_path.join)(getCredentialsDir(options), "credentials.json");
121
+ }
122
+ /**
123
+ * Ensure the credentials directory exists
124
+ */
125
+ function ensureCredentialsDir(options) {
126
+ const dir = getCredentialsDir(options);
127
+ if (!(0, node_fs.existsSync)(dir)) (0, node_fs.mkdirSync)(dir, {
128
+ recursive: true,
129
+ mode: 448
130
+ });
131
+ }
132
+ /**
133
+ * Read stored credentials from disk
134
+ */
135
+ async function readCredentials(options) {
136
+ const path = getCredentialsPath(options);
137
+ if (!(0, node_fs.existsSync)(path)) return {};
138
+ try {
139
+ const content = await (0, node_fs_promises.readFile)(path, "utf-8");
140
+ return JSON.parse(content);
141
+ } catch {
142
+ return {};
143
+ }
144
+ }
145
+ /**
146
+ * Write credentials to disk
147
+ */
148
+ async function writeCredentials(credentials, options) {
149
+ ensureCredentialsDir(options);
150
+ const path = getCredentialsPath(options);
151
+ await (0, node_fs_promises.writeFile)(path, JSON.stringify(credentials, null, 2), { mode: 384 });
152
+ }
153
+ /**
154
+ * Store Dokploy credentials
155
+ */
156
+ async function storeDokployCredentials(token, endpoint, options) {
157
+ const credentials = await readCredentials(options);
158
+ credentials.dokploy = {
159
+ token,
160
+ endpoint,
161
+ storedAt: (/* @__PURE__ */ new Date()).toISOString()
162
+ };
163
+ await writeCredentials(credentials, options);
164
+ }
165
+ /**
166
+ * Get stored Dokploy credentials
167
+ */
168
+ async function getDokployCredentials(options) {
169
+ const credentials = await readCredentials(options);
170
+ if (!credentials.dokploy) return null;
171
+ return {
172
+ token: credentials.dokploy.token,
173
+ endpoint: credentials.dokploy.endpoint
174
+ };
175
+ }
176
+ /**
177
+ * Remove Dokploy credentials
178
+ */
179
+ async function removeDokployCredentials(options) {
180
+ const credentials = await readCredentials(options);
181
+ if (!credentials.dokploy) return false;
182
+ delete credentials.dokploy;
183
+ await writeCredentials(credentials, options);
184
+ return true;
185
+ }
186
+ /**
187
+ * Get Dokploy API token, checking stored credentials first, then environment
188
+ */
189
+ async function getDokployToken(options) {
190
+ const envToken = process.env.DOKPLOY_API_TOKEN;
191
+ if (envToken) return envToken;
192
+ const stored = await getDokployCredentials(options);
193
+ if (stored) return stored.token;
194
+ return null;
195
+ }
196
+
197
+ //#endregion
198
+ //#region src/auth/index.ts
199
+ const logger$9 = console;
200
+ /**
201
+ * Validate Dokploy token by making a test API call
202
+ */
203
+ async function validateDokployToken(endpoint, token) {
204
+ try {
205
+ const response = await fetch(`${endpoint}/api/project.all`, {
206
+ method: "GET",
207
+ headers: {
208
+ "Content-Type": "application/json",
209
+ Authorization: `Bearer ${token}`
210
+ }
211
+ });
212
+ return response.ok;
213
+ } catch {
214
+ return false;
215
+ }
216
+ }
217
+ /**
218
+ * Prompt for input (handles both TTY and non-TTY)
219
+ */
220
+ async function prompt(message, hidden = false) {
221
+ if (!process.stdin.isTTY) throw new Error("Interactive input required. Please provide --token option.");
222
+ const rl = node_readline_promises.createInterface({
223
+ input: node_process.stdin,
224
+ output: node_process.stdout
225
+ });
226
+ try {
227
+ if (hidden) {
228
+ process.stdout.write(message);
229
+ return new Promise((resolve$1) => {
230
+ let value = "";
231
+ const onData = (char) => {
232
+ const c = char.toString();
233
+ if (c === "\n" || c === "\r") {
234
+ process.stdin.removeListener("data", onData);
235
+ process.stdin.setRawMode(false);
236
+ process.stdout.write("\n");
237
+ resolve$1(value);
238
+ } else if (c === "") process.exit(1);
239
+ else if (c === "" || c === "\b") {
240
+ if (value.length > 0) value = value.slice(0, -1);
241
+ } else value += c;
242
+ };
243
+ process.stdin.setRawMode(true);
244
+ process.stdin.resume();
245
+ process.stdin.on("data", onData);
246
+ });
247
+ } else return await rl.question(message);
248
+ } finally {
249
+ rl.close();
250
+ }
251
+ }
252
+ /**
253
+ * Login to a service
254
+ */
255
+ async function loginCommand(options) {
256
+ const { service, token: providedToken, endpoint: providedEndpoint } = options;
257
+ if (service === "dokploy") {
258
+ logger$9.log("\n🔐 Logging in to Dokploy...\n");
259
+ let endpoint = providedEndpoint;
260
+ if (!endpoint) endpoint = await prompt("Dokploy URL (e.g., https://dokploy.example.com): ");
261
+ endpoint = endpoint.replace(/\/$/, "");
262
+ try {
263
+ new URL(endpoint);
264
+ } catch {
265
+ logger$9.error("Invalid URL format");
266
+ process.exit(1);
267
+ }
268
+ let token = providedToken;
269
+ if (!token) {
270
+ logger$9.log(`\nGenerate a token at: ${endpoint}/settings/profile\n`);
271
+ token = await prompt("API Token: ", true);
272
+ }
273
+ if (!token) {
274
+ logger$9.error("Token is required");
275
+ process.exit(1);
276
+ }
277
+ logger$9.log("\nValidating credentials...");
278
+ const isValid = await validateDokployToken(endpoint, token);
279
+ if (!isValid) {
280
+ logger$9.error("\n✗ Invalid credentials. Please check your token and try again.");
281
+ process.exit(1);
282
+ }
283
+ await storeDokployCredentials(token, endpoint);
284
+ logger$9.log("\n✓ Successfully logged in to Dokploy!");
285
+ logger$9.log(` Endpoint: ${endpoint}`);
286
+ logger$9.log(` Credentials stored in: ${getCredentialsPath()}`);
287
+ logger$9.log("\nYou can now use deploy commands without setting DOKPLOY_API_TOKEN.");
288
+ }
289
+ }
290
+ /**
291
+ * Logout from a service
292
+ */
293
+ async function logoutCommand(options) {
294
+ const { service = "dokploy" } = options;
295
+ if (service === "all") {
296
+ const dokployRemoved = await removeDokployCredentials();
297
+ if (dokployRemoved) logger$9.log("\n✓ Logged out from all services");
298
+ else logger$9.log("\nNo stored credentials found");
299
+ return;
300
+ }
301
+ if (service === "dokploy") {
302
+ const removed = await removeDokployCredentials();
303
+ if (removed) logger$9.log("\n✓ Logged out from Dokploy");
304
+ else logger$9.log("\nNo Dokploy credentials found");
305
+ }
306
+ }
307
+ /**
308
+ * Show current login status
309
+ */
310
+ async function whoamiCommand() {
311
+ logger$9.log("\n📋 Current credentials:\n");
312
+ const dokploy = await getDokployCredentials();
313
+ if (dokploy) {
314
+ logger$9.log(" Dokploy:");
315
+ logger$9.log(` Endpoint: ${dokploy.endpoint}`);
316
+ logger$9.log(` Token: ${maskToken(dokploy.token)}`);
317
+ } else logger$9.log(" Dokploy: Not logged in");
318
+ logger$9.log(`\n Credentials file: ${getCredentialsPath()}`);
319
+ }
320
+ /**
321
+ * Mask a token for display
322
+ */
323
+ function maskToken(token) {
324
+ if (token.length <= 8) return "****";
325
+ return `${token.slice(0, 4)}...${token.slice(-4)}`;
326
+ }
327
+
103
328
  //#endregion
104
329
  //#region src/build/providerResolver.ts
105
330
  /**
@@ -176,13 +401,13 @@ function isEnabled(config) {
176
401
  var CronGenerator = class extends require_openapi.ConstructGenerator {
177
402
  async build(context, constructs, outputDir, options) {
178
403
  const provider = options?.provider || "aws-lambda";
179
- const logger$3 = console;
404
+ const logger$10 = console;
180
405
  const cronInfos = [];
181
406
  if (constructs.length === 0 || provider !== "aws-lambda") return cronInfos;
182
407
  const cronsDir = (0, node_path.join)(outputDir, "crons");
183
408
  await (0, node_fs_promises.mkdir)(cronsDir, { recursive: true });
184
- for (const { key, construct, path: path$1 } of constructs) {
185
- const handlerFile = await this.generateCronHandler(cronsDir, path$1.relative, key, context);
409
+ for (const { key, construct, path } of constructs) {
410
+ const handlerFile = await this.generateCronHandler(cronsDir, path.relative, key, context);
186
411
  cronInfos.push({
187
412
  name: key,
188
413
  handler: (0, node_path.relative)(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
@@ -191,7 +416,7 @@ var CronGenerator = class extends require_openapi.ConstructGenerator {
191
416
  memorySize: construct.memorySize,
192
417
  environment: await construct.getEnvironment()
193
418
  });
194
- logger$3.log(`Generated cron handler: ${key}`);
419
+ logger$10.log(`Generated cron handler: ${key}`);
195
420
  }
196
421
  return cronInfos;
197
422
  }
@@ -227,13 +452,13 @@ var FunctionGenerator = class extends require_openapi.ConstructGenerator {
227
452
  }
228
453
  async build(context, constructs, outputDir, options) {
229
454
  const provider = options?.provider || "aws-lambda";
230
- const logger$3 = console;
455
+ const logger$10 = console;
231
456
  const functionInfos = [];
232
457
  if (constructs.length === 0 || provider !== "aws-lambda") return functionInfos;
233
458
  const functionsDir = (0, node_path.join)(outputDir, "functions");
234
459
  await (0, node_fs_promises.mkdir)(functionsDir, { recursive: true });
235
- for (const { key, construct, path: path$1 } of constructs) {
236
- const handlerFile = await this.generateFunctionHandler(functionsDir, path$1.relative, key, context);
460
+ for (const { key, construct, path } of constructs) {
461
+ const handlerFile = await this.generateFunctionHandler(functionsDir, path.relative, key, context);
237
462
  functionInfos.push({
238
463
  name: key,
239
464
  handler: (0, node_path.relative)(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
@@ -241,7 +466,7 @@ var FunctionGenerator = class extends require_openapi.ConstructGenerator {
241
466
  memorySize: construct.memorySize,
242
467
  environment: await construct.getEnvironment()
243
468
  });
244
- logger$3.log(`Generated function handler: ${key}`);
469
+ logger$10.log(`Generated function handler: ${key}`);
245
470
  }
246
471
  return functionInfos;
247
472
  }
@@ -274,19 +499,19 @@ var SubscriberGenerator = class extends require_openapi.ConstructGenerator {
274
499
  }
275
500
  async build(context, constructs, outputDir, options) {
276
501
  const provider = options?.provider || "aws-lambda";
277
- const logger$3 = console;
502
+ const logger$10 = console;
278
503
  const subscriberInfos = [];
279
504
  if (provider === "server") {
280
505
  await this.generateServerSubscribersFile(outputDir, constructs);
281
- logger$3.log(`Generated server subscribers file with ${constructs.length} subscribers (polling mode)`);
506
+ logger$10.log(`Generated server subscribers file with ${constructs.length} subscribers (polling mode)`);
282
507
  return subscriberInfos;
283
508
  }
284
509
  if (constructs.length === 0) return subscriberInfos;
285
510
  if (provider !== "aws-lambda") return subscriberInfos;
286
511
  const subscribersDir = (0, node_path.join)(outputDir, "subscribers");
287
512
  await (0, node_fs_promises.mkdir)(subscribersDir, { recursive: true });
288
- for (const { key, construct, path: path$1 } of constructs) {
289
- const handlerFile = await this.generateSubscriberHandler(subscribersDir, path$1.relative, key, construct, context);
513
+ for (const { key, construct, path } of constructs) {
514
+ const handlerFile = await this.generateSubscriberHandler(subscribersDir, path.relative, key, construct, context);
290
515
  subscriberInfos.push({
291
516
  name: key,
292
517
  handler: (0, node_path.relative)(process.cwd(), handlerFile).replace(/\.ts$/, ".handler"),
@@ -295,7 +520,7 @@ var SubscriberGenerator = class extends require_openapi.ConstructGenerator {
295
520
  memorySize: construct.memorySize,
296
521
  environment: await construct.getEnvironment()
297
522
  });
298
- logger$3.log(`Generated subscriber handler: ${key}`);
523
+ logger$10.log(`Generated subscriber handler: ${key}`);
299
524
  }
300
525
  return subscriberInfos;
301
526
  }
@@ -321,11 +546,11 @@ export const handler = adapter.handler;
321
546
  const subscribersFileName = "subscribers.ts";
322
547
  const subscribersPath = (0, node_path.join)(outputDir, subscribersFileName);
323
548
  const importsByFile = /* @__PURE__ */ new Map();
324
- for (const { path: path$1, key } of subscribers) {
325
- const relativePath = (0, node_path.relative)((0, node_path.dirname)(subscribersPath), path$1.relative);
549
+ for (const { path, key } of subscribers) {
550
+ const relativePath = (0, node_path.relative)((0, node_path.dirname)(subscribersPath), path.relative);
326
551
  const importPath = relativePath.replace(/\.ts$/, ".js");
327
552
  if (!importsByFile.has(importPath)) importsByFile.set(importPath, []);
328
- importsByFile.get(importPath).push(key);
553
+ importsByFile.get(importPath)?.push(key);
329
554
  }
330
555
  const imports = Array.from(importsByFile.entries()).map(([importPath, exports$2]) => `import { ${exports$2.join(", ")} } from '${importPath}';`).join("\n");
331
556
  const allExportNames = subscribers.map(({ key }) => key);
@@ -380,7 +605,7 @@ export async function setupSubscribers(
380
605
  return;
381
606
  }
382
607
 
383
- const serviceDiscovery = ServiceDiscovery.getInstance(logger, envParser);
608
+ const serviceDiscovery = ServiceDiscovery.getInstance(envParser);
384
609
 
385
610
  // Create connection once, outside the loop (more efficient)
386
611
  // EventConnectionFactory automatically determines the right connection type
@@ -461,7 +686,7 @@ export async function setupSubscribers(
461
686
 
462
687
  //#endregion
463
688
  //#region src/dev/index.ts
464
- const logger$2 = console;
689
+ const logger$8 = console;
465
690
  /**
466
691
  * Load environment files
467
692
  * @internal Exported for testing
@@ -512,7 +737,7 @@ async function findAvailablePort(preferredPort, maxAttempts = 10) {
512
737
  for (let i = 0; i < maxAttempts; i++) {
513
738
  const port = preferredPort + i;
514
739
  if (await isPortAvailable(port)) return port;
515
- logger$2.log(`⚠️ Port ${port} is in use, trying ${port + 1}...`);
740
+ logger$8.log(`⚠️ Port ${port} is in use, trying ${port + 1}...`);
516
741
  }
517
742
  throw new Error(`Could not find an available port after trying ${maxAttempts} ports starting from ${preferredPort}`);
518
743
  }
@@ -582,33 +807,61 @@ function normalizeHooksConfig(config) {
582
807
  const resolvedPath = (0, node_path.resolve)(process.cwd(), serverPath);
583
808
  return { serverHooksPath: resolvedPath };
584
809
  }
810
+ /**
811
+ * Normalize production configuration
812
+ * @internal Exported for testing
813
+ */
814
+ function normalizeProductionConfig(cliProduction, configProduction) {
815
+ if (!cliProduction) return void 0;
816
+ const config = configProduction ?? {};
817
+ return {
818
+ enabled: true,
819
+ bundle: config.bundle ?? true,
820
+ minify: config.minify ?? true,
821
+ healthCheck: config.healthCheck ?? "/health",
822
+ gracefulShutdown: config.gracefulShutdown ?? true,
823
+ external: config.external ?? [],
824
+ subscribers: config.subscribers ?? "exclude",
825
+ openapi: config.openapi ?? false,
826
+ optimizedHandlers: config.optimizedHandlers ?? true
827
+ };
828
+ }
829
+ /**
830
+ * Get production config from GkmConfig
831
+ * @internal
832
+ */
833
+ function getProductionConfigFromGkm(config) {
834
+ const serverConfig = config.providers?.server;
835
+ if (typeof serverConfig === "object") return serverConfig.production;
836
+ return void 0;
837
+ }
585
838
  async function devCommand(options) {
586
839
  const defaultEnv = loadEnvFiles(".env");
587
- if (defaultEnv.loaded.length > 0) logger$2.log(`📦 Loaded env: ${defaultEnv.loaded.join(", ")}`);
840
+ if (defaultEnv.loaded.length > 0) logger$8.log(`📦 Loaded env: ${defaultEnv.loaded.join(", ")}`);
588
841
  const config = await require_config.loadConfig();
589
842
  if (config.env) {
590
843
  const { loaded, missing } = loadEnvFiles(config.env);
591
- if (loaded.length > 0) logger$2.log(`📦 Loaded env: ${loaded.join(", ")}`);
592
- if (missing.length > 0) logger$2.warn(`⚠️ Missing env files: ${missing.join(", ")}`);
844
+ if (loaded.length > 0) logger$8.log(`📦 Loaded env: ${loaded.join(", ")}`);
845
+ if (missing.length > 0) logger$8.warn(`⚠️ Missing env files: ${missing.join(", ")}`);
593
846
  }
594
847
  const resolved = resolveProviders(config, { provider: "server" });
595
- logger$2.log("🚀 Starting development server...");
596
- logger$2.log(`Loading routes from: ${config.routes}`);
597
- if (config.functions) logger$2.log(`Loading functions from: ${config.functions}`);
598
- if (config.crons) logger$2.log(`Loading crons from: ${config.crons}`);
599
- if (config.subscribers) logger$2.log(`Loading subscribers from: ${config.subscribers}`);
600
- logger$2.log(`Using envParser: ${config.envParser}`);
848
+ logger$8.log("🚀 Starting development server...");
849
+ logger$8.log(`Loading routes from: ${config.routes}`);
850
+ if (config.functions) logger$8.log(`Loading functions from: ${config.functions}`);
851
+ if (config.crons) logger$8.log(`Loading crons from: ${config.crons}`);
852
+ if (config.subscribers) logger$8.log(`Loading subscribers from: ${config.subscribers}`);
853
+ logger$8.log(`Using envParser: ${config.envParser}`);
601
854
  const { path: envParserPath, importPattern: envParserImportPattern } = require_config.parseModuleConfig(config.envParser, "envParser");
602
855
  const { path: loggerPath, importPattern: loggerImportPattern } = require_config.parseModuleConfig(config.logger, "logger");
603
856
  const telescope = normalizeTelescopeConfig(config.telescope);
604
- if (telescope) logger$2.log(`🔭 Telescope enabled at ${telescope.path}`);
857
+ if (telescope) logger$8.log(`🔭 Telescope enabled at ${telescope.path}`);
605
858
  const studio = normalizeStudioConfig(config.studio);
606
- if (studio) logger$2.log(`🗄️ Studio enabled at ${studio.path}`);
859
+ if (studio) logger$8.log(`🗄️ Studio enabled at ${studio.path}`);
607
860
  const hooks = normalizeHooksConfig(config.hooks);
608
- if (hooks) logger$2.log(`🪝 Server hooks enabled from ${config.hooks?.server}`);
861
+ if (hooks) logger$8.log(`🪝 Server hooks enabled from ${config.hooks?.server}`);
609
862
  const openApiConfig = require_openapi.resolveOpenApiConfig(config);
610
863
  const enableOpenApi = openApiConfig.enabled || resolved.enableOpenApi;
611
- if (enableOpenApi) logger$2.log(`📄 OpenAPI output: ${require_openapi.OPENAPI_OUTPUT_PATH}`);
864
+ if (enableOpenApi) logger$8.log(`📄 OpenAPI output: ${require_openapi.OPENAPI_OUTPUT_PATH}`);
612
865
  const buildContext = {
613
866
  envParserPath,
614
867
  envParserImportPattern,
@@ -637,7 +890,7 @@ async function devCommand(options) {
637
890
  ...hooksFile ? [hooksFile.endsWith(".ts") ? hooksFile : `${hooksFile}.ts`] : []
638
891
  ].flat().filter((p) => typeof p === "string");
639
892
  const normalizedPatterns = watchPatterns.map((p) => p.startsWith("./") ? p.slice(2) : p);
640
- logger$2.log(`👀 Watching for changes in: ${normalizedPatterns.join(", ")}`);
893
+ logger$8.log(`👀 Watching for changes in: ${normalizedPatterns.join(", ")}`);
641
894
  const resolvedFiles = await (0, fast_glob.default)(normalizedPatterns, {
642
895
  cwd: process.cwd(),
643
896
  absolute: false,
@@ -647,32 +900,32 @@ async function devCommand(options) {
647
900
  const parts = f.split("/");
648
901
  return parts.slice(0, -1).join("/");
649
902
  }))];
650
- logger$2.log(`📁 Found ${resolvedFiles.length} files in ${dirsToWatch.length} directories`);
903
+ logger$8.log(`📁 Found ${resolvedFiles.length} files in ${dirsToWatch.length} directories`);
651
904
  const watcher = chokidar.default.watch([...resolvedFiles, ...dirsToWatch], {
652
- ignored: /(^|[\/\\])\../,
905
+ ignored: /(^|[/\\])\../,
653
906
  persistent: true,
654
907
  ignoreInitial: true,
655
908
  cwd: process.cwd()
656
909
  });
657
910
  watcher.on("ready", () => {
658
- logger$2.log("🔍 File watcher ready");
911
+ logger$8.log("🔍 File watcher ready");
659
912
  });
660
913
  watcher.on("error", (error) => {
661
- logger$2.error("❌ Watcher error:", error);
914
+ logger$8.error("❌ Watcher error:", error);
662
915
  });
663
916
  let rebuildTimeout = null;
664
- watcher.on("change", async (path$1) => {
665
- logger$2.log(`📝 File changed: ${path$1}`);
917
+ watcher.on("change", async (path) => {
918
+ logger$8.log(`📝 File changed: ${path}`);
666
919
  if (rebuildTimeout) clearTimeout(rebuildTimeout);
667
920
  rebuildTimeout = setTimeout(async () => {
668
921
  try {
669
- logger$2.log("🔄 Rebuilding...");
922
+ logger$8.log("🔄 Rebuilding...");
670
923
  await buildServer(config, buildContext, resolved.providers[0], enableOpenApi);
671
924
  if (enableOpenApi) await require_openapi.generateOpenApi(config, { silent: true });
672
- logger$2.log("✅ Rebuild complete, restarting server...");
925
+ logger$8.log("✅ Rebuild complete, restarting server...");
673
926
  await devServer.restart();
674
927
  } catch (error) {
675
- logger$2.error("❌ Rebuild failed:", error.message);
928
+ logger$8.error("❌ Rebuild failed:", error.message);
676
929
  }
677
930
  }, 300);
678
931
  });
@@ -680,9 +933,9 @@ async function devCommand(options) {
680
933
  const shutdown = () => {
681
934
  if (isShuttingDown) return;
682
935
  isShuttingDown = true;
683
- logger$2.log("\n🛑 Shutting down...");
936
+ logger$8.log("\n🛑 Shutting down...");
684
937
  Promise.all([watcher.close(), devServer.stop()]).catch((err) => {
685
- logger$2.error("Error during shutdown:", err);
938
+ logger$8.error("Error during shutdown:", err);
686
939
  }).finally(() => {
687
940
  process.exit(0);
688
941
  });
@@ -735,11 +988,11 @@ var DevServer = class {
735
988
  this.actualPort = this.requestedPort;
736
989
  } else {
737
990
  this.actualPort = await findAvailablePort(this.requestedPort);
738
- if (this.actualPort !== this.requestedPort) logger$2.log(`ℹ️ Port ${this.requestedPort} was in use, using port ${this.actualPort} instead`);
991
+ if (this.actualPort !== this.requestedPort) logger$8.log(`ℹ️ Port ${this.requestedPort} was in use, using port ${this.actualPort} instead`);
739
992
  }
740
993
  const serverEntryPath = (0, node_path.join)(process.cwd(), ".gkm", this.provider, "server.ts");
741
994
  await this.createServerEntry();
742
- logger$2.log(`\n✨ Starting server on port ${this.actualPort}...`);
995
+ logger$8.log(`\n✨ Starting server on port ${this.actualPort}...`);
743
996
  this.serverProcess = (0, node_child_process.spawn)("npx", [
744
997
  "tsx",
745
998
  serverEntryPath,
@@ -755,18 +1008,18 @@ var DevServer = class {
755
1008
  });
756
1009
  this.isRunning = true;
757
1010
  this.serverProcess.on("error", (error) => {
758
- logger$2.error("❌ Server error:", error);
1011
+ logger$8.error("❌ Server error:", error);
759
1012
  });
760
1013
  this.serverProcess.on("exit", (code, signal) => {
761
- if (code !== null && code !== 0 && signal !== "SIGTERM") logger$2.error(`❌ Server exited with code ${code}`);
1014
+ if (code !== null && code !== 0 && signal !== "SIGTERM") logger$8.error(`❌ Server exited with code ${code}`);
762
1015
  this.isRunning = false;
763
1016
  });
764
1017
  await new Promise((resolve$1) => setTimeout(resolve$1, 1e3));
765
1018
  if (this.isRunning) {
766
- logger$2.log(`\n🎉 Server running at http://localhost:${this.actualPort}`);
767
- if (this.enableOpenApi) logger$2.log(`📚 API Docs available at http://localhost:${this.actualPort}/__docs`);
768
- if (this.telescope) logger$2.log(`🔭 Telescope available at http://localhost:${this.actualPort}${this.telescope.path}`);
769
- if (this.studio) logger$2.log(`🗄️ Studio available at http://localhost:${this.actualPort}${this.studio.path}`);
1019
+ logger$8.log(`\n🎉 Server running at http://localhost:${this.actualPort}`);
1020
+ if (this.enableOpenApi) logger$8.log(`📚 API Docs available at http://localhost:${this.actualPort}/__docs`);
1021
+ if (this.telescope) logger$8.log(`🔭 Telescope available at http://localhost:${this.actualPort}${this.telescope.path}`);
1022
+ if (this.studio) logger$8.log(`🗄️ Studio available at http://localhost:${this.actualPort}${this.studio.path}`);
770
1023
  }
771
1024
  }
772
1025
  async stop() {
@@ -803,10 +1056,10 @@ var DevServer = class {
803
1056
  await this.start();
804
1057
  }
805
1058
  async createServerEntry() {
806
- const { writeFile: writeFile$5 } = await import("node:fs/promises");
807
- const { relative: relative$5, dirname: dirname$4 } = await import("node:path");
1059
+ const { writeFile: writeFile$8 } = await import("node:fs/promises");
1060
+ const { relative: relative$5, dirname: dirname$5 } = await import("node:path");
808
1061
  const serverPath = (0, node_path.join)(process.cwd(), ".gkm", this.provider, "server.ts");
809
- const relativeAppPath = relative$5(dirname$4(serverPath), (0, node_path.join)(dirname$4(serverPath), "app.js"));
1062
+ const relativeAppPath = relative$5(dirname$5(serverPath), (0, node_path.join)(dirname$5(serverPath), "app.js"));
810
1063
  const serveCode = this.runtime === "bun" ? `Bun.serve({
811
1064
  port,
812
1065
  fetch: app.fetch,
@@ -826,7 +1079,7 @@ var DevServer = class {
826
1079
  * Development server entry point
827
1080
  * This file is auto-generated by 'gkm dev'
828
1081
  */
829
- import { createApp } from './${relativeAppPath.startsWith(".") ? relativeAppPath : "./" + relativeAppPath}';
1082
+ import { createApp } from './${relativeAppPath.startsWith(".") ? relativeAppPath : `./${relativeAppPath}`}';
830
1083
 
831
1084
  const port = process.argv.includes('--port')
832
1085
  ? Number.parseInt(process.argv[process.argv.indexOf('--port') + 1])
@@ -846,15 +1099,15 @@ start({
846
1099
  process.exit(1);
847
1100
  });
848
1101
  `;
849
- await writeFile$5(serverPath, content);
1102
+ await writeFile$8(serverPath, content);
850
1103
  }
851
1104
  };
852
1105
 
853
1106
  //#endregion
854
1107
  //#region src/build/manifests.ts
855
- const logger$1 = console;
1108
+ const logger$7 = console;
856
1109
  async function generateAwsManifest(outputDir, routes, functions, crons, subscribers) {
857
- const manifestDir = (0, path.join)(outputDir, "manifest");
1110
+ const manifestDir = (0, node_path.join)(outputDir, "manifest");
858
1111
  await (0, node_fs_promises.mkdir)(manifestDir, { recursive: true });
859
1112
  const awsRoutes = routes.filter((r) => r.method !== "ALL");
860
1113
  const content = `export const manifest = {
@@ -875,13 +1128,13 @@ export type Authorizer = Route['authorizer'];
875
1128
  export type HttpMethod = Route['method'];
876
1129
  export type RoutePath = Route['path'];
877
1130
  `;
878
- const manifestPath = (0, path.join)(manifestDir, "aws.ts");
1131
+ const manifestPath = (0, node_path.join)(manifestDir, "aws.ts");
879
1132
  await (0, node_fs_promises.writeFile)(manifestPath, content);
880
- logger$1.log(`Generated AWS manifest with ${awsRoutes.length} routes, ${functions.length} functions, ${crons.length} crons, ${subscribers.length} subscribers`);
881
- logger$1.log(`Manifest: ${(0, path.relative)(process.cwd(), manifestPath)}`);
1133
+ logger$7.log(`Generated AWS manifest with ${awsRoutes.length} routes, ${functions.length} functions, ${crons.length} crons, ${subscribers.length} subscribers`);
1134
+ logger$7.log(`Manifest: ${(0, node_path.relative)(process.cwd(), manifestPath)}`);
882
1135
  }
883
1136
  async function generateServerManifest(outputDir, appInfo, routes, subscribers) {
884
- const manifestDir = (0, path.join)(outputDir, "manifest");
1137
+ const manifestDir = (0, node_path.join)(outputDir, "manifest");
885
1138
  await (0, node_fs_promises.mkdir)(manifestDir, { recursive: true });
886
1139
  const serverRoutes = routes.filter((r) => r.method !== "ALL").map((r) => ({
887
1140
  path: r.path,
@@ -907,37 +1160,44 @@ export type Authorizer = Route['authorizer'];
907
1160
  export type HttpMethod = Route['method'];
908
1161
  export type RoutePath = Route['path'];
909
1162
  `;
910
- const manifestPath = (0, path.join)(manifestDir, "server.ts");
1163
+ const manifestPath = (0, node_path.join)(manifestDir, "server.ts");
911
1164
  await (0, node_fs_promises.writeFile)(manifestPath, content);
912
- logger$1.log(`Generated server manifest with ${serverRoutes.length} routes, ${serverSubscribers.length} subscribers`);
913
- logger$1.log(`Manifest: ${(0, path.relative)(process.cwd(), manifestPath)}`);
1165
+ logger$7.log(`Generated server manifest with ${serverRoutes.length} routes, ${serverSubscribers.length} subscribers`);
1166
+ logger$7.log(`Manifest: ${(0, node_path.relative)(process.cwd(), manifestPath)}`);
914
1167
  }
915
1168
 
916
1169
  //#endregion
917
1170
  //#region src/build/index.ts
918
- const logger = console;
1171
+ const logger$6 = console;
919
1172
  async function buildCommand(options) {
920
1173
  const config = await require_config.loadConfig();
921
1174
  const resolved = resolveProviders(config, options);
922
- logger.log(`Building with providers: ${resolved.providers.join(", ")}`);
923
- logger.log(`Loading routes from: ${config.routes}`);
924
- if (config.functions) logger.log(`Loading functions from: ${config.functions}`);
925
- if (config.crons) logger.log(`Loading crons from: ${config.crons}`);
926
- if (config.subscribers) logger.log(`Loading subscribers from: ${config.subscribers}`);
927
- logger.log(`Using envParser: ${config.envParser}`);
1175
+ const productionConfigFromGkm = getProductionConfigFromGkm(config);
1176
+ const production = normalizeProductionConfig(options.production ?? false, productionConfigFromGkm);
1177
+ if (production) logger$6.log(`🏭 Building for PRODUCTION`);
1178
+ logger$6.log(`Building with providers: ${resolved.providers.join(", ")}`);
1179
+ logger$6.log(`Loading routes from: ${config.routes}`);
1180
+ if (config.functions) logger$6.log(`Loading functions from: ${config.functions}`);
1181
+ if (config.crons) logger$6.log(`Loading crons from: ${config.crons}`);
1182
+ if (config.subscribers) logger$6.log(`Loading subscribers from: ${config.subscribers}`);
1183
+ logger$6.log(`Using envParser: ${config.envParser}`);
928
1184
  const { path: envParserPath, importPattern: envParserImportPattern } = require_config.parseModuleConfig(config.envParser, "envParser");
929
1185
  const { path: loggerPath, importPattern: loggerImportPattern } = require_config.parseModuleConfig(config.logger, "logger");
930
- const telescope = normalizeTelescopeConfig(config.telescope);
931
- if (telescope) logger.log(`🔭 Telescope enabled at ${telescope.path}`);
1186
+ const telescope = production ? void 0 : normalizeTelescopeConfig(config.telescope);
1187
+ if (telescope) logger$6.log(`🔭 Telescope enabled at ${telescope.path}`);
1188
+ const studio = production ? void 0 : normalizeStudioConfig(config.studio);
1189
+ if (studio) logger$6.log(`🗄️ Studio enabled at ${studio.path}`);
932
1190
  const hooks = normalizeHooksConfig(config.hooks);
933
- if (hooks) logger.log(`🪝 Server hooks enabled`);
1191
+ if (hooks) logger$6.log(`🪝 Server hooks enabled`);
934
1192
  const buildContext = {
935
1193
  envParserPath,
936
1194
  envParserImportPattern,
937
1195
  loggerPath,
938
1196
  loggerImportPattern,
939
1197
  telescope,
940
- hooks
1198
+ studio,
1199
+ hooks,
1200
+ production
941
1201
  };
942
1202
  const endpointGenerator = new require_openapi.EndpointGenerator();
943
1203
  const functionGenerator = new FunctionGenerator();
@@ -949,45 +1209,1250 @@ async function buildCommand(options) {
949
1209
  config.crons ? cronGenerator.load(config.crons) : [],
950
1210
  config.subscribers ? subscriberGenerator.load(config.subscribers) : []
951
1211
  ]);
952
- logger.log(`Found ${allEndpoints.length} endpoints`);
953
- logger.log(`Found ${allFunctions.length} functions`);
954
- logger.log(`Found ${allCrons.length} crons`);
955
- logger.log(`Found ${allSubscribers.length} subscribers`);
1212
+ logger$6.log(`Found ${allEndpoints.length} endpoints`);
1213
+ logger$6.log(`Found ${allFunctions.length} functions`);
1214
+ logger$6.log(`Found ${allCrons.length} crons`);
1215
+ logger$6.log(`Found ${allSubscribers.length} subscribers`);
956
1216
  if (allEndpoints.length === 0 && allFunctions.length === 0 && allCrons.length === 0 && allSubscribers.length === 0) {
957
- logger.log("No endpoints, functions, crons, or subscribers found to process");
1217
+ logger$6.log("No endpoints, functions, crons, or subscribers found to process");
1218
+ return {};
1219
+ }
1220
+ const rootOutputDir = (0, node_path.join)(process.cwd(), ".gkm");
1221
+ await (0, node_fs_promises.mkdir)(rootOutputDir, { recursive: true });
1222
+ let result = {};
1223
+ for (const provider of resolved.providers) {
1224
+ const providerResult = await buildForProvider(provider, buildContext, rootOutputDir, endpointGenerator, functionGenerator, cronGenerator, subscriberGenerator, allEndpoints, allFunctions, allCrons, allSubscribers, resolved.enableOpenApi, options.skipBundle ?? false, options.stage);
1225
+ if (providerResult.masterKey) result = providerResult;
1226
+ }
1227
+ return result;
1228
+ }
1229
+ async function buildForProvider(provider, context, rootOutputDir, endpointGenerator, functionGenerator, cronGenerator, subscriberGenerator, endpoints, functions, crons, subscribers, enableOpenApi, skipBundle, stage) {
1230
+ const outputDir = (0, node_path.join)(process.cwd(), ".gkm", provider);
1231
+ await (0, node_fs_promises.mkdir)(outputDir, { recursive: true });
1232
+ logger$6.log(`\nGenerating handlers for provider: ${provider}`);
1233
+ const [routes, functionInfos, cronInfos, subscriberInfos] = await Promise.all([
1234
+ endpointGenerator.build(context, endpoints, outputDir, {
1235
+ provider,
1236
+ enableOpenApi
1237
+ }),
1238
+ functionGenerator.build(context, functions, outputDir, { provider }),
1239
+ cronGenerator.build(context, crons, outputDir, { provider }),
1240
+ subscriberGenerator.build(context, subscribers, outputDir, { provider })
1241
+ ]);
1242
+ logger$6.log(`Generated ${routes.length} routes, ${functionInfos.length} functions, ${cronInfos.length} crons, ${subscriberInfos.length} subscribers for ${provider}`);
1243
+ if (provider === "server") {
1244
+ const routeMetadata = await Promise.all(endpoints.map(async ({ construct }) => ({
1245
+ path: construct._path,
1246
+ method: construct.method,
1247
+ handler: "",
1248
+ authorizer: construct.authorizer?.name ?? "none"
1249
+ })));
1250
+ const appInfo = {
1251
+ handler: (0, node_path.relative)(process.cwd(), (0, node_path.join)(outputDir, "app.ts")),
1252
+ endpoints: (0, node_path.relative)(process.cwd(), (0, node_path.join)(outputDir, "endpoints.ts"))
1253
+ };
1254
+ await generateServerManifest(rootOutputDir, appInfo, routeMetadata, subscriberInfos);
1255
+ let masterKey;
1256
+ if (context.production?.bundle && !skipBundle) {
1257
+ logger$6.log(`\n📦 Bundling production server...`);
1258
+ const { bundleServer } = await Promise.resolve().then(() => require("./bundler-B1qy9b-j.cjs"));
1259
+ const allConstructs = [
1260
+ ...endpoints.map((e) => e.construct),
1261
+ ...functions.map((f) => f.construct),
1262
+ ...crons.map((c) => c.construct),
1263
+ ...subscribers.map((s) => s.construct)
1264
+ ];
1265
+ const bundleResult = await bundleServer({
1266
+ entryPoint: (0, node_path.join)(outputDir, "server.ts"),
1267
+ outputDir: (0, node_path.join)(outputDir, "dist"),
1268
+ minify: context.production.minify,
1269
+ sourcemap: false,
1270
+ external: context.production.external,
1271
+ stage,
1272
+ constructs: allConstructs
1273
+ });
1274
+ masterKey = bundleResult.masterKey;
1275
+ logger$6.log(`✅ Bundle complete: .gkm/server/dist/server.mjs`);
1276
+ if (masterKey) {
1277
+ logger$6.log(`\n🔐 Secrets encrypted for deployment`);
1278
+ logger$6.log(` Deploy with: GKM_MASTER_KEY=${masterKey}`);
1279
+ }
1280
+ }
1281
+ return { masterKey };
1282
+ } else await generateAwsManifest(rootOutputDir, routes, functionInfos, cronInfos, subscriberInfos);
1283
+ return {};
1284
+ }
1285
+
1286
+ //#endregion
1287
+ //#region src/deploy/docker.ts
1288
+ const logger$5 = console;
1289
+ /**
1290
+ * Get the full image reference
1291
+ */
1292
+ function getImageRef(registry, imageName, tag) {
1293
+ if (registry) return `${registry}/${imageName}:${tag}`;
1294
+ return `${imageName}:${tag}`;
1295
+ }
1296
+ /**
1297
+ * Build Docker image
1298
+ */
1299
+ async function buildImage(imageRef) {
1300
+ logger$5.log(`\n🔨 Building Docker image: ${imageRef}`);
1301
+ try {
1302
+ (0, node_child_process.execSync)(`DOCKER_BUILDKIT=1 docker build -f .gkm/docker/Dockerfile -t ${imageRef} .`, {
1303
+ cwd: process.cwd(),
1304
+ stdio: "inherit",
1305
+ env: {
1306
+ ...process.env,
1307
+ DOCKER_BUILDKIT: "1"
1308
+ }
1309
+ });
1310
+ logger$5.log(`✅ Image built: ${imageRef}`);
1311
+ } catch (error) {
1312
+ throw new Error(`Failed to build Docker image: ${error instanceof Error ? error.message : "Unknown error"}`);
1313
+ }
1314
+ }
1315
+ /**
1316
+ * Push Docker image to registry
1317
+ */
1318
+ async function pushImage(imageRef) {
1319
+ logger$5.log(`\n☁️ Pushing image: ${imageRef}`);
1320
+ try {
1321
+ (0, node_child_process.execSync)(`docker push ${imageRef}`, {
1322
+ cwd: process.cwd(),
1323
+ stdio: "inherit"
1324
+ });
1325
+ logger$5.log(`✅ Image pushed: ${imageRef}`);
1326
+ } catch (error) {
1327
+ throw new Error(`Failed to push Docker image: ${error instanceof Error ? error.message : "Unknown error"}`);
1328
+ }
1329
+ }
1330
+ /**
1331
+ * Deploy using Docker (build and optionally push image)
1332
+ */
1333
+ async function deployDocker(options) {
1334
+ const { stage, tag, skipPush, masterKey, config } = options;
1335
+ const imageName = config.imageName ?? "app";
1336
+ const imageRef = getImageRef(config.registry, imageName, tag);
1337
+ await buildImage(imageRef);
1338
+ if (!skipPush) if (!config.registry) logger$5.warn("\n⚠️ No registry configured. Use --skip-push or configure docker.registry in gkm.config.ts");
1339
+ else await pushImage(imageRef);
1340
+ logger$5.log("\n✅ Docker deployment ready!");
1341
+ logger$5.log(`\n📋 Deployment details:`);
1342
+ logger$5.log(` Image: ${imageRef}`);
1343
+ logger$5.log(` Stage: ${stage}`);
1344
+ if (masterKey) {
1345
+ logger$5.log(`\n🔐 Deploy with this environment variable:`);
1346
+ logger$5.log(` GKM_MASTER_KEY=${masterKey}`);
1347
+ logger$5.log("\n Example docker run:");
1348
+ logger$5.log(` docker run -e GKM_MASTER_KEY=${masterKey} ${imageRef}`);
1349
+ }
1350
+ return {
1351
+ imageRef,
1352
+ masterKey
1353
+ };
1354
+ }
1355
+ /**
1356
+ * Resolve Docker deploy config from gkm config
1357
+ */
1358
+ function resolveDockerConfig$1(config) {
1359
+ return {
1360
+ registry: config.docker?.registry,
1361
+ imageName: config.docker?.imageName
1362
+ };
1363
+ }
1364
+
1365
+ //#endregion
1366
+ //#region src/deploy/dokploy.ts
1367
+ const logger$4 = console;
1368
+ /**
1369
+ * Get the Dokploy API token from stored credentials or environment
1370
+ */
1371
+ async function getApiToken$1() {
1372
+ const token = await getDokployToken();
1373
+ if (!token) throw new Error("Dokploy credentials not found.\nRun \"gkm login --service dokploy\" to authenticate, or set DOKPLOY_API_TOKEN.");
1374
+ return token;
1375
+ }
1376
+ /**
1377
+ * Make a request to the Dokploy API
1378
+ */
1379
+ async function dokployRequest$1(endpoint, baseUrl, token, body) {
1380
+ const url = `${baseUrl}/api/${endpoint}`;
1381
+ const response = await fetch(url, {
1382
+ method: "POST",
1383
+ headers: {
1384
+ "Content-Type": "application/json",
1385
+ Authorization: `Bearer ${token}`
1386
+ },
1387
+ body: JSON.stringify(body)
1388
+ });
1389
+ if (!response.ok) {
1390
+ let errorMessage = `Dokploy API error: ${response.status} ${response.statusText}`;
1391
+ try {
1392
+ const errorBody = await response.json();
1393
+ if (errorBody.message) errorMessage = `Dokploy API error: ${errorBody.message}`;
1394
+ if (errorBody.issues?.length) errorMessage += `\n Issues: ${errorBody.issues.map((i) => i.message).join(", ")}`;
1395
+ } catch {}
1396
+ throw new Error(errorMessage);
1397
+ }
1398
+ return response.json();
1399
+ }
1400
+ /**
1401
+ * Update application environment variables
1402
+ */
1403
+ async function updateEnvironment(baseUrl, token, applicationId, envVars) {
1404
+ logger$4.log(" Updating environment variables...");
1405
+ const envString = Object.entries(envVars).map(([key, value]) => `${key}=${value}`).join("\n");
1406
+ await dokployRequest$1("application.update", baseUrl, token, {
1407
+ applicationId,
1408
+ env: envString
1409
+ });
1410
+ logger$4.log(" ✓ Environment variables updated");
1411
+ }
1412
+ /**
1413
+ * Trigger application deployment
1414
+ */
1415
+ async function triggerDeploy(baseUrl, token, applicationId) {
1416
+ logger$4.log(" Triggering deployment...");
1417
+ await dokployRequest$1("application.deploy", baseUrl, token, { applicationId });
1418
+ logger$4.log(" ✓ Deployment triggered");
1419
+ }
1420
+ /**
1421
+ * Deploy to Dokploy
1422
+ */
1423
+ async function deployDokploy(options) {
1424
+ const { stage, imageRef, masterKey, config } = options;
1425
+ logger$4.log(`\n🎯 Deploying to Dokploy...`);
1426
+ logger$4.log(` Endpoint: ${config.endpoint}`);
1427
+ logger$4.log(` Application: ${config.applicationId}`);
1428
+ const token = await getApiToken$1();
1429
+ const envVars = {};
1430
+ if (masterKey) envVars.GKM_MASTER_KEY = masterKey;
1431
+ if (Object.keys(envVars).length > 0) await updateEnvironment(config.endpoint, token, config.applicationId, envVars);
1432
+ await triggerDeploy(config.endpoint, token, config.applicationId);
1433
+ logger$4.log("\n✅ Dokploy deployment initiated!");
1434
+ logger$4.log(`\n📋 Deployment details:`);
1435
+ logger$4.log(` Image: ${imageRef}`);
1436
+ logger$4.log(` Stage: ${stage}`);
1437
+ logger$4.log(` Application ID: ${config.applicationId}`);
1438
+ if (masterKey) logger$4.log(`\n🔐 GKM_MASTER_KEY has been set in Dokploy environment`);
1439
+ const deploymentUrl = `${config.endpoint}/project/${config.projectId}`;
1440
+ logger$4.log(`\n🔗 View deployment: ${deploymentUrl}`);
1441
+ return {
1442
+ imageRef,
1443
+ masterKey,
1444
+ url: deploymentUrl
1445
+ };
1446
+ }
1447
+ /**
1448
+ * Validate Dokploy configuration
1449
+ */
1450
+ function validateDokployConfig(config) {
1451
+ if (!config) return false;
1452
+ const required = [
1453
+ "endpoint",
1454
+ "projectId",
1455
+ "applicationId"
1456
+ ];
1457
+ const missing = required.filter((key) => !config[key]);
1458
+ if (missing.length > 0) throw new Error(`Missing Dokploy configuration: ${missing.join(", ")}\nConfigure in gkm.config.ts:
1459
+ providers: {
1460
+ dokploy: {
1461
+ endpoint: 'https://dokploy.example.com',
1462
+ projectId: 'proj_xxx',
1463
+ applicationId: 'app_xxx',
1464
+ },
1465
+ }`);
1466
+ return true;
1467
+ }
1468
+
1469
+ //#endregion
1470
+ //#region src/deploy/index.ts
1471
+ const logger$3 = console;
1472
+ /**
1473
+ * Generate image tag from stage and timestamp
1474
+ */
1475
+ function generateTag(stage) {
1476
+ const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-").slice(0, 19);
1477
+ return `${stage}-${timestamp}`;
1478
+ }
1479
+ /**
1480
+ * Main deploy command
1481
+ */
1482
+ async function deployCommand(options) {
1483
+ const { provider, stage, tag, skipPush, skipBuild } = options;
1484
+ logger$3.log(`\n🚀 Deploying to ${provider}...`);
1485
+ logger$3.log(` Stage: ${stage}`);
1486
+ const config = await require_config.loadConfig();
1487
+ const imageTag = tag ?? generateTag(stage);
1488
+ logger$3.log(` Tag: ${imageTag}`);
1489
+ let masterKey;
1490
+ if (!skipBuild) {
1491
+ logger$3.log(`\n📦 Building for production...`);
1492
+ const buildResult = await buildCommand({
1493
+ provider: "server",
1494
+ production: true,
1495
+ stage
1496
+ });
1497
+ masterKey = buildResult.masterKey;
1498
+ } else logger$3.log(`\n⏭️ Skipping build (--skip-build)`);
1499
+ const dockerConfig = resolveDockerConfig$1(config);
1500
+ const imageName = dockerConfig.imageName ?? "app";
1501
+ const registry = dockerConfig.registry;
1502
+ const imageRef = registry ? `${registry}/${imageName}:${imageTag}` : `${imageName}:${imageTag}`;
1503
+ let result;
1504
+ switch (provider) {
1505
+ case "docker": {
1506
+ result = await deployDocker({
1507
+ stage,
1508
+ tag: imageTag,
1509
+ skipPush,
1510
+ masterKey,
1511
+ config: dockerConfig
1512
+ });
1513
+ break;
1514
+ }
1515
+ case "dokploy": {
1516
+ const dokployConfigRaw = config.providers?.dokploy;
1517
+ if (typeof dokployConfigRaw === "boolean" || !dokployConfigRaw) throw new Error("Dokploy provider requires configuration.\nConfigure in gkm.config.ts:\n providers: {\n dokploy: {\n endpoint: 'https://dokploy.example.com',\n projectId: 'proj_xxx',\n applicationId: 'app_xxx',\n },\n }");
1518
+ validateDokployConfig(dokployConfigRaw);
1519
+ const dokployConfig = dokployConfigRaw;
1520
+ await deployDocker({
1521
+ stage,
1522
+ tag: imageTag,
1523
+ skipPush: false,
1524
+ masterKey,
1525
+ config: {
1526
+ registry: dokployConfig.registry ?? dockerConfig.registry,
1527
+ imageName: dockerConfig.imageName
1528
+ }
1529
+ });
1530
+ result = await deployDokploy({
1531
+ stage,
1532
+ tag: imageTag,
1533
+ imageRef,
1534
+ masterKey,
1535
+ config: dokployConfig
1536
+ });
1537
+ break;
1538
+ }
1539
+ case "aws-lambda": {
1540
+ logger$3.log("\n⚠️ AWS Lambda deployment is not yet implemented.");
1541
+ logger$3.log(" Use SST or AWS CDK for Lambda deployments.");
1542
+ result = {
1543
+ imageRef,
1544
+ masterKey
1545
+ };
1546
+ break;
1547
+ }
1548
+ default: throw new Error(`Unknown deploy provider: ${provider}\nSupported providers: docker, dokploy, aws-lambda`);
1549
+ }
1550
+ logger$3.log("\n✅ Deployment complete!");
1551
+ return result;
1552
+ }
1553
+
1554
+ //#endregion
1555
+ //#region src/deploy/init.ts
1556
+ const logger$2 = console;
1557
+ /**
1558
+ * Get the Dokploy API token from stored credentials or environment
1559
+ */
1560
+ async function getApiToken() {
1561
+ const token = await getDokployToken();
1562
+ if (!token) throw new Error("Dokploy credentials not found.\nRun \"gkm login --service dokploy\" to authenticate, or set DOKPLOY_API_TOKEN.");
1563
+ return token;
1564
+ }
1565
+ /**
1566
+ * Make a request to the Dokploy API
1567
+ */
1568
+ async function dokployRequest(method, endpoint, baseUrl, token, body) {
1569
+ const url = `${baseUrl}/api/${endpoint}`;
1570
+ const response = await fetch(url, {
1571
+ method,
1572
+ headers: {
1573
+ "Content-Type": "application/json",
1574
+ Authorization: `Bearer ${token}`
1575
+ },
1576
+ body: body ? JSON.stringify(body) : void 0
1577
+ });
1578
+ if (!response.ok) {
1579
+ let errorMessage = `Dokploy API error: ${response.status} ${response.statusText}`;
1580
+ try {
1581
+ const errorBody = await response.json();
1582
+ if (errorBody.message) errorMessage = `Dokploy API error: ${errorBody.message}`;
1583
+ } catch {}
1584
+ throw new Error(errorMessage);
1585
+ }
1586
+ const text = await response.text();
1587
+ if (!text) return {};
1588
+ return JSON.parse(text);
1589
+ }
1590
+ /**
1591
+ * Get all projects from Dokploy
1592
+ */
1593
+ async function getProjects(baseUrl, token) {
1594
+ return dokployRequest("GET", "project.all", baseUrl, token);
1595
+ }
1596
+ /**
1597
+ * Create a new project in Dokploy
1598
+ */
1599
+ async function createProject(baseUrl, token, name$1, description$1) {
1600
+ return dokployRequest("POST", "project.create", baseUrl, token, {
1601
+ name: name$1,
1602
+ description: description$1 || `Created by gkm CLI`
1603
+ });
1604
+ }
1605
+ /**
1606
+ * Get project by ID to get environment info
1607
+ */
1608
+ async function getProject(baseUrl, token, projectId) {
1609
+ return dokployRequest("POST", "project.one", baseUrl, token, { projectId });
1610
+ }
1611
+ /**
1612
+ * Create a new application in Dokploy
1613
+ */
1614
+ async function createApplication(baseUrl, token, name$1, projectId) {
1615
+ const project = await getProject(baseUrl, token, projectId);
1616
+ let environmentId;
1617
+ const firstEnv = project.environments?.[0];
1618
+ if (firstEnv) environmentId = firstEnv.environmentId;
1619
+ else {
1620
+ const env = await dokployRequest("POST", "environment.create", baseUrl, token, {
1621
+ projectId,
1622
+ name: "production",
1623
+ description: "Production environment"
1624
+ });
1625
+ environmentId = env.environmentId;
1626
+ }
1627
+ return dokployRequest("POST", "application.create", baseUrl, token, {
1628
+ name: name$1,
1629
+ projectId,
1630
+ environmentId
1631
+ });
1632
+ }
1633
+ /**
1634
+ * Configure application for Docker registry deployment
1635
+ */
1636
+ async function configureApplicationRegistry(baseUrl, token, applicationId, registryId) {
1637
+ await dokployRequest("POST", "application.update", baseUrl, token, {
1638
+ applicationId,
1639
+ registryId
1640
+ });
1641
+ }
1642
+ /**
1643
+ * Get available registries
1644
+ */
1645
+ async function getRegistries(baseUrl, token) {
1646
+ return dokployRequest("GET", "registry.all", baseUrl, token);
1647
+ }
1648
+ /**
1649
+ * Update gkm.config.ts with Dokploy configuration
1650
+ */
1651
+ async function updateConfig(config, cwd = process.cwd()) {
1652
+ const configPath = (0, node_path.join)(cwd, "gkm.config.ts");
1653
+ if (!(0, node_fs.existsSync)(configPath)) {
1654
+ logger$2.warn("\n gkm.config.ts not found. Add this configuration manually:\n");
1655
+ logger$2.log(` providers: {`);
1656
+ logger$2.log(` dokploy: {`);
1657
+ logger$2.log(` endpoint: '${config.endpoint}',`);
1658
+ logger$2.log(` projectId: '${config.projectId}',`);
1659
+ logger$2.log(` applicationId: '${config.applicationId}',`);
1660
+ logger$2.log(` },`);
1661
+ logger$2.log(` },`);
958
1662
  return;
959
1663
  }
960
- const rootOutputDir = (0, node_path.join)(process.cwd(), ".gkm");
961
- await (0, node_fs_promises.mkdir)(rootOutputDir, { recursive: true });
962
- for (const provider of resolved.providers) await buildForProvider(provider, buildContext, rootOutputDir, endpointGenerator, functionGenerator, cronGenerator, subscriberGenerator, allEndpoints, allFunctions, allCrons, allSubscribers, resolved.enableOpenApi);
1664
+ const content = await (0, node_fs_promises.readFile)(configPath, "utf-8");
1665
+ if (content.includes("dokploy:") && content.includes("applicationId:")) {
1666
+ logger$2.log("\n Dokploy config already exists in gkm.config.ts");
1667
+ logger$2.log(" Updating with new values...");
1668
+ }
1669
+ let newContent;
1670
+ if (content.includes("providers:")) if (content.includes("dokploy:")) newContent = content.replace(/dokploy:\s*\{[^}]*\}/, `dokploy: {
1671
+ endpoint: '${config.endpoint}',
1672
+ projectId: '${config.projectId}',
1673
+ applicationId: '${config.applicationId}',
1674
+ }`);
1675
+ else newContent = content.replace(/providers:\s*\{/, `providers: {
1676
+ dokploy: {
1677
+ endpoint: '${config.endpoint}',
1678
+ projectId: '${config.projectId}',
1679
+ applicationId: '${config.applicationId}',
1680
+ },`);
1681
+ else newContent = content.replace(/}\s*\)\s*;?\s*$/, `
1682
+ providers: {
1683
+ dokploy: {
1684
+ endpoint: '${config.endpoint}',
1685
+ projectId: '${config.projectId}',
1686
+ applicationId: '${config.applicationId}',
1687
+ },
1688
+ },
1689
+ });`);
1690
+ await (0, node_fs_promises.writeFile)(configPath, newContent);
1691
+ logger$2.log("\n ✓ Updated gkm.config.ts with Dokploy configuration");
1692
+ }
1693
/**
 * Initialize Dokploy deployment configuration.
 *
 * Resolves the Dokploy endpoint (explicit option first, then credentials
 * stored by `gkm login --service dokploy`), finds or creates the target
 * project, creates the application, optionally attaches a Docker registry,
 * and persists the resulting IDs into gkm.config.ts via updateConfig().
 *
 * @param options - { projectName, appName, endpoint?, projectId?, registryId? }
 * @returns The saved config object: { endpoint, projectId, applicationId }
 * @throws {Error} When no endpoint is provided and none is stored.
 */
async function deployInitCommand(options) {
  const { projectName, appName, projectId: existingProjectId, registryId } = options;
  // Explicit --endpoint wins; otherwise fall back to stored credentials.
  let endpoint = options.endpoint;
  if (!endpoint) {
    const stored = await getDokployCredentials();
    if (stored) endpoint = stored.endpoint;
    else throw new Error("Dokploy endpoint not specified.\nEither run \"gkm login --service dokploy\" first, or provide --endpoint.");
  }
  logger$2.log(`\n🚀 Initializing Dokploy deployment...`);
  logger$2.log(`   Endpoint: ${endpoint}`);
  const token = await getApiToken();
  // Project resolution: provided ID > case-insensitive name match > create new.
  let projectId;
  if (existingProjectId) {
    projectId = existingProjectId;
    logger$2.log(`\n📁 Using existing project: ${projectId}`);
  } else {
    logger$2.log(`\n📁 Looking for project: ${projectName}`);
    const projects = await getProjects(endpoint, token);
    const existingProject = projects.find((p) => p.name.toLowerCase() === projectName.toLowerCase());
    if (existingProject) {
      projectId = existingProject.projectId;
      logger$2.log(`   Found existing project: ${projectId}`);
    } else {
      logger$2.log(`   Creating new project...`);
      const project = await createProject(endpoint, token, projectName);
      projectId = project.projectId;
      logger$2.log(`   ✓ Created project: ${projectId}`);
    }
  }
  logger$2.log(`\n📦 Creating application: ${appName}`);
  const application = await createApplication(endpoint, token, appName, projectId);
  logger$2.log(`   ✓ Created application: ${application.applicationId}`);
  if (registryId) {
    logger$2.log(`\n🔧 Configuring registry: ${registryId}`);
    await configureApplicationRegistry(endpoint, token, application.applicationId, registryId);
    logger$2.log(`   ✓ Registry configured`);
  } else try {
    // No registry requested: list available ones as a hint. Failures here are
    // deliberately swallowed — the listing is purely informational.
    const registries = await getRegistries(endpoint, token);
    if (registries.length > 0) {
      logger$2.log(`\n📋 Available registries:`);
      for (const reg of registries) logger$2.log(`   - ${reg.registryName}: ${reg.registryUrl} (${reg.registryId})`);
      logger$2.log(`\n   To use a registry, run with --registry-id <id>`);
    }
  } catch {}
  const config = {
    endpoint,
    projectId,
    applicationId: application.applicationId
  };
  // Persist IDs so subsequent `gkm deploy` runs can find the application.
  await updateConfig(config);
  logger$2.log(`\n✅ Dokploy deployment initialized!`);
  logger$2.log(`\n📋 Configuration:`);
  logger$2.log(`   Project ID: ${projectId}`);
  logger$2.log(`   Application ID: ${application.applicationId}`);
  logger$2.log(`\n🔗 View in Dokploy: ${endpoint}/project/${projectId}`);
  logger$2.log(`\n📝 Next steps:`);
  logger$2.log(`   1. Initialize secrets: gkm secrets:init --stage production`);
  logger$2.log(`   2. Deploy: gkm deploy --provider dokploy --stage production`);
  return config;
}
1756
/**
 * List available Dokploy resources on the configured endpoint.
 *
 * Supports two resource kinds: "projects" and "registries". Any other value
 * prints nothing. Endpoint resolution mirrors deployInitCommand: explicit
 * option first, then stored credentials.
 *
 * @param options - { resource: "projects" | "registries", endpoint? }
 * @throws {Error} When no endpoint is provided and none is stored.
 */
async function deployListCommand(options) {
  let endpoint = options.endpoint;
  if (!endpoint) {
    const stored = await getDokployCredentials();
    if (stored) endpoint = stored.endpoint;
    else throw new Error("Dokploy endpoint not specified.\nEither run \"gkm login --service dokploy\" first, or provide --endpoint.");
  }
  const { resource } = options;
  const token = await getApiToken();
  if (resource === "projects") {
    logger$2.log(`\n📁 Projects in ${endpoint}:`);
    const projects = await getProjects(endpoint, token);
    if (projects.length === 0) {
      logger$2.log("   No projects found");
      return;
    }
    for (const project of projects) {
      logger$2.log(`\n  ${project.name} (${project.projectId})`);
      if (project.description) logger$2.log(`     ${project.description}`);
    }
  } else if (resource === "registries") {
    logger$2.log(`\n🐳 Registries in ${endpoint}:`);
    const registries = await getRegistries(endpoint, token);
    if (registries.length === 0) {
      logger$2.log("   No registries configured");
      logger$2.log("   Add a registry in Dokploy: Settings > Docker Registry");
      return;
    }
    for (const registry of registries) {
      logger$2.log(`\n  ${registry.registryName} (${registry.registryId})`);
      logger$2.log(`     URL: ${registry.registryUrl}`);
      logger$2.log(`     Username: ${registry.username}`);
      if (registry.imagePrefix) logger$2.log(`     Prefix: ${registry.imagePrefix}`);
    }
  }
}
1795
+
1796
+ //#endregion
1797
+ //#region src/docker/compose.ts
1798
/** Default Docker image (repository name) for each supported backing service. */
const DEFAULT_SERVICE_IMAGES = {
  postgres: "postgres",
  redis: "redis",
  rabbitmq: "rabbitmq"
};
/** Default Docker image tag for each supported backing service. */
const DEFAULT_SERVICE_VERSIONS = {
  postgres: "16-alpine",
  redis: "7-alpine",
  rabbitmq: "3-management-alpine"
};
/**
 * Build the default full `repo:tag` image reference for a service,
 * combining the two default tables above.
 */
function getDefaultImage(serviceName) {
  const repo = DEFAULT_SERVICE_IMAGES[serviceName];
  const tag = DEFAULT_SERVICE_VERSIONS[serviceName];
  return `${repo}:${tag}`;
}
1814
/**
 * Normalize a services config into a Map of service name -> full image ref.
 *
 * Accepts either an array of service names (all defaults) or an object whose
 * values are `true` (defaults), a config object (`image` wins over `version`),
 * or anything falsy (service disabled / skipped).
 */
function normalizeServices(services) {
  const normalized = new Map();
  if (Array.isArray(services)) {
    for (const svc of services) {
      normalized.set(svc, getDefaultImage(svc));
    }
    return normalized;
  }
  for (const [svc, entry] of Object.entries(services)) {
    if (entry === true) {
      normalized.set(svc, getDefaultImage(svc));
      continue;
    }
    if (!entry || typeof entry !== "object") continue;
    if (entry.image) {
      // Explicit full image reference wins outright.
      normalized.set(svc, entry.image);
    } else {
      const tag = entry.version ?? DEFAULT_SERVICE_VERSIONS[svc];
      normalized.set(svc, `${DEFAULT_SERVICE_IMAGES[svc]}:${tag}`);
    }
  }
  return normalized;
}
1832
/**
 * Generate docker-compose.yml for production deployment.
 *
 * Emits an `api` service built from the generated Dockerfile, plus one
 * compose service (with named volume and healthcheck) for each backing
 * service enabled in config (postgres / redis / rabbitmq). Tunables are
 * written as literal `${VAR:-default}` shell substitutions (escaped in the
 * template) so compose resolves them at runtime.
 *
 * @param options - { imageName, registry, port, healthCheckPath, services }
 * @returns The docker-compose.yml content as a string.
 */
function generateDockerCompose(options) {
  const { imageName, registry, port, healthCheckPath, services } = options;
  const serviceMap = normalizeServices(services);
  // Optional registry prefix; kept overridable through $REGISTRY.
  const imageRef = registry ? `\${REGISTRY:-${registry}}/` : "";
  let yaml = `version: '3.8'

services:
  api:
    build:
      context: ../..
      dockerfile: .gkm/docker/Dockerfile
    image: ${imageRef}\${IMAGE_NAME:-${imageName}}:\${TAG:-latest}
    container_name: ${imageName}
    restart: unless-stopped
    ports:
      - "\${PORT:-${port}}:${port}"
    environment:
      - NODE_ENV=production
`;
  // Connection-string env vars default to the sibling compose services.
  if (serviceMap.has("postgres")) yaml += `      - DATABASE_URL=\${DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/app}
`;
  if (serviceMap.has("redis")) yaml += `      - REDIS_URL=\${REDIS_URL:-redis://redis:6379}
`;
  if (serviceMap.has("rabbitmq")) yaml += `      - RABBITMQ_URL=\${RABBITMQ_URL:-amqp://rabbitmq:5672}
`;
  yaml += `    healthcheck:
      test: ["CMD", "wget", "-q", "--spider", "http://localhost:${port}${healthCheckPath}"]
      interval: 30s
      timeout: 3s
      retries: 3
`;
  // The api container waits for every backing service to report healthy.
  if (serviceMap.size > 0) {
    yaml += `    depends_on:
`;
    for (const serviceName of serviceMap.keys()) yaml += `      ${serviceName}:
        condition: service_healthy
`;
  }
  yaml += `    networks:
      - app-network
`;
  const postgresImage = serviceMap.get("postgres");
  if (postgresImage) yaml += `
  postgres:
    image: ${postgresImage}
    container_name: postgres
    restart: unless-stopped
    environment:
      POSTGRES_USER: \${POSTGRES_USER:-postgres}
      POSTGRES_PASSWORD: \${POSTGRES_PASSWORD:-postgres}
      POSTGRES_DB: \${POSTGRES_DB:-app}
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 5s
      timeout: 5s
      retries: 5
    networks:
      - app-network
`;
  const redisImage = serviceMap.get("redis");
  if (redisImage) yaml += `
  redis:
    image: ${redisImage}
    container_name: redis
    restart: unless-stopped
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 5s
      retries: 5
    networks:
      - app-network
`;
  const rabbitmqImage = serviceMap.get("rabbitmq");
  if (rabbitmqImage) yaml += `
  rabbitmq:
    image: ${rabbitmqImage}
    container_name: rabbitmq
    restart: unless-stopped
    environment:
      RABBITMQ_DEFAULT_USER: \${RABBITMQ_USER:-guest}
      RABBITMQ_DEFAULT_PASS: \${RABBITMQ_PASSWORD:-guest}
    ports:
      - "15672:15672" # Management UI
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "-q", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - app-network
`;
  // Declare named volumes only for the services actually enabled.
  yaml += `
volumes:
`;
  if (serviceMap.has("postgres")) yaml += `  postgres_data:
`;
  if (serviceMap.has("redis")) yaml += `  redis_data:
`;
  if (serviceMap.has("rabbitmq")) yaml += `  rabbitmq_data:
`;
  yaml += `
networks:
  app-network:
    driver: bridge
`;
  return yaml;
}
1949
/**
 * Generate a minimal docker-compose.yml for the API only.
 *
 * Used when config declares no backing services (postgres/redis/rabbitmq):
 * a single `api` service with healthcheck and a bridge network, no volumes.
 *
 * @param options - { imageName, registry, port, healthCheckPath }
 * @returns The docker-compose.yml content as a string.
 */
function generateMinimalDockerCompose(options) {
  const { imageName, registry, port, healthCheckPath } = options;
  // Optional registry prefix, overridable at compose time via $REGISTRY.
  const imageRef = registry ? `\${REGISTRY:-${registry}}/` : "";
  return `version: '3.8'

services:
  api:
    build:
      context: ../..
      dockerfile: .gkm/docker/Dockerfile
    image: ${imageRef}\${IMAGE_NAME:-${imageName}}:\${TAG:-latest}
    container_name: ${imageName}
    restart: unless-stopped
    ports:
      - "\${PORT:-${port}}:${port}"
    environment:
      - NODE_ENV=production
    healthcheck:
      test: ["CMD", "wget", "-q", "--spider", "http://localhost:${port}${healthCheckPath}"]
      interval: 30s
      timeout: 3s
      retries: 3
    networks:
      - app-network

networks:
  app-network:
    driver: bridge
`;
}
1982
+
1983
+ //#endregion
1984
+ //#region src/docker/templates.ts
1985
/**
 * Detect the package manager by looking for a lockfile, walking up the
 * directory tree from `cwd` (so monorepo sub-packages resolve to the root
 * lockfile). At each level the check order is pnpm, bun, yarn, npm; the
 * filesystem root is checked last. Falls back to "pnpm" when nothing matches.
 *
 * @param cwd - Directory to start from (defaults to process.cwd()).
 * @returns "pnpm" | "bun" | "yarn" | "npm"
 */
function detectPackageManager$1(cwd = process.cwd()) {
  const LOCKFILE_TO_PM = [
    ["pnpm-lock.yaml", "pnpm"],
    ["bun.lockb", "bun"],
    ["yarn.lock", "yarn"],
    ["package-lock.json", "npm"]
  ];
  const detectIn = (dir) => {
    const hit = LOCKFILE_TO_PM.find(([lockfile]) => node_fs.existsSync(node_path.join(dir, lockfile)));
    return hit ? hit[1] : void 0;
  };
  const root = node_path.parse(cwd).root;
  for (let dir = cwd; dir !== root; dir = node_path.dirname(dir)) {
    const pm = detectIn(dir);
    if (pm) return pm;
  }
  return detectIn(root) ?? "pnpm";
}
2005
/**
 * Get package-manager-specific commands and paths used by the Dockerfile
 * templates.
 *
 * @param pm - "pnpm" | "npm" | "yarn" | "bun"
 * @returns An object with:
 *   install     - shell command to install the PM itself ("" if preinstalled),
 *   lockfile    - lockfile name to copy into the image,
 *   fetch       - lockfile-only prefetch command ("" if unsupported),
 *   installCmd  - dependency install command,
 *   cacheTarget - BuildKit cache mount path for the PM store,
 *   cacheId     - BuildKit cache mount id,
 *   run         - script-runner prefix,
 *   addGlobal   - global-install command prefix.
 */
function getPmConfig(pm) {
  switch (pm) {
    case "pnpm":
      return {
        install: "corepack enable && corepack prepare pnpm@latest --activate",
        lockfile: "pnpm-lock.yaml",
        fetch: "pnpm fetch",
        installCmd: "pnpm install --frozen-lockfile --offline",
        cacheTarget: "/root/.local/share/pnpm/store",
        cacheId: "pnpm",
        run: "pnpm",
        addGlobal: "pnpm add -g"
      };
    case "npm":
      return {
        install: "",
        lockfile: "package-lock.json",
        fetch: "",
        installCmd: "npm ci",
        cacheTarget: "/root/.npm",
        cacheId: "npm",
        run: "npm run",
        addGlobal: "npm install -g"
      };
    case "yarn":
      return {
        install: "corepack enable && corepack prepare yarn@stable --activate",
        lockfile: "yarn.lock",
        fetch: "",
        installCmd: "yarn install --frozen-lockfile",
        cacheTarget: "/root/.yarn/cache",
        cacheId: "yarn",
        run: "yarn",
        addGlobal: "yarn global add"
      };
    case "bun":
      return {
        install: "npm install -g bun",
        lockfile: "bun.lockb",
        fetch: "",
        installCmd: "bun install --frozen-lockfile",
        cacheTarget: "/root/.bun/install/cache",
        cacheId: "bun",
        run: "bun run",
        addGlobal: "bun add -g"
      };
  }
}
2053
/**
 * Generate a multi-stage Dockerfile for building from source.
 *
 * Optimized for build speed with:
 * - BuildKit cache mounts for the package manager store
 * - `pnpm fetch` for lockfile-keyed layer caching (pnpm only)
 * - delegation to the turbo-prune template for monorepos (options.turbo)
 *
 * Stages: deps (install) -> builder (gkm build) -> runner (tini + non-root).
 *
 * @param options - { baseImage, port, healthCheckPath, turbo?, turboPackage?, packageManager }
 * @returns Dockerfile content as a string.
 */
function generateMultiStageDockerfile(options) {
  const { baseImage, port, healthCheckPath, turbo, turboPackage, packageManager } = options;
  // Turbo monorepos get a dedicated template built around `turbo prune`.
  if (turbo) return generateTurboDockerfile({
    ...options,
    turboPackage: turboPackage ?? "api"
  });
  const pm = getPmConfig(packageManager);
  const installPm = pm.install ? `\n# Install ${packageManager}\nRUN ${pm.install}\n` : "";
  // pnpm supports a lockfile-only `pnpm fetch`, enabling a deps layer that
  // only invalidates when the lockfile changes.
  const hasFetch = packageManager === "pnpm";
  const depsStage = hasFetch ? `# Copy lockfile first for better caching
COPY ${pm.lockfile} ./

# Fetch dependencies (downloads to virtual store, cached separately)
RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.fetch}

# Copy package.json after fetch
COPY package.json ./

# Install from cache (fast - no network needed)
RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.installCmd}` : `# Copy package files
COPY package.json ${pm.lockfile} ./

# Install dependencies with cache
RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.installCmd}`;
  return `# syntax=docker/dockerfile:1
# Stage 1: Dependencies
FROM ${baseImage} AS deps

WORKDIR /app
${installPm}
${depsStage}

# Stage 2: Build
FROM deps AS builder

WORKDIR /app

# Copy source (deps already installed)
COPY . .

# Build production server
RUN ${pm.run} gkm build --provider server --production

# Stage 3: Production
FROM ${baseImage} AS runner

WORKDIR /app

# Install tini for proper signal handling as PID 1
RUN apk add --no-cache tini

# Create non-root user
RUN addgroup --system --gid 1001 nodejs && \\
    adduser --system --uid 1001 hono

# Copy bundled server
COPY --from=builder --chown=hono:nodejs /app/.gkm/server/dist/server.mjs ./

# Environment
ENV NODE_ENV=production
ENV PORT=${port}

# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \\
  CMD wget -q --spider http://localhost:${port}${healthCheckPath} || exit 1

# Switch to non-root user
USER hono

EXPOSE ${port}

# Use tini as entrypoint to handle PID 1 responsibilities
ENTRYPOINT ["/sbin/tini", "--"]
CMD ["node", "server.mjs"]
`;
}
2139
/**
 * Generate a Dockerfile optimized for Turbo monorepos.
 *
 * Uses `turbo prune <pkg> --docker` to produce a minimal build context:
 * out/json (manifests only, for a lockfile-keyed deps layer) and out/full
 * (pruned sources). Stages: pruner -> deps -> builder -> runner.
 *
 * @param options - { baseImage, port, healthCheckPath, turboPackage, packageManager }
 * @returns Dockerfile content as a string.
 */
function generateTurboDockerfile(options) {
  const { baseImage, port, healthCheckPath, turboPackage, packageManager } = options;
  const pm = getPmConfig(packageManager);
  const installPm = pm.install ? `RUN ${pm.install}` : "";
  // pnpm gets a separate fetch step so dependency downloads cache by lockfile.
  const hasFetch = packageManager === "pnpm";
  const depsInstall = hasFetch ? `# Fetch and install from cache
RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.fetch}

RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.installCmd}` : `# Install dependencies with cache
RUN --mount=type=cache,id=${pm.cacheId},target=${pm.cacheTarget} \\
    ${pm.installCmd}`;
  return `# syntax=docker/dockerfile:1
# Stage 1: Prune monorepo
FROM ${baseImage} AS pruner

WORKDIR /app

${installPm}
RUN ${pm.addGlobal} turbo

COPY . .

# Prune to only include necessary packages
RUN turbo prune ${turboPackage} --docker

# Stage 2: Install dependencies
FROM ${baseImage} AS deps

WORKDIR /app

${installPm}

# Copy pruned lockfile and package.jsons
COPY --from=pruner /app/out/${pm.lockfile} ./
COPY --from=pruner /app/out/json/ ./

${depsInstall}

# Stage 3: Build
FROM deps AS builder

WORKDIR /app

# Copy pruned source
COPY --from=pruner /app/out/full/ ./

# Build production server
RUN ${pm.run} gkm build --provider server --production

# Stage 4: Production
FROM ${baseImage} AS runner

WORKDIR /app

RUN apk add --no-cache tini

RUN addgroup --system --gid 1001 nodejs && \\
    adduser --system --uid 1001 hono

COPY --from=builder --chown=hono:nodejs /app/.gkm/server/dist/server.mjs ./

ENV NODE_ENV=production
ENV PORT=${port}

HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \\
  CMD wget -q --spider http://localhost:${port}${healthCheckPath} || exit 1

USER hono

EXPOSE ${port}

ENTRYPOINT ["/sbin/tini", "--"]
CMD ["node", "server.mjs"]
`;
}
2220
/**
 * Generate a slim Dockerfile for pre-built bundles.
 *
 * No build stages: copies an existing .gkm/server/dist/server.mjs (produced
 * by `gkm build --provider server --production`) into a tini + non-root
 * runtime image. The caller (dockerCommand) verifies the bundle exists.
 *
 * @param options - { baseImage, port, healthCheckPath }
 * @returns Dockerfile content as a string.
 */
function generateSlimDockerfile(options) {
  const { baseImage, port, healthCheckPath } = options;
  return `# Slim Dockerfile for pre-built production bundle
FROM ${baseImage}

WORKDIR /app

# Install tini for proper signal handling as PID 1
# Handles SIGTERM propagation and zombie process reaping
RUN apk add --no-cache tini

# Create non-root user
RUN addgroup --system --gid 1001 nodejs && \\
    adduser --system --uid 1001 hono

# Copy pre-built bundle
COPY .gkm/server/dist/server.mjs ./

# Environment
ENV NODE_ENV=production
ENV PORT=${port}

# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \\
  CMD wget -q --spider http://localhost:${port}${healthCheckPath} || exit 1

# Switch to non-root user
USER hono

EXPOSE ${port}

# Use tini as entrypoint to handle PID 1 responsibilities
ENTRYPOINT ["/sbin/tini", "--"]
CMD ["node", "server.mjs"]
`;
}
2259
/**
 * Generate the .dockerignore content written to the project root.
 *
 * Keeps the build context small and stops dev artifacts, tests, docs and
 * env files (secrets) from leaking into images, while whitelisting the
 * pre-built server bundle under .gkm/server/dist.
 *
 * @returns Full file text, newline-terminated.
 */
function generateDockerignore() {
  const entries = [
    "# Dependencies",
    "node_modules",
    ".pnpm-store",
    "",
    "# Build output (except what we need)",
    ".gkm/aws*",
    ".gkm/server/*.ts",
    "!.gkm/server/dist",
    "",
    "# IDE and editor",
    ".idea",
    ".vscode",
    "*.swp",
    "*.swo",
    "",
    "# Git",
    ".git",
    ".gitignore",
    "",
    "# Logs",
    "*.log",
    "npm-debug.log*",
    "pnpm-debug.log*",
    "",
    "# Test files",
    "**/*.test.ts",
    "**/*.spec.ts",
    "**/__tests__",
    "coverage",
    "",
    "# Documentation",
    "docs",
    "*.md",
    "!README.md",
    "",
    "# Environment files (handle secrets separately)",
    ".env",
    ".env.*",
    "!.env.example",
    "",
    "# Docker files (don't copy recursively)",
    "Dockerfile*",
    "docker-compose*",
    ".dockerignore"
  ];
  return `${entries.join("\n")}\n`;
}
2309
/**
 * Generate docker-entrypoint.sh for custom startup logic.
 *
 * The script simply `exec`s the container command so it replaces the shell
 * as PID-child and receives signals directly; the commented block shows
 * where users can add pre-start steps (e.g. waiting for a database).
 *
 * @returns Shell script content as a string.
 */
function generateDockerEntrypoint() {
  return `#!/bin/sh
set -e

# Run any custom startup scripts here
# Example: wait for database
# until nc -z $DB_HOST $DB_PORT; do
#   echo "Waiting for database..."
#   sleep 1
# done

# Execute the main command
exec "$@"
`;
}
2327
/**
 * Resolve the effective Docker configuration from a GkmConfig, applying
 * defaults for anything missing under `config.docker`.
 *
 * The default image name is the package.json "name" with any npm scope
 * stripped, falling back to "api" when package.json cannot be read.
 *
 * @param config - Loaded GkmConfig (may omit `docker` entirely).
 * @returns { registry, imageName, baseImage, port, compose }
 */
function resolveDockerConfig(config) {
  const docker = config.docker ?? {};
  // Derive a sensible image name from package.json; failure is non-fatal
  // (e.g. no package.json in cwd) and falls back to "api".
  const packageBasedName = () => {
    try {
      const pkg = require(`${process.cwd()}/package.json`);
      if (pkg.name) return pkg.name.replace(/^@[^/]+\//, "");
    } catch {}
    return "api";
  };
  const defaultImageName = packageBasedName();
  return {
    registry: docker.registry ?? "",
    imageName: docker.imageName ?? defaultImageName,
    baseImage: docker.baseImage ?? "node:22-alpine",
    port: docker.port ?? 3000,
    compose: docker.compose
  };
}
2345
+
2346
+ //#endregion
2347
+ //#region src/docker/index.ts
2348
const logger$1 = console;
/**
 * Docker command implementation.
 *
 * Generates Dockerfile, docker-compose.yml and docker-entrypoint.sh under
 * .gkm/docker, plus .dockerignore at the project root; optionally builds
 * and pushes the image afterwards (options.build / options.push).
 *
 * Default: multi-stage Dockerfile that builds from source inside Docker.
 * --slim: slim Dockerfile that copies a pre-built bundle (requires prior build).
 * --turbo: monorepo variant using `turbo prune`.
 *
 * @param options - { slim?, turbo?, turboPackage?, registry?, build?, push?, tag? }
 * @returns Paths of the generated files.
 * @throws {Error} When --slim is used without a pre-built bundle.
 */
async function dockerCommand(options) {
  const config = await require_config.loadConfig();
  const dockerConfig = resolveDockerConfig(config);
  // Health-check path is taken from the server provider's production config.
  const serverConfig = typeof config.providers?.server === "object" ? config.providers.server : void 0;
  const healthCheckPath = serverConfig?.production?.healthCheck ?? "/health";
  const useSlim = options.slim === true;
  if (useSlim) {
    // Slim images only copy an existing bundle — fail fast if it is missing.
    const distDir = (0, node_path.join)(process.cwd(), ".gkm", "server", "dist");
    const hasBuild = (0, node_fs.existsSync)((0, node_path.join)(distDir, "server.mjs"));
    if (!hasBuild) throw new Error("Slim Dockerfile requires a pre-built bundle. Run `gkm build --provider server --production` first, or omit --slim to use multi-stage build.");
  }
  const dockerDir = (0, node_path.join)(process.cwd(), ".gkm", "docker");
  await (0, node_fs_promises.mkdir)(dockerDir, { recursive: true });
  const packageManager = detectPackageManager$1();
  const templateOptions = {
    imageName: dockerConfig.imageName,
    baseImage: dockerConfig.baseImage,
    port: dockerConfig.port,
    healthCheckPath,
    prebuilt: useSlim,
    turbo: options.turbo,
    turboPackage: options.turboPackage ?? dockerConfig.imageName,
    packageManager
  };
  const dockerfile = useSlim ? generateSlimDockerfile(templateOptions) : generateMultiStageDockerfile(templateOptions);
  const dockerMode = useSlim ? "slim" : options.turbo ? "turbo" : "multi-stage";
  const dockerfilePath = (0, node_path.join)(dockerDir, "Dockerfile");
  await (0, node_fs_promises.writeFile)(dockerfilePath, dockerfile);
  logger$1.log(`Generated: .gkm/docker/Dockerfile (${dockerMode}, ${packageManager})`);
  const composeOptions = {
    imageName: dockerConfig.imageName,
    registry: options.registry ?? dockerConfig.registry,
    port: dockerConfig.port,
    healthCheckPath,
    services: dockerConfig.compose?.services ?? {}
  };
  // Full compose (with backing services) only when services are configured.
  const hasServices = Array.isArray(composeOptions.services) ? composeOptions.services.length > 0 : Object.keys(composeOptions.services).length > 0;
  const dockerCompose = hasServices ? generateDockerCompose(composeOptions) : generateMinimalDockerCompose(composeOptions);
  const composePath = (0, node_path.join)(dockerDir, "docker-compose.yml");
  await (0, node_fs_promises.writeFile)(composePath, dockerCompose);
  logger$1.log("Generated: .gkm/docker/docker-compose.yml");
  const dockerignore = generateDockerignore();
  // .dockerignore must live at the build-context root to take effect.
  const dockerignorePath = (0, node_path.join)(process.cwd(), ".dockerignore");
  await (0, node_fs_promises.writeFile)(dockerignorePath, dockerignore);
  logger$1.log("Generated: .dockerignore (project root)");
  const entrypoint = generateDockerEntrypoint();
  const entrypointPath = (0, node_path.join)(dockerDir, "docker-entrypoint.sh");
  await (0, node_fs_promises.writeFile)(entrypointPath, entrypoint);
  logger$1.log("Generated: .gkm/docker/docker-entrypoint.sh");
  const result = {
    dockerfile: dockerfilePath,
    dockerCompose: composePath,
    dockerignore: dockerignorePath,
    entrypoint: entrypointPath
  };
  if (options.build) await buildDockerImage(dockerConfig.imageName, options);
  if (options.push) await pushDockerImage(dockerConfig.imageName, options);
  return result;
}
2415
/**
 * Build the Docker image from the generated Dockerfile.
 *
 * BuildKit is required for the `--mount=type=cache` instructions in the
 * generated Dockerfiles; it is enabled via the child-process environment.
 *
 * Fix: the previous command string was prefixed with `DOCKER_BUILDKIT=1 `,
 * a POSIX-shell-only `VAR=1 cmd` form that fails on Windows (cmd.exe) and
 * was redundant — the `env` option below already sets the variable.
 *
 * @param {string} imageName - Base image name (without registry or tag).
 * @param {{ tag?: string, registry?: string }} options - Optional tag
 *   (defaults to "latest") and registry prefix.
 * @throws {Error} When the `docker build` invocation fails; the original
 *   error is attached as `cause`.
 */
async function buildDockerImage(imageName, options) {
  const tag = options.tag ?? "latest";
  const registry = options.registry;
  const fullImageName = registry ? `${registry}/${imageName}:${tag}` : `${imageName}:${tag}`;
  logger$1.log(`\n🐳 Building Docker image: ${fullImageName}`);
  try {
    (0, node_child_process.execSync)(`docker build -f .gkm/docker/Dockerfile -t ${fullImageName} .`, {
      cwd: process.cwd(),
      stdio: "inherit",
      env: {
        ...process.env,
        // Enable BuildKit cross-platform (no shell prefix needed).
        DOCKER_BUILDKIT: "1"
      }
    });
    logger$1.log(`✅ Docker image built: ${fullImageName}`);
  } catch (error) {
    // Keep the caller-visible message shape; preserve the original as cause.
    throw new Error(`Failed to build Docker image: ${error instanceof Error ? error.message : "Unknown error"}`, { cause: error });
  }
}
964
- async function buildForProvider(provider, context, rootOutputDir, endpointGenerator, functionGenerator, cronGenerator, subscriberGenerator, endpoints, functions, crons, subscribers, enableOpenApi) {
965
- const outputDir = (0, node_path.join)(process.cwd(), ".gkm", provider);
966
- await (0, node_fs_promises.mkdir)(outputDir, { recursive: true });
967
- logger.log(`\nGenerating handlers for provider: ${provider}`);
968
- const [routes, functionInfos, cronInfos, subscriberInfos] = await Promise.all([
969
- endpointGenerator.build(context, endpoints, outputDir, {
970
- provider,
971
- enableOpenApi
972
- }),
973
- functionGenerator.build(context, functions, outputDir, { provider }),
974
- cronGenerator.build(context, crons, outputDir, { provider }),
975
- subscriberGenerator.build(context, subscribers, outputDir, { provider })
976
- ]);
977
- logger.log(`Generated ${routes.length} routes, ${functionInfos.length} functions, ${cronInfos.length} crons, ${subscriberInfos.length} subscribers for ${provider}`);
978
- if (provider === "server") {
979
- const routeMetadata = await Promise.all(endpoints.map(async ({ construct }) => ({
980
- path: construct._path,
981
- method: construct.method,
982
- handler: "",
983
- authorizer: construct.authorizer?.name ?? "none"
984
- })));
985
- const appInfo = {
986
- handler: (0, node_path.relative)(process.cwd(), (0, node_path.join)(outputDir, "app.ts")),
987
- endpoints: (0, node_path.relative)(process.cwd(), (0, node_path.join)(outputDir, "endpoints.ts"))
988
- };
989
- await generateServerManifest(rootOutputDir, appInfo, routeMetadata, subscriberInfos);
990
- } else await generateAwsManifest(rootOutputDir, routes, functionInfos, cronInfos, subscriberInfos);
2438
/**
 * Push a previously built Docker image to a remote registry.
 *
 * A registry is mandatory: it comes from --registry or `docker.registry`
 * in gkm.config.ts.
 *
 * @param {string} imageName - Base image name (without registry or tag).
 * @param {{ tag?: string, registry?: string }} options - Tag defaults to "latest".
 * @throws {Error} When no registry is configured, or when `docker push` fails.
 */
async function pushDockerImage(imageName, options) {
  const registry = options.registry;
  const tag = options.tag ?? "latest";
  if (!registry) throw new Error("Registry is required to push Docker image. Use --registry or configure docker.registry in gkm.config.ts");
  const imageRef = `${registry}/${imageName}:${tag}`;
  logger$1.log(`\n🚀 Pushing Docker image: ${imageRef}`);
  try {
    (0, node_child_process.execSync)(`docker push ${imageRef}`, {
      cwd: process.cwd(),
      stdio: "inherit"
    });
    logger$1.log(`✅ Docker image pushed: ${imageRef}`);
  } catch (error) {
    const reason = error instanceof Error ? error.message : "Unknown error";
    throw new Error(`Failed to push Docker image: ${reason}`);
  }
}
992
2457
 
993
2458
  //#endregion
@@ -1065,7 +2530,7 @@ export default defineConfig({
1065
2530
  content: gkmConfig
1066
2531
  }, {
1067
2532
  path: "tsconfig.json",
1068
- content: JSON.stringify(tsConfig, null, 2) + "\n"
2533
+ content: `${JSON.stringify(tsConfig, null, 2)}\n`
1069
2534
  }];
1070
2535
  const biomeConfig = {
1071
2536
  $schema: "https://biomejs.dev/schemas/1.9.4/schema.json",
@@ -1139,15 +2604,15 @@ export default defineConfig({
1139
2604
  },
1140
2605
  {
1141
2606
  path: "tsconfig.json",
1142
- content: JSON.stringify(tsConfig, null, 2) + "\n"
2607
+ content: `${JSON.stringify(tsConfig, null, 2)}\n`
1143
2608
  },
1144
2609
  {
1145
2610
  path: "biome.json",
1146
- content: JSON.stringify(biomeConfig, null, 2) + "\n"
2611
+ content: `${JSON.stringify(biomeConfig, null, 2)}\n`
1147
2612
  },
1148
2613
  {
1149
2614
  path: "turbo.json",
1150
- content: JSON.stringify(turboConfig, null, 2) + "\n"
2615
+ content: `${JSON.stringify(turboConfig, null, 2)}\n`
1151
2616
  }
1152
2617
  ];
1153
2618
  }
@@ -1499,11 +2964,11 @@ export type UpdateUser = z.infer<typeof updateUserSchema>;
1499
2964
  return [
1500
2965
  {
1501
2966
  path: "packages/models/package.json",
1502
- content: JSON.stringify(packageJson, null, 2) + "\n"
2967
+ content: `${JSON.stringify(packageJson, null, 2)}\n`
1503
2968
  },
1504
2969
  {
1505
2970
  path: "packages/models/tsconfig.json",
1506
- content: JSON.stringify(tsConfig, null, 2) + "\n"
2971
+ content: `${JSON.stringify(tsConfig, null, 2)}\n`
1507
2972
  },
1508
2973
  {
1509
2974
  path: "packages/models/src/index.ts",
@@ -1669,7 +3134,7 @@ coverage/
1669
3134
  return [
1670
3135
  {
1671
3136
  path: "package.json",
1672
- content: JSON.stringify(rootPackageJson, null, 2) + "\n"
3137
+ content: `${JSON.stringify(rootPackageJson, null, 2)}\n`
1673
3138
  },
1674
3139
  {
1675
3140
  path: "pnpm-workspace.yaml",
@@ -1677,15 +3142,15 @@ coverage/
1677
3142
  },
1678
3143
  {
1679
3144
  path: "tsconfig.json",
1680
- content: JSON.stringify(tsConfig, null, 2) + "\n"
3145
+ content: `${JSON.stringify(tsConfig, null, 2)}\n`
1681
3146
  },
1682
3147
  {
1683
3148
  path: "biome.json",
1684
- content: JSON.stringify(biomeConfig, null, 2) + "\n"
3149
+ content: `${JSON.stringify(biomeConfig, null, 2)}\n`
1685
3150
  },
1686
3151
  {
1687
3152
  path: "turbo.json",
1688
- content: JSON.stringify(turboConfig, null, 2) + "\n"
3153
+ content: `${JSON.stringify(turboConfig, null, 2)}\n`
1689
3154
  },
1690
3155
  {
1691
3156
  path: ".gitignore",
@@ -2406,19 +3871,19 @@ function generatePackageJson(options, template) {
2406
3871
  if (studio) dependencies$1["@geekmidas/studio"] = "workspace:*";
2407
3872
  if (database) {
2408
3873
  dependencies$1["@geekmidas/db"] = "workspace:*";
2409
- dependencies$1["kysely"] = "~0.28.2";
2410
- dependencies$1["pg"] = "~8.16.0";
3874
+ dependencies$1.kysely = "~0.28.2";
3875
+ dependencies$1.pg = "~8.16.0";
2411
3876
  devDependencies$1["@types/pg"] = "~8.15.0";
2412
3877
  }
2413
- dependencies$1["zod"] = "~4.1.0";
3878
+ dependencies$1.zod = "~4.1.0";
2414
3879
  if (monorepo) {
2415
3880
  delete devDependencies$1["@biomejs/biome"];
2416
- delete devDependencies$1["turbo"];
2417
- delete scripts$1["lint"];
2418
- delete scripts$1["fmt"];
3881
+ delete devDependencies$1.turbo;
3882
+ delete scripts$1.lint;
3883
+ delete scripts$1.fmt;
2419
3884
  delete scripts$1["fmt:check"];
2420
3885
  dependencies$1[`@${name$1}/models`] = "workspace:*";
2421
- delete dependencies$1["zod"];
3886
+ delete dependencies$1.zod;
2422
3887
  }
2423
3888
  const sortObject = (obj) => Object.fromEntries(Object.entries(obj).sort(([a], [b]) => a.localeCompare(b)));
2424
3889
  let packageName = name$1;
@@ -2442,7 +3907,7 @@ function generatePackageJson(options, template) {
2442
3907
  };
2443
3908
  return [{
2444
3909
  path: "package.json",
2445
- content: JSON.stringify(packageJson, null, 2) + "\n"
3910
+ content: `${JSON.stringify(packageJson, null, 2)}\n`
2446
3911
  }];
2447
3912
  }
2448
3913
 
@@ -2502,7 +3967,6 @@ function getInstallCommand(pkgManager) {
2502
3967
  case "pnpm": return "pnpm install";
2503
3968
  case "yarn": return "yarn";
2504
3969
  case "bun": return "bun install";
2505
- case "npm":
2506
3970
  default: return "npm install";
2507
3971
  }
2508
3972
  }
@@ -2514,7 +3978,6 @@ function getRunCommand(pkgManager, script) {
2514
3978
  case "pnpm": return `pnpm ${script}`;
2515
3979
  case "yarn": return `yarn ${script}`;
2516
3980
  case "bun": return `bun run ${script}`;
2517
- case "npm":
2518
3981
  default: return `npm run ${script}`;
2519
3982
  }
2520
3983
  }
@@ -2598,21 +4061,12 @@ async function initCommand(projectName, options = {}) {
2598
4061
  }
2599
4062
  ], { onCancel });
2600
4063
  const name$1 = projectName || answers.name;
2601
- if (!name$1) {
2602
- console.error(" Error: Project name is required\n");
2603
- process.exit(1);
2604
- }
4064
+ if (!name$1) process.exit(1);
2605
4065
  if (projectName) {
2606
4066
  const nameValid = validateProjectName(projectName);
2607
- if (nameValid !== true) {
2608
- console.error(` Error: ${nameValid}\n`);
2609
- process.exit(1);
2610
- }
4067
+ if (nameValid !== true) process.exit(1);
2611
4068
  const dirValid = checkDirectoryExists(projectName, cwd);
2612
- if (dirValid !== true) {
2613
- console.error(` Error: ${dirValid}\n`);
2614
- process.exit(1);
2615
- }
4069
+ if (dirValid !== true) process.exit(1);
2616
4070
  }
2617
4071
  const monorepo = options.monorepo ?? (options.yes ? false : answers.monorepo ?? false);
2618
4072
  const database = options.yes ? true : answers.database ?? true;
@@ -2642,14 +4096,14 @@ async function initCommand(projectName, options = {}) {
2642
4096
  ...generateDockerFiles(templateOptions, template)
2643
4097
  ];
2644
4098
  const rootFiles = [...generateMonorepoFiles(templateOptions, template), ...generateModelsPackage(templateOptions)];
2645
- for (const { path: path$1, content } of rootFiles) {
2646
- const fullPath = (0, node_path.join)(targetDir, path$1);
4099
+ for (const { path, content } of rootFiles) {
4100
+ const fullPath = (0, node_path.join)(targetDir, path);
2647
4101
  await (0, node_fs_promises.mkdir)((0, node_path.dirname)(fullPath), { recursive: true });
2648
4102
  await (0, node_fs_promises.writeFile)(fullPath, content);
2649
4103
  }
2650
- for (const { path: path$1, content } of appFiles) {
2651
- const fullPath = (0, node_path.join)(appDir, path$1);
2652
- const displayPath = isMonorepo ? `${apiPath}/${path$1}` : path$1;
4104
+ for (const { path, content } of appFiles) {
4105
+ const fullPath = (0, node_path.join)(appDir, path);
4106
+ const _displayPath = isMonorepo ? `${apiPath}/${path}` : path;
2653
4107
  await (0, node_fs_promises.mkdir)((0, node_path.dirname)(fullPath), { recursive: true });
2654
4108
  await (0, node_fs_promises.writeFile)(fullPath, content);
2655
4109
  }
@@ -2659,9 +4113,7 @@ async function initCommand(projectName, options = {}) {
2659
4113
  cwd: targetDir,
2660
4114
  stdio: "inherit"
2661
4115
  });
2662
- } catch {
2663
- console.error("\n Warning: Failed to install dependencies.");
2664
- }
4116
+ } catch {}
2665
4117
  try {
2666
4118
  (0, node_child_process.execSync)("npx @biomejs/biome format --write --unsafe .", {
2667
4119
  cwd: targetDir,
@@ -2669,7 +4121,310 @@ async function initCommand(projectName, options = {}) {
2669
4121
  });
2670
4122
  } catch {}
2671
4123
  }
2672
- const devCommand$1 = getRunCommand(pkgManager, "dev");
4124
+ const _devCommand = getRunCommand(pkgManager, "dev");
4125
+ }
4126
+
4127
+ //#endregion
4128
+ //#region src/secrets/generator.ts
4129
/**
 * Generate a cryptographically random password using only URL-safe base64
 * characters (A–Z, a–z, 0–9, "-", "_").
 * @param length Desired password length in characters (default: 32).
 */
function generateSecurePassword(length = 32) {
  // base64 encodes 3 bytes as 4 characters, so request ceil(length * 3/4)
  // random bytes and trim the encoded result down to the exact length.
  const byteCount = Math.ceil((length * 3) / 4);
  const encoded = node_crypto.randomBytes(byteCount).toString("base64url");
  return encoded.slice(0, length);
}
4136
/**
 * Default (non-secret) connection settings per supported service.
 * Passwords are generated separately and merged over these defaults.
 */
const SERVICE_DEFAULTS = {
  postgres: {
    host: "postgres",
    port: 5432,
    username: "app",
    database: "app"
  },
  redis: {
    host: "redis",
    port: 6379,
    username: "default"
  },
  rabbitmq: {
    host: "rabbitmq",
    port: 5672,
    username: "app",
    vhost: "/"
  }
};
4156
/**
 * Build the full credential set for one service: its static defaults from
 * SERVICE_DEFAULTS plus a freshly generated random password.
 */
function generateServiceCredentials(service) {
  const password = generateSecurePassword();
  return { ...SERVICE_DEFAULTS[service], password };
}
4166
/**
 * Generate credentials for every service in the list, keyed by service name.
 */
function generateServicesCredentials(services) {
  const entries = services.map((service) => [service, generateServiceCredentials(service)]);
  return Object.fromEntries(entries);
}
4174
/**
 * Build a PostgreSQL connection URL from service credentials.
 *
 * Both username and password are percent-encoded so credentials containing
 * URL-reserved characters (e.g. "@", ":", "/") still yield a valid URL;
 * previously only the password was encoded.
 */
function generatePostgresUrl(creds) {
  const { username, password, host, port, database } = creds;
  return `postgresql://${encodeURIComponent(username)}:${encodeURIComponent(password)}@${host}:${port}/${database}`;
}
4181
/**
 * Build a Redis connection URL. Only the password appears in the userinfo
 * portion (the credentials' username field is not embedded).
 */
function generateRedisUrl(creds) {
  const { password, host, port } = creds;
  const auth = encodeURIComponent(password);
  return `redis://:${auth}@${host}:${port}`;
}
4188
/**
 * Build a RabbitMQ (AMQP) connection URL from service credentials.
 *
 * Username, password, and vhost are all percent-encoded; previously the
 * username was embedded raw, producing invalid URLs for usernames with
 * reserved characters. A missing vhost defaults to "/".
 */
function generateRabbitmqUrl(creds) {
  const { username, password, host, port, vhost } = creds;
  const encodedVhost = encodeURIComponent(vhost ?? "/");
  return `amqp://${encodeURIComponent(username)}:${encodeURIComponent(password)}@${host}:${port}/${encodedVhost}`;
}
4196
/**
 * Derive well-known connection-string environment variables (DATABASE_URL,
 * REDIS_URL, RABBITMQ_URL) from whichever service credentials are present.
 */
function generateConnectionUrls(services) {
  const builders = [
    ["DATABASE_URL", services.postgres, generatePostgresUrl],
    ["REDIS_URL", services.redis, generateRedisUrl],
    ["RABBITMQ_URL", services.rabbitmq, generateRabbitmqUrl]
  ];
  const urls = {};
  for (const [envVar, creds, build] of builders) {
    if (creds) urls[envVar] = build(creds);
  }
  return urls;
}
4206
/**
 * Build a fresh StageSecrets record for a stage: generated service
 * credentials, derived connection URLs, matching created/updated timestamps,
 * and an empty custom-secret map.
 */
function createStageSecrets(stage, services) {
  const timestamp = new Date().toISOString();
  const serviceCredentials = generateServicesCredentials(services);
  return {
    stage,
    createdAt: timestamp,
    updatedAt: timestamp,
    services: serviceCredentials,
    urls: generateConnectionUrls(serviceCredentials),
    custom: {}
  };
}
4222
/**
 * Return a copy of `secrets` in which the named service has a brand-new
 * random password; connection URLs and updatedAt are refreshed to match.
 * The input object is not mutated.
 * @throws Error when the service is not present in the secrets.
 */
function rotateServicePassword(secrets, service) {
  const existing = secrets.services[service];
  if (!existing) throw new Error(`Service "${service}" not configured in secrets`);
  const rotatedServices = {
    ...secrets.services,
    [service]: { ...existing, password: generateSecurePassword() }
  };
  return {
    ...secrets,
    updatedAt: new Date().toISOString(),
    services: rotatedServices,
    urls: generateConnectionUrls(rotatedServices)
  };
}
4243
+
4244
//#endregion
//#region src/secrets/index.ts
const logger = console;
/**
 * Normalize the docker.compose.services config value into a list of service
 * names. Accepts either an array of names or an object map (entries whose
 * config value is falsy are dropped); a missing value yields an empty list.
 */
function getServicesFromConfig(services) {
  if (!services) return [];
  if (Array.isArray(services)) return services;
  const enabled = [];
  for (const [serviceName, serviceConfig] of Object.entries(services)) {
    if (serviceConfig) enabled.push(serviceName);
  }
  return enabled;
}
4255
/**
 * `gkm secrets:init` — create the secrets file for a deployment stage.
 *
 * Generates random passwords for every service configured under
 * docker.compose.services, derives connection URLs, persists the result via
 * the secrets storage layer, and prints a masked summary. Refuses to
 * overwrite an existing stage file unless `force` is set.
 */
async function secretsInitCommand(options) {
  const { stage, force } = options;
  // Guard against clobbering an existing secrets file.
  if (!force && require_storage.secretsExist(stage)) {
    logger.error(`Secrets already exist for stage "${stage}". Use --force to overwrite.`);
    process.exit(1);
  }
  const config = await require_config.loadConfig();
  const services = getServicesFromConfig(config.docker?.compose?.services);
  if (services.length === 0) {
    logger.warn("No services configured in docker.compose.services. Creating secrets with empty services.");
  }
  const secrets = createStageSecrets(stage, services);
  await require_storage.writeStageSecrets(secrets);
  // Summary output — connection URLs are always masked here.
  logger.log(`\n✓ Secrets initialized for stage "${stage}"`);
  logger.log(` Location: .gkm/secrets/${stage}.json`);
  logger.log("\n Generated credentials for:");
  for (const service of services) {
    logger.log(` - ${service}`);
  }
  const { DATABASE_URL, REDIS_URL, RABBITMQ_URL } = secrets.urls;
  if (DATABASE_URL) logger.log(`\n DATABASE_URL: ${maskUrl(DATABASE_URL)}`);
  if (REDIS_URL) logger.log(` REDIS_URL: ${maskUrl(REDIS_URL)}`);
  if (RABBITMQ_URL) logger.log(` RABBITMQ_URL: ${maskUrl(RABBITMQ_URL)}`);
  logger.log(`\n Use "gkm secrets:show --stage ${stage}" to view secrets`);
  logger.log(` Use "gkm secrets:set <KEY> <VALUE> --stage ${stage}" to add custom secrets`);
}
4280
/**
 * Consume process.stdin to EOF and return its contents as a trimmed UTF-8
 * string.
 */
async function readStdin() {
  const buffers = [];
  for await (const piece of process.stdin) {
    buffers.push(piece);
  }
  return Buffer.concat(buffers).toString("utf-8").trim();
}
4288
/**
 * `gkm secrets:set` — store one custom secret for a stage.
 *
 * The value may be passed as an argument or piped via stdin; running
 * interactively with no value argument prints usage help and exits.
 */
async function secretsSetCommand(key, value, options) {
  const { stage } = options;
  let secretValue = value;
  if (!secretValue) {
    // Interactive terminal and no value argument: nothing to read.
    if (process.stdin.isTTY) {
      logger.error("No value provided. Use: gkm secrets:set KEY VALUE --stage <stage>");
      logger.error('Or pipe from stdin: echo "value" | gkm secrets:set KEY --stage <stage>');
      process.exit(1);
    }
    secretValue = await readStdin();
    if (!secretValue) {
      logger.error("No value received from stdin");
      process.exit(1);
    }
  }
  try {
    await require_storage.setCustomSecret(stage, key, secretValue);
    logger.log(`\n✓ Secret "${key}" set for stage "${stage}"`);
  } catch (error) {
    logger.error(error instanceof Error ? error.message : "Failed to set secret");
    process.exit(1);
  }
}
4315
/**
 * `gkm secrets:show` — print service credentials, connection URLs, and
 * custom secrets for a stage. Sensitive values are masked unless `reveal`
 * is set.
 */
async function secretsShowCommand(options) {
  const { stage, reveal } = options;
  const secrets = await require_storage.readStageSecrets(stage);
  if (!secrets) {
    logger.error(`No secrets found for stage "${stage}". Run "gkm secrets:init --stage ${stage}" first.`);
    process.exit(1);
  }
  logger.log(`\nSecrets for stage "${stage}":`);
  logger.log(` Created: ${secrets.createdAt}`);
  logger.log(` Updated: ${secrets.updatedAt}`);
  logger.log("\nService Credentials:");
  for (const [serviceName, credentials] of Object.entries(secrets.services)) {
    if (!credentials) continue;
    logger.log(`\n ${serviceName}:`);
    logger.log(` host: ${credentials.host}`);
    logger.log(` port: ${credentials.port}`);
    logger.log(` username: ${credentials.username}`);
    logger.log(` password: ${reveal ? credentials.password : require_storage.maskPassword(credentials.password)}`);
    // database / vhost only exist for some services.
    if (credentials.database) logger.log(` database: ${credentials.database}`);
    if (credentials.vhost) logger.log(` vhost: ${credentials.vhost}`);
  }
  logger.log("\nConnection URLs:");
  const printUrl = (label, url) => {
    if (url) logger.log(` ${label}: ${reveal ? url : maskUrl(url)}`);
  };
  printUrl("DATABASE_URL", secrets.urls.DATABASE_URL);
  printUrl("REDIS_URL", secrets.urls.REDIS_URL);
  printUrl("RABBITMQ_URL", secrets.urls.RABBITMQ_URL);
  const customEntries = Object.entries(secrets.custom);
  if (customEntries.length > 0) {
    logger.log("\nCustom Secrets:");
    for (const [key, value] of customEntries) {
      logger.log(` ${key}: ${reveal ? value : require_storage.maskPassword(value)}`);
    }
  }
  if (!reveal) logger.log("\nUse --reveal to show actual values");
}
4349
/**
 * `gkm secrets:rotate` — regenerate service passwords for a stage.
 *
 * With `service` set, only that service is rotated; otherwise every
 * configured service receives a new password. Connection URLs are rebuilt
 * as part of rotation.
 */
async function secretsRotateCommand(options) {
  const { stage, service } = options;
  const secrets = await require_storage.readStageSecrets(stage);
  if (!secrets) {
    logger.error(`No secrets found for stage "${stage}". Run "gkm secrets:init --stage ${stage}" first.`);
    process.exit(1);
  }
  if (service) {
    if (!secrets.services[service]) {
      logger.error(`Service "${service}" not configured in stage "${stage}"`);
      process.exit(1);
    }
    const updated = rotateServicePassword(secrets, service);
    await require_storage.writeStageSecrets(updated);
    logger.log(`\n✓ Password rotated for ${service} in stage "${stage}"`);
  } else {
    const serviceNames = Object.keys(secrets.services);
    // Fold the rotation over every service, threading the updated record.
    const updated = serviceNames.reduce(
      (current, name) => rotateServicePassword(current, name),
      secrets
    );
    await require_storage.writeStageSecrets(updated);
    logger.log(`\n✓ Passwords rotated for all services in stage "${stage}": ${serviceNames.join(", ")}`);
  }
  logger.log(`\nUse "gkm secrets:show --stage ${stage}" to view new values`);
}
4376
/**
 * `gkm secrets:import` — load custom secrets from a JSON file of
 * string-to-string pairs.
 *
 * By default imported keys are merged over the existing custom secrets;
 * with merge disabled (--no-merge) the custom map is replaced entirely.
 * Exits non-zero on a missing file, malformed JSON, non-string values, or
 * a stage that has not been initialized.
 */
async function secretsImportCommand(file, options) {
  const { stage, merge = true } = options;
  if (!(0, node_fs.existsSync)(file)) {
    logger.error(`File not found: ${file}`);
    process.exit(1);
  }
  let importedSecrets;
  try {
    const raw = await (0, node_fs_promises.readFile)(file, "utf-8");
    importedSecrets = JSON.parse(raw);
    if (typeof importedSecrets !== "object" || importedSecrets === null) {
      throw new Error("JSON must be an object");
    }
    // Only flat string values are accepted.
    for (const [key, value] of Object.entries(importedSecrets)) {
      if (typeof value !== "string") {
        throw new Error(`Value for "${key}" must be a string, got ${typeof value}`);
      }
    }
  } catch (error) {
    logger.error(`Failed to parse JSON file: ${error instanceof Error ? error.message : "Invalid JSON"}`);
    process.exit(1);
  }
  const secrets = await require_storage.readStageSecrets(stage);
  if (!secrets) {
    logger.error(`No secrets found for stage "${stage}". Run "gkm secrets:init --stage ${stage}" first.`);
    process.exit(1);
  }
  const updatedCustom = merge ? { ...secrets.custom, ...importedSecrets } : importedSecrets;
  await require_storage.writeStageSecrets({
    ...secrets,
    updatedAt: new Date().toISOString(),
    custom: updatedCustom
  });
  const importedCount = Object.keys(importedSecrets).length;
  const totalCount = Object.keys(updatedCustom).length;
  logger.log(`\n✓ Imported ${importedCount} secrets for stage "${stage}"`);
  if (merge && totalCount > importedCount) logger.log(` Total custom secrets: ${totalCount}`);
  logger.log("\n Imported keys:");
  for (const key of Object.keys(importedSecrets)) {
    logger.log(` - ${key}`);
  }
}
4417
/**
 * Mask the password portion of a connection URL for safe display.
 * Strings that fail to parse as URLs are returned unchanged.
 */
function maskUrl(url) {
  try {
    const parsed = new URL(url);
    if (parsed.password) {
      parsed.password = require_storage.maskPassword(parsed.password);
    }
    return parsed.toString();
  } catch {
    // Not a parseable URL — nothing to mask.
    return url;
  }
}
2674
4429
 
2675
4430
  //#endregion
@@ -2681,34 +4436,39 @@ program.command("init").description("Scaffold a new project").argument("[name]",
2681
4436
  const globalOptions = program.opts();
2682
4437
  if (globalOptions.cwd) process.chdir(globalOptions.cwd);
2683
4438
  await initCommand(name$1, options);
2684
- } catch (error) {
2685
- console.error("Init failed:", error.message);
4439
+ } catch (_error) {
2686
4440
  process.exit(1);
2687
4441
  }
2688
4442
  });
2689
// `gkm build` — compile handlers for a single provider (--provider), the
// deprecated comma-separated list (--providers), or the configured default.
// Restores user-facing diagnostics: the previous version silently exited on
// an invalid provider and on any build failure, giving the user no feedback.
program.command("build")
  .description("Build handlers from endpoints, functions, and crons")
  .option("--provider <provider>", "Target provider for generated handlers (aws, server)")
  .option("--providers <providers>", "[DEPRECATED] Use --provider instead. Target providers for generated handlers (comma-separated)")
  .option("--enable-openapi", "Enable OpenAPI documentation generation for server builds")
  .option("--production", "Build for production (no dev tools, bundled output)")
  .option("--skip-bundle", "Skip bundling step in production build")
  .option("--stage <stage>", "Inject encrypted secrets for deployment stage")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      // Options shared by every buildCommand invocation below.
      const common = {
        enableOpenApi: options.enableOpenapi || false,
        production: options.production || false,
        skipBundle: options.skipBundle || false,
        stage: options.stage
      };
      if (options.provider) {
        if (!["aws", "server"].includes(options.provider)) {
          console.error(`Invalid provider: ${options.provider}. Must be 'aws' or 'server'.`);
          process.exit(1);
        }
        await buildCommand({ provider: options.provider, ...common });
      } else if (options.providers) {
        // De-duplicate the deprecated comma-separated provider list.
        const providerList = [...new Set(options.providers.split(",").map((p) => p.trim()))];
        await buildCommand({ providers: providerList, ...common });
      } else {
        await buildCommand(common);
      }
    } catch (error) {
      console.error("Build failed:", error instanceof Error ? error.message : error);
      process.exit(1);
    }
  });
@@ -2717,12 +4477,11 @@ program.command("dev").description("Start development server with automatic relo
2717
4477
  const globalOptions = program.opts();
2718
4478
  if (globalOptions.cwd) process.chdir(globalOptions.cwd);
2719
4479
  await devCommand({
2720
- port: options.port ? Number.parseInt(options.port) : 3e3,
4480
+ port: options.port ? Number.parseInt(options.port, 10) : 3e3,
2721
4481
  portExplicit: !!options.port,
2722
4482
  enableOpenApi: options.enableOpenapi ?? true
2723
4483
  });
2724
- } catch (error) {
2725
- console.error("Dev server failed:", error.message);
4484
+ } catch (_error) {
2726
4485
  process.exit(1);
2727
4486
  }
2728
4487
  });
@@ -2746,8 +4505,7 @@ program.command("openapi").description("Generate OpenAPI specification from endp
2746
4505
  const globalOptions = program.opts();
2747
4506
  if (globalOptions.cwd) process.chdir(globalOptions.cwd);
2748
4507
  await require_openapi.openapiCommand({});
2749
- } catch (error) {
2750
- console.error("OpenAPI generation failed:", error.message);
4508
+ } catch (_error) {
2751
4509
  process.exit(1);
2752
4510
  }
2753
4511
  });
@@ -2756,8 +4514,194 @@ program.command("generate:react-query").description("Generate React Query hooks
2756
4514
  const globalOptions = program.opts();
2757
4515
  if (globalOptions.cwd) process.chdir(globalOptions.cwd);
2758
4516
  await require_openapi_react_query.generateReactQueryCommand(options);
4517
+ } catch (_error) {
4518
+ process.exit(1);
4519
+ }
4520
+ });
4521
// `gkm docker` — generate Docker deployment files, optionally building and
// pushing the image.
program.command("docker")
  .description("Generate Docker deployment files")
  .option("--build", "Build Docker image after generating files")
  .option("--push", "Push image to registry after building")
  .option("--tag <tag>", "Image tag", "latest")
  .option("--registry <registry>", "Container registry URL")
  .option("--slim", "Use slim Dockerfile (assumes pre-built bundle exists)")
  .option("--turbo", "Use turbo prune for monorepo optimization")
  .option("--turbo-package <name>", "Package name for turbo prune")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      await dockerCommand(options);
    } catch (_error) {
      process.exit(1);
    }
  });
4530
// `gkm prepack` — production packaging: with --slim, pre-build a production
// server bundle locally, then generate Docker files (and optionally
// build/push the image). Dead leftovers from removed logging (an empty
// `if (options.slim) {}` branch and an unused image-ref computation) have
// been dropped; they had no effect.
program.command("prepack")
  .description("Generate Docker files for production deployment")
  .option("--build", "Build Docker image after generating files")
  .option("--push", "Push image to registry after building")
  .option("--tag <tag>", "Image tag", "latest")
  .option("--registry <registry>", "Container registry URL")
  .option("--slim", "Build locally first, then use slim Dockerfile")
  .option("--skip-bundle", "Skip bundling step (only with --slim)")
  .option("--turbo", "Use turbo prune for monorepo optimization")
  .option("--turbo-package <name>", "Package name for turbo prune")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      if (options.slim) {
        // --slim expects a pre-built production server bundle.
        await buildCommand({
          provider: "server",
          production: true,
          skipBundle: options.skipBundle
        });
      }
      await dockerCommand({
        build: options.build,
        push: options.push,
        tag: options.tag,
        registry: options.registry,
        slim: options.slim,
        turbo: options.turbo,
        turboPackage: options.turboPackage
      });
    } catch (_error) {
      process.exit(1);
    }
  });
4558
// secrets:* command registrations. Each action applies the global --cwd
// option before delegating; the command implementations log their own error
// messages, so unexpected failures just exit non-zero.
program.command("secrets:init")
  .description("Initialize secrets for a deployment stage")
  .requiredOption("--stage <stage>", "Stage name (e.g., production, staging)")
  .option("--force", "Overwrite existing secrets")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      await secretsInitCommand(options);
    } catch (_error) {
      process.exit(1);
    }
  });
program.command("secrets:set")
  .description("Set a custom secret for a stage")
  .argument("<key>", "Secret key (e.g., API_KEY)")
  .argument("[value]", "Secret value (reads from stdin if omitted)")
  .requiredOption("--stage <stage>", "Stage name")
  .action(async (key, value, options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      await secretsSetCommand(key, value, options);
    } catch (_error) {
      process.exit(1);
    }
  });
program.command("secrets:show")
  .description("Show secrets for a stage")
  .requiredOption("--stage <stage>", "Stage name")
  .option("--reveal", "Show actual secret values (not masked)")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      await secretsShowCommand(options);
    } catch (_error) {
      process.exit(1);
    }
  });
program.command("secrets:rotate")
  .description("Rotate service passwords")
  .requiredOption("--stage <stage>", "Stage name")
  .option("--service <service>", "Specific service to rotate (postgres, redis, rabbitmq)")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      await secretsRotateCommand(options);
    } catch (_error) {
      process.exit(1);
    }
  });
program.command("secrets:import")
  .description("Import secrets from a JSON file")
  .argument("<file>", "JSON file path (e.g., secrets.json)")
  .requiredOption("--stage <stage>", "Stage name")
  .option("--no-merge", "Replace all custom secrets instead of merging")
  .action(async (file, options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      await secretsImportCommand(file, options);
    } catch (_error) {
      process.exit(1);
    }
  });
4603
// `gkm deploy` — deploy to a validated provider (docker, dokploy,
// aws-lambda) for a given stage.
program.command("deploy")
  .description("Deploy application to a provider")
  .requiredOption("--provider <provider>", "Deploy provider (docker, dokploy, aws-lambda)")
  .requiredOption("--stage <stage>", "Deployment stage (e.g., production, staging)")
  .option("--tag <tag>", "Image tag (default: stage-timestamp)")
  .option("--skip-push", "Skip pushing image to registry")
  .option("--skip-build", "Skip build step (use existing build)")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      const validProviders = ["docker", "dokploy", "aws-lambda"];
      if (!validProviders.includes(options.provider)) {
        console.error(`Invalid provider: ${options.provider}\nValid providers: ${validProviders.join(", ")}`);
        process.exit(1);
      }
      await deployCommand({
        provider: options.provider,
        stage: options.stage,
        tag: options.tag,
        skipPush: options.skipPush,
        skipBuild: options.skipBuild
      });
    } catch (_error) {
      process.exit(1);
    }
  });
4627
// `gkm deploy:init` — bootstrap a Dokploy project and application.
program.command("deploy:init")
  .description("Initialize Dokploy deployment (create project and application)")
  .option("--endpoint <url>", "Dokploy server URL (uses stored credentials if logged in)")
  .requiredOption("--project <name>", "Project name (creates if not exists)")
  .requiredOption("--app <name>", "Application name")
  .option("--project-id <id>", "Use existing project ID instead of creating")
  .option("--registry-id <id>", "Configure registry for the application")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      await deployInitCommand({
        endpoint: options.endpoint,
        projectName: options.project,
        appName: options.app,
        projectId: options.projectId,
        registryId: options.registryId
      });
    } catch (error) {
      console.error(error instanceof Error ? error.message : "Failed to initialize deployment");
      process.exit(1);
    }
  });
// `gkm deploy:list` — list Dokploy resources. With neither flag given, both
// projects and registries are listed.
program.command("deploy:list")
  .description("List Dokploy resources (projects, registries)")
  .option("--endpoint <url>", "Dokploy server URL (uses stored credentials if logged in)")
  .option("--projects", "List projects")
  .option("--registries", "List registries")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      const listProjects = () => deployListCommand({ endpoint: options.endpoint, resource: "projects" });
      const listRegistries = () => deployListCommand({ endpoint: options.endpoint, resource: "registries" });
      if (options.projects) await listProjects();
      if (options.registries) await listRegistries();
      if (!options.projects && !options.registries) {
        await listProjects();
        await listRegistries();
      }
    } catch (error) {
      console.error(error instanceof Error ? error.message : "Failed to list resources");
      process.exit(1);
    }
  });
4670
// Credential management commands (currently Dokploy is the only supported
// service): login stores a token, logout removes it, whoami reports status.
program.command("login")
  .description("Authenticate with a deployment service")
  .option("--service <service>", "Service to login to (dokploy)", "dokploy")
  .option("--token <token>", "API token (will prompt if not provided)")
  .option("--endpoint <url>", "Service endpoint URL")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      if (options.service !== "dokploy") {
        console.error(`Unknown service: ${options.service}. Supported: dokploy`);
        process.exit(1);
      }
      await loginCommand({
        service: options.service,
        token: options.token,
        endpoint: options.endpoint
      });
    } catch (error) {
      console.error(error instanceof Error ? error.message : "Failed to login");
      process.exit(1);
    }
  });
program.command("logout")
  .description("Remove stored credentials")
  .option("--service <service>", "Service to logout from (dokploy, all)", "dokploy")
  .action(async (options) => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      await logoutCommand({ service: options.service });
    } catch (error) {
      console.error(error instanceof Error ? error.message : "Failed to logout");
      process.exit(1);
    }
  });
program.command("whoami")
  .description("Show current authentication status")
  .action(async () => {
    try {
      const globalOptions = program.opts();
      if (globalOptions.cwd) process.chdir(globalOptions.cwd);
      await whoamiCommand();
    } catch (error) {
      console.error(error instanceof Error ? error.message : "Failed to get status");
      process.exit(1);
    }
  });