@geekmidas/cli 0.12.0 → 0.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundler-BjholBlA.cjs +131 -0
- package/dist/bundler-BjholBlA.cjs.map +1 -0
- package/dist/bundler-DWctKN1z.mjs +130 -0
- package/dist/bundler-DWctKN1z.mjs.map +1 -0
- package/dist/config.d.cts +1 -1
- package/dist/config.d.mts +1 -1
- package/dist/dokploy-api-B7KxOQr3.cjs +3 -0
- package/dist/dokploy-api-C7F9VykY.cjs +317 -0
- package/dist/dokploy-api-C7F9VykY.cjs.map +1 -0
- package/dist/dokploy-api-CaETb2L6.mjs +305 -0
- package/dist/dokploy-api-CaETb2L6.mjs.map +1 -0
- package/dist/dokploy-api-DHvfmWbi.mjs +3 -0
- package/dist/{encryption-Dyf_r1h-.cjs → encryption-D7Efcdi9.cjs} +1 -1
- package/dist/{encryption-Dyf_r1h-.cjs.map → encryption-D7Efcdi9.cjs.map} +1 -1
- package/dist/{encryption-C8H-38Yy.mjs → encryption-h4Nb6W-M.mjs} +1 -1
- package/dist/{encryption-C8H-38Yy.mjs.map → encryption-h4Nb6W-M.mjs.map} +1 -1
- package/dist/index.cjs +1520 -1136
- package/dist/index.cjs.map +1 -1
- package/dist/index.mjs +1520 -1136
- package/dist/index.mjs.map +1 -1
- package/dist/{openapi-Bt_1FDpT.cjs → openapi-C89hhkZC.cjs} +3 -3
- package/dist/{openapi-Bt_1FDpT.cjs.map → openapi-C89hhkZC.cjs.map} +1 -1
- package/dist/{openapi-BfFlOBCG.mjs → openapi-CZVcfxk-.mjs} +3 -3
- package/dist/{openapi-BfFlOBCG.mjs.map → openapi-CZVcfxk-.mjs.map} +1 -1
- package/dist/{openapi-react-query-B6XTeGqS.mjs → openapi-react-query-CM2_qlW9.mjs} +1 -1
- package/dist/{openapi-react-query-B6XTeGqS.mjs.map → openapi-react-query-CM2_qlW9.mjs.map} +1 -1
- package/dist/{openapi-react-query-B-sNWHFU.cjs → openapi-react-query-iKjfLzff.cjs} +1 -1
- package/dist/{openapi-react-query-B-sNWHFU.cjs.map → openapi-react-query-iKjfLzff.cjs.map} +1 -1
- package/dist/openapi-react-query.cjs +1 -1
- package/dist/openapi-react-query.mjs +1 -1
- package/dist/openapi.cjs +1 -1
- package/dist/openapi.d.cts +1 -1
- package/dist/openapi.d.mts +1 -1
- package/dist/openapi.mjs +1 -1
- package/dist/{storage-C9PU_30f.mjs → storage-BaOP55oq.mjs} +48 -2
- package/dist/storage-BaOP55oq.mjs.map +1 -0
- package/dist/{storage-BXoJvmv2.cjs → storage-Bn3K9Ccu.cjs} +59 -1
- package/dist/storage-Bn3K9Ccu.cjs.map +1 -0
- package/dist/storage-UfyTn7Zm.cjs +7 -0
- package/dist/storage-nkGIjeXt.mjs +3 -0
- package/dist/{types-BR0M2v_c.d.mts → types-BgaMXsUa.d.cts} +3 -1
- package/dist/{types-BR0M2v_c.d.mts.map → types-BgaMXsUa.d.cts.map} +1 -1
- package/dist/{types-BhkZc-vm.d.cts → types-iFk5ms7y.d.mts} +3 -1
- package/dist/{types-BhkZc-vm.d.cts.map → types-iFk5ms7y.d.mts.map} +1 -1
- package/package.json +4 -4
- package/src/auth/__tests__/credentials.spec.ts +127 -0
- package/src/auth/__tests__/index.spec.ts +69 -0
- package/src/auth/credentials.ts +33 -0
- package/src/auth/index.ts +57 -50
- package/src/build/__tests__/bundler.spec.ts +444 -0
- package/src/build/__tests__/endpoint-analyzer.spec.ts +623 -0
- package/src/build/__tests__/handler-templates.spec.ts +272 -0
- package/src/build/bundler.ts +126 -8
- package/src/build/index.ts +31 -0
- package/src/build/types.ts +6 -0
- package/src/deploy/__tests__/dokploy-api.spec.ts +698 -0
- package/src/deploy/__tests__/dokploy.spec.ts +196 -6
- package/src/deploy/__tests__/index.spec.ts +339 -0
- package/src/deploy/__tests__/init.spec.ts +147 -16
- package/src/deploy/docker.ts +32 -3
- package/src/deploy/dokploy-api.ts +581 -0
- package/src/deploy/dokploy.ts +66 -93
- package/src/deploy/index.ts +587 -32
- package/src/deploy/init.ts +192 -249
- package/src/deploy/types.ts +19 -1
- package/src/dev/__tests__/index.spec.ts +95 -0
- package/src/docker/__tests__/templates.spec.ts +144 -0
- package/src/docker/index.ts +96 -6
- package/src/docker/templates.ts +114 -27
- package/src/generators/EndpointGenerator.ts +2 -2
- package/src/index.ts +34 -13
- package/src/secrets/__tests__/storage.spec.ts +208 -0
- package/src/secrets/storage.ts +73 -0
- package/src/types.ts +2 -0
- package/dist/bundler-DRXCw_YR.mjs +0 -70
- package/dist/bundler-DRXCw_YR.mjs.map +0 -1
- package/dist/bundler-WsEvH_b2.cjs +0 -71
- package/dist/bundler-WsEvH_b2.cjs.map +0 -1
- package/dist/storage-BUYQJgz7.cjs +0 -4
- package/dist/storage-BXoJvmv2.cjs.map +0 -1
- package/dist/storage-C9PU_30f.mjs.map +0 -1
- package/dist/storage-DLJAYxzJ.mjs +0 -3
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
const require_chunk = require('./chunk-CUT6urMc.cjs');
|
|
2
|
+
const node_fs = require_chunk.__toESM(require("node:fs"));
|
|
3
|
+
const node_path = require_chunk.__toESM(require("node:path"));
|
|
4
|
+
const node_fs_promises = require_chunk.__toESM(require("node:fs/promises"));
|
|
5
|
+
const node_child_process = require_chunk.__toESM(require("node:child_process"));
|
|
6
|
+
|
|
7
|
+
//#region src/build/bundler.ts
|
|
8
|
+
/**
 * Gather the union of environment variable names required by the given
 * constructs, as reported by each construct's getEnvironment().
 *
 * @param constructs - Constructs to inspect
 * @returns Sorted, de-duplicated array of required env var names
 */
async function collectRequiredEnvVars(constructs) {
	const names = new Set();
	for (const item of constructs) {
		for (const name of await item.getEnvironment()) names.add(name);
	}
	return [...names].sort();
}
|
|
23
|
+
/**
|
|
24
|
+
* Bundle the server application using tsdown
|
|
25
|
+
*
|
|
26
|
+
* @param options - Bundle configuration options
|
|
27
|
+
* @returns Bundle result with output path and optional master key
|
|
28
|
+
*/
|
|
29
|
+
/**
 * Default connection-string values auto-populated for docker compose
 * services when the stage secrets do not already define them.
 */
const DOCKER_SERVICE_ENV_VARS = {
	postgres: {
		DATABASE_URL: "postgresql://postgres:postgres@postgres:5432/app",
	},
	redis: {
		REDIS_URL: "redis://redis:6379",
	},
	rabbitmq: {
		RABBITMQ_URL: "amqp://rabbitmq:5672",
	},
};
|
|
35
|
+
/**
 * Bundle the server application by invoking `npx tsdown` as a child process.
 *
 * When `stage` is provided, stage secrets are loaded, optionally validated
 * against the env vars required by `constructs`, then encrypted and embedded
 * into the bundle via tsdown `--env.*` defines; the ephemeral master key from
 * encryption is returned for the deployment step.
 *
 * @param options - Bundle configuration: entryPoint, outputDir, minify,
 *   sourcemap, external, plus optional stage, constructs, dockerServices.
 * @returns Object with `outputPath` (bundled server.mjs) and, when a stage
 *   was given, the ephemeral `masterKey`.
 * @throws Error when no secrets exist for the stage, when required env vars
 *   are missing, or when the tsdown process fails.
 */
async function bundleServer(options) {
	const { entryPoint, outputDir, minify, sourcemap, external, stage, constructs, dockerServices } = options;
	// Ensure the output directory exists before tsdown writes into it.
	await (0, node_fs_promises.mkdir)(outputDir, { recursive: true });
	// tsdown CLI args: ESM output targeting Node 22, ignoring any tsdown
	// config file in the workspace (--no-config), cleaning the out dir.
	const args = [
		"npx",
		"tsdown",
		entryPoint,
		"--no-config",
		"--out-dir",
		outputDir,
		"--format",
		"esm",
		"--platform",
		"node",
		"--target",
		"node22",
		"--clean"
	];
	if (minify) args.push("--minify");
	if (sourcemap) args.push("--sourcemap");
	// Caller-supplied packages to leave unbundled.
	for (const ext of external) args.push("--external", ext);
	// Node builtins are always external.
	args.push("--external", "node:*");
	// Ephemeral key; set only when stage secrets are encrypted below.
	let masterKey;
	if (stage) {
		// Lazily load the secrets storage/encryption helpers (sibling dist chunks).
		const { readStageSecrets, toEmbeddableSecrets, validateEnvironmentVariables } = await Promise.resolve().then(() => require("./storage-UfyTn7Zm.cjs"));
		const { encryptSecrets, generateDefineOptions } = await Promise.resolve().then(() => require("./encryption-D7Efcdi9.cjs"));
		const secrets = await readStageSecrets(stage);
		if (!secrets) throw new Error(`No secrets found for stage "${stage}". Run "gkm secrets:init --stage ${stage}" first.`);
		// Auto-populate default connection URLs for enabled docker compose
		// services, unless the stage secrets already define them.
		if (dockerServices) {
			for (const [service, enabled] of Object.entries(dockerServices)) if (enabled && DOCKER_SERVICE_ENV_VARS[service]) for (const [envVar, defaultValue] of Object.entries(DOCKER_SERVICE_ENV_VARS[service])) {
				// Skip when already present in either urls or custom secrets.
				const urlKey = envVar;
				if (!secrets.urls[urlKey] && !secrets.custom[envVar]) {
					secrets.urls[urlKey] = defaultValue;
					console.log(` Auto-populated ${envVar} from docker compose`);
				}
			}
		}
		// Validate that every env var the constructs require is present in the
		// stage secrets; fail fast with an actionable message otherwise.
		if (constructs && constructs.length > 0) {
			console.log(" Analyzing environment variable requirements...");
			const requiredVars = await collectRequiredEnvVars(constructs);
			if (requiredVars.length > 0) {
				const validation = validateEnvironmentVariables(requiredVars, secrets);
				if (!validation.valid) {
					const errorMessage = [
						`Missing environment variables for stage "${stage}":`,
						"",
						...validation.missing.map((v) => ` ❌ ${v}`),
						"",
						"To fix this, either:",
						` 1. Add the missing variables to .gkm/secrets/${stage}.json using:`,
						` gkm secrets:set <KEY> <VALUE> --stage ${stage}`,
						"",
						` 2. Or import from a JSON file:`,
						` gkm secrets:import secrets.json --stage ${stage}`,
						"",
						"Required variables:",
						...validation.required.map((v) => validation.missing.includes(v) ? ` ❌ ${v}` : ` ✓ ${v}`)
					].join("\n");
					throw new Error(errorMessage);
				}
				console.log(` ✓ All ${requiredVars.length} required environment variables found`);
			}
		}
		// Encrypt the secrets and inject them at build time via tsdown's
		// --env.* define options; keep the master key for the deploy step.
		const embeddable = toEmbeddableSecrets(secrets);
		const encrypted = encryptSecrets(embeddable);
		masterKey = encrypted.masterKey;
		const defines = generateDefineOptions(encrypted);
		for (const [key, value] of Object.entries(defines)) args.push(`--env.${key}`, value);
		console.log(` Secrets encrypted for stage "${stage}"`);
	}
	const mjsOutput = (0, node_path.join)(outputDir, "server.mjs");
	try {
		// spawnSync with an args array avoids shell-escaping issues with the
		// --env.* values; a shell is used only on Windows so npx resolves.
		const [cmd, ...cmdArgs] = args;
		const result = (0, node_child_process.spawnSync)(cmd, cmdArgs, {
			cwd: process.cwd(),
			stdio: "inherit",
			shell: process.platform === "win32"
		});
		if (result.error) throw result.error;
		if (result.status !== 0) throw new Error(`tsdown exited with code ${result.status}`);
		// tsdown emits server.js for ESM output; rename to .mjs for explicit ESM.
		const jsOutput = (0, node_path.join)(outputDir, "server.js");
		if ((0, node_fs.existsSync)(jsOutput)) await (0, node_fs_promises.rename)(jsOutput, mjsOutput);
		// Prepend a shebang so the bundled file is directly executable.
		const { readFile } = await import("node:fs/promises");
		const content = await readFile(mjsOutput, "utf-8");
		if (!content.startsWith("#!")) await (0, node_fs_promises.writeFile)(mjsOutput, `#!/usr/bin/env node\n${content}`);
	} catch (error) {
		// Wrap every failure in a single, uniform error for the CLI caller.
		throw new Error(`Failed to bundle server: ${error instanceof Error ? error.message : "Unknown error"}`);
	}
	return {
		outputPath: mjsOutput,
		masterKey
	};
}
|
|
128
|
+
|
|
129
|
+
//#endregion
|
|
130
|
+
exports.bundleServer = bundleServer;
|
|
131
|
+
//# sourceMappingURL=bundler-BjholBlA.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"bundler-BjholBlA.cjs","names":["constructs: Construct[]","DOCKER_SERVICE_ENV_VARS: Record<string, Record<string, string>>","options: BundleOptions","masterKey: string | undefined"],"sources":["../src/build/bundler.ts"],"sourcesContent":["import { spawnSync } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { mkdir, rename, writeFile } from 'node:fs/promises';\nimport { join } from 'node:path';\nimport type { Construct } from '@geekmidas/constructs';\n\nexport interface BundleOptions {\n\t/** Entry point file (e.g., .gkm/server/server.ts) */\n\tentryPoint: string;\n\t/** Output directory for bundled files */\n\toutputDir: string;\n\t/** Minify the output (default: true) */\n\tminify: boolean;\n\t/** Generate sourcemaps (default: false) */\n\tsourcemap: boolean;\n\t/** Packages to exclude from bundling */\n\texternal: string[];\n\t/** Stage for secrets injection (optional) */\n\tstage?: string;\n\t/** Constructs to validate environment variables for */\n\tconstructs?: Construct[];\n\t/** Docker compose services configured (for auto-populating env vars) */\n\tdockerServices?: {\n\t\tpostgres?: boolean;\n\t\tredis?: boolean;\n\t\trabbitmq?: boolean;\n\t};\n}\n\nexport interface BundleResult {\n\t/** Path to the bundled output */\n\toutputPath: string;\n\t/** Ephemeral master key for deployment (only if stage was provided) */\n\tmasterKey?: string;\n}\n\n/**\n * Collect all required environment variables from constructs.\n * Uses the SnifferEnvironmentParser to detect which env vars each service needs.\n *\n * @param constructs - Array of constructs to analyze\n * @returns Deduplicated array of required environment variable names\n */\nasync function collectRequiredEnvVars(\n\tconstructs: Construct[],\n): Promise<string[]> {\n\tconst allEnvVars = new Set<string>();\n\n\tfor (const construct of constructs) {\n\t\tconst envVars = await construct.getEnvironment();\n\t\tenvVars.forEach((v) => allEnvVars.add(v));\n\t}\n\n\treturn 
Array.from(allEnvVars).sort();\n}\n\n/**\n * Bundle the server application using tsdown\n *\n * @param options - Bundle configuration options\n * @returns Bundle result with output path and optional master key\n */\n/** Default env var values for docker compose services */\nconst DOCKER_SERVICE_ENV_VARS: Record<string, Record<string, string>> = {\n\tpostgres: {\n\t\tDATABASE_URL: 'postgresql://postgres:postgres@postgres:5432/app',\n\t},\n\tredis: {\n\t\tREDIS_URL: 'redis://redis:6379',\n\t},\n\trabbitmq: {\n\t\tRABBITMQ_URL: 'amqp://rabbitmq:5672',\n\t},\n};\n\nexport async function bundleServer(\n\toptions: BundleOptions,\n): Promise<BundleResult> {\n\tconst {\n\t\tentryPoint,\n\t\toutputDir,\n\t\tminify,\n\t\tsourcemap,\n\t\texternal,\n\t\tstage,\n\t\tconstructs,\n\t\tdockerServices,\n\t} = options;\n\n\t// Ensure output directory exists\n\tawait mkdir(outputDir, { recursive: true });\n\n\t// Build command-line arguments for tsdown\n\tconst args = [\n\t\t'npx',\n\t\t'tsdown',\n\t\tentryPoint,\n\t\t'--no-config', // Don't use any config file from workspace\n\t\t'--out-dir',\n\t\toutputDir,\n\t\t'--format',\n\t\t'esm',\n\t\t'--platform',\n\t\t'node',\n\t\t'--target',\n\t\t'node22',\n\t\t'--clean',\n\t];\n\n\tif (minify) {\n\t\targs.push('--minify');\n\t}\n\n\tif (sourcemap) {\n\t\targs.push('--sourcemap');\n\t}\n\n\t// Add external packages\n\tfor (const ext of external) {\n\t\targs.push('--external', ext);\n\t}\n\n\t// Always exclude node: builtins\n\targs.push('--external', 'node:*');\n\n\t// Handle secrets injection if stage is provided\n\tlet masterKey: string | undefined;\n\n\tif (stage) {\n\t\tconst {\n\t\t\treadStageSecrets,\n\t\t\ttoEmbeddableSecrets,\n\t\t\tvalidateEnvironmentVariables,\n\t\t} = await import('../secrets/storage');\n\t\tconst { encryptSecrets, generateDefineOptions } = await import(\n\t\t\t'../secrets/encryption'\n\t\t);\n\n\t\tconst secrets = await readStageSecrets(stage);\n\n\t\tif (!secrets) {\n\t\t\tthrow new Error(\n\t\t\t\t`No secrets 
found for stage \"${stage}\". Run \"gkm secrets:init --stage ${stage}\" first.`,\n\t\t\t);\n\t\t}\n\n\t\t// Auto-populate env vars from docker compose services\n\t\tif (dockerServices) {\n\t\t\tfor (const [service, enabled] of Object.entries(dockerServices)) {\n\t\t\t\tif (enabled && DOCKER_SERVICE_ENV_VARS[service]) {\n\t\t\t\t\tfor (const [envVar, defaultValue] of Object.entries(\n\t\t\t\t\t\tDOCKER_SERVICE_ENV_VARS[service],\n\t\t\t\t\t)) {\n\t\t\t\t\t\t// Check if not already in urls or custom\n\t\t\t\t\t\tconst urlKey = envVar as keyof typeof secrets.urls;\n\t\t\t\t\t\tif (!secrets.urls[urlKey] && !secrets.custom[envVar]) {\n\t\t\t\t\t\t\tsecrets.urls[urlKey] = defaultValue;\n\t\t\t\t\t\t\tconsole.log(` Auto-populated ${envVar} from docker compose`);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Validate environment variables if constructs are provided\n\t\tif (constructs && constructs.length > 0) {\n\t\t\tconsole.log(' Analyzing environment variable requirements...');\n\t\t\tconst requiredVars = await collectRequiredEnvVars(constructs);\n\n\t\t\tif (requiredVars.length > 0) {\n\t\t\t\tconst validation = validateEnvironmentVariables(requiredVars, secrets);\n\n\t\t\t\tif (!validation.valid) {\n\t\t\t\t\tconst errorMessage = [\n\t\t\t\t\t\t`Missing environment variables for stage \"${stage}\":`,\n\t\t\t\t\t\t'',\n\t\t\t\t\t\t...validation.missing.map((v) => ` ❌ ${v}`),\n\t\t\t\t\t\t'',\n\t\t\t\t\t\t'To fix this, either:',\n\t\t\t\t\t\t` 1. Add the missing variables to .gkm/secrets/${stage}.json using:`,\n\t\t\t\t\t\t` gkm secrets:set <KEY> <VALUE> --stage ${stage}`,\n\t\t\t\t\t\t'',\n\t\t\t\t\t\t` 2. Or import from a JSON file:`,\n\t\t\t\t\t\t` gkm secrets:import secrets.json --stage ${stage}`,\n\t\t\t\t\t\t'',\n\t\t\t\t\t\t'Required variables:',\n\t\t\t\t\t\t...validation.required.map((v) =>\n\t\t\t\t\t\t\tvalidation.missing.includes(v) ? 
` ❌ ${v}` : ` ✓ ${v}`,\n\t\t\t\t\t\t),\n\t\t\t\t\t].join('\\n');\n\n\t\t\t\t\tthrow new Error(errorMessage);\n\t\t\t\t}\n\n\t\t\t\tconsole.log(\n\t\t\t\t\t` ✓ All ${requiredVars.length} required environment variables found`,\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\t// Convert to embeddable format and encrypt\n\t\tconst embeddable = toEmbeddableSecrets(secrets);\n\t\tconst encrypted = encryptSecrets(embeddable);\n\t\tmasterKey = encrypted.masterKey;\n\n\t\t// Add define options for build-time injection using tsdown's --env.* format\n\t\tconst defines = generateDefineOptions(encrypted);\n\t\tfor (const [key, value] of Object.entries(defines)) {\n\t\t\targs.push(`--env.${key}`, value);\n\t\t}\n\n\t\tconsole.log(` Secrets encrypted for stage \"${stage}\"`);\n\t}\n\n\tconst mjsOutput = join(outputDir, 'server.mjs');\n\n\ttry {\n\t\t// Run tsdown with command-line arguments\n\t\t// Use spawnSync with args array to avoid shell escaping issues with --define values\n\t\t// args is always populated with ['npx', 'tsdown', ...] 
so cmd is never undefined\n\t\tconst [cmd, ...cmdArgs] = args as [string, ...string[]];\n\t\tconst result = spawnSync(cmd, cmdArgs, {\n\t\t\tcwd: process.cwd(),\n\t\t\tstdio: 'inherit',\n\t\t\tshell: process.platform === 'win32', // Only use shell on Windows for npx resolution\n\t\t});\n\n\t\tif (result.error) {\n\t\t\tthrow result.error;\n\t\t}\n\t\tif (result.status !== 0) {\n\t\t\tthrow new Error(`tsdown exited with code ${result.status}`);\n\t\t}\n\n\t\t// Rename output to .mjs for explicit ESM\n\t\t// tsdown outputs as server.js for ESM format\n\t\tconst jsOutput = join(outputDir, 'server.js');\n\n\t\tif (existsSync(jsOutput)) {\n\t\t\tawait rename(jsOutput, mjsOutput);\n\t\t}\n\n\t\t// Add shebang to the bundled file\n\t\tconst { readFile } = await import('node:fs/promises');\n\t\tconst content = await readFile(mjsOutput, 'utf-8');\n\t\tif (!content.startsWith('#!')) {\n\t\t\tawait writeFile(mjsOutput, `#!/usr/bin/env node\\n${content}`);\n\t\t}\n\t} catch (error) {\n\t\tthrow new Error(\n\t\t\t`Failed to bundle server: ${error instanceof Error ? 
error.message : 'Unknown error'}`,\n\t\t);\n\t}\n\n\treturn {\n\t\toutputPath: mjsOutput,\n\t\tmasterKey,\n\t};\n}\n"],"mappings":";;;;;;;;;;;;;;AA2CA,eAAe,uBACdA,YACoB;CACpB,MAAM,6BAAa,IAAI;AAEvB,MAAK,MAAM,aAAa,YAAY;EACnC,MAAM,UAAU,MAAM,UAAU,gBAAgB;AAChD,UAAQ,QAAQ,CAAC,MAAM,WAAW,IAAI,EAAE,CAAC;CACzC;AAED,QAAO,MAAM,KAAK,WAAW,CAAC,MAAM;AACpC;;;;;;;;AASD,MAAMC,0BAAkE;CACvE,UAAU,EACT,cAAc,mDACd;CACD,OAAO,EACN,WAAW,qBACX;CACD,UAAU,EACT,cAAc,uBACd;AACD;AAED,eAAsB,aACrBC,SACwB;CACxB,MAAM,EACL,YACA,WACA,QACA,WACA,UACA,OACA,YACA,gBACA,GAAG;AAGJ,OAAM,4BAAM,WAAW,EAAE,WAAW,KAAM,EAAC;CAG3C,MAAM,OAAO;EACZ;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;CACA;AAED,KAAI,OACH,MAAK,KAAK,WAAW;AAGtB,KAAI,UACH,MAAK,KAAK,cAAc;AAIzB,MAAK,MAAM,OAAO,SACjB,MAAK,KAAK,cAAc,IAAI;AAI7B,MAAK,KAAK,cAAc,SAAS;CAGjC,IAAIC;AAEJ,KAAI,OAAO;EACV,MAAM,EACL,kBACA,qBACA,8BACA,GAAG,2CAAM;EACV,MAAM,EAAE,gBAAgB,uBAAuB,GAAG,2CAAM;EAIxD,MAAM,UAAU,MAAM,iBAAiB,MAAM;AAE7C,OAAK,QACJ,OAAM,IAAI,OACR,8BAA8B,MAAM,mCAAmC,MAAM;AAKhF,MAAI,gBACH;QAAK,MAAM,CAAC,SAAS,QAAQ,IAAI,OAAO,QAAQ,eAAe,CAC9D,KAAI,WAAW,wBAAwB,SACtC,MAAK,MAAM,CAAC,QAAQ,aAAa,IAAI,OAAO,QAC3C,wBAAwB,SACxB,EAAE;IAEF,MAAM,SAAS;AACf,SAAK,QAAQ,KAAK,YAAY,QAAQ,OAAO,SAAS;AACrD,aAAQ,KAAK,UAAU;AACvB,aAAQ,KAAK,mBAAmB,OAAO,sBAAsB;IAC7D;GACD;EAEF;AAIF,MAAI,cAAc,WAAW,SAAS,GAAG;AACxC,WAAQ,IAAI,mDAAmD;GAC/D,MAAM,eAAe,MAAM,uBAAuB,WAAW;AAE7D,OAAI,aAAa,SAAS,GAAG;IAC5B,MAAM,aAAa,6BAA6B,cAAc,QAAQ;AAEtE,SAAK,WAAW,OAAO;KACtB,MAAM,eAAe;OACnB,2CAA2C,MAAM;MAClD;MACA,GAAG,WAAW,QAAQ,IAAI,CAAC,OAAO,MAAM,EAAE,EAAE;MAC5C;MACA;OACC,iDAAiD,MAAM;OACvD,6CAA6C,MAAM;MACpD;OACC;OACA,+CAA+C,MAAM;MACtD;MACA;MACA,GAAG,WAAW,SAAS,IAAI,CAAC,MAC3B,WAAW,QAAQ,SAAS,EAAE,IAAI,MAAM,EAAE,KAAK,MAAM,EAAE,EACvD;KACD,EAAC,KAAK,KAAK;AAEZ,WAAM,IAAI,MAAM;IAChB;AAED,YAAQ,KACN,UAAU,aAAa,OAAO,uCAC/B;GACD;EACD;EAGD,MAAM,aAAa,oBAAoB,QAAQ;EAC/C,MAAM,YAAY,eAAe,WAAW;AAC5C,cAAY,UAAU;EAGtB,MAAM,UAAU,sBAAsB,UAAU;AAChD,OAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,QAAQ,CACjD,MAAK,MAAM,QAAQ,IAAI,GAAG,MAAM;AAGjC,UAAQ,KAAK
,iCAAiC,MAAM,GAAG;CACvD;CAED,MAAM,YAAY,oBAAK,WAAW,aAAa;AAE/C,KAAI;EAIH,MAAM,CAAC,KAAK,GAAG,QAAQ,GAAG;EAC1B,MAAM,SAAS,kCAAU,KAAK,SAAS;GACtC,KAAK,QAAQ,KAAK;GAClB,OAAO;GACP,OAAO,QAAQ,aAAa;EAC5B,EAAC;AAEF,MAAI,OAAO,MACV,OAAM,OAAO;AAEd,MAAI,OAAO,WAAW,EACrB,OAAM,IAAI,OAAO,0BAA0B,OAAO,OAAO;EAK1D,MAAM,WAAW,oBAAK,WAAW,YAAY;AAE7C,MAAI,wBAAW,SAAS,CACvB,OAAM,6BAAO,UAAU,UAAU;EAIlC,MAAM,EAAE,UAAU,GAAG,MAAM,OAAO;EAClC,MAAM,UAAU,MAAM,SAAS,WAAW,QAAQ;AAClD,OAAK,QAAQ,WAAW,KAAK,CAC5B,OAAM,gCAAU,YAAY,uBAAuB,QAAQ,EAAE;CAE9D,SAAQ,OAAO;AACf,QAAM,IAAI,OACR,2BAA2B,iBAAiB,QAAQ,MAAM,UAAU,gBAAgB;CAEtF;AAED,QAAO;EACN,YAAY;EACZ;CACA;AACD"}
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
import { existsSync } from "node:fs";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { mkdir, rename, writeFile } from "node:fs/promises";
|
|
4
|
+
import { spawnSync } from "node:child_process";
|
|
5
|
+
|
|
6
|
+
//#region src/build/bundler.ts
|
|
7
|
+
/**
 * Gather the union of environment variable names required by the given
 * constructs, as reported by each construct's getEnvironment().
 *
 * @param constructs - Constructs to inspect
 * @returns Sorted, de-duplicated array of required env var names
 */
async function collectRequiredEnvVars(constructs) {
	const names = new Set();
	for (const item of constructs) {
		for (const name of await item.getEnvironment()) names.add(name);
	}
	return [...names].sort();
}
|
|
22
|
+
/**
|
|
23
|
+
* Bundle the server application using tsdown
|
|
24
|
+
*
|
|
25
|
+
* @param options - Bundle configuration options
|
|
26
|
+
* @returns Bundle result with output path and optional master key
|
|
27
|
+
*/
|
|
28
|
+
/**
 * Default connection-string values auto-populated for docker compose
 * services when the stage secrets do not already define them.
 */
const DOCKER_SERVICE_ENV_VARS = {
	postgres: {
		DATABASE_URL: "postgresql://postgres:postgres@postgres:5432/app",
	},
	redis: {
		REDIS_URL: "redis://redis:6379",
	},
	rabbitmq: {
		RABBITMQ_URL: "amqp://rabbitmq:5672",
	},
};
|
|
34
|
+
/**
 * Bundle the server application by invoking `npx tsdown` as a child process.
 *
 * When `stage` is provided, stage secrets are loaded, optionally validated
 * against the env vars required by `constructs`, then encrypted and embedded
 * into the bundle via tsdown `--env.*` defines; the ephemeral master key from
 * encryption is returned for the deployment step.
 *
 * @param options - Bundle configuration: entryPoint, outputDir, minify,
 *   sourcemap, external, plus optional stage, constructs, dockerServices.
 * @returns Object with `outputPath` (bundled server.mjs) and, when a stage
 *   was given, the ephemeral `masterKey`.
 * @throws Error when no secrets exist for the stage, when required env vars
 *   are missing, or when the tsdown process fails.
 */
async function bundleServer(options) {
	const { entryPoint, outputDir, minify, sourcemap, external, stage, constructs, dockerServices } = options;
	// Ensure the output directory exists before tsdown writes into it.
	await mkdir(outputDir, { recursive: true });
	// tsdown CLI args: ESM output targeting Node 22, ignoring any tsdown
	// config file in the workspace (--no-config), cleaning the out dir.
	const args = [
		"npx",
		"tsdown",
		entryPoint,
		"--no-config",
		"--out-dir",
		outputDir,
		"--format",
		"esm",
		"--platform",
		"node",
		"--target",
		"node22",
		"--clean"
	];
	if (minify) args.push("--minify");
	if (sourcemap) args.push("--sourcemap");
	// Caller-supplied packages to leave unbundled.
	for (const ext of external) args.push("--external", ext);
	// Node builtins are always external.
	args.push("--external", "node:*");
	// Ephemeral key; set only when stage secrets are encrypted below.
	let masterKey;
	if (stage) {
		// Lazily load the secrets storage/encryption helpers (sibling dist chunks).
		const { readStageSecrets, toEmbeddableSecrets, validateEnvironmentVariables } = await import("./storage-nkGIjeXt.mjs");
		const { encryptSecrets, generateDefineOptions } = await import("./encryption-h4Nb6W-M.mjs");
		const secrets = await readStageSecrets(stage);
		if (!secrets) throw new Error(`No secrets found for stage "${stage}". Run "gkm secrets:init --stage ${stage}" first.`);
		// Auto-populate default connection URLs for enabled docker compose
		// services, unless the stage secrets already define them.
		if (dockerServices) {
			for (const [service, enabled] of Object.entries(dockerServices)) if (enabled && DOCKER_SERVICE_ENV_VARS[service]) for (const [envVar, defaultValue] of Object.entries(DOCKER_SERVICE_ENV_VARS[service])) {
				// Skip when already present in either urls or custom secrets.
				const urlKey = envVar;
				if (!secrets.urls[urlKey] && !secrets.custom[envVar]) {
					secrets.urls[urlKey] = defaultValue;
					console.log(` Auto-populated ${envVar} from docker compose`);
				}
			}
		}
		// Validate that every env var the constructs require is present in the
		// stage secrets; fail fast with an actionable message otherwise.
		if (constructs && constructs.length > 0) {
			console.log(" Analyzing environment variable requirements...");
			const requiredVars = await collectRequiredEnvVars(constructs);
			if (requiredVars.length > 0) {
				const validation = validateEnvironmentVariables(requiredVars, secrets);
				if (!validation.valid) {
					const errorMessage = [
						`Missing environment variables for stage "${stage}":`,
						"",
						...validation.missing.map((v) => ` ❌ ${v}`),
						"",
						"To fix this, either:",
						` 1. Add the missing variables to .gkm/secrets/${stage}.json using:`,
						` gkm secrets:set <KEY> <VALUE> --stage ${stage}`,
						"",
						` 2. Or import from a JSON file:`,
						` gkm secrets:import secrets.json --stage ${stage}`,
						"",
						"Required variables:",
						...validation.required.map((v) => validation.missing.includes(v) ? ` ❌ ${v}` : ` ✓ ${v}`)
					].join("\n");
					throw new Error(errorMessage);
				}
				console.log(` ✓ All ${requiredVars.length} required environment variables found`);
			}
		}
		// Encrypt the secrets and inject them at build time via tsdown's
		// --env.* define options; keep the master key for the deploy step.
		const embeddable = toEmbeddableSecrets(secrets);
		const encrypted = encryptSecrets(embeddable);
		masterKey = encrypted.masterKey;
		const defines = generateDefineOptions(encrypted);
		for (const [key, value] of Object.entries(defines)) args.push(`--env.${key}`, value);
		console.log(` Secrets encrypted for stage "${stage}"`);
	}
	const mjsOutput = join(outputDir, "server.mjs");
	try {
		// spawnSync with an args array avoids shell-escaping issues with the
		// --env.* values; a shell is used only on Windows so npx resolves.
		const [cmd, ...cmdArgs] = args;
		const result = spawnSync(cmd, cmdArgs, {
			cwd: process.cwd(),
			stdio: "inherit",
			shell: process.platform === "win32"
		});
		if (result.error) throw result.error;
		if (result.status !== 0) throw new Error(`tsdown exited with code ${result.status}`);
		// tsdown emits server.js for ESM output; rename to .mjs for explicit ESM.
		const jsOutput = join(outputDir, "server.js");
		if (existsSync(jsOutput)) await rename(jsOutput, mjsOutput);
		// Prepend a shebang so the bundled file is directly executable.
		const { readFile: readFile$1 } = await import("node:fs/promises");
		const content = await readFile$1(mjsOutput, "utf-8");
		if (!content.startsWith("#!")) await writeFile(mjsOutput, `#!/usr/bin/env node\n${content}`);
	} catch (error) {
		// Wrap every failure in a single, uniform error for the CLI caller.
		throw new Error(`Failed to bundle server: ${error instanceof Error ? error.message : "Unknown error"}`);
	}
	return {
		outputPath: mjsOutput,
		masterKey
	};
}
|
|
127
|
+
|
|
128
|
+
//#endregion
|
|
129
|
+
export { bundleServer };
|
|
130
|
+
//# sourceMappingURL=bundler-DWctKN1z.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"bundler-DWctKN1z.mjs","names":["constructs: Construct[]","DOCKER_SERVICE_ENV_VARS: Record<string, Record<string, string>>","options: BundleOptions","masterKey: string | undefined"],"sources":["../src/build/bundler.ts"],"sourcesContent":["import { spawnSync } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { mkdir, rename, writeFile } from 'node:fs/promises';\nimport { join } from 'node:path';\nimport type { Construct } from '@geekmidas/constructs';\n\nexport interface BundleOptions {\n\t/** Entry point file (e.g., .gkm/server/server.ts) */\n\tentryPoint: string;\n\t/** Output directory for bundled files */\n\toutputDir: string;\n\t/** Minify the output (default: true) */\n\tminify: boolean;\n\t/** Generate sourcemaps (default: false) */\n\tsourcemap: boolean;\n\t/** Packages to exclude from bundling */\n\texternal: string[];\n\t/** Stage for secrets injection (optional) */\n\tstage?: string;\n\t/** Constructs to validate environment variables for */\n\tconstructs?: Construct[];\n\t/** Docker compose services configured (for auto-populating env vars) */\n\tdockerServices?: {\n\t\tpostgres?: boolean;\n\t\tredis?: boolean;\n\t\trabbitmq?: boolean;\n\t};\n}\n\nexport interface BundleResult {\n\t/** Path to the bundled output */\n\toutputPath: string;\n\t/** Ephemeral master key for deployment (only if stage was provided) */\n\tmasterKey?: string;\n}\n\n/**\n * Collect all required environment variables from constructs.\n * Uses the SnifferEnvironmentParser to detect which env vars each service needs.\n *\n * @param constructs - Array of constructs to analyze\n * @returns Deduplicated array of required environment variable names\n */\nasync function collectRequiredEnvVars(\n\tconstructs: Construct[],\n): Promise<string[]> {\n\tconst allEnvVars = new Set<string>();\n\n\tfor (const construct of constructs) {\n\t\tconst envVars = await construct.getEnvironment();\n\t\tenvVars.forEach((v) => allEnvVars.add(v));\n\t}\n\n\treturn 
Array.from(allEnvVars).sort();\n}\n\n/**\n * Bundle the server application using tsdown\n *\n * @param options - Bundle configuration options\n * @returns Bundle result with output path and optional master key\n */\n/** Default env var values for docker compose services */\nconst DOCKER_SERVICE_ENV_VARS: Record<string, Record<string, string>> = {\n\tpostgres: {\n\t\tDATABASE_URL: 'postgresql://postgres:postgres@postgres:5432/app',\n\t},\n\tredis: {\n\t\tREDIS_URL: 'redis://redis:6379',\n\t},\n\trabbitmq: {\n\t\tRABBITMQ_URL: 'amqp://rabbitmq:5672',\n\t},\n};\n\nexport async function bundleServer(\n\toptions: BundleOptions,\n): Promise<BundleResult> {\n\tconst {\n\t\tentryPoint,\n\t\toutputDir,\n\t\tminify,\n\t\tsourcemap,\n\t\texternal,\n\t\tstage,\n\t\tconstructs,\n\t\tdockerServices,\n\t} = options;\n\n\t// Ensure output directory exists\n\tawait mkdir(outputDir, { recursive: true });\n\n\t// Build command-line arguments for tsdown\n\tconst args = [\n\t\t'npx',\n\t\t'tsdown',\n\t\tentryPoint,\n\t\t'--no-config', // Don't use any config file from workspace\n\t\t'--out-dir',\n\t\toutputDir,\n\t\t'--format',\n\t\t'esm',\n\t\t'--platform',\n\t\t'node',\n\t\t'--target',\n\t\t'node22',\n\t\t'--clean',\n\t];\n\n\tif (minify) {\n\t\targs.push('--minify');\n\t}\n\n\tif (sourcemap) {\n\t\targs.push('--sourcemap');\n\t}\n\n\t// Add external packages\n\tfor (const ext of external) {\n\t\targs.push('--external', ext);\n\t}\n\n\t// Always exclude node: builtins\n\targs.push('--external', 'node:*');\n\n\t// Handle secrets injection if stage is provided\n\tlet masterKey: string | undefined;\n\n\tif (stage) {\n\t\tconst {\n\t\t\treadStageSecrets,\n\t\t\ttoEmbeddableSecrets,\n\t\t\tvalidateEnvironmentVariables,\n\t\t} = await import('../secrets/storage');\n\t\tconst { encryptSecrets, generateDefineOptions } = await import(\n\t\t\t'../secrets/encryption'\n\t\t);\n\n\t\tconst secrets = await readStageSecrets(stage);\n\n\t\tif (!secrets) {\n\t\t\tthrow new Error(\n\t\t\t\t`No secrets 
found for stage \"${stage}\". Run \"gkm secrets:init --stage ${stage}\" first.`,\n\t\t\t);\n\t\t}\n\n\t\t// Auto-populate env vars from docker compose services\n\t\tif (dockerServices) {\n\t\t\tfor (const [service, enabled] of Object.entries(dockerServices)) {\n\t\t\t\tif (enabled && DOCKER_SERVICE_ENV_VARS[service]) {\n\t\t\t\t\tfor (const [envVar, defaultValue] of Object.entries(\n\t\t\t\t\t\tDOCKER_SERVICE_ENV_VARS[service],\n\t\t\t\t\t)) {\n\t\t\t\t\t\t// Check if not already in urls or custom\n\t\t\t\t\t\tconst urlKey = envVar as keyof typeof secrets.urls;\n\t\t\t\t\t\tif (!secrets.urls[urlKey] && !secrets.custom[envVar]) {\n\t\t\t\t\t\t\tsecrets.urls[urlKey] = defaultValue;\n\t\t\t\t\t\t\tconsole.log(` Auto-populated ${envVar} from docker compose`);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Validate environment variables if constructs are provided\n\t\tif (constructs && constructs.length > 0) {\n\t\t\tconsole.log(' Analyzing environment variable requirements...');\n\t\t\tconst requiredVars = await collectRequiredEnvVars(constructs);\n\n\t\t\tif (requiredVars.length > 0) {\n\t\t\t\tconst validation = validateEnvironmentVariables(requiredVars, secrets);\n\n\t\t\t\tif (!validation.valid) {\n\t\t\t\t\tconst errorMessage = [\n\t\t\t\t\t\t`Missing environment variables for stage \"${stage}\":`,\n\t\t\t\t\t\t'',\n\t\t\t\t\t\t...validation.missing.map((v) => ` ❌ ${v}`),\n\t\t\t\t\t\t'',\n\t\t\t\t\t\t'To fix this, either:',\n\t\t\t\t\t\t` 1. Add the missing variables to .gkm/secrets/${stage}.json using:`,\n\t\t\t\t\t\t` gkm secrets:set <KEY> <VALUE> --stage ${stage}`,\n\t\t\t\t\t\t'',\n\t\t\t\t\t\t` 2. Or import from a JSON file:`,\n\t\t\t\t\t\t` gkm secrets:import secrets.json --stage ${stage}`,\n\t\t\t\t\t\t'',\n\t\t\t\t\t\t'Required variables:',\n\t\t\t\t\t\t...validation.required.map((v) =>\n\t\t\t\t\t\t\tvalidation.missing.includes(v) ? 
` ❌ ${v}` : ` ✓ ${v}`,\n\t\t\t\t\t\t),\n\t\t\t\t\t].join('\\n');\n\n\t\t\t\t\tthrow new Error(errorMessage);\n\t\t\t\t}\n\n\t\t\t\tconsole.log(\n\t\t\t\t\t` ✓ All ${requiredVars.length} required environment variables found`,\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\n\t\t// Convert to embeddable format and encrypt\n\t\tconst embeddable = toEmbeddableSecrets(secrets);\n\t\tconst encrypted = encryptSecrets(embeddable);\n\t\tmasterKey = encrypted.masterKey;\n\n\t\t// Add define options for build-time injection using tsdown's --env.* format\n\t\tconst defines = generateDefineOptions(encrypted);\n\t\tfor (const [key, value] of Object.entries(defines)) {\n\t\t\targs.push(`--env.${key}`, value);\n\t\t}\n\n\t\tconsole.log(` Secrets encrypted for stage \"${stage}\"`);\n\t}\n\n\tconst mjsOutput = join(outputDir, 'server.mjs');\n\n\ttry {\n\t\t// Run tsdown with command-line arguments\n\t\t// Use spawnSync with args array to avoid shell escaping issues with --define values\n\t\t// args is always populated with ['npx', 'tsdown', ...] 
so cmd is never undefined\n\t\tconst [cmd, ...cmdArgs] = args as [string, ...string[]];\n\t\tconst result = spawnSync(cmd, cmdArgs, {\n\t\t\tcwd: process.cwd(),\n\t\t\tstdio: 'inherit',\n\t\t\tshell: process.platform === 'win32', // Only use shell on Windows for npx resolution\n\t\t});\n\n\t\tif (result.error) {\n\t\t\tthrow result.error;\n\t\t}\n\t\tif (result.status !== 0) {\n\t\t\tthrow new Error(`tsdown exited with code ${result.status}`);\n\t\t}\n\n\t\t// Rename output to .mjs for explicit ESM\n\t\t// tsdown outputs as server.js for ESM format\n\t\tconst jsOutput = join(outputDir, 'server.js');\n\n\t\tif (existsSync(jsOutput)) {\n\t\t\tawait rename(jsOutput, mjsOutput);\n\t\t}\n\n\t\t// Add shebang to the bundled file\n\t\tconst { readFile } = await import('node:fs/promises');\n\t\tconst content = await readFile(mjsOutput, 'utf-8');\n\t\tif (!content.startsWith('#!')) {\n\t\t\tawait writeFile(mjsOutput, `#!/usr/bin/env node\\n${content}`);\n\t\t}\n\t} catch (error) {\n\t\tthrow new Error(\n\t\t\t`Failed to bundle server: ${error instanceof Error ? 
error.message : 'Unknown error'}`,\n\t\t);\n\t}\n\n\treturn {\n\t\toutputPath: mjsOutput,\n\t\tmasterKey,\n\t};\n}\n"],"mappings":";;;;;;;;;;;;;AA2CA,eAAe,uBACdA,YACoB;CACpB,MAAM,6BAAa,IAAI;AAEvB,MAAK,MAAM,aAAa,YAAY;EACnC,MAAM,UAAU,MAAM,UAAU,gBAAgB;AAChD,UAAQ,QAAQ,CAAC,MAAM,WAAW,IAAI,EAAE,CAAC;CACzC;AAED,QAAO,MAAM,KAAK,WAAW,CAAC,MAAM;AACpC;;;;;;;;AASD,MAAMC,0BAAkE;CACvE,UAAU,EACT,cAAc,mDACd;CACD,OAAO,EACN,WAAW,qBACX;CACD,UAAU,EACT,cAAc,uBACd;AACD;AAED,eAAsB,aACrBC,SACwB;CACxB,MAAM,EACL,YACA,WACA,QACA,WACA,UACA,OACA,YACA,gBACA,GAAG;AAGJ,OAAM,MAAM,WAAW,EAAE,WAAW,KAAM,EAAC;CAG3C,MAAM,OAAO;EACZ;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;CACA;AAED,KAAI,OACH,MAAK,KAAK,WAAW;AAGtB,KAAI,UACH,MAAK,KAAK,cAAc;AAIzB,MAAK,MAAM,OAAO,SACjB,MAAK,KAAK,cAAc,IAAI;AAI7B,MAAK,KAAK,cAAc,SAAS;CAGjC,IAAIC;AAEJ,KAAI,OAAO;EACV,MAAM,EACL,kBACA,qBACA,8BACA,GAAG,MAAM,OAAO;EACjB,MAAM,EAAE,gBAAgB,uBAAuB,GAAG,MAAM,OACvD;EAGD,MAAM,UAAU,MAAM,iBAAiB,MAAM;AAE7C,OAAK,QACJ,OAAM,IAAI,OACR,8BAA8B,MAAM,mCAAmC,MAAM;AAKhF,MAAI,gBACH;QAAK,MAAM,CAAC,SAAS,QAAQ,IAAI,OAAO,QAAQ,eAAe,CAC9D,KAAI,WAAW,wBAAwB,SACtC,MAAK,MAAM,CAAC,QAAQ,aAAa,IAAI,OAAO,QAC3C,wBAAwB,SACxB,EAAE;IAEF,MAAM,SAAS;AACf,SAAK,QAAQ,KAAK,YAAY,QAAQ,OAAO,SAAS;AACrD,aAAQ,KAAK,UAAU;AACvB,aAAQ,KAAK,mBAAmB,OAAO,sBAAsB;IAC7D;GACD;EAEF;AAIF,MAAI,cAAc,WAAW,SAAS,GAAG;AACxC,WAAQ,IAAI,mDAAmD;GAC/D,MAAM,eAAe,MAAM,uBAAuB,WAAW;AAE7D,OAAI,aAAa,SAAS,GAAG;IAC5B,MAAM,aAAa,6BAA6B,cAAc,QAAQ;AAEtE,SAAK,WAAW,OAAO;KACtB,MAAM,eAAe;OACnB,2CAA2C,MAAM;MAClD;MACA,GAAG,WAAW,QAAQ,IAAI,CAAC,OAAO,MAAM,EAAE,EAAE;MAC5C;MACA;OACC,iDAAiD,MAAM;OACvD,6CAA6C,MAAM;MACpD;OACC;OACA,+CAA+C,MAAM;MACtD;MACA;MACA,GAAG,WAAW,SAAS,IAAI,CAAC,MAC3B,WAAW,QAAQ,SAAS,EAAE,IAAI,MAAM,EAAE,KAAK,MAAM,EAAE,EACvD;KACD,EAAC,KAAK,KAAK;AAEZ,WAAM,IAAI,MAAM;IAChB;AAED,YAAQ,KACN,UAAU,aAAa,OAAO,uCAC/B;GACD;EACD;EAGD,MAAM,aAAa,oBAAoB,QAAQ;EAC/C,MAAM,YAAY,eAAe,WAAW;AAC5C,cAAY,UAAU;EAGtB,MAAM,UAAU,sBAAsB,UAAU;AAChD,OAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,QAAQ,CACjD,MAAK,MAAM,QAAQ,IAAI,GAAG,MAAM;AAGjC,UA
AQ,KAAK,iCAAiC,MAAM,GAAG;CACvD;CAED,MAAM,YAAY,KAAK,WAAW,aAAa;AAE/C,KAAI;EAIH,MAAM,CAAC,KAAK,GAAG,QAAQ,GAAG;EAC1B,MAAM,SAAS,UAAU,KAAK,SAAS;GACtC,KAAK,QAAQ,KAAK;GAClB,OAAO;GACP,OAAO,QAAQ,aAAa;EAC5B,EAAC;AAEF,MAAI,OAAO,MACV,OAAM,OAAO;AAEd,MAAI,OAAO,WAAW,EACrB,OAAM,IAAI,OAAO,0BAA0B,OAAO,OAAO;EAK1D,MAAM,WAAW,KAAK,WAAW,YAAY;AAE7C,MAAI,WAAW,SAAS,CACvB,OAAM,OAAO,UAAU,UAAU;EAIlC,MAAM,EAAE,sBAAU,GAAG,MAAM,OAAO;EAClC,MAAM,UAAU,MAAM,WAAS,WAAW,QAAQ;AAClD,OAAK,QAAQ,WAAW,KAAK,CAC5B,OAAM,UAAU,YAAY,uBAAuB,QAAQ,EAAE;CAE9D,SAAQ,OAAO;AACf,QAAM,IAAI,OACR,2BAA2B,iBAAiB,QAAQ,MAAM,UAAU,gBAAgB;CAEtF;AAED,QAAO;EACN,YAAY;EACZ;CACA;AACD"}
|
package/dist/config.d.cts
CHANGED
package/dist/config.d.mts
CHANGED
|
@@ -0,0 +1,317 @@
|
|
|
1
|
+
|
|
2
|
+
//#region src/deploy/dokploy-api.ts
|
|
3
|
+
var DokployApiError = class extends Error {
|
|
4
|
+
constructor(message, status, statusText, issues) {
|
|
5
|
+
super(message);
|
|
6
|
+
this.status = status;
|
|
7
|
+
this.statusText = statusText;
|
|
8
|
+
this.issues = issues;
|
|
9
|
+
this.name = "DokployApiError";
|
|
10
|
+
}
|
|
11
|
+
};
|
|
12
|
+
/**
|
|
13
|
+
* Dokploy API client
|
|
14
|
+
*/
|
|
15
|
+
var DokployApi = class {
|
|
16
|
+
baseUrl;
|
|
17
|
+
token;
|
|
18
|
+
constructor(options) {
|
|
19
|
+
this.baseUrl = options.baseUrl.replace(/\/$/, "");
|
|
20
|
+
this.token = options.token;
|
|
21
|
+
}
|
|
22
|
+
/**
|
|
23
|
+
* Make a GET request to the Dokploy API
|
|
24
|
+
*/
|
|
25
|
+
async get(endpoint) {
|
|
26
|
+
return this.request("GET", endpoint);
|
|
27
|
+
}
|
|
28
|
+
/**
|
|
29
|
+
* Make a POST request to the Dokploy API
|
|
30
|
+
*/
|
|
31
|
+
async post(endpoint, body) {
|
|
32
|
+
return this.request("POST", endpoint, body);
|
|
33
|
+
}
|
|
34
|
+
/**
|
|
35
|
+
* Make a request to the Dokploy API
|
|
36
|
+
*/
|
|
37
|
+
async request(method, endpoint, body) {
|
|
38
|
+
const url = `${this.baseUrl}/api/${endpoint}`;
|
|
39
|
+
const response = await fetch(url, {
|
|
40
|
+
method,
|
|
41
|
+
headers: {
|
|
42
|
+
"Content-Type": "application/json",
|
|
43
|
+
"x-api-key": this.token
|
|
44
|
+
},
|
|
45
|
+
body: body ? JSON.stringify(body) : void 0
|
|
46
|
+
});
|
|
47
|
+
if (!response.ok) {
|
|
48
|
+
let errorMessage = `Dokploy API error: ${response.status} ${response.statusText}`;
|
|
49
|
+
let issues;
|
|
50
|
+
try {
|
|
51
|
+
const errorBody = await response.json();
|
|
52
|
+
if (errorBody.message) errorMessage = `Dokploy API error: ${errorBody.message}`;
|
|
53
|
+
if (errorBody.issues?.length) {
|
|
54
|
+
issues = errorBody.issues;
|
|
55
|
+
errorMessage += `\n Issues: ${errorBody.issues.map((i) => i.message).join(", ")}`;
|
|
56
|
+
}
|
|
57
|
+
} catch {}
|
|
58
|
+
throw new DokployApiError(errorMessage, response.status, response.statusText, issues);
|
|
59
|
+
}
|
|
60
|
+
const text = await response.text();
|
|
61
|
+
if (!text || text.trim() === "") return void 0;
|
|
62
|
+
return JSON.parse(text);
|
|
63
|
+
}
|
|
64
|
+
/**
|
|
65
|
+
* Validate the API token by making a test request
|
|
66
|
+
*/
|
|
67
|
+
async validateToken() {
|
|
68
|
+
try {
|
|
69
|
+
await this.get("project.all");
|
|
70
|
+
return true;
|
|
71
|
+
} catch {
|
|
72
|
+
return false;
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
/**
|
|
76
|
+
* List all projects
|
|
77
|
+
*/
|
|
78
|
+
async listProjects() {
|
|
79
|
+
return this.get("project.all");
|
|
80
|
+
}
|
|
81
|
+
/**
|
|
82
|
+
* Get a single project by ID
|
|
83
|
+
*/
|
|
84
|
+
async getProject(projectId) {
|
|
85
|
+
return this.get(`project.one?projectId=${projectId}`);
|
|
86
|
+
}
|
|
87
|
+
/**
|
|
88
|
+
* Create a new project
|
|
89
|
+
*/
|
|
90
|
+
async createProject(name, description) {
|
|
91
|
+
return this.post("project.create", {
|
|
92
|
+
name,
|
|
93
|
+
description: description ?? `Created by gkm CLI`
|
|
94
|
+
});
|
|
95
|
+
}
|
|
96
|
+
/**
|
|
97
|
+
* Create an environment in a project
|
|
98
|
+
*/
|
|
99
|
+
async createEnvironment(projectId, name, description) {
|
|
100
|
+
return this.post("environment.create", {
|
|
101
|
+
projectId,
|
|
102
|
+
name,
|
|
103
|
+
description: description ?? `${name} environment`
|
|
104
|
+
});
|
|
105
|
+
}
|
|
106
|
+
/**
|
|
107
|
+
* Create a new application
|
|
108
|
+
*/
|
|
109
|
+
async createApplication(name, projectId, environmentId) {
|
|
110
|
+
return this.post("application.create", {
|
|
111
|
+
name,
|
|
112
|
+
projectId,
|
|
113
|
+
environmentId,
|
|
114
|
+
appName: name.toLowerCase().replace(/[^a-z0-9-]/g, "-")
|
|
115
|
+
});
|
|
116
|
+
}
|
|
117
|
+
/**
|
|
118
|
+
* Update an application
|
|
119
|
+
*/
|
|
120
|
+
async updateApplication(applicationId, updates) {
|
|
121
|
+
await this.post("application.update", {
|
|
122
|
+
applicationId,
|
|
123
|
+
...updates
|
|
124
|
+
});
|
|
125
|
+
}
|
|
126
|
+
/**
|
|
127
|
+
* Save environment variables for an application
|
|
128
|
+
*/
|
|
129
|
+
async saveApplicationEnv(applicationId, env) {
|
|
130
|
+
await this.post("application.saveEnvironment", {
|
|
131
|
+
applicationId,
|
|
132
|
+
env
|
|
133
|
+
});
|
|
134
|
+
}
|
|
135
|
+
/**
|
|
136
|
+
* Configure application to use Docker provider (pull from registry)
|
|
137
|
+
*
|
|
138
|
+
* For private registries, either:
|
|
139
|
+
* - Use `registryId` if the registry is configured in Dokploy
|
|
140
|
+
* - Or provide `username`, `password`, and `registryUrl` directly
|
|
141
|
+
*/
|
|
142
|
+
async saveDockerProvider(applicationId, dockerImage, options) {
|
|
143
|
+
await this.post("application.saveDockerProvider", {
|
|
144
|
+
applicationId,
|
|
145
|
+
dockerImage,
|
|
146
|
+
...options
|
|
147
|
+
});
|
|
148
|
+
}
|
|
149
|
+
/**
|
|
150
|
+
* Deploy an application
|
|
151
|
+
*/
|
|
152
|
+
async deployApplication(applicationId) {
|
|
153
|
+
await this.post("application.deploy", { applicationId });
|
|
154
|
+
}
|
|
155
|
+
/**
|
|
156
|
+
* List all registries
|
|
157
|
+
*/
|
|
158
|
+
async listRegistries() {
|
|
159
|
+
return this.get("registry.all");
|
|
160
|
+
}
|
|
161
|
+
/**
|
|
162
|
+
* Create a new registry
|
|
163
|
+
*/
|
|
164
|
+
async createRegistry(registryName, registryUrl, username, password, options) {
|
|
165
|
+
return this.post("registry.create", {
|
|
166
|
+
registryName,
|
|
167
|
+
registryUrl,
|
|
168
|
+
username,
|
|
169
|
+
password,
|
|
170
|
+
imagePrefix: options?.imagePrefix
|
|
171
|
+
});
|
|
172
|
+
}
|
|
173
|
+
/**
|
|
174
|
+
* Get a registry by ID
|
|
175
|
+
*/
|
|
176
|
+
async getRegistry(registryId) {
|
|
177
|
+
return this.get(`registry.one?registryId=${registryId}`);
|
|
178
|
+
}
|
|
179
|
+
/**
|
|
180
|
+
* Update a registry
|
|
181
|
+
*/
|
|
182
|
+
async updateRegistry(registryId, updates) {
|
|
183
|
+
await this.post("registry.update", {
|
|
184
|
+
registryId,
|
|
185
|
+
...updates
|
|
186
|
+
});
|
|
187
|
+
}
|
|
188
|
+
/**
|
|
189
|
+
* Delete a registry
|
|
190
|
+
*/
|
|
191
|
+
async deleteRegistry(registryId) {
|
|
192
|
+
await this.post("registry.remove", { registryId });
|
|
193
|
+
}
|
|
194
|
+
/**
|
|
195
|
+
* Create a new Postgres database
|
|
196
|
+
*/
|
|
197
|
+
async createPostgres(name, projectId, environmentId, options) {
|
|
198
|
+
return this.post("postgres.create", {
|
|
199
|
+
name,
|
|
200
|
+
projectId,
|
|
201
|
+
environmentId,
|
|
202
|
+
appName: options?.appName ?? name.toLowerCase().replace(/[^a-z0-9-]/g, "-"),
|
|
203
|
+
databaseName: options?.databaseName ?? "app",
|
|
204
|
+
databaseUser: options?.databaseUser ?? "postgres",
|
|
205
|
+
databasePassword: options?.databasePassword,
|
|
206
|
+
dockerImage: options?.dockerImage ?? "postgres:16-alpine",
|
|
207
|
+
description: options?.description ?? `Postgres database for ${name}`
|
|
208
|
+
});
|
|
209
|
+
}
|
|
210
|
+
/**
|
|
211
|
+
* Get a Postgres database by ID
|
|
212
|
+
*/
|
|
213
|
+
async getPostgres(postgresId) {
|
|
214
|
+
return this.get(`postgres.one?postgresId=${postgresId}`);
|
|
215
|
+
}
|
|
216
|
+
/**
|
|
217
|
+
* Deploy a Postgres database
|
|
218
|
+
*/
|
|
219
|
+
async deployPostgres(postgresId) {
|
|
220
|
+
await this.post("postgres.deploy", { postgresId });
|
|
221
|
+
}
|
|
222
|
+
/**
|
|
223
|
+
* Save environment variables for Postgres
|
|
224
|
+
*/
|
|
225
|
+
async savePostgresEnv(postgresId, env) {
|
|
226
|
+
await this.post("postgres.saveEnvironment", {
|
|
227
|
+
postgresId,
|
|
228
|
+
env
|
|
229
|
+
});
|
|
230
|
+
}
|
|
231
|
+
/**
|
|
232
|
+
* Set external port for Postgres (for external access)
|
|
233
|
+
*/
|
|
234
|
+
async savePostgresExternalPort(postgresId, externalPort) {
|
|
235
|
+
await this.post("postgres.saveExternalPort", {
|
|
236
|
+
postgresId,
|
|
237
|
+
externalPort
|
|
238
|
+
});
|
|
239
|
+
}
|
|
240
|
+
/**
|
|
241
|
+
* Update Postgres configuration
|
|
242
|
+
*/
|
|
243
|
+
async updatePostgres(postgresId, updates) {
|
|
244
|
+
await this.post("postgres.update", {
|
|
245
|
+
postgresId,
|
|
246
|
+
...updates
|
|
247
|
+
});
|
|
248
|
+
}
|
|
249
|
+
/**
|
|
250
|
+
* Create a new Redis instance
|
|
251
|
+
*/
|
|
252
|
+
async createRedis(name, projectId, environmentId, options) {
|
|
253
|
+
return this.post("redis.create", {
|
|
254
|
+
name,
|
|
255
|
+
projectId,
|
|
256
|
+
environmentId,
|
|
257
|
+
appName: options?.appName ?? name.toLowerCase().replace(/[^a-z0-9-]/g, "-"),
|
|
258
|
+
databasePassword: options?.databasePassword,
|
|
259
|
+
dockerImage: options?.dockerImage ?? "redis:7-alpine",
|
|
260
|
+
description: options?.description ?? `Redis instance for ${name}`
|
|
261
|
+
});
|
|
262
|
+
}
|
|
263
|
+
/**
|
|
264
|
+
* Get a Redis instance by ID
|
|
265
|
+
*/
|
|
266
|
+
async getRedis(redisId) {
|
|
267
|
+
return this.get(`redis.one?redisId=${redisId}`);
|
|
268
|
+
}
|
|
269
|
+
/**
|
|
270
|
+
* Deploy a Redis instance
|
|
271
|
+
*/
|
|
272
|
+
async deployRedis(redisId) {
|
|
273
|
+
await this.post("redis.deploy", { redisId });
|
|
274
|
+
}
|
|
275
|
+
/**
|
|
276
|
+
* Save environment variables for Redis
|
|
277
|
+
*/
|
|
278
|
+
async saveRedisEnv(redisId, env) {
|
|
279
|
+
await this.post("redis.saveEnvironment", {
|
|
280
|
+
redisId,
|
|
281
|
+
env
|
|
282
|
+
});
|
|
283
|
+
}
|
|
284
|
+
/**
|
|
285
|
+
* Set external port for Redis (for external access)
|
|
286
|
+
*/
|
|
287
|
+
async saveRedisExternalPort(redisId, externalPort) {
|
|
288
|
+
await this.post("redis.saveExternalPort", {
|
|
289
|
+
redisId,
|
|
290
|
+
externalPort
|
|
291
|
+
});
|
|
292
|
+
}
|
|
293
|
+
/**
|
|
294
|
+
* Update Redis configuration
|
|
295
|
+
*/
|
|
296
|
+
async updateRedis(redisId, updates) {
|
|
297
|
+
await this.post("redis.update", {
|
|
298
|
+
redisId,
|
|
299
|
+
...updates
|
|
300
|
+
});
|
|
301
|
+
}
|
|
302
|
+
};
|
|
303
|
+
|
|
304
|
+
//#endregion
|
|
305
|
+
Object.defineProperty(exports, 'DokployApi', {
|
|
306
|
+
enumerable: true,
|
|
307
|
+
get: function () {
|
|
308
|
+
return DokployApi;
|
|
309
|
+
}
|
|
310
|
+
});
|
|
311
|
+
Object.defineProperty(exports, 'DokployApiError', {
|
|
312
|
+
enumerable: true,
|
|
313
|
+
get: function () {
|
|
314
|
+
return DokployApiError;
|
|
315
|
+
}
|
|
316
|
+
});
|
|
317
|
+
//# sourceMappingURL=dokploy-api-C7F9VykY.cjs.map
|