@geekmidas/cli 1.5.0 → 1.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/dist/{HostingerProvider-B9N-TKbp.mjs → HostingerProvider-402UdK89.mjs} +34 -1
- package/dist/HostingerProvider-402UdK89.mjs.map +1 -0
- package/dist/{HostingerProvider-DUV9-Tzg.cjs → HostingerProvider-BiXdHjiq.cjs} +34 -1
- package/dist/HostingerProvider-BiXdHjiq.cjs.map +1 -0
- package/dist/{Route53Provider-C8mS0zY6.mjs → Route53Provider-DbBo7Uz5.mjs} +53 -1
- package/dist/Route53Provider-DbBo7Uz5.mjs.map +1 -0
- package/dist/{Route53Provider-Bs7Arms9.cjs → Route53Provider-kfJ77LmL.cjs} +53 -1
- package/dist/Route53Provider-kfJ77LmL.cjs.map +1 -0
- package/dist/backup-provisioner-B5e-F6zX.cjs +164 -0
- package/dist/backup-provisioner-B5e-F6zX.cjs.map +1 -0
- package/dist/backup-provisioner-BIArpmTr.mjs +163 -0
- package/dist/backup-provisioner-BIArpmTr.mjs.map +1 -0
- package/dist/{config-ZQM1vBoz.cjs → config-6JHOwLCx.cjs} +30 -2
- package/dist/{config-ZQM1vBoz.cjs.map → config-6JHOwLCx.cjs.map} +1 -1
- package/dist/{config-DfCJ29PQ.mjs → config-DxASSNjr.mjs} +25 -3
- package/dist/{config-DfCJ29PQ.mjs.map → config-DxASSNjr.mjs.map} +1 -1
- package/dist/config.cjs +3 -2
- package/dist/config.d.cts +14 -2
- package/dist/config.d.cts.map +1 -1
- package/dist/config.d.mts +15 -3
- package/dist/config.d.mts.map +1 -1
- package/dist/config.mjs +3 -3
- package/dist/{dokploy-api-z0833e7r.mjs → dokploy-api-2ldYoN3i.mjs} +131 -1
- package/dist/dokploy-api-2ldYoN3i.mjs.map +1 -0
- package/dist/dokploy-api-C93pveuy.mjs +3 -0
- package/dist/dokploy-api-CbDh4o93.cjs +3 -0
- package/dist/{dokploy-api-CQvhV6Hd.cjs → dokploy-api-DLgvEQlr.cjs} +131 -1
- package/dist/dokploy-api-DLgvEQlr.cjs.map +1 -0
- package/dist/{index-C0SpUT9Y.d.mts → index-C-KxSGGK.d.mts} +133 -31
- package/dist/index-C-KxSGGK.d.mts.map +1 -0
- package/dist/{index-B58qjyBd.d.cts → index-Cyk2rTyj.d.cts} +132 -30
- package/dist/index-Cyk2rTyj.d.cts.map +1 -0
- package/dist/index.cjs +662 -152
- package/dist/index.cjs.map +1 -1
- package/dist/index.mjs +626 -116
- package/dist/index.mjs.map +1 -1
- package/dist/{openapi-BcSjLfWq.mjs → openapi-BYlyAbH3.mjs} +6 -5
- package/dist/openapi-BYlyAbH3.mjs.map +1 -0
- package/dist/{openapi-D6Hcfov0.cjs → openapi-CnvwSRDU.cjs} +6 -5
- package/dist/openapi-CnvwSRDU.cjs.map +1 -0
- package/dist/openapi.cjs +3 -3
- package/dist/openapi.d.cts +1 -0
- package/dist/openapi.d.cts.map +1 -1
- package/dist/openapi.d.mts +2 -1
- package/dist/openapi.d.mts.map +1 -1
- package/dist/openapi.mjs +3 -3
- package/dist/{types-B9UZ7fOG.d.mts → types-CZg5iUgD.d.mts} +1 -1
- package/dist/{types-B9UZ7fOG.d.mts.map → types-CZg5iUgD.d.mts.map} +1 -1
- package/dist/workspace/index.cjs +1 -1
- package/dist/workspace/index.d.cts +1 -1
- package/dist/workspace/index.d.mts +2 -2
- package/dist/workspace/index.mjs +1 -1
- package/dist/{workspace-BW2iU37P.mjs → workspace-9IQIjwkQ.mjs} +20 -4
- package/dist/workspace-9IQIjwkQ.mjs.map +1 -0
- package/dist/{workspace-2Do2YcGZ.cjs → workspace-D2ocAlpl.cjs} +20 -4
- package/dist/workspace-D2ocAlpl.cjs.map +1 -0
- package/examples/cron-example.ts +6 -6
- package/examples/function-example.ts +1 -1
- package/package.json +6 -3
- package/src/config.ts +44 -0
- package/src/deploy/__tests__/backup-provisioner.spec.ts +428 -0
- package/src/deploy/__tests__/createDnsProvider.spec.ts +23 -0
- package/src/deploy/__tests__/env-resolver.spec.ts +1 -1
- package/src/deploy/__tests__/undeploy.spec.ts +758 -0
- package/src/deploy/backup-provisioner.ts +316 -0
- package/src/deploy/dns/DnsProvider.ts +39 -1
- package/src/deploy/dns/HostingerProvider.ts +74 -0
- package/src/deploy/dns/Route53Provider.ts +81 -0
- package/src/deploy/dns/index.ts +25 -0
- package/src/deploy/dokploy-api.ts +237 -0
- package/src/deploy/index.ts +71 -13
- package/src/deploy/state.ts +171 -0
- package/src/deploy/undeploy.ts +407 -0
- package/src/dev/__tests__/index.spec.ts +490 -0
- package/src/dev/index.ts +313 -18
- package/src/generators/FunctionGenerator.ts +1 -1
- package/src/generators/Generator.ts +4 -1
- package/src/init/__tests__/generators.spec.ts +167 -18
- package/src/init/__tests__/init.spec.ts +66 -3
- package/src/init/generators/auth.ts +6 -5
- package/src/init/generators/config.ts +49 -7
- package/src/init/generators/docker.ts +8 -8
- package/src/init/generators/index.ts +1 -0
- package/src/init/generators/models.ts +3 -5
- package/src/init/generators/package.ts +4 -0
- package/src/init/generators/test.ts +133 -0
- package/src/init/generators/ui.ts +13 -12
- package/src/init/generators/web.ts +9 -8
- package/src/init/index.ts +2 -0
- package/src/init/templates/api.ts +6 -6
- package/src/init/templates/minimal.ts +2 -2
- package/src/init/templates/worker.ts +2 -2
- package/src/init/versions.ts +3 -3
- package/src/openapi.ts +6 -2
- package/src/test/__tests__/__fixtures__/workspace.ts +104 -0
- package/src/test/__tests__/api.spec.ts +199 -0
- package/src/test/__tests__/auth.spec.ts +162 -0
- package/src/test/__tests__/index.spec.ts +323 -0
- package/src/test/__tests__/web.spec.ts +210 -0
- package/src/test/index.ts +165 -14
- package/src/workspace/__tests__/index.spec.ts +3 -0
- package/src/workspace/index.ts +4 -2
- package/src/workspace/schema.ts +26 -0
- package/src/workspace/types.ts +14 -37
- package/dist/HostingerProvider-B9N-TKbp.mjs.map +0 -1
- package/dist/HostingerProvider-DUV9-Tzg.cjs.map +0 -1
- package/dist/Route53Provider-Bs7Arms9.cjs.map +0 -1
- package/dist/Route53Provider-C8mS0zY6.mjs.map +0 -1
- package/dist/dokploy-api-CQvhV6Hd.cjs.map +0 -1
- package/dist/dokploy-api-CWc02yyg.cjs +0 -3
- package/dist/dokploy-api-DSJYNx88.mjs +0 -3
- package/dist/dokploy-api-z0833e7r.mjs.map +0 -1
- package/dist/index-B58qjyBd.d.cts.map +0 -1
- package/dist/index-C0SpUT9Y.d.mts.map +0 -1
- package/dist/openapi-BcSjLfWq.mjs.map +0 -1
- package/dist/openapi-D6Hcfov0.cjs.map +0 -1
- package/dist/workspace-2Do2YcGZ.cjs.map +0 -1
- package/dist/workspace-BW2iU37P.mjs.map +0 -1
package/dist/index.mjs
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
#!/usr/bin/env -S npx tsx
|
|
2
2
|
import { __require } from "./chunk-Duj1WY3L.mjs";
|
|
3
|
-
import { getAppBuildOrder, getDependencyEnvVars, getDeployTargetError, isDeployTargetSupported } from "./workspace-
|
|
4
|
-
import { getAppNameFromCwd, loadAppConfig, loadConfig, loadWorkspaceConfig, parseModuleConfig } from "./config-
|
|
3
|
+
import { getAppBuildOrder, getDependencyEnvVars, getDeployTargetError, isDeployTargetSupported } from "./workspace-9IQIjwkQ.mjs";
|
|
4
|
+
import { getAppNameFromCwd, loadAppConfig, loadConfig, loadWorkspaceAppInfo, loadWorkspaceConfig, parseModuleConfig } from "./config-DxASSNjr.mjs";
|
|
5
5
|
import { getCredentialsPath, getDokployCredentials, getDokployRegistryId, getDokployToken, removeDokployCredentials, storeDokployCredentials, storeDokployRegistryId } from "./credentials-s1kLcIzK.mjs";
|
|
6
|
-
import { ConstructGenerator, EndpointGenerator, OPENAPI_OUTPUT_PATH, generateOpenApi, openapiCommand, resolveOpenApiConfig } from "./openapi-
|
|
6
|
+
import { ConstructGenerator, EndpointGenerator, OPENAPI_OUTPUT_PATH, generateOpenApi, openapiCommand, resolveOpenApiConfig } from "./openapi-BYlyAbH3.mjs";
|
|
7
7
|
import { getKeyPath, maskPassword, readStageSecrets, secretsExist, setCustomSecret, toEmbeddableSecrets, writeStageSecrets } from "./storage-DmCbr6DI.mjs";
|
|
8
|
-
import { DokployApi } from "./dokploy-api-
|
|
8
|
+
import { DokployApi } from "./dokploy-api-2ldYoN3i.mjs";
|
|
9
9
|
import { encryptSecrets } from "./encryption-BOH5M-f-.mjs";
|
|
10
10
|
import { CachedStateProvider } from "./CachedStateProvider-BDq5WqSy.mjs";
|
|
11
11
|
import { generateReactQueryCommand } from "./openapi-react-query-DaTMSPD5.mjs";
|
|
@@ -21,6 +21,7 @@ import { createServer } from "node:net";
|
|
|
21
21
|
import chokidar from "chokidar";
|
|
22
22
|
import { config } from "dotenv";
|
|
23
23
|
import fg from "fast-glob";
|
|
24
|
+
import { parse as parse$1 } from "yaml";
|
|
24
25
|
import { Cron } from "@geekmidas/constructs/crons";
|
|
25
26
|
import { Function } from "@geekmidas/constructs/functions";
|
|
26
27
|
import { Subscriber } from "@geekmidas/constructs/subscribers";
|
|
@@ -32,7 +33,7 @@ import prompts from "prompts";
|
|
|
32
33
|
|
|
33
34
|
//#region package.json
|
|
34
35
|
var name = "@geekmidas/cli";
|
|
35
|
-
var version = "1.
|
|
36
|
+
var version = "1.5.1";
|
|
36
37
|
var description = "CLI tools for building Lambda handlers, server applications, and generating OpenAPI specs";
|
|
37
38
|
var private$1 = false;
|
|
38
39
|
var type = "module";
|
|
@@ -78,7 +79,9 @@ var repository = {
|
|
|
78
79
|
};
|
|
79
80
|
var dependencies = {
|
|
80
81
|
"@apidevtools/swagger-parser": "^10.1.0",
|
|
82
|
+
"@aws-sdk/client-iam": "~3.971.0",
|
|
81
83
|
"@aws-sdk/client-route-53": "~3.971.0",
|
|
84
|
+
"@aws-sdk/client-s3": "~3.971.0",
|
|
82
85
|
"@aws-sdk/client-ssm": "~3.971.0",
|
|
83
86
|
"@aws-sdk/credential-providers": "~3.971.0",
|
|
84
87
|
"@geekmidas/constructs": "workspace:~",
|
|
@@ -95,7 +98,8 @@ var dependencies = {
|
|
|
95
98
|
"openapi-typescript": "^7.4.2",
|
|
96
99
|
"pg": "~8.17.1",
|
|
97
100
|
"prompts": "~2.4.2",
|
|
98
|
-
"tsx": "~4.20.3"
|
|
101
|
+
"tsx": "~4.20.3",
|
|
102
|
+
"yaml": "~2.8.2"
|
|
99
103
|
};
|
|
100
104
|
var devDependencies = {
|
|
101
105
|
"@geekmidas/testkit": "workspace:*",
|
|
@@ -132,7 +136,7 @@ const logger$11 = console;
|
|
|
132
136
|
* Validate Dokploy token by making a test API call
|
|
133
137
|
*/
|
|
134
138
|
async function validateDokployToken(endpoint, token) {
|
|
135
|
-
const { DokployApi: DokployApi$1 } = await import("./dokploy-api-
|
|
139
|
+
const { DokployApi: DokployApi$1 } = await import("./dokploy-api-C93pveuy.mjs");
|
|
136
140
|
const api = new DokployApi$1({
|
|
137
141
|
baseUrl: endpoint,
|
|
138
142
|
token
|
|
@@ -417,7 +421,7 @@ var FunctionGenerator = class extends ConstructGenerator {
|
|
|
417
421
|
const importPath = relativePath.replace(/\.ts$/, ".js");
|
|
418
422
|
const relativeEnvParserPath = relative(dirname(handlerPath), context.envParserPath);
|
|
419
423
|
const relativeLoggerPath = relative(dirname(handlerPath), context.loggerPath);
|
|
420
|
-
const content = `import { AWSLambdaFunction } from '@geekmidas/constructs/
|
|
424
|
+
const content = `import { AWSLambdaFunction } from '@geekmidas/constructs/aws';
|
|
421
425
|
import { ${exportName} } from '${importPath}';
|
|
422
426
|
import ${context.envParserImportPattern} from '${relativeEnvParserPath}';
|
|
423
427
|
import ${context.loggerImportPattern} from '${relativeLoggerPath}';
|
|
@@ -773,6 +777,152 @@ async function findAvailablePort(preferredPort, maxAttempts = 10) {
|
|
|
773
777
|
}
|
|
774
778
|
throw new Error(`Could not find an available port after trying ${maxAttempts} ports starting from ${preferredPort}`);
|
|
775
779
|
}
|
|
780
|
+
const PORT_STATE_PATH = ".gkm/ports.json";
|
|
781
|
+
/**
|
|
782
|
+
* Parse docker-compose.yml and extract all port mappings that use env var interpolation.
|
|
783
|
+
* Entries like `'${POSTGRES_HOST_PORT:-5432}:5432'` are captured.
|
|
784
|
+
* Fixed port mappings like `'5050:80'` are skipped.
|
|
785
|
+
* @internal Exported for testing
|
|
786
|
+
*/
|
|
787
|
+
function parseComposePortMappings(composePath) {
|
|
788
|
+
if (!existsSync(composePath)) return [];
|
|
789
|
+
const content = readFileSync(composePath, "utf-8");
|
|
790
|
+
const compose = parse$1(content);
|
|
791
|
+
if (!compose?.services) return [];
|
|
792
|
+
const results = [];
|
|
793
|
+
for (const [serviceName, serviceConfig] of Object.entries(compose.services)) for (const portMapping of serviceConfig?.ports ?? []) {
|
|
794
|
+
const match = String(portMapping).match(/\$\{(\w+):-(\d+)\}:(\d+)/);
|
|
795
|
+
if (match?.[1] && match[2] && match[3]) results.push({
|
|
796
|
+
service: serviceName,
|
|
797
|
+
envVar: match[1],
|
|
798
|
+
defaultPort: Number(match[2]),
|
|
799
|
+
containerPort: Number(match[3])
|
|
800
|
+
});
|
|
801
|
+
}
|
|
802
|
+
return results;
|
|
803
|
+
}
|
|
804
|
+
/**
|
|
805
|
+
* Load saved port state from .gkm/ports.json.
|
|
806
|
+
* @internal Exported for testing
|
|
807
|
+
*/
|
|
808
|
+
async function loadPortState(workspaceRoot) {
|
|
809
|
+
try {
|
|
810
|
+
const raw = await readFile(join(workspaceRoot, PORT_STATE_PATH), "utf-8");
|
|
811
|
+
return JSON.parse(raw);
|
|
812
|
+
} catch {
|
|
813
|
+
return {};
|
|
814
|
+
}
|
|
815
|
+
}
|
|
816
|
+
/**
|
|
817
|
+
* Save port state to .gkm/ports.json.
|
|
818
|
+
* @internal Exported for testing
|
|
819
|
+
*/
|
|
820
|
+
async function savePortState(workspaceRoot, ports) {
|
|
821
|
+
const dir = join(workspaceRoot, ".gkm");
|
|
822
|
+
await mkdir(dir, { recursive: true });
|
|
823
|
+
await writeFile(join(workspaceRoot, PORT_STATE_PATH), `${JSON.stringify(ports, null, 2)}\n`);
|
|
824
|
+
}
|
|
825
|
+
/**
|
|
826
|
+
* Check if a project's own Docker container is running and return its host port.
|
|
827
|
+
* Uses `docker compose port` scoped to the project's compose file.
|
|
828
|
+
* @internal Exported for testing
|
|
829
|
+
*/
|
|
830
|
+
function getContainerHostPort(workspaceRoot, service, containerPort) {
|
|
831
|
+
try {
|
|
832
|
+
const result = execSync(`docker compose port ${service} ${containerPort}`, {
|
|
833
|
+
cwd: workspaceRoot,
|
|
834
|
+
stdio: "pipe"
|
|
835
|
+
}).toString().trim();
|
|
836
|
+
const match = result.match(/:(\d+)$/);
|
|
837
|
+
return match ? Number(match[1]) : null;
|
|
838
|
+
} catch {
|
|
839
|
+
return null;
|
|
840
|
+
}
|
|
841
|
+
}
|
|
842
|
+
/**
|
|
843
|
+
* Resolve host ports for Docker services by parsing docker-compose.yml.
|
|
844
|
+
* Priority: running container → saved state → find available port.
|
|
845
|
+
* Persists resolved ports to .gkm/ports.json.
|
|
846
|
+
* @internal Exported for testing
|
|
847
|
+
*/
|
|
848
|
+
async function resolveServicePorts(workspaceRoot) {
|
|
849
|
+
const composePath = join(workspaceRoot, "docker-compose.yml");
|
|
850
|
+
const mappings = parseComposePortMappings(composePath);
|
|
851
|
+
if (mappings.length === 0) return {
|
|
852
|
+
dockerEnv: {},
|
|
853
|
+
ports: {},
|
|
854
|
+
mappings: []
|
|
855
|
+
};
|
|
856
|
+
const savedState = await loadPortState(workspaceRoot);
|
|
857
|
+
const dockerEnv = {};
|
|
858
|
+
const ports = {};
|
|
859
|
+
logger$9.log("\n🔌 Resolving service ports...");
|
|
860
|
+
for (const mapping of mappings) {
|
|
861
|
+
const containerPort = getContainerHostPort(workspaceRoot, mapping.service, mapping.containerPort);
|
|
862
|
+
if (containerPort !== null) {
|
|
863
|
+
ports[mapping.envVar] = containerPort;
|
|
864
|
+
dockerEnv[mapping.envVar] = String(containerPort);
|
|
865
|
+
logger$9.log(` 🔄 ${mapping.service}:${mapping.containerPort}: reusing existing container on port ${containerPort}`);
|
|
866
|
+
continue;
|
|
867
|
+
}
|
|
868
|
+
const savedPort = savedState[mapping.envVar];
|
|
869
|
+
if (savedPort && await isPortAvailable(savedPort)) {
|
|
870
|
+
ports[mapping.envVar] = savedPort;
|
|
871
|
+
dockerEnv[mapping.envVar] = String(savedPort);
|
|
872
|
+
logger$9.log(` 💾 ${mapping.service}:${mapping.containerPort}: using saved port ${savedPort}`);
|
|
873
|
+
continue;
|
|
874
|
+
}
|
|
875
|
+
const resolvedPort = await findAvailablePort(mapping.defaultPort);
|
|
876
|
+
ports[mapping.envVar] = resolvedPort;
|
|
877
|
+
dockerEnv[mapping.envVar] = String(resolvedPort);
|
|
878
|
+
if (resolvedPort !== mapping.defaultPort) logger$9.log(` ⚡ ${mapping.service}:${mapping.containerPort}: port ${mapping.defaultPort} occupied, using port ${resolvedPort}`);
|
|
879
|
+
else logger$9.log(` ✅ ${mapping.service}:${mapping.containerPort}: using default port ${resolvedPort}`);
|
|
880
|
+
}
|
|
881
|
+
await savePortState(workspaceRoot, ports);
|
|
882
|
+
return {
|
|
883
|
+
dockerEnv,
|
|
884
|
+
ports,
|
|
885
|
+
mappings
|
|
886
|
+
};
|
|
887
|
+
}
|
|
888
|
+
/**
|
|
889
|
+
* Replace a port in a URL string.
|
|
890
|
+
* Handles both `hostname:port` and `localhost:port` patterns.
|
|
891
|
+
* @internal Exported for testing
|
|
892
|
+
*/
|
|
893
|
+
function replacePortInUrl(url, oldPort, newPort) {
|
|
894
|
+
if (oldPort === newPort) return url;
|
|
895
|
+
return url.replace(new RegExp(`:${oldPort}(?=/|$)`, "g"), `:${newPort}`);
|
|
896
|
+
}
|
|
897
|
+
/**
|
|
898
|
+
* Rewrite connection URLs and port vars in secrets with resolved ports.
|
|
899
|
+
* Uses the parsed compose mappings to determine which default ports to replace.
|
|
900
|
+
* Pure transform — does not modify secrets on disk.
|
|
901
|
+
* @internal Exported for testing
|
|
902
|
+
*/
|
|
903
|
+
function rewriteUrlsWithPorts(secrets, resolvedPorts) {
|
|
904
|
+
const { ports, mappings } = resolvedPorts;
|
|
905
|
+
const result = { ...secrets };
|
|
906
|
+
const portReplacements = [];
|
|
907
|
+
for (const mapping of mappings) {
|
|
908
|
+
const resolved = ports[mapping.envVar];
|
|
909
|
+
if (resolved !== void 0) portReplacements.push({
|
|
910
|
+
defaultPort: mapping.defaultPort,
|
|
911
|
+
resolvedPort: resolved
|
|
912
|
+
});
|
|
913
|
+
}
|
|
914
|
+
for (const [key, value] of Object.entries(result)) {
|
|
915
|
+
if (!key.endsWith("_PORT")) continue;
|
|
916
|
+
for (const { defaultPort, resolvedPort } of portReplacements) if (value === String(defaultPort)) result[key] = String(resolvedPort);
|
|
917
|
+
}
|
|
918
|
+
for (const [key, value] of Object.entries(result)) {
|
|
919
|
+
if (!key.endsWith("_URL") && key !== "DATABASE_URL") continue;
|
|
920
|
+
let rewritten = value;
|
|
921
|
+
for (const { defaultPort, resolvedPort } of portReplacements) rewritten = replacePortInUrl(rewritten, defaultPort, resolvedPort);
|
|
922
|
+
result[key] = rewritten;
|
|
923
|
+
}
|
|
924
|
+
return result;
|
|
925
|
+
}
|
|
776
926
|
/**
|
|
777
927
|
* Normalize telescope configuration
|
|
778
928
|
* @internal Exported for testing
|
|
@@ -1000,8 +1150,11 @@ async function devCommand(options) {
|
|
|
1000
1150
|
rebuildTimeout = setTimeout(async () => {
|
|
1001
1151
|
try {
|
|
1002
1152
|
logger$9.log("🔄 Rebuilding...");
|
|
1003
|
-
await buildServer(config$1, buildContext, resolved.providers[0], enableOpenApi, appRoot);
|
|
1004
|
-
if (enableOpenApi) await generateOpenApi(config$1, {
|
|
1153
|
+
await buildServer(config$1, buildContext, resolved.providers[0], enableOpenApi, appRoot, true);
|
|
1154
|
+
if (enableOpenApi) await generateOpenApi(config$1, {
|
|
1155
|
+
silent: true,
|
|
1156
|
+
bustCache: true
|
|
1157
|
+
});
|
|
1005
1158
|
logger$9.log("✅ Rebuild complete, restarting server...");
|
|
1006
1159
|
await devServer.restart();
|
|
1007
1160
|
} catch (error) {
|
|
@@ -1154,7 +1307,7 @@ async function loadSecretsForApp(secretsRoot, appName) {
|
|
|
1154
1307
|
* Start docker-compose services for the workspace.
|
|
1155
1308
|
* @internal Exported for testing
|
|
1156
1309
|
*/
|
|
1157
|
-
async function startWorkspaceServices(workspace) {
|
|
1310
|
+
async function startWorkspaceServices(workspace, portEnv) {
|
|
1158
1311
|
const services = workspace.services;
|
|
1159
1312
|
if (!services.db && !services.cache && !services.mail) return;
|
|
1160
1313
|
const servicesToStart = [];
|
|
@@ -1171,7 +1324,11 @@ async function startWorkspaceServices(workspace) {
|
|
|
1171
1324
|
}
|
|
1172
1325
|
execSync(`docker compose up -d ${servicesToStart.join(" ")}`, {
|
|
1173
1326
|
cwd: workspace.root,
|
|
1174
|
-
stdio: "inherit"
|
|
1327
|
+
stdio: "inherit",
|
|
1328
|
+
env: {
|
|
1329
|
+
...process.env,
|
|
1330
|
+
...portEnv
|
|
1331
|
+
}
|
|
1175
1332
|
});
|
|
1176
1333
|
logger$9.log("✅ Services started");
|
|
1177
1334
|
} catch (error) {
|
|
@@ -1219,8 +1376,9 @@ async function workspaceDevCommand(workspace, options) {
|
|
|
1219
1376
|
const copiedCount = clientResults.filter((r) => r.success).length;
|
|
1220
1377
|
if (copiedCount > 0) logger$9.log(`\n📦 Copied ${copiedCount} API client(s)`);
|
|
1221
1378
|
}
|
|
1222
|
-
await
|
|
1223
|
-
|
|
1379
|
+
const resolvedPorts = await resolveServicePorts(workspace.root);
|
|
1380
|
+
await startWorkspaceServices(workspace, resolvedPorts.dockerEnv);
|
|
1381
|
+
const secretsEnv = rewriteUrlsWithPorts(await loadDevSecrets(workspace), resolvedPorts);
|
|
1224
1382
|
if (Object.keys(secretsEnv).length > 0) logger$9.log(` Loaded ${Object.keys(secretsEnv).length} secret(s)`);
|
|
1225
1383
|
const dependencyEnv = generateAllDependencyEnvVars(workspace);
|
|
1226
1384
|
if (Object.keys(dependencyEnv).length > 0) {
|
|
@@ -1345,16 +1503,16 @@ async function workspaceDevCommand(workspace, options) {
|
|
|
1345
1503
|
});
|
|
1346
1504
|
});
|
|
1347
1505
|
}
|
|
1348
|
-
async function buildServer(config$1, context, provider, enableOpenApi, appRoot = process.cwd()) {
|
|
1506
|
+
async function buildServer(config$1, context, provider, enableOpenApi, appRoot = process.cwd(), bustCache = false) {
|
|
1349
1507
|
const endpointGenerator = new EndpointGenerator();
|
|
1350
1508
|
const functionGenerator = new FunctionGenerator();
|
|
1351
1509
|
const cronGenerator = new CronGenerator();
|
|
1352
1510
|
const subscriberGenerator = new SubscriberGenerator();
|
|
1353
1511
|
const [allEndpoints, allFunctions, allCrons, allSubscribers] = await Promise.all([
|
|
1354
|
-
endpointGenerator.load(config$1.routes, appRoot),
|
|
1355
|
-
config$1.functions ? functionGenerator.load(config$1.functions, appRoot) : [],
|
|
1356
|
-
config$1.crons ? cronGenerator.load(config$1.crons, appRoot) : [],
|
|
1357
|
-
config$1.subscribers ? subscriberGenerator.load(config$1.subscribers, appRoot) : []
|
|
1512
|
+
endpointGenerator.load(config$1.routes, appRoot, bustCache),
|
|
1513
|
+
config$1.functions ? functionGenerator.load(config$1.functions, appRoot, bustCache) : [],
|
|
1514
|
+
config$1.crons ? cronGenerator.load(config$1.crons, appRoot, bustCache) : [],
|
|
1515
|
+
config$1.subscribers ? subscriberGenerator.load(config$1.subscribers, appRoot, bustCache) : []
|
|
1358
1516
|
]);
|
|
1359
1517
|
const outputDir = join(appRoot, ".gkm", provider);
|
|
1360
1518
|
await mkdir(outputDir, { recursive: true });
|
|
@@ -1443,10 +1601,10 @@ async function prepareEntryCredentials(options) {
|
|
|
1443
1601
|
let secretsRoot = cwd;
|
|
1444
1602
|
let appName;
|
|
1445
1603
|
try {
|
|
1446
|
-
const
|
|
1447
|
-
workspaceAppPort =
|
|
1448
|
-
secretsRoot =
|
|
1449
|
-
appName =
|
|
1604
|
+
const appInfo = await loadWorkspaceAppInfo(cwd);
|
|
1605
|
+
workspaceAppPort = appInfo.app.port;
|
|
1606
|
+
secretsRoot = appInfo.workspaceRoot;
|
|
1607
|
+
appName = appInfo.appName;
|
|
1450
1608
|
} catch (error) {
|
|
1451
1609
|
logger$9.log(`⚠️ Could not load workspace config: ${error.message}`);
|
|
1452
1610
|
secretsRoot = findSecretsRoot(cwd);
|
|
@@ -1744,17 +1902,39 @@ async function execCommand(commandArgs, options = {}) {
|
|
|
1744
1902
|
if (commandArgs.length === 0) throw new Error("No command specified. Usage: gkm exec -- <command>");
|
|
1745
1903
|
const defaultEnv = loadEnvFiles(".env");
|
|
1746
1904
|
if (defaultEnv.loaded.length > 0) logger$9.log(`📦 Loaded env: ${defaultEnv.loaded.join(", ")}`);
|
|
1747
|
-
const { credentials, secretsJsonPath, appName } = await prepareEntryCredentials({ cwd });
|
|
1905
|
+
const { credentials, secretsJsonPath, appName, secretsRoot } = await prepareEntryCredentials({ cwd });
|
|
1748
1906
|
if (appName) logger$9.log(`📦 App: ${appName}`);
|
|
1749
1907
|
const secretCount = Object.keys(credentials).filter((k) => k !== "PORT").length;
|
|
1750
1908
|
if (secretCount > 0) logger$9.log(`🔐 Loaded ${secretCount} secret(s)`);
|
|
1909
|
+
const composePath = join(secretsRoot, "docker-compose.yml");
|
|
1910
|
+
const mappings = parseComposePortMappings(composePath);
|
|
1911
|
+
if (mappings.length > 0) {
|
|
1912
|
+
const ports = await loadPortState(secretsRoot);
|
|
1913
|
+
if (Object.keys(ports).length > 0) {
|
|
1914
|
+
const rewritten = rewriteUrlsWithPorts(credentials, {
|
|
1915
|
+
dockerEnv: {},
|
|
1916
|
+
ports,
|
|
1917
|
+
mappings
|
|
1918
|
+
});
|
|
1919
|
+
Object.assign(credentials, rewritten);
|
|
1920
|
+
logger$9.log(`🔌 Applied ${Object.keys(ports).length} port mapping(s)`);
|
|
1921
|
+
}
|
|
1922
|
+
}
|
|
1923
|
+
try {
|
|
1924
|
+
const appInfo = await loadWorkspaceAppInfo(cwd);
|
|
1925
|
+
if (appInfo.appName) {
|
|
1926
|
+
const depEnv = getDependencyEnvVars(appInfo.workspace, appInfo.appName);
|
|
1927
|
+
Object.assign(credentials, depEnv);
|
|
1928
|
+
}
|
|
1929
|
+
} catch {}
|
|
1751
1930
|
const preloadDir = join(cwd, ".gkm");
|
|
1752
1931
|
await mkdir(preloadDir, { recursive: true });
|
|
1753
1932
|
const preloadPath = join(preloadDir, "credentials-preload.ts");
|
|
1754
1933
|
await createCredentialsPreload(preloadPath, secretsJsonPath);
|
|
1755
|
-
const [cmd, ...
|
|
1934
|
+
const [cmd, ...rawArgs] = commandArgs;
|
|
1756
1935
|
if (!cmd) throw new Error("No command specified");
|
|
1757
|
-
|
|
1936
|
+
const args = rawArgs.map((arg) => arg.replace(/\$PORT\b/g, credentials.PORT ?? "3000"));
|
|
1937
|
+
logger$9.log(`🚀 Running: ${[cmd, ...args].join(" ")}`);
|
|
1758
1938
|
const existingNodeOptions = process.env.NODE_OPTIONS ?? "";
|
|
1759
1939
|
const tsxImport = "--import=tsx";
|
|
1760
1940
|
const preloadImport = `--import=${preloadPath}`;
|
|
@@ -2176,6 +2356,41 @@ function isDnsVerified(state, hostname, serverIp) {
|
|
|
2176
2356
|
const record = state?.dnsVerified?.[hostname];
|
|
2177
2357
|
return record?.serverIp === serverIp;
|
|
2178
2358
|
}
|
|
2359
|
+
/**
|
|
2360
|
+
* Get the key for a DNS record in state
|
|
2361
|
+
*/
|
|
2362
|
+
function getDnsRecordKey(name$1, type$1) {
|
|
2363
|
+
return `${name$1}:${type$1}`;
|
|
2364
|
+
}
|
|
2365
|
+
/**
|
|
2366
|
+
* Set a created DNS record in state (mutates state)
|
|
2367
|
+
*/
|
|
2368
|
+
function setDnsRecord(state, record) {
|
|
2369
|
+
if (!state.dnsRecords) state.dnsRecords = {};
|
|
2370
|
+
const key = getDnsRecordKey(record.name, record.type);
|
|
2371
|
+
state.dnsRecords[key] = {
|
|
2372
|
+
...record,
|
|
2373
|
+
createdAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
2374
|
+
};
|
|
2375
|
+
}
|
|
2376
|
+
/**
|
|
2377
|
+
* Get backup state from state
|
|
2378
|
+
*/
|
|
2379
|
+
function getBackupState(state) {
|
|
2380
|
+
return state?.backups;
|
|
2381
|
+
}
|
|
2382
|
+
/**
|
|
2383
|
+
* Set backup state (mutates state)
|
|
2384
|
+
*/
|
|
2385
|
+
function setBackupState(state, backupState) {
|
|
2386
|
+
state.backups = backupState;
|
|
2387
|
+
}
|
|
2388
|
+
/**
|
|
2389
|
+
* Set postgres backup ID in state (mutates state)
|
|
2390
|
+
*/
|
|
2391
|
+
function setPostgresBackupId(state, backupId) {
|
|
2392
|
+
if (state.backups) state.backups.postgresBackupId = backupId;
|
|
2393
|
+
}
|
|
2179
2394
|
|
|
2180
2395
|
//#endregion
|
|
2181
2396
|
//#region src/deploy/dns/DnsProvider.ts
|
|
@@ -2183,7 +2398,7 @@ function isDnsVerified(state, hostname, serverIp) {
|
|
|
2183
2398
|
* Check if value is a DnsProvider implementation.
|
|
2184
2399
|
*/
|
|
2185
2400
|
function isDnsProvider(value) {
|
|
2186
|
-
return typeof value === "object" && value !== null && typeof value.name === "string" && typeof value.getRecords === "function" && typeof value.upsertRecords === "function";
|
|
2401
|
+
return typeof value === "object" && value !== null && typeof value.name === "string" && typeof value.getRecords === "function" && typeof value.upsertRecords === "function" && typeof value.deleteRecords === "function";
|
|
2187
2402
|
}
|
|
2188
2403
|
/**
|
|
2189
2404
|
* Create a DNS provider based on configuration.
|
|
@@ -2199,11 +2414,11 @@ async function createDnsProvider(options) {
|
|
|
2199
2414
|
if (isDnsProvider(config$1.provider)) return config$1.provider;
|
|
2200
2415
|
const provider = config$1.provider;
|
|
2201
2416
|
if (provider === "hostinger") {
|
|
2202
|
-
const { HostingerProvider } = await import("./HostingerProvider-
|
|
2417
|
+
const { HostingerProvider } = await import("./HostingerProvider-402UdK89.mjs");
|
|
2203
2418
|
return new HostingerProvider();
|
|
2204
2419
|
}
|
|
2205
2420
|
if (provider === "route53") {
|
|
2206
|
-
const { Route53Provider } = await import("./Route53Provider-
|
|
2421
|
+
const { Route53Provider } = await import("./Route53Provider-DbBo7Uz5.mjs");
|
|
2207
2422
|
const route53Config = config$1;
|
|
2208
2423
|
return new Route53Provider({
|
|
2209
2424
|
region: route53Config.region,
|
|
@@ -2414,8 +2629,13 @@ async function createDnsRecordsForDomain(records, rootDomain, providerConfig) {
|
|
|
2414
2629
|
* Supports both legacy single-domain format and new multi-domain format:
|
|
2415
2630
|
* - Legacy: { provider: 'hostinger', domain: 'example.com' }
|
|
2416
2631
|
* - Multi: { 'example.com': { provider: 'hostinger' }, 'example.dev': { provider: 'route53' } }
|
|
2632
|
+
*
|
|
2633
|
+
* @param appHostnames - Map of app names to hostnames
|
|
2634
|
+
* @param dnsConfig - DNS configuration (legacy or multi-domain)
|
|
2635
|
+
* @param dokployEndpoint - Dokploy server endpoint to resolve IP from
|
|
2636
|
+
* @param state - Optional state to save created records for later deletion
|
|
2417
2637
|
*/
|
|
2418
|
-
async function orchestrateDns(appHostnames, dnsConfig, dokployEndpoint) {
|
|
2638
|
+
async function orchestrateDns(appHostnames, dnsConfig, dokployEndpoint, state) {
|
|
2419
2639
|
if (!dnsConfig) return null;
|
|
2420
2640
|
const normalizedConfig = normalizeDnsConfig(dnsConfig);
|
|
2421
2641
|
logger$6.log("\n🌐 Setting up DNS records...");
|
|
@@ -2461,6 +2681,15 @@ async function orchestrateDns(appHostnames, dnsConfig, dokployEndpoint) {
|
|
|
2461
2681
|
logger$6.log(` ⚠ ${failed} record(s) failed for ${rootDomain}`);
|
|
2462
2682
|
hasFailures = true;
|
|
2463
2683
|
}
|
|
2684
|
+
if (state) {
|
|
2685
|
+
for (const record of domainRecords) if (record.created || record.existed) setDnsRecord(state, {
|
|
2686
|
+
domain: rootDomain,
|
|
2687
|
+
name: record.subdomain,
|
|
2688
|
+
type: record.type,
|
|
2689
|
+
value: record.value,
|
|
2690
|
+
ttl: "ttl" in providerConfig && providerConfig.ttl ? providerConfig.ttl : 300
|
|
2691
|
+
});
|
|
2692
|
+
}
|
|
2464
2693
|
printDnsRecordsTable(domainRecords, rootDomain);
|
|
2465
2694
|
if (providerConfig.provider === "manual" || failed > 0) printDnsRecordsSimple(domainRecords.filter((r) => !r.created && !r.existed), rootDomain);
|
|
2466
2695
|
}
|
|
@@ -5008,27 +5237,40 @@ async function initializePostgresUsers(api, postgres, serverHostname, users) {
|
|
|
5008
5237
|
for (const user of users) {
|
|
5009
5238
|
const schemaName = user.usePublicSchema ? "public" : user.name;
|
|
5010
5239
|
logger$1.log(` Creating user "${user.name}" with schema "${schemaName}"...`);
|
|
5011
|
-
|
|
5012
|
-
|
|
5013
|
-
|
|
5014
|
-
|
|
5015
|
-
|
|
5016
|
-
|
|
5017
|
-
|
|
5018
|
-
|
|
5019
|
-
|
|
5020
|
-
|
|
5240
|
+
if (user.usePublicSchema) {
|
|
5241
|
+
await client.query(`
|
|
5242
|
+
DO $$ BEGIN
|
|
5243
|
+
IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = '${user.name}') THEN
|
|
5244
|
+
CREATE USER "${user.name}" WITH PASSWORD '${user.password}';
|
|
5245
|
+
ELSE
|
|
5246
|
+
ALTER USER "${user.name}" WITH PASSWORD '${user.password}';
|
|
5247
|
+
END IF;
|
|
5248
|
+
END $$;
|
|
5249
|
+
`);
|
|
5250
|
+
await client.query(`
|
|
5021
5251
|
GRANT ALL ON SCHEMA public TO "${user.name}";
|
|
5022
5252
|
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO "${user.name}";
|
|
5023
5253
|
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO "${user.name}";
|
|
5024
5254
|
`);
|
|
5025
|
-
else
|
|
5255
|
+
} else {
|
|
5256
|
+
await client.query(`
|
|
5257
|
+
DO $$ BEGIN
|
|
5258
|
+
IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = '${user.name}') THEN
|
|
5259
|
+
CREATE USER "${user.name}" WITH PASSWORD '${user.password}';
|
|
5260
|
+
ELSE
|
|
5261
|
+
ALTER USER "${user.name}" WITH PASSWORD '${user.password}';
|
|
5262
|
+
END IF;
|
|
5263
|
+
-- Set search_path in same transaction to avoid tuple conflict
|
|
5264
|
+
ALTER USER "${user.name}" SET search_path TO "${schemaName}";
|
|
5265
|
+
END $$;
|
|
5266
|
+
`);
|
|
5267
|
+
await client.query(`
|
|
5026
5268
|
CREATE SCHEMA IF NOT EXISTS "${schemaName}" AUTHORIZATION "${user.name}";
|
|
5027
|
-
ALTER USER "${user.name}" SET search_path TO "${schemaName}";
|
|
5028
5269
|
GRANT USAGE ON SCHEMA "${schemaName}" TO "${user.name}";
|
|
5029
5270
|
GRANT ALL ON ALL TABLES IN SCHEMA "${schemaName}" TO "${user.name}";
|
|
5030
5271
|
ALTER DEFAULT PRIVILEGES IN SCHEMA "${schemaName}" GRANT ALL ON TABLES TO "${user.name}";
|
|
5031
5272
|
`);
|
|
5273
|
+
}
|
|
5032
5274
|
logger$1.log(` ✓ User "${user.name}" configured`);
|
|
5033
5275
|
}
|
|
5034
5276
|
} finally {
|
|
@@ -5534,6 +5776,36 @@ async function workspaceDeployCommand(workspace, options) {
|
|
|
5534
5776
|
await initializePostgresUsers(api, provisionedPostgres, serverHostname, usersToCreate);
|
|
5535
5777
|
}
|
|
5536
5778
|
}
|
|
5779
|
+
if (workspace.deploy?.backups && provisionedPostgres) {
|
|
5780
|
+
logger$1.log("\n💾 Provisioning backup destination...");
|
|
5781
|
+
const { provisionBackupDestination } = await import("./backup-provisioner-BIArpmTr.mjs");
|
|
5782
|
+
const backupState = await provisionBackupDestination({
|
|
5783
|
+
api,
|
|
5784
|
+
projectId: project.projectId,
|
|
5785
|
+
projectName: workspace.name,
|
|
5786
|
+
stage,
|
|
5787
|
+
config: workspace.deploy.backups,
|
|
5788
|
+
existingState: getBackupState(state),
|
|
5789
|
+
logger: logger$1
|
|
5790
|
+
});
|
|
5791
|
+
setBackupState(state, backupState);
|
|
5792
|
+
if (!backupState.postgresBackupId) {
|
|
5793
|
+
const backupSchedule = workspace.deploy.backups.schedule ?? "0 2 * * *";
|
|
5794
|
+
const backupRetention = workspace.deploy.backups.retention ?? 30;
|
|
5795
|
+
logger$1.log(" Creating postgres backup schedule...");
|
|
5796
|
+
const backup = await api.createPostgresBackup({
|
|
5797
|
+
schedule: backupSchedule,
|
|
5798
|
+
prefix: `${stage}/postgres`,
|
|
5799
|
+
destinationId: backupState.destinationId,
|
|
5800
|
+
database: provisionedPostgres.databaseName,
|
|
5801
|
+
postgresId: provisionedPostgres.postgresId,
|
|
5802
|
+
enabled: true,
|
|
5803
|
+
keepLatestCount: backupRetention
|
|
5804
|
+
});
|
|
5805
|
+
setPostgresBackupId(state, backup.backupId);
|
|
5806
|
+
logger$1.log(` ✓ Postgres backup schedule created (${backupSchedule})`);
|
|
5807
|
+
} else logger$1.log(" ✓ Using existing postgres backup schedule");
|
|
5808
|
+
}
|
|
5537
5809
|
const publicUrls = {};
|
|
5538
5810
|
const results = [];
|
|
5539
5811
|
const dokployConfig = workspace.deploy.dokploy;
|
|
@@ -6275,16 +6547,16 @@ const GEEKMIDAS_VERSIONS = {
|
|
|
6275
6547
|
"@geekmidas/cache": "~1.0.0",
|
|
6276
6548
|
"@geekmidas/client": "~1.0.0",
|
|
6277
6549
|
"@geekmidas/cloud": "~1.0.0",
|
|
6278
|
-
"@geekmidas/constructs": "~1.0.
|
|
6550
|
+
"@geekmidas/constructs": "~1.0.5",
|
|
6279
6551
|
"@geekmidas/db": "~1.0.0",
|
|
6280
6552
|
"@geekmidas/emailkit": "~1.0.0",
|
|
6281
|
-
"@geekmidas/envkit": "~1.0.
|
|
6553
|
+
"@geekmidas/envkit": "~1.0.2",
|
|
6282
6554
|
"@geekmidas/errors": "~1.0.0",
|
|
6283
6555
|
"@geekmidas/events": "~1.0.0",
|
|
6284
6556
|
"@geekmidas/logger": "~1.0.0",
|
|
6285
6557
|
"@geekmidas/rate-limit": "~1.0.0",
|
|
6286
6558
|
"@geekmidas/schema": "~1.0.0",
|
|
6287
|
-
"@geekmidas/services": "~1.0.
|
|
6559
|
+
"@geekmidas/services": "~1.0.1",
|
|
6288
6560
|
"@geekmidas/storage": "~1.0.0",
|
|
6289
6561
|
"@geekmidas/studio": "~1.0.0",
|
|
6290
6562
|
"@geekmidas/telescope": "~1.0.0",
|
|
@@ -6337,6 +6609,7 @@ function generateAuthAppFiles(options) {
|
|
|
6337
6609
|
extends: "../../tsconfig.json",
|
|
6338
6610
|
compilerOptions: {
|
|
6339
6611
|
noEmit: true,
|
|
6612
|
+
allowImportingTsExtensions: true,
|
|
6340
6613
|
baseUrl: ".",
|
|
6341
6614
|
paths: {
|
|
6342
6615
|
"~/*": ["./src/*"],
|
|
@@ -6367,8 +6640,8 @@ export const logger = createLogger();
|
|
|
6367
6640
|
const authTs = `import { betterAuth } from 'better-auth';
|
|
6368
6641
|
import { magicLink } from 'better-auth/plugins';
|
|
6369
6642
|
import pg from 'pg';
|
|
6370
|
-
import { envParser } from './config/env.
|
|
6371
|
-
import { logger } from './config/logger.
|
|
6643
|
+
import { envParser } from './config/env.ts';
|
|
6644
|
+
import { logger } from './config/logger.ts';
|
|
6372
6645
|
|
|
6373
6646
|
// Parse auth-specific config (no defaults - values from secrets)
|
|
6374
6647
|
const authConfig = envParser
|
|
@@ -6411,9 +6684,9 @@ export type Auth = typeof auth;
|
|
|
6411
6684
|
const indexTs = `import { Hono } from 'hono';
|
|
6412
6685
|
import { cors } from 'hono/cors';
|
|
6413
6686
|
import { serve } from '@hono/node-server';
|
|
6414
|
-
import { auth } from './auth.
|
|
6415
|
-
import { envParser } from './config/env.
|
|
6416
|
-
import { logger } from './config/logger.
|
|
6687
|
+
import { auth } from './auth.ts';
|
|
6688
|
+
import { envParser } from './config/env.ts';
|
|
6689
|
+
import { logger } from './config/logger.ts';
|
|
6417
6690
|
|
|
6418
6691
|
// Parse server config (no defaults - values from secrets)
|
|
6419
6692
|
const serverConfig = envParser
|
|
@@ -6499,6 +6772,20 @@ dist/
|
|
|
6499
6772
|
//#endregion
|
|
6500
6773
|
//#region src/init/generators/config.ts
|
|
6501
6774
|
/**
|
|
6775
|
+
* Vitest config content with globalSetup for database-enabled apps
|
|
6776
|
+
*/
|
|
6777
|
+
const vitestConfigContent = `import { defineConfig } from 'vitest/config';
|
|
6778
|
+
import tsconfigPaths from 'vite-tsconfig-paths';
|
|
6779
|
+
|
|
6780
|
+
export default defineConfig({
|
|
6781
|
+
plugins: [tsconfigPaths()],
|
|
6782
|
+
test: {
|
|
6783
|
+
environment: 'node',
|
|
6784
|
+
globalSetup: './test/globalSetup.ts',
|
|
6785
|
+
},
|
|
6786
|
+
});
|
|
6787
|
+
`;
|
|
6788
|
+
/**
|
|
6502
6789
|
* Generate configuration files (gkm.config.ts, tsconfig.json, biome.json, turbo.json)
|
|
6503
6790
|
*/
|
|
6504
6791
|
function generateConfigFiles(options, template) {
|
|
@@ -6550,6 +6837,7 @@ export default defineConfig({
|
|
|
6550
6837
|
extends: "../../tsconfig.json",
|
|
6551
6838
|
compilerOptions: {
|
|
6552
6839
|
noEmit: true,
|
|
6840
|
+
allowImportingTsExtensions: true,
|
|
6553
6841
|
baseUrl: ".",
|
|
6554
6842
|
paths: {
|
|
6555
6843
|
"~/*": ["./src/*"],
|
|
@@ -6569,21 +6857,26 @@ export default defineConfig({
|
|
|
6569
6857
|
skipLibCheck: true,
|
|
6570
6858
|
forceConsistentCasingInFileNames: true,
|
|
6571
6859
|
resolveJsonModule: true,
|
|
6572
|
-
|
|
6573
|
-
|
|
6574
|
-
outDir: "./dist",
|
|
6575
|
-
rootDir: "./src"
|
|
6860
|
+
noEmit: true,
|
|
6861
|
+
allowImportingTsExtensions: true
|
|
6576
6862
|
},
|
|
6577
6863
|
include: ["src/**/*.ts"],
|
|
6578
6864
|
exclude: ["node_modules", "dist"]
|
|
6579
6865
|
};
|
|
6580
|
-
if (options.monorepo)
|
|
6581
|
-
|
|
6582
|
-
|
|
6583
|
-
|
|
6584
|
-
|
|
6585
|
-
|
|
6586
|
-
|
|
6866
|
+
if (options.monorepo) {
|
|
6867
|
+
const files$1 = [{
|
|
6868
|
+
path: "gkm.config.ts",
|
|
6869
|
+
content: gkmConfig
|
|
6870
|
+
}, {
|
|
6871
|
+
path: "tsconfig.json",
|
|
6872
|
+
content: `${JSON.stringify(tsConfig, null, 2)}\n`
|
|
6873
|
+
}];
|
|
6874
|
+
if (options.database) files$1.push({
|
|
6875
|
+
path: "vitest.config.ts",
|
|
6876
|
+
content: vitestConfigContent
|
|
6877
|
+
});
|
|
6878
|
+
return files$1;
|
|
6879
|
+
}
|
|
6587
6880
|
const biomeConfig = {
|
|
6588
6881
|
$schema: "https://biomejs.dev/schemas/2.3.0/schema.json",
|
|
6589
6882
|
vcs: {
|
|
@@ -6649,7 +6942,7 @@ export default defineConfig({
|
|
|
6649
6942
|
fmt: { outputs: [] }
|
|
6650
6943
|
}
|
|
6651
6944
|
};
|
|
6652
|
-
|
|
6945
|
+
const files = [
|
|
6653
6946
|
{
|
|
6654
6947
|
path: "gkm.config.ts",
|
|
6655
6948
|
content: gkmConfig
|
|
@@ -6667,12 +6960,18 @@ export default defineConfig({
|
|
|
6667
6960
|
content: `${JSON.stringify(turboConfig, null, 2)}\n`
|
|
6668
6961
|
}
|
|
6669
6962
|
];
|
|
6963
|
+
if (options.database) files.push({
|
|
6964
|
+
path: "vitest.config.ts",
|
|
6965
|
+
content: vitestConfigContent
|
|
6966
|
+
});
|
|
6967
|
+
return files;
|
|
6670
6968
|
}
|
|
6671
6969
|
function generateSingleAppConfigFiles(options, _template, _helpers) {
|
|
6672
6970
|
const tsConfig = {
|
|
6673
6971
|
extends: "../../tsconfig.json",
|
|
6674
6972
|
compilerOptions: {
|
|
6675
6973
|
noEmit: true,
|
|
6974
|
+
allowImportingTsExtensions: true,
|
|
6676
6975
|
baseUrl: ".",
|
|
6677
6976
|
paths: {
|
|
6678
6977
|
"~/*": ["./src/*"],
|
|
@@ -6682,10 +6981,15 @@ function generateSingleAppConfigFiles(options, _template, _helpers) {
|
|
|
6682
6981
|
include: ["src/**/*.ts"],
|
|
6683
6982
|
exclude: ["node_modules", "dist"]
|
|
6684
6983
|
};
|
|
6685
|
-
|
|
6984
|
+
const files = [{
|
|
6686
6985
|
path: "tsconfig.json",
|
|
6687
6986
|
content: `${JSON.stringify(tsConfig, null, 2)}\n`
|
|
6688
6987
|
}];
|
|
6988
|
+
if (options.database) files.push({
|
|
6989
|
+
path: "vitest.config.ts",
|
|
6990
|
+
content: vitestConfigContent
|
|
6991
|
+
});
|
|
6992
|
+
return files;
|
|
6689
6993
|
}
|
|
6690
6994
|
|
|
6691
6995
|
//#endregion
|
|
@@ -6716,7 +7020,7 @@ function generateDockerFiles(options, template, dbApps) {
|
|
|
6716
7020
|
POSTGRES_PASSWORD: postgres
|
|
6717
7021
|
POSTGRES_DB: ${options.name.replace(/-/g, "_")}_dev
|
|
6718
7022
|
ports:
|
|
6719
|
-
- '5432:5432'
|
|
7023
|
+
- '\${POSTGRES_HOST_PORT:-5432}:5432'
|
|
6720
7024
|
volumes:
|
|
6721
7025
|
- postgres_data:/var/lib/postgresql/data${initVolume}
|
|
6722
7026
|
healthcheck:
|
|
@@ -6742,7 +7046,7 @@ function generateDockerFiles(options, template, dbApps) {
|
|
|
6742
7046
|
container_name: ${options.name}-redis
|
|
6743
7047
|
restart: unless-stopped
|
|
6744
7048
|
ports:
|
|
6745
|
-
- '6379:6379'
|
|
7049
|
+
- '\${REDIS_HOST_PORT:-6379}:6379'
|
|
6746
7050
|
volumes:
|
|
6747
7051
|
- redis_data:/data
|
|
6748
7052
|
healthcheck:
|
|
@@ -6756,7 +7060,7 @@ function generateDockerFiles(options, template, dbApps) {
|
|
|
6756
7060
|
container_name: ${options.name}-serverless-redis
|
|
6757
7061
|
restart: unless-stopped
|
|
6758
7062
|
ports:
|
|
6759
|
-
- '8079:80'
|
|
7063
|
+
- '\${SRH_HOST_PORT:-8079}:80'
|
|
6760
7064
|
environment:
|
|
6761
7065
|
SRH_MODE: env
|
|
6762
7066
|
SRH_TOKEN: local_dev_token
|
|
@@ -6771,7 +7075,7 @@ function generateDockerFiles(options, template, dbApps) {
|
|
|
6771
7075
|
container_name: ${options.name}-redis
|
|
6772
7076
|
restart: unless-stopped
|
|
6773
7077
|
ports:
|
|
6774
|
-
- '6379:6379'
|
|
7078
|
+
- '\${REDIS_HOST_PORT:-6379}:6379'
|
|
6775
7079
|
volumes:
|
|
6776
7080
|
- redis_data:/data
|
|
6777
7081
|
healthcheck:
|
|
@@ -6787,8 +7091,8 @@ function generateDockerFiles(options, template, dbApps) {
|
|
|
6787
7091
|
container_name: ${options.name}-rabbitmq
|
|
6788
7092
|
restart: unless-stopped
|
|
6789
7093
|
ports:
|
|
6790
|
-
- '5672:5672'
|
|
6791
|
-
- '15672:15672'
|
|
7094
|
+
- '\${RABBITMQ_HOST_PORT:-5672}:5672'
|
|
7095
|
+
- '\${RABBITMQ_MGMT_HOST_PORT:-15672}:15672'
|
|
6792
7096
|
environment:
|
|
6793
7097
|
RABBITMQ_DEFAULT_USER: guest
|
|
6794
7098
|
RABBITMQ_DEFAULT_PASS: guest
|
|
@@ -6806,8 +7110,8 @@ function generateDockerFiles(options, template, dbApps) {
|
|
|
6806
7110
|
container_name: ${options.name}-mailpit
|
|
6807
7111
|
restart: unless-stopped
|
|
6808
7112
|
ports:
|
|
6809
|
-
- '1025:1025'
|
|
6810
|
-
- '8025:8025'
|
|
7113
|
+
- '\${MAILPIT_SMTP_HOST_PORT:-1025}:1025'
|
|
7114
|
+
- '\${MAILPIT_UI_HOST_PORT:-8025}:8025'
|
|
6811
7115
|
environment:
|
|
6812
7116
|
MP_SMTP_AUTH_ACCEPT_ANY: 1
|
|
6813
7117
|
MP_SMTP_AUTH_ALLOW_INSECURE: 1`);
|
|
@@ -6961,10 +7265,8 @@ function generateModelsPackage(options) {
|
|
|
6961
7265
|
const tsConfig = {
|
|
6962
7266
|
extends: "../../tsconfig.json",
|
|
6963
7267
|
compilerOptions: {
|
|
6964
|
-
|
|
6965
|
-
|
|
6966
|
-
outDir: "./dist",
|
|
6967
|
-
rootDir: "./src"
|
|
7268
|
+
noEmit: true,
|
|
7269
|
+
allowImportingTsExtensions: true
|
|
6968
7270
|
},
|
|
6969
7271
|
include: ["src/**/*.ts"],
|
|
6970
7272
|
exclude: ["node_modules", "dist"]
|
|
@@ -7010,7 +7312,7 @@ export type Timestamps = z.infer<typeof TimestampsSchema>;
|
|
|
7010
7312
|
export type Pagination = z.infer<typeof PaginationSchema>;
|
|
7011
7313
|
`;
|
|
7012
7314
|
const userTs = `import { z } from 'zod';
|
|
7013
|
-
import { IdSchema, TimestampsSchema } from './common.
|
|
7315
|
+
import { IdSchema, TimestampsSchema } from './common.ts';
|
|
7014
7316
|
|
|
7015
7317
|
// ============================================
|
|
7016
7318
|
// User Schemas
|
|
@@ -7501,7 +7803,7 @@ export const config = envParser
|
|
|
7501
7803
|
{
|
|
7502
7804
|
path: getRoutePath("health.ts"),
|
|
7503
7805
|
content: monorepo ? `import { z } from 'zod';
|
|
7504
|
-
import { publicRouter } from '~/router';
|
|
7806
|
+
import { publicRouter } from '~/router.ts';
|
|
7505
7807
|
|
|
7506
7808
|
export const healthEndpoint = publicRouter
|
|
7507
7809
|
.get('/health')
|
|
@@ -7644,8 +7946,8 @@ export const authService = {
|
|
|
7644
7946
|
path: "src/router.ts",
|
|
7645
7947
|
content: `import { e } from '@geekmidas/constructs/endpoints';
|
|
7646
7948
|
import { UnauthorizedError } from '@geekmidas/errors';
|
|
7647
|
-
import { authService, type Session } from './services/auth.
|
|
7648
|
-
import { logger } from './config/logger.
|
|
7949
|
+
import { authService, type Session } from './services/auth.ts';
|
|
7950
|
+
import { logger } from './config/logger.ts';
|
|
7649
7951
|
|
|
7650
7952
|
// Public router - no auth required
|
|
7651
7953
|
export const publicRouter = e.logger(logger);
|
|
@@ -7669,7 +7971,7 @@ export const sessionRouter = r.session<Session>(async ({ services, header }) =>
|
|
|
7669
7971
|
files.push({
|
|
7670
7972
|
path: getRoutePath("profile.ts"),
|
|
7671
7973
|
content: `import { z } from 'zod';
|
|
7672
|
-
import { sessionRouter } from '~/router';
|
|
7974
|
+
import { sessionRouter } from '~/router.ts';
|
|
7673
7975
|
|
|
7674
7976
|
export const profileEndpoint = sessionRouter
|
|
7675
7977
|
.get('/profile')
|
|
@@ -7738,8 +8040,8 @@ export const telescope = new Telescope({
|
|
|
7738
8040
|
content: `import { Direction, InMemoryMonitoringStorage, Studio } from '@geekmidas/studio';
|
|
7739
8041
|
import { Kysely, PostgresDialect } from 'kysely';
|
|
7740
8042
|
import pg from 'pg';
|
|
7741
|
-
import type { Database } from '
|
|
7742
|
-
import { envParser } from '
|
|
8043
|
+
import type { Database } from '~/services/database.ts';
|
|
8044
|
+
import { envParser } from '~/config/env.ts';
|
|
7743
8045
|
|
|
7744
8046
|
// Parse database config for Studio
|
|
7745
8047
|
const studioConfig = envParser
|
|
@@ -7912,8 +8214,8 @@ export const telescope = new Telescope({
|
|
|
7912
8214
|
content: `import { Direction, InMemoryMonitoringStorage, Studio } from '@geekmidas/studio';
|
|
7913
8215
|
import { Kysely, PostgresDialect } from 'kysely';
|
|
7914
8216
|
import pg from 'pg';
|
|
7915
|
-
import type { Database } from '
|
|
7916
|
-
import { envParser } from '
|
|
8217
|
+
import type { Database } from '~/services/database.ts';
|
|
8218
|
+
import { envParser } from '~/config/env.ts';
|
|
7917
8219
|
|
|
7918
8220
|
// Parse database config for Studio
|
|
7919
8221
|
const studioConfig = envParser
|
|
@@ -8169,7 +8471,7 @@ export type AppEvents =
|
|
|
8169
8471
|
path: "src/events/publisher.ts",
|
|
8170
8472
|
content: `import type { Service, ServiceRegisterOptions } from '@geekmidas/services';
|
|
8171
8473
|
import { Publisher, type EventPublisher } from '@geekmidas/events';
|
|
8172
|
-
import type { AppEvents } from './types.
|
|
8474
|
+
import type { AppEvents } from './types.ts';
|
|
8173
8475
|
|
|
8174
8476
|
export const eventsPublisherService = {
|
|
8175
8477
|
serviceName: 'events' as const,
|
|
@@ -8196,7 +8498,7 @@ export const eventsPublisherService = {
|
|
|
8196
8498
|
{
|
|
8197
8499
|
path: "src/subscribers/user-events.ts",
|
|
8198
8500
|
content: `import { s } from '@geekmidas/constructs/subscribers';
|
|
8199
|
-
import { eventsPublisherService } from '
|
|
8501
|
+
import { eventsPublisherService } from '~/events/publisher.ts';
|
|
8200
8502
|
|
|
8201
8503
|
export const userEventsSubscriber = s
|
|
8202
8504
|
.publisher(eventsPublisherService)
|
|
@@ -8398,6 +8700,9 @@ function generatePackageJson(options, template) {
|
|
|
8398
8700
|
dependencies$1.kysely = "~0.28.2";
|
|
8399
8701
|
dependencies$1.pg = "~8.16.0";
|
|
8400
8702
|
devDependencies$1["@types/pg"] = "~8.15.0";
|
|
8703
|
+
devDependencies$1["@geekmidas/testkit"] = GEEKMIDAS_VERSIONS["@geekmidas/testkit"];
|
|
8704
|
+
devDependencies$1["@faker-js/faker"] = "~9.8.0";
|
|
8705
|
+
devDependencies$1["vite-tsconfig-paths"] = "~5.1.0";
|
|
8401
8706
|
}
|
|
8402
8707
|
if (monorepo) {
|
|
8403
8708
|
delete devDependencies$1["@biomejs/biome"];
|
|
@@ -8444,6 +8749,118 @@ function generateSourceFiles(options, template) {
|
|
|
8444
8749
|
return template.files(options);
|
|
8445
8750
|
}
|
|
8446
8751
|
|
|
8752
|
+
//#endregion
|
|
8753
|
+
//#region src/init/generators/test.ts
|
|
8754
|
+
/**
|
|
8755
|
+
* Generate test infrastructure files when database is enabled.
|
|
8756
|
+
* Includes transaction-isolated test config, global setup with migrations,
|
|
8757
|
+
* factory system with builders/seeds, and an example spec.
|
|
8758
|
+
*/
|
|
8759
|
+
function generateTestFiles(options, _template) {
|
|
8760
|
+
if (!options.database) return [];
|
|
8761
|
+
return [
|
|
8762
|
+
{
|
|
8763
|
+
path: "test/config.ts",
|
|
8764
|
+
content: `import { it as itVitest } from 'vitest';
|
|
8765
|
+
import { Kysely, PostgresDialect } from 'kysely';
|
|
8766
|
+
import pg from 'pg';
|
|
8767
|
+
import { wrapVitestKyselyTransaction } from '@geekmidas/testkit/kysely';
|
|
8768
|
+
import type { Database } from '~/services/database.ts';
|
|
8769
|
+
|
|
8770
|
+
const connection = new Kysely<Database>({
|
|
8771
|
+
dialect: new PostgresDialect({
|
|
8772
|
+
pool: new pg.Pool({ connectionString: process.env.DATABASE_URL }),
|
|
8773
|
+
}),
|
|
8774
|
+
});
|
|
8775
|
+
|
|
8776
|
+
export const it = wrapVitestKyselyTransaction<Database>(itVitest, {
|
|
8777
|
+
connection,
|
|
8778
|
+
});
|
|
8779
|
+
`
|
|
8780
|
+
},
|
|
8781
|
+
{
|
|
8782
|
+
path: "test/globalSetup.ts",
|
|
8783
|
+
content: `import { Kysely, PostgresDialect } from 'kysely';
|
|
8784
|
+
import pg from 'pg';
|
|
8785
|
+
import { PostgresKyselyMigrator } from '@geekmidas/testkit/kysely';
|
|
8786
|
+
import type { Database } from '~/services/database.ts';
|
|
8787
|
+
|
|
8788
|
+
export async function setup() {
|
|
8789
|
+
const testUrl = process.env.DATABASE_URL;
|
|
8790
|
+
if (!testUrl) throw new Error('DATABASE_URL is required for tests');
|
|
8791
|
+
|
|
8792
|
+
// Run migrations on the test database
|
|
8793
|
+
// (gkm test already rewrites DATABASE_URL to point to the _test database)
|
|
8794
|
+
const db = new Kysely<Database>({
|
|
8795
|
+
dialect: new PostgresDialect({
|
|
8796
|
+
pool: new pg.Pool({ connectionString: testUrl }),
|
|
8797
|
+
}),
|
|
8798
|
+
});
|
|
8799
|
+
|
|
8800
|
+
const migrator = new PostgresKyselyMigrator({
|
|
8801
|
+
db,
|
|
8802
|
+
migrationsPath: './src/migrations',
|
|
8803
|
+
});
|
|
8804
|
+
|
|
8805
|
+
await migrator.migrateToLatest();
|
|
8806
|
+
await db.destroy();
|
|
8807
|
+
}
|
|
8808
|
+
`
|
|
8809
|
+
},
|
|
8810
|
+
{
|
|
8811
|
+
path: "test/factory/index.ts",
|
|
8812
|
+
content: `import type { Kysely } from 'kysely';
|
|
8813
|
+
import { KyselyFactory } from '@geekmidas/testkit/kysely';
|
|
8814
|
+
import type { Database } from '~/services/database.ts';
|
|
8815
|
+
import { usersBuilder } from './users.ts';
|
|
8816
|
+
|
|
8817
|
+
const builders = { users: usersBuilder };
|
|
8818
|
+
const seeds = {};
|
|
8819
|
+
|
|
8820
|
+
export function createFactory(db: Kysely<Database>) {
|
|
8821
|
+
return new KyselyFactory<Database, typeof builders, typeof seeds>(
|
|
8822
|
+
builders,
|
|
8823
|
+
seeds,
|
|
8824
|
+
db,
|
|
8825
|
+
);
|
|
8826
|
+
}
|
|
8827
|
+
|
|
8828
|
+
export type Factory = ReturnType<typeof createFactory>;
|
|
8829
|
+
`
|
|
8830
|
+
},
|
|
8831
|
+
{
|
|
8832
|
+
path: "test/factory/users.ts",
|
|
8833
|
+
content: `import { KyselyFactory } from '@geekmidas/testkit/kysely';
|
|
8834
|
+
import type { Database } from '~/services/database.ts';
|
|
8835
|
+
|
|
8836
|
+
export const usersBuilder = KyselyFactory.createBuilder<Database, 'users'>(
|
|
8837
|
+
'users',
|
|
8838
|
+
({ faker }) => ({
|
|
8839
|
+
id: faker.string.uuid(),
|
|
8840
|
+
name: faker.person.fullName(),
|
|
8841
|
+
email: faker.internet.email(),
|
|
8842
|
+
created_at: new Date(),
|
|
8843
|
+
}),
|
|
8844
|
+
);
|
|
8845
|
+
`
|
|
8846
|
+
},
|
|
8847
|
+
{
|
|
8848
|
+
path: "test/example.spec.ts",
|
|
8849
|
+
content: `import { describe, expect } from 'vitest';
|
|
8850
|
+
import { it } from './config.ts';
|
|
8851
|
+
|
|
8852
|
+
describe('example', () => {
|
|
8853
|
+
it('should have a working test setup', async ({ db }) => {
|
|
8854
|
+
// db is a transaction-wrapped Kysely instance
|
|
8855
|
+
// All changes are automatically rolled back after the test
|
|
8856
|
+
expect(db).toBeDefined();
|
|
8857
|
+
});
|
|
8858
|
+
});
|
|
8859
|
+
`
|
|
8860
|
+
}
|
|
8861
|
+
];
|
|
8862
|
+
}
|
|
8863
|
+
|
|
8447
8864
|
//#endregion
|
|
8448
8865
|
//#region src/init/generators/ui.ts
|
|
8449
8866
|
/**
|
|
@@ -8513,6 +8930,7 @@ function generateUiPackageFiles(options) {
|
|
|
8513
8930
|
"DOM.Iterable"
|
|
8514
8931
|
],
|
|
8515
8932
|
noEmit: true,
|
|
8933
|
+
allowImportingTsExtensions: true,
|
|
8516
8934
|
baseUrl: ".",
|
|
8517
8935
|
paths: { "~/*": ["./src/*"] }
|
|
8518
8936
|
},
|
|
@@ -9686,8 +10104,8 @@ export const Alert: Story = {
|
|
|
9686
10104
|
),
|
|
9687
10105
|
};
|
|
9688
10106
|
`;
|
|
9689
|
-
const componentsUiIndex = `export { Button, type ButtonProps, buttonVariants } from './button';
|
|
9690
|
-
export { Input } from './input';
|
|
10107
|
+
const componentsUiIndex = `export { Button, type ButtonProps, buttonVariants } from './button.tsx';
|
|
10108
|
+
export { Input } from './input.tsx';
|
|
9691
10109
|
export {
|
|
9692
10110
|
Card,
|
|
9693
10111
|
CardHeader,
|
|
@@ -9695,17 +10113,17 @@ export {
|
|
|
9695
10113
|
CardTitle,
|
|
9696
10114
|
CardDescription,
|
|
9697
10115
|
CardContent,
|
|
9698
|
-
} from './card';
|
|
9699
|
-
export { Label } from './label';
|
|
9700
|
-
export { Badge, type BadgeProps, badgeVariants } from './badge';
|
|
9701
|
-
export { Separator } from './separator';
|
|
9702
|
-
export { Tabs, TabsList, TabsTrigger, TabsContent } from './tabs';
|
|
10116
|
+
} from './card.tsx';
|
|
10117
|
+
export { Label } from './label.tsx';
|
|
10118
|
+
export { Badge, type BadgeProps, badgeVariants } from './badge.tsx';
|
|
10119
|
+
export { Separator } from './separator.tsx';
|
|
10120
|
+
export { Tabs, TabsList, TabsTrigger, TabsContent } from './tabs.tsx';
|
|
9703
10121
|
export {
|
|
9704
10122
|
Tooltip,
|
|
9705
10123
|
TooltipTrigger,
|
|
9706
10124
|
TooltipContent,
|
|
9707
10125
|
TooltipProvider,
|
|
9708
|
-
} from './tooltip';
|
|
10126
|
+
} from './tooltip.tsx';
|
|
9709
10127
|
export {
|
|
9710
10128
|
Dialog,
|
|
9711
10129
|
DialogPortal,
|
|
@@ -9717,20 +10135,20 @@ export {
|
|
|
9717
10135
|
DialogFooter,
|
|
9718
10136
|
DialogTitle,
|
|
9719
10137
|
DialogDescription,
|
|
9720
|
-
} from './dialog';
|
|
10138
|
+
} from './dialog.tsx';
|
|
9721
10139
|
`;
|
|
9722
10140
|
const buttonIndexTsx = buttonTsx;
|
|
9723
10141
|
const inputIndexTsx = inputTsx;
|
|
9724
10142
|
const cardIndexTsx = cardTsx;
|
|
9725
|
-
const componentsIndex = `export * from './ui';
|
|
10143
|
+
const componentsIndex = `export * from './ui/index.ts';
|
|
9726
10144
|
`;
|
|
9727
10145
|
const indexTs = `// @${options.name}/ui - Shared UI component library
|
|
9728
10146
|
|
|
9729
10147
|
// shadcn/ui components
|
|
9730
|
-
export * from './components';
|
|
10148
|
+
export * from './components/index.ts';
|
|
9731
10149
|
|
|
9732
10150
|
// Utilities
|
|
9733
|
-
export { cn } from './lib/utils';
|
|
10151
|
+
export { cn } from './lib/utils.ts';
|
|
9734
10152
|
`;
|
|
9735
10153
|
const gitignore = `node_modules/
|
|
9736
10154
|
dist/
|
|
@@ -9873,7 +10291,7 @@ function generateWebAppFiles(options) {
|
|
|
9873
10291
|
private: true,
|
|
9874
10292
|
type: "module",
|
|
9875
10293
|
scripts: {
|
|
9876
|
-
dev: "gkm exec -- next dev --turbopack",
|
|
10294
|
+
dev: "gkm exec -- next dev --turbopack -p $PORT",
|
|
9877
10295
|
build: "gkm exec -- next build",
|
|
9878
10296
|
start: "next start",
|
|
9879
10297
|
typecheck: "tsc --noEmit"
|
|
@@ -9928,6 +10346,7 @@ export default nextConfig;
|
|
|
9928
10346
|
skipLibCheck: true,
|
|
9929
10347
|
strict: true,
|
|
9930
10348
|
noEmit: true,
|
|
10349
|
+
allowImportingTsExtensions: true,
|
|
9931
10350
|
esModuleInterop: true,
|
|
9932
10351
|
module: "ESNext",
|
|
9933
10352
|
moduleResolution: "bundler",
|
|
@@ -10009,7 +10428,7 @@ export const serverConfig = envParser
|
|
|
10009
10428
|
`;
|
|
10010
10429
|
const authClientTs = `import { createAuthClient } from 'better-auth/react';
|
|
10011
10430
|
import { magicLinkClient } from 'better-auth/client/plugins';
|
|
10012
|
-
import { clientConfig } from '~/config/client';
|
|
10431
|
+
import { clientConfig } from '~/config/client.ts';
|
|
10013
10432
|
|
|
10014
10433
|
export const authClient = createAuthClient({
|
|
10015
10434
|
baseURL: clientConfig.authUrl,
|
|
@@ -10021,7 +10440,7 @@ export const { signIn, signUp, signOut, useSession, magicLink } = authClient;
|
|
|
10021
10440
|
const providersTsx = `'use client';
|
|
10022
10441
|
|
|
10023
10442
|
import { QueryClientProvider } from '@tanstack/react-query';
|
|
10024
|
-
import { getQueryClient } from '~/lib/query-client';
|
|
10443
|
+
import { getQueryClient } from '~/lib/query-client.ts';
|
|
10025
10444
|
|
|
10026
10445
|
export function Providers({ children }: { children: React.ReactNode }) {
|
|
10027
10446
|
const queryClient = getQueryClient();
|
|
@@ -10031,9 +10450,9 @@ export function Providers({ children }: { children: React.ReactNode }) {
|
|
|
10031
10450
|
);
|
|
10032
10451
|
}
|
|
10033
10452
|
`;
|
|
10034
|
-
const apiIndexTs = `import { createApi } from './
|
|
10035
|
-
import { getQueryClient } from '~/lib/query-client';
|
|
10036
|
-
import { clientConfig } from '~/config/client';
|
|
10453
|
+
const apiIndexTs = `import { createApi } from './api.ts';
|
|
10454
|
+
import { getQueryClient } from '~/lib/query-client.ts';
|
|
10455
|
+
import { clientConfig } from '~/config/client.ts';
|
|
10037
10456
|
|
|
10038
10457
|
export const api = createApi({
|
|
10039
10458
|
baseURL: clientConfig.apiUrl,
|
|
@@ -10043,7 +10462,7 @@ export const api = createApi({
|
|
|
10043
10462
|
const globalsCss = `@import '${uiPackage}/styles';
|
|
10044
10463
|
`;
|
|
10045
10464
|
const layoutTsx = `import type { Metadata } from 'next';
|
|
10046
|
-
import { Providers } from './providers';
|
|
10465
|
+
import { Providers } from './providers.tsx';
|
|
10047
10466
|
import './globals.css';
|
|
10048
10467
|
|
|
10049
10468
|
export const metadata: Metadata = {
|
|
@@ -10065,7 +10484,7 @@ export default function RootLayout({
|
|
|
10065
10484
|
);
|
|
10066
10485
|
}
|
|
10067
10486
|
`;
|
|
10068
|
-
const pageTsx = `import { api } from '~/api';
|
|
10487
|
+
const pageTsx = `import { api } from '~/api/index.ts';
|
|
10069
10488
|
import { Button, Card, CardContent, CardDescription, CardHeader, CardTitle } from '${uiPackage}/components';
|
|
10070
10489
|
|
|
10071
10490
|
export default async function Home() {
|
|
@@ -10408,6 +10827,7 @@ async function initCommand(projectName, options = {}) {
|
|
|
10408
10827
|
...generateConfigFiles(templateOptions, baseTemplate),
|
|
10409
10828
|
...generateEnvFiles(templateOptions, baseTemplate),
|
|
10410
10829
|
...generateSourceFiles(templateOptions, baseTemplate),
|
|
10830
|
+
...generateTestFiles(templateOptions, baseTemplate),
|
|
10411
10831
|
...isMonorepo$1 ? [] : generateDockerFiles(templateOptions, baseTemplate, dbApps)
|
|
10412
10832
|
] : [];
|
|
10413
10833
|
const dockerFiles = isMonorepo$1 && baseTemplate ? generateDockerFiles(templateOptions, baseTemplate, dbApps) : [];
|
|
@@ -10751,23 +11171,61 @@ function maskUrl(url) {
|
|
|
10751
11171
|
//#endregion
|
|
10752
11172
|
//#region src/test/index.ts
|
|
10753
11173
|
/**
|
|
10754
|
-
* Run tests with secrets
|
|
10755
|
-
*
|
|
11174
|
+
* Run tests with secrets, dependency URLs, and .env files loaded.
|
|
11175
|
+
* Environment variables are sniffed to inject only what the app needs.
|
|
10756
11176
|
*/
|
|
10757
11177
|
async function testCommand(options = {}) {
|
|
10758
11178
|
const stage = options.stage ?? "development";
|
|
10759
|
-
|
|
10760
|
-
|
|
11179
|
+
const cwd = process.cwd();
|
|
11180
|
+
console.log(`\n🧪 Running tests with ${stage} environment...\n`);
|
|
11181
|
+
const defaultEnv = loadEnvFiles(".env");
|
|
11182
|
+
if (defaultEnv.loaded.length > 0) console.log(` 📦 Loaded env: ${defaultEnv.loaded.join(", ")}`);
|
|
11183
|
+
let secretsEnv = {};
|
|
10761
11184
|
try {
|
|
10762
11185
|
const secrets = await readStageSecrets(stage);
|
|
10763
11186
|
if (secrets) {
|
|
10764
|
-
|
|
10765
|
-
console.log(` Loaded ${Object.keys(
|
|
10766
|
-
} else console.log(` No secrets found for ${stage}
|
|
11187
|
+
secretsEnv = toEmbeddableSecrets(secrets);
|
|
11188
|
+
console.log(` 🔐 Loaded ${Object.keys(secretsEnv).length} secrets from ${stage}`);
|
|
11189
|
+
} else console.log(` No secrets found for ${stage}`);
|
|
10767
11190
|
} catch (error) {
|
|
10768
|
-
if (error instanceof Error && error.message.includes("key not found")) console.log(` Decryption key not found for ${stage}
|
|
11191
|
+
if (error instanceof Error && error.message.includes("key not found")) console.log(` Decryption key not found for ${stage}`);
|
|
10769
11192
|
else throw error;
|
|
10770
11193
|
}
|
|
11194
|
+
const composePath = join(cwd, "docker-compose.yml");
|
|
11195
|
+
const mappings = parseComposePortMappings(composePath);
|
|
11196
|
+
if (mappings.length > 0) {
|
|
11197
|
+
const ports = await loadPortState(cwd);
|
|
11198
|
+
if (Object.keys(ports).length > 0) {
|
|
11199
|
+
secretsEnv = rewriteUrlsWithPorts(secretsEnv, {
|
|
11200
|
+
dockerEnv: {},
|
|
11201
|
+
ports,
|
|
11202
|
+
mappings
|
|
11203
|
+
});
|
|
11204
|
+
console.log(` 🔌 Applied ${Object.keys(ports).length} port mapping(s)`);
|
|
11205
|
+
}
|
|
11206
|
+
}
|
|
11207
|
+
secretsEnv = rewriteDatabaseUrlForTests(secretsEnv);
|
|
11208
|
+
await ensureTestDatabase(secretsEnv);
|
|
11209
|
+
let dependencyEnv = {};
|
|
11210
|
+
try {
|
|
11211
|
+
const appInfo = await loadWorkspaceAppInfo(cwd);
|
|
11212
|
+
dependencyEnv = getDependencyEnvVars(appInfo.workspace, appInfo.appName);
|
|
11213
|
+
if (Object.keys(dependencyEnv).length > 0) console.log(` 🔗 Loaded ${Object.keys(dependencyEnv).length} dependency URL(s)`);
|
|
11214
|
+
const sniffed = await sniffAppEnvironment(appInfo.app, appInfo.appName, appInfo.workspaceRoot, { logWarnings: false });
|
|
11215
|
+
if (sniffed.requiredEnvVars.length > 0) {
|
|
11216
|
+
const needed = new Set(sniffed.requiredEnvVars);
|
|
11217
|
+
const allEnv = {
|
|
11218
|
+
...secretsEnv,
|
|
11219
|
+
...dependencyEnv
|
|
11220
|
+
};
|
|
11221
|
+
const filteredEnv = {};
|
|
11222
|
+
for (const [key, value] of Object.entries(allEnv)) if (needed.has(key)) filteredEnv[key] = value;
|
|
11223
|
+
secretsEnv = {};
|
|
11224
|
+
dependencyEnv = filteredEnv;
|
|
11225
|
+
console.log(` 🔍 Sniffed ${sniffed.requiredEnvVars.length} required env var(s)`);
|
|
11226
|
+
}
|
|
11227
|
+
} catch {}
|
|
11228
|
+
console.log("");
|
|
10771
11229
|
const args = [];
|
|
10772
11230
|
if (options.run) args.push("run");
|
|
10773
11231
|
else if (options.watch) args.push("--watch");
|
|
@@ -10775,11 +11233,12 @@ async function testCommand(options = {}) {
|
|
|
10775
11233
|
if (options.ui) args.push("--ui");
|
|
10776
11234
|
if (options.pattern) args.push(options.pattern);
|
|
10777
11235
|
const vitestProcess = spawn("npx", ["vitest", ...args], {
|
|
10778
|
-
cwd
|
|
11236
|
+
cwd,
|
|
10779
11237
|
stdio: "inherit",
|
|
10780
11238
|
env: {
|
|
10781
11239
|
...process.env,
|
|
10782
|
-
...
|
|
11240
|
+
...secretsEnv,
|
|
11241
|
+
...dependencyEnv,
|
|
10783
11242
|
NODE_ENV: "test"
|
|
10784
11243
|
}
|
|
10785
11244
|
});
|
|
@@ -10793,6 +11252,57 @@ async function testCommand(options = {}) {
|
|
|
10793
11252
|
});
|
|
10794
11253
|
});
|
|
10795
11254
|
}
|
|
11255
|
+
const TEST_DB_SUFFIX = "_test";
/**
 * Rewrite DATABASE_URL to point to a separate test database.
 * Appends `_test` to the database name (e.g., `app` -> `app_test`).
 * Keys that do not contain "DATABASE_URL", values that fail URL parsing,
 * and names already carrying the suffix are passed through untouched.
 * @internal Exported for testing
 */
function rewriteDatabaseUrlForTests(env) {
	const rewritten = { ...env };
	for (const [name, rawValue] of Object.entries(rewritten)) {
		if (!name.includes("DATABASE_URL")) continue;
		try {
			const parsed = new URL(rawValue);
			const currentDb = parsed.pathname.slice(1);
			// Skip empty names and names already pointing at the test database.
			if (!currentDb || currentDb.endsWith(TEST_DB_SUFFIX)) continue;
			parsed.pathname = `/${currentDb}${TEST_DB_SUFFIX}`;
			rewritten[name] = parsed.toString();
			console.log(` 🧪 ${name}: using test database "${currentDb}${TEST_DB_SUFFIX}"`);
		} catch {}
	}
	return rewritten;
}
|
|
11278
|
+
/**
 * Ensure the test database exists by connecting to the default `postgres`
 * maintenance database and issuing CREATE DATABASE. Postgres has no
 * `CREATE DATABASE IF NOT EXISTS`, so the duplicate_database error (42P04)
 * is treated as success.
 *
 * Best-effort: any parse/connection failure is logged and swallowed so the
 * test run can still proceed (the tests themselves will surface a real
 * database problem).
 *
 * @internal Exported for testing
 * @param {Record<string, string>} env - Environment map; only `DATABASE_URL` is read.
 * @returns {Promise<void>}
 */
async function ensureTestDatabase(env) {
	const databaseUrl = env.DATABASE_URL;
	if (!databaseUrl) return;
	try {
		const url = new URL(databaseUrl);
		// Decode the pathname so the name we create matches the database the
		// driver will actually connect to (it decodes percent-escapes too).
		const testDbName = decodeURIComponent(url.pathname.slice(1));
		if (!testDbName) return;
		// CREATE DATABASE cannot run against the database being created, so
		// connect to the standard `postgres` maintenance database instead.
		url.pathname = "/postgres";
		const { default: pg } = await import("pg");
		const client = new pg.Client({ connectionString: url.toString() });
		await client.connect();
		try {
			// Quote the identifier and double any embedded double quotes so an
			// arbitrary database name cannot break (or inject into) the statement.
			const quotedName = `"${testDbName.replaceAll('"', '""')}"`;
			await client.query(`CREATE DATABASE ${quotedName}`);
			console.log(` 📦 Created test database "${testDbName}"`);
		} catch (err) {
			// 42P04 = duplicate_database: it already exists, which is fine.
			if (err.code !== "42P04") throw err;
		} finally {
			await client.end();
		}
	} catch (err) {
		console.log(` ⚠️ Could not ensure test database: ${err.message}`);
	}
}
|
|
10796
11306
|
|
|
10797
11307
|
//#endregion
|
|
10798
11308
|
//#region src/index.ts
|