@kithinji/pod 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build.js +22 -0
- package/dist/main.js +4464 -0
- package/dist/main.js.map +7 -0
- package/dist/types/add/component/component.d.ts +5 -0
- package/dist/types/add/component/component.d.ts.map +1 -0
- package/dist/types/add/component/index.d.ts +2 -0
- package/dist/types/add/component/index.d.ts.map +1 -0
- package/dist/types/add/index.d.ts +4 -0
- package/dist/types/add/index.d.ts.map +1 -0
- package/dist/types/add/module/index.d.ts +2 -0
- package/dist/types/add/module/index.d.ts.map +1 -0
- package/dist/types/add/module/module.d.ts +3 -0
- package/dist/types/add/module/module.d.ts.map +1 -0
- package/dist/types/add/new/index.d.ts +2 -0
- package/dist/types/add/new/index.d.ts.map +1 -0
- package/dist/types/config/config.d.ts +18 -0
- package/dist/types/config/config.d.ts.map +1 -0
- package/dist/types/config/index.d.ts +2 -0
- package/dist/types/config/index.d.ts.map +1 -0
- package/dist/types/dev/index.d.ts +2 -0
- package/dist/types/dev/index.d.ts.map +1 -0
- package/dist/types/dev/project.d.ts +9 -0
- package/dist/types/dev/project.d.ts.map +1 -0
- package/dist/types/dev/server.d.ts +2 -0
- package/dist/types/dev/server.d.ts.map +1 -0
- package/dist/types/docker/docker.d.ts +2 -0
- package/dist/types/docker/docker.d.ts.map +1 -0
- package/dist/types/docker/index.d.ts +2 -0
- package/dist/types/docker/index.d.ts.map +1 -0
- package/dist/types/macros/expand_macros.d.ts +48 -0
- package/dist/types/macros/expand_macros.d.ts.map +1 -0
- package/dist/types/macros/index.d.ts +3 -0
- package/dist/types/macros/index.d.ts.map +1 -0
- package/dist/types/macros/macro_executer.d.ts +12 -0
- package/dist/types/macros/macro_executer.d.ts.map +1 -0
- package/dist/types/main.d.ts +13 -0
- package/dist/types/main.d.ts.map +1 -0
- package/dist/types/plugins/analyzers/graph.d.ts +25 -0
- package/dist/types/plugins/analyzers/graph.d.ts.map +1 -0
- package/dist/types/plugins/css/index.d.ts +7 -0
- package/dist/types/plugins/css/index.d.ts.map +1 -0
- package/dist/types/plugins/generators/generate_controller.d.ts +2 -0
- package/dist/types/plugins/generators/generate_controller.d.ts.map +1 -0
- package/dist/types/plugins/generators/generate_rsc.d.ts +2 -0
- package/dist/types/plugins/generators/generate_rsc.d.ts.map +1 -0
- package/dist/types/plugins/generators/generate_server_component.d.ts +2 -0
- package/dist/types/plugins/generators/generate_server_component.d.ts.map +1 -0
- package/dist/types/plugins/generators/tsx_server_stub.d.ts +2 -0
- package/dist/types/plugins/generators/tsx_server_stub.d.ts.map +1 -0
- package/dist/types/plugins/index.d.ts +4 -0
- package/dist/types/plugins/index.d.ts.map +1 -0
- package/dist/types/plugins/my.d.ts +10 -0
- package/dist/types/plugins/my.d.ts.map +1 -0
- package/dist/types/plugins/transformers/j2d.d.ts +11 -0
- package/dist/types/plugins/transformers/j2d.d.ts.map +1 -0
- package/dist/types/store/index.d.ts +2 -0
- package/dist/types/store/index.d.ts.map +1 -0
- package/dist/types/store/store.d.ts +14 -0
- package/dist/types/store/store.d.ts.map +1 -0
- package/dist/types/utils/cases.d.ts +4 -0
- package/dist/types/utils/cases.d.ts.map +1 -0
- package/dist/types/utils/create.d.ts +12 -0
- package/dist/types/utils/create.d.ts.map +1 -0
- package/dist/types/utils/index.d.ts +3 -0
- package/dist/types/utils/index.d.ts.map +1 -0
- package/package.json +44 -0
- package/src/add/component/component.ts +496 -0
- package/src/add/component/index.ts +1 -0
- package/src/add/index.ts +3 -0
- package/src/add/module/index.ts +1 -0
- package/src/add/module/module.ts +521 -0
- package/src/add/new/index.ts +135 -0
- package/src/config/config.ts +141 -0
- package/src/config/index.ts +1 -0
- package/src/dev/index.ts +1 -0
- package/src/dev/project.ts +45 -0
- package/src/dev/server.ts +190 -0
- package/src/docker/docker.ts +452 -0
- package/src/docker/index.ts +1 -0
- package/src/macros/expand_macros.ts +791 -0
- package/src/macros/index.ts +2 -0
- package/src/macros/macro_executer.ts +189 -0
- package/src/main.ts +95 -0
- package/src/plugins/analyzers/graph.ts +291 -0
- package/src/plugins/css/index.ts +25 -0
- package/src/plugins/generators/generate_controller.ts +308 -0
- package/src/plugins/generators/generate_rsc.ts +274 -0
- package/src/plugins/generators/generate_server_component.ts +279 -0
- package/src/plugins/generators/tsx_server_stub.ts +295 -0
- package/src/plugins/index.ts +3 -0
- package/src/plugins/my.ts +274 -0
- package/src/plugins/transformers/j2d.ts +1014 -0
- package/src/store/index.ts +1 -0
- package/src/store/store.ts +44 -0
- package/src/utils/cases.ts +15 -0
- package/src/utils/create.ts +26 -0
- package/src/utils/index.ts +2 -0
- package/tsconfig.json +27 -0
|
@@ -0,0 +1,452 @@
|
|
|
1
|
+
import fs from "fs-extra";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import prompts from "prompts";
|
|
4
|
+
import yaml from "js-yaml";
|
|
5
|
+
|
|
6
|
+
// Minimal slice of a project's package.json that dockerize() reads:
// the project name (used as the compose service / nested dir name)
// and the runtime dependencies (scanned for database client packages).
interface PackageJson {
  name: string;
  dependencies?: Record<string, string>;
}
|
|
10
|
+
|
|
11
|
+
// A backing service (postgres | mysql | redis | mongodb) to wire into the
// generated docker-compose file. needsTunnel is set in dev mode for
// services the user chose to reach through an SSH tunnel container.
interface DockerService {
  name: string;
  needsTunnel?: boolean;
}
|
|
15
|
+
|
|
16
|
+
export async function dockerize(env: "dev" | "prod" = "prod") {
|
|
17
|
+
const cwd = process.cwd();
|
|
18
|
+
const packageJsonPath = path.join(cwd, "package.json");
|
|
19
|
+
|
|
20
|
+
if (!fs.existsSync(packageJsonPath)) {
|
|
21
|
+
throw new Error("package.json not found. Are you in a Pod project?");
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
const packageJson: PackageJson = await fs.readJSON(packageJsonPath);
|
|
25
|
+
const projectName = packageJson.name;
|
|
26
|
+
|
|
27
|
+
const detectedServices = detectServices(packageJson);
|
|
28
|
+
const selectedServices = await selectServices(detectedServices);
|
|
29
|
+
|
|
30
|
+
await restructureProject(cwd, projectName);
|
|
31
|
+
await createDockerfile(cwd, projectName);
|
|
32
|
+
|
|
33
|
+
if (env === "prod") {
|
|
34
|
+
await setupProduction(cwd, projectName, selectedServices);
|
|
35
|
+
} else {
|
|
36
|
+
await setupDevelopment(cwd, projectName, selectedServices);
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
printNextSteps(projectName, env, selectedServices);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
function detectServices(packageJson: PackageJson): DockerService[] {
|
|
43
|
+
const deps = packageJson.dependencies || {};
|
|
44
|
+
const services: DockerService[] = [];
|
|
45
|
+
|
|
46
|
+
if (deps.pg || deps.postgres) services.push({ name: "postgres" });
|
|
47
|
+
if (deps.mysql || deps.mysql2) services.push({ name: "mysql" });
|
|
48
|
+
if (deps.redis || deps.ioredis) services.push({ name: "redis" });
|
|
49
|
+
if (deps.mongodb || deps.mongoose) services.push({ name: "mongodb" });
|
|
50
|
+
|
|
51
|
+
return services;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
async function selectServices(
|
|
55
|
+
detected: DockerService[]
|
|
56
|
+
): Promise<DockerService[]> {
|
|
57
|
+
if (detected.length === 0) return [];
|
|
58
|
+
|
|
59
|
+
const response = await prompts({
|
|
60
|
+
type: "multiselect",
|
|
61
|
+
name: "services",
|
|
62
|
+
message: "Select services to include:",
|
|
63
|
+
choices: detected.map((s) => ({
|
|
64
|
+
title: s.name,
|
|
65
|
+
value: s.name,
|
|
66
|
+
selected: true,
|
|
67
|
+
})),
|
|
68
|
+
});
|
|
69
|
+
|
|
70
|
+
if (!response.services) return [];
|
|
71
|
+
return detected.filter((s) => response.services.includes(s.name));
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
async function restructureProject(cwd: string, projectName: string) {
|
|
75
|
+
const nestedDir = path.join(cwd, projectName);
|
|
76
|
+
|
|
77
|
+
if (fs.existsSync(nestedDir)) {
|
|
78
|
+
console.log("⚠️ Project already restructured, skipping...");
|
|
79
|
+
return;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
await fs.ensureDir(nestedDir);
|
|
83
|
+
|
|
84
|
+
const items = await fs.readdir(cwd);
|
|
85
|
+
const toMove = items.filter((item) => item !== projectName);
|
|
86
|
+
|
|
87
|
+
for (const item of toMove) {
|
|
88
|
+
const src = path.join(cwd, item);
|
|
89
|
+
const dest = path.join(nestedDir, item);
|
|
90
|
+
await fs.move(src, dest, { overwrite: true });
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
const envSrc = path.join(nestedDir, ".env");
|
|
94
|
+
const envDest = path.join(cwd, ".env");
|
|
95
|
+
|
|
96
|
+
if (fs.existsSync(envSrc)) {
|
|
97
|
+
await fs.move(envSrc, envDest, { overwrite: true });
|
|
98
|
+
}
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
/**
 * Write a generic Node 18 Dockerfile into the nested project directory
 * (`<cwd>/<projectName>/Dockerfile`). Installs production deps with
 * `npm ci`, runs `npm run build` when a tsconfig.json is present (failures
 * tolerated via `|| true`), exposes port 3000 and starts with `npm start`.
 */
async function createDockerfile(cwd: string, projectName: string) {
  const dockerfilePath = path.join(cwd, projectName, "Dockerfile");

  // NOTE(review): `--only=production` is deprecated in newer npm in favor
  // of `--omit=dev`; still works, but emits a warning — consider updating.
  const dockerfile = `FROM node:18-alpine

WORKDIR /app

COPY package*.json ./
RUN npm ci --only=production

COPY . .
RUN if [ -f "tsconfig.json" ]; then npm run build || true; fi

EXPOSE 3000
CMD ["npm", "start"]
`;

  await fs.writeFile(dockerfilePath, dockerfile);
}
|
|
120
|
+
|
|
121
|
+
/**
 * Generate a production docker-compose.yml at the repo root: a Traefik
 * reverse proxy (HTTP/HTTPS + Let's Encrypt TLS), the app service, and
 * one container per selected backing service. Also writes .env.example.
 *
 * @param cwd          repo root where docker-compose.yml is written
 * @param projectName  compose service name for the app
 * @param services     backing services chosen by the user
 */
async function setupProduction(
  cwd: string,
  projectName: string,
  services: DockerService[]
) {
  const compose: any = {
    services: {
      traefik: {
        image: "traefik:v2.10",
        command: [
          "--api.insecure=true",
          "--providers.docker=true",
          "--providers.docker.exposedbydefault=false",
          "--entrypoints.web.address=:80",
          "--entrypoints.websecure.address=:443",
          "--certificatesresolvers.myresolver.acme.tlschallenge=true",
          // NOTE(review): placeholder ACME email — Let's Encrypt issuance
          // will be tied to admin@example.com unless the user edits this.
          "--certificatesresolvers.myresolver.acme.email=admin@example.com",
          "--certificatesresolvers.myresolver.acme.storage=/letsencrypt/acme.json",
        ],
        // 8080 exposes the (insecure) Traefik dashboard.
        ports: ["80:80", "443:443", "8080:8080"],
        volumes: [
          "/var/run/docker.sock:/var/run/docker.sock:ro",
          "./letsencrypt:/letsencrypt",
        ],
        networks: ["web"],
      },
      [projectName]: {
        // NOTE(review): build context "." is the repo root, but
        // createDockerfile() writes the Dockerfile into ./<projectName>/
        // after restructuring — confirm this shouldn't be "./<projectName>".
        build: ".",
        labels: [
          "traefik.enable=true",
          // NOTE(review): Host rule is hard-coded to `localhost` — fine for
          // a local smoke test, must be edited for a real deployment.
          "traefik.http.routers.app.rule=Host(`localhost`)",
          "traefik.http.routers.app.entrypoints=websecure",
          "traefik.http.routers.app.tls.certresolver=myresolver",
          "traefik.http.services.app.loadbalancer.server.port=3000",
        ],
        env_file: [".env"],
        depends_on: [],
        networks: ["web"],
      },
    },
    networks: {
      web: {
        driver: "bridge",
      },
    },
    volumes: {},
  };

  // Add each selected service, register its named data volume, and make
  // the app container start after it.
  for (const service of services) {
    const config = getServiceConfig(service.name);
    compose.services[service.name] = config.service;
    if (config.volume) {
      compose.volumes[config.volume.name] = {};
    }
    compose.services[projectName].depends_on.push(service.name);
  }

  const composePath = path.join(cwd, "docker-compose.yml");
  // lineWidth: -1 disables js-yaml line wrapping so long values stay intact.
  await fs.writeFile(
    composePath,
    yaml.dump(compose, { indent: 2, lineWidth: -1 })
  );
  await createEnvTemplate(cwd, services, "prod");
}
|
|
185
|
+
|
|
186
|
+
/**
 * Generate docker-compose.dev.yml: the app with source bind-mounted for
 * live reload, plus optional SSH tunnel sidecars to databases defined in
 * an existing (production) docker-compose.yml.
 *
 * Note: in dev mode no database containers are created — databases are
 * either reached through tunnels or expected to run elsewhere.
 *
 * @param cwd          repo root where docker-compose.dev.yml is written
 * @param projectName  compose service name for the app
 * @param services     detected services; only used for .env.example here
 */
async function setupDevelopment(
  cwd: string,
  projectName: string,
  services: DockerService[]
) {
  const existingCompose = path.join(cwd, "docker-compose.yml");

  let existingServices: DockerService[] = [];

  // Look for known database services in an already-generated prod compose
  // file; those are the tunnel candidates.
  if (fs.existsSync(existingCompose)) {
    const content = await fs.readFile(existingCompose, "utf8");
    const existing: any = yaml.load(content);
    if (existing.services) {
      existingServices = Object.keys(existing.services)
        .filter((s) => ["postgres", "mysql", "redis", "mongodb"].includes(s))
        .map((name) => ({ name }));
    }
  }

  const servicesToTunnel: DockerService[] = [];

  if (existingServices.length > 0) {
    const { tunnel } = await prompts({
      type: "confirm",
      name: "tunnel",
      message: "Tunnel to remote database services?",
      initial: false,
    });

    if (tunnel) {
      const { selected } = await prompts({
        type: "multiselect",
        name: "selected",
        message: "Select services to tunnel:",
        choices: existingServices.map((s) => ({
          title: s.name,
          value: s.name,
        })),
      });

      // `selected` is undefined when the prompt is aborted.
      if (selected) {
        servicesToTunnel.push(
          ...existingServices
            .filter((s) => selected.includes(s.name))
            .map((s) => ({ ...s, needsTunnel: true }))
        );
      }
    }
  }

  // NOTE(review): needsTunnel is only set on this local array, never on the
  // `services` argument — so printNextSteps(services) in dockerize() will
  // never see needsTunnel and the SSH-key hint never prints. Verify intent.
  for (const service of servicesToTunnel) {
    await createTunnelService(cwd, service.name);
  }

  const compose: any = {
    services: {
      [projectName]: {
        // NOTE(review): build context "." and bind mount ".:/app" refer to
        // the repo root, but the app source was moved into ./<projectName>/
        // by restructureProject() — confirm these paths.
        build: ".",
        ports: ["3000:3000"],
        env_file: [".env"],
        // Anonymous volume keeps container node_modules over the bind mount.
        volumes: [".:/app", "/app/node_modules"],
        command: "npm run dev",
        depends_on: [],
      },
    },
    networks: {
      default: {
        driver: "bridge",
      },
    },
  };

  // One sidecar per tunneled service; remote endpoint comes from
  // <SERVICE>_REMOTE_HOST / <SERVICE>_REMOTE_PORT env vars, key from
  // ./<service>.pem mounted read-only.
  for (const service of servicesToTunnel) {
    const tunnelName = `${service.name}-tunnel`;
    compose.services[tunnelName] = {
      build: `./${tunnelName}`,
      environment: [
        `REMOTE_HOST=\${${service.name.toUpperCase()}_REMOTE_HOST}`,
        `REMOTE_PORT=\${${service.name.toUpperCase()}_REMOTE_PORT:-${getDefaultPort(
          service.name
        )}}`,
        `LOCAL_PORT=${getDefaultPort(service.name)}`,
      ],
      volumes: [`./${service.name}.pem:/ssh/${service.name}.pem:ro`],
    };
    compose.services[projectName].depends_on.push(tunnelName);
  }

  const devComposePath = path.join(cwd, "docker-compose.dev.yml");
  // lineWidth: -1 disables js-yaml line wrapping so long values stay intact.
  await fs.writeFile(
    devComposePath,
    yaml.dump(compose, { indent: 2, lineWidth: -1 })
  );
  await createEnvTemplate(cwd, services, "dev");
}
|
|
281
|
+
|
|
282
|
+
/**
 * Write a build context for an SSH tunnel sidecar container at
 * `<projectDir>/<serviceName>-tunnel/`: an Alpine Dockerfile with
 * openssh-client and a tunnel.sh that forwards LOCAL_PORT to
 * REMOTE_HOST:REMOTE_PORT using the key mounted at /ssh/<serviceName>.pem.
 */
async function createTunnelService(projectDir: string, serviceName: string) {
  const tunnelDir = path.join(projectDir, `${serviceName}-tunnel`);
  await fs.ensureDir(tunnelDir);

  const dockerfile = `FROM alpine:latest

RUN apk add --no-cache openssh-client

COPY tunnel.sh /tunnel.sh
RUN chmod +x /tunnel.sh

CMD ["/tunnel.sh"]
`;

  // \${...} stays literal here: these are resolved by the shell at container
  // runtime, while ${getDefaultPort(...)} is baked in at generation time.
  // Binding on 0.0.0.0 lets other compose containers reach the tunnel.
  // NOTE(review): StrictHostKeyChecking=no skips host verification — usual
  // dev-tunnel tradeoff, but worth confirming it is acceptable here.
  const tunnelScript = `#!/bin/sh

SSH_KEY="/ssh/${serviceName}.pem"
REMOTE_HOST=\${REMOTE_HOST}
REMOTE_PORT=\${REMOTE_PORT:-${getDefaultPort(serviceName)}}
LOCAL_PORT=\${LOCAL_PORT:-${getDefaultPort(serviceName)}}

chmod 600 $SSH_KEY

echo "Starting SSH tunnel for ${serviceName}..."
echo "Remote: $REMOTE_HOST:$REMOTE_PORT -> Local: $LOCAL_PORT"

ssh -i $SSH_KEY \\
  -N -L 0.0.0.0:$LOCAL_PORT:localhost:$REMOTE_PORT \\
  -o StrictHostKeyChecking=no \\
  -o ServerAliveInterval=60 \\
  $REMOTE_HOST
`;

  await fs.writeFile(path.join(tunnelDir, "Dockerfile"), dockerfile);
  await fs.writeFile(path.join(tunnelDir, "tunnel.sh"), tunnelScript);
}
|
|
318
|
+
|
|
319
|
+
async function createEnvTemplate(
|
|
320
|
+
projectDir: string,
|
|
321
|
+
services: DockerService[],
|
|
322
|
+
env: "dev" | "prod"
|
|
323
|
+
) {
|
|
324
|
+
const envPath = path.join(projectDir, ".env.example");
|
|
325
|
+
|
|
326
|
+
let content = `NODE_ENV=${
|
|
327
|
+
env === "prod" ? "production" : "development"
|
|
328
|
+
}\nPORT=3000\n`;
|
|
329
|
+
|
|
330
|
+
if (services.length > 0) {
|
|
331
|
+
content += `\n`;
|
|
332
|
+
for (const service of services) {
|
|
333
|
+
const vars = getEnvVars(service.name);
|
|
334
|
+
content += vars.join("\n") + "\n\n";
|
|
335
|
+
}
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
await fs.writeFile(envPath, content);
|
|
339
|
+
}
|
|
340
|
+
|
|
341
|
+
function getServiceConfig(serviceName: string) {
|
|
342
|
+
const configs: Record<string, any> = {
|
|
343
|
+
postgres: {
|
|
344
|
+
service: {
|
|
345
|
+
image: "postgres:15-alpine",
|
|
346
|
+
environment: [
|
|
347
|
+
"POSTGRES_USER=${DB_USER}",
|
|
348
|
+
"POSTGRES_PASSWORD=${DB_PASSWORD}",
|
|
349
|
+
"POSTGRES_DB=${DB_NAME}",
|
|
350
|
+
],
|
|
351
|
+
volumes: ["postgres_data:/var/lib/postgresql/data"],
|
|
352
|
+
networks: ["web"],
|
|
353
|
+
},
|
|
354
|
+
volume: { name: "postgres_data" },
|
|
355
|
+
},
|
|
356
|
+
mysql: {
|
|
357
|
+
service: {
|
|
358
|
+
image: "mysql:8",
|
|
359
|
+
environment: [
|
|
360
|
+
"MYSQL_ROOT_PASSWORD=${DB_ROOT_PASSWORD}",
|
|
361
|
+
"MYSQL_DATABASE=${DB_NAME}",
|
|
362
|
+
"MYSQL_USER=${DB_USER}",
|
|
363
|
+
"MYSQL_PASSWORD=${DB_PASSWORD}",
|
|
364
|
+
],
|
|
365
|
+
volumes: ["mysql_data:/var/lib/mysql"],
|
|
366
|
+
networks: ["web"],
|
|
367
|
+
},
|
|
368
|
+
volume: { name: "mysql_data" },
|
|
369
|
+
},
|
|
370
|
+
redis: {
|
|
371
|
+
service: {
|
|
372
|
+
image: "redis:7-alpine",
|
|
373
|
+
volumes: ["redis_data:/data"],
|
|
374
|
+
networks: ["web"],
|
|
375
|
+
},
|
|
376
|
+
volume: { name: "redis_data" },
|
|
377
|
+
},
|
|
378
|
+
mongodb: {
|
|
379
|
+
service: {
|
|
380
|
+
image: "mongo:6",
|
|
381
|
+
environment: [
|
|
382
|
+
"MONGO_INITDB_ROOT_USERNAME=${DB_USER}",
|
|
383
|
+
"MONGO_INITDB_ROOT_PASSWORD=${DB_PASSWORD}",
|
|
384
|
+
],
|
|
385
|
+
volumes: ["mongo_data:/data/db"],
|
|
386
|
+
networks: ["web"],
|
|
387
|
+
},
|
|
388
|
+
volume: { name: "mongo_data" },
|
|
389
|
+
},
|
|
390
|
+
};
|
|
391
|
+
|
|
392
|
+
return configs[serviceName];
|
|
393
|
+
}
|
|
394
|
+
|
|
395
|
+
function getEnvVars(serviceName: string): string[] {
|
|
396
|
+
const vars: Record<string, string[]> = {
|
|
397
|
+
postgres: [
|
|
398
|
+
"DB_HOST=postgres",
|
|
399
|
+
"DB_PORT=5432",
|
|
400
|
+
"DB_USER=myuser",
|
|
401
|
+
"DB_PASSWORD=mypassword",
|
|
402
|
+
"DB_NAME=mydb",
|
|
403
|
+
],
|
|
404
|
+
mysql: [
|
|
405
|
+
"DB_HOST=mysql",
|
|
406
|
+
"DB_PORT=3306",
|
|
407
|
+
"DB_USER=myuser",
|
|
408
|
+
"DB_PASSWORD=mypassword",
|
|
409
|
+
"DB_NAME=mydb",
|
|
410
|
+
"DB_ROOT_PASSWORD=rootpassword",
|
|
411
|
+
],
|
|
412
|
+
redis: ["REDIS_HOST=redis", "REDIS_PORT=6379"],
|
|
413
|
+
mongodb: [
|
|
414
|
+
"MONGO_HOST=mongodb",
|
|
415
|
+
"MONGO_PORT=27017",
|
|
416
|
+
"MONGO_USER=myuser",
|
|
417
|
+
"MONGO_PASSWORD=mypassword",
|
|
418
|
+
],
|
|
419
|
+
};
|
|
420
|
+
|
|
421
|
+
return vars[serviceName] || [];
|
|
422
|
+
}
|
|
423
|
+
|
|
424
|
+
function getDefaultPort(service: string): number {
|
|
425
|
+
const ports: Record<string, number> = {
|
|
426
|
+
postgres: 5432,
|
|
427
|
+
mysql: 3306,
|
|
428
|
+
redis: 6379,
|
|
429
|
+
mongodb: 27017,
|
|
430
|
+
};
|
|
431
|
+
return ports[service] || 3000;
|
|
432
|
+
}
|
|
433
|
+
|
|
434
|
+
function printNextSteps(
|
|
435
|
+
projectName: string,
|
|
436
|
+
env: string,
|
|
437
|
+
services: DockerService[]
|
|
438
|
+
) {
|
|
439
|
+
console.log(`\n✅ Done! Next steps:\n`);
|
|
440
|
+
|
|
441
|
+
if (env === "prod") {
|
|
442
|
+
console.log(` # Edit .env with your settings`);
|
|
443
|
+
console.log(` docker-compose up -d`);
|
|
444
|
+
console.log(` # Access at https://localhost\n`);
|
|
445
|
+
} else {
|
|
446
|
+
console.log(` # Edit .env with your settings`);
|
|
447
|
+
if (services.some((s) => s.needsTunnel)) {
|
|
448
|
+
console.log(` # Add SSH keys: {service}.pem`);
|
|
449
|
+
}
|
|
450
|
+
console.log(` docker-compose -f docker-compose.dev.yml up -d\n`);
|
|
451
|
+
}
|
|
452
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Barrel file: re-export the docker command's public API (dockerize).
export * from "./docker";
|