@kithinji/pod 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. package/build.js +22 -0
  2. package/dist/main.js +4464 -0
  3. package/dist/main.js.map +7 -0
  4. package/dist/types/add/component/component.d.ts +5 -0
  5. package/dist/types/add/component/component.d.ts.map +1 -0
  6. package/dist/types/add/component/index.d.ts +2 -0
  7. package/dist/types/add/component/index.d.ts.map +1 -0
  8. package/dist/types/add/index.d.ts +4 -0
  9. package/dist/types/add/index.d.ts.map +1 -0
  10. package/dist/types/add/module/index.d.ts +2 -0
  11. package/dist/types/add/module/index.d.ts.map +1 -0
  12. package/dist/types/add/module/module.d.ts +3 -0
  13. package/dist/types/add/module/module.d.ts.map +1 -0
  14. package/dist/types/add/new/index.d.ts +2 -0
  15. package/dist/types/add/new/index.d.ts.map +1 -0
  16. package/dist/types/config/config.d.ts +18 -0
  17. package/dist/types/config/config.d.ts.map +1 -0
  18. package/dist/types/config/index.d.ts +2 -0
  19. package/dist/types/config/index.d.ts.map +1 -0
  20. package/dist/types/dev/index.d.ts +2 -0
  21. package/dist/types/dev/index.d.ts.map +1 -0
  22. package/dist/types/dev/project.d.ts +9 -0
  23. package/dist/types/dev/project.d.ts.map +1 -0
  24. package/dist/types/dev/server.d.ts +2 -0
  25. package/dist/types/dev/server.d.ts.map +1 -0
  26. package/dist/types/docker/docker.d.ts +2 -0
  27. package/dist/types/docker/docker.d.ts.map +1 -0
  28. package/dist/types/docker/index.d.ts +2 -0
  29. package/dist/types/docker/index.d.ts.map +1 -0
  30. package/dist/types/macros/expand_macros.d.ts +48 -0
  31. package/dist/types/macros/expand_macros.d.ts.map +1 -0
  32. package/dist/types/macros/index.d.ts +3 -0
  33. package/dist/types/macros/index.d.ts.map +1 -0
  34. package/dist/types/macros/macro_executer.d.ts +12 -0
  35. package/dist/types/macros/macro_executer.d.ts.map +1 -0
  36. package/dist/types/main.d.ts +13 -0
  37. package/dist/types/main.d.ts.map +1 -0
  38. package/dist/types/plugins/analyzers/graph.d.ts +25 -0
  39. package/dist/types/plugins/analyzers/graph.d.ts.map +1 -0
  40. package/dist/types/plugins/css/index.d.ts +7 -0
  41. package/dist/types/plugins/css/index.d.ts.map +1 -0
  42. package/dist/types/plugins/generators/generate_controller.d.ts +2 -0
  43. package/dist/types/plugins/generators/generate_controller.d.ts.map +1 -0
  44. package/dist/types/plugins/generators/generate_rsc.d.ts +2 -0
  45. package/dist/types/plugins/generators/generate_rsc.d.ts.map +1 -0
  46. package/dist/types/plugins/generators/generate_server_component.d.ts +2 -0
  47. package/dist/types/plugins/generators/generate_server_component.d.ts.map +1 -0
  48. package/dist/types/plugins/generators/tsx_server_stub.d.ts +2 -0
  49. package/dist/types/plugins/generators/tsx_server_stub.d.ts.map +1 -0
  50. package/dist/types/plugins/index.d.ts +4 -0
  51. package/dist/types/plugins/index.d.ts.map +1 -0
  52. package/dist/types/plugins/my.d.ts +10 -0
  53. package/dist/types/plugins/my.d.ts.map +1 -0
  54. package/dist/types/plugins/transformers/j2d.d.ts +11 -0
  55. package/dist/types/plugins/transformers/j2d.d.ts.map +1 -0
  56. package/dist/types/store/index.d.ts +2 -0
  57. package/dist/types/store/index.d.ts.map +1 -0
  58. package/dist/types/store/store.d.ts +14 -0
  59. package/dist/types/store/store.d.ts.map +1 -0
  60. package/dist/types/utils/cases.d.ts +4 -0
  61. package/dist/types/utils/cases.d.ts.map +1 -0
  62. package/dist/types/utils/create.d.ts +12 -0
  63. package/dist/types/utils/create.d.ts.map +1 -0
  64. package/dist/types/utils/index.d.ts +3 -0
  65. package/dist/types/utils/index.d.ts.map +1 -0
  66. package/package.json +44 -0
  67. package/src/add/component/component.ts +496 -0
  68. package/src/add/component/index.ts +1 -0
  69. package/src/add/index.ts +3 -0
  70. package/src/add/module/index.ts +1 -0
  71. package/src/add/module/module.ts +521 -0
  72. package/src/add/new/index.ts +135 -0
  73. package/src/config/config.ts +141 -0
  74. package/src/config/index.ts +1 -0
  75. package/src/dev/index.ts +1 -0
  76. package/src/dev/project.ts +45 -0
  77. package/src/dev/server.ts +190 -0
  78. package/src/docker/docker.ts +452 -0
  79. package/src/docker/index.ts +1 -0
  80. package/src/macros/expand_macros.ts +791 -0
  81. package/src/macros/index.ts +2 -0
  82. package/src/macros/macro_executer.ts +189 -0
  83. package/src/main.ts +95 -0
  84. package/src/plugins/analyzers/graph.ts +291 -0
  85. package/src/plugins/css/index.ts +25 -0
  86. package/src/plugins/generators/generate_controller.ts +308 -0
  87. package/src/plugins/generators/generate_rsc.ts +274 -0
  88. package/src/plugins/generators/generate_server_component.ts +279 -0
  89. package/src/plugins/generators/tsx_server_stub.ts +295 -0
  90. package/src/plugins/index.ts +3 -0
  91. package/src/plugins/my.ts +274 -0
  92. package/src/plugins/transformers/j2d.ts +1014 -0
  93. package/src/store/index.ts +1 -0
  94. package/src/store/store.ts +44 -0
  95. package/src/utils/cases.ts +15 -0
  96. package/src/utils/create.ts +26 -0
  97. package/src/utils/index.ts +2 -0
  98. package/tsconfig.json +27 -0
@@ -0,0 +1,452 @@
1
+ import fs from "fs-extra";
2
+ import path from "path";
3
+ import prompts from "prompts";
4
+ import yaml from "js-yaml";
5
+
6
/** Minimal shape of the fields this tool reads from the project's package.json. */
interface PackageJson {
  name: string;
  // Only runtime dependencies are inspected by detectServices();
  // devDependencies are ignored.
  dependencies?: Record<string, string>;
}

/** A backing service (postgres, mysql, redis, mongodb) to include in compose. */
interface DockerService {
  name: string;
  // Set in dev mode when this service should be reached through an SSH
  // tunnel sidecar instead of a local container.
  needsTunnel?: boolean;
}
15
+
16
+ export async function dockerize(env: "dev" | "prod" = "prod") {
17
+ const cwd = process.cwd();
18
+ const packageJsonPath = path.join(cwd, "package.json");
19
+
20
+ if (!fs.existsSync(packageJsonPath)) {
21
+ throw new Error("package.json not found. Are you in a Pod project?");
22
+ }
23
+
24
+ const packageJson: PackageJson = await fs.readJSON(packageJsonPath);
25
+ const projectName = packageJson.name;
26
+
27
+ const detectedServices = detectServices(packageJson);
28
+ const selectedServices = await selectServices(detectedServices);
29
+
30
+ await restructureProject(cwd, projectName);
31
+ await createDockerfile(cwd, projectName);
32
+
33
+ if (env === "prod") {
34
+ await setupProduction(cwd, projectName, selectedServices);
35
+ } else {
36
+ await setupDevelopment(cwd, projectName, selectedServices);
37
+ }
38
+
39
+ printNextSteps(projectName, env, selectedServices);
40
+ }
41
+
42
+ function detectServices(packageJson: PackageJson): DockerService[] {
43
+ const deps = packageJson.dependencies || {};
44
+ const services: DockerService[] = [];
45
+
46
+ if (deps.pg || deps.postgres) services.push({ name: "postgres" });
47
+ if (deps.mysql || deps.mysql2) services.push({ name: "mysql" });
48
+ if (deps.redis || deps.ioredis) services.push({ name: "redis" });
49
+ if (deps.mongodb || deps.mongoose) services.push({ name: "mongodb" });
50
+
51
+ return services;
52
+ }
53
+
54
+ async function selectServices(
55
+ detected: DockerService[]
56
+ ): Promise<DockerService[]> {
57
+ if (detected.length === 0) return [];
58
+
59
+ const response = await prompts({
60
+ type: "multiselect",
61
+ name: "services",
62
+ message: "Select services to include:",
63
+ choices: detected.map((s) => ({
64
+ title: s.name,
65
+ value: s.name,
66
+ selected: true,
67
+ })),
68
+ });
69
+
70
+ if (!response.services) return [];
71
+ return detected.filter((s) => response.services.includes(s.name));
72
+ }
73
+
74
+ async function restructureProject(cwd: string, projectName: string) {
75
+ const nestedDir = path.join(cwd, projectName);
76
+
77
+ if (fs.existsSync(nestedDir)) {
78
+ console.log("⚠️ Project already restructured, skipping...");
79
+ return;
80
+ }
81
+
82
+ await fs.ensureDir(nestedDir);
83
+
84
+ const items = await fs.readdir(cwd);
85
+ const toMove = items.filter((item) => item !== projectName);
86
+
87
+ for (const item of toMove) {
88
+ const src = path.join(cwd, item);
89
+ const dest = path.join(nestedDir, item);
90
+ await fs.move(src, dest, { overwrite: true });
91
+ }
92
+
93
+ const envSrc = path.join(nestedDir, ".env");
94
+ const envDest = path.join(cwd, ".env");
95
+
96
+ if (fs.existsSync(envSrc)) {
97
+ await fs.move(envSrc, envDest, { overwrite: true });
98
+ }
99
+ }
100
+
101
+ async function createDockerfile(cwd: string, projectName: string) {
102
+ const dockerfilePath = path.join(cwd, projectName, "Dockerfile");
103
+
104
+ const dockerfile = `FROM node:18-alpine
105
+
106
+ WORKDIR /app
107
+
108
+ COPY package*.json ./
109
+ RUN npm ci --only=production
110
+
111
+ COPY . .
112
+ RUN if [ -f "tsconfig.json" ]; then npm run build || true; fi
113
+
114
+ EXPOSE 3000
115
+ CMD ["npm", "start"]
116
+ `;
117
+
118
+ await fs.writeFile(dockerfilePath, dockerfile);
119
+ }
120
+
121
/**
 * Production setup: writes docker-compose.yml at the repository root with
 * a Traefik reverse proxy (HTTP/HTTPS entrypoints, dashboard on 8080,
 * Let's Encrypt TLS via the "myresolver" ACME resolver), the application
 * routed behind it on port 3000, and each selected backing service with
 * its named volume. Finishes by writing an .env.example template.
 */
async function setupProduction(
  cwd: string,
  projectName: string,
  services: DockerService[]
) {
  // NOTE(review): the ACME e-mail and the Host(`localhost`) router rule
  // are hard-coded placeholders — presumably the user is expected to edit
  // them before deploying; confirm whether they should be prompted for.
  const compose: any = {
    services: {
      traefik: {
        image: "traefik:v2.10",
        command: [
          "--api.insecure=true",
          "--providers.docker=true",
          "--providers.docker.exposedbydefault=false",
          "--entrypoints.web.address=:80",
          "--entrypoints.websecure.address=:443",
          "--certificatesresolvers.myresolver.acme.tlschallenge=true",
          "--certificatesresolvers.myresolver.acme.email=admin@example.com",
          "--certificatesresolvers.myresolver.acme.storage=/letsencrypt/acme.json",
        ],
        ports: ["80:80", "443:443", "8080:8080"],
        volumes: [
          "/var/run/docker.sock:/var/run/docker.sock:ro",
          "./letsencrypt:/letsencrypt",
        ],
        networks: ["web"],
      },
      // The application itself, exposed only through Traefik.
      [projectName]: {
        build: ".",
        labels: [
          "traefik.enable=true",
          "traefik.http.routers.app.rule=Host(`localhost`)",
          "traefik.http.routers.app.entrypoints=websecure",
          "traefik.http.routers.app.tls.certresolver=myresolver",
          "traefik.http.services.app.loadbalancer.server.port=3000",
        ],
        env_file: [".env"],
        depends_on: [],
        networks: ["web"],
      },
    },
    networks: {
      web: {
        driver: "bridge",
      },
    },
    volumes: {},
  };

  // Attach each selected service plus its named volume, and make the app
  // wait for it via depends_on.
  for (const service of services) {
    const config = getServiceConfig(service.name);
    compose.services[service.name] = config.service;
    if (config.volume) {
      compose.volumes[config.volume.name] = {};
    }
    compose.services[projectName].depends_on.push(service.name);
  }

  const composePath = path.join(cwd, "docker-compose.yml");
  await fs.writeFile(
    composePath,
    yaml.dump(compose, { indent: 2, lineWidth: -1 })
  );
  await createEnvTemplate(cwd, services, "prod");
}
185
+
186
/**
 * Development setup: writes docker-compose.dev.yml with the app bind-
 * mounted from the host (live reload via `npm run dev`) and, optionally,
 * an SSH-tunnel sidecar container for each database service found in an
 * existing production docker-compose.yml.
 */
async function setupDevelopment(
  cwd: string,
  projectName: string,
  services: DockerService[]
) {
  const existingCompose = path.join(cwd, "docker-compose.yml");

  let existingServices: DockerService[] = [];

  // Scan the production compose file (if any) for known database services
  // the developer may want to tunnel to instead of running locally.
  if (fs.existsSync(existingCompose)) {
    const content = await fs.readFile(existingCompose, "utf8");
    const existing: any = yaml.load(content);
    if (existing.services) {
      existingServices = Object.keys(existing.services)
        .filter((s) => ["postgres", "mysql", "redis", "mongodb"].includes(s))
        .map((name) => ({ name }));
    }
  }

  const servicesToTunnel: DockerService[] = [];

  if (existingServices.length > 0) {
    const { tunnel } = await prompts({
      type: "confirm",
      name: "tunnel",
      message: "Tunnel to remote database services?",
      initial: false,
    });

    if (tunnel) {
      const { selected } = await prompts({
        type: "multiselect",
        name: "selected",
        message: "Select services to tunnel:",
        choices: existingServices.map((s) => ({
          title: s.name,
          value: s.name,
        })),
      });

      // selected is undefined when the prompt is aborted.
      if (selected) {
        servicesToTunnel.push(
          ...existingServices
            .filter((s) => selected.includes(s.name))
            .map((s) => ({ ...s, needsTunnel: true }))
        );
      }
    }
  }

  // Scaffold a Dockerfile + tunnel.sh sidecar for each tunneled service.
  for (const service of servicesToTunnel) {
    await createTunnelService(cwd, service.name);
  }

  const compose: any = {
    services: {
      [projectName]: {
        build: ".",
        ports: ["3000:3000"],
        env_file: [".env"],
        // Bind-mount the source for live reload; the anonymous
        // /app/node_modules volume shields the container's installed
        // modules from the host mount.
        volumes: [".:/app", "/app/node_modules"],
        command: "npm run dev",
        depends_on: [],
      },
    },
    networks: {
      default: {
        driver: "bridge",
      },
    },
  };

  // Wire each tunnel sidecar into the compose file. The sidecar reads its
  // remote endpoint from <SERVICE>_REMOTE_HOST / <SERVICE>_REMOTE_PORT
  // (compose substitutes these from the root .env) and exposes the service
  // on its default local port.
  for (const service of servicesToTunnel) {
    const tunnelName = `${service.name}-tunnel`;
    compose.services[tunnelName] = {
      build: `./${tunnelName}`,
      environment: [
        `REMOTE_HOST=\${${service.name.toUpperCase()}_REMOTE_HOST}`,
        `REMOTE_PORT=\${${service.name.toUpperCase()}_REMOTE_PORT:-${getDefaultPort(
          service.name
        )}}`,
        `LOCAL_PORT=${getDefaultPort(service.name)}`,
      ],
      volumes: [`./${service.name}.pem:/ssh/${service.name}.pem:ro`],
    };
    compose.services[projectName].depends_on.push(tunnelName);
  }

  const devComposePath = path.join(cwd, "docker-compose.dev.yml");
  await fs.writeFile(
    devComposePath,
    yaml.dump(compose, { indent: 2, lineWidth: -1 })
  );
  // NOTE(review): servicesToTunnel (with needsTunnel set) never reaches
  // the caller, so printNextSteps' "Add SSH keys" hint can never trigger —
  // confirm whether this function should return servicesToTunnel.
  await createEnvTemplate(cwd, services, "dev");
}
281
+
282
/**
 * Scaffolds an SSH-tunnel sidecar for one service: a `<service>-tunnel/`
 * directory containing an Alpine Dockerfile (openssh-client only) and a
 * tunnel.sh that forwards the service's default port from the remote host
 * to 0.0.0.0 inside the container.
 */
async function createTunnelService(projectDir: string, serviceName: string) {
  const tunnelDir = path.join(projectDir, `${serviceName}-tunnel`);
  await fs.ensureDir(tunnelDir);

  const dockerfile = `FROM alpine:latest

RUN apk add --no-cache openssh-client

COPY tunnel.sh /tunnel.sh
RUN chmod +x /tunnel.sh

CMD ["/tunnel.sh"]
`;

  // The key is mounted read-only by compose at /ssh/<service>.pem.
  // NOTE(review): StrictHostKeyChecking=no disables host-key verification —
  // presumably acceptable for dev convenience, but worth confirming.
  const tunnelScript = `#!/bin/sh

SSH_KEY="/ssh/${serviceName}.pem"
REMOTE_HOST=\${REMOTE_HOST}
REMOTE_PORT=\${REMOTE_PORT:-${getDefaultPort(serviceName)}}
LOCAL_PORT=\${LOCAL_PORT:-${getDefaultPort(serviceName)}}

chmod 600 $SSH_KEY

echo "Starting SSH tunnel for ${serviceName}..."
echo "Remote: $REMOTE_HOST:$REMOTE_PORT -> Local: $LOCAL_PORT"

ssh -i $SSH_KEY \\
    -N -L 0.0.0.0:$LOCAL_PORT:localhost:$REMOTE_PORT \\
    -o StrictHostKeyChecking=no \\
    -o ServerAliveInterval=60 \\
    $REMOTE_HOST
`;

  await fs.writeFile(path.join(tunnelDir, "Dockerfile"), dockerfile);
  await fs.writeFile(path.join(tunnelDir, "tunnel.sh"), tunnelScript);
}
318
+
319
+ async function createEnvTemplate(
320
+ projectDir: string,
321
+ services: DockerService[],
322
+ env: "dev" | "prod"
323
+ ) {
324
+ const envPath = path.join(projectDir, ".env.example");
325
+
326
+ let content = `NODE_ENV=${
327
+ env === "prod" ? "production" : "development"
328
+ }\nPORT=3000\n`;
329
+
330
+ if (services.length > 0) {
331
+ content += `\n`;
332
+ for (const service of services) {
333
+ const vars = getEnvVars(service.name);
334
+ content += vars.join("\n") + "\n\n";
335
+ }
336
+ }
337
+
338
+ await fs.writeFile(envPath, content);
339
+ }
340
+
341
+ function getServiceConfig(serviceName: string) {
342
+ const configs: Record<string, any> = {
343
+ postgres: {
344
+ service: {
345
+ image: "postgres:15-alpine",
346
+ environment: [
347
+ "POSTGRES_USER=${DB_USER}",
348
+ "POSTGRES_PASSWORD=${DB_PASSWORD}",
349
+ "POSTGRES_DB=${DB_NAME}",
350
+ ],
351
+ volumes: ["postgres_data:/var/lib/postgresql/data"],
352
+ networks: ["web"],
353
+ },
354
+ volume: { name: "postgres_data" },
355
+ },
356
+ mysql: {
357
+ service: {
358
+ image: "mysql:8",
359
+ environment: [
360
+ "MYSQL_ROOT_PASSWORD=${DB_ROOT_PASSWORD}",
361
+ "MYSQL_DATABASE=${DB_NAME}",
362
+ "MYSQL_USER=${DB_USER}",
363
+ "MYSQL_PASSWORD=${DB_PASSWORD}",
364
+ ],
365
+ volumes: ["mysql_data:/var/lib/mysql"],
366
+ networks: ["web"],
367
+ },
368
+ volume: { name: "mysql_data" },
369
+ },
370
+ redis: {
371
+ service: {
372
+ image: "redis:7-alpine",
373
+ volumes: ["redis_data:/data"],
374
+ networks: ["web"],
375
+ },
376
+ volume: { name: "redis_data" },
377
+ },
378
+ mongodb: {
379
+ service: {
380
+ image: "mongo:6",
381
+ environment: [
382
+ "MONGO_INITDB_ROOT_USERNAME=${DB_USER}",
383
+ "MONGO_INITDB_ROOT_PASSWORD=${DB_PASSWORD}",
384
+ ],
385
+ volumes: ["mongo_data:/data/db"],
386
+ networks: ["web"],
387
+ },
388
+ volume: { name: "mongo_data" },
389
+ },
390
+ };
391
+
392
+ return configs[serviceName];
393
+ }
394
+
395
+ function getEnvVars(serviceName: string): string[] {
396
+ const vars: Record<string, string[]> = {
397
+ postgres: [
398
+ "DB_HOST=postgres",
399
+ "DB_PORT=5432",
400
+ "DB_USER=myuser",
401
+ "DB_PASSWORD=mypassword",
402
+ "DB_NAME=mydb",
403
+ ],
404
+ mysql: [
405
+ "DB_HOST=mysql",
406
+ "DB_PORT=3306",
407
+ "DB_USER=myuser",
408
+ "DB_PASSWORD=mypassword",
409
+ "DB_NAME=mydb",
410
+ "DB_ROOT_PASSWORD=rootpassword",
411
+ ],
412
+ redis: ["REDIS_HOST=redis", "REDIS_PORT=6379"],
413
+ mongodb: [
414
+ "MONGO_HOST=mongodb",
415
+ "MONGO_PORT=27017",
416
+ "MONGO_USER=myuser",
417
+ "MONGO_PASSWORD=mypassword",
418
+ ],
419
+ };
420
+
421
+ return vars[serviceName] || [];
422
+ }
423
+
424
+ function getDefaultPort(service: string): number {
425
+ const ports: Record<string, number> = {
426
+ postgres: 5432,
427
+ mysql: 3306,
428
+ redis: 6379,
429
+ mongodb: 27017,
430
+ };
431
+ return ports[service] || 3000;
432
+ }
433
+
434
+ function printNextSteps(
435
+ projectName: string,
436
+ env: string,
437
+ services: DockerService[]
438
+ ) {
439
+ console.log(`\n✅ Done! Next steps:\n`);
440
+
441
+ if (env === "prod") {
442
+ console.log(` # Edit .env with your settings`);
443
+ console.log(` docker-compose up -d`);
444
+ console.log(` # Access at https://localhost\n`);
445
+ } else {
446
+ console.log(` # Edit .env with your settings`);
447
+ if (services.some((s) => s.needsTunnel)) {
448
+ console.log(` # Add SSH keys: {service}.pem`);
449
+ }
450
+ console.log(` docker-compose -f docker-compose.dev.yml up -d\n`);
451
+ }
452
+ }
@@ -0,0 +1 @@
1
// Re-export the docker module's public API (dockerize) as the package entry.
export * from "./docker";