@kithinji/pod 1.0.21 → 1.0.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/README.md +439 -0
  2. package/dist/main.js +5 -5
  3. package/dist/main.js.map +2 -2
  4. package/package.json +31 -8
  5. package/build.js +0 -22
  6. package/src/add/component/component.ts +0 -496
  7. package/src/add/component/index.ts +0 -1
  8. package/src/add/index.ts +0 -3
  9. package/src/add/module/index.ts +0 -1
  10. package/src/add/module/module.ts +0 -545
  11. package/src/add/new/index.ts +0 -198
  12. package/src/config/config.ts +0 -141
  13. package/src/config/index.ts +0 -1
  14. package/src/deploy/deploy.ts +0 -592
  15. package/src/deploy/index.ts +0 -1
  16. package/src/dev/index.ts +0 -1
  17. package/src/dev/project.ts +0 -45
  18. package/src/dev/server.ts +0 -191
  19. package/src/docker/docker.ts +0 -697
  20. package/src/docker/index.ts +0 -1
  21. package/src/macros/expand_macros.ts +0 -791
  22. package/src/macros/index.ts +0 -2
  23. package/src/macros/macro_executer.ts +0 -189
  24. package/src/main.ts +0 -106
  25. package/src/plugins/analyzers/graph.ts +0 -279
  26. package/src/plugins/css/index.ts +0 -25
  27. package/src/plugins/generators/generate_controller.ts +0 -308
  28. package/src/plugins/generators/generate_rsc.ts +0 -274
  29. package/src/plugins/generators/generate_server_component.ts +0 -455
  30. package/src/plugins/generators/tsx_server_stub.ts +0 -315
  31. package/src/plugins/index.ts +0 -3
  32. package/src/plugins/my.ts +0 -282
  33. package/src/plugins/transformers/j2d.ts +0 -1080
  34. package/src/store/index.ts +0 -1
  35. package/src/store/store.ts +0 -44
  36. package/src/utils/cases.ts +0 -15
  37. package/src/utils/create.ts +0 -26
  38. package/src/utils/index.ts +0 -2
  39. package/tsconfig.json +0 -27
@@ -1,697 +0,0 @@
1
- import fs from "fs-extra";
2
- import path from "path";
3
- import prompts from "prompts";
4
- import yaml from "js-yaml";
5
-
6
/** Minimal shape of the fields this tool reads from the project's package.json. */
interface PackageJson {
  name: string;
  dependencies?: Record<string, string>;
}

/** A backing service (postgres, mysql, redis, mongodb) to wire into compose. */
interface DockerService {
  name: string;
  // Set (on copies made in setupDevelopment) when the service should be
  // reached through an SSH tunnel sidecar instead of a local container.
  needsTunnel?: boolean;
}
15
-
16
/**
 * Entry point: dockerizes the current Pod project.
 *
 * Reads package.json, detects and confirms backing services, moves the app
 * into a nested directory named after the project, then writes a Dockerfile,
 * a docker-compose file (prod or dev variant), pod.deploy.yml, and any
 * missing .env variables, and finally prints follow-up instructions.
 *
 * @param env "prod" (default) emits a Traefik-fronted docker-compose.yml;
 *            "dev" emits docker-compose.dev.yml with optional SSH tunnels.
 * @throws Error when no package.json exists in the working directory.
 */
export async function dockerize(env: "dev" | "prod" = "prod") {
  const cwd = process.cwd();
  const packageJsonPath = path.join(cwd, "package.json");

  if (!fs.existsSync(packageJsonPath)) {
    throw new Error("package.json not found. Are you in a Pod project?");
  }

  const packageJson: PackageJson = await fs.readJSON(packageJsonPath);
  const projectName = packageJson.name;

  // Offer only the services implied by the project's dependencies.
  const detectedServices = detectServices(packageJson);
  const selectedServices = await selectServices(detectedServices);

  await restructureProject(cwd, projectName);
  await createDockerfile(cwd, projectName);

  if (env === "prod") {
    await setupProduction(cwd, projectName, selectedServices);
  } else {
    await setupDevelopment(cwd, projectName, selectedServices);
  }

  await createDeployfile(cwd, projectName);

  // NOTE(review): in dev mode, tunnel selection happens inside
  // setupDevelopment on local copies and is never reflected back into
  // selectedServices, so writeEnvVars/printNextSteps never observe
  // needsTunnel — confirm whether that is intended.
  await writeEnvVars(cwd, selectedServices, env);

  printNextSteps(projectName, env, selectedServices);
}
45
-
46
- function detectServices(packageJson: PackageJson): DockerService[] {
47
- const deps = packageJson.dependencies || {};
48
- const services: DockerService[] = [];
49
-
50
- if (deps.pg || deps.postgres) services.push({ name: "postgres" });
51
- if (deps.mysql || deps.mysql2) services.push({ name: "mysql" });
52
- if (deps.redis || deps.ioredis) services.push({ name: "redis" });
53
- if (deps.mongodb || deps.mongoose) services.push({ name: "mongodb" });
54
-
55
- return services;
56
- }
57
-
58
- async function selectServices(
59
- detected: DockerService[]
60
- ): Promise<DockerService[]> {
61
- if (detected.length === 0) return [];
62
-
63
- const response = await prompts({
64
- type: "multiselect",
65
- name: "services",
66
- message: "Select services to include:",
67
- choices: detected.map((s) => ({
68
- title: s.name,
69
- value: s.name,
70
- selected: true,
71
- })),
72
- });
73
-
74
- if (!response.services || response.services.length === 0) return [];
75
- return detected.filter((s) => response.services.includes(s.name));
76
- }
77
-
78
/**
 * Moves everything in `cwd` into a nested `<projectName>/` directory so the
 * compose files can live at the repo root, then hoists `.env` back up to the
 * root (the generated compose files read it from there via `env_file`).
 *
 * Idempotent: if `<projectName>/` already exists the restructure is skipped.
 */
async function restructureProject(cwd: string, projectName: string) {
  const nestedDir = path.join(cwd, projectName);

  if (fs.existsSync(nestedDir)) {
    console.log("⚠️ Project already restructured, skipping...");
    return;
  }

  await fs.ensureDir(nestedDir);

  // Move every top-level entry except the freshly created nested dir itself.
  // NOTE(review): this also moves node_modules, .git, etc. — confirm intended.
  const items = await fs.readdir(cwd);
  const toMove = items.filter((item) => item !== projectName);

  for (const item of toMove) {
    const src = path.join(cwd, item);
    const dest = path.join(nestedDir, item);
    await fs.move(src, dest, { overwrite: true });
  }

  // .env must end up at the root, next to the compose files.
  const envSrc = path.join(nestedDir, ".env");
  const envDest = path.join(cwd, ".env");

  if (fs.existsSync(envSrc)) {
    await fs.move(envSrc, envDest, { overwrite: true });
  }
}
104
-
105
/**
 * Appends any missing environment variables to the root .env file.
 *
 * Existing keys are never overwritten. Adds HOST=example.com in prod,
 * per-service placeholder defaults from getEnvVars(), and — in dev, for
 * services flagged needsTunnel — <SERVICE>_REMOTE_HOST/_REMOTE_PORT entries.
 *
 * @param cwd      project root containing (or receiving) the .env file
 * @param services services whose default variables should be ensured
 * @param env      "prod" or "dev"; controls HOST and tunnel variables
 */
async function writeEnvVars(
  cwd: string,
  services: DockerService[],
  env: string
) {
  const envPath = path.join(cwd, ".env");
  let existingEnv: Record<string, string> = {};
  let existingContent = "";

  if (fs.existsSync(envPath)) {
    existingContent = await fs.readFile(envPath, "utf8");
    existingEnv = parseEnvFile(existingContent);
  }

  const newVars: string[] = [];

  if (env === "prod" && !existingEnv.HOST) {
    newVars.push("HOST=example.com");
  }

  for (const service of services) {
    const serviceVars = getEnvVars(service.name);
    for (const varLine of serviceVars) {
      const [key] = varLine.split("=");
      if (!existingEnv[key]) {
        newVars.push(varLine);
      }
    }

    // NOTE(review): needsTunnel is only ever set on copies made inside
    // setupDevelopment, not on the array passed to this function, so this
    // branch appears unreachable from dockerize() — confirm.
    if (env === "dev" && service.needsTunnel) {
      const remoteHostKey = `${service.name.toUpperCase()}_REMOTE_HOST`;
      const remotePortKey = `${service.name.toUpperCase()}_REMOTE_PORT`;

      if (!existingEnv[remoteHostKey]) {
        newVars.push(`${remoteHostKey}=user@remote-server.com`);
      }
      if (!existingEnv[remotePortKey]) {
        newVars.push(`${remotePortKey}=${getDefaultPort(service.name)}`);
      }
    }
  }

  if (newVars.length > 0) {
    // Separate the appended block from existing content with a trailing
    // newline (if missing) plus one blank line.
    const separator =
      existingContent && !existingContent.endsWith("\n") ? "\n" : "";
    const newContent =
      existingContent +
      separator +
      (existingContent ? "\n" : "") +
      newVars.join("\n") +
      "\n";
    await fs.writeFile(envPath, newContent);
    console.log(
      `✅ Added ${newVars.length} new environment variable(s) to .env`
    );
  } else {
    console.log("✅ All required environment variables already exist in .env");
  }
}
164
-
165
- function parseEnvFile(content: string): Record<string, string> {
166
- const env: Record<string, string> = {};
167
- const lines = content.split("\n");
168
-
169
- for (const line of lines) {
170
- const trimmed = line.trim();
171
-
172
- if (!trimmed || trimmed.startsWith("#")) continue;
173
-
174
- const equalIndex = trimmed.indexOf("=");
175
- if (equalIndex > 0) {
176
- const key = trimmed.substring(0, equalIndex).trim();
177
- const value = trimmed.substring(equalIndex + 1).trim();
178
- env[key] = value;
179
- }
180
- }
181
-
182
- return env;
183
- }
184
-
185
/**
 * Writes the app's Dockerfile and .dockerignore inside the nested
 * `<projectName>/` directory.
 *
 * NOTE(review): the image runs `npm run dev`, and setupProduction builds
 * from this same Dockerfile — confirm a dev command in the production image
 * is intended.
 */
async function createDockerfile(cwd: string, projectName: string) {
  const dockerfilePath = path.join(cwd, projectName, "Dockerfile");
  const dockerignorePath = path.join(cwd, projectName, ".dockerignore");

  const dockerfile = `FROM node:18-alpine

WORKDIR /app

COPY package*.json ./

RUN npm install

COPY . .

EXPOSE 8080

CMD ["npm", "run", "dev"]
`;

  const dockerignore = `# Dependencies
node_modules
npm-debug.log
yarn-error.log
package-lock.json
yarn.lock

# Environment files
.env
.env.*

# Git
.git
.gitignore

# IDE
.vscode
.idea
*.swp
*.swo
*~

# OS
.DS_Store
Thumbs.db

# Testing
coverage
.nyc_output
*.test.js
*.spec.js
__tests__

# Build files
dist
public

# Logs
logs
*.log

# Documentation
README.md
docs
*.md

# Docker
Dockerfile
.dockerignore
docker-compose*.yml

# Misc
.cache
tmp
temp
`;

  await fs.writeFile(dockerfilePath, dockerfile);
  await fs.writeFile(dockerignorePath, dockerignore);
}
264
-
265
/**
 * Writes pod.deploy.yml at the repo root: a deploy recipe targeting an EC2
 * host (swap setup, docker install, rsync of the tree, compose up, log and
 * backup housekeeping). Host/key values are placeholders the user edits.
 *
 * Escaped \${...} sequences reach the YAML literally, for the deploy tool
 * to expand at deploy time; only ${projectName} is interpolated here.
 *
 * NOTE(review): "${ubuntu}" in the two /home/... paths looks like it should
 * be "${user}" (the variable defined under targets.ec2) — confirm against
 * the deploy tool's variable names.
 */
async function createDeployfile(cwd: string, projectName: string) {
  const deployFile = `name: ${projectName}
version: 1.0.0

targets:
  ec2:
    host: ec2-xx-xx-xxx-xxx.xx-xxxx-x.compute.amazonaws.com
    user: ubuntu
    keyPath: ~/xxxx.pem
    port: 22
    deployPath: /home/\${ubuntu}/app

operations:
  - name: "Setup swap space"
    type: ensure
    ensure:
      swap:
        size: 4G

  - name: "Install Docker"
    type: ensure
    ensure:
      docker:
        version: "28.5.2"
        addUserToGroup: true

  - name: "Create application directories"
    type: ensure
    ensure:
      directory:
        path: \${deployPath}
        owner: \${user}

  - name: "Create backup directory"
    type: ensure
    ensure:
      directory:
        path: /home/\${ubuntu}/backups
        owner: \${user}

  - name: "Stop running containers"
    type: action
    action:
      command: cd \${deployPath} && docker compose down 2>/dev/null || true

  - name: "Sync application files"
    type: action
    action:
      rsync:
        source: ./
        destination: \${deployPath}/
        exclude:
          - node_modules/
          - .git/
          - "*.log"
          - .env.local
          - dist/
          - public/

  - name: "Pull Docker images"
    type: action
    action:
      command: cd \${deployPath} && docker compose pull

  - name: "Build and start containers"
    type: action
    action:
      command: cd \${deployPath} && docker compose up -d --build --remove-orphans

  - name: "Wait for services to start"
    type: action
    action:
      command: sleep 10

  - name: "Show container status"
    type: action
    action:
      command: cd \${deployPath} && docker compose ps

  - name: "Show recent logs"
    type: action
    action:
      command: cd \${deployPath} && docker compose logs --tail=30

  - name: "Cleanup old backups"
    type: action
    action:
      command: find /home/\${ubuntu}/backups -name "backup-*.tar.gz" -mtime +7 -delete

  - name: "Cleanup Docker resources"
    type: action
    action:
      command: docker system prune -f --volumes

  - name: "Verify containers are running"
    type: verify
    verify:
      command: cd \${deployPath} && docker compose ps | grep -q "Up"
`;

  const deployFilePath = path.join(cwd, "pod.deploy.yml");

  await fs.writeFile(deployFilePath, deployFile);
}
369
-
370
/**
 * Writes the production docker-compose.yml: a Traefik v2 reverse proxy
 * (HTTP→HTTPS redirect, Let's Encrypt TLS, dashboard at traefik.<HOST>)
 * plus the app container and each selected backing service.
 *
 * "${HOST}" placeholders are written literally into the YAML; presumably
 * docker compose interpolates them from .env at runtime — verify.
 *
 * NOTE(review): the app container bind-mounts the source tree and runs
 * "npm run dev" even in production — confirm intended.
 */
async function setupProduction(
  cwd: string,
  projectName: string,
  services: DockerService[]
) {
  const compose: any = {
    services: {
      traefik: {
        image: "traefik:v2.10",
        command: [
          "--api.insecure=true",
          "--providers.docker=true",
          "--providers.docker.exposedbydefault=false",
          "--entrypoints.web.address=:80",
          "--entrypoints.websecure.address=:443",
          "--certificatesresolvers.myresolver.acme.tlschallenge=true",
          "--certificatesresolvers.myresolver.acme.email=admin@example.com",
          "--certificatesresolvers.myresolver.acme.storage=/letsencrypt/acme.json",
        ],
        labels: [
          "traefik.enable=true",
          "traefik.http.routers.http-catchall.rule=HostRegexp(`{host:.+}`)",
          "traefik.http.routers.http-catchall.entrypoints=web",
          "traefik.http.routers.http-catchall.middlewares=redirect-to-https",
          "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https",
          "traefik.http.routers.dashboard.rule=Host(`traefik.${HOST}`)",
          "traefik.http.routers.dashboard.entrypoints=websecure",
          "traefik.http.routers.dashboard.tls.certresolver=myresolver",
          "traefik.http.routers.dashboard.service=api@internal",
        ],
        ports: ["80:80", "443:443"],
        volumes: [
          "/var/run/docker.sock:/var/run/docker.sock:ro",
          "./letsencrypt:/letsencrypt",
        ],
        networks: ["web"],
        env_file: [".env"],
      },
      [projectName]: {
        build: {
          context: `./${projectName}`,
          dockerfile: "Dockerfile",
        },
        labels: [
          "traefik.enable=true",
          "traefik.http.routers.app.rule=Host(`app.${HOST}`)",
          "traefik.http.routers.app.entrypoints=websecure",
          "traefik.http.routers.app.tls.certresolver=myresolver",
          "traefik.http.services.app.loadbalancer.server.port=8080",
        ],
        env_file: [".env"],
        networks: ["web"],
        volumes: [`./${projectName}:/app`, `/app/node_modules`],
        command: "npm run dev",
        depends_on: [],
      },
    },
    networks: {
      web: {
        driver: "bridge",
      },
    },
    volumes: {},
  };

  // Append each selected backing service plus its named volume, and make
  // the app container depend on it.
  for (const service of services) {
    const config = getServiceConfig(service.name);
    compose.services[service.name] = config.service;
    if (config.volume) {
      compose.volumes[config.volume.name] = {};
    }
    compose.services[projectName].depends_on.push(service.name);
  }

  const composePath = path.join(cwd, "docker-compose.yml");
  await fs.writeFile(
    composePath,
    yaml.dump(compose, { indent: 2, lineWidth: -1 })
  );
}
450
-
451
/**
 * Writes docker-compose.dev.yml: the app container plus, optionally, one
 * SSH-tunnel sidecar per database service already present in an existing
 * docker-compose.yml.
 *
 * The `services` parameter is currently unused here — tunnel candidates are
 * re-derived from docker-compose.yml instead.
 *
 * NOTE(review): tunnel selections (needsTunnel) stay local to this function
 * and are never propagated to the caller, so writeEnvVars never emits the
 * matching *_REMOTE_* variables — confirm whether that is intended.
 */
async function setupDevelopment(
  cwd: string,
  projectName: string,
  services: DockerService[]
) {
  const existingCompose = path.join(cwd, "docker-compose.yml");

  let existingServices: DockerService[] = [];

  // Tunnels are only offered for db services found in the prod compose file.
  if (fs.existsSync(existingCompose)) {
    const content = await fs.readFile(existingCompose, "utf8");
    const existing: any = yaml.load(content);
    if (existing.services) {
      existingServices = Object.keys(existing.services)
        .filter((s) => ["postgres", "mysql", "redis", "mongodb"].includes(s))
        .map((name) => ({ name }));
    }
  }

  const servicesToTunnel: DockerService[] = [];

  if (existingServices.length > 0) {
    const { tunnel } = await prompts({
      type: "confirm",
      name: "tunnel",
      message: "Tunnel to remote database services?",
      initial: false,
    });

    if (tunnel) {
      const { selected } = await prompts({
        type: "multiselect",
        name: "selected",
        message: "Select services to tunnel:",
        choices: existingServices.map((s) => ({
          title: s.name,
          value: s.name,
        })),
      });

      if (selected && selected.length > 0) {
        servicesToTunnel.push(
          ...existingServices
            .filter((s) => selected.includes(s.name))
            .map((s) => ({ ...s, needsTunnel: true }))
        );
      }
    }
  }

  // Generate each tunnel sidecar's build context on disk.
  for (const service of servicesToTunnel) {
    await createTunnelService(cwd, service.name);
  }

  const compose: any = {
    services: {
      [projectName]: {
        build: {
          context: `./${projectName}`,
          dockerfile: "Dockerfile",
        },
        ports: ["8080:8080"],
        env_file: [".env"],
        volumes: [`./${projectName}:/app`, `/app/node_modules`],
        command: "npm run dev",
        depends_on: [],
      },
    },
    networks: {
      default: {
        driver: "bridge",
      },
    },
  };

  // Wire each tunnel sidecar into the compose file. Escaped \${...} is
  // written literally so docker compose resolves it from .env.
  for (const service of servicesToTunnel) {
    const tunnelName = `${service.name}-tunnel`;
    compose.services[tunnelName] = {
      build: `./${tunnelName}`,
      environment: [
        `REMOTE_HOST=\${${service.name.toUpperCase()}_REMOTE_HOST}`,
        `REMOTE_PORT=\${${service.name.toUpperCase()}_REMOTE_PORT:-${getDefaultPort(
          service.name
        )}}`,
        `LOCAL_PORT=${getDefaultPort(service.name)}`,
      ],
      volumes: [`./${service.name}.pem:/ssh/${service.name}.pem:ro`],
    };
    compose.services[projectName].depends_on.push(tunnelName);
  }

  const devComposePath = path.join(cwd, "docker-compose.dev.yml");
  await fs.writeFile(
    devComposePath,
    yaml.dump(compose, { indent: 2, lineWidth: -1 })
  );
}
548
-
549
/**
 * Generates a `<service>-tunnel/` sidecar build context: an Alpine image
 * with openssh-client that forwards LOCAL_PORT on 0.0.0.0 to the remote
 * service over SSH. Expects the user to place `<service>.pem` next to the
 * compose file (mounted at /ssh/<service>.pem by setupDevelopment).
 */
async function createTunnelService(projectDir: string, serviceName: string) {
  const tunnelDir = path.join(projectDir, `${serviceName}-tunnel`);
  await fs.ensureDir(tunnelDir);

  const dockerfile = `FROM alpine:latest

RUN apk add --no-cache openssh-client

COPY tunnel.sh /tunnel.sh
RUN chmod +x /tunnel.sh

CMD ["/tunnel.sh"]
`;

  // Escaped \${...} reaches the script literally for the shell to expand;
  // ${serviceName} and the default ports are baked in at generation time.
  const tunnelScript = `#!/bin/sh

SSH_KEY="/ssh/${serviceName}.pem"
REMOTE_HOST=\${REMOTE_HOST}
REMOTE_PORT=\${REMOTE_PORT:-${getDefaultPort(serviceName)}}
LOCAL_PORT=\${LOCAL_PORT:-${getDefaultPort(serviceName)}}

chmod 600 $SSH_KEY

echo "Starting SSH tunnel for ${serviceName}..."
echo "Remote: $REMOTE_HOST:$REMOTE_PORT -> Local: $LOCAL_PORT"

ssh -i $SSH_KEY \\
  -N -L 0.0.0.0:$LOCAL_PORT:localhost:$REMOTE_PORT \\
  -o StrictHostKeyChecking=no \\
  -o ServerAliveInterval=60 \\
  $REMOTE_HOST
`;

  await fs.writeFile(path.join(tunnelDir, "Dockerfile"), dockerfile);
  await fs.writeFile(path.join(tunnelDir, "tunnel.sh"), tunnelScript);
}
585
-
586
- function getServiceConfig(serviceName: string) {
587
- const configs: Record<string, any> = {
588
- postgres: {
589
- service: {
590
- image: "postgres:15-alpine",
591
- environment: [
592
- "POSTGRES_USER=${DB_USER}",
593
- "POSTGRES_PASSWORD=${DB_PASSWORD}",
594
- "POSTGRES_DB=${DB_NAME}",
595
- ],
596
- volumes: ["postgres_data:/var/lib/postgresql/data"],
597
- networks: ["web"],
598
- },
599
- volume: { name: "postgres_data" },
600
- },
601
- mysql: {
602
- service: {
603
- image: "mysql:8",
604
- environment: [
605
- "MYSQL_ROOT_PASSWORD=${DB_ROOT_PASSWORD}",
606
- "MYSQL_DATABASE=${DB_NAME}",
607
- "MYSQL_USER=${DB_USER}",
608
- "MYSQL_PASSWORD=${DB_PASSWORD}",
609
- ],
610
- volumes: ["mysql_data:/var/lib/mysql"],
611
- networks: ["web"],
612
- },
613
- volume: { name: "mysql_data" },
614
- },
615
- redis: {
616
- service: {
617
- image: "redis:7-alpine",
618
- volumes: ["redis_data:/data"],
619
- networks: ["web"],
620
- },
621
- volume: { name: "redis_data" },
622
- },
623
- mongodb: {
624
- service: {
625
- image: "mongo:6",
626
- environment: [
627
- "MONGO_INITDB_ROOT_USERNAME=${DB_USER}",
628
- "MONGO_INITDB_ROOT_PASSWORD=${DB_PASSWORD}",
629
- ],
630
- volumes: ["mongo_data:/data/db"],
631
- networks: ["web"],
632
- },
633
- volume: { name: "mongo_data" },
634
- },
635
- };
636
-
637
- return configs[serviceName];
638
- }
639
-
640
- function getEnvVars(serviceName: string): string[] {
641
- const vars: Record<string, string[]> = {
642
- postgres: [
643
- "DB_HOST=postgres",
644
- "DB_PORT=5432",
645
- "DB_USER=myuser",
646
- "DB_PASSWORD=mypassword",
647
- "DB_NAME=mydb",
648
- ],
649
- mysql: [
650
- "DB_HOST=mysql",
651
- "DB_PORT=3306",
652
- "DB_USER=myuser",
653
- "DB_PASSWORD=mypassword",
654
- "DB_NAME=mydb",
655
- "DB_ROOT_PASSWORD=rootpassword",
656
- ],
657
- redis: ["REDIS_HOST=redis", "REDIS_PORT=6379"],
658
- mongodb: [
659
- "MONGO_HOST=mongodb",
660
- "MONGO_PORT=27017",
661
- "MONGO_USER=myuser",
662
- "MONGO_PASSWORD=mypassword",
663
- ],
664
- };
665
-
666
- return vars[serviceName] || [];
667
- }
668
-
669
- function getDefaultPort(service: string): number {
670
- const ports: Record<string, number> = {
671
- postgres: 5432,
672
- mysql: 3306,
673
- redis: 6379,
674
- mongodb: 27017,
675
- };
676
- return ports[service] || 3000;
677
- }
678
-
679
/**
 * Prints follow-up commands for the user after dockerization.
 *
 * In prod, "\${HOST}" is printed literally (the template escapes it) as a
 * placeholder for the user's .env value.
 *
 * NOTE(review): `services` comes from dockerize's selection, where
 * needsTunnel is never set, so the SSH-key hint appears unreachable —
 * confirm.
 */
function printNextSteps(
  projectName: string,
  env: string,
  services: DockerService[]
) {
  console.log(`\n✅ Done! Next steps:\n`);

  if (env === "prod") {
    console.log(` # Review and edit .env with your settings`);
    console.log(` docker-compose up -d`);
    console.log(` # Access at https://app.\${HOST}\n`);
  } else {
    console.log(` # Review and edit .env with your settings`);
    if (services.some((s) => s.needsTunnel)) {
      console.log(` # Add SSH keys: {service}.pem`);
    }
    console.log(` docker-compose -f docker-compose.dev.yml up -d\n`);
  }
}
@@ -1 +0,0 @@
1
- export * from "./docker";