@turboops/cli 1.0.0-dev.584 → 1.0.0-dev.586
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +540 -57
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -1,7 +1,7 @@
 #!/usr/bin/env node
 
 // src/index.ts
-import { Command as
+import { Command as Command8 } from "commander";
 import chalk8 from "chalk";
 
 // src/services/config.ts
@@ -135,9 +135,9 @@ function loadPackageJson() {
     join(__dirname, "../package.json")
     // From dist/
   ];
-  for (const
-    if (existsSync(
-      return require2(
+  for (const path4 of paths) {
+    if (existsSync(path4)) {
+      return require2(path4);
     }
   }
   return { version: "0.0.0", name: "@turboops/cli" };
@@ -432,7 +432,7 @@ var apiClient = {
   /**
    * Make an API request
    */
-  async request(method,
+  async request(method, path4, body) {
     const apiUrl = configService.getApiUrl();
     const token = configService.getToken();
     if (!token) {
@@ -441,7 +441,7 @@ var apiClient = {
         status: 401
       };
     }
-    const url = `${apiUrl}${
+    const url = `${apiUrl}${path4}`;
    const headers = {
       "Content-Type": "application/json",
       Authorization: `Bearer ${token}`
@@ -455,14 +455,35 @@ var apiClient = {
         fetchOptions.body = JSON.stringify(body);
       }
       const response = await fetch(url, fetchOptions);
-      const
+      const text = await response.text();
+      let data = null;
+      try {
+        data = text ? JSON.parse(text) : null;
+      } catch {
+        if (!response.ok) {
+          return {
+            error: `HTTP ${response.status}: ${text || "No response body"}`,
+            status: response.status
+          };
+        }
+      }
       if (!response.ok) {
         const errorData = data;
+        const errorMessage = errorData?.message || errorData?.error || `HTTP ${response.status}`;
+        return {
+          error: errorMessage,
+          status: response.status
+        };
+      }
+      if (data === null || data === void 0) {
         return {
-        error:
+          error: `Empty response from server (HTTP ${response.status})`,
           status: response.status
         };
       }
+      if (process.env.DEBUG) {
+        console.debug("[API] Response:", JSON.stringify(data, null, 2).substring(0, 500));
+      }
       return { data, status: response.status };
     } catch (error) {
       const message = error instanceof Error ? error.message : "Network error";
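Note: for readability, the new response handling added to `apiClient.request` above can be summarized as a standalone sketch. The `ApiResult` type and the `parseResponse` name below are illustrative and not part of the package; the logic mirrors the added lines (read the body as text, tolerate non-JSON bodies, and return structured `{ error, status }` results instead of throwing).

    type ApiResult<T> = { data?: T; error?: string; status: number };

    // Sketch of the new flow: parse defensively, then map HTTP errors and
    // empty bodies to structured error results.
    async function parseResponse<T>(response: Response): Promise<ApiResult<T>> {
      const text = await response.text();
      let data: T | null = null;
      try {
        data = text ? (JSON.parse(text) as T) : null;
      } catch {
        if (!response.ok) {
          return { error: `HTTP ${response.status}: ${text || "No response body"}`, status: response.status };
        }
      }
      if (!response.ok) {
        const err = data as { message?: string; error?: string } | null;
        return { error: err?.message || err?.error || `HTTP ${response.status}`, status: response.status };
      }
      if (data === null || data === undefined) {
        return { error: `Empty response from server (HTTP ${response.status})`, status: response.status };
      }
      return { data, status: response.status };
    }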
@@ -599,10 +620,10 @@ var apiClient = {
     return this.request("POST", "/deployment/projects/simple", payload);
   },
   /**
-   * Get all customers
+   * Get all customers (simplified list for selection)
    */
   async getCustomers() {
-    return this.request("GET", "/customer");
+    return this.request("GET", "/customer/simple");
   },
   /**
    * Get environments (stages) for project
@@ -1187,14 +1208,172 @@ function getStatusColor(status) {
 
 // src/commands/init.ts
 import { Command as Command3 } from "commander";
+import prompts2 from "prompts";
+
+// src/services/ai-tools.ts
+import { execSync, spawn } from "child_process";
 import prompts from "prompts";
+var AI_TOOLS = {
+  claude: {
+    name: "Claude Code",
+    command: "claude",
+    promptFlag: "-p"
+  },
+  codex: {
+    name: "OpenAI Codex",
+    command: "codex",
+    promptFlag: ""
+  }
+};
+var aiToolsService = {
+  /**
+   * Check which AI tools are installed locally
+   */
+  getAvailableTools() {
+    const available = [];
+    const whichCommand = process.platform === "win32" ? "where" : "which";
+    for (const [key, config] of Object.entries(AI_TOOLS)) {
+      try {
+        execSync(`${whichCommand} ${config.command}`, { stdio: "ignore" });
+        available.push(key);
+      } catch {
+      }
+    }
+    return available;
+  },
+  /**
+   * Interactive tool selection
+   * Returns null if no tools available or user cancels
+   */
+  async selectTool() {
+    const available = this.getAvailableTools();
+    if (available.length === 0) {
+      logger.error("Kein AI-Tool gefunden. Bitte installieren Sie Claude Code oder OpenAI Codex.");
+      logger.newline();
+      logger.info(" Claude Code: https://claude.ai/code");
+      logger.info(" OpenAI Codex: https://openai.com/codex");
+      return null;
+    }
+    if (available.length === 1) {
+      logger.info(`Verwende ${AI_TOOLS[available[0]].name}`);
+      return available[0];
+    }
+    const { selectedTool } = await prompts({
+      type: "select",
+      name: "selectedTool",
+      message: "AI-Tool ausw\xE4hlen:",
+      choices: available.map((tool) => ({
+        title: AI_TOOLS[tool].name,
+        value: tool
+      }))
+    });
+    return selectedTool || null;
+  },
+  /**
+   * Run AI tool with a prompt in the current directory
+   * The tool runs interactively, inheriting stdio
+   */
+  async runWithPrompt(tool, prompt) {
+    const config = AI_TOOLS[tool];
+    logger.newline();
+    logger.info(`Starte ${config.name}...`);
+    logger.info("Das AI-Tool wird interaktiv ausgef\xFChrt. Folgen Sie den Anweisungen.");
+    logger.newline();
+    return new Promise((resolve2) => {
+      const args = config.promptFlag ? [config.promptFlag, prompt] : [prompt];
+      const child = spawn(config.command, args, {
+        stdio: "inherit",
+        cwd: process.cwd(),
+        shell: process.platform === "win32"
+      });
+      child.on("close", (code) => {
+        logger.newline();
+        resolve2(code === 0);
+      });
+      child.on("error", (err) => {
+        logger.error(`Fehler beim Ausf\xFChren von ${config.name}: ${err.message}`);
+        resolve2(false);
+      });
+    });
+  }
+};
+
+// src/utils/detect-project.ts
+import * as fs2 from "fs/promises";
+import * as path2 from "path";
+async function detectProjectConfig() {
+  const cwd = process.cwd();
+  const result = {
+    hasDockerfile: false,
+    hasDockerCompose: false,
+    hasGitLabPipeline: false,
+    hasGitHubPipeline: false,
+    hasTurboOpsInPipeline: false
+  };
+  const dockerfilePaths = [
+    "Dockerfile",
+    "dockerfile",
+    "docker/Dockerfile",
+    "api/Dockerfile",
+    "app/Dockerfile",
+    "projects/api/Dockerfile",
+    "projects/app/Dockerfile"
+  ];
+  for (const dockerPath of dockerfilePaths) {
+    try {
+      await fs2.access(path2.join(cwd, dockerPath));
+      result.hasDockerfile = true;
+      result.dockerfilePath = dockerPath;
+      break;
+    } catch {
+    }
+  }
+  const composePaths = ["docker-compose.yml", "docker-compose.yaml", "compose.yml", "compose.yaml"];
+  for (const composePath of composePaths) {
+    try {
+      await fs2.access(path2.join(cwd, composePath));
+      result.hasDockerCompose = true;
+      break;
+    } catch {
+    }
+  }
+  const gitlabPath = path2.join(cwd, ".gitlab-ci.yml");
+  try {
+    await fs2.access(gitlabPath);
+    result.hasGitLabPipeline = true;
+    result.pipelinePath = ".gitlab-ci.yml";
+    const content = await fs2.readFile(gitlabPath, "utf-8");
+    result.hasTurboOpsInPipeline = content.includes("turbo deploy") || content.includes("@turboops/cli");
+  } catch {
+  }
+  const githubPath = path2.join(cwd, ".github", "workflows");
+  try {
+    const files = await fs2.readdir(githubPath);
+    const ymlFiles = files.filter((f) => f.endsWith(".yml") || f.endsWith(".yaml"));
+    if (ymlFiles.length > 0) {
+      result.hasGitHubPipeline = true;
+      result.pipelinePath = `.github/workflows/${ymlFiles[0]}`;
+      for (const file of ymlFiles) {
+        const content = await fs2.readFile(path2.join(githubPath, file), "utf-8");
+        if (content.includes("turbo deploy") || content.includes("@turboops/cli")) {
+          result.hasTurboOpsInPipeline = true;
+          break;
+        }
+      }
+    }
+  } catch {
+  }
+  return result;
+}
+
+// src/commands/init.ts
 import chalk4 from "chalk";
 var initCommand = new Command3("init").description("Initialize TurboOps project in current directory").action(async () => {
   logger.header("TurboOps Project Initialization");
   if (!configService.isAuthenticated()) {
     logger.warning("Nicht authentifiziert. Bitte melden Sie sich zuerst an.");
     logger.newline();
-    const { shouldLogin } = await
+    const { shouldLogin } = await prompts2({
       initial: true,
       message: "M\xF6chten Sie sich jetzt anmelden?",
       name: "shouldLogin",
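Note: the added detect-project helper returns a plain object; the following is a TypeScript-style summary of its shape for orientation. The interface name is illustrative only, the bundled code is untyped, and the field names are taken directly from the added lines above.

    // Illustrative type for the value resolved by detectProjectConfig() above.
    // The interface itself is not part of the package.
    interface DetectedProjectConfig {
      hasDockerfile: boolean;
      hasDockerCompose: boolean;
      hasGitLabPipeline: boolean;
      hasGitHubPipeline: boolean;
      hasTurboOpsInPipeline: boolean;
      dockerfilePath?: string; // e.g. "api/Dockerfile", set when a Dockerfile is found
      pipelinePath?: string;   // ".gitlab-ci.yml" or ".github/workflows/<first yml file>"
    }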
@@ -1218,7 +1397,7 @@ var initCommand = new Command3("init").description("Initialize TurboOps project
     logger.success(`Angemeldet als ${result.user?.email || "Unknown"}`);
   }
   logger.newline();
-  const { projectSlug } = await
+  const { projectSlug } = await prompts2({
     hint: "Der Slug Ihres TurboOps-Projekts",
     message: "Projekt-Slug:",
     name: "projectSlug",
@@ -1240,7 +1419,7 @@ var initCommand = new Command3("init").description("Initialize TurboOps project
   }
   logger.newline();
   logger.warning(`Projekt "${projectSlug}" nicht gefunden.`);
-  const { shouldCreate } = await
+  const { shouldCreate } = await prompts2({
     initial: true,
     message: "M\xF6chten Sie ein neues Projekt anlegen?",
     name: "shouldCreate",
@@ -1258,7 +1437,7 @@ async function setupProject(project) {
   const { data: environments } = await apiClient.getEnvironments(project.id);
   if (!environments || environments.length === 0) {
     logger.newline();
-    const { shouldCreateStage } = await
+    const { shouldCreateStage } = await prompts2({
       initial: true,
       message: "Das Projekt hat noch keine Stages. M\xF6chten Sie jetzt eine erstellen?",
       name: "shouldCreateStage",
@@ -1268,18 +1447,18 @@ async function setupProject(project) {
       await createFirstStage(project.id, project.slug);
     }
   }
-  const
-  const
-  const gitlabCiPath =
+  const fs4 = await import("fs/promises");
+  const path4 = await import("path");
+  const gitlabCiPath = path4.join(process.cwd(), ".gitlab-ci.yml");
   let hasGitLabPipeline = false;
   try {
-    await
+    await fs4.access(gitlabCiPath);
     hasGitLabPipeline = true;
   } catch {
   }
   if (!hasGitLabPipeline) {
     logger.newline();
-    const { shouldCreatePipeline } = await
+    const { shouldCreatePipeline } = await prompts2({
       initial: false,
       message: "M\xF6chten Sie eine CI/CD Pipeline anlegen?",
       name: "shouldCreatePipeline",
@@ -1289,6 +1468,7 @@ async function setupProject(project) {
       await createPipeline(project.id);
     }
   }
+  await offerAiAssistance(project.slug);
   await showFinalSummary(project);
 }
 async function createNewProject(slug) {
@@ -1330,7 +1510,7 @@ async function createNewProject(slug) {
       type: "text"
     }
   );
-  const projectDetails = await
+  const projectDetails = await prompts2(promptQuestions);
   if (!projectDetails.name) {
     logger.warning("Projekterstellung abgebrochen");
     addJsonData({ initialized: false, reason: "cancelled" });
@@ -1347,9 +1527,14 @@ async function createNewProject(slug) {
     })
   );
   if (createError || !newProject) {
-
+    const errorMsg = createError || "Leere Antwort vom Server";
+    logger.error(`Projekt konnte nicht erstellt werden: ${errorMsg}`);
+    if (process.env.DEBUG) {
+      console.debug("[DEBUG] createError:", createError);
+      console.debug("[DEBUG] newProject:", newProject);
+    }
     addJsonData({
-      error:
+      error: errorMsg,
       initialized: false,
       reason: "create_failed"
     });
@@ -1358,7 +1543,7 @@ async function createNewProject(slug) {
   logger.success(`Projekt "${newProject.name}" wurde erstellt!`);
   configService.setProject(newProject.slug);
   logger.newline();
-  const { shouldCreateStage } = await
+  const { shouldCreateStage } = await prompts2({
     initial: true,
     message: "M\xF6chten Sie jetzt die erste Stage anlegen?",
     name: "shouldCreateStage",
@@ -1368,7 +1553,7 @@ async function createNewProject(slug) {
     await createFirstStage(newProject.id, newProject.slug);
   }
   logger.newline();
-  const { shouldCreatePipeline } = await
+  const { shouldCreatePipeline } = await prompts2({
     initial: true,
     message: "M\xF6chten Sie eine CI/CD Pipeline anlegen?",
     name: "shouldCreatePipeline",
@@ -1437,7 +1622,7 @@ async function createFirstStage(projectId, projectSlug) {
       type: "select"
     });
   }
-  const stageDetails = await
+  const stageDetails = await prompts2(stageQuestions);
   if (!stageDetails.name || !stageDetails.slug) {
     logger.warning("Stage-Erstellung abgebrochen");
     return;
@@ -1475,7 +1660,7 @@ async function createPipeline(projectId) {
       type: "select"
     }
   ];
-  const pipelineDetails = await
+  const pipelineDetails = await prompts2(pipelineQuestions);
   if (!pipelineDetails.pipelineType) {
     logger.warning("Pipeline-Erstellung abgebrochen");
     return;
@@ -1488,18 +1673,18 @@ async function createPipeline(projectId) {
     logger.error(`Pipeline konnte nicht generiert werden: ${error || "Unbekannter Fehler"}`);
     return;
   }
-  const
-  const
+  const fs4 = await import("fs/promises");
+  const path4 = await import("path");
   let filePath;
   if (pipelineDetails.pipelineType === "github") {
-    const workflowsDir =
-    await
-    filePath =
+    const workflowsDir = path4.join(process.cwd(), ".github", "workflows");
+    await fs4.mkdir(workflowsDir, { recursive: true });
+    filePath = path4.join(workflowsDir, "deploy.yml");
   } else {
-    filePath =
+    filePath = path4.join(process.cwd(), ".gitlab-ci.yml");
   }
   try {
-    await
+    await fs4.writeFile(filePath, pipeline.content, "utf-8");
     logger.success(`${pipeline.filename} wurde erstellt!`);
     logger.newline();
     const { data: secrets } = await apiClient.getPipelineSecrets(projectId, pipelineDetails.pipelineType);
@@ -1557,6 +1742,114 @@ async function showFinalSummary(project) {
     "F\xFChren Sie `turbo logs <stage>` aus, um Logs anzuzeigen"
   ]);
 }
+async function offerAiAssistance(projectSlug) {
+  const detection = await detectProjectConfig();
+  if (!detection.hasDockerCompose) {
+    logger.newline();
+    logger.warning("Keine docker-compose.yml gefunden.");
+    logger.info("TurboOps Deployment ben\xF6tigt eine docker-compose.yml auf Root-Ebene.");
+    const { shouldCreateDocker } = await prompts2({
+      type: "confirm",
+      name: "shouldCreateDocker",
+      message: "Docker-Setup mit AI erstellen lassen?",
+      initial: true
+    });
+    if (shouldCreateDocker) {
+      await createDockerSetupWithAI();
+    }
+  }
+  if ((detection.hasGitLabPipeline || detection.hasGitHubPipeline) && !detection.hasTurboOpsInPipeline) {
+    logger.newline();
+    const pipelineType = detection.hasGitLabPipeline ? "GitLab" : "GitHub";
+    const { shouldIntegrate } = await prompts2({
+      type: "confirm",
+      name: "shouldIntegrate",
+      message: `${pipelineType} Pipeline gefunden ohne TurboOps. Mit AI integrieren lassen?`,
+      initial: true
+    });
+    if (shouldIntegrate) {
+      await integratePipelineWithAI(detection, projectSlug);
+    }
+  }
+}
+async function createDockerSetupWithAI() {
+  const tool = await aiToolsService.selectTool();
+  if (!tool) return;
+  const prompt = `Analysiere dieses Projekt und erstelle ein vollst\xE4ndiges Docker-Setup f\xFCr Production Deployment.
+
+**Wichtig: TurboOps ben\xF6tigt eine docker-compose.yml auf Root-Ebene!**
+
+Anforderungen:
+1. Erstelle eine "docker-compose.yml" im Projekt-Root
+   - Services f\xFCr alle Komponenten (z.B. api, app, db wenn n\xF6tig)
+   - Verwende build-Direktive mit Pfad zu den Dockerfiles
+   - Health-Checks f\xFCr alle Services
+   - Production-geeignete Konfiguration
+
+2. Erstelle Dockerfiles in den entsprechenden Unterordnern
+   - Multi-stage builds f\xFCr kleine Images
+   - F\xFCr jeden Service ein eigenes Dockerfile (z.B. ./api/Dockerfile, ./app/Dockerfile)
+   - Optimiert f\xFCr Production
+
+3. Erstelle eine ".dockerignore" auf Root-Ebene
+   - node_modules, .git, etc. ausschlie\xDFen
+
+4. Falls Monorepo: Beachte die Projekt-Struktur
+   - Pr\xFCfe ob projects/, packages/, apps/ Ordner existieren
+
+Beispiel docker-compose.yml Struktur:
+\`\`\`yaml
+version: '3.8'
+services:
+  api:
+    build:
+      context: .
+      dockerfile: ./api/Dockerfile
+    ports:
+      - "3000:3000"
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+\`\`\`
+
+Erstelle alle notwendigen Dateien.`;
+  const success = await aiToolsService.runWithPrompt(tool, prompt);
+  if (success) {
+    logger.success("Docker-Setup wurde erstellt!");
+  }
+}
+async function integratePipelineWithAI(detection, projectSlug) {
+  const tool = await aiToolsService.selectTool();
+  if (!tool) return;
+  const pipelineType = detection.hasGitLabPipeline ? "GitLab CI" : "GitHub Actions";
+  const pipelineFile = detection.pipelinePath;
+  const prompt = `Integriere TurboOps Deployment in die bestehende ${pipelineType} Pipeline.
+
+Projekt-Slug: ${projectSlug}
+Pipeline-Datei: ${pipelineFile}
+
+Anforderungen:
+1. Die bestehende Pipeline-Struktur beibehalten
+2. Einen neuen Deploy-Job/Step hinzuf\xFCgen der nach dem Build l\xE4uft
+3. TurboOps CLI installieren: npm install -g @turboops/cli
+4. Token setzen: turbo config set token \${TURBOOPS_TOKEN}
+5. Deploy ausf\xFChren: turbo deploy <environment> --image <image-tag> --wait
+
+Die erste Stage braucht --image, weitere Stages k\xF6nnen ohne --image deployen (Promotion).
+
+F\xFCge am Ende einen Kommentar hinzu welche Secrets ben\xF6tigt werden:
+- TURBOOPS_TOKEN: TurboOps Projekt-Token
+
+Modifiziere die Datei "${pipelineFile}" entsprechend.`;
+  const success = await aiToolsService.runWithPrompt(tool, prompt);
+  if (success) {
+    logger.success("Pipeline wurde mit AI aktualisiert!");
+    logger.newline();
+    logger.info("Vergessen Sie nicht, das CI/CD Secret TURBOOPS_TOKEN zu konfigurieren.");
+  }
+}
 
 // src/commands/logs.ts
 import { Command as Command4 } from "commander";
@@ -1825,17 +2118,33 @@ function getStatusColor2(status) {
 
 // src/commands/pipeline.ts
 import { Command as Command6 } from "commander";
-import
+import prompts3 from "prompts";
 import chalk7 from "chalk";
-import * as
-import * as
+import * as fs3 from "fs/promises";
+import * as path3 from "path";
 var pipelineCommand = new Command6("pipeline").description("Manage CI/CD pipeline configuration");
 pipelineCommand.command("generate").description("Generate CI/CD pipeline configuration").option("-t, --type <type>", "Pipeline type (gitlab, github)").option("-f, --force", "Overwrite existing pipeline file").option("-o, --output <path>", "Custom output path").action(async (options) => {
   const { project } = await getCommandContext();
   logger.header("CI/CD Pipeline generieren");
+  const detection = await detectProjectConfig();
+  if (!detection.hasDockerCompose) {
+    logger.warning("Keine docker-compose.yml gefunden.");
+    logger.info("TurboOps Deployment ben\xF6tigt eine docker-compose.yml.");
+    logger.newline();
+    const { shouldCreateDocker } = await prompts3({
+      type: "confirm",
+      name: "shouldCreateDocker",
+      message: "Docker-Setup mit AI erstellen?",
+      initial: true
+    });
+    if (shouldCreateDocker) {
+      await createDockerSetupWithAI2();
+      logger.newline();
+    }
+  }
   let pipelineType = options.type;
   if (!pipelineType) {
-    const { selectedType } = await
+    const { selectedType } = await prompts3({
       choices: [
         { title: "GitLab CI/CD", value: "gitlab" },
         { title: "GitHub Actions", value: "github" }
@@ -1858,25 +2167,49 @@ pipelineCommand.command("generate").description("Generate CI/CD pipeline configu
   }
   let outputPath;
   if (options.output) {
-    outputPath =
+    outputPath = path3.resolve(options.output);
   } else if (pipelineType === "github") {
-    outputPath =
+    outputPath = path3.join(process.cwd(), ".github", "workflows", "deploy.yml");
   } else {
-    outputPath =
+    outputPath = path3.join(process.cwd(), ".gitlab-ci.yml");
   }
   if (!options.force) {
     try {
-      await
-      const
-
-
-
-
-
-
-
-
-
+      await fs3.access(outputPath);
+      const content = await fs3.readFile(outputPath, "utf-8");
+      const hasTurboOps = content.includes("turbo deploy") || content.includes("@turboops/cli");
+      if (!hasTurboOps) {
+        const { action } = await prompts3({
+          type: "select",
+          name: "action",
+          message: `${path3.basename(outputPath)} existiert bereits ohne TurboOps.`,
+          choices: [
+            { title: "Mit AI integrieren (empfohlen)", value: "ai-integrate" },
+            { title: "\xDCberschreiben", value: "overwrite" },
+            { title: "Abbrechen", value: "cancel" }
+          ]
+        });
+        if (action === "cancel" || !action) {
+          logger.info("Pipeline-Generierung abgebrochen");
+          addJsonData({ generated: false, reason: "cancelled" });
+          return;
+        }
+        if (action === "ai-integrate") {
+          await integratePipelineWithAI2(pipelineType, project.slug, outputPath);
+          return;
+        }
+      } else {
+        const { shouldOverwrite } = await prompts3({
+          initial: false,
+          message: `${path3.basename(outputPath)} existiert bereits mit TurboOps. \xDCberschreiben?`,
+          name: "shouldOverwrite",
+          type: "confirm"
+        });
+        if (!shouldOverwrite) {
+          logger.info("Pipeline-Generierung abgebrochen");
+          addJsonData({ generated: false, reason: "file_exists" });
+          return;
+        }
+      }
       }
     } catch {
     }
@@ -1890,13 +2223,13 @@ pipelineCommand.command("generate").description("Generate CI/CD pipeline configu
     addJsonData({ error: error || "Unknown error", generated: false });
     process.exit(13 /* API_ERROR */);
   }
-  const outputDir =
-  await
+  const outputDir = path3.dirname(outputPath);
+  await fs3.mkdir(outputDir, { recursive: true });
   try {
-    await
-    logger.success(`${
+    await fs3.writeFile(outputPath, pipeline.content, "utf-8");
+    logger.success(`${path3.relative(process.cwd(), outputPath)} wurde erstellt!`);
     addJsonData({
-      filename:
+      filename: path3.relative(process.cwd(), outputPath),
       generated: true,
       project: project.slug,
       type: pipelineType
@@ -1954,11 +2287,160 @@ async function showSecrets(projectId, pipelineType) {
   logger.info("Projekt-Token k\xF6nnen Sie in der TurboOps Web-UI erstellen:");
   logger.info("Projekt \u2192 Settings \u2192 Tokens \u2192 Neuen Token erstellen");
 }
+async function createDockerSetupWithAI2() {
+  const tool = await aiToolsService.selectTool();
+  if (!tool) return;
+  const prompt = `Analysiere dieses Projekt und erstelle ein vollst\xE4ndiges Docker-Setup f\xFCr Production Deployment.
+
+**Wichtig: TurboOps ben\xF6tigt eine docker-compose.yml auf Root-Ebene!**
+
+Anforderungen:
+1. Erstelle eine "docker-compose.yml" im Projekt-Root
+   - Services f\xFCr alle Komponenten (z.B. api, app, db wenn n\xF6tig)
+   - Verwende build-Direktive mit Pfad zu den Dockerfiles
+   - Health-Checks f\xFCr alle Services
+   - Production-geeignete Konfiguration
+
+2. Erstelle Dockerfiles in den entsprechenden Unterordnern
+   - Multi-stage builds f\xFCr kleine Images
+   - F\xFCr jeden Service ein eigenes Dockerfile (z.B. ./api/Dockerfile, ./app/Dockerfile)
+   - Optimiert f\xFCr Production
+
+3. Erstelle eine ".dockerignore" auf Root-Ebene
+   - node_modules, .git, etc. ausschlie\xDFen
+
+4. Falls Monorepo: Beachte die Projekt-Struktur
+   - Pr\xFCfe ob projects/, packages/, apps/ Ordner existieren
+
+Erstelle alle notwendigen Dateien.`;
+  const success = await aiToolsService.runWithPrompt(tool, prompt);
+  if (success) {
+    logger.success("Docker-Setup wurde erstellt!");
+  }
+}
+async function integratePipelineWithAI2(pipelineType, projectSlug, pipelinePath) {
+  const tool = await aiToolsService.selectTool();
+  if (!tool) {
+    addJsonData({ generated: false, reason: "no_ai_tool" });
+    return;
+  }
+  const typeName = pipelineType === "gitlab" ? "GitLab CI" : "GitHub Actions";
+  const prompt = `Integriere TurboOps Deployment in die bestehende ${typeName} Pipeline.
+
+Projekt-Slug: ${projectSlug}
+Pipeline-Datei: ${pipelinePath}
+
+Anforderungen:
+1. Die bestehende Pipeline-Struktur beibehalten
+2. Einen neuen Deploy-Job/Step hinzuf\xFCgen der nach dem Build l\xE4uft
+3. TurboOps CLI installieren: npm install -g @turboops/cli
+4. Token setzen: turbo config set token \${TURBOOPS_TOKEN}
+5. Deploy ausf\xFChren: turbo deploy <environment> --image <image-tag> --wait
+
+Die erste Stage braucht --image, weitere Stages k\xF6nnen ohne --image deployen (Auto-Promotion).
+
+Secrets die ben\xF6tigt werden:
+- TURBOOPS_TOKEN: TurboOps Projekt-Token
+
+Modifiziere die Datei "${pipelinePath}" entsprechend.`;
+  const success = await aiToolsService.runWithPrompt(tool, prompt);
+  if (success) {
+    logger.success("Pipeline wurde mit AI aktualisiert!");
+    logger.newline();
+    logger.info("Vergessen Sie nicht, das CI/CD Secret TURBOOPS_TOKEN zu konfigurieren.");
+    addJsonData({ generated: true, method: "ai-integration" });
+  } else {
+    addJsonData({ generated: false, reason: "ai_failed" });
+  }
+}
+
+// src/commands/docker.ts
+import { Command as Command7 } from "commander";
+import prompts4 from "prompts";
+var DOCKER_SETUP_PROMPT = `Analysiere dieses Projekt und erstelle ein vollst\xE4ndiges Docker-Setup f\xFCr Production Deployment.
+
+**Wichtig: TurboOps ben\xF6tigt eine docker-compose.yml auf Root-Ebene!**
+
+Anforderungen:
+1. Erstelle eine "docker-compose.yml" im Projekt-Root
+   - Services f\xFCr alle Komponenten (z.B. api, app, db wenn n\xF6tig)
+   - Verwende build-Direktive mit Pfad zu den Dockerfiles
+   - Health-Checks f\xFCr alle Services
+   - Production-geeignete Konfiguration
+
+2. Erstelle Dockerfiles in den entsprechenden Unterordnern
+   - Multi-stage builds f\xFCr kleine Images
+   - F\xFCr jeden Service ein eigenes Dockerfile (z.B. ./api/Dockerfile, ./app/Dockerfile)
+   - Optimiert f\xFCr Production
+
+3. Erstelle eine ".dockerignore" auf Root-Ebene
+   - node_modules, .git, etc. ausschlie\xDFen
+
+4. Falls Monorepo: Beachte die Projekt-Struktur
+   - Pr\xFCfe ob projects/, packages/, apps/ Ordner existieren
+
+Beispiel docker-compose.yml Struktur:
+\`\`\`yaml
+version: '3.8'
+services:
+  api:
+    build:
+      context: .
+      dockerfile: ./api/Dockerfile
+    ports:
+      - "3000:3000"
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+\`\`\`
+
+Erstelle alle notwendigen Dateien.`;
+var dockerCommand = new Command7("docker").description("Manage Docker configuration");
+dockerCommand.command("generate").description("Docker-Setup (docker-compose + Dockerfiles) mit AI erstellen").option("-f, --force", "Bestehende Dateien \xFCberschreiben").action(async (options) => {
+  logger.header("Docker-Setup generieren");
+  const detection = await detectProjectConfig();
+  if (detection.hasDockerCompose && !options.force) {
+    logger.warning("docker-compose.yml existiert bereits.");
+    const { shouldOverwrite } = await prompts4({
+      type: "confirm",
+      name: "shouldOverwrite",
+      message: "M\xF6chten Sie das bestehende Setup \xFCberschreiben?",
+      initial: false
+    });
+    if (!shouldOverwrite) {
+      logger.info("Docker-Setup Generierung abgebrochen");
+      addJsonData({ generated: false, reason: "file_exists" });
+      return;
+    }
+  }
+  const tool = await aiToolsService.selectTool();
+  if (!tool) {
+    addJsonData({ generated: false, reason: "no_ai_tool" });
+    return;
+  }
+  const success = await aiToolsService.runWithPrompt(tool, DOCKER_SETUP_PROMPT);
+  if (success) {
+    logger.success("Docker-Setup wurde erstellt!");
+    logger.newline();
+    logger.info("N\xE4chste Schritte:");
+    logger.list([
+      "Pr\xFCfen Sie die erstellten Dateien",
+      "Passen Sie Umgebungsvariablen an",
+      "Testen Sie mit: docker compose build && docker compose up"
+    ]);
+    addJsonData({ generated: true });
+  } else {
+    logger.warning("Docker-Setup Generierung wurde abgebrochen oder ist fehlgeschlagen.");
+    addJsonData({ generated: false, reason: "ai_failed" });
+  }
+});
 
 // src/index.ts
 var VERSION = getCurrentVersion();
 var shouldCheckUpdate = true;
-var program = new
+var program = new Command8();
 program.name("turbo").description("TurboCLI - Command line interface for TurboOps deployments").version(VERSION, "-v, --version", "Show version number").option("--project <slug>", "Override project slug").option("--token <token>", "Override API token").option("--json", "Output as JSON").option("--quiet", "Only show errors").option("--verbose", "Show debug output").option("--no-update-check", "Skip version check");
 program.hook("preAction", (thisCommand) => {
   const opts = thisCommand.opts();
@@ -1997,6 +2479,7 @@ program.addCommand(configCommand);
 program.addCommand(logsCommand);
 program.addCommand(deployCommand);
 program.addCommand(pipelineCommand);
+program.addCommand(dockerCommand);
 program.command("self-update").description("Update TurboCLI to the latest version").action(async () => {
   logger.info("Checking for updates...");
   try {