@turboops/cli 1.0.91-dev.1467 → 1.0.91

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +19 -0
  2. package/dist/index.js +459 -686
  3. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -194,7 +194,7 @@ function getPackageName() {
194
194
  }
195
195
 
196
196
  // src/services/config.ts
197
- var BUILD_ENV = true ? "dev" : void 0;
197
+ var BUILD_ENV = true ? "prod" : void 0;
198
198
  var API_URLS = {
199
199
  local: "http://localhost:3000",
200
200
  dev: "https://api.dev.turbo-ops.de",
@@ -739,12 +739,11 @@ var apiClient = {
739
739
  /**
740
740
  * Trigger a deployment (Project Token endpoint for CI/CD)
741
741
  */
742
- async deploy(stageId, imageTag, composeFile) {
743
- const body = {
744
- stage: stageId
745
- };
742
+ async deploy(stageId, imageTag, composeFile, envExamples) {
743
+ const body = { stage: stageId };
746
744
  if (imageTag) body.imageTag = imageTag;
747
745
  if (composeFile) body.composeFile = composeFile;
746
+ if (envExamples) body.envExamples = envExamples;
748
747
  return this.request("POST", "/project/deployment/deploy", body);
749
748
  },
750
749
  /**
@@ -790,7 +789,7 @@ var apiClient = {
790
789
  if (Date.now() - startTime > timeout) {
791
790
  throw new Error(`Deployment timeout after ${timeout / 1e3} seconds`);
792
791
  }
793
- await new Promise((resolve4) => setTimeout(resolve4, pollInterval));
792
+ await new Promise((resolve5) => setTimeout(resolve5, pollInterval));
794
793
  }
795
794
  }
796
795
  };
@@ -913,7 +912,7 @@ var authService = {
913
912
  success: false
914
913
  };
915
914
  }
916
- await new Promise((resolve4) => setTimeout(resolve4, pollInterval));
915
+ await new Promise((resolve5) => setTimeout(resolve5, pollInterval));
917
916
  }
918
917
  return { error: "Zeit\xFCberschreitung bei der Anmeldung", success: false };
919
918
  },
@@ -1327,8 +1326,8 @@ function getStatusColor(status) {
1327
1326
  }
1328
1327
 
1329
1328
  // src/commands/init.ts
1330
- import { readFileSync as readFileSync2, existsSync as existsSync3, statSync } from "fs";
1331
- import { resolve } from "path";
1329
+ import { readFileSync as readFileSync3, existsSync as existsSync4 } from "fs";
1330
+ import { resolve as resolve2 } from "path";
1332
1331
  import chalk5 from "chalk";
1333
1332
  import { Command as Command3 } from "commander";
1334
1333
  import prompts2 from "prompts";
@@ -1429,7 +1428,7 @@ var aiToolsService = {
1429
1428
  text: `${config.name} arbeitet...`,
1430
1429
  color: "cyan"
1431
1430
  }).start();
1432
- return new Promise((resolve4) => {
1431
+ return new Promise((resolve5) => {
1433
1432
  if (verbose) {
1434
1433
  spinner.stop();
1435
1434
  console.log(chalk4.dim("[DEBUG] Spawning child process..."));
@@ -1550,7 +1549,7 @@ var aiToolsService = {
1550
1549
  spinner.fail(`${config.name} fehlgeschlagen (Exit Code: ${code})`);
1551
1550
  }
1552
1551
  logger.newline();
1553
- resolve4(code === 0);
1552
+ resolve5(code === 0);
1554
1553
  });
1555
1554
  child.on("error", (err) => {
1556
1555
  if (verbose) {
@@ -1559,7 +1558,7 @@ var aiToolsService = {
1559
1558
  spinner.fail(
1560
1559
  `Fehler beim Ausf\xFChren von ${config.name}: ${err.message}`
1561
1560
  );
1562
- resolve4(false);
1561
+ resolve5(false);
1563
1562
  });
1564
1563
  });
1565
1564
  }
@@ -1588,6 +1587,262 @@ function formatToolAction(toolName, input) {
1588
1587
  }
1589
1588
  }
1590
1589
 
1590
+ // src/utils/ai-prompts.ts
1591
+ var DOCKER_SETUP_PROMPT = `Analysiere dieses Projekt und erstelle ein Docker-Setup f\xFCr TurboOps Production Deployment.
1592
+
1593
+ === TURBOOPS ANFORDERUNGEN ===
1594
+ - docker-compose.yml MUSS auf Root-Ebene liegen
1595
+ - ALLE Abh\xE4ngigkeiten m\xFCssen enthalten sein (DB, Redis, etc.) - production-ready!
1596
+ - Images m\xFCssen immutable sein (keine Volume-Mounts f\xFCr Code)
1597
+ - Datenbank-Volumes f\xFCr Persistenz sind erlaubt (named volumes)
1598
+ - Environment-Variablen via \${VARIABLE} Syntax (werden zur Laufzeit injiziert)
1599
+
1600
+ === ZU ERSTELLENDE DATEIEN ===
1601
+
1602
+ 1. docker-compose.yml (Root-Ebene) - Datenbanken + Application Services
1603
+ WICHTIG: Keine Environment-Variablen f\xFCr Application Services!
1604
+ - Environment-Variablen f\xFCr api/app werden von TurboOps zur Deployment-Zeit injiziert
1605
+ - Datenbanken bekommen ihre Konfiguration (Credentials kommen von TurboOps)
1606
+ \`\`\`yaml
1607
+ services:
1608
+ # === DATENBANKEN ===
1609
+ mongo:
1610
+ image: mongo:7
1611
+ volumes:
1612
+ - mongo_data:/data/db
1613
+ healthcheck:
1614
+ test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"]
1615
+ interval: 30s
1616
+ timeout: 10s
1617
+ retries: 3
1618
+ restart: unless-stopped
1619
+
1620
+ redis:
1621
+ image: redis:7-alpine
1622
+ volumes:
1623
+ - redis_data:/data
1624
+ healthcheck:
1625
+ test: ["CMD", "redis-cli", "ping"]
1626
+ interval: 30s
1627
+ timeout: 10s
1628
+ retries: 3
1629
+ restart: unless-stopped
1630
+
1631
+ # === APPLICATION SERVICES (keine environment - kommt von TurboOps!) ===
1632
+ # WICHTIG: image: muss gesetzt sein f\xFCr docker compose push!
1633
+ api:
1634
+ build:
1635
+ context: .
1636
+ dockerfile: ./projects/api/Dockerfile # oder ./api/Dockerfile
1637
+ image: \${IMAGE_NAME}/api:\${IMAGE_TAG:-latest}
1638
+ expose:
1639
+ - "3000"
1640
+ depends_on:
1641
+ mongo:
1642
+ condition: service_healthy
1643
+ redis:
1644
+ condition: service_healthy
1645
+ healthcheck:
1646
+ test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000/health"]
1647
+ interval: 30s
1648
+ timeout: 10s
1649
+ retries: 3
1650
+ start_period: 40s
1651
+ restart: unless-stopped
1652
+
1653
+ app:
1654
+ build:
1655
+ context: .
1656
+ dockerfile: ./projects/app/Dockerfile # oder ./app/Dockerfile
1657
+ image: \${IMAGE_NAME}/app:\${IMAGE_TAG:-latest}
1658
+ expose:
1659
+ - "3000"
1660
+ depends_on:
1661
+ api:
1662
+ condition: service_healthy
1663
+ healthcheck:
1664
+ test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000"]
1665
+ interval: 30s
1666
+ timeout: 10s
1667
+ retries: 3
1668
+ restart: unless-stopped
1669
+
1670
+ volumes:
1671
+ mongo_data:
1672
+ redis_data:
1673
+ \`\`\`
1674
+
1675
+ 2. Dockerfile f\xFCr jeden Service (im jeweiligen Ordner)
1676
+ KRITISCH: IMMER --ignore-scripts bei npm ci verwenden!
1677
+ \`\`\`dockerfile
1678
+ # Stage 1: Dependencies
1679
+ FROM node:22-alpine AS deps
1680
+ WORKDIR /app
1681
+ COPY package*.json ./
1682
+ # KRITISCH: --ignore-scripts verhindert husky/prepare Fehler in Docker!
1683
+ RUN npm ci --ignore-scripts
1684
+
1685
+ # Stage 2: Builder
1686
+ FROM node:22-alpine AS builder
1687
+ WORKDIR /app
1688
+ COPY --from=deps /app/node_modules ./node_modules
1689
+ COPY . .
1690
+ RUN npm run build
1691
+
1692
+ # Stage 3: Runner (Production)
1693
+ FROM node:22-alpine AS runner
1694
+ WORKDIR /app
1695
+ ENV NODE_ENV=production
1696
+ # Non-root user f\xFCr Sicherheit
1697
+ RUN addgroup -g 1001 -S nodejs && adduser -S nodejs -u 1001
1698
+ COPY --from=builder --chown=nodejs:nodejs /app/dist ./dist
1699
+ COPY --from=builder --chown=nodejs:nodejs /app/node_modules ./node_modules
1700
+ COPY --from=builder --chown=nodejs:nodejs /app/package.json ./
1701
+
1702
+ # Writable directories for runtime files (TUS uploads, temp processing)
1703
+ RUN mkdir -p /app/uploads/tus /app/temp && chown -R nodejs:nodejs /app/uploads /app/temp
1704
+
1705
+ USER nodejs
1706
+ EXPOSE 3000
1707
+ CMD ["node", "dist/main.js"]
1708
+ \`\`\`
1709
+
1710
+ 3. .dockerignore (Root-Ebene)
1711
+ \`\`\`
1712
+ node_modules
1713
+ .git
1714
+ .env*
1715
+ *.log
1716
+ dist
1717
+ .nuxt
1718
+ .output
1719
+ coverage
1720
+ \`\`\`
1721
+
1722
+ === WICHTIGE REGELN F\xDCR TURBOOPS ===
1723
+ - KRITISCH: IMMER \`npm ci --ignore-scripts\` verwenden (verhindert husky/prepare Fehler!)
1724
+ - KRITISCH: KEINE \`ports:\` \u2192 Verwende \`expose:\` (nur im Docker-Netzwerk sichtbar)
1725
+ - KRITISCH: KEINE \`environment:\` f\xFCr Application Services (api, app) \u2192 Env-Variablen werden von TurboOps zur Deployment-Zeit injiziert
1726
+ - Datenbank-Services (mongo, redis, etc.) OHNE environment - Credentials kommen von TurboOps
1727
+ - Ein Reverse-Proxy (Traefik) routet externe Anfragen
1728
+ - Pr\xFCfe die tats\xE4chliche Projektstruktur (projects/, packages/, apps/, oder flach)
1729
+ - Passe Dockerfile-Pfade entsprechend an
1730
+ - NestJS: CMD ["node", "dist/main.js"]
1731
+ - Nuxt: CMD ["node", ".output/server/index.mjs"]
1732
+ - Nutze wget statt curl f\xFCr healthchecks (curl nicht in alpine)
1733
+ - Named volumes f\xFCr Datenbank-Persistenz (mongo_data, redis_data, etc.)
1734
+ - Keine Code-Volume-Mounts (Code ist im Image)
1735
+ - depends_on mit condition: service_healthy f\xFCr korrekten Start-Order
1736
+ - Erkenne ben\xF6tigte Services aus package.json (mongoose \u2192 mongo, ioredis \u2192 redis, etc.)
1737
+
1738
+ Erstelle alle notwendigen Dateien basierend auf der erkannten Projektstruktur und Abh\xE4ngigkeiten.`;
1739
+ function buildPipelineIntegrationPrompt(pipelineType, projectSlug, pipelinePath, templateContent) {
1740
+ const pipelineTypeName = pipelineType === "gitlab" ? "GitLab CI" : "GitHub Actions";
1741
+ if (templateContent) {
1742
+ return `WICHTIG: Integriere das TurboOps Deployment-Template EXAKT wie vorgegeben in die bestehende ${pipelineTypeName} Pipeline.
1743
+
1744
+ Bestehende Pipeline-Datei: ${pipelinePath}
1745
+
1746
+ === TURBOOPS TEMPLATE (NICHT VER\xC4NDERN!) ===
1747
+ \`\`\`yaml
1748
+ ${templateContent}
1749
+ \`\`\`
1750
+ === ENDE TEMPLATE ===
1751
+
1752
+ STRIKTE REGELN:
1753
+ 1. Das TurboOps Template MUSS EXAKT so \xFCbernommen werden wie oben angegeben
1754
+ 2. KEINE \xC4nderungen an:
1755
+ - Variables (IMAGE_NAME, DOCKER_TLS_CERTDIR)
1756
+ - Job-Namen (build, deploy-dev, deploy-test, deploy-prod)
1757
+ - Script-Befehlen (turbo deploy, docker build, etc.)
1758
+ - Branch-Rules (die sind korrekt f\xFCr die konfigurierten Stages)
1759
+ - needs/dependencies zwischen Jobs
1760
+ 3. NUR erlaubte \xC4nderungen:
1761
+ - Stages der bestehenden Pipeline VOR den TurboOps-Stages einf\xFCgen
1762
+ - Bestehende Jobs der Pipeline BEHALTEN (z.B. lint, test)
1763
+ - TurboOps "build" Job muss von bestehenden Build-Jobs abh\xE4ngen (needs)
1764
+
1765
+ MERGE-STRATEGIE:
1766
+ - Bestehende stages: [lint, test, build] + TurboOps stages: [build, deploy-dev, deploy-test, deploy-prod]
1767
+ - Ergebnis: stages: [lint, test, build, turboops-build, deploy-dev, deploy-test, deploy-prod]
1768
+ - Der TurboOps "build" Job sollte in "turboops-build" umbenannt werden falls es bereits einen "build" Job gibt
1769
+ - turboops-build needs: [build] (vom bestehenden build Job)
1770
+
1771
+ Modifiziere die Datei "${pipelinePath}" entsprechend.`;
1772
+ }
1773
+ return `Erstelle eine neue ${pipelineTypeName} Pipeline f\xFCr TurboOps Deployment.
1774
+
1775
+ Projekt-Slug: ${projectSlug}
1776
+ Pipeline-Datei: ${pipelinePath}
1777
+
1778
+ WICHTIG - Branch-zu-Stage-Zuordnung (STRIKT EINHALTEN!):
1779
+ - dev Stage \u2192 deployed NUR wenn Branch == "dev"
1780
+ - test Stage \u2192 deployed NUR wenn Branch == "test"
1781
+ - prod Stage \u2192 deployed NUR wenn Branch == "main"
1782
+
1783
+ PIPELINE-STRUKTUR:
1784
+
1785
+ 1. BUILD-JOB (l\xE4uft NUR auf dem default branch z.B. dev):
1786
+ stages: [build, deploy]
1787
+
1788
+ build:
1789
+ image: docker:24-dind
1790
+ stage: build
1791
+ services: [docker:24-dind]
1792
+ before_script:
1793
+ - docker login -u ${projectSlug} -p \${TURBOOPS_TOKEN} ${configService.getRegistryUrl()}
1794
+ script:
1795
+ - docker compose -f docker-compose.yml build
1796
+ - docker compose -f docker-compose.yml push
1797
+ only:
1798
+ - dev # L\xE4uft nur auf dem default branch
1799
+
1800
+ 2. DEPLOY-JOBS (jeder NUR auf seinem Branch!):
1801
+ WICHTIG: KEIN --image Flag n\xF6tig! Die CLI erkennt CI_COMMIT_SHA automatisch!
1802
+
1803
+ deploy-dev:
1804
+ image: node:22-alpine
1805
+ stage: deploy
1806
+ before_script:
1807
+ - npm install -g @turboops/cli${configService.getEnvironment() === "dev" ? "@dev" : ""}
1808
+ - turbo config set token \${TURBOOPS_TOKEN}
1809
+ script:
1810
+ - turbo deploy dev --wait # CLI erkennt Image automatisch via CI_COMMIT_SHA
1811
+ only:
1812
+ - dev # NUR auf dev Branch!
1813
+
1814
+ deploy-test:
1815
+ image: node:22-alpine
1816
+ stage: deploy
1817
+ before_script:
1818
+ - npm install -g @turboops/cli${configService.getEnvironment() === "dev" ? "@dev" : ""}
1819
+ - turbo config set token \${TURBOOPS_TOKEN}
1820
+ script:
1821
+ - turbo deploy test --wait
1822
+ only:
1823
+ - test # NUR auf test Branch!
1824
+
1825
+ deploy-prod:
1826
+ image: node:22-alpine
1827
+ stage: deploy
1828
+ before_script:
1829
+ - npm install -g @turboops/cli${configService.getEnvironment() === "dev" ? "@dev" : ""}
1830
+ - turbo config set token \${TURBOOPS_TOKEN}
1831
+ script:
1832
+ - turbo deploy prod --wait
1833
+ only:
1834
+ - main # NUR auf main Branch!
1835
+
1836
+ WICHTIG:
1837
+ - Der Build l\xE4uft auf ALLEN Branches (dev, test, main)
1838
+ - Jeder Deploy-Job l\xE4uft NUR auf seinem zugeordneten Branch
1839
+ - KEIN --image Flag n\xF6tig - CLI erkennt CI_COMMIT_SHA/GITHUB_SHA automatisch
1840
+ - Registry-URL: ${configService.getRegistryUrl()}/${projectSlug}
1841
+ - Secrets ben\xF6tigt: TURBOOPS_TOKEN
1842
+
1843
+ Erstelle die Datei "${pipelinePath}".`;
1844
+ }
1845
+
1591
1846
  // src/utils/detect-project.ts
1592
1847
  import * as fs2 from "fs/promises";
1593
1848
  import * as path2 from "path";
@@ -1644,10 +1899,71 @@ async function detectProjectConfig() {
1644
1899
  return result;
1645
1900
  }
1646
1901
 
1647
- // src/commands/init.ts
1648
- var initCommand = new Command3("init").description("Initialize TurboOps project in current directory").action(async () => {
1649
- const verbose = process.env.DEBUG === "true";
1650
- if (verbose) {
1902
+ // src/utils/env-examples.ts
1903
+ import { existsSync as existsSync3, readFileSync as readFileSync2, statSync } from "fs";
1904
+ import { resolve } from "path";
1905
+ var DIR_TO_SERVICE = {
1906
+ ".": "_root",
1907
+ "projects/api": "api",
1908
+ "projects/app": "app",
1909
+ "packages/api": "api",
1910
+ "packages/app": "app",
1911
+ api: "api",
1912
+ app: "app",
1913
+ backend: "api",
1914
+ frontend: "app",
1915
+ server: "api",
1916
+ client: "app"
1917
+ };
1918
+ function collectEnvExamples(options) {
1919
+ const cwd = process.cwd();
1920
+ const envFileNames = [".env.example", ".env.sample"];
1921
+ const silent = options?.silent ?? false;
1922
+ const result = { services: {} };
1923
+ let found = false;
1924
+ for (const [dir, serviceName] of Object.entries(DIR_TO_SERVICE)) {
1925
+ for (const fileName of envFileNames) {
1926
+ const filePath = resolve(cwd, dir, fileName);
1927
+ if (!existsSync3(filePath)) {
1928
+ continue;
1929
+ }
1930
+ const relativePath = dir === "." ? fileName : `${dir}/${fileName}`;
1931
+ const fileSize = statSync(filePath).size;
1932
+ if (fileSize > 64 * 1024) {
1933
+ if (!silent) {
1934
+ logger.warning(
1935
+ `${relativePath} ist zu gro\xDF (> 64 KB) \u2014 \xFCbersprungen.`
1936
+ );
1937
+ }
1938
+ break;
1939
+ }
1940
+ const content = readFileSync2(filePath, "utf-8");
1941
+ found = true;
1942
+ if (serviceName === "_root") {
1943
+ result.root = content;
1944
+ if (!silent) {
1945
+ logger.info(
1946
+ `${relativePath} gefunden \u2014 Stage-Umgebungsvariablen werden vorbereitet.`
1947
+ );
1948
+ }
1949
+ } else if (!result.services[serviceName]) {
1950
+ result.services[serviceName] = content;
1951
+ if (!silent) {
1952
+ logger.info(
1953
+ `${relativePath} gefunden \u2014 Umgebungsvariablen f\xFCr Service "${serviceName}" werden vorbereitet.`
1954
+ );
1955
+ }
1956
+ }
1957
+ break;
1958
+ }
1959
+ }
1960
+ return found ? result : void 0;
1961
+ }
1962
+
1963
+ // src/commands/init.ts
1964
+ var initCommand = new Command3("init").description("Initialize TurboOps project in current directory").action(async () => {
1965
+ const verbose = process.env.DEBUG === "true";
1966
+ if (verbose) {
1651
1967
  console.log("[DEBUG] Verbose mode enabled");
1652
1968
  }
1653
1969
  logger.header("TurboOps Project Initialization");
@@ -1751,14 +2067,10 @@ async function setupProject(project, verbose = false) {
1751
2067
  const { data: environments } = await apiClient.getEnvironments(project.id);
1752
2068
  if (!environments || environments.length === 0) {
1753
2069
  logger.newline();
1754
- const { shouldCreateStage } = await prompts2({
1755
- initial: true,
1756
- message: "Das Projekt hat noch keine Stages. Standard-Stages erstellen (dev, test, prod)?",
1757
- name: "shouldCreateStage",
1758
- type: "confirm"
1759
- });
1760
- if (shouldCreateStage) {
1761
- await createDefaultStages(project.id, project.slug);
2070
+ logger.info("Das Projekt hat noch keine Stages.");
2071
+ const selectedStages = await selectStages();
2072
+ if (selectedStages && selectedStages.length > 0) {
2073
+ await createDefaultStages(project.id, project.slug, selectedStages);
1762
2074
  }
1763
2075
  }
1764
2076
  await offerAiAssistance(project.slug, project.id, verbose);
@@ -1859,45 +2171,62 @@ async function createNewProject(slug, verbose = false) {
1859
2171
  }
1860
2172
  configService.setProject(newProject.slug);
1861
2173
  logger.newline();
1862
- const { shouldCreateStage } = await prompts2({
1863
- initial: true,
1864
- message: "Standard-Stages erstellen (dev, test, prod)?",
1865
- name: "shouldCreateStage",
1866
- type: "confirm"
1867
- });
1868
- if (shouldCreateStage) {
1869
- await createDefaultStages(newProject.id, newProject.slug);
2174
+ const selectedStages = await selectStages();
2175
+ if (selectedStages && selectedStages.length > 0) {
2176
+ await createDefaultStages(newProject.id, newProject.slug, selectedStages);
1870
2177
  }
1871
2178
  await offerAiAssistance(newProject.slug, newProject.id, verbose);
1872
2179
  await detectAndUploadCompose(newProject.slug);
1873
2180
  await showFinalSummary(newProject);
1874
2181
  }
1875
- var DEFAULT_STAGES = [
2182
+ var AVAILABLE_STAGES = [
1876
2183
  {
1877
2184
  name: "Development",
1878
2185
  slug: "dev",
1879
2186
  type: "development",
1880
2187
  branch: "dev",
1881
- stageOrder: 0
2188
+ selected: true
1882
2189
  },
1883
2190
  {
1884
2191
  name: "Test",
1885
2192
  slug: "test",
1886
2193
  type: "staging",
1887
2194
  branch: "test",
1888
- stageOrder: 1
2195
+ selected: true
2196
+ },
2197
+ {
2198
+ name: "Preview",
2199
+ slug: "preview",
2200
+ type: "preview",
2201
+ branch: "",
2202
+ selected: false
1889
2203
  },
1890
2204
  {
1891
2205
  name: "Production",
1892
2206
  slug: "prod",
1893
2207
  type: "production",
1894
2208
  branch: "main",
1895
- stageOrder: 2
2209
+ selected: true
1896
2210
  }
1897
2211
  ];
1898
- async function createDefaultStages(projectId, projectSlug) {
2212
+ async function selectStages() {
2213
+ const { selectedSlugs } = await prompts2({
2214
+ type: "multiselect",
2215
+ name: "selectedSlugs",
2216
+ message: "Welche Stages sollen erstellt werden?",
2217
+ choices: AVAILABLE_STAGES.map((stage) => ({
2218
+ title: `${stage.name} (${stage.slug})`,
2219
+ value: stage.slug,
2220
+ selected: stage.selected
2221
+ })),
2222
+ hint: "- Leertaste zum Ausw\xE4hlen, Enter zum Best\xE4tigen"
2223
+ });
2224
+ if (!selectedSlugs) return null;
2225
+ return AVAILABLE_STAGES.filter((s) => selectedSlugs.includes(s.slug));
2226
+ }
2227
+ async function createDefaultStages(projectId, _projectSlug, stages) {
1899
2228
  logger.newline();
1900
- logger.header("Standard-Stages erstellen");
2229
+ logger.header("Stages erstellen");
1901
2230
  const { baseDomain } = await prompts2({
1902
2231
  type: "text",
1903
2232
  name: "baseDomain",
@@ -1909,8 +2238,11 @@ async function createDefaultStages(projectId, projectSlug) {
1909
2238
  return;
1910
2239
  }
1911
2240
  logger.newline();
1912
- logger.info("Erstelle 3 Standard-Stages:");
1913
- for (const stage of DEFAULT_STAGES) {
2241
+ logger.info(
2242
+ `Erstelle ${stages.length} Stage${stages.length !== 1 ? "s" : ""}:`
2243
+ );
2244
+ for (let i = 0; i < stages.length; i++) {
2245
+ const stage = stages[i];
1914
2246
  const domain = stage.type === "production" ? baseDomain : `${stage.slug}.${baseDomain}`;
1915
2247
  const { data: newStage, error } = await withSpinner(
1916
2248
  `Erstelle ${stage.name}...`,
@@ -1919,9 +2251,9 @@ async function createDefaultStages(projectId, projectSlug) {
1919
2251
  name: stage.name,
1920
2252
  slug: stage.slug,
1921
2253
  type: stage.type,
1922
- stageOrder: stage.stageOrder,
2254
+ stageOrder: i,
1923
2255
  domain,
1924
- branch: stage.branch
2256
+ branch: stage.branch || void 0
1925
2257
  })
1926
2258
  );
1927
2259
  if (error || !newStage) {
@@ -2031,7 +2363,11 @@ async function showFinalSummary(project) {
2031
2363
  logger.newline();
2032
2364
  logger.header("Verf\xFCgbare Stages");
2033
2365
  for (const env of environments) {
2034
- console.log(` ${chalk5.bold(env.slug)} - ${env.name} (${env.type})`);
2366
+ const services = env.services?.filter((s) => s.enabled !== false) || [];
2367
+ const serviceInfo = services.length > 0 ? ` \u2192 Services: ${services.map((s) => s.name).join(", ")}` : "";
2368
+ console.log(
2369
+ ` ${chalk5.bold(env.slug)} - ${env.name} (${env.type})${chalk5.dim(serviceInfo)}`
2370
+ );
2035
2371
  }
2036
2372
  }
2037
2373
  if (project.projectToken) {
@@ -2191,152 +2527,11 @@ async function offerAiAssistance(projectSlug, projectId, verbose = false) {
2191
2527
  async function createDockerSetupWithAI(projectId, verbose = false) {
2192
2528
  const tool = await aiToolsService.selectTool();
2193
2529
  if (!tool) return;
2194
- const prompt = `Analysiere dieses Projekt und erstelle ein Docker-Setup f\xFCr TurboOps Production Deployment.
2195
-
2196
- === TURBOOPS ANFORDERUNGEN ===
2197
- - docker-compose.yml MUSS auf Root-Ebene liegen
2198
- - ALLE Abh\xE4ngigkeiten m\xFCssen enthalten sein (DB, Redis, etc.) - production-ready!
2199
- - Images m\xFCssen immutable sein (keine Volume-Mounts f\xFCr Code)
2200
- - Datenbank-Volumes f\xFCr Persistenz sind erlaubt (named volumes)
2201
- - Environment-Variablen via \${VARIABLE} Syntax (werden zur Laufzeit injiziert)
2202
-
2203
- === ZU ERSTELLENDE DATEIEN ===
2204
-
2205
- 1. docker-compose.yml (Root-Ebene) - Datenbanken + Application Services
2206
- WICHTIG: Keine Environment-Variablen f\xFCr Application Services!
2207
- - Environment-Variablen f\xFCr api/app werden von TurboOps zur Deployment-Zeit injiziert
2208
- - Datenbanken bekommen ihre Konfiguration (Credentials kommen von TurboOps)
2209
- \`\`\`yaml
2210
- services:
2211
- # === DATENBANKEN ===
2212
- mongo:
2213
- image: mongo:7
2214
- volumes:
2215
- - mongo_data:/data/db
2216
- healthcheck:
2217
- test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"]
2218
- interval: 30s
2219
- timeout: 10s
2220
- retries: 3
2221
- restart: unless-stopped
2222
-
2223
- redis:
2224
- image: redis:7-alpine
2225
- volumes:
2226
- - redis_data:/data
2227
- healthcheck:
2228
- test: ["CMD", "redis-cli", "ping"]
2229
- interval: 30s
2230
- timeout: 10s
2231
- retries: 3
2232
- restart: unless-stopped
2233
-
2234
- # === APPLICATION SERVICES (keine environment - kommt von TurboOps!) ===
2235
- # WICHTIG: image: muss gesetzt sein f\xFCr docker compose push!
2236
- api:
2237
- build:
2238
- context: .
2239
- dockerfile: ./projects/api/Dockerfile # oder ./api/Dockerfile
2240
- image: \${IMAGE_NAME}/api:\${IMAGE_TAG:-latest}
2241
- expose:
2242
- - "3000"
2243
- depends_on:
2244
- - mongo
2245
- - redis
2246
- healthcheck:
2247
- test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000/health"]
2248
- interval: 30s
2249
- timeout: 10s
2250
- retries: 3
2251
- start_period: 40s
2252
- restart: unless-stopped
2253
-
2254
- app:
2255
- build:
2256
- context: .
2257
- dockerfile: ./projects/app/Dockerfile # oder ./app/Dockerfile
2258
- image: \${IMAGE_NAME}/app:\${IMAGE_TAG:-latest}
2259
- expose:
2260
- - "3000"
2261
- depends_on:
2262
- - api
2263
- healthcheck:
2264
- test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000"]
2265
- interval: 30s
2266
- timeout: 10s
2267
- retries: 3
2268
- restart: unless-stopped
2269
-
2270
- volumes:
2271
- mongo_data:
2272
- redis_data:
2273
- \`\`\`
2274
-
2275
- 2. Dockerfile f\xFCr jeden Service (im jeweiligen Ordner)
2276
- KRITISCH: IMMER --ignore-scripts bei npm ci verwenden!
2277
- \`\`\`dockerfile
2278
- # Stage 1: Dependencies
2279
- FROM node:22-alpine AS deps
2280
- WORKDIR /app
2281
- COPY package*.json ./
2282
- # KRITISCH: --ignore-scripts verhindert husky/prepare Fehler in Docker!
2283
- RUN npm ci --ignore-scripts
2284
-
2285
- # Stage 2: Builder
2286
- FROM node:22-alpine AS builder
2287
- WORKDIR /app
2288
- COPY --from=deps /app/node_modules ./node_modules
2289
- COPY . .
2290
- RUN npm run build
2291
-
2292
- # Stage 3: Runner (Production)
2293
- FROM node:22-alpine AS runner
2294
- WORKDIR /app
2295
- ENV NODE_ENV=production
2296
- # Non-root user f\xFCr Sicherheit
2297
- RUN addgroup -g 1001 -S nodejs && adduser -S nodejs -u 1001
2298
- COPY --from=builder --chown=nodejs:nodejs /app/dist ./dist
2299
- COPY --from=builder --chown=nodejs:nodejs /app/node_modules ./node_modules
2300
- COPY --from=builder --chown=nodejs:nodejs /app/package.json ./
2301
-
2302
- # Writable directories for runtime files (TUS uploads, temp processing)
2303
- RUN mkdir -p /app/uploads/tus /app/temp && chown -R nodejs:nodejs /app/uploads /app/temp
2304
-
2305
- USER nodejs
2306
- EXPOSE 3000
2307
- CMD ["node", "dist/main.js"]
2308
- \`\`\`
2309
-
2310
- 3. .dockerignore (Root-Ebene)
2311
- \`\`\`
2312
- node_modules
2313
- .git
2314
- .env*
2315
- *.log
2316
- dist
2317
- .nuxt
2318
- .output
2319
- coverage
2320
- \`\`\`
2321
-
2322
- === WICHTIGE REGELN F\xDCR TURBOOPS ===
2323
- - KRITISCH: IMMER \`npm ci --ignore-scripts\` verwenden (verhindert husky/prepare Fehler!)
2324
- - KRITISCH: KEINE \`ports:\` \u2192 Verwende \`expose:\` (nur im Docker-Netzwerk sichtbar)
2325
- - KRITISCH: KEINE \`environment:\` f\xFCr Application Services (api, app) \u2192 Env-Variablen werden von TurboOps zur Deployment-Zeit injiziert
2326
- - Datenbank-Services (mongo, redis, etc.) OHNE environment - Credentials kommen von TurboOps
2327
- - Ein Reverse-Proxy (Traefik) routet externe Anfragen
2328
- - Pr\xFCfe die tats\xE4chliche Projektstruktur (projects/, packages/, apps/, oder flach)
2329
- - Passe Dockerfile-Pfade entsprechend an
2330
- - NestJS: CMD ["node", "dist/main.js"]
2331
- - Nuxt: CMD ["node", ".output/server/index.mjs"]
2332
- - Nutze wget statt curl f\xFCr healthchecks (curl nicht in alpine)
2333
- - Named volumes f\xFCr Datenbank-Persistenz (mongo_data, redis_data, etc.)
2334
- - Keine Code-Volume-Mounts (Code ist im Image)
2335
- - depends_on mit condition: service_healthy f\xFCr korrekten Start-Order
2336
- - Erkenne ben\xF6tigte Services aus package.json (mongoose \u2192 mongo, ioredis \u2192 redis, etc.)
2337
-
2338
- Erstelle alle notwendigen Dateien basierend auf der erkannten Projektstruktur und Abh\xE4ngigkeiten.`;
2339
- const success = await aiToolsService.runWithPrompt(tool, prompt, verbose);
2530
+ const success = await aiToolsService.runWithPrompt(
2531
+ tool,
2532
+ DOCKER_SETUP_PROMPT,
2533
+ verbose
2534
+ );
2340
2535
  if (success) {
2341
2536
  logger.success("Docker-Setup wurde erstellt!");
2342
2537
  await apiClient.updateProjectConfig(projectId, {
@@ -2353,12 +2548,12 @@ async function detectAndUploadCompose(projectSlug) {
2353
2548
  if (!composePath) {
2354
2549
  return;
2355
2550
  }
2356
- const fullPath = resolve(process.cwd(), composePath);
2357
- if (!existsSync3(fullPath)) {
2551
+ const fullPath = resolve2(process.cwd(), composePath);
2552
+ if (!existsSync4(fullPath)) {
2358
2553
  return;
2359
2554
  }
2360
2555
  try {
2361
- const content = readFileSync2(fullPath, "utf-8");
2556
+ const content = readFileSync3(fullPath, "utf-8");
2362
2557
  const envExamples = collectEnvExamples();
2363
2558
  const { error } = await apiClient.uploadCompose(
2364
2559
  project.id,
@@ -2384,8 +2579,8 @@ async function integratePipelineWithAI(detection, projectSlug, verbose = false,
2384
2579
  const tool = await aiToolsService.selectTool();
2385
2580
  if (!tool) return;
2386
2581
  const pipelineType = detection.hasGitLabPipeline ? "gitlab" : "github";
2387
- const pipelineFile = detection.pipelinePath;
2388
- let templateContent = "";
2582
+ const pipelineFile = detection.pipelinePath || (pipelineType === "github" ? ".github/workflows/deploy.yml" : ".gitlab-ci.yml");
2583
+ let templateContent;
2389
2584
  if (projectId) {
2390
2585
  const { data: pipelineTemplate, error } = await withSpinner(
2391
2586
  "Lade TurboOps Pipeline-Template...",
@@ -2400,176 +2595,33 @@ async function integratePipelineWithAI(detection, projectSlug, verbose = false,
2400
2595
  templateContent = pipelineTemplate.content;
2401
2596
  }
2402
2597
  }
2403
- const pipelineTypeName = pipelineType === "gitlab" ? "GitLab CI" : "GitHub Actions";
2404
- const prompt = templateContent ? `WICHTIG: Integriere das TurboOps Deployment-Template EXAKT wie vorgegeben in die bestehende ${pipelineTypeName} Pipeline.
2405
-
2406
- Bestehende Pipeline-Datei: ${pipelineFile}
2407
-
2408
- === TURBOOPS TEMPLATE (NICHT VER\xC4NDERN!) ===
2409
- \`\`\`yaml
2410
- ${templateContent}
2411
- \`\`\`
2412
- === ENDE TEMPLATE ===
2413
-
2414
- STRIKTE REGELN:
2415
- 1. Das TurboOps Template MUSS EXAKT so \xFCbernommen werden wie oben angegeben
2416
- 2. KEINE \xC4nderungen an:
2417
- - Variables (IMAGE_NAME, DOCKER_TLS_CERTDIR)
2418
- - Job-Namen (build, deploy-dev, deploy-test, deploy-prod)
2419
- - Script-Befehlen (turbo deploy, docker build, etc.)
2420
- - Branch-Rules (die sind korrekt f\xFCr die konfigurierten Stages)
2421
- - needs/dependencies zwischen Jobs
2422
- 3. NUR erlaubte \xC4nderungen:
2423
- - Stages der bestehenden Pipeline VOR den TurboOps-Stages einf\xFCgen
2424
- - Bestehende Jobs der Pipeline BEHALTEN (z.B. lint, test)
2425
- - TurboOps "build" Job muss von bestehenden Build-Jobs abh\xE4ngen (needs)
2426
-
2427
- MERGE-STRATEGIE:
2428
- - Bestehende stages: [lint, test, build] + TurboOps stages: [build, deploy-dev, deploy-test, deploy-prod]
2429
- - Ergebnis: stages: [lint, test, build, turboops-build, deploy-dev, deploy-test, deploy-prod]
2430
- - Der TurboOps "build" Job sollte in "turboops-build" umbenannt werden falls es bereits einen "build" Job gibt
2431
- - turboops-build needs: [build] (vom bestehenden build Job)
2432
-
2433
- Modifiziere die Datei "${pipelineFile}" entsprechend.` : `Erstelle eine neue ${pipelineTypeName} Pipeline f\xFCr TurboOps Deployment.
2434
-
2435
- Projekt-Slug: ${projectSlug}
2436
- Pipeline-Datei: ${pipelineFile}
2437
-
2438
- WICHTIG - Branch-zu-Stage-Zuordnung (STRIKT EINHALTEN!):
2439
- - dev Stage \u2192 deployed NUR wenn Branch == "dev"
2440
- - test Stage \u2192 deployed NUR wenn Branch == "test"
2441
- - prod Stage \u2192 deployed NUR wenn Branch == "main"
2442
-
2443
- PIPELINE-STRUKTUR:
2444
-
2445
- 1. BUILD-JOB (l\xE4uft NUR auf dem default branch z.B. dev):
2446
- stages: [build, deploy]
2447
-
2448
- build:
2449
- image: docker:24-dind
2450
- stage: build
2451
- services: [docker:24-dind]
2452
- before_script:
2453
- - docker login -u ${projectSlug} -p \${TURBOOPS_TOKEN} ${configService.getRegistryUrl()}
2454
- script:
2455
- - docker compose -f docker-compose.yml build
2456
- - docker compose -f docker-compose.yml push
2457
- only:
2458
- - dev # L\xE4uft nur auf dem default branch
2459
-
2460
- 2. DEPLOY-JOBS (jeder NUR auf seinem Branch!):
2461
- WICHTIG: KEIN --image Flag n\xF6tig! Die CLI erkennt CI_COMMIT_SHA automatisch!
2462
-
2463
- deploy-dev:
2464
- image: node:22-alpine
2465
- stage: deploy
2466
- before_script:
2467
- - npm install -g @turboops/cli${configService.getEnvironment() === "dev" ? "@dev" : ""}
2468
- - turbo config set token \${TURBOOPS_TOKEN}
2469
- script:
2470
- - turbo deploy dev --wait # CLI erkennt Image automatisch via CI_COMMIT_SHA
2471
- only:
2472
- - dev # NUR auf dev Branch!
2473
-
2474
- deploy-test:
2475
- image: node:22-alpine
2476
- stage: deploy
2477
- before_script:
2478
- - npm install -g @turboops/cli${configService.getEnvironment() === "dev" ? "@dev" : ""}
2479
- - turbo config set token \${TURBOOPS_TOKEN}
2480
- script:
2481
- - turbo deploy test --wait
2482
- only:
2483
- - test # NUR auf test Branch!
2484
-
2485
- deploy-prod:
2486
- image: node:22-alpine
2487
- stage: deploy
2488
- before_script:
2489
- - npm install -g @turboops/cli${configService.getEnvironment() === "dev" ? "@dev" : ""}
2490
- - turbo config set token \${TURBOOPS_TOKEN}
2491
- script:
2492
- - turbo deploy prod --wait
2493
- only:
2494
- - main # NUR auf main Branch!
2495
-
2496
- WICHTIG:
2497
- - Der Build l\xE4uft auf ALLEN Branches (dev, test, main)
2498
- - Jeder Deploy-Job l\xE4uft NUR auf seinem zugeordneten Branch
2499
- - KEIN --image Flag n\xF6tig - CLI erkennt CI_COMMIT_SHA/GITHUB_SHA automatisch
2500
- - Registry-URL: ${configService.getRegistryUrl()}/${projectSlug}
2501
- - Secrets ben\xF6tigt: TURBOOPS_TOKEN
2502
-
2503
- Erstelle die Datei "${pipelineFile}".`;
2504
- const success = await aiToolsService.runWithPrompt(tool, prompt, verbose);
2505
- if (success) {
2506
- logger.success("Pipeline wurde mit AI aktualisiert!");
2507
- if (projectId) {
2508
- await apiClient.updateProjectConfig(projectId, {
2509
- pipelineConfig: {
2510
- hasPipeline: true,
2511
- pipelineType
2512
- }
2513
- });
2514
- }
2515
- logger.newline();
2516
- logger.info(
2517
- "Vergessen Sie nicht, das CI/CD Secret TURBOOPS_TOKEN zu konfigurieren."
2518
- );
2519
- }
2520
- }
2521
- var DIR_TO_SERVICE = {
2522
- ".": "_root",
2523
- "projects/api": "api",
2524
- "projects/app": "app",
2525
- "packages/api": "api",
2526
- "packages/app": "app",
2527
- api: "api",
2528
- app: "app",
2529
- backend: "api",
2530
- frontend: "app",
2531
- server: "api",
2532
- client: "app"
2533
- };
2534
- function collectEnvExamples() {
2535
- const cwd = process.cwd();
2536
- const envFileNames = [".env.example", ".env.sample"];
2537
- const result = { services: {} };
2538
- let found = false;
2539
- for (const [dir, serviceName] of Object.entries(DIR_TO_SERVICE)) {
2540
- for (const fileName of envFileNames) {
2541
- const filePath = resolve(cwd, dir, fileName);
2542
- if (!existsSync3(filePath)) {
2543
- continue;
2544
- }
2545
- const relativePath = dir === "." ? fileName : `${dir}/${fileName}`;
2546
- const fileSize = statSync(filePath).size;
2547
- if (fileSize > 64 * 1024) {
2548
- logger.warning(`${relativePath} ist zu gro\xDF (> 64 KB) \u2014 \xFCbersprungen.`);
2549
- break;
2550
- }
2551
- const content = readFileSync2(filePath, "utf-8");
2552
- found = true;
2553
- if (serviceName === "_root") {
2554
- result.root = content;
2555
- logger.info(
2556
- `${relativePath} gefunden \u2014 Stage-Umgebungsvariablen werden vorbereitet.`
2557
- );
2558
- } else if (!result.services[serviceName]) {
2559
- result.services[serviceName] = content;
2560
- logger.info(
2561
- `${relativePath} gefunden \u2014 Umgebungsvariablen f\xFCr Service "${serviceName}" werden vorbereitet.`
2562
- );
2563
- }
2564
- break;
2565
- }
2566
- }
2567
- return found ? result : void 0;
2568
- }
2598
+ const prompt = buildPipelineIntegrationPrompt(
2599
+ pipelineType,
2600
+ projectSlug,
2601
+ pipelineFile,
2602
+ templateContent
2603
+ );
2604
+ const success = await aiToolsService.runWithPrompt(tool, prompt, verbose);
2605
+ if (success) {
2606
+ logger.success("Pipeline wurde mit AI aktualisiert!");
2607
+ if (projectId) {
2608
+ await apiClient.updateProjectConfig(projectId, {
2609
+ pipelineConfig: {
2610
+ hasPipeline: true,
2611
+ pipelineType
2612
+ }
2613
+ });
2614
+ }
2615
+ logger.newline();
2616
+ logger.info(
2617
+ "Vergessen Sie nicht, das CI/CD Secret TURBOOPS_TOKEN zu konfigurieren."
2618
+ );
2619
+ }
2620
+ }
2569
2621
 
2570
2622
  // src/commands/deploy.ts
2571
- import { readFileSync as readFileSync3, existsSync as existsSync4 } from "fs";
2572
- import { resolve as resolve2 } from "path";
2623
+ import { readFileSync as readFileSync4, existsSync as existsSync5 } from "fs";
2624
+ import { resolve as resolve3 } from "path";
2573
2625
  import { Command as Command4 } from "commander";
2574
2626
  import chalk6 from "chalk";
2575
2627
  var DOCKER_TAG_REGEX = /^[a-zA-Z0-9][a-zA-Z0-9._-]{0,127}$/;
@@ -2637,20 +2689,20 @@ var deployCommand = new Command4("deploy").description("Trigger a deployment (fo
2637
2689
  let composeFileContent;
2638
2690
  if (isFirstStage) {
2639
2691
  if (typeof options.compose === "string") {
2640
- const composePath = resolve2(options.compose);
2641
- if (!existsSync4(composePath)) {
2692
+ const composePath = resolve3(options.compose);
2693
+ if (!existsSync5(composePath)) {
2642
2694
  logger.error(`Compose file not found: ${composePath}`);
2643
2695
  process.exit(11 /* CONFIG_ERROR */);
2644
2696
  }
2645
- composeFileContent = readFileSync3(composePath, "utf-8");
2697
+ composeFileContent = readFileSync4(composePath, "utf-8");
2646
2698
  logger.info(`Using compose file: ${composePath}`);
2647
2699
  } else if (project.detectedConfig?.composePath) {
2648
- const composePath = resolve2(
2700
+ const composePath = resolve3(
2649
2701
  process.cwd(),
2650
2702
  project.detectedConfig.composePath
2651
2703
  );
2652
- if (existsSync4(composePath)) {
2653
- composeFileContent = readFileSync3(composePath, "utf-8");
2704
+ if (existsSync5(composePath)) {
2705
+ composeFileContent = readFileSync4(composePath, "utf-8");
2654
2706
  logger.info(
2655
2707
  `Using project compose file: ${project.detectedConfig.composePath}`
2656
2708
  );
@@ -2679,11 +2731,13 @@ var deployCommand = new Command4("deploy").description("Trigger a deployment (fo
2679
2731
  `Stage "${env.name}" uses promotion \u2014 compose file from previous stage`
2680
2732
  );
2681
2733
  }
2734
+ const envExamples = collectEnvExamples({ silent: true });
2682
2735
  logger.info("Triggering deployment...");
2683
2736
  const { data: deployment, error } = await apiClient.deploy(
2684
2737
  env.id,
2685
2738
  imageTag,
2686
- composeFileContent
2739
+ composeFileContent,
2740
+ envExamples
2687
2741
  );
2688
2742
  if (error) {
2689
2743
  logger.error(`Failed to trigger deployment: ${error}`);
@@ -2732,15 +2786,20 @@ var deployCommand = new Command4("deploy").description("Trigger a deployment (fo
2732
2786
  logger.newline();
2733
2787
  logger.success("Deployment completed successfully!");
2734
2788
  if (finalStatus.healthStatus) {
2735
- logger.info(
2736
- `Health: ${finalStatus.healthStatus.healthy}/${finalStatus.healthStatus.total} containers healthy`
2737
- );
2789
+ const { healthy, total } = finalStatus.healthStatus;
2790
+ const allHealthy = healthy >= total;
2791
+ const healthMsg = `Health: ${healthy}/${total} containers healthy`;
2792
+ if (allHealthy) {
2793
+ console.log(chalk6.green(` \u2713 ${healthMsg}`));
2794
+ } else {
2795
+ console.log(chalk6.yellow(` \u26A0 ${healthMsg}`));
2796
+ }
2738
2797
  }
2739
2798
  } else if (finalStatus.status === "failed") {
2740
2799
  logger.newline();
2741
- logger.error("Deployment failed!");
2800
+ console.log(chalk6.red.bold(" \u2717 Deployment failed!"));
2742
2801
  if (finalStatus.errorMessage) {
2743
- logger.error(`Error: ${finalStatus.errorMessage}`);
2802
+ console.log(chalk6.red(` Error: ${finalStatus.errorMessage}`));
2744
2803
  }
2745
2804
  await printContainerLogs(env.id);
2746
2805
  process.exit(1 /* ERROR */);
@@ -2775,9 +2834,10 @@ async function printContainerLogs(stageId) {
2775
2834
  const time = new Date(log.timestamp).toLocaleTimeString();
2776
2835
  const levelColor = getLogLevelColor(log.level);
2777
2836
  const container = log.container ? chalk6.dim(`[${log.container}]`) : "";
2837
+ const message = isErrorLevel(log.level) ? chalk6.red(log.message) : log.message;
2778
2838
  if (!isJsonMode()) {
2779
2839
  console.log(
2780
- ` ${chalk6.dim(time)} ${levelColor(log.level.padEnd(5))} ${container} ${log.message}`
2840
+ ` ${chalk6.dim(time)} ${levelColor(log.level.padEnd(5))} ${container} ${message}`
2781
2841
  );
2782
2842
  }
2783
2843
  }
@@ -2785,6 +2845,9 @@ async function printContainerLogs(stageId) {
2785
2845
  } catch {
2786
2846
  }
2787
2847
  }
2848
+ function isErrorLevel(level) {
2849
+ return ["error", "fatal", "err", "crit"].includes(level.toLowerCase());
2850
+ }
2788
2851
  function getLogLevelColor(level) {
2789
2852
  switch (level.toLowerCase()) {
2790
2853
  case "error":
@@ -3018,190 +3081,37 @@ async function showSecrets(projectId, pipelineType) {
3018
3081
  async function createDockerSetupWithAI2() {
3019
3082
  const tool = await aiToolsService.selectTool();
3020
3083
  if (!tool) return;
3021
- const prompt = `Analysiere dieses Projekt und erstelle ein vollst\xE4ndiges Docker-Setup f\xFCr Production Deployment.
3022
-
3023
- **Wichtig: TurboOps ben\xF6tigt eine docker-compose.yml auf Root-Ebene!**
3024
-
3025
- KRITISCHE ANFORDERUNGEN:
3026
-
3027
- 1. docker-compose.yml im Projekt-Root:
3028
- - Services f\xFCr alle Komponenten (api, app, etc.)
3029
- - Verwende build-Direktive mit Pfad zu den Dockerfiles
3030
- - KEINE version: Angabe (deprecated)
3031
- - Health-Checks f\xFCr alle Services
3032
-
3033
- 2. Dockerfiles - WICHTIGE REGELN:
3034
- - Multi-stage builds (deps -> builder -> runner)
3035
- - IMMER \`npm ci --ignore-scripts\` verwenden (verhindert husky/prepare Fehler!)
3036
- - Non-root User f\xFCr Production
3037
- - Minimales Base-Image (node:22-alpine)
3038
-
3039
- 3. .dockerignore auf Root-Ebene:
3040
- - node_modules, .git, dist, .env*, *.log
3041
-
3042
- 4. Monorepo-Struktur beachten:
3043
- - Pr\xFCfe projects/, packages/, apps/ Ordner
3044
- - Context muss Root sein f\xFCr Monorepo-Zugriff
3045
-
3046
- DOCKERFILE-TEMPLATE (STRIKT EINHALTEN!):
3047
- \`\`\`dockerfile
3048
- # Stage 1: Dependencies
3049
- FROM node:22-alpine AS deps
3050
- WORKDIR /app
3051
- COPY package*.json ./
3052
- # WICHTIG: --ignore-scripts verhindert husky/prepare Fehler!
3053
- RUN npm ci --ignore-scripts
3054
-
3055
- # Stage 2: Builder
3056
- FROM node:22-alpine AS builder
3057
- WORKDIR /app
3058
- COPY --from=deps /app/node_modules ./node_modules
3059
- COPY . .
3060
- RUN npm run build
3061
-
3062
- # Stage 3: Runner (Production)
3063
- FROM node:22-alpine AS runner
3064
- WORKDIR /app
3065
- ENV NODE_ENV=production
3066
- RUN addgroup -g 1001 -S nodejs && adduser -S nodejs -u 1001
3067
- COPY --from=builder --chown=nodejs:nodejs /app/dist ./dist
3068
- COPY --from=builder --chown=nodejs:nodejs /app/node_modules ./node_modules
3069
- COPY --from=builder --chown=nodejs:nodejs /app/package.json ./
3070
-
3071
- # Writable directories for runtime files (TUS uploads, temp processing)
3072
- RUN mkdir -p /app/uploads/tus /app/temp && chown -R nodejs:nodejs /app/uploads /app/temp
3073
-
3074
- USER nodejs
3075
- EXPOSE 3000
3076
- CMD ["node", "dist/main.js"]
3077
- \`\`\`
3078
-
3079
- docker-compose.yml TEMPLATE:
3080
- \`\`\`yaml
3081
- services:
3082
- # === DATENBANKEN (erkenne aus package.json: mongoose \u2192 mongo, ioredis \u2192 redis) ===
3083
- mongo:
3084
- image: mongo:7
3085
- volumes:
3086
- - mongo_data:/data/db
3087
- healthcheck:
3088
- test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"]
3089
- interval: 30s
3090
- timeout: 10s
3091
- retries: 3
3092
- restart: unless-stopped
3093
-
3094
- redis:
3095
- image: redis:7-alpine
3096
- volumes:
3097
- - redis_data:/data
3098
- healthcheck:
3099
- test: ["CMD", "redis-cli", "ping"]
3100
- interval: 30s
3101
- timeout: 10s
3102
- retries: 3
3103
- restart: unless-stopped
3104
-
3105
- # === APPLICATION SERVICES (KEINE environment!) ===
3106
- # WICHTIG: image: muss gesetzt sein f\xFCr docker compose push!
3107
- api:
3108
- build:
3109
- context: .
3110
- dockerfile: projects/api/Dockerfile
3111
- image: \${IMAGE_NAME}/api:\${IMAGE_TAG:-latest}
3112
- expose:
3113
- - "3000"
3114
- depends_on:
3115
- mongo:
3116
- condition: service_healthy
3117
- redis:
3118
- condition: service_healthy
3119
- healthcheck:
3120
- test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000/health"]
3121
- interval: 30s
3122
- timeout: 10s
3123
- retries: 3
3124
- restart: unless-stopped
3125
-
3126
- app:
3127
- build:
3128
- context: .
3129
- dockerfile: projects/app/Dockerfile
3130
- image: \${IMAGE_NAME}/app:\${IMAGE_TAG:-latest}
3131
- expose:
3132
- - "3000"
3133
- depends_on:
3134
- api:
3135
- condition: service_healthy
3136
- healthcheck:
3137
- test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000"]
3138
- interval: 30s
3139
- timeout: 10s
3140
- retries: 3
3141
- restart: unless-stopped
3142
-
3143
- volumes:
3144
- mongo_data:
3145
- redis_data:
3146
- \`\`\`
3147
-
3148
- WICHTIG - TURBOOPS DEPLOYMENT:
3149
- - KEINE \`ports:\` \u2192 Verwende \`expose:\` (nur im Docker-Netzwerk sichtbar)
3150
- - KEINE \`environment:\` f\xFCr Application Services \u2192 Env-Variablen werden von TurboOps zur Deployment-Zeit injiziert
3151
- - Datenbanken OHNE environment - Credentials kommen von TurboOps
3152
- - Ein Reverse-Proxy (Traefik) routet externe Anfragen
3153
- - Verhindert Port-Konflikte bei mehreren Projekten auf einem Server
3154
-
3155
- Erstelle alle notwendigen Dateien.`;
3156
- const success = await aiToolsService.runWithPrompt(tool, prompt);
3084
+ const success = await aiToolsService.runWithPrompt(tool, DOCKER_SETUP_PROMPT);
3157
3085
  if (success) {
3158
3086
  logger.success("Docker-Setup wurde erstellt!");
3159
3087
  }
3160
3088
  }
3161
3089
  async function integratePipelineWithAI2(pipelineType, projectSlug, pipelinePath) {
3090
+ const { project } = await getCommandContext();
3162
3091
  const tool = await aiToolsService.selectTool();
3163
3092
  if (!tool) {
3164
3093
  addJsonData({ generated: false, reason: "no_ai_tool" });
3165
3094
  return;
3166
3095
  }
3167
- const typeName = pipelineType === "gitlab" ? "GitLab CI" : "GitHub Actions";
3168
- const prompt = `Integriere TurboOps Deployment in die bestehende ${typeName} Pipeline.
3169
-
3170
- Projekt-Slug: ${projectSlug}
3171
- Pipeline-Datei: ${pipelinePath}
3172
-
3173
- WICHTIG - Branch-zu-Stage-Zuordnung:
3174
- - dev Stage \u2192 wird NUR auf dem "dev" Branch deployed
3175
- - test Stage \u2192 wird NUR auf dem "test" Branch deployed
3176
- - prod Stage \u2192 wird NUR auf dem "main" Branch deployed
3177
-
3178
- PIPELINE-STRUKTUR:
3179
- 1. Build-Job: L\xE4uft auf ALLEN Branches (dev, test, main)
3180
- - Docker Images bauen mit docker-compose build
3181
- - Images pushen mit docker-compose push
3182
- - Registry: ${configService.getRegistryUrl()}/${projectSlug}
3183
- - Image-Tag: \${CI_COMMIT_SHA} (wird automatisch von CLI erkannt)
3184
-
3185
- 2. Deploy-Jobs: Jeder Job l\xE4uft NUR auf seinem Branch!
3186
- - deploy-dev: rules: if: $CI_COMMIT_BRANCH == "dev"
3187
- - deploy-test: rules: if: $CI_COMMIT_BRANCH == "test"
3188
- - deploy-prod: rules: if: $CI_COMMIT_BRANCH == "main" (when: manual)
3189
-
3190
- DEPLOY-BEFEHLE (WICHTIG: KEIN --image Flag n\xF6tig!):
3191
- - TurboOps CLI installieren: npm install -g @turboops/cli${configService.getEnvironment() === "dev" ? "@dev" : ""}
3192
- - Token setzen: turbo config set token \${TURBOOPS_TOKEN}
3193
- - Deploy: turbo deploy <stage> --wait
3194
-
3195
- Die CLI erkennt automatisch CI_COMMIT_SHA/GITHUB_SHA und verwendet das richtige Image!
3196
-
3197
- DEPENDENCIES:
3198
- - deploy-dev needs: [build]
3199
- - deploy-test needs: [deploy-dev]
3200
- - deploy-prod needs: [deploy-test]
3201
-
3202
- Secrets ben\xF6tigt: TURBOOPS_TOKEN
3203
-
3204
- Modifiziere die Datei "${pipelinePath}" entsprechend.`;
3096
+ let templateContent;
3097
+ const { data: pipelineTemplate, error } = await withSpinner(
3098
+ "Lade TurboOps Pipeline-Template...",
3099
+ () => apiClient.generatePipeline(project.id, pipelineType)
3100
+ );
3101
+ if (error || !pipelineTemplate) {
3102
+ logger.warning(
3103
+ `Pipeline-Template konnte nicht geladen werden: ${error || "Unbekannter Fehler"}`
3104
+ );
3105
+ logger.info("AI wird ein generisches Template verwenden.");
3106
+ } else {
3107
+ templateContent = pipelineTemplate.content;
3108
+ }
3109
+ const prompt = buildPipelineIntegrationPrompt(
3110
+ pipelineType,
3111
+ projectSlug,
3112
+ pipelinePath,
3113
+ templateContent
3114
+ );
3205
3115
  const success = await aiToolsService.runWithPrompt(tool, prompt);
3206
3116
  if (success) {
3207
3117
  logger.success("Pipeline wurde mit AI aktualisiert!");
@@ -3218,143 +3128,6 @@ Modifiziere die Datei "${pipelinePath}" entsprechend.`;
3218
3128
  // src/commands/docker.ts
3219
3129
  import { Command as Command6 } from "commander";
3220
3130
  import prompts4 from "prompts";
3221
- var DOCKER_SETUP_PROMPT = `Analysiere dieses Projekt und erstelle ein vollst\xE4ndiges Docker-Setup f\xFCr Production Deployment.
3222
-
3223
- **Wichtig: TurboOps ben\xF6tigt eine docker-compose.yml auf Root-Ebene!**
3224
-
3225
- KRITISCHE ANFORDERUNGEN:
3226
-
3227
- 1. docker-compose.yml im Projekt-Root:
3228
- - Services f\xFCr alle Komponenten (api, app, etc.)
3229
- - Verwende build-Direktive mit Pfad zu den Dockerfiles
3230
- - KEINE version: Angabe (deprecated)
3231
- - Health-Checks f\xFCr alle Services
3232
-
3233
- 2. Dockerfiles - WICHTIGE REGELN:
3234
- - Multi-stage builds (deps -> builder -> runner)
3235
- - IMMER \`npm ci --ignore-scripts\` verwenden (verhindert husky/prepare Fehler!)
3236
- - Non-root User f\xFCr Production
3237
- - Minimales Base-Image (node:22-alpine)
3238
-
3239
- 3. .dockerignore auf Root-Ebene:
3240
- - node_modules, .git, dist, .env*, *.log
3241
-
3242
- 4. Monorepo-Struktur beachten:
3243
- - Pr\xFCfe projects/, packages/, apps/ Ordner
3244
- - Context muss Root sein f\xFCr Monorepo-Zugriff
3245
-
3246
- DOCKERFILE-TEMPLATE (STRIKT EINHALTEN!):
3247
- \`\`\`dockerfile
3248
- # Stage 1: Dependencies
3249
- FROM node:22-alpine AS deps
3250
- WORKDIR /app
3251
- COPY package*.json ./
3252
- # WICHTIG: --ignore-scripts verhindert husky/prepare Fehler!
3253
- RUN npm ci --ignore-scripts
3254
-
3255
- # Stage 2: Builder
3256
- FROM node:22-alpine AS builder
3257
- WORKDIR /app
3258
- COPY --from=deps /app/node_modules ./node_modules
3259
- COPY . .
3260
- RUN npm run build
3261
-
3262
- # Stage 3: Runner (Production)
3263
- FROM node:22-alpine AS runner
3264
- WORKDIR /app
3265
- ENV NODE_ENV=production
3266
-
3267
- # Non-root user
3268
- RUN addgroup -g 1001 -S nodejs && adduser -S nodejs -u 1001
3269
-
3270
- COPY --from=builder --chown=nodejs:nodejs /app/dist ./dist
3271
- COPY --from=builder --chown=nodejs:nodejs /app/node_modules ./node_modules
3272
- COPY --from=builder --chown=nodejs:nodejs /app/package.json ./
3273
-
3274
- # Writable directories for runtime files (TUS uploads, temp processing)
3275
- RUN mkdir -p /app/uploads/tus /app/temp && chown -R nodejs:nodejs /app/uploads /app/temp
3276
-
3277
- USER nodejs
3278
-
3279
- EXPOSE 3000
3280
- CMD ["node", "dist/main.js"]
3281
- \`\`\`
3282
-
3283
- docker-compose.yml TEMPLATE:
3284
- \`\`\`yaml
3285
- services:
3286
- # === DATENBANKEN (erkenne aus package.json: mongoose \u2192 mongo, ioredis \u2192 redis) ===
3287
- mongo:
3288
- image: mongo:7
3289
- volumes:
3290
- - mongo_data:/data/db
3291
- healthcheck:
3292
- test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"]
3293
- interval: 30s
3294
- timeout: 10s
3295
- retries: 3
3296
- restart: unless-stopped
3297
-
3298
- redis:
3299
- image: redis:7-alpine
3300
- volumes:
3301
- - redis_data:/data
3302
- healthcheck:
3303
- test: ["CMD", "redis-cli", "ping"]
3304
- interval: 30s
3305
- timeout: 10s
3306
- retries: 3
3307
- restart: unless-stopped
3308
-
3309
- # === APPLICATION SERVICES (KEINE environment!) ===
3310
- # WICHTIG: image: muss gesetzt sein f\xFCr docker compose push!
3311
- # Format: \${IMAGE_NAME:-registry/project}/service:\${IMAGE_TAG:-latest}
3312
- api:
3313
- build:
3314
- context: .
3315
- dockerfile: projects/api/Dockerfile
3316
- image: \${IMAGE_NAME}/api:\${IMAGE_TAG:-latest}
3317
- expose:
3318
- - "3000"
3319
- depends_on:
3320
- - mongo
3321
- - redis
3322
- healthcheck:
3323
- test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000/health"]
3324
- interval: 30s
3325
- timeout: 10s
3326
- retries: 3
3327
- restart: unless-stopped
3328
-
3329
- app:
3330
- build:
3331
- context: .
3332
- dockerfile: projects/app/Dockerfile
3333
- image: \${IMAGE_NAME}/app:\${IMAGE_TAG:-latest}
3334
- expose:
3335
- - "3000"
3336
- depends_on:
3337
- - api
3338
- healthcheck:
3339
- test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000"]
3340
- interval: 30s
3341
- timeout: 10s
3342
- retries: 3
3343
- restart: unless-stopped
3344
-
3345
- volumes:
3346
- mongo_data:
3347
- redis_data:
3348
- \`\`\`
3349
-
3350
- WICHTIG - TURBOOPS DEPLOYMENT:
3351
- - KEINE \`ports:\` \u2192 Verwende \`expose:\` (nur im Docker-Netzwerk sichtbar)
3352
- - KEINE \`environment:\` f\xFCr Application Services \u2192 Env-Variablen werden von TurboOps zur Deployment-Zeit injiziert
3353
- - Datenbanken OHNE environment - Credentials kommen von TurboOps
3354
- - Ein Reverse-Proxy (Traefik) routet externe Anfragen
3355
- - Verhindert Port-Konflikte bei mehreren Projekten auf einem Server
3356
-
3357
- Erstelle alle notwendigen Dateien.`;
3358
3131
  var dockerCommand = new Command6("docker").description(
3359
3132
  "Manage Docker configuration"
3360
3133
  );