mlgym-deploy 3.3.34 → 3.3.41

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +758 -20
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -18,7 +18,7 @@ import crypto from 'crypto';
18
18
  const execAsync = promisify(exec);
19
19
 
20
20
  // Current version of this MCP server - INCREMENT FOR WORKFLOW FIXES
21
- const CURRENT_VERSION = '3.3.34'; // Fix duplicate project creation + SSH key config always updates for current user
21
+ const CURRENT_VERSION = '3.3.40'; // Fix auth.jwt_token -> auth.token consistency bug
22
22
  const PACKAGE_NAME = 'mlgym-deploy';
23
23
 
24
24
  // Debug logging configuration - ENABLED BY DEFAULT
@@ -530,6 +530,11 @@ async function analyzeProject(local_path = '.') {
530
530
  analysis.framework = 'nextjs';
531
531
  analysis.build_command = packageJson.scripts?.build || 'npm run build';
532
532
  analysis.start_command = packageJson.scripts?.start || 'npm start';
533
+ } else if (deps['@nestjs/core'] || deps['@nestjs/common']) {
534
+ // NestJS - TypeScript backend framework requiring build step
535
+ analysis.framework = 'nestjs';
536
+ analysis.build_command = packageJson.scripts?.build || 'npm run build';
537
+ analysis.start_command = packageJson.scripts?.['start:prod'] || 'node dist/main.js';
533
538
  } else if (deps.express) {
534
539
  analysis.framework = 'express';
535
540
  analysis.start_command = packageJson.scripts?.start || 'node index.js';
@@ -613,6 +618,117 @@ async function analyzeProject(local_path = '.') {
613
618
  } catch {}
614
619
  }
615
620
 
621
+ // Check for Ruby project (Gemfile)
622
+ if (analysis.project_type === 'unknown') {
623
+ try {
624
+ await fs.access(path.join(absolutePath, 'Gemfile'));
625
+ analysis.project_type = 'ruby';
626
+ analysis.detected_files.push('Gemfile');
627
+
628
+ // Try to detect Rails vs Sinatra vs generic Ruby
629
+ try {
630
+ const gemfileContent = await fs.readFile(path.join(absolutePath, 'Gemfile'), 'utf8');
631
+ if (gemfileContent.includes("'rails'") || gemfileContent.includes('"rails"')) {
632
+ analysis.framework = 'rails';
633
+ analysis.start_command = 'rails server -b 0.0.0.0';
634
+ } else if (gemfileContent.includes("'sinatra'") || gemfileContent.includes('"sinatra"')) {
635
+ analysis.framework = 'sinatra';
636
+ analysis.start_command = 'ruby app.rb';
637
+ } else {
638
+ analysis.framework = 'ruby';
639
+ analysis.start_command = 'ruby app.rb';
640
+ }
641
+ } catch {
642
+ analysis.framework = 'ruby';
643
+ }
644
+ } catch {}
645
+ }
646
+
647
+ // Check for Rust project (Cargo.toml)
648
+ if (analysis.project_type === 'unknown') {
649
+ try {
650
+ await fs.access(path.join(absolutePath, 'Cargo.toml'));
651
+ analysis.project_type = 'rust';
652
+ analysis.detected_files.push('Cargo.toml');
653
+ analysis.framework = 'rust';
654
+ analysis.build_command = 'cargo build --release';
655
+ analysis.start_command = './target/release/app';
656
+
657
+ // Try to get binary name from Cargo.toml
658
+ try {
659
+ const cargoContent = await fs.readFile(path.join(absolutePath, 'Cargo.toml'), 'utf8');
660
+ const nameMatch = cargoContent.match(/name\s*=\s*["']([^"']+)["']/);
661
+ if (nameMatch) {
662
+ analysis.binary_name = nameMatch[1];
663
+ analysis.start_command = `./target/release/${nameMatch[1]}`;
664
+ }
665
+ } catch {}
666
+ } catch {}
667
+ }
668
+
669
+ // Check for Java project (pom.xml for Maven)
670
+ if (analysis.project_type === 'unknown') {
671
+ try {
672
+ await fs.access(path.join(absolutePath, 'pom.xml'));
673
+ analysis.project_type = 'java';
674
+ analysis.detected_files.push('pom.xml');
675
+ analysis.framework = 'maven';
676
+ analysis.build_command = 'mvn clean package -DskipTests';
677
+ analysis.start_command = 'java -jar target/app.jar';
678
+ } catch {}
679
+ }
680
+
681
+ // Check for Java project (build.gradle for Gradle)
682
+ if (analysis.project_type === 'unknown') {
683
+ try {
684
+ await fs.access(path.join(absolutePath, 'build.gradle'));
685
+ analysis.project_type = 'java';
686
+ analysis.detected_files.push('build.gradle');
687
+ analysis.framework = 'gradle';
688
+ analysis.build_command = './gradlew build -x test';
689
+ analysis.start_command = 'java -jar build/libs/app.jar';
690
+ } catch {}
691
+ }
692
+
693
+ // Check for .NET project (*.csproj)
694
+ if (analysis.project_type === 'unknown') {
695
+ try {
696
+ const files = await fs.readdir(absolutePath);
697
+ const csprojFile = files.find(f => f.endsWith('.csproj'));
698
+ if (csprojFile) {
699
+ analysis.project_type = 'dotnet';
700
+ analysis.detected_files.push(csprojFile);
701
+ analysis.framework = 'aspnet';
702
+ analysis.build_command = 'dotnet publish -c Release -o out';
703
+ analysis.start_command = 'dotnet out/app.dll';
704
+ analysis.csproj_file = csprojFile;
705
+ }
706
+ } catch {}
707
+ }
708
+
709
+ // Check for Elixir project (mix.exs)
710
+ if (analysis.project_type === 'unknown') {
711
+ try {
712
+ await fs.access(path.join(absolutePath, 'mix.exs'));
713
+ analysis.project_type = 'elixir';
714
+ analysis.detected_files.push('mix.exs');
715
+
716
+ // Check for Phoenix framework
717
+ try {
718
+ const mixContent = await fs.readFile(path.join(absolutePath, 'mix.exs'), 'utf8');
719
+ if (mixContent.includes(':phoenix')) {
720
+ analysis.framework = 'phoenix';
721
+ analysis.start_command = 'mix phx.server';
722
+ } else {
723
+ analysis.framework = 'elixir';
724
+ analysis.start_command = 'mix run --no-halt';
725
+ }
726
+ } catch {
727
+ analysis.framework = 'elixir';
728
+ }
729
+ } catch {}
730
+ }
731
+
616
732
  // Check for Scala/sbt project
617
733
  if (analysis.project_type === 'unknown') {
618
734
  try {
@@ -706,6 +822,24 @@ COPY --from=builder /app/package.json ./
706
822
  COPY --from=builder /app/public ./public
707
823
  EXPOSE 3000
708
824
  CMD ["${packageManager}", "start"]`;
825
+ } else if (framework === 'nestjs') {
826
+ // NestJS - TypeScript backend framework requiring build step
827
+ dockerfile = `# Build stage
828
+ FROM node:18-alpine AS builder
829
+ WORKDIR /app
830
+ COPY package*.json ./
831
+ RUN ${packageManager} ${packageManager === 'npm' ? 'ci' : 'install --frozen-lockfile'}
832
+ COPY . .
833
+ RUN ${packageManager} run build
834
+
835
+ # Production stage
836
+ FROM node:18-alpine
837
+ WORKDIR /app
838
+ COPY --from=builder /app/dist ./dist
839
+ COPY --from=builder /app/node_modules ./node_modules
840
+ COPY --from=builder /app/package.json ./
841
+ EXPOSE 3000
842
+ CMD ["node", "dist/main.js"]`;
709
843
  } else if (framework === 'express') {
710
844
  dockerfile = `FROM node:18-alpine
711
845
  WORKDIR /app
@@ -766,33 +900,222 @@ EXPOSE 8000
766
900
  CMD ["python", "main.py"]`;
767
901
  }
768
902
  } else if (projectType === 'php') {
769
- dockerfile = `FROM php:8.2-apache
903
+ if (framework === 'composer' || framework === 'laravel') {
904
+ // Laravel/Composer PHP project
905
+ dockerfile = `FROM php:8.2-apache
906
+
907
+ # Install PHP extensions and Composer
908
+ RUN apt-get update && apt-get install -y \\
909
+ libpng-dev libjpeg-dev libfreetype6-dev libzip-dev unzip git \\
910
+ && docker-php-ext-configure gd --with-freetype --with-jpeg \\
911
+ && docker-php-ext-install gd pdo pdo_mysql zip \\
912
+ && curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer \\
913
+ && a2enmod rewrite \\
914
+ && rm -rf /var/lib/apt/lists/*
915
+
916
+ # Set document root to Laravel's public folder
917
+ ENV APACHE_DOCUMENT_ROOT /var/www/html/public
918
+ RUN sed -ri -e 's!/var/www/html!\${APACHE_DOCUMENT_ROOT}!g' /etc/apache2/sites-available/*.conf
919
+ RUN sed -ri -e 's!/var/www/!\${APACHE_DOCUMENT_ROOT}!g' /etc/apache2/apache2.conf /etc/apache2/conf-available/*.conf
920
+
921
+ WORKDIR /var/www/html
922
+ COPY . .
923
+
924
+ # Install dependencies and set permissions
925
+ RUN composer install --no-dev --optimize-autoloader --no-interaction \\
926
+ && chown -R www-data:www-data /var/www/html \\
927
+ && chmod -R 755 /var/www/html/storage /var/www/html/bootstrap/cache 2>/dev/null || true
928
+
929
+ EXPOSE 80
930
+ CMD ["apache2-foreground"]`;
931
+ } else {
932
+ // Simple PHP project
933
+ dockerfile = `FROM php:8.2-apache
770
934
  WORKDIR /var/www/html
771
935
  COPY . .
772
936
  RUN chown -R www-data:www-data /var/www/html
773
937
  EXPOSE 80
774
938
  CMD ["apache2-foreground"]`;
939
+ }
775
940
  } else if (projectType === 'static') {
776
941
  dockerfile = `FROM nginx:alpine
777
942
  COPY . /usr/share/nginx/html
778
943
  EXPOSE 80
779
944
  CMD ["nginx", "-g", "daemon off;"]`;
780
945
  } else if (projectType === 'go') {
946
+ // Go project with CGO disabled for static binary
781
947
  dockerfile = `# Build stage
782
948
  FROM golang:1.21-alpine AS builder
783
949
  WORKDIR /app
784
- COPY go.mod go.sum ./
785
- RUN go mod download
950
+ COPY go.mod go.sum* ./
951
+ RUN go mod download 2>/dev/null || true
786
952
  COPY . .
787
- RUN go build -o app
953
+ RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o app .
788
954
 
789
955
  # Production stage
790
956
  FROM alpine:latest
791
957
  RUN apk --no-cache add ca-certificates
792
- WORKDIR /root/
958
+ WORKDIR /app
793
959
  COPY --from=builder /app/app .
794
960
  EXPOSE 8080
795
961
  CMD ["./app"]`;
962
+ } else if (projectType === 'ruby') {
963
+ if (framework === 'rails') {
964
+ // Ruby on Rails
965
+ dockerfile = `FROM ruby:3.2-slim
966
+
967
+ # Install dependencies
968
+ RUN apt-get update && apt-get install -y \\
969
+ build-essential libpq-dev nodejs npm \\
970
+ && rm -rf /var/lib/apt/lists/*
971
+
972
+ WORKDIR /app
973
+ COPY Gemfile Gemfile.lock* ./
974
+ RUN bundle install --without development test
975
+
976
+ COPY . .
977
+ RUN bundle exec rake assets:precompile 2>/dev/null || true
978
+
979
+ EXPOSE 3000
980
+ CMD ["rails", "server", "-b", "0.0.0.0"]`;
981
+ } else {
982
+ // Sinatra or generic Ruby
983
+ dockerfile = `FROM ruby:3.2-slim
984
+
985
+ RUN apt-get update && apt-get install -y build-essential && rm -rf /var/lib/apt/lists/*
986
+
987
+ WORKDIR /app
988
+ COPY Gemfile Gemfile.lock* ./
989
+ RUN bundle install
990
+
991
+ COPY . .
992
+ EXPOSE 4567
993
+ CMD ["ruby", "app.rb"]`;
994
+ }
995
+ } else if (projectType === 'rust') {
996
+ dockerfile = `# Build stage
997
+ FROM rust:1.75-slim AS builder
998
+ WORKDIR /app
999
+ COPY Cargo.toml Cargo.lock* ./
1000
+ COPY src ./src
1001
+ RUN cargo build --release
1002
+
1003
+ # Production stage
1004
+ FROM debian:bookworm-slim
1005
+ RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
1006
+ WORKDIR /app
1007
+ COPY --from=builder /app/target/release/* ./
1008
+ EXPOSE 8080
1009
+ CMD ["./app"]`;
1010
+ } else if (projectType === 'java') {
1011
+ if (framework === 'maven') {
1012
+ dockerfile = `# Build stage
1013
+ FROM maven:3.9-eclipse-temurin-17 AS builder
1014
+ WORKDIR /app
1015
+ COPY pom.xml .
1016
+ RUN mvn dependency:go-offline -B
1017
+ COPY src ./src
1018
+ RUN mvn clean package -DskipTests -B
1019
+
1020
+ # Production stage
1021
+ FROM eclipse-temurin:17-jre-jammy
1022
+ WORKDIR /app
1023
+ COPY --from=builder /app/target/*.jar app.jar
1024
+ EXPOSE 8080
1025
+ CMD ["java", "-jar", "app.jar"]`;
1026
+ } else if (framework === 'gradle') {
1027
+ dockerfile = `# Build stage
1028
+ FROM gradle:8-jdk17 AS builder
1029
+ WORKDIR /app
1030
+ COPY build.gradle settings.gradle* gradle* ./
1031
+ COPY gradle ./gradle 2>/dev/null || true
1032
+ COPY src ./src
1033
+ RUN gradle build -x test --no-daemon
1034
+
1035
+ # Production stage
1036
+ FROM eclipse-temurin:17-jre-jammy
1037
+ WORKDIR /app
1038
+ COPY --from=builder /app/build/libs/*.jar app.jar
1039
+ EXPOSE 8080
1040
+ CMD ["java", "-jar", "app.jar"]`;
1041
+ } else {
1042
+ // Generic Java with Maven fallback
1043
+ dockerfile = `# Build stage
1044
+ FROM maven:3.9-eclipse-temurin-17 AS builder
1045
+ WORKDIR /app
1046
+ COPY pom.xml .
1047
+ COPY src ./src
1048
+ RUN mvn clean package -DskipTests -B
1049
+
1050
+ # Production stage
1051
+ FROM eclipse-temurin:17-jre-jammy
1052
+ WORKDIR /app
1053
+ COPY --from=builder /app/target/*.jar app.jar
1054
+ EXPOSE 8080
1055
+ CMD ["java", "-jar", "app.jar"]`;
1056
+ }
1057
+ } else if (projectType === 'dotnet') {
1058
+ dockerfile = `# Build stage
1059
+ FROM mcr.microsoft.com/dotnet/sdk:8.0 AS builder
1060
+ WORKDIR /app
1061
+ COPY *.csproj ./
1062
+ RUN dotnet restore
1063
+ COPY . .
1064
+ RUN dotnet publish -c Release -o out
1065
+
1066
+ # Production stage
1067
+ FROM mcr.microsoft.com/dotnet/aspnet:8.0
1068
+ WORKDIR /app
1069
+ COPY --from=builder /app/out .
1070
+ EXPOSE 8080
1071
+ ENV ASPNETCORE_URLS=http://+:8080
1072
+ ENTRYPOINT ["dotnet", "app.dll"]`;
1073
+ } else if (projectType === 'elixir') {
1074
+ if (framework === 'phoenix') {
1075
+ dockerfile = `# Build stage
1076
+ FROM elixir:1.16-alpine AS builder
1077
+ RUN apk add --no-cache build-base git nodejs npm
1078
+ WORKDIR /app
1079
+
1080
+ # Install hex and rebar
1081
+ RUN mix local.hex --force && mix local.rebar --force
1082
+
1083
+ # Install dependencies
1084
+ COPY mix.exs mix.lock ./
1085
+ RUN mix deps.get --only prod
1086
+ RUN MIX_ENV=prod mix deps.compile
1087
+
1088
+ # Build assets and release
1089
+ COPY . .
1090
+ RUN cd assets && npm install && npm run deploy 2>/dev/null || true
1091
+ RUN MIX_ENV=prod mix phx.digest 2>/dev/null || true
1092
+ RUN MIX_ENV=prod mix release
1093
+
1094
+ # Production stage
1095
+ FROM alpine:3.19
1096
+ RUN apk add --no-cache libstdc++ openssl ncurses-libs
1097
+ WORKDIR /app
1098
+ COPY --from=builder /app/_build/prod/rel/app ./
1099
+ EXPOSE 4000
1100
+ ENV PHX_HOST=localhost
1101
+ CMD ["bin/app", "start"]`;
1102
+ } else {
1103
+ dockerfile = `FROM elixir:1.16-alpine
1104
+ RUN apk add --no-cache build-base
1105
+ WORKDIR /app
1106
+
1107
+ RUN mix local.hex --force && mix local.rebar --force
1108
+
1109
+ COPY mix.exs mix.lock ./
1110
+ RUN mix deps.get
1111
+ RUN mix deps.compile
1112
+
1113
+ COPY . .
1114
+ RUN mix compile
1115
+
1116
+ EXPOSE 4000
1117
+ CMD ["mix", "run", "--no-halt"]`;
1118
+ }
796
1119
  } else if (projectType === 'scala') {
797
1120
  dockerfile = `# Build stage
798
1121
  FROM sbtscala/scala-sbt:eclipse-temurin-jammy-17.0.10_7_1.10.2_2.13.15 AS builder
@@ -1384,6 +1707,284 @@ function randomizeVolumeNames(content, suffix) {
1384
1707
  return { content: result, randomized };
1385
1708
  }
1386
1709
 
1710
+ // Known valid Docker image version ranges
1711
+ const VALID_IMAGE_VERSIONS = {
1712
+ php: { min: 5, max: 8, latest: '8.3', variants: ['apache', 'fpm', 'cli', 'alpine', 'slim'] },
1713
+ node: { min: 10, max: 22, latest: '20', variants: ['alpine', 'slim', 'bullseye', 'bookworm'] },
1714
+ python: { min: 2, max: 3, latest: '3.12', subVersionMax: { 2: 7, 3: 12 } },
1715
+ ruby: { min: 2, max: 3, latest: '3.3', subVersionMax: { 2: 7, 3: 3 } },
1716
+ golang: { min: 1, max: 1, latest: '1.22', subVersionMax: { 1: 22 } },
1717
+ go: { min: 1, max: 1, latest: '1.22', subVersionMax: { 1: 22 } },
1718
+ java: { min: 8, max: 21, latest: '21' },
1719
+ openjdk: { min: 8, max: 21, latest: '21' },
1720
+ rust: { min: 1, max: 1, latest: '1.75', subVersionMax: { 1: 75 } },
1721
+ perl: { min: 5, max: 5, latest: '5.38', subVersionMax: { 5: 38 } },
1722
+ elixir: { min: 1, max: 1, latest: '1.16', subVersionMax: { 1: 16 } },
1723
+ };
1724
+
1725
+ // Validate Docker base image version
1726
+ function validateBaseImage(baseImage) {
1727
+ if (!baseImage) return null;
1728
+
1729
+ // Extract image name and tag
1730
+ const parts = baseImage.split(':');
1731
+ const imageName = parts[0].split('/').pop(); // Handle registry/image:tag format
1732
+ const tag = parts[1] || 'latest';
1733
+
1734
+ // Check if we know this image type
1735
+ const imageConfig = VALID_IMAGE_VERSIONS[imageName];
1736
+ if (!imageConfig) return null; // Unknown image, can't validate
1737
+
1738
+ // Extract version number and variant from tag (e.g., "99-apache" -> version: 99, variant: "-apache")
1739
+ const versionMatch = tag.match(/^(\d+)(?:\.(\d+))?(.*)$/);
1740
+ if (!versionMatch) return null; // Can't parse version (e.g., 'latest', 'alpine')
1741
+
1742
+ const majorVersion = parseInt(versionMatch[1], 10);
1743
+ const minorVersion = versionMatch[2] ? parseInt(versionMatch[2], 10) : null;
1744
+ const variant = versionMatch[3] || ''; // Preserve variant like "-apache", "-alpine", "-slim"
1745
+
1746
+ // Build the suggested fix preserving the variant
1747
+ const suggestedTag = `${imageConfig.latest}${variant}`;
1748
+
1749
+ // Check if major version is in valid range
1750
+ if (majorVersion < imageConfig.min || majorVersion > imageConfig.max) {
1751
+ return {
1752
+ invalid: true,
1753
+ reason: `${imageName}:${tag} has invalid version ${majorVersion} (valid: ${imageConfig.min}-${imageConfig.max})`,
1754
+ suggestedFix: `${imageName}:${suggestedTag}`,
1755
+ autofix: 'fix_base_image'
1756
+ };
1757
+ }
1758
+
1759
+ // Check minor version if we have subversion limits
1760
+ if (minorVersion !== null && imageConfig.subVersionMax && imageConfig.subVersionMax[majorVersion]) {
1761
+ if (minorVersion > imageConfig.subVersionMax[majorVersion]) {
1762
+ return {
1763
+ invalid: true,
1764
+ reason: `${imageName}:${tag} has invalid minor version (${majorVersion}.${minorVersion} doesn't exist)`,
1765
+ suggestedFix: `${imageName}:${suggestedTag}`,
1766
+ autofix: 'fix_base_image'
1767
+ };
1768
+ }
1769
+ }
1770
+
1771
+ return null; // Valid
1772
+ }
1773
+
1774
+ /**
1775
+ * Native file scanner - replaces glob dependency
1776
+ * @param {string} baseDir - Base directory to scan
1777
+ * @param {string[]} extensions - File extensions to match (e.g., ['.py', '.js'])
1778
+ * @param {string[]} ignoreDirs - Directory names to ignore (e.g., ['node_modules', 'venv'])
1779
+ * @returns {string[]} Array of relative file paths
1780
+ */
1781
+ async function scanFilesNative(baseDir, extensions, ignoreDirs = []) {
1782
+ const results = [];
1783
+ const ignoreSet = new Set(ignoreDirs);
1784
+
1785
+ async function scanDir(dir, relativePath = '') {
1786
+ try {
1787
+ const entries = await fs.readdir(dir, { withFileTypes: true });
1788
+ for (const entry of entries) {
1789
+ const fullPath = path.join(dir, entry.name);
1790
+ const relPath = relativePath ? path.join(relativePath, entry.name) : entry.name;
1791
+
1792
+ if (entry.isDirectory()) {
1793
+ // Skip ignored directories
1794
+ if (!ignoreSet.has(entry.name) && !entry.name.startsWith('.')) {
1795
+ await scanDir(fullPath, relPath);
1796
+ }
1797
+ } else if (entry.isFile()) {
1798
+ const ext = path.extname(entry.name).toLowerCase();
1799
+ if (extensions.includes(ext)) {
1800
+ results.push(relPath);
1801
+ }
1802
+ }
1803
+ }
1804
+ } catch (err) {
1805
+ // Ignore permission errors and other issues
1806
+ }
1807
+ }
1808
+
1809
+ await scanDir(baseDir);
1810
+ return results;
1811
+ }
1812
+
1813
+ /**
1814
+ * Detect port mismatch between application code and Dockerfile EXPOSE
1815
+ * Returns null if no mismatch, or object with details if mismatch found
1816
+ */
1817
+ async function detectPortMismatch(projectPath, exposedPort) {
1818
+ if (!exposedPort) return null;
1819
+
1820
+ const exposedPortNum = parseInt(exposedPort, 10);
1821
+ const appPorts = [];
1822
+
1823
+ try {
1824
+ // Check Python files for Flask/FastAPI port
1825
+ const pythonFiles = await scanFilesNative(projectPath, ['.py'], ['venv', '__pycache__', '.venv']);
1826
+ for (const file of pythonFiles.slice(0, 10)) {
1827
+ const content = await fs.readFile(path.join(projectPath, file), 'utf-8').catch(() => '');
1828
+ // Flask: app.run(port=5000), app.run(host='0.0.0.0', port=5000)
1829
+ const flaskMatch = content.match(/\.run\s*\([^)]*port\s*=\s*(\d+)/);
1830
+ if (flaskMatch) appPorts.push({ port: parseInt(flaskMatch[1], 10), file, type: 'Flask' });
1831
+ // FastAPI/Uvicorn: uvicorn.run(..., port=8000)
1832
+ const uvicornMatch = content.match(/uvicorn\.run\s*\([^)]*port\s*=\s*(\d+)/);
1833
+ if (uvicornMatch) appPorts.push({ port: parseInt(uvicornMatch[1], 10), file, type: 'Uvicorn' });
1834
+ }
1835
+
1836
+ // Check Node.js files for Express/HTTP port
1837
+ const jsFiles = await scanFilesNative(projectPath, ['.js', '.ts'], ['node_modules', 'dist', 'build']);
1838
+ for (const file of jsFiles.slice(0, 10)) {
1839
+ const content = await fs.readFile(path.join(projectPath, file), 'utf-8').catch(() => '');
1840
+ // Express: app.listen(3000), server.listen(3000)
1841
+ const listenMatch = content.match(/\.listen\s*\(\s*(\d+)/);
1842
+ if (listenMatch) appPorts.push({ port: parseInt(listenMatch[1], 10), file, type: 'Node.js' });
1843
+ // PORT env: process.env.PORT || 3000
1844
+ const portEnvMatch = content.match(/process\.env\.PORT\s*\|\|\s*(\d+)/);
1845
+ if (portEnvMatch) appPorts.push({ port: parseInt(portEnvMatch[1], 10), file, type: 'Node.js (env fallback)' });
1846
+ }
1847
+
1848
+ // Check Ruby files for Sinatra/Rails port
1849
+ const rubyFiles = await scanFilesNative(projectPath, ['.rb'], ['vendor', 'bundle']);
1850
+ for (const file of rubyFiles.slice(0, 10)) {
1851
+ const content = await fs.readFile(path.join(projectPath, file), 'utf-8').catch(() => '');
1852
+ // Sinatra: set :port, 4567
1853
+ const sinatraMatch = content.match(/set\s+:port\s*,\s*(\d+)/);
1854
+ if (sinatraMatch) appPorts.push({ port: parseInt(sinatraMatch[1], 10), file, type: 'Sinatra' });
1855
+ }
1856
+
1857
+ // Check Go files for HTTP port
1858
+ const goFiles = await scanFilesNative(projectPath, ['.go'], ['vendor']);
1859
+ for (const file of goFiles.slice(0, 10)) {
1860
+ const content = await fs.readFile(path.join(projectPath, file), 'utf-8').catch(() => '');
1861
+ // Go: http.ListenAndServe(":8080", ...)
1862
+ const goMatch = content.match(/ListenAndServe\s*\(\s*["':](\d+)/);
1863
+ if (goMatch) appPorts.push({ port: parseInt(goMatch[1], 10), file, type: 'Go HTTP' });
1864
+ }
1865
+
1866
+ // Find mismatches
1867
+ const mismatches = appPorts.filter(p => p.port !== exposedPortNum);
1868
+ if (mismatches.length > 0) {
1869
+ const mismatch = mismatches[0];
1870
+ return {
1871
+ mismatch: true,
1872
+ exposedPort: exposedPortNum,
1873
+ appPort: mismatch.port,
1874
+ file: mismatch.file,
1875
+ type: mismatch.type,
1876
+ suggestedFix: mismatch.port,
1877
+ autofix: 'fix_expose_port'
1878
+ };
1879
+ }
1880
+ } catch (err) {
1881
+ log.warning(`MCP >>> Port detection failed: ${err.message}`);
1882
+ }
1883
+
1884
+ return null;
1885
+ }
1886
+
1887
+ /**
1888
+ * Detect missing dependencies in Node.js projects
1889
+ * Compares require/import statements with package.json dependencies
1890
+ */
1891
+ async function detectMissingNodeDependencies(projectPath) {
1892
+ try {
1893
+ const packageJsonPath = path.join(projectPath, 'package.json');
1894
+ const packageJsonContent = await fs.readFile(packageJsonPath, 'utf-8').catch(() => null);
1895
+ if (!packageJsonContent) return null;
1896
+
1897
+ const packageJson = JSON.parse(packageJsonContent);
1898
+ const declaredDeps = new Set([
1899
+ ...Object.keys(packageJson.dependencies || {}),
1900
+ ...Object.keys(packageJson.devDependencies || {})
1901
+ ]);
1902
+
1903
+ // Scan JS/TS files for imports
1904
+ const usedDeps = new Set();
1905
+ const jsFiles = await scanFilesNative(projectPath, ['.js', '.ts', '.jsx', '.tsx'], ['node_modules', 'dist', 'build']);
1906
+
1907
+ for (const file of jsFiles.slice(0, 20)) {
1908
+ const content = await fs.readFile(path.join(projectPath, file), 'utf-8').catch(() => '');
1909
+
1910
+ // CommonJS: require('package')
1911
+ const requireMatches = content.matchAll(/require\s*\(\s*['"]([^'"./][^'"]*)['"]\s*\)/g);
1912
+ for (const match of requireMatches) {
1913
+ const pkg = match[1].split('/')[0]; // Handle scoped packages and subpaths
1914
+ if (!pkg.startsWith('@')) usedDeps.add(pkg);
1915
+ else usedDeps.add(match[1].split('/').slice(0, 2).join('/')); // @scope/package
1916
+ }
1917
+
1918
+ // ES6: import ... from 'package'
1919
+ const importMatches = content.matchAll(/import\s+(?:[^'"]+\s+from\s+)?['"]([^'"./][^'"]*)['"]/g);
1920
+ for (const match of importMatches) {
1921
+ const pkg = match[1].split('/')[0];
1922
+ if (!pkg.startsWith('@')) usedDeps.add(pkg);
1923
+ else usedDeps.add(match[1].split('/').slice(0, 2).join('/'));
1924
+ }
1925
+ }
1926
+
1927
+ // Built-in Node.js modules to ignore
1928
+ const builtins = new Set(['fs', 'path', 'http', 'https', 'crypto', 'os', 'url', 'util', 'stream', 'events', 'child_process', 'cluster', 'dns', 'net', 'tls', 'zlib', 'buffer', 'querystring', 'readline', 'assert', 'module', 'process', 'timers', 'vm', 'worker_threads']);
1929
+
1930
+ // Find missing dependencies
1931
+ const missing = [];
1932
+ for (const dep of usedDeps) {
1933
+ if (!builtins.has(dep) && !declaredDeps.has(dep)) {
1934
+ missing.push(dep);
1935
+ }
1936
+ }
1937
+
1938
+ if (missing.length > 0) {
1939
+ return {
1940
+ missing: true,
1941
+ packages: missing,
1942
+ autofix: 'add_node_deps'
1943
+ };
1944
+ }
1945
+ } catch (err) {
1946
+ log.warning(`MCP >>> Dependency detection failed: ${err.message}`);
1947
+ }
1948
+
1949
+ return null;
1950
+ }
1951
+
1952
+ /**
1953
+ * Detect invalid path alias dependencies in package.json
1954
+ * Path aliases like "@/components" should only exist in tsconfig.json, not as npm packages
1955
+ */
1956
+ function detectInvalidPathAliasDependencies(packageJsonPath) {
1957
+ try {
1958
+ const packageJsonContent = fsSync.readFileSync(packageJsonPath, 'utf-8');
1959
+ const packageJson = JSON.parse(packageJsonContent);
1960
+
1961
+ const invalidDeps = [];
1962
+ const allDeps = {
1963
+ ...packageJson.dependencies || {},
1964
+ ...packageJson.devDependencies || {}
1965
+ };
1966
+
1967
+ // Check for path alias patterns like "@/something"
1968
+ for (const [depName, depVersion] of Object.entries(allDeps)) {
1969
+ if (depName.startsWith('@/')) {
1970
+ invalidDeps.push(depName);
1971
+ }
1972
+ }
1973
+
1974
+ if (invalidDeps.length > 0) {
1975
+ return {
1976
+ invalid: true,
1977
+ packages: invalidDeps,
1978
+ autofix: 'remove_path_aliases'
1979
+ };
1980
+ }
1981
+ } catch (err) {
1982
+ log.warning(`MCP >>> Path alias validation failed: ${err.message}`);
1983
+ }
1984
+
1985
+ return null;
1986
+ }
1987
+
1387
1988
  // Validate Dockerfile for Coolify compliance
1388
1989
  function validateDockerfile(content) {
1389
1990
  const lines = content.split('\n');
@@ -1403,13 +2004,17 @@ function validateDockerfile(content) {
1403
2004
  const trimmed = lines[i].trim();
1404
2005
  const upper = trimmed.toUpperCase();
1405
2006
 
1406
- // Track base image
2007
+ // Track base image (use the last non-builder FROM for final image)
1407
2008
  if (upper.startsWith('FROM ')) {
1408
- baseImage = trimmed.substring(5).split(' ')[0].toLowerCase();
2009
+ const fromImage = trimmed.substring(5).split(' ')[0].toLowerCase();
1409
2010
  // Check for Elixir multi-stage build
1410
- if (baseImage.includes('elixir') && upper.includes(' AS ')) {
2011
+ if (fromImage.includes('elixir') && upper.includes(' AS ')) {
1411
2012
  isElixirMultiStage = true;
1412
2013
  }
2014
+ // Only track as base image if it's not a builder stage reference
2015
+ if (!fromImage.includes('builder') && !fromImage.includes('build')) {
2016
+ baseImage = fromImage;
2017
+ }
1413
2018
  }
1414
2019
 
1415
2020
  if (upper.startsWith('EXPOSE')) {
@@ -1438,6 +2043,17 @@ function validateDockerfile(content) {
1438
2043
  }
1439
2044
  }
1440
2045
 
2046
+ // Validate base image version
2047
+ const baseImageValidation = validateBaseImage(baseImage);
2048
+ if (baseImageValidation && baseImageValidation.invalid) {
2049
+ issues.push({
2050
+ issue: `Invalid Docker base image: ${baseImageValidation.reason}`,
2051
+ fix: `Change FROM to use: ${baseImageValidation.suggestedFix}`,
2052
+ autofix: baseImageValidation.autofix,
2053
+ suggestedImage: baseImageValidation.suggestedFix
2054
+ });
2055
+ }
2056
+
1441
2057
  if (!hasExpose) {
1442
2058
  issues.push({
1443
2059
  issue: 'Dockerfile does not have an EXPOSE directive',
@@ -1527,6 +2143,41 @@ function autoFixDockerfile(content, issues) {
1527
2143
  }
1528
2144
  }
1529
2145
  }
2146
+
2147
+ if (issue.autofix === 'fix_base_image') {
2148
+ // Replace invalid base image with valid one
2149
+ const suggestedImage = issue.suggestedImage || issue.suggestedFix;
2150
+ for (let i = 0; i < lines.length; i++) {
2151
+ const trimmed = lines[i].trim().toUpperCase();
2152
+ if (trimmed.startsWith('FROM ')) {
2153
+ const originalLine = lines[i];
2154
+ const indent = originalLine.match(/^(\s*)/)[1];
2155
+ const fromMatch = originalLine.match(/^(\s*FROM\s+)([^\s]+)(\s+AS\s+\S+)?(.*)$/i);
2156
+ if (fromMatch && suggestedImage) {
2157
+ const asClause = fromMatch[3] || '';
2158
+ const rest = fromMatch[4] || '';
2159
+ lines[i] = `${indent}FROM ${suggestedImage}${asClause}${rest}`;
2160
+ modified = true;
2161
+ log.success(`MCP >>> Auto-fixed Dockerfile: replaced invalid base image with ${suggestedImage}`);
2162
+ break;
2163
+ }
2164
+ }
2165
+ }
2166
+ }
2167
+
2168
+ if (issue.autofix === 'fix_expose_port') {
2169
+ // Replace existing EXPOSE with correct port
2170
+ for (let i = 0; i < lines.length; i++) {
2171
+ const trimmed = lines[i].trim().toUpperCase();
2172
+ if (trimmed.startsWith('EXPOSE')) {
2173
+ const indent = lines[i].match(/^(\s*)/)[1];
2174
+ lines[i] = `${indent}EXPOSE ${issue.suggestedFix}`;
2175
+ modified = true;
2176
+ log.success(`MCP >>> Auto-fixed Dockerfile: changed EXPOSE to ${issue.suggestedFix}`);
2177
+ break;
2178
+ }
2179
+ }
2180
+ }
1530
2181
  }
1531
2182
 
1532
2183
  return { content: lines.join('\n'), modified };
@@ -1785,7 +2436,7 @@ async function initProject(args) {
1785
2436
  projectData.docker_compose_content = composeContent;
1786
2437
  log.info(`MCP >>> [initProject] Sending docker-compose content: ${composeContent.length} bytes`);
1787
2438
  } else {
1788
- log.warn('MCP >>> [initProject] docker-compose strategy but no compose file found!');
2439
+ log.warning('MCP >>> [initProject] docker-compose strategy but no compose file found!');
1789
2440
  }
1790
2441
  }
1791
2442
 
@@ -2641,6 +3292,93 @@ async function deployProject(args) {
2641
3292
  } else {
2642
3293
  log.success('MCP >>> Dockerfile is Coolify compliant');
2643
3294
  }
3295
+
3296
+ // Check for port mismatch between app code and Dockerfile
3297
+ const currentContent = fsSync.readFileSync(dockerfilePath, 'utf8');
3298
+ const currentValidation = validateDockerfile(currentContent);
3299
+ if (currentValidation.exposedPort) {
3300
+ const portMismatch = await detectPortMismatch(local_path, currentValidation.exposedPort);
3301
+ if (portMismatch && portMismatch.mismatch) {
3302
+ log.warning(`MCP >>> Port mismatch detected: App runs on ${portMismatch.appPort} but EXPOSE is ${portMismatch.exposedPort}`);
3303
+ log.info(`MCP >>> Auto-fixing: Changing EXPOSE to ${portMismatch.appPort}`);
3304
+ const fixResult = autoFixDockerfile(currentContent, [{
3305
+ autofix: 'fix_expose_port',
3306
+ suggestedFix: portMismatch.appPort
3307
+ }]);
3308
+ if (fixResult.modified) {
3309
+ fsSync.writeFileSync(dockerfilePath, fixResult.content);
3310
+ log.success(`MCP >>> Fixed port mismatch: EXPOSE now ${portMismatch.appPort}`);
3311
+ }
3312
+ }
3313
+ }
3314
+ }
3315
+ }
3316
+
3317
+ // Step 4.5: Check for missing Node.js dependencies
3318
+ const packageJsonPath = path.join(local_path, 'package.json');
3319
+ if (fsSync.existsSync(packageJsonPath)) {
3320
+ log.info('MCP >>> Checking for missing Node.js dependencies...');
3321
+ const missingDeps = await detectMissingNodeDependencies(local_path);
3322
+ if (missingDeps && missingDeps.missing && missingDeps.packages.length > 0) {
3323
+ log.warning(`MCP >>> Missing dependencies detected: ${missingDeps.packages.join(', ')}`);
3324
+ log.info('MCP >>> Auto-fixing: Adding missing dependencies to package.json');
3325
+
3326
+ const packageJsonContent = fsSync.readFileSync(packageJsonPath, 'utf-8');
3327
+ const packageJson = JSON.parse(packageJsonContent);
3328
+ if (!packageJson.dependencies) packageJson.dependencies = {};
3329
+
3330
+ for (const pkg of missingDeps.packages) {
3331
+ packageJson.dependencies[pkg] = '*'; // Will be resolved to latest during npm install
3332
+ }
3333
+
3334
+ fsSync.writeFileSync(packageJsonPath + '.backup', packageJsonContent);
3335
+ fsSync.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2));
3336
+ log.success(`MCP >>> Added missing dependencies: ${missingDeps.packages.join(', ')}`);
3337
+ }
3338
+
3339
+ // Step 4.6: Check for invalid path alias dependencies
3340
+ log.info('MCP >>> Checking for invalid path alias dependencies...');
3341
+ const invalidDeps = detectInvalidPathAliasDependencies(packageJsonPath);
3342
+ if (invalidDeps && invalidDeps.invalid && invalidDeps.packages.length > 0) {
3343
+ log.warning(`MCP >>> ⚠️ CRITICAL: Invalid path alias dependencies detected!`);
3344
+ log.warning(`MCP >>> The following entries should NOT be in package.json:`);
3345
+ invalidDeps.packages.forEach(pkg => log.warning(`MCP >>> - ${pkg}`));
3346
+ log.warning(`MCP >>> `);
3347
+ log.warning(`MCP >>> ❌ ERROR: npm cannot install packages with names like "@/components"`);
3348
+ log.warning(`MCP >>> These are TypeScript/JavaScript path aliases, not npm packages!`);
3349
+ log.warning(`MCP >>> `);
3350
+ log.warning(`MCP >>> ✅ HOW TO FIX:`);
3351
+ log.warning(`MCP >>> 1. Remove these entries from package.json dependencies/devDependencies`);
3352
+ log.warning(`MCP >>> 2. Keep them ONLY in tsconfig.json or jsconfig.json under "paths":`);
3353
+ log.warning(`MCP >>> {`);
3354
+ log.warning(`MCP >>> "compilerOptions": {`);
3355
+ log.warning(`MCP >>> "paths": { "@/*": ["./*"] }`);
3356
+ log.warning(`MCP >>> }`);
3357
+ log.warning(`MCP >>> }`);
3358
+ log.warning(`MCP >>> `);
3359
+ log.info('MCP >>> Auto-fixing: Removing invalid path alias dependencies from package.json');
3360
+
3361
+ const packageJsonContent = fsSync.readFileSync(packageJsonPath, 'utf-8');
3362
+ const packageJson = JSON.parse(packageJsonContent);
3363
+
3364
+ // Remove invalid dependencies
3365
+ let removedCount = 0;
3366
+ for (const pkg of invalidDeps.packages) {
3367
+ if (packageJson.dependencies && packageJson.dependencies[pkg]) {
3368
+ delete packageJson.dependencies[pkg];
3369
+ removedCount++;
3370
+ }
3371
+ if (packageJson.devDependencies && packageJson.devDependencies[pkg]) {
3372
+ delete packageJson.devDependencies[pkg];
3373
+ removedCount++;
3374
+ }
3375
+ }
3376
+
3377
+ // Create backup and save fixed version
3378
+ fsSync.writeFileSync(packageJsonPath + '.backup', packageJsonContent);
3379
+ fsSync.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2));
3380
+ log.success(`MCP >>> ✅ Removed ${removedCount} invalid path alias dependencies`);
3381
+ log.info(`MCP >>> Backup saved to: package.json.backup`);
2644
3382
  }
2645
3383
  }
2646
3384
 
@@ -2884,11 +3622,11 @@ async function setEnvironmentVariables(args) {
2884
3622
 
2885
3623
  // Ensure authentication
2886
3624
  let auth = await loadAuth();
2887
- if (!auth || !auth.jwt_token) {
3625
+ if (!auth || !auth.token) {
2888
3626
  throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
2889
3627
  }
2890
3628
 
2891
- log.debug(`Using JWT token: ${auth.jwt_token.substring(0, 20)}...`);
3629
+ log.debug(`Using JWT token: ${auth.token.substring(0, 20)}...`);
2892
3630
 
2893
3631
  // Determine project name
2894
3632
  let finalProjectName = project_name;
@@ -2914,7 +3652,7 @@ async function setEnvironmentVariables(args) {
2914
3652
 
2915
3653
  const projectResponse = await axios.get(backendUrl, {
2916
3654
  headers: {
2917
- 'Authorization': `Bearer ${auth.jwt_token}`,
3655
+ 'Authorization': `Bearer ${auth.token}`,
2918
3656
  'Content-Type': 'application/json'
2919
3657
  }
2920
3658
  });
@@ -2986,7 +3724,7 @@ async function getApplicationUUID(projectName, auth) {
2986
3724
 
2987
3725
  const projectResponse = await axios.get(backendUrl, {
2988
3726
  headers: {
2989
- 'Authorization': `Bearer ${auth.jwt_token}`,
3727
+ 'Authorization': `Bearer ${auth.token}`,
2990
3728
  'Content-Type': 'application/json'
2991
3729
  }
2992
3730
  });
@@ -3007,7 +3745,7 @@ async function setHealthCheck(args) {
3007
3745
 
3008
3746
  // Ensure authentication
3009
3747
  let auth = await loadAuth();
3010
- if (!auth || !auth.jwt_token) {
3748
+ if (!auth || !auth.token) {
3011
3749
  throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
3012
3750
  }
3013
3751
 
@@ -3072,7 +3810,7 @@ async function setDomain(args) {
3072
3810
  log.info(`MCP >>> [setDomain] Setting domain for ${project_name || 'project'}`);
3073
3811
 
3074
3812
  let auth = await loadAuth();
3075
- if (!auth || !auth.jwt_token) {
3813
+ if (!auth || !auth.token) {
3076
3814
  throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
3077
3815
  }
3078
3816
 
@@ -3134,7 +3872,7 @@ async function setDeploymentCommands(args) {
3134
3872
  log.info(`MCP >>> [setDeploymentCommands] Setting deployment commands for ${project_name || 'project'}`);
3135
3873
 
3136
3874
  let auth = await loadAuth();
3137
- if (!auth || !auth.jwt_token) {
3875
+ if (!auth || !auth.token) {
3138
3876
  throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
3139
3877
  }
3140
3878
 
@@ -3196,7 +3934,7 @@ async function manualDeploy(args) {
3196
3934
  log.info(`MCP >>> [manualDeploy] Triggering manual deployment for ${project_name || 'project'}`);
3197
3935
 
3198
3936
  let auth = await loadAuth();
3199
- if (!auth || !auth.jwt_token) {
3937
+ if (!auth || !auth.token) {
3200
3938
  throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
3201
3939
  }
3202
3940
 
@@ -3264,7 +4002,7 @@ async function setOptions(args) {
3264
4002
  log.info(`MCP >>> [setOptions] Setting options for ${project_name || 'project'}`);
3265
4003
 
3266
4004
  let auth = await loadAuth();
3267
- if (!auth || !auth.jwt_token) {
4005
+ if (!auth || !auth.token) {
3268
4006
  throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
3269
4007
  }
3270
4008
 
@@ -3326,7 +4064,7 @@ async function rollback(args) {
3326
4064
  log.info(`MCP >>> [rollback] Rolling back ${project_name || 'project'}`);
3327
4065
 
3328
4066
  let auth = await loadAuth();
3329
- if (!auth || !auth.jwt_token) {
4067
+ if (!auth || !auth.token) {
3330
4068
  throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
3331
4069
  }
3332
4070
 
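
A minimal standalone sketch of the new base-image validation behaviour, condensed from the validateBaseImage() hunk above (the version table is trimmed to two images and the minor-version check is omitted; this sketch is not part of the published package):

// Illustration only: condensed from the validateBaseImage() helper added in this release.
const VALID_IMAGE_VERSIONS = {
  php: { min: 5, max: 8, latest: '8.3' },
  node: { min: 10, max: 22, latest: '20' },
};

function validateBaseImage(baseImage) {
  if (!baseImage) return null;
  const parts = baseImage.split(':');
  const imageName = parts[0].split('/').pop();      // handle registry/image:tag
  const tag = parts[1] || 'latest';
  const imageConfig = VALID_IMAGE_VERSIONS[imageName];
  if (!imageConfig) return null;                     // unknown image, can't validate
  const versionMatch = tag.match(/^(\d+)(?:\.(\d+))?(.*)$/);
  if (!versionMatch) return null;                    // e.g. 'latest' or 'alpine'
  const majorVersion = parseInt(versionMatch[1], 10);
  const variant = versionMatch[3] || '';             // preserve '-apache', '-alpine', ...
  if (majorVersion < imageConfig.min || majorVersion > imageConfig.max) {
    return {
      invalid: true,
      suggestedFix: `${imageName}:${imageConfig.latest}${variant}`,
      autofix: 'fix_base_image',
    };
  }
  return null;
}

console.log(validateBaseImage('php:99-apache'));  // { invalid: true, suggestedFix: 'php:8.3-apache', ... }
console.log(validateBaseImage('node:20-alpine')); // null (valid)
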
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "mlgym-deploy",
3
- "version": "3.3.34",
3
+ "version": "3.3.41",
4
4
  "description": "MCP server for MLGym - Complete deployment management: deploy, configure, monitor, and rollback applications",
5
5
  "main": "index.js",
6
6
  "type": "module",