mlgym-deploy 3.3.32 → 3.3.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +744 -45
- package/package.json +1 -1
package/index.js
CHANGED
@@ -18,7 +18,7 @@ import crypto from 'crypto';
 const execAsync = promisify(exec);
 
 // Current version of this MCP server - INCREMENT FOR WORKFLOW FIXES
-const CURRENT_VERSION = '3.3.32';
+const CURRENT_VERSION = '3.3.40'; // Fix auth.jwt_token -> auth.token consistency bug
 const PACKAGE_NAME = 'mlgym-deploy';
 
 // Debug logging configuration - ENABLED BY DEFAULT
@@ -205,33 +205,40 @@ async function generateSSHKeyPair(email) {
   const sanitizedEmail = email.replace('@', '_at_').replace(/[^a-zA-Z0-9_-]/g, '_');
   const keyPath = path.join(sshDir, `mlgym_${sanitizedEmail}`);
 
+  let keyExists = false;
+  let publicKey = '';
+
   try {
     await fs.access(keyPath);
     console.error(`SSH key already exists at ${keyPath}, using existing key`);
-
-
+    publicKey = await fs.readFile(`${keyPath}.pub`, 'utf8');
+    keyExists = true;
   } catch {
     // Key doesn't exist, generate new one
+    console.error(`Generating new SSH key for ${email}...`);
   }
 
-
-
-
-
+  if (!keyExists) {
+    const { stdout, stderr } = await execAsync(
+      `ssh-keygen -t ed25519 -f "${keyPath}" -N "" -C "${email}"`,
+      { timeout: 10000 }
+    );
 
-
-
-
+    if (stderr && !stderr.includes('Generating public/private')) {
+      throw new Error(`SSH key generation failed: ${stderr}`);
+    }
 
-
-
+    await execAsync(`chmod 600 "${keyPath}"`);
+    await execAsync(`chmod 644 "${keyPath}.pub"`);
 
-
+    publicKey = await fs.readFile(`${keyPath}.pub`, 'utf8');
+  }
 
-  //
+  // FIX: ALWAYS update SSH config to point to THIS user's key
+  // This prevents the bug where User A's key remains in config after User B authenticates
   const configPath = path.join(sshDir, 'config');
   const configEntry = `
-# MLGym GitLab (added by mlgym-deploy)
+# MLGym GitLab (added by mlgym-deploy for ${email})
 Host git.mlgym.io
   User git
   Port 22
@@ -241,18 +248,16 @@ Host git.mlgym.io
 
   try {
     const existingConfig = await fs.readFile(configPath, 'utf8');
-
-
-
-
-
-
-
-    } else {
-      await fs.appendFile(configPath, configEntry);
-    }
+    // Always remove old MLGym config and add new one for current user
+    const updatedConfig = existingConfig.replace(
+      /\n?# MLGym GitLab[^\n]*\nHost git\.mlgym\.io\n(?:[ \t]+[^\n]+\n)*/g,
+      ''
+    ).trim();
+    await fs.writeFile(configPath, updatedConfig + '\n' + configEntry, { mode: 0o600 });
+    console.error(`SSH config updated to use key for ${email}`);
   } catch {
     await fs.writeFile(configPath, configEntry, { mode: 0o600 });
+    console.error(`SSH config created for ${email}`);
   }
 
   return { publicKey: publicKey.trim(), privateKeyPath: keyPath };
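For reference, a minimal sketch (not part of the package) of what the rewritten SSH-config logic above does to a config file that still carries a previous user's entry. The sample file contents and user names below are hypothetical; the regex is the one added in the hunk above.

```js
// Hypothetical ~/.ssh/config: a stale MLGym block for user A plus an unrelated host.
const existingConfig = `# MLGym GitLab (added by mlgym-deploy for user_a_at_example.com)
Host git.mlgym.io
  User git
  Port 22
  IdentityFile ~/.ssh/mlgym_user_a_at_example_com

Host github.com
  User git
`;

// Same pattern as in the diff: strip any old MLGym block before appending the
// current user's entry, so only one key is ever wired to git.mlgym.io.
const updatedConfig = existingConfig
  .replace(/\n?# MLGym GitLab[^\n]*\nHost git\.mlgym\.io\n(?:[ \t]+[^\n]+\n)*/g, '')
  .trim();

console.log(updatedConfig); // only the github.com block remains
```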
@@ -525,6 +530,11 @@ async function analyzeProject(local_path = '.') {
       analysis.framework = 'nextjs';
       analysis.build_command = packageJson.scripts?.build || 'npm run build';
       analysis.start_command = packageJson.scripts?.start || 'npm start';
+    } else if (deps['@nestjs/core'] || deps['@nestjs/common']) {
+      // NestJS - TypeScript backend framework requiring build step
+      analysis.framework = 'nestjs';
+      analysis.build_command = packageJson.scripts?.build || 'npm run build';
+      analysis.start_command = packageJson.scripts?.['start:prod'] || 'node dist/main.js';
     } else if (deps.express) {
       analysis.framework = 'express';
       analysis.start_command = packageJson.scripts?.start || 'node index.js';
@@ -608,6 +618,117 @@ async function analyzeProject(local_path = '.') {
     } catch {}
   }
 
+  // Check for Ruby project (Gemfile)
+  if (analysis.project_type === 'unknown') {
+    try {
+      await fs.access(path.join(absolutePath, 'Gemfile'));
+      analysis.project_type = 'ruby';
+      analysis.detected_files.push('Gemfile');
+
+      // Try to detect Rails vs Sinatra vs generic Ruby
+      try {
+        const gemfileContent = await fs.readFile(path.join(absolutePath, 'Gemfile'), 'utf8');
+        if (gemfileContent.includes("'rails'") || gemfileContent.includes('"rails"')) {
+          analysis.framework = 'rails';
+          analysis.start_command = 'rails server -b 0.0.0.0';
+        } else if (gemfileContent.includes("'sinatra'") || gemfileContent.includes('"sinatra"')) {
+          analysis.framework = 'sinatra';
+          analysis.start_command = 'ruby app.rb';
+        } else {
+          analysis.framework = 'ruby';
+          analysis.start_command = 'ruby app.rb';
+        }
+      } catch {
+        analysis.framework = 'ruby';
+      }
+    } catch {}
+  }
+
+  // Check for Rust project (Cargo.toml)
+  if (analysis.project_type === 'unknown') {
+    try {
+      await fs.access(path.join(absolutePath, 'Cargo.toml'));
+      analysis.project_type = 'rust';
+      analysis.detected_files.push('Cargo.toml');
+      analysis.framework = 'rust';
+      analysis.build_command = 'cargo build --release';
+      analysis.start_command = './target/release/app';
+
+      // Try to get binary name from Cargo.toml
+      try {
+        const cargoContent = await fs.readFile(path.join(absolutePath, 'Cargo.toml'), 'utf8');
+        const nameMatch = cargoContent.match(/name\s*=\s*["']([^"']+)["']/);
+        if (nameMatch) {
+          analysis.binary_name = nameMatch[1];
+          analysis.start_command = `./target/release/${nameMatch[1]}`;
+        }
+      } catch {}
+    } catch {}
+  }
+
+  // Check for Java project (pom.xml for Maven)
+  if (analysis.project_type === 'unknown') {
+    try {
+      await fs.access(path.join(absolutePath, 'pom.xml'));
+      analysis.project_type = 'java';
+      analysis.detected_files.push('pom.xml');
+      analysis.framework = 'maven';
+      analysis.build_command = 'mvn clean package -DskipTests';
+      analysis.start_command = 'java -jar target/app.jar';
+    } catch {}
+  }
+
+  // Check for Java project (build.gradle for Gradle)
+  if (analysis.project_type === 'unknown') {
+    try {
+      await fs.access(path.join(absolutePath, 'build.gradle'));
+      analysis.project_type = 'java';
+      analysis.detected_files.push('build.gradle');
+      analysis.framework = 'gradle';
+      analysis.build_command = './gradlew build -x test';
+      analysis.start_command = 'java -jar build/libs/app.jar';
+    } catch {}
+  }
+
+  // Check for .NET project (*.csproj)
+  if (analysis.project_type === 'unknown') {
+    try {
+      const files = await fs.readdir(absolutePath);
+      const csprojFile = files.find(f => f.endsWith('.csproj'));
+      if (csprojFile) {
+        analysis.project_type = 'dotnet';
+        analysis.detected_files.push(csprojFile);
+        analysis.framework = 'aspnet';
+        analysis.build_command = 'dotnet publish -c Release -o out';
+        analysis.start_command = 'dotnet out/app.dll';
+        analysis.csproj_file = csprojFile;
+      }
+    } catch {}
+  }
+
+  // Check for Elixir project (mix.exs)
+  if (analysis.project_type === 'unknown') {
+    try {
+      await fs.access(path.join(absolutePath, 'mix.exs'));
+      analysis.project_type = 'elixir';
+      analysis.detected_files.push('mix.exs');
+
+      // Check for Phoenix framework
+      try {
+        const mixContent = await fs.readFile(path.join(absolutePath, 'mix.exs'), 'utf8');
+        if (mixContent.includes(':phoenix')) {
+          analysis.framework = 'phoenix';
+          analysis.start_command = 'mix phx.server';
+        } else {
+          analysis.framework = 'elixir';
+          analysis.start_command = 'mix run --no-halt';
+        }
+      } catch {
+        analysis.framework = 'elixir';
+      }
+    } catch {}
+  }
+
   // Check for Scala/sbt project
   if (analysis.project_type === 'unknown') {
     try {
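As a reading aid (not from the package itself): for a directory whose only manifest is a Gemfile declaring gem 'rails', the new Ruby branch above would leave the analysis object reporting roughly the fields sketched below; other fields of the analysis object are omitted.

```js
// Shape inferred from the Ruby/Rails branch in the hunk above; illustrative only.
const analysis = {
  project_type: 'ruby',
  detected_files: ['Gemfile'],
  framework: 'rails',
  start_command: 'rails server -b 0.0.0.0',
};
```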
@@ -701,6 +822,24 @@ COPY --from=builder /app/package.json ./
 COPY --from=builder /app/public ./public
 EXPOSE 3000
 CMD ["${packageManager}", "start"]`;
+    } else if (framework === 'nestjs') {
+      // NestJS - TypeScript backend framework requiring build step
+      dockerfile = `# Build stage
+FROM node:18-alpine AS builder
+WORKDIR /app
+COPY package*.json ./
+RUN ${packageManager} ${packageManager === 'npm' ? 'ci' : 'install --frozen-lockfile'}
+COPY . .
+RUN ${packageManager} run build
+
+# Production stage
+FROM node:18-alpine
+WORKDIR /app
+COPY --from=builder /app/dist ./dist
+COPY --from=builder /app/node_modules ./node_modules
+COPY --from=builder /app/package.json ./
+EXPOSE 3000
+CMD ["node", "dist/main.js"]`;
     } else if (framework === 'express') {
       dockerfile = `FROM node:18-alpine
 WORKDIR /app
@@ -761,33 +900,222 @@ EXPOSE 8000
 CMD ["python", "main.py"]`;
     }
   } else if (projectType === 'php') {
-
+    if (framework === 'composer' || framework === 'laravel') {
+      // Laravel/Composer PHP project
+      dockerfile = `FROM php:8.2-apache
+
+# Install PHP extensions and Composer
+RUN apt-get update && apt-get install -y \\
+  libpng-dev libjpeg-dev libfreetype6-dev libzip-dev unzip git \\
+  && docker-php-ext-configure gd --with-freetype --with-jpeg \\
+  && docker-php-ext-install gd pdo pdo_mysql zip \\
+  && curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer \\
+  && a2enmod rewrite \\
+  && rm -rf /var/lib/apt/lists/*
+
+# Set document root to Laravel's public folder
+ENV APACHE_DOCUMENT_ROOT /var/www/html/public
+RUN sed -ri -e 's!/var/www/html!\${APACHE_DOCUMENT_ROOT}!g' /etc/apache2/sites-available/*.conf
+RUN sed -ri -e 's!/var/www/!\${APACHE_DOCUMENT_ROOT}!g' /etc/apache2/apache2.conf /etc/apache2/conf-available/*.conf
+
+WORKDIR /var/www/html
+COPY . .
+
+# Install dependencies and set permissions
+RUN composer install --no-dev --optimize-autoloader --no-interaction \\
+  && chown -R www-data:www-data /var/www/html \\
+  && chmod -R 755 /var/www/html/storage /var/www/html/bootstrap/cache 2>/dev/null || true
+
+EXPOSE 80
+CMD ["apache2-foreground"]`;
+    } else {
+      // Simple PHP project
+      dockerfile = `FROM php:8.2-apache
 WORKDIR /var/www/html
 COPY . .
 RUN chown -R www-data:www-data /var/www/html
 EXPOSE 80
 CMD ["apache2-foreground"]`;
+    }
   } else if (projectType === 'static') {
     dockerfile = `FROM nginx:alpine
 COPY . /usr/share/nginx/html
 EXPOSE 80
 CMD ["nginx", "-g", "daemon off;"]`;
   } else if (projectType === 'go') {
+    // Go project with CGO disabled for static binary
     dockerfile = `# Build stage
 FROM golang:1.21-alpine AS builder
 WORKDIR /app
-COPY go.mod go.sum ./
-RUN go mod download
+COPY go.mod go.sum* ./
+RUN go mod download 2>/dev/null || true
 COPY . .
-RUN go build -o app
+RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o app .
 
 # Production stage
 FROM alpine:latest
 RUN apk --no-cache add ca-certificates
-WORKDIR /
+WORKDIR /app
 COPY --from=builder /app/app .
 EXPOSE 8080
 CMD ["./app"]`;
+  } else if (projectType === 'ruby') {
+    if (framework === 'rails') {
+      // Ruby on Rails
+      dockerfile = `FROM ruby:3.2-slim
+
+# Install dependencies
+RUN apt-get update && apt-get install -y \\
+  build-essential libpq-dev nodejs npm \\
+  && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+COPY Gemfile Gemfile.lock* ./
+RUN bundle install --without development test
+
+COPY . .
+RUN bundle exec rake assets:precompile 2>/dev/null || true
+
+EXPOSE 3000
+CMD ["rails", "server", "-b", "0.0.0.0"]`;
+    } else {
+      // Sinatra or generic Ruby
+      dockerfile = `FROM ruby:3.2-slim
+
+RUN apt-get update && apt-get install -y build-essential && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+COPY Gemfile Gemfile.lock* ./
+RUN bundle install
+
+COPY . .
+EXPOSE 4567
+CMD ["ruby", "app.rb"]`;
+    }
+  } else if (projectType === 'rust') {
+    dockerfile = `# Build stage
+FROM rust:1.75-slim AS builder
+WORKDIR /app
+COPY Cargo.toml Cargo.lock* ./
+COPY src ./src
+RUN cargo build --release
+
+# Production stage
+FROM debian:bookworm-slim
+RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
+WORKDIR /app
+COPY --from=builder /app/target/release/* ./
+EXPOSE 8080
+CMD ["./app"]`;
+  } else if (projectType === 'java') {
+    if (framework === 'maven') {
+      dockerfile = `# Build stage
+FROM maven:3.9-eclipse-temurin-17 AS builder
+WORKDIR /app
+COPY pom.xml .
+RUN mvn dependency:go-offline -B
+COPY src ./src
+RUN mvn clean package -DskipTests -B
+
+# Production stage
+FROM eclipse-temurin:17-jre-jammy
+WORKDIR /app
+COPY --from=builder /app/target/*.jar app.jar
+EXPOSE 8080
+CMD ["java", "-jar", "app.jar"]`;
+    } else if (framework === 'gradle') {
+      dockerfile = `# Build stage
+FROM gradle:8-jdk17 AS builder
+WORKDIR /app
+COPY build.gradle settings.gradle* gradle* ./
+COPY gradle ./gradle 2>/dev/null || true
+COPY src ./src
+RUN gradle build -x test --no-daemon
+
+# Production stage
+FROM eclipse-temurin:17-jre-jammy
+WORKDIR /app
+COPY --from=builder /app/build/libs/*.jar app.jar
+EXPOSE 8080
+CMD ["java", "-jar", "app.jar"]`;
+    } else {
+      // Generic Java with Maven fallback
+      dockerfile = `# Build stage
+FROM maven:3.9-eclipse-temurin-17 AS builder
+WORKDIR /app
+COPY pom.xml .
+COPY src ./src
+RUN mvn clean package -DskipTests -B
+
+# Production stage
+FROM eclipse-temurin:17-jre-jammy
+WORKDIR /app
+COPY --from=builder /app/target/*.jar app.jar
+EXPOSE 8080
+CMD ["java", "-jar", "app.jar"]`;
+    }
+  } else if (projectType === 'dotnet') {
+    dockerfile = `# Build stage
+FROM mcr.microsoft.com/dotnet/sdk:8.0 AS builder
+WORKDIR /app
+COPY *.csproj ./
+RUN dotnet restore
+COPY . .
+RUN dotnet publish -c Release -o out
+
+# Production stage
+FROM mcr.microsoft.com/dotnet/aspnet:8.0
+WORKDIR /app
+COPY --from=builder /app/out .
+EXPOSE 8080
+ENV ASPNETCORE_URLS=http://+:8080
+ENTRYPOINT ["dotnet", "app.dll"]`;
+  } else if (projectType === 'elixir') {
+    if (framework === 'phoenix') {
+      dockerfile = `# Build stage
+FROM elixir:1.16-alpine AS builder
+RUN apk add --no-cache build-base git nodejs npm
+WORKDIR /app
+
+# Install hex and rebar
+RUN mix local.hex --force && mix local.rebar --force
+
+# Install dependencies
+COPY mix.exs mix.lock ./
+RUN mix deps.get --only prod
+RUN MIX_ENV=prod mix deps.compile
+
+# Build assets and release
+COPY . .
+RUN cd assets && npm install && npm run deploy 2>/dev/null || true
+RUN MIX_ENV=prod mix phx.digest 2>/dev/null || true
+RUN MIX_ENV=prod mix release
+
+# Production stage
+FROM alpine:3.19
+RUN apk add --no-cache libstdc++ openssl ncurses-libs
+WORKDIR /app
+COPY --from=builder /app/_build/prod/rel/app ./
+EXPOSE 4000
+ENV PHX_HOST=localhost
+CMD ["bin/app", "start"]`;
+    } else {
+      dockerfile = `FROM elixir:1.16-alpine
+RUN apk add --no-cache build-base
+WORKDIR /app
+
+RUN mix local.hex --force && mix local.rebar --force
+
+COPY mix.exs mix.lock ./
+RUN mix deps.get
+RUN mix deps.compile
+
+COPY . .
+RUN mix compile
+
+EXPOSE 4000
+CMD ["mix", "run", "--no-halt"]`;
+    }
   } else if (projectType === 'scala') {
     dockerfile = `# Build stage
 FROM sbtscala/scala-sbt:eclipse-temurin-jammy-17.0.10_7_1.10.2_2.13.15 AS builder
@@ -1379,6 +1707,248 @@ function randomizeVolumeNames(content, suffix) {
   return { content: result, randomized };
 }
 
+// Known valid Docker image version ranges
+const VALID_IMAGE_VERSIONS = {
+  php: { min: 5, max: 8, latest: '8.3', variants: ['apache', 'fpm', 'cli', 'alpine', 'slim'] },
+  node: { min: 10, max: 22, latest: '20', variants: ['alpine', 'slim', 'bullseye', 'bookworm'] },
+  python: { min: 2, max: 3, latest: '3.12', subVersionMax: { 2: 7, 3: 12 } },
+  ruby: { min: 2, max: 3, latest: '3.3', subVersionMax: { 2: 7, 3: 3 } },
+  golang: { min: 1, max: 1, latest: '1.22', subVersionMax: { 1: 22 } },
+  go: { min: 1, max: 1, latest: '1.22', subVersionMax: { 1: 22 } },
+  java: { min: 8, max: 21, latest: '21' },
+  openjdk: { min: 8, max: 21, latest: '21' },
+  rust: { min: 1, max: 1, latest: '1.75', subVersionMax: { 1: 75 } },
+  perl: { min: 5, max: 5, latest: '5.38', subVersionMax: { 5: 38 } },
+  elixir: { min: 1, max: 1, latest: '1.16', subVersionMax: { 1: 16 } },
+};
+
+// Validate Docker base image version
+function validateBaseImage(baseImage) {
+  if (!baseImage) return null;
+
+  // Extract image name and tag
+  const parts = baseImage.split(':');
+  const imageName = parts[0].split('/').pop(); // Handle registry/image:tag format
+  const tag = parts[1] || 'latest';
+
+  // Check if we know this image type
+  const imageConfig = VALID_IMAGE_VERSIONS[imageName];
+  if (!imageConfig) return null; // Unknown image, can't validate
+
+  // Extract version number and variant from tag (e.g., "99-apache" -> version: 99, variant: "-apache")
+  const versionMatch = tag.match(/^(\d+)(?:\.(\d+))?(.*)$/);
+  if (!versionMatch) return null; // Can't parse version (e.g., 'latest', 'alpine')
+
+  const majorVersion = parseInt(versionMatch[1], 10);
+  const minorVersion = versionMatch[2] ? parseInt(versionMatch[2], 10) : null;
+  const variant = versionMatch[3] || ''; // Preserve variant like "-apache", "-alpine", "-slim"
+
+  // Build the suggested fix preserving the variant
+  const suggestedTag = `${imageConfig.latest}${variant}`;
+
+  // Check if major version is in valid range
+  if (majorVersion < imageConfig.min || majorVersion > imageConfig.max) {
+    return {
+      invalid: true,
+      reason: `${imageName}:${tag} has invalid version ${majorVersion} (valid: ${imageConfig.min}-${imageConfig.max})`,
+      suggestedFix: `${imageName}:${suggestedTag}`,
+      autofix: 'fix_base_image'
+    };
+  }
+
+  // Check minor version if we have subversion limits
+  if (minorVersion !== null && imageConfig.subVersionMax && imageConfig.subVersionMax[majorVersion]) {
+    if (minorVersion > imageConfig.subVersionMax[majorVersion]) {
+      return {
+        invalid: true,
+        reason: `${imageName}:${tag} has invalid minor version (${majorVersion}.${minorVersion} doesn't exist)`,
+        suggestedFix: `${imageName}:${suggestedTag}`,
+        autofix: 'fix_base_image'
+      };
+    }
+  }
+
+  return null; // Valid
+}
+
+/**
+ * Native file scanner - replaces glob dependency
+ * @param {string} baseDir - Base directory to scan
+ * @param {string[]} extensions - File extensions to match (e.g., ['.py', '.js'])
+ * @param {string[]} ignoreDirs - Directory names to ignore (e.g., ['node_modules', 'venv'])
+ * @returns {string[]} Array of relative file paths
+ */
+async function scanFilesNative(baseDir, extensions, ignoreDirs = []) {
+  const results = [];
+  const ignoreSet = new Set(ignoreDirs);
+
+  async function scanDir(dir, relativePath = '') {
+    try {
+      const entries = await fs.readdir(dir, { withFileTypes: true });
+      for (const entry of entries) {
+        const fullPath = path.join(dir, entry.name);
+        const relPath = relativePath ? path.join(relativePath, entry.name) : entry.name;
+
+        if (entry.isDirectory()) {
+          // Skip ignored directories
+          if (!ignoreSet.has(entry.name) && !entry.name.startsWith('.')) {
+            await scanDir(fullPath, relPath);
+          }
+        } else if (entry.isFile()) {
+          const ext = path.extname(entry.name).toLowerCase();
+          if (extensions.includes(ext)) {
+            results.push(relPath);
+          }
+        }
+      }
+    } catch (err) {
+      // Ignore permission errors and other issues
+    }
+  }
+
+  await scanDir(baseDir);
+  return results;
+}
+
+/**
+ * Detect port mismatch between application code and Dockerfile EXPOSE
+ * Returns null if no mismatch, or object with details if mismatch found
+ */
+async function detectPortMismatch(projectPath, exposedPort) {
+  if (!exposedPort) return null;
+
+  const exposedPortNum = parseInt(exposedPort, 10);
+  const appPorts = [];
+
+  try {
+    // Check Python files for Flask/FastAPI port
+    const pythonFiles = await scanFilesNative(projectPath, ['.py'], ['venv', '__pycache__', '.venv']);
+    for (const file of pythonFiles.slice(0, 10)) {
+      const content = await fs.readFile(path.join(projectPath, file), 'utf-8').catch(() => '');
+      // Flask: app.run(port=5000), app.run(host='0.0.0.0', port=5000)
+      const flaskMatch = content.match(/\.run\s*\([^)]*port\s*=\s*(\d+)/);
+      if (flaskMatch) appPorts.push({ port: parseInt(flaskMatch[1], 10), file, type: 'Flask' });
+      // FastAPI/Uvicorn: uvicorn.run(..., port=8000)
+      const uvicornMatch = content.match(/uvicorn\.run\s*\([^)]*port\s*=\s*(\d+)/);
+      if (uvicornMatch) appPorts.push({ port: parseInt(uvicornMatch[1], 10), file, type: 'Uvicorn' });
+    }
+
+    // Check Node.js files for Express/HTTP port
+    const jsFiles = await scanFilesNative(projectPath, ['.js', '.ts'], ['node_modules', 'dist', 'build']);
+    for (const file of jsFiles.slice(0, 10)) {
+      const content = await fs.readFile(path.join(projectPath, file), 'utf-8').catch(() => '');
+      // Express: app.listen(3000), server.listen(3000)
+      const listenMatch = content.match(/\.listen\s*\(\s*(\d+)/);
+      if (listenMatch) appPorts.push({ port: parseInt(listenMatch[1], 10), file, type: 'Node.js' });
+      // PORT env: process.env.PORT || 3000
+      const portEnvMatch = content.match(/process\.env\.PORT\s*\|\|\s*(\d+)/);
+      if (portEnvMatch) appPorts.push({ port: parseInt(portEnvMatch[1], 10), file, type: 'Node.js (env fallback)' });
+    }
+
+    // Check Ruby files for Sinatra/Rails port
+    const rubyFiles = await scanFilesNative(projectPath, ['.rb'], ['vendor', 'bundle']);
+    for (const file of rubyFiles.slice(0, 10)) {
+      const content = await fs.readFile(path.join(projectPath, file), 'utf-8').catch(() => '');
+      // Sinatra: set :port, 4567
+      const sinatraMatch = content.match(/set\s+:port\s*,\s*(\d+)/);
+      if (sinatraMatch) appPorts.push({ port: parseInt(sinatraMatch[1], 10), file, type: 'Sinatra' });
+    }
+
+    // Check Go files for HTTP port
+    const goFiles = await scanFilesNative(projectPath, ['.go'], ['vendor']);
+    for (const file of goFiles.slice(0, 10)) {
+      const content = await fs.readFile(path.join(projectPath, file), 'utf-8').catch(() => '');
+      // Go: http.ListenAndServe(":8080", ...)
+      const goMatch = content.match(/ListenAndServe\s*\(\s*["':](\d+)/);
+      if (goMatch) appPorts.push({ port: parseInt(goMatch[1], 10), file, type: 'Go HTTP' });
+    }
+
+    // Find mismatches
+    const mismatches = appPorts.filter(p => p.port !== exposedPortNum);
+    if (mismatches.length > 0) {
+      const mismatch = mismatches[0];
+      return {
+        mismatch: true,
+        exposedPort: exposedPortNum,
+        appPort: mismatch.port,
+        file: mismatch.file,
+        type: mismatch.type,
+        suggestedFix: mismatch.port,
+        autofix: 'fix_expose_port'
+      };
+    }
+  } catch (err) {
+    log.warning(`MCP >>> Port detection failed: ${err.message}`);
+  }
+
+  return null;
+}
+
+/**
+ * Detect missing dependencies in Node.js projects
+ * Compares require/import statements with package.json dependencies
+ */
+async function detectMissingNodeDependencies(projectPath) {
+  try {
+    const packageJsonPath = path.join(projectPath, 'package.json');
+    const packageJsonContent = await fs.readFile(packageJsonPath, 'utf-8').catch(() => null);
+    if (!packageJsonContent) return null;
+
+    const packageJson = JSON.parse(packageJsonContent);
+    const declaredDeps = new Set([
+      ...Object.keys(packageJson.dependencies || {}),
+      ...Object.keys(packageJson.devDependencies || {})
+    ]);
+
+    // Scan JS/TS files for imports
+    const usedDeps = new Set();
+    const jsFiles = await scanFilesNative(projectPath, ['.js', '.ts', '.jsx', '.tsx'], ['node_modules', 'dist', 'build']);
+
+    for (const file of jsFiles.slice(0, 20)) {
+      const content = await fs.readFile(path.join(projectPath, file), 'utf-8').catch(() => '');
+
+      // CommonJS: require('package')
+      const requireMatches = content.matchAll(/require\s*\(\s*['"]([^'"./][^'"]*)['"]\s*\)/g);
+      for (const match of requireMatches) {
+        const pkg = match[1].split('/')[0]; // Handle scoped packages and subpaths
+        if (!pkg.startsWith('@')) usedDeps.add(pkg);
+        else usedDeps.add(match[1].split('/').slice(0, 2).join('/')); // @scope/package
+      }
+
+      // ES6: import ... from 'package'
+      const importMatches = content.matchAll(/import\s+(?:[^'"]+\s+from\s+)?['"]([^'"./][^'"]*)['"]/g);
+      for (const match of importMatches) {
+        const pkg = match[1].split('/')[0];
+        if (!pkg.startsWith('@')) usedDeps.add(pkg);
+        else usedDeps.add(match[1].split('/').slice(0, 2).join('/'));
+      }
+    }
+
+    // Built-in Node.js modules to ignore
+    const builtins = new Set(['fs', 'path', 'http', 'https', 'crypto', 'os', 'url', 'util', 'stream', 'events', 'child_process', 'cluster', 'dns', 'net', 'tls', 'zlib', 'buffer', 'querystring', 'readline', 'assert', 'module', 'process', 'timers', 'vm', 'worker_threads']);
+
+    // Find missing dependencies
+    const missing = [];
+    for (const dep of usedDeps) {
+      if (!builtins.has(dep) && !declaredDeps.has(dep)) {
+        missing.push(dep);
+      }
+    }
+
+    if (missing.length > 0) {
+      return {
+        missing: true,
+        packages: missing,
+        autofix: 'add_node_deps'
+      };
+    }
+  } catch (err) {
+    log.warning(`MCP >>> Dependency detection failed: ${err.message}`);
+  }
+
+  return null;
+}
+
 // Validate Dockerfile for Coolify compliance
 function validateDockerfile(content) {
   const lines = content.split('\n');
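As a reading aid (not from the package itself): calling the new validator with an out-of-range tag produces the issue data that the auto-fix path below consumes. The expected values follow directly from VALID_IMAGE_VERSIONS and the tag regex in the hunk above.

```js
// The "-apache" variant is preserved; 99 is outside php's 5-8 range, so the
// suggested fix is the configured latest tag for php.
const bad = validateBaseImage('php:99-apache');
// bad === {
//   invalid: true,
//   reason: 'php:99-apache has invalid version 99 (valid: 5-8)',
//   suggestedFix: 'php:8.3-apache',
//   autofix: 'fix_base_image'
// }

// A tag inside the known range (or an image the table doesn't know) returns null.
console.log(validateBaseImage('node:20-alpine')); // null
```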
@@ -1398,13 +1968,17 @@ function validateDockerfile(content) {
     const trimmed = lines[i].trim();
     const upper = trimmed.toUpperCase();
 
-    // Track base image
+    // Track base image (use the last non-builder FROM for final image)
     if (upper.startsWith('FROM ')) {
-
+      const fromImage = trimmed.substring(5).split(' ')[0].toLowerCase();
       // Check for Elixir multi-stage build
-      if (
+      if (fromImage.includes('elixir') && upper.includes(' AS ')) {
         isElixirMultiStage = true;
       }
+      // Only track as base image if it's not a builder stage reference
+      if (!fromImage.includes('builder') && !fromImage.includes('build')) {
+        baseImage = fromImage;
+      }
     }
 
     if (upper.startsWith('EXPOSE')) {
@@ -1433,6 +2007,17 @@ function validateDockerfile(content) {
     }
   }
 
+  // Validate base image version
+  const baseImageValidation = validateBaseImage(baseImage);
+  if (baseImageValidation && baseImageValidation.invalid) {
+    issues.push({
+      issue: `Invalid Docker base image: ${baseImageValidation.reason}`,
+      fix: `Change FROM to use: ${baseImageValidation.suggestedFix}`,
+      autofix: baseImageValidation.autofix,
+      suggestedImage: baseImageValidation.suggestedFix
+    });
+  }
+
   if (!hasExpose) {
     issues.push({
       issue: 'Dockerfile does not have an EXPOSE directive',
@@ -1522,6 +2107,41 @@ function autoFixDockerfile(content, issues) {
         }
       }
     }
+
+    if (issue.autofix === 'fix_base_image') {
+      // Replace invalid base image with valid one
+      const suggestedImage = issue.suggestedImage || issue.suggestedFix;
+      for (let i = 0; i < lines.length; i++) {
+        const trimmed = lines[i].trim().toUpperCase();
+        if (trimmed.startsWith('FROM ')) {
+          const originalLine = lines[i];
+          const indent = originalLine.match(/^(\s*)/)[1];
+          const fromMatch = originalLine.match(/^(\s*FROM\s+)([^\s]+)(\s+AS\s+\S+)?(.*)$/i);
+          if (fromMatch && suggestedImage) {
+            const asClause = fromMatch[3] || '';
+            const rest = fromMatch[4] || '';
+            lines[i] = `${indent}FROM ${suggestedImage}${asClause}${rest}`;
+            modified = true;
+            log.success(`MCP >>> Auto-fixed Dockerfile: replaced invalid base image with ${suggestedImage}`);
+            break;
+          }
+        }
+      }
+    }
+
+    if (issue.autofix === 'fix_expose_port') {
+      // Replace existing EXPOSE with correct port
+      for (let i = 0; i < lines.length; i++) {
+        const trimmed = lines[i].trim().toUpperCase();
+        if (trimmed.startsWith('EXPOSE')) {
+          const indent = lines[i].match(/^(\s*)/)[1];
+          lines[i] = `${indent}EXPOSE ${issue.suggestedFix}`;
+          modified = true;
+          log.success(`MCP >>> Auto-fixed Dockerfile: changed EXPOSE to ${issue.suggestedFix}`);
+          break;
+        }
+      }
+    }
   }
 
   return { content: lines.join('\n'), modified };
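As a reading aid (not from the package itself): the new fix_expose_port branch rewrites an existing EXPOSE line in place. A minimal sketch of feeding it the issue shape that detectPortMismatch and deployProject produce:

```js
const dockerfile = [
  'FROM node:18-alpine',
  'WORKDIR /app',
  'EXPOSE 3000',
  'CMD ["node", "index.js"]',
].join('\n');

// Same fields the deploy flow passes through when a mismatch is detected.
const { content, modified } = autoFixDockerfile(dockerfile, [
  { autofix: 'fix_expose_port', suggestedFix: 8080 },
]);

console.log(modified);                        // true
console.log(content.includes('EXPOSE 8080')); // true
```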
@@ -1660,7 +2280,7 @@ async function initProject(args) {
             status: 'error',
             message: 'Hostname is required when deployment is enabled',
             required_fields: {
-              hostname: '✗ missing -
+              hostname: '✗ missing - project identifier (actual URL will be https://{app_id}.eu{1,2,3}.ezb.net)'
             }
           }, null, 2)
         }]
@@ -1683,6 +2303,43 @@ async function initProject(args) {
 
   console.error(`Creating project: ${name}`);
 
+  // FIX: Check if project already exists on backend before creating (prevents duplicates)
+  // This is critical to prevent race conditions where multiple concurrent requests
+  // could create duplicate projects
+  log.info(`MCP >>> [initProject] Checking if project '${name}' already exists on backend...`);
+  const existingProjectsResult = await apiRequest('GET', '/api/v1/projects', null, true);
+  if (existingProjectsResult.success && Array.isArray(existingProjectsResult.data)) {
+    // Check if any existing project name ends with our project name (user prefix is added by backend)
+    const existingProject = existingProjectsResult.data.find(p =>
+      p.name && (p.name === name || p.name.endsWith(`-${name}`))
+    );
+    if (existingProject) {
+      log.warning(`MCP >>> [initProject] Project '${name}' already exists (found: ${existingProject.name})`);
+      return {
+        content: [{
+          type: 'text',
+          text: JSON.stringify({
+            status: 'already_exists',
+            message: `Project '${name}' already exists on the server`,
+            existing_project: {
+              id: existingProject.id,
+              name: existingProject.name,
+              ssh_url: existingProject.ssh_url_to_repo,
+              web_url: existingProject.web_url,
+              deployment_url: existingProject.deployment_url
+            },
+            next_steps: [
+              'Use the existing project instead of creating a new one',
+              'To update: git push mlgym main',
+              'To check status: use mlgym_status tool'
+            ]
+          }, null, 2)
+        }]
+      };
+    }
+  }
+  log.success(`MCP >>> [initProject] No existing project found, proceeding with creation`);
+
   // Create project via backend API with FLAT structure (matching CLI)
   const projectData = {
     name: name,
@@ -1743,7 +2400,7 @@ async function initProject(args) {
       projectData.docker_compose_content = composeContent;
       log.info(`MCP >>> [initProject] Sending docker-compose content: ${composeContent.length} bytes`);
     } else {
-      log.
+      log.warning('MCP >>> [initProject] docker-compose strategy but no compose file found!');
     }
   }
 
@@ -2599,6 +3256,48 @@ async function deployProject(args) {
       } else {
         log.success('MCP >>> Dockerfile is Coolify compliant');
       }
+
+      // Check for port mismatch between app code and Dockerfile
+      const currentContent = fsSync.readFileSync(dockerfilePath, 'utf8');
+      const currentValidation = validateDockerfile(currentContent);
+      if (currentValidation.exposedPort) {
+        const portMismatch = await detectPortMismatch(local_path, currentValidation.exposedPort);
+        if (portMismatch && portMismatch.mismatch) {
+          log.warning(`MCP >>> Port mismatch detected: App runs on ${portMismatch.appPort} but EXPOSE is ${portMismatch.exposedPort}`);
+          log.info(`MCP >>> Auto-fixing: Changing EXPOSE to ${portMismatch.appPort}`);
+          const fixResult = autoFixDockerfile(currentContent, [{
+            autofix: 'fix_expose_port',
+            suggestedFix: portMismatch.appPort
+          }]);
+          if (fixResult.modified) {
+            fsSync.writeFileSync(dockerfilePath, fixResult.content);
+            log.success(`MCP >>> Fixed port mismatch: EXPOSE now ${portMismatch.appPort}`);
+          }
+        }
+      }
+    }
+  }
+
+  // Step 4.5: Check for missing Node.js dependencies
+  const packageJsonPath = path.join(local_path, 'package.json');
+  if (fsSync.existsSync(packageJsonPath)) {
+    log.info('MCP >>> Checking for missing Node.js dependencies...');
+    const missingDeps = await detectMissingNodeDependencies(local_path);
+    if (missingDeps && missingDeps.missing && missingDeps.packages.length > 0) {
+      log.warning(`MCP >>> Missing dependencies detected: ${missingDeps.packages.join(', ')}`);
+      log.info('MCP >>> Auto-fixing: Adding missing dependencies to package.json');
+
+      const packageJsonContent = fsSync.readFileSync(packageJsonPath, 'utf-8');
+      const packageJson = JSON.parse(packageJsonContent);
+      if (!packageJson.dependencies) packageJson.dependencies = {};
+
+      for (const pkg of missingDeps.packages) {
+        packageJson.dependencies[pkg] = '*'; // Will be resolved to latest during npm install
+      }
+
+      fsSync.writeFileSync(packageJsonPath + '.backup', packageJsonContent);
+      fsSync.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2));
+      log.success(`MCP >>> Added missing dependencies: ${missingDeps.packages.join(', ')}`);
     }
   }
 
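As a reading aid (not from the package itself): when detectMissingNodeDependencies reports, say, ['axios'], the step above writes a package.json.backup copy and pins the missing package with a '*' range. A hypothetical before/after manifest:

```js
// Before: "axios" is required in code but not declared in package.json.
const before = { name: 'demo-app', dependencies: { express: '^4.18.2' } };

// After the auto-fix: '*' is resolved to the latest version at npm install time.
const after = { name: 'demo-app', dependencies: { express: '^4.18.2', axios: '*' } };
```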
@@ -2842,11 +3541,11 @@ async function setEnvironmentVariables(args) {
 
   // Ensure authentication
   let auth = await loadAuth();
-  if (!auth || !auth.jwt_token) {
+  if (!auth || !auth.token) {
     throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
   }
 
-  log.debug(`Using JWT token: ${auth.jwt_token.substring(0, 20)}...`);
+  log.debug(`Using JWT token: ${auth.token.substring(0, 20)}...`);
 
   // Determine project name
   let finalProjectName = project_name;
|
|
|
2872
3571
|
|
|
2873
3572
|
const projectResponse = await axios.get(backendUrl, {
|
|
2874
3573
|
headers: {
|
|
2875
|
-
'Authorization': `Bearer ${auth.
|
|
3574
|
+
'Authorization': `Bearer ${auth.token}`,
|
|
2876
3575
|
'Content-Type': 'application/json'
|
|
2877
3576
|
}
|
|
2878
3577
|
});
|
|
@@ -2944,7 +3643,7 @@ async function getApplicationUUID(projectName, auth) {
 
   const projectResponse = await axios.get(backendUrl, {
     headers: {
-      'Authorization': `Bearer ${auth.jwt_token}`,
+      'Authorization': `Bearer ${auth.token}`,
       'Content-Type': 'application/json'
     }
   });
@@ -2965,7 +3664,7 @@ async function setHealthCheck(args) {
 
   // Ensure authentication
   let auth = await loadAuth();
-  if (!auth || !auth.jwt_token) {
+  if (!auth || !auth.token) {
     throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
   }
 
@@ -3030,7 +3729,7 @@ async function setDomain(args) {
   log.info(`MCP >>> [setDomain] Setting domain for ${project_name || 'project'}`);
 
   let auth = await loadAuth();
-  if (!auth || !auth.jwt_token) {
+  if (!auth || !auth.token) {
     throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
   }
 
@@ -3092,7 +3791,7 @@ async function setDeploymentCommands(args) {
   log.info(`MCP >>> [setDeploymentCommands] Setting deployment commands for ${project_name || 'project'}`);
 
   let auth = await loadAuth();
-  if (!auth || !auth.jwt_token) {
+  if (!auth || !auth.token) {
     throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
   }
 
@@ -3154,7 +3853,7 @@ async function manualDeploy(args) {
   log.info(`MCP >>> [manualDeploy] Triggering manual deployment for ${project_name || 'project'}`);
 
   let auth = await loadAuth();
-  if (!auth || !auth.jwt_token) {
+  if (!auth || !auth.token) {
     throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
   }
 
@@ -3222,7 +3921,7 @@ async function setOptions(args) {
   log.info(`MCP >>> [setOptions] Setting options for ${project_name || 'project'}`);
 
   let auth = await loadAuth();
-  if (!auth || !auth.jwt_token) {
+  if (!auth || !auth.token) {
     throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
   }
 
@@ -3284,7 +3983,7 @@ async function rollback(args) {
   log.info(`MCP >>> [rollback] Rolling back ${project_name || 'project'}`);
 
   let auth = await loadAuth();
-  if (!auth || !auth.jwt_token) {
+  if (!auth || !auth.token) {
     throw new Error('Not authenticated. Please run mlgym_deploy or mlgym_auth_login first');
   }
 
package/package.json
CHANGED