mlgym-deploy 3.3.16 → 3.3.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +313 -37
- package/package.json +1 -1
package/index.js
CHANGED
@@ -18,7 +18,7 @@ import crypto from 'crypto';
 const execAsync = promisify(exec);
 
 // Current version of this MCP server - INCREMENT FOR WORKFLOW FIXES
-const CURRENT_VERSION = '3.3.
+const CURRENT_VERSION = '3.3.23'; // Scala: flexible src structure, copy all .scala files
 const PACKAGE_NAME = 'mlgym-deploy';
 
 // Debug logging configuration - ENABLED BY DEFAULT
@@ -95,6 +95,12 @@ const CONFIG = {
 
 // Helper to load/save authentication
 async function loadAuth() {
+  // First check environment variable
+  if (process.env.MLGYM_TOKEN) {
+    return { token: process.env.MLGYM_TOKEN, email: process.env.MLGYM_EMAIL || 'env-token-user' };
+  }
+
+  // Fall back to config file
   try {
     const data = await fs.readFile(CONFIG.config_file, 'utf8');
     return JSON.parse(data);
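
A minimal usage sketch of the new credential lookup order (illustrative only, not part of the package; it assumes the loadAuth shown above is in scope and the token value is hypothetical): when MLGYM_TOKEN is set in the environment, the config file is never read, so CI jobs can authenticate without a saved credentials file.

    // Env-based auth takes precedence over CONFIG.config_file.
    process.env.MLGYM_TOKEN = 'glpat-example-token';   // hypothetical token value
    process.env.MLGYM_EMAIL = 'ci-bot@example.com';    // optional; defaults to 'env-token-user'
    const auth = await loadAuth();
    // auth -> { token: 'glpat-example-token', email: 'ci-bot@example.com' }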
@@ -216,7 +222,14 @@ Host git.mlgym.io
 
   try {
     const existingConfig = await fs.readFile(configPath, 'utf8');
-    if (
+    if (existingConfig.includes('Host git.mlgym.io')) {
+      // Replace existing MLGym SSH config block with new key
+      const updatedConfig = existingConfig.replace(
+        /# MLGym GitLab.*?Host git\.mlgym\.io.*?(?=\n#|\n\nHost|\n?$)/gs,
+        ''
+      ).trim();
+      await fs.writeFile(configPath, updatedConfig + configEntry, { mode: 0o600 });
+    } else {
       await fs.appendFile(configPath, configEntry);
     }
   } catch {
@@ -558,6 +571,27 @@ async function analyzeProject(local_path = '.') {
       } catch {}
     }
 
+    // Check for Scala/sbt project
+    if (analysis.project_type === 'unknown') {
+      try {
+        await fs.access(path.join(absolutePath, 'build.sbt'));
+        analysis.project_type = 'scala';
+        analysis.detected_files.push('build.sbt');
+        analysis.framework = 'sbt';
+        analysis.build_command = 'sbt assembly';
+        analysis.start_command = 'java -jar app.jar';
+
+        // Check if sbt-assembly plugin exists
+        try {
+          await fs.access(path.join(absolutePath, 'project', 'plugins.sbt'));
+          analysis.detected_files.push('project/plugins.sbt');
+          analysis.has_sbt_assembly = true;
+        } catch {
+          analysis.has_sbt_assembly = false;
+        }
+      } catch {}
+    }
+
   } catch (error) {
     console.error('Project analysis error:', error);
   }
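
For readers skimming the hunk above, this is roughly the analysis object the new branch produces for a repository containing build.sbt and project/plugins.sbt (a sketch of the fields set here only; fields populated elsewhere in analyzeProject are omitted):

    const exampleScalaAnalysis = {
      project_type: 'scala',
      framework: 'sbt',
      build_command: 'sbt assembly',
      start_command: 'java -jar app.jar',
      detected_files: ['build.sbt', 'project/plugins.sbt'],
      has_sbt_assembly: true,   // false when project/plugins.sbt is absent
    };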
@@ -710,6 +744,21 @@ WORKDIR /root/
 COPY --from=builder /app/app .
 EXPOSE 8080
 CMD ["./app"]`;
+  } else if (projectType === 'scala') {
+    dockerfile = `# Build stage
+FROM sbtscala/scala-sbt:eclipse-temurin-jammy-17.0.10_7_1.10.2_2.13.15 AS builder
+WORKDIR /app
+COPY project ./project
+COPY build.sbt .
+COPY src ./src
+RUN sbt assembly
+
+# Production stage
+FROM eclipse-temurin:17-jre-jammy
+WORKDIR /app
+COPY --from=builder /app/target/scala-2.13/app.jar ./app.jar
+EXPOSE 8080
+CMD ["java", "-jar", "app.jar"]`;
   } else {
     // Unknown type - basic Alpine with shell
     dockerfile = `FROM alpine:latest
@@ -760,6 +809,90 @@ async function prepareProject(args) {
     log.warning('MCP >>> [prepareProject-func] Project type is unknown, skipping Dockerfile generation');
   }
 
+  // Scala/sbt: Check for sbt-assembly plugin (required for fat JAR)
+  if (project_type === 'scala') {
+    const projectDir = path.join(absolutePath, 'project');
+    const pluginsSbtPath = path.join(projectDir, 'plugins.sbt');
+    let hasAssemblyPlugin = false;
+
+    try {
+      const pluginsContent = await fs.readFile(pluginsSbtPath, 'utf8');
+      hasAssemblyPlugin = pluginsContent.includes('sbt-assembly');
+    } catch {
+      // plugins.sbt doesn't exist
+    }
+
+    if (!hasAssemblyPlugin) {
+      log.info('MCP >>> [prepareProject-func] Scala project missing sbt-assembly plugin, adding...');
+
+      // Create project directory if it doesn't exist
+      try {
+        await fs.mkdir(projectDir, { recursive: true });
+      } catch {}
+
+      // Create plugins.sbt with sbt-assembly
+      const pluginsContent = 'addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.1.5")\n';
+      await fs.writeFile(pluginsSbtPath, pluginsContent);
+      actions.push('Created project/plugins.sbt with sbt-assembly plugin');
+      log.success('MCP >>> [prepareProject-func] ✅ Added sbt-assembly plugin');
+
+      // Also update build.sbt to add assembly merge strategy if not present
+      const buildSbtPath = path.join(absolutePath, 'build.sbt');
+      try {
+        let buildSbtContent = await fs.readFile(buildSbtPath, 'utf8');
+        if (!buildSbtContent.includes('assemblyMergeStrategy')) {
+          const assemblyConfig = `
+
+assembly / assemblyMergeStrategy := {
+  case PathList("META-INF", xs @ _*) => MergeStrategy.discard
+  case "reference.conf" => MergeStrategy.concat
+  case x => MergeStrategy.first
+}
+
+assembly / assemblyJarName := "app.jar"
+`;
+          buildSbtContent += assemblyConfig;
+          await fs.writeFile(buildSbtPath, buildSbtContent);
+          actions.push('Added assembly merge strategy to build.sbt');
+          log.success('MCP >>> [prepareProject-func] ✅ Added assembly config to build.sbt');
+        }
+      } catch (err) {
+        log.warning('MCP >>> [prepareProject-func] Could not update build.sbt:', err.message);
+      }
+    }
+
+    // Ensure proper Scala project structure - move root .scala files to src/main/scala/
+    try {
+      const srcDir = path.join(absolutePath, 'src', 'main', 'scala');
+      const rootFiles = await fs.readdir(absolutePath);
+      const scalaFilesInRoot = rootFiles.filter(f => f.endsWith('.scala'));
+
+      if (scalaFilesInRoot.length > 0) {
+        // Check if src/main/scala exists
+        let srcExists = false;
+        try {
+          await fs.access(srcDir);
+          srcExists = true;
+        } catch {}
+
+        if (!srcExists) {
+          // Create src/main/scala and move .scala files there
+          await fs.mkdir(srcDir, { recursive: true });
+          for (const scalaFile of scalaFilesInRoot) {
+            const srcPath = path.join(absolutePath, scalaFile);
+            const destPath = path.join(srcDir, scalaFile);
+            await fs.rename(srcPath, destPath);
+            log.info(`MCP >>> [prepareProject-func] Moved ${scalaFile} to src/main/scala/`);
+          }
+          actions.push(`Moved ${scalaFilesInRoot.length} .scala files to src/main/scala/`);
+          log.success('MCP >>> [prepareProject-func] ✅ Created proper Scala project structure');
+        }
+      }
+    } catch (err) {
+      log.warning('MCP >>> [prepareProject-func] Could not reorganize Scala files:', err.message);
+    }
+  }
+
   // Check/create .gitignore
   const gitignorePath = path.join(absolutePath, '.gitignore');
   let gitignoreExists = false;
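
A minimal sketch of what the Scala preparation above is expected to leave behind (illustrative only; looksPrepared is a hypothetical helper, not part of the package): a bare repository with only build.sbt and root-level .scala files ends up with the sbt-assembly plugin declared in project/plugins.sbt and its sources moved under src/main/scala/.

    import fs from 'fs/promises';
    import path from 'path';

    // Hypothetical sanity check for a prepared Scala project.
    async function looksPrepared(projectRoot) {
      const mustExist = [
        'build.sbt',
        path.join('project', 'plugins.sbt'),   // created with sbt-assembly if it was missing
        path.join('src', 'main', 'scala'),     // root .scala files are moved here
      ];
      for (const rel of mustExist) {
        try {
          await fs.access(path.join(projectRoot, rel));
        } catch {
          return false;
        }
      }
      const plugins = await fs.readFile(path.join(projectRoot, 'project', 'plugins.sbt'), 'utf8');
      return plugins.includes('sbt-assembly');
    }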
@@ -788,6 +921,14 @@ build/
 venv/
 env/
 .venv/`;
+    } else if (project_type === 'scala') {
+      gitignoreContent = `target/
+project/target/
+.bsp/
+.idea/
+*.class
+*.log
+.env`;
     } else {
       gitignoreContent = `.env
 *.log
@@ -985,6 +1126,30 @@ async function smartDeploy(args) {
 }
 
 // Deployment Strategy Detection (matches CLI logic)
+// Extract EXPOSE port from Dockerfile
+async function extractDockerfilePort(dockerfilePath) {
+  try {
+    const content = await fs.readFile(dockerfilePath, 'utf8');
+    const lines = content.split('\n');
+    for (const line of lines) {
+      const trimmed = line.trim();
+      if (trimmed.startsWith('EXPOSE ')) {
+        const portMatch = trimmed.match(/EXPOSE\s+(\d+)/);
+        if (portMatch) {
+          const port = portMatch[1];
+          log.info(`MCP >>> [extractDockerfilePort] Found EXPOSE ${port}`);
+          return port;
+        }
+      }
+    }
+    log.info('MCP >>> [extractDockerfilePort] No EXPOSE directive found, defaulting to 3000');
+    return '3000'; // Default if no EXPOSE found
+  } catch (err) {
+    log.error('MCP >>> [extractDockerfilePort] Error reading Dockerfile:', err.message);
+    return '3000';
+  }
+}
+
 async function detectDeploymentStrategy(projectPath) {
   const dockerfilePath = path.join(projectPath, 'Dockerfile');
   const composePathYML = path.join(projectPath, 'docker-compose.yml');
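
Usage sketch for the helper above (assumes extractDockerfilePort is in scope; the path is hypothetical): a Dockerfile containing "EXPOSE 8080" yields the string '8080', while one with no EXPOSE directive falls back to '3000'; in the hunks below, the value flows into strategy.port and from there into projectData.ports_exposes.

    const port = await extractDockerfilePort('./examples/Dockerfile');
    console.log(port);   // '8080' when the file has "EXPOSE 8080", otherwise '3000'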
@@ -1000,8 +1165,9 @@ async function detectDeploymentStrategy(projectPath) {
 
   // Case 1: Only Dockerfile
   if (hasDockerfile && !hasCompose) {
-
-
+    const port = await extractDockerfilePort(dockerfilePath);
+    log.info(`MCP >>> [detectDeploymentStrategy] Strategy: dockerfile (Dockerfile only), port: ${port}`);
+    return { type: 'dockerfile', reason: 'Dockerfile only', port };
   }
 
   // Case 2: Only docker-compose
@@ -1022,8 +1188,9 @@ async function detectDeploymentStrategy(projectPath) {
   // SIMPLE: Only 1 web service, no other services, uses local Dockerfile
   // → docker-compose is just convenience wrapper
   if (totalServiceCount === 1 && webServiceCount === 1 && usesLocalDockerfile) {
-
-
+    const port = await extractDockerfilePort(dockerfilePath);
+    log.info(`MCP >>> [detectDeploymentStrategy] Strategy: dockerfile (single web service), port: ${port}`);
+    return { type: 'dockerfile', reason: 'docker-compose.yml only has web service (convenience wrapper for Dockerfile)', port };
   }
 
   // COMPLEX: Multiple services (web + database, etc.)
@@ -1165,32 +1332,150 @@ function randomizeVolumeNames(content, suffix) {
 function validateDockerfile(content) {
   const lines = content.split('\n');
   const issues = [];
-  let
+  let hasExpose = false;
+  let exposedPort = null;
+  let baseImage = null;
+  let usesCpanm = false;
+  let installsCpanm = false;
+
+  // Elixir multi-stage detection
+  let isElixirMultiStage = false;
+  let hasMixLockCopy = false;
+  let builderStageHasMixDepsGet = false;
 
   for (let i = 0; i < lines.length; i++) {
-    const trimmed = lines[i].trim()
+    const trimmed = lines[i].trim();
+    const upper = trimmed.toUpperCase();
+
+    // Track base image
+    if (upper.startsWith('FROM ')) {
+      baseImage = trimmed.substring(5).split(' ')[0].toLowerCase();
+      // Check for Elixir multi-stage build
+      if (baseImage.includes('elixir') && upper.includes(' AS ')) {
+        isElixirMultiStage = true;
+      }
+    }
 
-    if (
-
-
-
+    if (upper.startsWith('EXPOSE')) {
+      hasExpose = true;
+      // Extract the port number
+      const portMatch = trimmed.match(/EXPOSE\s+(\d+)/i);
+      if (portMatch) {
+        exposedPort = portMatch[1];
       }
     }
+
+    // Check for cpanm usage and installation
+    if (trimmed.includes('cpanm ') && !trimmed.includes('App::cpanminus')) {
+      usesCpanm = true;
+    }
+    if (trimmed.includes('App::cpanminus')) {
+      installsCpanm = true;
+    }
+
+    // Elixir-specific checks
+    if (trimmed.includes('mix deps.get')) {
+      builderStageHasMixDepsGet = true;
+    }
+    if (upper.includes('COPY') && trimmed.includes('mix.lock')) {
+      hasMixLockCopy = true;
+    }
+  }
+
+  if (!hasExpose) {
+    issues.push({
+      issue: 'Dockerfile does not have an EXPOSE directive',
+      fix: 'Add "EXPOSE <port>" to your Dockerfile (e.g., EXPOSE 80 for web servers, EXPOSE 3000 for Node.js)',
+      autofix: 'add_expose'
+    });
+  }
+
+  // Perl-specific: cpanm needs to be installed on slim images
+  if (baseImage && baseImage.includes('perl') && baseImage.includes('slim') && usesCpanm && !installsCpanm) {
+    issues.push({
+      issue: 'Dockerfile uses cpanm but perl:*-slim does not have it pre-installed',
+      fix: 'Add "RUN cpan -T App::cpanminus" before using cpanm, and install build deps: "RUN apt-get update && apt-get install -y --no-install-recommends make gcc && rm -rf /var/lib/apt/lists/*"',
+      autofix: 'perl_cpanm'
+    });
   }
 
-
+  // Elixir-specific: multi-stage builds must copy mix.lock from builder
+  if (isElixirMultiStage && builderStageHasMixDepsGet && !hasMixLockCopy) {
     issues.push({
-      issue: 'Dockerfile
-      fix: 'Add "
+      issue: 'Elixir multi-stage Dockerfile missing mix.lock copy from builder stage',
+      fix: 'Add "COPY --from=builder /app/mix.lock /app/mix.lock" to copy the generated mix.lock file',
+      autofix: 'elixir_mix_lock'
     });
   }
 
   return {
     isValid: issues.length === 0,
-    issues
+    issues,
+    exposedPort,
+    baseImage
   };
 }
 
+// Auto-fix known Dockerfile issues
+function autoFixDockerfile(content, issues) {
+  let lines = content.split('\n');
+  let modified = false;
+
+  for (const issue of issues) {
+    if (issue.autofix === 'perl_cpanm') {
+      // Find the line that uses cpanm and insert the fix before it
+      for (let i = 0; i < lines.length; i++) {
+        const trimmed = lines[i].trim().toUpperCase();
+        if (trimmed.startsWith('RUN') && lines[i].includes('cpanm ') && !lines[i].includes('App::cpanminus')) {
+          // Insert build dependencies and cpanm installation before this line
+          const indent = lines[i].match(/^(\s*)/)[1];
+          lines.splice(i, 0,
+            `${indent}RUN apt-get update && apt-get install -y --no-install-recommends make gcc && rm -rf /var/lib/apt/lists/*`,
+            `${indent}RUN cpan -T App::cpanminus`
+          );
+          modified = true;
+          log.success('MCP >>> Auto-fixed Perl Dockerfile: added cpanm installation');
+          break;
+        }
+      }
+    }
+
+    if (issue.autofix === 'add_expose') {
+      // Find the last FROM or WORKDIR line to add EXPOSE after it
+      const hasAnyExpose = /^\s*EXPOSE\s+\d+/mi.test(content);
+      if (!hasAnyExpose) {
+        let insertIndex = lines.length - 1;
+        for (let i = lines.length - 1; i >= 0; i--) {
+          if (lines[i].trim().startsWith('WORKDIR') || lines[i].trim().startsWith('FROM')) {
+            insertIndex = i + 1;
+            break;
+          }
+        }
+        lines.splice(insertIndex, 0, '', 'EXPOSE 80');
+        modified = true;
+        log.success('MCP >>> Auto-fixed Dockerfile: added EXPOSE 80');
+      }
+    }
+
+    if (issue.autofix === 'elixir_mix_lock') {
+      // Find the last COPY --from=builder line and add mix.lock copy after it
+      for (let i = lines.length - 1; i >= 0; i--) {
+        const trimmed = lines[i].trim();
+        if (trimmed.includes('COPY --from=builder') && trimmed.includes('mix.exs')) {
+          // Insert mix.lock copy right after mix.exs copy
+          const indent = lines[i].match(/^(\s*)/)[1];
+          lines.splice(i + 1, 0, `${indent}COPY --from=builder /app/mix.lock /app/mix.lock`);
+          modified = true;
+          log.success('MCP >>> Auto-fixed Elixir Dockerfile: added mix.lock copy from builder');
+          break;
+        }
+      }
+    }
+  }
+
+  return { content: lines.join('\n'), modified };
+}
+
 // Analyze docker-compose.yml to determine deployment strategy
 async function analyzeComposeFile(composePath) {
   const content = await fs.readFile(composePath, 'utf-8');
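
A worked example of the add_expose auto-fix above (illustrative only; it assumes autoFixDockerfile and log are in scope, and the Dockerfile content is a toy input): given a file with no EXPOSE directive, the fixer inserts "EXPOSE 80" after the last FROM/WORKDIR line and reports the content as modified.

    const before = [
      'FROM nginx:alpine',
      'COPY site/ /usr/share/nginx/html/',
    ].join('\n');

    // Only the autofix field matters to autoFixDockerfile; issue/fix text is free-form here.
    const issues = [{ issue: 'missing EXPOSE', fix: 'add EXPOSE', autofix: 'add_expose' }];
    const { content, modified } = autoFixDockerfile(before, issues);
    // modified === true
    // content === 'FROM nginx:alpine\n\nEXPOSE 80\nCOPY site/ /usr/share/nginx/html/'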
@@ -1357,6 +1642,12 @@ async function initProject(args) {
   projectData.hostname = hostname;
   projectData.local_path = local_path;
 
+  // Send ports_exposes for Dockerfile deployments (v3.3.17+)
+  if (strategy.port) {
+    projectData.ports_exposes = strategy.port;
+    log.info(`MCP >>> [initProject] Sending ports_exposes: ${strategy.port} to backend`);
+  }
+
   // Read docker-compose content if using dockercompose strategy (v3.2.1+)
   if (strategy.type === 'dockercompose') {
     const composeFiles = ['docker-compose.yml', 'docker-compose.yaml'];
@@ -2208,28 +2499,13 @@ async function deployProject(args) {
         log.info(`  Fix: ${issue.fix}`);
       });
 
-      //
-
-
-
-
-      // Find the last FROM or WORKDIR line to add EXPOSE after it
-      let insertIndex = lines.length - 1;
-      for (let i = lines.length - 1; i >= 0; i--) {
-        if (lines[i].trim().startsWith('WORKDIR') || lines[i].trim().startsWith('FROM')) {
-          insertIndex = i + 1;
-          break;
-        }
-      }
-
-      lines.splice(insertIndex, 0, '', 'EXPOSE 80');
-      const fixedContent = lines.join('\n');
-
-      // Create backup
+      // Use autoFixDockerfile to apply all fixes
+      const fixResult = autoFixDockerfile(content, validation.issues);
+      if (fixResult.modified) {
+        // Create backup and save fixed content
       fsSync.writeFileSync(dockerfilePath + '.backup', content);
-      fsSync.writeFileSync(dockerfilePath,
-
-      log.success('MCP >>> Fixed Dockerfile: added EXPOSE 80');
+        fsSync.writeFileSync(dockerfilePath, fixResult.content);
+        log.success('MCP >>> Dockerfile auto-fixes applied');
       }
     } else {
       log.success('MCP >>> Dockerfile is Coolify compliant');
package/package.json
CHANGED