deepdebug-local-agent 1.0.19 → 1.0.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Dockerfile +32 -16
- package/cloudbuild-agent-qa.yaml +43 -0
- package/docker-compose.yml +2 -2
- package/package.json +1 -1
- package/src/exec-utils.js +90 -5
- package/src/server.js +433 -54
- package/src/vercel-proxy.js +226 -0
- package/tunnel-manager.js +70 -0
package/Dockerfile
CHANGED
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
# ║ DeepDebug Local Agent - Enterprise Docker ║
|
|
3
3
|
# ║ ║
|
|
4
4
|
# ║ Security-hardened container for enterprise deployments ║
|
|
5
|
-
# ║ Supports:
|
|
5
|
+
# ║ Supports: Cloud Run with NFS (GCP Filestore) ║
|
|
6
6
|
# ╚══════════════════════════════════════════════════════════════╝
|
|
7
7
|
|
|
8
8
|
# ===========================================
|
|
@@ -12,14 +12,9 @@ FROM node:20-alpine AS builder
|
|
|
12
12
|
|
|
13
13
|
WORKDIR /build
|
|
14
14
|
|
|
15
|
-
# Copy package files first (better cache)
|
|
16
15
|
COPY package*.json ./
|
|
17
|
-
|
|
18
|
-
# Install dependencies (production only)
|
|
19
|
-
# Using npm install instead of npm ci for repos without package-lock.json
|
|
20
16
|
RUN npm install --omit=dev && npm cache clean --force
|
|
21
17
|
|
|
22
|
-
# Copy source code
|
|
23
18
|
COPY src/ ./src/
|
|
24
19
|
|
|
25
20
|
# ===========================================
|
|
@@ -27,48 +22,69 @@ COPY src/ ./src/
|
|
|
27
22
|
# ===========================================
|
|
28
23
|
FROM node:20-alpine AS production
|
|
29
24
|
|
|
30
|
-
# Security: Add labels for compliance
|
|
31
25
|
LABEL org.opencontainers.image.title="DeepDebug Local Agent"
|
|
32
26
|
LABEL org.opencontainers.image.description="Enterprise debugging agent for code analysis"
|
|
33
27
|
LABEL org.opencontainers.image.vendor="InspTech AI"
|
|
34
|
-
LABEL org.opencontainers.image.version="1.
|
|
28
|
+
LABEL org.opencontainers.image.version="1.2.0"
|
|
35
29
|
LABEL org.opencontainers.image.licenses="Proprietary"
|
|
36
30
|
LABEL security.scan.required="true"
|
|
37
31
|
|
|
38
|
-
#
|
|
32
|
+
# Create non-root user
|
|
39
33
|
RUN addgroup -g 1001 -S deepdebug && \
|
|
40
34
|
adduser -u 1001 -S deepdebug -G deepdebug
|
|
41
35
|
|
|
42
|
-
#
|
|
36
|
+
# Install dependencies including Java 17 for Maven wrapper support
|
|
43
37
|
RUN apk update && \
|
|
44
38
|
apk upgrade --no-cache && \
|
|
45
39
|
apk add --no-cache \
|
|
46
40
|
dumb-init \
|
|
47
41
|
git \
|
|
48
42
|
curl \
|
|
43
|
+
nfs-utils \
|
|
44
|
+
openjdk17-jdk \
|
|
49
45
|
&& rm -rf /var/cache/apk/*
|
|
50
46
|
|
|
47
|
+
# Set JAVA_HOME so ./mvnw can find Java
|
|
48
|
+
# Alpine stores JVM under java-17-openjdk with arch suffix - use readlink to resolve
|
|
49
|
+
RUN ln -sf $(dirname $(dirname $(readlink -f $(which java)))) /usr/local/java-home
|
|
50
|
+
ENV JAVA_HOME=/usr/local/java-home
|
|
51
|
+
ENV PATH="$JAVA_HOME/bin:$PATH"
|
|
52
|
+
|
|
53
|
+
# Create NFS mount point with correct ownership
|
|
54
|
+
RUN mkdir -p /mnt/workspaces && \
|
|
55
|
+
chown deepdebug:deepdebug /mnt/workspaces
|
|
56
|
+
|
|
57
|
+
# Install cloudflared for tunnel support (Vibe preview)
|
|
58
|
+
RUN wget -q https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 \
|
|
59
|
+
-O /usr/local/bin/cloudflared && \
|
|
60
|
+
chmod +x /usr/local/bin/cloudflared && \
|
|
61
|
+
cloudflared --version
|
|
62
|
+
|
|
63
|
+
# Install GitHub CLI (gh) for PR creation
|
|
64
|
+
RUN wget -q https://github.com/cli/cli/releases/download/v2.63.2/gh_2.63.2_linux_amd64.tar.gz \
|
|
65
|
+
-O /tmp/gh.tar.gz && \
|
|
66
|
+
tar -xzf /tmp/gh.tar.gz -C /tmp && \
|
|
67
|
+
mv /tmp/gh_2.63.2_linux_amd64/bin/gh /usr/local/bin/gh && \
|
|
68
|
+
chmod +x /usr/local/bin/gh && \
|
|
69
|
+
rm -rf /tmp/gh* && \
|
|
70
|
+
gh --version
|
|
71
|
+
|
|
51
72
|
WORKDIR /app
|
|
52
73
|
|
|
53
|
-
# Copy from builder with correct ownership
|
|
54
74
|
COPY --from=builder --chown=deepdebug:deepdebug /build/node_modules ./node_modules
|
|
55
75
|
COPY --from=builder --chown=deepdebug:deepdebug /build/src ./src
|
|
56
76
|
COPY --chown=deepdebug:deepdebug package*.json ./
|
|
57
77
|
|
|
58
|
-
# Security: Switch to non-root user
|
|
59
78
|
USER deepdebug
|
|
60
79
|
|
|
61
|
-
# Health check
|
|
62
80
|
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
|
63
81
|
CMD curl -f http://localhost:5055/health || exit 1
|
|
64
82
|
|
|
65
|
-
# Environment
|
|
66
83
|
ENV NODE_ENV=production
|
|
67
84
|
ENV PORT=5055
|
|
85
|
+
ENV WORKSPACES_MOUNT=/mnt/workspaces
|
|
68
86
|
|
|
69
|
-
# Expose port
|
|
70
87
|
EXPOSE 5055
|
|
71
88
|
|
|
72
|
-
# Security: Use dumb-init to handle signals properly
|
|
73
89
|
ENTRYPOINT ["dumb-init", "--"]
|
|
74
90
|
CMD ["node", "src/server.js"]
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
steps:
|
|
2
|
+
# Step 1: Build Docker image
|
|
3
|
+
- name: 'gcr.io/cloud-builders/docker'
|
|
4
|
+
args:
|
|
5
|
+
- 'build'
|
|
6
|
+
- '--platform=linux/amd64'
|
|
7
|
+
- '-t'
|
|
8
|
+
- 'us-central1-docker.pkg.dev/insptechai/deepdebug-docker/local-agent-qa:latest'
|
|
9
|
+
- '.'
|
|
10
|
+
timeout: '600s'
|
|
11
|
+
|
|
12
|
+
# Step 2: Push to Artifact Registry
|
|
13
|
+
- name: 'gcr.io/cloud-builders/docker'
|
|
14
|
+
args:
|
|
15
|
+
- 'push'
|
|
16
|
+
- 'us-central1-docker.pkg.dev/insptechai/deepdebug-docker/local-agent-qa:latest'
|
|
17
|
+
|
|
18
|
+
# Step 3: Deploy to Cloud Run QA
|
|
19
|
+
- name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
|
|
20
|
+
entrypoint: 'gcloud'
|
|
21
|
+
args:
|
|
22
|
+
- 'run'
|
|
23
|
+
- 'deploy'
|
|
24
|
+
- 'deepdebug-local-agent-qa'
|
|
25
|
+
- '--image=us-central1-docker.pkg.dev/insptechai/deepdebug-docker/local-agent-qa:latest'
|
|
26
|
+
- '--region=us-central1'
|
|
27
|
+
- '--platform=managed'
|
|
28
|
+
- '--allow-unauthenticated'
|
|
29
|
+
- '--memory=1Gi'
|
|
30
|
+
- '--cpu=1'
|
|
31
|
+
- '--min-instances=1'
|
|
32
|
+
- '--max-instances=3'
|
|
33
|
+
- '--port=5055'
|
|
34
|
+
- '--timeout=300'
|
|
35
|
+
- '--update-env-vars=NODE_ENV=qa'
|
|
36
|
+
|
|
37
|
+
images:
|
|
38
|
+
- 'us-central1-docker.pkg.dev/insptechai/deepdebug-docker/local-agent-qa:latest'
|
|
39
|
+
|
|
40
|
+
options:
|
|
41
|
+
logging: CLOUD_LOGGING_ONLY
|
|
42
|
+
|
|
43
|
+
timeout: '1200s'
|
package/docker-compose.yml
CHANGED
|
@@ -52,8 +52,8 @@ services:
|
|
|
52
52
|
# Volumes
|
|
53
53
|
# ─────────────────────────────────────────
|
|
54
54
|
volumes:
|
|
55
|
-
# Project source code
|
|
56
|
-
-
|
|
55
|
+
# Project source code - writable so agent can apply patches
|
|
56
|
+
- /Users/macintosh/IdeaProjects:/workspace
|
|
57
57
|
# Temp directory for container writes
|
|
58
58
|
- agent-tmp:/tmp
|
|
59
59
|
|
package/package.json
CHANGED
package/src/exec-utils.js
CHANGED
|
@@ -1,10 +1,12 @@
|
|
|
1
1
|
import { spawn } from "child_process";
|
|
2
2
|
import stripAnsi from "strip-ansi";
|
|
3
3
|
|
|
4
|
-
export function run(cmd, args, cwd, timeoutMs = 10 * 60 * 1000) {
|
|
4
|
+
export function run(cmd, args, cwd, timeoutMs = 10 * 60 * 1000, env = null) {
|
|
5
5
|
return new Promise((resolve) => {
|
|
6
6
|
const start = Date.now();
|
|
7
|
-
const
|
|
7
|
+
const spawnOpts = { cwd, shell: true };
|
|
8
|
+
if (env) spawnOpts.env = env;
|
|
9
|
+
const child = spawn(cmd, args, spawnOpts);
|
|
8
10
|
let stdout = "";
|
|
9
11
|
let stderr = "";
|
|
10
12
|
const timer = setTimeout(() => {
|
|
@@ -73,12 +75,95 @@ export async function compileAndTest({ language, buildTool, cwd, skipTests = fal
|
|
|
73
75
|
}
|
|
74
76
|
}
|
|
75
77
|
|
|
78
|
+
// Fix for maven-compiler-plugin 3.8.1 + Java 17:
|
|
79
|
+
// plexus-compiler-javac 2.8.4 does not support --release 17.
|
|
80
|
+
// Workaround: disable the --release flag entirely and use -source/-target instead.
|
|
81
|
+
// Also pass JAVA_HOME explicitly so mvnw uses the correct JDK.
|
|
82
|
+
// Dynamically prepare Maven environment and fix common Java version issues
|
|
83
|
+
const fs2 = await import("fs");
|
|
84
|
+
const path2 = await import("path");
|
|
85
|
+
|
|
86
|
+
// 1. Detect Java version available
|
|
87
|
+
const javaHome = process.env.JAVA_HOME || "/usr/local/java-home";
|
|
88
|
+
const javacPath = `${javaHome}/bin/javac`;
|
|
89
|
+
const hasJavac = fs2.existsSync(javacPath);
|
|
90
|
+
const mavenEnv = {
|
|
91
|
+
...process.env,
|
|
92
|
+
JAVA_HOME: javaHome,
|
|
93
|
+
PATH: `${javaHome}/bin:${process.env.PATH}`
|
|
94
|
+
};
|
|
95
|
+
console.log(`[BUILD] JAVA_HOME=${javaHome}, javac available=${hasJavac}`);
|
|
96
|
+
|
|
97
|
+
// 2. Detect Java version from javac
|
|
98
|
+
let javaVersion = 11; // default fallback
|
|
99
|
+
try {
|
|
100
|
+
const versionResult = await run(`${javaHome}/bin/java`, ["-version"], cwd, 5000, mavenEnv);
|
|
101
|
+
const versionOutput = versionResult.stdout + versionResult.stderr;
|
|
102
|
+
const match = versionOutput.match(/version "(\d+)/);
|
|
103
|
+
if (match) javaVersion = parseInt(match[1]);
|
|
104
|
+
console.log(`[BUILD] Detected Java version: ${javaVersion}`);
|
|
105
|
+
} catch {}
|
|
106
|
+
|
|
107
|
+
// 3. Patch pom.xml dynamically based on what's in it
|
|
108
|
+
const pomPath = path2.join(cwd, "pom.xml");
|
|
109
|
+
const javaCompatFlags = [];
|
|
110
|
+
if (fs2.existsSync(pomPath)) {
|
|
111
|
+
try {
|
|
112
|
+
let pomContent = fs2.readFileSync(pomPath, "utf8");
|
|
113
|
+
let patched = pomContent;
|
|
114
|
+
|
|
115
|
+
// Detect configured java version in pom
|
|
116
|
+
const javaVersionMatch = pomContent.match(/<java\.version>(\d+)<\/java\.version>/);
|
|
117
|
+
const pomJavaVersion = javaVersionMatch ? parseInt(javaVersionMatch[1]) : javaVersion;
|
|
118
|
+
console.log(`[BUILD] pom.xml java.version=${pomJavaVersion}`);
|
|
119
|
+
|
|
120
|
+
// Detect maven-compiler-plugin version
|
|
121
|
+
const compilerVersionMatch = pomContent.match(/<artifactId>maven-compiler-plugin<\/artifactId>\s*<version>([^<]+)<\/version>/);
|
|
122
|
+
const compilerVersion = compilerVersionMatch ? compilerVersionMatch[1] : "unknown";
|
|
123
|
+
console.log(`[BUILD] maven-compiler-plugin version=${compilerVersion}`);
|
|
124
|
+
|
|
125
|
+
// If compiler plugin < 3.10 and java >= 17: upgrade to 3.11.0
|
|
126
|
+
// (3.8.x uses plexus-compiler-javac 2.8.4 which doesn't support --release 17)
|
|
127
|
+
const [compMajor, compMinor] = compilerVersion.split(".").map(Number);
|
|
128
|
+
if (!isNaN(compMajor) && (compMajor < 3 || (compMajor === 3 && compMinor < 10)) && pomJavaVersion >= 17) {
|
|
129
|
+
patched = patched.replace(
|
|
130
|
+
new RegExp("(<artifactId>maven-compiler-plugin<\/artifactId>\\s*<version>)[^<]+(<\/version>)"),
|
|
131
|
+
"$1" + "3.11.0" + "$2"
|
|
132
|
+
);
|
|
133
|
+
console.log(`[BUILD] Patched: maven-compiler-plugin ${compilerVersion} -> 3.11.0`);
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
// If pom uses <release> tag that might fail, replace with <source>/<target>
|
|
137
|
+
if (pomContent.includes("<release>") && pomJavaVersion >= 17) {
|
|
138
|
+
patched = patched.replace(
|
|
139
|
+
/<release>\d+<\/release>/g,
|
|
140
|
+
`<source>${pomJavaVersion}</source>
|
|
141
|
+
<target>${pomJavaVersion}</target>`
|
|
142
|
+
);
|
|
143
|
+
console.log(`[BUILD] Patched: replaced <release> with <source>/<target> for Java ${pomJavaVersion}`);
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
if (patched !== pomContent) {
|
|
147
|
+
fs2.writeFileSync(pomPath, patched, "utf8");
|
|
148
|
+
console.log("[BUILD] pom.xml patched successfully");
|
|
149
|
+
} else {
|
|
150
|
+
console.log("[BUILD] pom.xml no patches needed");
|
|
151
|
+
}
|
|
152
|
+
} catch (patchErr) {
|
|
153
|
+
console.warn("[BUILD] Could not patch pom.xml:", patchErr.message);
|
|
154
|
+
}
|
|
155
|
+
}
|
|
156
|
+
|
|
76
157
|
const args = skipTests
|
|
77
|
-
? ["compile", "-
|
|
78
|
-
: ["test",
|
|
158
|
+
? ["compile", "-DskipTests", ...javaCompatFlags]
|
|
159
|
+
: ["test", ...javaCompatFlags];
|
|
79
160
|
|
|
80
161
|
console.log(`[BUILD] [COMPILE] Running: ${mvnCmd} ${args.join(" ")}`);
|
|
81
|
-
result = await run(mvnCmd, args, cwd);
|
|
162
|
+
result = await run(mvnCmd, args, cwd, 10 * 60 * 1000, mavenEnv);
|
|
163
|
+
if (result.code !== 0) {
|
|
164
|
+
console.error("[ERR] [COMPILE] stdout:", result.stdout.substring(0, 2000));
|
|
165
|
+
console.error("[ERR] [COMPILE] stderr:", result.stderr.substring(0, 2000));
|
|
166
|
+
}
|
|
82
167
|
}
|
|
83
168
|
else if (language === "java" && buildTool === "gradle") {
|
|
84
169
|
// Gradle: clean build
|
package/src/server.js
CHANGED
|
@@ -5,7 +5,7 @@ import path from "path";
|
|
|
5
5
|
import os from "os";
|
|
6
6
|
import fs from "fs";
|
|
7
7
|
const fsPromises = fs.promises;
|
|
8
|
-
import { exec } from "child_process";
|
|
8
|
+
import { exec, spawn } from "child_process";
|
|
9
9
|
import { promisify } from "util";
|
|
10
10
|
import { EventEmitter } from "events";
|
|
11
11
|
import { exists, listRecursive, readFile, writeFile } from "./fs-utils.js";
|
|
@@ -24,6 +24,7 @@ import { DTOAnalyzer } from "./analyzers/dto-analyzer.js";
|
|
|
24
24
|
import { ConfigAnalyzer } from "./analyzers/config-analyzer.js";
|
|
25
25
|
import { WorkspaceManager } from "./workspace-manager.js";
|
|
26
26
|
import { startMCPHttpServer } from "./mcp-http-server.js";
|
|
27
|
+
import { registerVercelRoutes } from "./vercel-proxy.js";
|
|
27
28
|
|
|
28
29
|
const execAsync = promisify(exec);
|
|
29
30
|
|
|
@@ -545,24 +546,49 @@ const app = express();
|
|
|
545
546
|
// FIXED: Support Cloud Run PORT environment variable (GCP uses PORT)
|
|
546
547
|
const PORT = process.env.PORT || process.env.LOCAL_AGENT_PORT || 5055;
|
|
547
548
|
|
|
548
|
-
//
|
|
549
|
+
// CORS - Allow from Cloud Run, local dev, and Lovable frontend domains
|
|
549
550
|
app.use(cors({
|
|
550
|
-
origin:
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
551
|
+
origin: function(origin, callback) {
|
|
552
|
+
// Allow requests with no origin (curl, mobile apps, server-to-server)
|
|
553
|
+
if (!origin) return callback(null, true);
|
|
554
|
+
|
|
555
|
+
const allowed = [
|
|
556
|
+
// Local development
|
|
557
|
+
"http://localhost:3010",
|
|
558
|
+
"http://localhost:3000",
|
|
559
|
+
"http://localhost:8085",
|
|
560
|
+
"http://127.0.0.1:3010",
|
|
561
|
+
"http://127.0.0.1:3000",
|
|
562
|
+
"http://127.0.0.1:8085",
|
|
563
|
+
];
|
|
564
|
+
|
|
565
|
+
const allowedPatterns = [
|
|
566
|
+
/https:\/\/.*\.run\.app$/, // Cloud Run
|
|
567
|
+
/https:\/\/.*\.web\.app$/, // Firebase hosting
|
|
568
|
+
/https:\/\/.*\.lovable\.app$/, // Lovable preview
|
|
569
|
+
/https:\/\/.*\.lovableproject\.com$/, // Lovable project domains
|
|
570
|
+
/https:\/\/.*\.netlify\.app$/, // Netlify
|
|
571
|
+
/https:\/\/.*\.vercel\.app$/, // Vercel
|
|
572
|
+
/https:\/\/deepdebug\.ai$/, // Production
|
|
573
|
+
/https:\/\/.*\.deepdebug\.ai$/, // Production subdomains
|
|
574
|
+
];
|
|
575
|
+
|
|
576
|
+
if (allowed.includes(origin) || allowedPatterns.some(p => p.test(origin))) {
|
|
577
|
+
return callback(null, true);
|
|
578
|
+
}
|
|
579
|
+
|
|
580
|
+
// In development/QA, log and allow unknown origins instead of blocking
|
|
581
|
+
const env = process.env.NODE_ENV || 'development';
|
|
582
|
+
if (env !== 'production') {
|
|
583
|
+
console.warn(`CORS: allowing unknown origin in non-prod: ${origin}`);
|
|
584
|
+
return callback(null, true);
|
|
585
|
+
}
|
|
586
|
+
|
|
587
|
+
console.warn(`CORS: blocked origin: ${origin}`);
|
|
588
|
+
return callback(new Error(`CORS: origin ${origin} not allowed`));
|
|
589
|
+
},
|
|
564
590
|
methods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
|
|
565
|
-
allowedHeaders: ["Content-Type", "Authorization", "X-Tenant-ID"],
|
|
591
|
+
allowedHeaders: ["Content-Type", "Authorization", "X-Tenant-ID", "X-User-ID"],
|
|
566
592
|
credentials: true
|
|
567
593
|
}));
|
|
568
594
|
|
|
@@ -573,7 +599,9 @@ app.use(bodyParser.json({ limit: "50mb" }));
|
|
|
573
599
|
|
|
574
600
|
// DEFAULT WORKSPACE - Define o workspace padro
|
|
575
601
|
// Pode ser sobrescrito via varivel de ambiente ou POST /workspace/open
|
|
576
|
-
const DEFAULT_WORKSPACE = process.env.
|
|
602
|
+
const DEFAULT_WORKSPACE = process.env.WORKSPACE_ROOT
|
|
603
|
+
|| process.env.DEFAULT_WORKSPACE
|
|
604
|
+
|| '/Users/macintosh/IdeaProjects/pure-core-ms';
|
|
577
605
|
|
|
578
606
|
let WORKSPACE_ROOT = fs.existsSync(DEFAULT_WORKSPACE) ? DEFAULT_WORKSPACE : null;
|
|
579
607
|
if (WORKSPACE_ROOT) {
|
|
@@ -622,7 +650,19 @@ aiEngine = new AIVibeCodingEngine(processManager, () => WORKSPACE_ROOT);
|
|
|
622
650
|
// ============================================
|
|
623
651
|
const BACKUPS = new Map();
|
|
624
652
|
const MAX_BACKUPS = 50;
|
|
625
|
-
|
|
653
|
+
// Store backup index on NFS Filestore so it survives container restarts
|
|
654
|
+
// Falls back to /tmp if NFS is not mounted (local dev)
|
|
655
|
+
const NFS_BACKUPS_DIR = process.env.WORKSPACE_MOUNT || '/mnt/workspaces';
|
|
656
|
+
const NFS_BACKUP_INDEX = path.join(NFS_BACKUPS_DIR, '.deepdebug-backups-index.json');
|
|
657
|
+
const BACKUP_INDEX_PATH = (() => {
|
|
658
|
+
try {
|
|
659
|
+
if (fs.existsSync(NFS_BACKUPS_DIR)) {
|
|
660
|
+
return NFS_BACKUP_INDEX;
|
|
661
|
+
}
|
|
662
|
+
} catch {}
|
|
663
|
+
return path.join(os.tmpdir(), 'deepdebug-backups-index.json');
|
|
664
|
+
})();
|
|
665
|
+
console.log(`[BACKUPS] Index path: ${BACKUP_INDEX_PATH}`);
|
|
626
666
|
|
|
627
667
|
/**
|
|
628
668
|
* Persist backup index to disk so diffs survive server restarts.
|
|
@@ -884,11 +924,15 @@ app.post("/workspace/clone", async (req, res) => {
|
|
|
884
924
|
const tenantId = body.tenantId;
|
|
885
925
|
const workspaceId = body.workspaceId;
|
|
886
926
|
const gitToken = body.gitToken;
|
|
887
|
-
|
|
927
|
+
// Accept gitBranch (onboarding frontend) OR branch (legacy)
|
|
928
|
+
const branch = body.branch || body.gitBranch;
|
|
929
|
+
// Accept gitProvider (onboarding frontend) for auth URL construction
|
|
930
|
+
const gitProvider = body.gitProvider || (gitUrl && gitUrl.includes('bitbucket') ? 'bitbucket' : gitUrl && gitUrl.includes('gitlab') ? 'gitlab' : 'github');
|
|
888
931
|
|
|
889
932
|
if (!gitUrl) return res.status(400).json({ ok: false, error: "gitUrl is required" });
|
|
890
933
|
|
|
891
|
-
|
|
934
|
+
// Strip ALL trailing .git suffixes (handles accidental double .git.git from frontend)
|
|
935
|
+
const repoName = gitUrl.split('/').pop().replace(/\.git$/i, '').replace(/\.git$/i, '');
|
|
892
936
|
|
|
893
937
|
const NFS_MOUNT = '/mnt/workspaces';
|
|
894
938
|
let absTarget;
|
|
@@ -903,17 +947,22 @@ app.post("/workspace/clone", async (req, res) => {
|
|
|
903
947
|
console.log(`[clone] Using fallback path: ${absTarget}`);
|
|
904
948
|
}
|
|
905
949
|
|
|
906
|
-
|
|
907
|
-
|
|
908
|
-
|
|
909
|
-
|
|
910
|
-
|
|
911
|
-
|
|
950
|
+
// Clean the gitUrl before using it (strip accidental double .git suffix)
|
|
951
|
+
const cleanGitUrl = gitUrl.replace(/\.git\.git$/i, '.git');
|
|
952
|
+
|
|
953
|
+
let authenticatedUrl = cleanGitUrl;
|
|
954
|
+
if (gitToken && cleanGitUrl.startsWith('https://') && !cleanGitUrl.includes('@')) {
|
|
955
|
+
const providerLower = (gitProvider || '').toLowerCase();
|
|
956
|
+
if (providerLower === 'bitbucket' || cleanGitUrl.includes('bitbucket.org')) {
|
|
957
|
+
authenticatedUrl = cleanGitUrl.replace('https://', `https://x-token-auth:${gitToken}@`);
|
|
958
|
+
} else if (providerLower === 'gitlab' || cleanGitUrl.includes('gitlab.com')) {
|
|
959
|
+
authenticatedUrl = cleanGitUrl.replace('https://', `https://oauth2:${gitToken}@`);
|
|
912
960
|
} else {
|
|
913
|
-
|
|
961
|
+
// github or any other provider
|
|
962
|
+
authenticatedUrl = cleanGitUrl.replace('https://', `https://x-access-token:${gitToken}@`);
|
|
914
963
|
}
|
|
915
964
|
}
|
|
916
|
-
console.log(`Clone request: ${
|
|
965
|
+
console.log(`Clone request: ${cleanGitUrl} -> ${absTarget}`);
|
|
917
966
|
|
|
918
967
|
try {
|
|
919
968
|
// Ensure parent directory exists
|
|
@@ -1049,7 +1098,7 @@ app.get("/workspace/file-content", async (req, res) => {
|
|
|
1049
1098
|
|
|
1050
1099
|
/** Escreve/salva conteudo de arquivo no workspace */
|
|
1051
1100
|
app.post("/workspace/write-file", async (req, res) => {
|
|
1052
|
-
const workspaceRoot =
|
|
1101
|
+
const workspaceRoot = resolveWorkspaceRoot(req);
|
|
1053
1102
|
if (!workspaceRoot) return res.status(400).json({ error: "workspace not set" });
|
|
1054
1103
|
|
|
1055
1104
|
const { path: relativePath, content: fileContent } = req.body || {};
|
|
@@ -1653,7 +1702,9 @@ app.post("/workspace/write", async (req, res) => {
|
|
|
1653
1702
|
const { path: rel, content } = req.body || {};
|
|
1654
1703
|
if (!rel) return res.status(400).json({ error: "path is required" });
|
|
1655
1704
|
try {
|
|
1656
|
-
|
|
1705
|
+
const fullPath = path.join(wsRoot, rel);
|
|
1706
|
+
await fsPromises.mkdir(path.dirname(fullPath), { recursive: true });
|
|
1707
|
+
await writeFile(fullPath, content ?? "", "utf8");
|
|
1657
1708
|
res.json({ ok: true, path: rel, bytes: Buffer.byteLength(content ?? "", "utf8") });
|
|
1658
1709
|
} catch (e) {
|
|
1659
1710
|
res.status(400).json({ error: "write failed", details: String(e) });
|
|
@@ -1795,23 +1846,29 @@ app.post("/workspace/test-local/compile", async (req, res) => {
|
|
|
1795
1846
|
skipTests: true
|
|
1796
1847
|
});
|
|
1797
1848
|
|
|
1798
|
-
|
|
1799
|
-
|
|
1849
|
+
// Support both formats:
|
|
1850
|
+
// - new format: { success, code, steps[] } from updated exec-utils.js
|
|
1851
|
+
// - legacy format: { code, stdout, stderr } from older exec-utils.js
|
|
1852
|
+
const isSuccess = compileResult.success !== undefined
|
|
1853
|
+
? compileResult.success
|
|
1854
|
+
: compileResult.code === 0;
|
|
1855
|
+
|
|
1856
|
+
const errorOutput = compileResult.steps
|
|
1857
|
+
? compileResult.steps.map(s => s.stderr || '').filter(Boolean).join('\n').trim()
|
|
1858
|
+
: (compileResult.stderr || '');
|
|
1800
1859
|
|
|
1801
|
-
|
|
1802
|
-
|
|
1803
|
-
|
|
1804
|
-
.join('\n')
|
|
1805
|
-
.trim();
|
|
1860
|
+
const stdoutOutput = compileResult.steps
|
|
1861
|
+
? compileResult.steps.map(s => s.stdout || '').filter(Boolean).join('\n').trim()
|
|
1862
|
+
: (compileResult.stdout || '');
|
|
1806
1863
|
|
|
1807
|
-
|
|
1808
|
-
|
|
1809
|
-
|
|
1810
|
-
.join('\n')
|
|
1811
|
-
.trim();
|
|
1864
|
+
const totalDuration = compileResult.steps
|
|
1865
|
+
? compileResult.steps.reduce((sum, s) => sum + (s.duration || 0), 0)
|
|
1866
|
+
: (compileResult.duration || 0);
|
|
1812
1867
|
|
|
1813
|
-
|
|
1814
|
-
|
|
1868
|
+
if (!isSuccess) {
|
|
1869
|
+
TEST_LOCAL_STATE.status = "error";
|
|
1870
|
+
console.error("[TEST-LOCAL] Compilation failed. stderr:", errorOutput.substring(0, 500));
|
|
1871
|
+
console.error("[TEST-LOCAL] Compilation failed. stdout:", stdoutOutput.substring(0, 500));
|
|
1815
1872
|
|
|
1816
1873
|
TEST_LOCAL_STATE.compilationResult = {
|
|
1817
1874
|
success: false,
|
|
@@ -1828,9 +1885,6 @@ app.post("/workspace/test-local/compile", async (req, res) => {
|
|
|
1828
1885
|
});
|
|
1829
1886
|
}
|
|
1830
1887
|
|
|
1831
|
-
const totalDuration = (compileResult.steps || [])
|
|
1832
|
-
.reduce((sum, s) => sum + (s.duration || 0), 0);
|
|
1833
|
-
|
|
1834
1888
|
TEST_LOCAL_STATE.status = "compiled";
|
|
1835
1889
|
TEST_LOCAL_STATE.compilationResult = {
|
|
1836
1890
|
success: true,
|
|
@@ -1846,7 +1900,7 @@ app.post("/workspace/test-local/compile", async (req, res) => {
|
|
|
1846
1900
|
language: meta.language,
|
|
1847
1901
|
buildTool: meta.buildTool,
|
|
1848
1902
|
duration: totalDuration,
|
|
1849
|
-
stdout:
|
|
1903
|
+
stdout: stdoutOutput
|
|
1850
1904
|
});
|
|
1851
1905
|
} catch (err) {
|
|
1852
1906
|
console.error("[TEST-LOCAL] Compilation failed:", err.message);
|
|
@@ -2367,8 +2421,9 @@ app.post("/workspace/safe-patch", async (req, res) => {
|
|
|
2367
2421
|
incidentId: incidentId || null
|
|
2368
2422
|
});
|
|
2369
2423
|
|
|
2370
|
-
// Save backup files to
|
|
2371
|
-
const
|
|
2424
|
+
// Save backup files to NFS Filestore for persistence across container restarts
|
|
2425
|
+
const backupsBase = fs.existsSync(NFS_BACKUPS_DIR) ? path.join(NFS_BACKUPS_DIR, '.deepdebug-backups') : path.join(os.tmpdir(), 'deepdebug-backups');
|
|
2426
|
+
const backupDir = path.join(backupsBase, backupId);
|
|
2372
2427
|
try {
|
|
2373
2428
|
await fsPromises.mkdir(backupDir, { recursive: true });
|
|
2374
2429
|
for (const file of backupFiles) {
|
|
@@ -5858,14 +5913,24 @@ app.post("/workspace/test-local/run-single", async (req, res) => {
|
|
|
5858
5913
|
try {
|
|
5859
5914
|
const meta = await detectProject(wsRoot);
|
|
5860
5915
|
|
|
5916
|
+
// Build JAVA_HOME env (same fix as compile endpoint)
|
|
5917
|
+
const javaHome = process.env.JAVA_HOME || "/usr/local/java-home";
|
|
5918
|
+
const mavenEnv = {
|
|
5919
|
+
...process.env,
|
|
5920
|
+
JAVA_HOME: javaHome,
|
|
5921
|
+
PATH: `${javaHome}/bin:${process.env.PATH}`
|
|
5922
|
+
};
|
|
5923
|
+
|
|
5861
5924
|
let result;
|
|
5862
5925
|
|
|
5863
5926
|
if (meta.buildTool === 'maven') {
|
|
5864
|
-
//
|
|
5865
|
-
|
|
5927
|
+
// Prefer ./mvnw wrapper, pass JAVA_HOME env
|
|
5928
|
+
const hasMvnw = fs.existsSync(path.join(wsRoot, 'mvnw'));
|
|
5929
|
+
const mvnCmd = hasMvnw ? './mvnw' : 'mvn';
|
|
5930
|
+
result = await run(mvnCmd, ['test', `-Dtest=${resolvedClass}`, '-DskipTests=false'], wsRoot, 3 * 60 * 1000, mavenEnv);
|
|
5866
5931
|
} else if (meta.buildTool === 'gradle') {
|
|
5867
5932
|
const gradleCmd = fs.existsSync(path.join(wsRoot, 'gradlew')) ? './gradlew' : 'gradle';
|
|
5868
|
-
result = await run(gradleCmd, ['test', '--tests', `*.${resolvedClass}`], wsRoot);
|
|
5933
|
+
result = await run(gradleCmd, ['test', '--tests', `*.${resolvedClass}`], wsRoot, 3 * 60 * 1000, mavenEnv);
|
|
5869
5934
|
} else if (meta.language === 'node') {
|
|
5870
5935
|
result = await run('npx', ['jest', resolvedClass, '--no-coverage'], wsRoot);
|
|
5871
5936
|
} else {
|
|
@@ -5931,6 +5996,320 @@ app.post("/workspace/test-local/run-single", async (req, res) => {
|
|
|
5931
5996
|
}
|
|
5932
5997
|
});
|
|
5933
5998
|
|
|
5999
|
+
// ============================================
|
|
6000
|
+
// SPRINT 1 - S1-T4: Session Isolation via Git Worktree
|
|
6001
|
+
// ============================================
|
|
6002
|
+
|
|
6003
|
+
/**
|
|
6004
|
+
* POST /workspace/session/create
|
|
6005
|
+
*
|
|
6006
|
+
* Creates an isolated git worktree for a session.
|
|
6007
|
+
* Each session gets its own directory and branch - zero collision between parallel sessions.
|
|
6008
|
+
*
|
|
6009
|
+
* Body: { sessionId, branchName, tenantId }
|
|
6010
|
+
* Response: { ok, sessionId, worktreePath, branchName }
|
|
6011
|
+
*/
|
|
6012
|
+
app.post("/workspace/session/create", async (req, res) => {
|
|
6013
|
+
const wsRoot = resolveWorkspaceRoot(req);
|
|
6014
|
+
if (!wsRoot) return res.status(400).json({ ok: false, error: "workspace not set" });
|
|
6015
|
+
|
|
6016
|
+
const { sessionId, branchName, tenantId } = req.body || {};
|
|
6017
|
+
if (!sessionId) return res.status(400).json({ ok: false, error: "sessionId is required" });
|
|
6018
|
+
|
|
6019
|
+
const safeBranch = (branchName || `session-${sessionId}`).replace(/[^a-zA-Z0-9/_-]/g, "-").toLowerCase();
|
|
6020
|
+
const tenantPart = tenantId || path.basename(path.dirname(wsRoot));
|
|
6021
|
+
|
|
6022
|
+
// Sessions dir: /mnt/workspaces/{tenantId}/sessions/{sessionId}
|
|
6023
|
+
const sessionsBase = path.join(path.dirname(wsRoot), "sessions");
|
|
6024
|
+
const worktreePath = path.join(sessionsBase, sessionId);
|
|
6025
|
+
|
|
6026
|
+
console.log(`[SESSION] Creating worktree: ${worktreePath} (branch: ${safeBranch})`);
|
|
6027
|
+
|
|
6028
|
+
try {
|
|
6029
|
+
await fsPromises.mkdir(sessionsBase, { recursive: true });
|
|
6030
|
+
|
|
6031
|
+
// Remove stale worktree if exists
|
|
6032
|
+
if (fs.existsSync(worktreePath)) {
|
|
6033
|
+
console.log(`[SESSION] Removing stale worktree: ${worktreePath}`);
|
|
6034
|
+
await execAsync(`git worktree remove --force "${worktreePath}"`, { cwd: wsRoot }).catch(() => {});
|
|
6035
|
+
await fsPromises.rm(worktreePath, { recursive: true, force: true }).catch(() => {});
|
|
6036
|
+
}
|
|
6037
|
+
|
|
6038
|
+
// Create new worktree with new branch
|
|
6039
|
+
await execAsync(`git worktree add "${worktreePath}" -b "${safeBranch}"`, { cwd: wsRoot });
|
|
6040
|
+
|
|
6041
|
+
console.log(`[SESSION] Worktree created: ${worktreePath}`);
|
|
6042
|
+
return res.json({
|
|
6043
|
+
ok: true,
|
|
6044
|
+
sessionId,
|
|
6045
|
+
worktreePath,
|
|
6046
|
+
branchName: safeBranch
|
|
6047
|
+
});
|
|
6048
|
+
} catch (err) {
|
|
6049
|
+
console.error(`[SESSION] Failed to create worktree: ${err.message}`);
|
|
6050
|
+
// Fallback: return main workspace path (no isolation but no crash)
|
|
6051
|
+
return res.status(500).json({
|
|
6052
|
+
ok: false,
|
|
6053
|
+
sessionId,
|
|
6054
|
+
worktreePath: wsRoot,
|
|
6055
|
+
branchName: safeBranch,
|
|
6056
|
+
error: err.message,
|
|
6057
|
+
fallback: true
|
|
6058
|
+
});
|
|
6059
|
+
}
|
|
6060
|
+
});
|
|
6061
|
+
|
|
6062
|
+
/**
|
|
6063
|
+
* DELETE /workspace/session/:sessionId
|
|
6064
|
+
*
|
|
6065
|
+
* Removes a session worktree and its branch.
|
|
6066
|
+
* Called after PR is created or session times out.
|
|
6067
|
+
*
|
|
6068
|
+
* Query: ?tenantId=xxx
|
|
6069
|
+
*/
|
|
6070
|
+
app.delete("/workspace/session/:sessionId", async (req, res) => {
|
|
6071
|
+
const wsRoot = resolveWorkspaceRoot(req);
|
|
6072
|
+
if (!wsRoot) return res.status(400).json({ ok: false, error: "workspace not set" });
|
|
6073
|
+
|
|
6074
|
+
const { sessionId } = req.params;
|
|
6075
|
+
const sessionsBase = path.join(path.dirname(wsRoot), "sessions");
|
|
6076
|
+
const worktreePath = path.join(sessionsBase, sessionId);
|
|
6077
|
+
|
|
6078
|
+
console.log(`[SESSION] Removing worktree: ${worktreePath}`);
|
|
6079
|
+
|
|
6080
|
+
try {
|
|
6081
|
+
if (fs.existsSync(worktreePath)) {
|
|
6082
|
+
await execAsync(`git worktree remove --force "${worktreePath}"`, { cwd: wsRoot }).catch(() => {});
|
|
6083
|
+
await fsPromises.rm(worktreePath, { recursive: true, force: true }).catch(() => {});
|
|
6084
|
+
}
|
|
6085
|
+
|
|
6086
|
+
// Clean up branch
|
|
6087
|
+
const branchName = `session-${sessionId}`;
|
|
6088
|
+
await execAsync(`git branch -D "${branchName}"`, { cwd: wsRoot }).catch(() => {});
|
|
6089
|
+
|
|
6090
|
+
console.log(`[SESSION] Worktree removed: ${worktreePath}`);
|
|
6091
|
+
return res.json({ ok: true, sessionId, removed: worktreePath });
|
|
6092
|
+
} catch (err) {
|
|
6093
|
+
console.error(`[SESSION] Failed to remove worktree: ${err.message}`);
|
|
6094
|
+
return res.status(500).json({ ok: false, sessionId, error: err.message });
|
|
6095
|
+
}
|
|
6096
|
+
});
|
|
6097
|
+
|
|
6098
|
+
/**
|
|
6099
|
+
* GET /workspace/session/:sessionId/status
|
|
6100
|
+
*
|
|
6101
|
+
* Returns status of a session worktree.
|
|
6102
|
+
*/
|
|
6103
|
+
app.get("/workspace/session/:sessionId/status", async (req, res) => {
|
|
6104
|
+
const wsRoot = resolveWorkspaceRoot(req);
|
|
6105
|
+
if (!wsRoot) return res.status(400).json({ ok: false, error: "workspace not set" });
|
|
6106
|
+
|
|
6107
|
+
const { sessionId } = req.params;
|
|
6108
|
+
const sessionsBase = path.join(path.dirname(wsRoot), "sessions");
|
|
6109
|
+
const worktreePath = path.join(sessionsBase, sessionId);
|
|
6110
|
+
const exists = fs.existsSync(worktreePath);
|
|
6111
|
+
|
|
6112
|
+
return res.json({
|
|
6113
|
+
ok: true,
|
|
6114
|
+
sessionId,
|
|
6115
|
+
worktreePath,
|
|
6116
|
+
exists
|
|
6117
|
+
});
|
|
6118
|
+
});
|
|
6119
|
+
|
|
6120
|
+
|
|
6121
|
+
// ============================================
// FASE 5 - ENV MANAGER + PREVIEW TUNNEL
// State: active vibe environments per sessionId
// ============================================
const vibeEnvs = new Map(); // sessionId -> { port, pid, serviceId, tunnelUrl, tunnelPid, worktreePath, status }

/**
 * Find the first TCP port in [min, max] not already claimed by an
 * active vibe environment.
 *
 * NOTE(review): this only checks ports tracked in `vibeEnvs` — it does
 * not probe the OS, so a port held by an unrelated process can still be
 * returned. Confirm whether OS-level probing is needed.
 *
 * @param {number} [min=9000] - Inclusive lower bound of the search range.
 * @param {number} [max=9999] - Inclusive upper bound of the search range.
 * @returns {number} A port not used by any tracked environment.
 * @throws {Error} When every port in [min, max] is taken.
 */
function findFreePort(min = 9000, max = 9999) {
  const used = new Set([...vibeEnvs.values()].map(e => e.port));
  for (let p = min; p <= max; p++) {
    if (!used.has(p)) return p;
  }
  // FIX: report the actual range searched instead of the hardcoded
  // "9000-9999", which was wrong whenever callers pass custom bounds.
  throw new Error(`No free ports available in range ${min}-${max}`);
}
|
|
6134
|
+
|
|
6135
|
+
/**
 * POST /workspace/vibe/env/start
 *
 * Compiles (if needed) and starts the service on a dynamic port.
 * Body: { sessionId, worktreePath }
 * Response: { ok, port, serviceId, readyUrl }
 *
 * Idempotent per session: if an environment is already tracked for this
 * sessionId, its existing port/serviceId are returned with
 * `alreadyRunning: true` instead of starting a second process.
 * Currently supports Java (pre-built Spring Boot JAR in target/) and
 * Node projects, as reported by detectProject().
 */
app.post("/workspace/vibe/env/start", async (req, res) => {
  const { sessionId, worktreePath } = req.body || {};
  if (!sessionId || !worktreePath) {
    return res.status(400).json({ ok: false, error: "sessionId and worktreePath required" });
  }
  // Already running for this session — return the existing environment.
  if (vibeEnvs.has(sessionId)) {
    const env = vibeEnvs.get(sessionId);
    return res.json({ ok: true, port: env.port, serviceId: env.serviceId, readyUrl: `http://localhost:${env.port}`, alreadyRunning: true });
  }
  try {
    // Dynamic port from the 9000-9999 pool (tracked in vibeEnvs).
    const port = findFreePort();
    const serviceId = `vibe-${sessionId}`;
    const meta = await detectProject(worktreePath);
    let startConfig;
    if (meta.language === 'java') {
      // Java: run a pre-built JAR from target/. Compilation is NOT done
      // here — the caller must have run compile_project beforehand.
      const targetDir = path.join(worktreePath, 'target');
      let jarPath = null;
      if (fs.existsSync(targetDir)) {
        // Pick the first runnable JAR, skipping Spring Boot's *.original
        // backup and sources/javadoc artifacts. NOTE(review): if target/
        // contains several runnable JARs, "first by directory order" may
        // not be the intended one — confirm.
        const jars = fs.readdirSync(targetDir).filter(f =>
          f.endsWith('.jar') && !f.endsWith('.original') &&
          !f.includes('-sources') && !f.includes('-javadoc'));
        if (jars.length > 0) jarPath = path.join(targetDir, jars[0]);
      }
      if (!jarPath) {
        return res.status(400).json({ ok: false, error: "No JAR found. Run compile_project first." });
      }
      // Strip inherited SPRING_* variables so the agent's own Spring
      // config cannot leak into the child, then pin the port via both
      // SERVER_PORT and PORT (and the --server.port flag below).
      const cleanEnv = { ...process.env };
      Object.keys(cleanEnv).forEach(k => { if (k.startsWith('SPRING_')) delete cleanEnv[k]; });
      cleanEnv.SERVER_PORT = String(port);
      cleanEnv.PORT = String(port);
      startConfig = { command: 'java', args: ['-jar', jarPath, `--server.port=${port}`], cwd: worktreePath, port, env: cleanEnv };
    } else if (meta.language === 'node' || meta.language === 'javascript') {
      // Node: assumes index.js is the entrypoint and that the app reads
      // PORT from the environment. TODO(review): honor package.json
      // "main"/"scripts.start" instead of hardcoding index.js?
      startConfig = { command: 'node', args: ['index.js'], cwd: worktreePath, port, env: { ...process.env, PORT: String(port) } };
    } else {
      return res.status(400).json({ ok: false, error: `Unsupported language: ${meta.language}` });
    }
    // Register the environment BEFORE starting so the port is reserved
    // (findFreePort reads vibeEnvs) while processManager.start is awaited.
    vibeEnvs.set(sessionId, { port, serviceId, tunnelUrl: null, tunnelPid: null, worktreePath, status: 'starting' });
    await processManager.start(serviceId, startConfig);
    vibeEnvs.get(sessionId).status = 'running';
    console.log(`[VIBE-ENV] Started ${serviceId} on port ${port}`);
    return res.json({ ok: true, port, serviceId, readyUrl: `http://localhost:${port}` });
  } catch (err) {
    console.error(`[VIBE-ENV] Start failed: ${err.message}`);
    // Roll back the reservation so a retry can start fresh.
    vibeEnvs.delete(sessionId);
    return res.status(500).json({ ok: false, error: err.message });
  }
});
|
|
6189
|
+
|
|
6190
|
+
/**
 * POST /workspace/vibe/env/stop
 *
 * Stops the service and tunnel for a session.
 * Body: { sessionId }
 */
app.post("/workspace/vibe/env/stop", async (req, res) => {
  const { sessionId } = req.body || {};
  if (!sessionId) {
    return res.status(400).json({ ok: false, error: "sessionId required" });
  }

  const entry = vibeEnvs.get(sessionId);
  if (!entry) {
    return res.json({ ok: true, message: "Not running" });
  }

  try {
    // Stop the managed service first, then the tunnel process (if any).
    await processManager.stop(entry.serviceId);
    if (entry.tunnelPid) {
      try {
        process.kill(entry.tunnelPid, 'SIGTERM');
      } catch (_) {
        // Tunnel process already gone — nothing to clean up.
      }
    }
    vibeEnvs.delete(sessionId);
    console.log(`[VIBE-ENV] Stopped ${entry.serviceId}`);
    return res.json({ ok: true });
  } catch (err) {
    return res.status(500).json({ ok: false, error: err.message });
  }
});
|
|
6213
|
+
|
|
6214
|
+
/**
 * GET /workspace/vibe/env/status/:sessionId
 *
 * Returns current status of a vibe environment.
 * Reports 'stopped' when no environment is tracked for the session.
 */
app.get("/workspace/vibe/env/status/:sessionId", async (req, res) => {
  const { sessionId } = req.params;
  const entry = vibeEnvs.get(sessionId);

  if (!entry) {
    return res.json({ ok: true, status: 'stopped', sessionId });
  }

  return res.json({
    ok: true,
    sessionId,
    status: entry.status,
    port: entry.port,
    serviceId: entry.serviceId,
    tunnelUrl: entry.tunnelUrl,
    readyUrl: `http://localhost:${entry.port}`
  });
});
|
|
6225
|
+
|
|
6226
|
+
/**
 * POST /workspace/vibe/tunnel/start
 *
 * Starts a cloudflared tunnel for a vibe environment.
 * Body: { sessionId }
 * Response: { ok, tunnelUrl }
 *
 * Downloads cloudflared binary automatically if not present.
 */
app.post("/workspace/vibe/tunnel/start", async (req, res) => {
  const { sessionId } = req.body || {};
  if (!sessionId) return res.status(400).json({ ok: false, error: "sessionId required" });
  const env = vibeEnvs.get(sessionId);
  if (!env) return res.status(404).json({ ok: false, error: "Environment not running. Call /vibe/env/start first." });
  if (env.tunnelUrl) return res.json({ ok: true, tunnelUrl: env.tunnelUrl, alreadyRunning: true });

  // Declared outside the try so the catch block can clean it up.
  let tunnelProcess = null;
  try {
    // Resolve cloudflared binary path under ~/.deepdebug/bin.
    const cfDir = path.join(process.env.HOME || '/tmp', '.deepdebug', 'bin');
    const cfPath = path.join(cfDir, 'cloudflared');
    // Download the binary on first use (matching this host's OS/arch).
    if (!fs.existsSync(cfPath)) {
      fs.mkdirSync(cfDir, { recursive: true });
      const arch = process.arch === 'arm64' ? 'arm64' : 'amd64';
      const platform = process.platform === 'darwin' ? 'darwin' : 'linux';
      const url = `https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-${platform}-${arch}`;
      console.log(`[VIBE-TUNNEL] Downloading cloudflared from ${url}...`);
      await execAsync(`curl -fsSL "${url}" -o "${cfPath}" && chmod +x "${cfPath}"`);
      console.log(`[VIBE-TUNNEL] cloudflared downloaded to ${cfPath}`);
    }
    // Start an ephemeral (trycloudflare.com) tunnel to the env's port.
    tunnelProcess = spawn(cfPath, ['tunnel', '--url', `http://localhost:${env.port}`], {
      stdio: ['ignore', 'pipe', 'pipe'],
      detached: false
    });
    env.tunnelPid = tunnelProcess.pid;
    env.status = 'tunneling';
    // Scan both stdout and stderr for the assigned *.trycloudflare.com URL.
    const tunnelUrl = await new Promise((resolve, reject) => {
      const timeout = setTimeout(() => reject(new Error('Tunnel URL not found within 30s')), 30000);
      const handler = (data) => {
        const text = data.toString();
        const match = text.match(/https:\/\/[a-zA-Z0-9-]+\.trycloudflare\.com/);
        if (match) {
          clearTimeout(timeout);
          tunnelProcess.stdout.off('data', handler);
          tunnelProcess.stderr.off('data', handler);
          resolve(match[0]);
        }
      };
      tunnelProcess.stdout.on('data', handler);
      tunnelProcess.stderr.on('data', handler);
      tunnelProcess.on('error', (err) => { clearTimeout(timeout); reject(err); });
    });
    env.tunnelUrl = tunnelUrl;
    console.log(`[VIBE-TUNNEL] Tunnel started: ${tunnelUrl}`);
    return res.json({ ok: true, tunnelUrl });
  } catch (err) {
    console.error(`[VIBE-TUNNEL] Failed: ${err.message}`);
    // FIX: previously a failure (URL timeout / spawn error) leaked the
    // cloudflared process and left env.tunnelPid/status pointing at it.
    // Kill the orphan and restore the env to plain 'running' state.
    if (tunnelProcess) {
      try { tunnelProcess.kill('SIGTERM'); } catch (_) {}
    }
    env.tunnelPid = null;
    env.status = 'running';
    return res.status(500).json({ ok: false, error: err.message });
  }
});
|
|
6287
|
+
|
|
6288
|
+
/**
 * DELETE /workspace/vibe/tunnel/:sessionId
 *
 * Stops only the tunnel (keeps service running).
 */
app.delete("/workspace/vibe/tunnel/:sessionId", async (req, res) => {
  const { sessionId } = req.params;
  const entry = vibeEnvs.get(sessionId);

  if (!entry || !entry.tunnelPid) {
    return res.json({ ok: true, message: "No tunnel running" });
  }

  try {
    process.kill(entry.tunnelPid, 'SIGTERM');
    // Clear tunnel bookkeeping; the underlying service keeps running.
    entry.tunnelPid = null;
    entry.tunnelUrl = null;
    entry.status = 'running';
    return res.json({ ok: true });
  } catch (err) {
    return res.status(500).json({ ok: false, error: err.message });
  }
});
|
|
6307
|
+
|
|
6308
|
+
// ============================================
// VERCEL PROXY ROUTES
// ============================================
// Mounts the /vercel/* proxy endpoints implemented in vercel-proxy.js.
registerVercelRoutes(app);
|
|
6312
|
+
|
|
5934
6313
|
// ============================================
|
|
5935
6314
|
// START SERVER
|
|
5936
6315
|
|
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* vercel-proxy.js
|
|
3
|
+
*
|
|
4
|
+
* Proxy module for Vercel API calls from the Local Agent.
|
|
5
|
+
* The Gateway (Cloud Run) cannot reach api.vercel.com directly due to egress rules.
|
|
6
|
+
* All Vercel API calls are routed through the Agent which has unrestricted outbound access.
|
|
7
|
+
*
|
|
8
|
+
* Endpoints registered in server.js:
|
|
9
|
+
* POST /vercel/projects/ensure - Create project if not exists, return projectId
|
|
10
|
+
* POST /vercel/deploy - Trigger a deployment, return { deploymentId, previewUrl }
|
|
11
|
+
* GET /vercel/deploy/:id/status - Poll deployment status
|
|
12
|
+
* GET /vercel/projects - List all projects
|
|
13
|
+
*
|
|
14
|
+
* All endpoints require { token } in body or query param.
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
// Base URL for all Vercel REST API calls.
const VERCEL_API = "https://api.vercel.com";

/**
 * Make an authenticated request to the Vercel API.
 *
 * @param {string} method - HTTP method (GET, POST, ...).
 * @param {string} path - API path beginning with '/'.
 * @param {string} token - Vercel bearer token.
 * @param {object|null} [body=null] - JSON body for write requests.
 * @returns {Promise<object>} Parsed JSON response (or { raw } when the
 *   body is not valid JSON).
 * @throws {Error} On any non-2xx response, with status and message.
 */
async function vercelRequest(method, path, token, body = null) {
  const options = {
    method,
    headers: {
      "Authorization": `Bearer ${token}`,
      "Content-Type": "application/json"
    }
  };
  if (body) options.body = JSON.stringify(body);

  const response = await fetch(`${VERCEL_API}${path}`, options);
  const raw = await response.text();

  // Tolerate non-JSON bodies (e.g. plain-text errors from proxies).
  let payload;
  try {
    payload = JSON.parse(raw);
  } catch {
    payload = { raw };
  }

  if (!response.ok) {
    const errMsg = payload?.error?.message || payload?.message || raw;
    throw new Error(`Vercel API ${method} ${path} -> ${response.status}: ${errMsg}`);
  }

  return payload;
}
|
|
49
|
+
|
|
50
|
+
/**
 * Ensure a Vercel project exists.
 * Creates it if missing, returns the project ID.
 *
 * @param {string} token - Vercel bearer token.
 * @param {string} projectName - Project name (also used for lookup).
 * @param {string} repoOwner - GitHub repository owner.
 * @param {string} repoName - GitHub repository name.
 * @param {string} [framework] - Optional Vercel framework preset.
 * @returns {Promise<string>} The project ID.
 */
async function ensureProject(token, projectName, repoOwner, repoName, framework) {
  // Look the project up by name first; a 404 means it doesn't exist yet.
  try {
    const existing = await vercelRequest("GET", `/v9/projects/${projectName}`, token);
    console.log(`[Vercel] Project exists: ${projectName} (${existing.id})`);
    return existing.id;
  } catch (err) {
    // Anything other than "not found" is a real failure — propagate it.
    // (vercelRequest embeds the HTTP status code in the error message.)
    if (!err.message.includes("404")) throw err;
  }

  console.log(`[Vercel] Creating project: ${projectName}`);
  const payload = {
    name: projectName,
    gitRepository: {
      type: "github",
      repo: `${repoOwner}/${repoName}`
    }
  };
  if (framework) payload.framework = framework;

  const created = await vercelRequest("POST", "/v9/projects", token, payload);
  console.log(`[Vercel] Project created: ${projectName} (${created.id})`);
  return created.id;
}
|
|
79
|
+
|
|
80
|
+
/**
 * Trigger a deployment for a project.
 *
 * @param {string} token - Vercel bearer token.
 * @param {string} projectId - Vercel project ID (echoed in the result).
 * @param {string} projectName - Project name used as the deployment name.
 * @param {string|number} repoId - GitHub repository ID.
 * @param {string} [branch] - Git ref to deploy (defaults to "main").
 * @returns {Promise<{deploymentId: string, previewUrl: string, status: string, projectId: string, projectName: string}>}
 */
async function triggerDeploy(token, projectId, projectName, repoId, branch) {
  console.log(`[Vercel] Triggering deploy: project=${projectName} branch=${branch}`);

  const deployment = await vercelRequest("POST", "/v13/deployments", token, {
    name: projectName,
    target: "preview",
    gitSource: {
      type: "github",
      repoId: String(repoId),
      ref: branch || "main"
    }
  });

  const previewUrl = `https://${deployment.url}`;
  console.log(`[Vercel] Deploy triggered: ${deployment.id} -> ${previewUrl}`);

  return {
    deploymentId: deployment.id,
    previewUrl,
    status: deployment.readyState || "INITIALIZING",
    projectId,
    projectName
  };
}
|
|
109
|
+
|
|
110
|
+
/**
 * Get deployment status.
 *
 * @param {string} token - Vercel bearer token.
 * @param {string} deploymentId - Deployment to inspect.
 * @returns {Promise<{deploymentId: string, status: string, previewUrl: string, errorMessage: string|null, ready: boolean, failed: boolean}>}
 */
async function getDeployStatus(token, deploymentId) {
  const deployment = await vercelRequest("GET", `/v13/deployments/${deploymentId}`, token);
  const state = deployment.readyState;

  return {
    deploymentId: deployment.id,
    status: state,
    previewUrl: `https://${deployment.url}`,
    errorMessage: deployment.errorMessage || null,
    ready: state === "READY",
    failed: state === "ERROR" || state === "CANCELED"
  };
}
|
|
124
|
+
|
|
125
|
+
/**
 * List all projects.
 *
 * Fetches up to 20 projects and maps each to a compact summary,
 * including the latest deployment URL when one exists.
 *
 * @param {string} token - Vercel bearer token.
 * @returns {Promise<Array<{id: string, name: string, framework: string, updatedAt: number, latestDeploy: string|null}>>}
 */
async function listProjects(token) {
  const data = await vercelRequest("GET", "/v9/projects?limit=20", token);
  const projects = data.projects || [];

  return projects.map((project) => {
    const latestUrl = project.latestDeployments?.[0]?.url;
    return {
      id: project.id,
      name: project.name,
      framework: project.framework,
      updatedAt: project.updatedAt,
      latestDeploy: latestUrl ? `https://${latestUrl}` : null
    };
  });
}
|
|
140
|
+
|
|
141
|
+
/**
 * Register Vercel proxy routes on the Express app.
 *
 * The Gateway cannot reach api.vercel.com directly, so these routes let
 * it proxy Vercel calls through the Agent. Every route requires a
 * Vercel token in the body (POST) or query string (GET).
 *
 * @param {import('express').Application} app
 */
export function registerVercelRoutes(app) {

  /**
   * POST /vercel/projects/ensure
   * Body: { token, projectName, repoOwner, repoName, framework? }
   * Response: { ok, projectId }
   */
  app.post("/vercel/projects/ensure", async (req, res) => {
    const { token, projectName, repoOwner, repoName, framework } = req.body || {};
    if (!token || !projectName || !repoOwner || !repoName) {
      return res.status(400).json({ ok: false, error: "token, projectName, repoOwner, repoName required" });
    }
    try {
      const projectId = await ensureProject(token, projectName, repoOwner, repoName, framework);
      return res.json({ ok: true, projectId, projectName });
    } catch (err) {
      console.error(`[Vercel] ensureProject failed: ${err.message}`);
      return res.status(500).json({ ok: false, error: err.message });
    }
  });

  /**
   * POST /vercel/deploy
   * Body: { token, projectId, projectName, repoOwner, repoName, branch?, repoId? }
   * Note: repoId is fetched automatically from GitHub API if not provided.
   * Response: { ok, deploymentId, previewUrl, status }
   */
  app.post("/vercel/deploy", async (req, res) => {
    const { token, projectId, projectName, repoOwner, repoName, branch, repoId: providedRepoId } = req.body || {};
    if (!token || !projectId || !projectName || !repoOwner || !repoName) {
      return res.status(400).json({ ok: false, error: "token, projectId, projectName, repoOwner, repoName required" });
    }
    try {
      // FIX: honor a caller-supplied repoId (the route doc always promised
      // this) and only hit the GitHub API when it is missing.
      // NOTE(review): the GitHub call is unauthenticated — it is subject
      // to low rate limits and cannot see private repos; confirm whether a
      // GitHub token should be threaded through here.
      let repoId = providedRepoId;
      if (!repoId) {
        const ghRes = await fetch(`https://api.github.com/repos/${repoOwner}/${repoName}`);
        if (!ghRes.ok) throw new Error(`GitHub repo not found: ${repoOwner}/${repoName}`);
        const ghData = await ghRes.json();
        repoId = ghData.id;
      }
      const result = await triggerDeploy(token, projectId, projectName, repoId, branch);
      return res.json({ ok: true, ...result });
    } catch (err) {
      console.error(`[Vercel] deploy failed: ${err.message}`);
      return res.status(500).json({ ok: false, error: err.message });
    }
  });

  /**
   * GET /vercel/deploy/:deploymentId/status?token=xxx
   * Response: { ok, deploymentId, status, previewUrl, ready, failed }
   */
  app.get("/vercel/deploy/:deploymentId/status", async (req, res) => {
    const { deploymentId } = req.params;
    const token = req.query.token;
    if (!token) return res.status(400).json({ ok: false, error: "token query param required" });
    try {
      const result = await getDeployStatus(token, deploymentId);
      return res.json({ ok: true, ...result });
    } catch (err) {
      console.error(`[Vercel] status check failed: ${err.message}`);
      return res.status(500).json({ ok: false, error: err.message });
    }
  });

  /**
   * GET /vercel/projects?token=xxx
   * Response: { ok, projects[] }
   */
  app.get("/vercel/projects", async (req, res) => {
    const token = req.query.token;
    if (!token) return res.status(400).json({ ok: false, error: "token query param required" });
    try {
      const projects = await listProjects(token);
      return res.json({ ok: true, projects });
    } catch (err) {
      console.error(`[Vercel] listProjects failed: ${err.message}`);
      return res.status(500).json({ ok: false, error: err.message });
    }
  });

  console.log("[Vercel] Proxy routes registered: /vercel/projects/ensure, /vercel/deploy, /vercel/deploy/:id/status, /vercel/projects");
}
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
const { spawn } = require('child_process');

// sessionId -> { process, url, port } for tunnels that are fully up.
const activeTunnels = new Map();
// sessionId -> Promise<string> for tunnels still starting up; used to
// deduplicate concurrent startTunnel calls for the same session.
const pendingTunnels = new Map();

/**
 * Start a cloudflared quick tunnel exposing http://localhost:port.
 *
 * Resolves with the public *.trycloudflare.com URL once cloudflared
 * prints it. Idempotent per session: an already-established tunnel's
 * URL is returned immediately, and concurrent calls while a tunnel is
 * still starting share the same in-flight promise (FIX: previously each
 * concurrent call spawned its own cloudflared process).
 *
 * @param {string} sessionId - Session the tunnel belongs to.
 * @param {number} port - Local port to expose.
 * @returns {Promise<string>} The public tunnel URL.
 * @throws {Error} If cloudflared cannot be spawned or no URL appears
 *   within 30 seconds.
 */
async function startTunnel(sessionId, port) {
  // Tunnel already up for this session — reuse it.
  const existing = activeTunnels.get(sessionId);
  if (existing && existing.url) {
    return existing.url;
  }

  // Tunnel currently starting — share the in-flight promise.
  const inFlight = pendingTunnels.get(sessionId);
  if (inFlight) {
    return inFlight;
  }

  const startup = new Promise((resolve, reject) => {
    const proc = spawn('cloudflared', ['tunnel', '--url', `http://localhost:${port}`], {
      stdio: ['ignore', 'pipe', 'pipe']
    });

    let tunnelUrl = null;

    const timeout = setTimeout(() => {
      if (!tunnelUrl) {
        proc.kill();
        reject(new Error('Tunnel startup timeout after 30s'));
      }
    }, 30000);

    // cloudflared prints the assigned URL on its output streams; scan
    // both until it appears.
    const onData = (data) => {
      const match = data.toString().match(/https:\/\/[a-z0-9-]+\.trycloudflare\.com/);
      if (match && !tunnelUrl) {
        tunnelUrl = match[0];
        clearTimeout(timeout);
        // FIX: stop scanning once the URL is known, instead of keeping
        // the listeners attached for the tunnel's whole lifetime.
        proc.stdout.off('data', onData);
        proc.stderr.off('data', onData);
        activeTunnels.set(sessionId, { process: proc, url: tunnelUrl, port });
        console.log(`[Tunnel] Ready: ${tunnelUrl} (session: ${sessionId})`);
        resolve(tunnelUrl);
      }
    };

    proc.stdout.on('data', onData);
    proc.stderr.on('data', onData);

    proc.on('exit', (code) => {
      activeTunnels.delete(sessionId);
      console.log(`[Tunnel] Process exited for session ${sessionId} (code: ${code})`);
    });

    proc.on('error', (err) => {
      clearTimeout(timeout);
      reject(err);
    });
  });

  pendingTunnels.set(sessionId, startup);
  try {
    return await startup;
  } finally {
    pendingTunnels.delete(sessionId);
  }
}
|
|
50
|
+
|
|
51
|
+
/**
 * Stop the tunnel for a session, if one is running.
 *
 * @param {string} sessionId - Session whose tunnel should be stopped.
 * @returns {boolean} true if a tunnel was found and stopped, else false.
 */
function stopTunnel(sessionId) {
  const tunnel = activeTunnels.get(sessionId);
  if (!tunnel) {
    return false;
  }
  tunnel.process.kill('SIGTERM');
  activeTunnels.delete(sessionId);
  console.log(`[Tunnel] Stopped: ${sessionId}`);
  return true;
}
|
|
61
|
+
|
|
62
|
+
/**
 * Look up tunnel state for a session.
 *
 * @param {string} sessionId
 * @returns {{process: object, url: string, port: number}|null} The
 *   tunnel record, or null when no tunnel is tracked for the session.
 */
function getTunnelInfo(sessionId) {
  const entry = activeTunnels.get(sessionId);
  return entry ?? null;
}
|
|
65
|
+
|
|
66
|
+
/**
 * Count sessions that currently have a tracked tunnel.
 *
 * @returns {number} Number of active tunnel entries.
 */
function getActiveTunnelCount() {
  return activeTunnels.size;
}
|
|
69
|
+
|
|
70
|
+
module.exports = { startTunnel, stopTunnel, getTunnelInfo, getActiveTunnelCount };
|