@context-engine-bridge/context-engine-mcp-bridge 0.0.25 → 0.0.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +3 -1
- package/src/connectCli.js +28 -107
- package/src/mcpServer.js +5 -36
- package/src/uploader.js +323 -0
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@context-engine-bridge/context-engine-mcp-bridge",
|
|
3
|
-
"version": "0.0.
|
|
3
|
+
"version": "0.0.28",
|
|
4
4
|
"description": "Context Engine MCP bridge (http/stdio proxy combining indexer + memory servers)",
|
|
5
5
|
"bin": {
|
|
6
6
|
"ctxce": "bin/ctxce.js",
|
|
@@ -18,6 +18,8 @@
|
|
|
18
18
|
},
|
|
19
19
|
"dependencies": {
|
|
20
20
|
"@modelcontextprotocol/sdk": "^1.24.3",
|
|
21
|
+
"ignore": "^7.0.5",
|
|
22
|
+
"tar": "^7.5.9",
|
|
21
23
|
"zod": "^3.25.0"
|
|
22
24
|
},
|
|
23
25
|
"devDependencies": {
|
package/src/connectCli.js
CHANGED
|
@@ -1,12 +1,8 @@
|
|
|
1
1
|
import process from "node:process";
|
|
2
2
|
import path from "node:path";
|
|
3
3
|
import fs from "node:fs";
|
|
4
|
-
import { execSync, spawn } from "node:child_process";
|
|
5
|
-
import { fileURLToPath } from "node:url";
|
|
6
4
|
import { saveAuthEntry } from "./authConfig.js";
|
|
7
|
-
|
|
8
|
-
const __filename = fileURLToPath(import.meta.url);
|
|
9
|
-
const __dirname = path.dirname(__filename);
|
|
5
|
+
import { indexWorkspace } from "./uploader.js";
|
|
10
6
|
|
|
11
7
|
const SAAS_ENDPOINTS = {
|
|
12
8
|
uploadEndpoint: "https://dev.context-engine.ai/upload",
|
|
@@ -145,114 +141,31 @@ async function authenticateWithApiKey(apiKey) {
|
|
|
145
141
|
return entry;
|
|
146
142
|
}
|
|
147
143
|
|
|
148
|
-
function
|
|
149
|
-
const candidates = [
|
|
150
|
-
path.resolve(process.cwd(), "scripts", "standalone_upload_client.py"),
|
|
151
|
-
path.resolve(process.cwd(), "..", "scripts", "standalone_upload_client.py"),
|
|
152
|
-
path.resolve(__dirname, "..", "..", "scripts", "standalone_upload_client.py"),
|
|
153
|
-
];
|
|
154
|
-
|
|
155
|
-
for (const candidate of candidates) {
|
|
156
|
-
if (fs.existsSync(candidate)) {
|
|
157
|
-
return candidate;
|
|
158
|
-
}
|
|
159
|
-
}
|
|
160
|
-
|
|
161
|
-
return null;
|
|
162
|
-
}
|
|
163
|
-
|
|
164
|
-
function detectPython() {
|
|
165
|
-
for (const cmd of ["python3", "python"]) {
|
|
166
|
-
try {
|
|
167
|
-
execSync(`${cmd} --version`, { stdio: "ignore" });
|
|
168
|
-
return cmd;
|
|
169
|
-
} catch {
|
|
170
|
-
}
|
|
171
|
-
}
|
|
172
|
-
return null;
|
|
173
|
-
}
|
|
174
|
-
|
|
175
|
-
async function runUploadClient(workspace, sessionId, uploadClient, python) {
|
|
176
|
-
const env = {
|
|
177
|
-
...process.env,
|
|
178
|
-
REMOTE_UPLOAD_ENDPOINT: SAAS_ENDPOINTS.uploadEndpoint,
|
|
179
|
-
CTXCE_AUTH_BACKEND_URL: SAAS_ENDPOINTS.authBackendUrl,
|
|
180
|
-
CTXCE_SESSION_ID: sessionId,
|
|
181
|
-
HOST_ROOT: workspace,
|
|
182
|
-
CONTAINER_ROOT: "/work",
|
|
183
|
-
};
|
|
184
|
-
|
|
185
|
-
return new Promise((resolve) => {
|
|
186
|
-
const args = [
|
|
187
|
-
uploadClient,
|
|
188
|
-
"--path", workspace,
|
|
189
|
-
"--endpoint", SAAS_ENDPOINTS.uploadEndpoint,
|
|
190
|
-
"--force",
|
|
191
|
-
];
|
|
192
|
-
|
|
193
|
-
const proc = spawn(python, args, {
|
|
194
|
-
env,
|
|
195
|
-
stdio: ["ignore", "pipe", "pipe"],
|
|
196
|
-
cwd: workspace,
|
|
197
|
-
});
|
|
198
|
-
|
|
199
|
-
proc.stdout.on("data", (data) => {
|
|
200
|
-
const line = data.toString().trim();
|
|
201
|
-
if (line) {
|
|
202
|
-
console.error(`[upload] ${line}`);
|
|
203
|
-
}
|
|
204
|
-
});
|
|
205
|
-
|
|
206
|
-
proc.stderr.on("data", (data) => {
|
|
207
|
-
const line = data.toString().trim();
|
|
208
|
-
if (line) {
|
|
209
|
-
console.error(`[upload] ${line}`);
|
|
210
|
-
}
|
|
211
|
-
});
|
|
212
|
-
|
|
213
|
-
proc.on("close", (code) => {
|
|
214
|
-
resolve(code === 0);
|
|
215
|
-
});
|
|
216
|
-
|
|
217
|
-
proc.on("error", (err) => {
|
|
218
|
-
console.error(`[ctxce] Upload process error: ${err}`);
|
|
219
|
-
resolve(false);
|
|
220
|
-
});
|
|
221
|
-
});
|
|
222
|
-
}
|
|
223
|
-
|
|
224
|
-
async function triggerIndexing(workspace, sessionId) {
|
|
144
|
+
// Run one full indexing pass of `workspace` and report success/failure.
// `authEntry` may carry optional org_id/org_slug from the auth config.
// Logging goes to stderr (console.error) — presumably to keep stdout free
// for the stdio MCP protocol; TODO confirm against the bridge entry point.
async function triggerIndexing(workspace, sessionId, authEntry) {
  console.error("[ctxce] Starting workspace indexing...");
  console.error(`[ctxce] Workspace: ${workspace}`);
  console.error(`[ctxce] Endpoint: ${SAAS_ENDPOINTS.uploadEndpoint}`);

  // Delegate scan/bundle/upload to the native uploader. Org fields are
  // optional (authEntry may be absent or lack them — hence `?.`).
  const result = await indexWorkspace(
    workspace,
    SAAS_ENDPOINTS.uploadEndpoint,
    sessionId,
    {
      log: console.error,
      orgId: authEntry?.org_id,
      orgSlug: authEntry?.org_slug,
    }
  );

  // indexWorkspace always resolves with a { success, ... } result object.
  return result.success;
}
|
|
240
162
|
|
|
241
|
-
function startWatcher(workspace, sessionId, intervalMs) {
|
|
242
|
-
const uploadClient = findUploadClient();
|
|
243
|
-
const python = detectPython();
|
|
244
|
-
|
|
245
|
-
if (!uploadClient || !python) {
|
|
246
|
-
console.error("[ctxce] Cannot start watcher: Python upload client not available.");
|
|
247
|
-
return null;
|
|
248
|
-
}
|
|
249
|
-
|
|
163
|
+
function startWatcher(workspace, sessionId, authEntry, intervalMs) {
|
|
250
164
|
console.error(`[ctxce] Starting file watcher (sync every ${intervalMs / 1000}s)...`);
|
|
251
165
|
console.error("[ctxce] Press Ctrl+C to stop.");
|
|
252
166
|
|
|
253
167
|
let isRunning = false;
|
|
254
168
|
let pendingSync = false;
|
|
255
|
-
let lastSyncTime = Date.now();
|
|
256
169
|
|
|
257
170
|
const fileHashes = new Map();
|
|
258
171
|
|
|
@@ -329,18 +242,26 @@ function startWatcher(workspace, sessionId, intervalMs) {
|
|
|
329
242
|
console.error(`[ctxce] [${now}] Syncing changes...`);
|
|
330
243
|
|
|
331
244
|
try {
|
|
332
|
-
const
|
|
333
|
-
|
|
245
|
+
const result = await indexWorkspace(
|
|
246
|
+
workspace,
|
|
247
|
+
SAAS_ENDPOINTS.uploadEndpoint,
|
|
248
|
+
sessionId,
|
|
249
|
+
{
|
|
250
|
+
log: console.error,
|
|
251
|
+
orgId: authEntry?.org_id,
|
|
252
|
+
orgSlug: authEntry?.org_slug,
|
|
253
|
+
}
|
|
254
|
+
);
|
|
255
|
+
if (result.success) {
|
|
334
256
|
console.error(`[ctxce] [${now}] Sync complete.`);
|
|
335
257
|
} else {
|
|
336
|
-
console.error(`[ctxce] [${now}] Sync failed
|
|
258
|
+
console.error(`[ctxce] [${now}] Sync failed: ${result.error}`);
|
|
337
259
|
}
|
|
338
260
|
} catch (err) {
|
|
339
261
|
console.error(`[ctxce] [${now}] Sync error: ${err}`);
|
|
340
262
|
}
|
|
341
263
|
|
|
342
264
|
isRunning = false;
|
|
343
|
-
lastSyncTime = Date.now();
|
|
344
265
|
|
|
345
266
|
if (pendingSync) {
|
|
346
267
|
pendingSync = false;
|
|
@@ -428,7 +349,7 @@ export async function runConnectCommand(args) {
|
|
|
428
349
|
}
|
|
429
350
|
|
|
430
351
|
if (!skipIndex) {
|
|
431
|
-
const indexed = await triggerIndexing(resolvedWorkspace, authEntry.sessionId);
|
|
352
|
+
const indexed = await triggerIndexing(resolvedWorkspace, authEntry.sessionId, authEntry);
|
|
432
353
|
if (!indexed) {
|
|
433
354
|
console.error("[ctxce] Initial indexing failed, but will continue.");
|
|
434
355
|
}
|
|
@@ -440,5 +361,5 @@ export async function runConnectCommand(args) {
|
|
|
440
361
|
}
|
|
441
362
|
|
|
442
363
|
printSuccess();
|
|
443
|
-
startWatcher(resolvedWorkspace, authEntry.sessionId, watchInterval);
|
|
364
|
+
startWatcher(resolvedWorkspace, authEntry.sessionId, authEntry, watchInterval);
|
|
444
365
|
}
|
package/src/mcpServer.js
CHANGED
|
@@ -72,40 +72,6 @@ async function listMemoryTools(client) {
|
|
|
72
72
|
}
|
|
73
73
|
}
|
|
74
74
|
|
|
75
|
-
function withTimeout(promise, ms, label) {
|
|
76
|
-
return new Promise((resolve, reject) => {
|
|
77
|
-
let settled = false;
|
|
78
|
-
const timer = setTimeout(() => {
|
|
79
|
-
if (settled) {
|
|
80
|
-
return;
|
|
81
|
-
}
|
|
82
|
-
settled = true;
|
|
83
|
-
const errorMessage =
|
|
84
|
-
label != null
|
|
85
|
-
? `[ctxce] Timeout after ${ms}ms in ${label}`
|
|
86
|
-
: `[ctxce] Timeout after ${ms}ms`;
|
|
87
|
-
reject(new Error(errorMessage));
|
|
88
|
-
}, ms);
|
|
89
|
-
promise
|
|
90
|
-
.then((value) => {
|
|
91
|
-
if (settled) {
|
|
92
|
-
return;
|
|
93
|
-
}
|
|
94
|
-
settled = true;
|
|
95
|
-
clearTimeout(timer);
|
|
96
|
-
resolve(value);
|
|
97
|
-
})
|
|
98
|
-
.catch((err) => {
|
|
99
|
-
if (settled) {
|
|
100
|
-
return;
|
|
101
|
-
}
|
|
102
|
-
settled = true;
|
|
103
|
-
clearTimeout(timer);
|
|
104
|
-
reject(err);
|
|
105
|
-
});
|
|
106
|
-
});
|
|
107
|
-
}
|
|
108
|
-
|
|
109
75
|
function getBridgeToolTimeoutMs() {
|
|
110
76
|
try {
|
|
111
77
|
const raw = process.env.CTXCE_TOOL_TIMEOUT_MSEC;
|
|
@@ -705,7 +671,7 @@ async function createBridgeServer(options) {
|
|
|
705
671
|
|
|
706
672
|
let nextIndexerClient = null;
|
|
707
673
|
try {
|
|
708
|
-
const indexerTransport = new StreamableHTTPClientTransport(indexerUrl, transportOpts);
|
|
674
|
+
const indexerTransport = new StreamableHTTPClientTransport(new URL(indexerUrl), transportOpts);
|
|
709
675
|
const client = new Client(
|
|
710
676
|
{
|
|
711
677
|
name: "ctx-context-engine-bridge-http-client",
|
|
@@ -729,7 +695,7 @@ async function createBridgeServer(options) {
|
|
|
729
695
|
let nextMemoryClient = null;
|
|
730
696
|
if (memoryUrl) {
|
|
731
697
|
try {
|
|
732
|
-
const memoryTransport = new StreamableHTTPClientTransport(memoryUrl, transportOpts);
|
|
698
|
+
const memoryTransport = new StreamableHTTPClientTransport(new URL(memoryUrl), transportOpts);
|
|
733
699
|
const client = new Client(
|
|
734
700
|
{
|
|
735
701
|
name: "ctx-context-engine-bridge-memory-client",
|
|
@@ -860,6 +826,9 @@ async function createBridgeServer(options) {
|
|
|
860
826
|
args = maybeRemapToolArgs(name, args, workspace);
|
|
861
827
|
|
|
862
828
|
if (name === "set_session_defaults") {
|
|
829
|
+
if (!indexerClient) {
|
|
830
|
+
throw new Error("Indexer client not connected");
|
|
831
|
+
}
|
|
863
832
|
const indexerResult = await indexerClient.callTool({ name, arguments: args });
|
|
864
833
|
if (memoryClient) {
|
|
865
834
|
try {
|
package/src/uploader.js
ADDED
|
@@ -0,0 +1,323 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import os from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { createHash } from "node:crypto";
|
|
5
|
+
import { create as tarCreate } from "tar";
|
|
6
|
+
import ignore from "ignore";
|
|
7
|
+
|
|
8
|
+
// File extensions treated as indexable source/config code. Matched against
// the lowercased extension (see isCodeFile); note ".R" is therefore
// redundant with ".r" — kept as-is.
const CODE_EXTS = new Set([
  ".py", ".js", ".ts", ".tsx", ".jsx", ".java", ".go", ".rs", ".rb", ".php",
  ".c", ".h", ".cpp", ".cc", ".hpp", ".cs", ".csx", ".kt", ".swift", ".scala",
  ".sh", ".ps1", ".psm1", ".psd1", ".pl", ".lua",
  ".sql", ".md", ".yml", ".yaml", ".toml", ".ini", ".cfg", ".conf", ".json", ".xml",
  ".csproj", ".config", ".resx",
  ".html", ".htm", ".css", ".scss", ".sass", ".less", ".vue", ".svelte", ".cshtml", ".razor",
  ".tf", ".tfvars", ".hcl", ".dockerfile",
  ".ex", ".exs",
  ".elm", ".dart", ".r", ".R", ".m", ".cljs", ".clj", ".hs", ".ml",
  ".zig", ".nim", ".v", ".sv", ".vhdl", ".asm", ".s",
]);

// Well-known extensionless files still considered code. Entries are matched
// against the lowercased basename (see isCodeFile).
const EXTENSIONLESS_FILES = new Set([
  "dockerfile", "makefile", "gemfile", "rakefile", "procfile", "vagrantfile",
  "jenkinsfile", ".gitignore", ".dockerignore", ".editorconfig",
]);

// Patterns always excluded from scanning, in .gitignore syntax. Combined
// with the workspace's own .gitignore in loadGitignore().
const DEFAULT_IGNORES = [
  ".git", "node_modules", "__pycache__", ".venv", "venv", ".env",
  "dist", "build", ".next", ".nuxt", "coverage", ".nyc_output",
  "*.pyc", "*.pyo", "*.so", "*.dylib", "*.dll", "*.exe",
  "*.jpg", "*.jpeg", "*.png", "*.gif", "*.ico", "*.svg", "*.webp",
  "*.mp3", "*.mp4", "*.wav", "*.avi", "*.mov",
  "*.zip", "*.tar", "*.gz", "*.rar", "*.7z",
  "*.pdf", "*.doc", "*.docx", "*.xls", "*.xlsx",
  ".DS_Store", "Thumbs.db",
];

// Files at or above this size (10 MiB) are skipped by the scan.
const MAX_FILE_SIZE = 10 * 1024 * 1024;
|
|
38
|
+
|
|
39
|
+
/**
 * Build the ignore matcher for a workspace: the baked-in DEFAULT_IGNORES
 * plus the workspace's own .gitignore, when present and readable.
 * @param {string} workspacePath - Absolute workspace root.
 * @returns {object} `ignore` matcher; test paths with `.ignores(relPath)`.
 */
function loadGitignore(workspacePath) {
  const matcher = ignore().add(DEFAULT_IGNORES);

  const gitignoreFile = path.join(workspacePath, ".gitignore");
  if (fs.existsSync(gitignoreFile)) {
    try {
      matcher.add(fs.readFileSync(gitignoreFile, "utf-8"));
    } catch {
      // Unreadable .gitignore: best effort, fall back to defaults only.
    }
  }

  return matcher;
}
|
|
54
|
+
|
|
55
|
+
/**
 * Decide whether a path should be indexed as code: true when the lowercased
 * extension is in CODE_EXTS, or the lowercased basename is a known
 * extensionless code file, or it is any "dockerfile*" variant.
 * @param {string} filePath - File name or path (only extension/basename used).
 * @returns {boolean}
 */
function isCodeFile(filePath) {
  if (CODE_EXTS.has(path.extname(filePath).toLowerCase())) {
    return true;
  }
  const name = path.basename(filePath).toLowerCase();
  return EXTENSIONLESS_FILES.has(name) || name.startsWith("dockerfile");
}
|
|
65
|
+
|
|
66
|
+
/**
 * Hash a file's contents: the first 16 hex chars (64 bits) of its SHA-256.
 * Reads the whole file into memory; callers pre-filter by MAX_FILE_SIZE.
 * @param {string} filePath - Path of an existing, readable file.
 * @returns {string} 16-character lowercase hex digest prefix.
 */
function computeFileHash(filePath) {
  const digest = createHash("sha256")
    .update(fs.readFileSync(filePath))
    .digest("hex");
  return digest.slice(0, 16);
}
|
|
70
|
+
|
|
71
|
+
/**
 * Derive a stable 12-hex-char id for a workspace path. Backslashes are
 * normalized to forward slashes and trailing slashes are stripped, so the
 * same directory hashes identically across platforms and spellings.
 * @param {string} workspacePath - Workspace root path.
 * @returns {string} First 12 hex chars of SHA-256 of the normalized path.
 */
function computeLogicalRepoId(workspacePath) {
  let normalized = workspacePath.replace(/\\/g, "/");
  while (normalized.endsWith("/")) {
    normalized = normalized.slice(0, -1);
  }
  const fullDigest = createHash("sha256").update(normalized).digest("hex");
  return fullDigest.slice(0, 12);
}
|
|
75
|
+
|
|
76
|
+
/**
 * Recursively collect indexable code files under workspacePath.
 * Entries matching the ignore matcher (as file or directory pattern) are
 * pruned; only regular files accepted by isCodeFile with
 * 0 < size < MAX_FILE_SIZE are kept. Unreadable directories and
 * unstat-able files are skipped silently (best effort).
 * @param {string} workspacePath - Absolute root to scan.
 * @param {object} ig - Ignore matcher from loadGitignore().
 * @returns {{path: string, fullPath: string, size: number, mtime: number}[]}
 *   Files in depth-first directory order; `path` is "/"-separated and
 *   relative to the root.
 */
function scanWorkspace(workspacePath, ig) {
  const collected = [];

  const visit = (dir, relPrefix) => {
    let dirEntries;
    try {
      dirEntries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      return; // unreadable directory: skip silently
    }

    for (const dirent of dirEntries) {
      const absolute = path.join(dir, dirent.name);
      const relative = relPrefix ? `${relPrefix}/${dirent.name}` : dirent.name;

      // Check both the plain path and the directory-style ("foo/") form so
      // directory-only gitignore patterns also prune files.
      if (ig.ignores(relative) || ig.ignores(relative + "/")) {
        continue;
      }

      if (dirent.isDirectory()) {
        visit(absolute, relative);
        continue;
      }
      if (!dirent.isFile() || !isCodeFile(dirent.name)) {
        continue;
      }

      try {
        const info = fs.statSync(absolute);
        if (info.size > 0 && info.size < MAX_FILE_SIZE) {
          collected.push({
            path: relative,
            fullPath: absolute,
            size: info.size,
            mtime: info.mtimeMs,
          });
        }
      } catch {
        // stat failed (e.g. racing delete): skip this file
      }
    }
  };

  visit(workspacePath, "");
  return collected;
}
|
|
117
|
+
|
|
118
|
+
/**
 * Snapshot the workspace's code files into a gzipped tar bundle under a
 * fresh temp directory.
 *
 * Staging layout (inside <tmpDir>/<bundleId>/):
 *   manifest.json              - bundle-level metadata
 *   .metadata/operations.json  - one "upsert" op per file
 *   .metadata/hashes.json      - per-file content hashes
 *   files/<relPath>            - verbatim copies of each scanned file
 *
 * @param {string} workspacePath - Absolute root of the workspace to bundle.
 * @param {{log?: Function}} [options] - Logger (defaults to console.error).
 * @returns {Promise<?{bundlePath: string, manifest: object, tmpDir: string,
 *   cleanup: Function}>} null when the scan finds no code files; otherwise
 *   the tarball path plus a cleanup() that removes the temp dir — the
 *   caller is responsible for invoking cleanup().
 * @throws Re-throws tar creation errors (after removing the temp dir).
 */
export async function createBundle(workspacePath, options = {}) {
  const { log = console.error } = options;

  const ig = loadGitignore(workspacePath);
  const files = scanWorkspace(workspacePath, ig);

  if (files.length === 0) {
    log("[uploader] No code files found in workspace");
    return null;
  }

  log(`[uploader] Found ${files.length} code files`);

  // Time + Math.random based id: uniqueness only, not security-sensitive.
  const bundleId = createHash("sha256").update(Date.now().toString() + Math.random().toString()).digest("hex").slice(0, 16);
  const createdAt = new Date().toISOString();

  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "ctxce-"));
  const bundleDir = path.join(tmpDir, bundleId);
  const metadataDir = path.join(bundleDir, ".metadata");
  const filesDir = path.join(bundleDir, "files");

  fs.mkdirSync(metadataDir, { recursive: true });
  fs.mkdirSync(filesDir, { recursive: true });

  const operations = [];
  const fileHashes = {};
  let totalSize = 0;

  // Copy every scanned file into the staging dir and record an "upsert"
  // operation + content hash. NOTE(review): each file is read twice here
  // (copyFileSync, then computeFileHash re-reads the source).
  for (const file of files) {
    const destPath = path.join(filesDir, file.path);
    fs.mkdirSync(path.dirname(destPath), { recursive: true });
    fs.copyFileSync(file.fullPath, destPath);

    const hash = computeFileHash(file.fullPath);
    fileHashes[file.path] = hash;
    totalSize += file.size;

    operations.push({
      op: "upsert",
      path: file.path,
      size: file.size,
      hash,
    });
  }

  // Server-facing metadata. All files are reported as "created" with null
  // sequence numbers — NOTE(review): appears to denote a full snapshot
  // rather than an incremental delta; confirm server-side semantics.
  const manifest = {
    version: "1.0",
    bundle_id: bundleId,
    workspace_path: workspacePath,
    collection_name: computeLogicalRepoId(workspacePath),
    created_at: createdAt,
    sequence_number: null,
    parent_sequence: null,
    operations: {
      created: files.length,
      updated: 0,
      deleted: 0,
      moved: 0,
    },
    total_files: files.length,
    total_size_bytes: totalSize,
    compression: "gzip",
    encoding: "utf-8",
  };

  fs.writeFileSync(path.join(bundleDir, "manifest.json"), JSON.stringify(manifest, null, 2));
  fs.writeFileSync(path.join(metadataDir, "operations.json"), JSON.stringify({ operations }, null, 2));
  fs.writeFileSync(path.join(metadataDir, "hashes.json"), JSON.stringify({
    workspace_path: workspacePath,
    updated_at: createdAt,
    file_hashes: fileHashes,
  }, null, 2));

  const bundlePath = path.join(tmpDir, `${bundleId}.tar.gz`);

  try {
    // "portable: true" drops platform-specific attributes from the archive.
    await tarCreate(
      {
        gzip: true,
        file: bundlePath,
        cwd: tmpDir,
        portable: true,
      },
      [bundleId]
    );

    const bundleSize = fs.statSync(bundlePath).size;
    log(`[uploader] Created bundle: ${bundleSize} bytes`);

    return {
      bundlePath,
      manifest,
      tmpDir,
      cleanup: () => {
        try {
          fs.rmSync(tmpDir, { recursive: true, force: true });
        } catch {
          // Best effort: failure to remove the temp dir is non-fatal.
        }
      },
    };
  } catch (err) {
    // Tar creation failed: don't leak the staging directory.
    fs.rmSync(tmpDir, { recursive: true, force: true });
    throw err;
  }
}
|
|
223
|
+
|
|
224
|
+
/**
 * POST a bundle tarball plus its metadata fields to the delta-upload API
 * as a hand-built multipart/form-data request.
 *
 * Fix vs. previous version: the response body was read twice on invalid
 * JSON (resp.json() then resp.text()); fetch bodies are single-use, so the
 * second read threw "Body is unusable" and masked the real server error.
 * The body is now read once as text and parsed manually.
 *
 * @param {string} bundlePath - Path to the .tar.gz produced by createBundle.
 * @param {object} manifest - Bundle manifest (workspace_path, collection_name, ...).
 * @param {string} uploadEndpoint - Base URL of the upload service.
 * @param {string} sessionId - Sent as the "session" form field.
 * @param {{log?: Function, orgId?: string, orgSlug?: string}} [options]
 * @returns {Promise<{success: boolean, result?: object, error?: string}>}
 */
export async function uploadBundle(bundlePath, manifest, uploadEndpoint, sessionId, options = {}) {
  const { log = console.error, orgId, orgSlug } = options;

  const bundleData = fs.readFileSync(bundlePath);
  const boundary = `----ctxce${Date.now()}${Math.random().toString(36).slice(2)}`;

  const parts = [];

  // File part: the gzipped tarball itself.
  parts.push(
    `--${boundary}\r\n`,
    `Content-Disposition: form-data; name="bundle"; filename="bundle.tar.gz"\r\n`,
    `Content-Type: application/gzip\r\n\r\n`,
  );
  parts.push(bundleData);
  parts.push(`\r\n`);

  const logicalRepoId = computeLogicalRepoId(manifest.workspace_path);
  const fields = {
    workspace_path: manifest.workspace_path,
    collection_name: manifest.collection_name || logicalRepoId,
    force: "true",
    source_path: manifest.workspace_path,
    logical_repo_id: logicalRepoId,
    session: sessionId,
  };

  if (orgId) fields.org_id = orgId;
  if (orgSlug) fields.org_slug = orgSlug;

  // Plain form fields; falsy values are omitted entirely.
  for (const [key, value] of Object.entries(fields)) {
    if (value) {
      parts.push(
        `--${boundary}\r\n`,
        `Content-Disposition: form-data; name="${key}"\r\n\r\n`,
        `${value}\r\n`,
      );
    }
  }

  parts.push(`--${boundary}--\r\n`);

  const bodyParts = parts.map(p => typeof p === "string" ? Buffer.from(p) : p);
  const body = Buffer.concat(bodyParts);

  const url = `${uploadEndpoint}/api/v1/delta/upload`;
  log(`[uploader] Uploading to ${url}...`);

  const resp = await fetch(url, {
    method: "POST",
    headers: {
      "Content-Type": `multipart/form-data; boundary=${boundary}`,
      "Content-Length": String(body.length),
    },
    body,
  });

  // Read the body exactly once (fetch bodies are single-use), then parse.
  // Non-JSON or non-object JSON (e.g. "null") becomes a failure result
  // carrying the raw text as the error message.
  const rawBody = await resp.text();
  let result;
  try {
    const parsed = JSON.parse(rawBody);
    result = parsed && typeof parsed === "object"
      ? parsed
      : { success: false, error: { message: rawBody } };
  } catch {
    result = { success: false, error: { message: rawBody } };
  }

  if (!resp.ok || !result.success) {
    let errorMsg = result.error?.message || result.detail || result.error;
    if (typeof errorMsg === "object") {
      errorMsg = JSON.stringify(errorMsg);
    }
    errorMsg = errorMsg || `HTTP ${resp.status}`;
    log(`[uploader] Upload failed: ${errorMsg}`);
    return { success: false, error: errorMsg };
  }

  log(`[uploader] Upload successful!`);
  return { success: true, result };
}
|
|
300
|
+
|
|
301
|
+
/**
 * One-shot "scan → bundle → upload" pipeline for a workspace. The temp
 * directory created by createBundle is always cleaned up, even when the
 * upload throws.
 * @param {string} workspacePath - Absolute workspace root to index.
 * @param {string} uploadEndpoint - Base URL of the upload service.
 * @param {string} sessionId - Session identifier forwarded to the upload.
 * @param {{log?: Function, orgId?: string, orgSlug?: string}} [options]
 * @returns {Promise<{success: boolean, result?: object, error?: string}>}
 */
export async function indexWorkspace(workspacePath, uploadEndpoint, sessionId, options = {}) {
  const { log = console.error, orgId, orgSlug } = options;

  log(`[uploader] Scanning workspace: ${workspacePath}`);

  const bundle = await createBundle(workspacePath, { log });
  if (!bundle) {
    return { success: false, error: "No code files found" };
  }

  try {
    return await uploadBundle(bundle.bundlePath, bundle.manifest, uploadEndpoint, sessionId, {
      log,
      orgId,
      orgSlug,
    });
  } finally {
    bundle.cleanup();
  }
}
|