@context-engine-bridge/context-engine-mcp-bridge 0.0.25 → 0.0.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@context-engine-bridge/context-engine-mcp-bridge",
3
- "version": "0.0.25",
3
+ "version": "0.0.27",
4
4
  "description": "Context Engine MCP bridge (http/stdio proxy combining indexer + memory servers)",
5
5
  "bin": {
6
6
  "ctxce": "bin/ctxce.js",
@@ -18,6 +18,8 @@
18
18
  },
19
19
  "dependencies": {
20
20
  "@modelcontextprotocol/sdk": "^1.24.3",
21
+ "ignore": "^7.0.5",
22
+ "tar": "^7.5.9",
21
23
  "zod": "^3.25.0"
22
24
  },
23
25
  "devDependencies": {
package/src/connectCli.js CHANGED
@@ -1,12 +1,8 @@
1
1
  import process from "node:process";
2
2
  import path from "node:path";
3
3
  import fs from "node:fs";
4
- import { execSync, spawn } from "node:child_process";
5
- import { fileURLToPath } from "node:url";
6
4
  import { saveAuthEntry } from "./authConfig.js";
7
-
8
- const __filename = fileURLToPath(import.meta.url);
9
- const __dirname = path.dirname(__filename);
5
+ import { indexWorkspace } from "./uploader.js";
10
6
 
11
7
  const SAAS_ENDPOINTS = {
12
8
  uploadEndpoint: "https://dev.context-engine.ai/upload",
@@ -145,114 +141,31 @@ async function authenticateWithApiKey(apiKey) {
145
141
  return entry;
146
142
  }
147
143
 
148
- function findUploadClient() {
149
- const candidates = [
150
- path.resolve(process.cwd(), "scripts", "standalone_upload_client.py"),
151
- path.resolve(process.cwd(), "..", "scripts", "standalone_upload_client.py"),
152
- path.resolve(__dirname, "..", "..", "scripts", "standalone_upload_client.py"),
153
- ];
154
-
155
- for (const candidate of candidates) {
156
- if (fs.existsSync(candidate)) {
157
- return candidate;
158
- }
159
- }
160
-
161
- return null;
162
- }
163
-
164
- function detectPython() {
165
- for (const cmd of ["python3", "python"]) {
166
- try {
167
- execSync(`${cmd} --version`, { stdio: "ignore" });
168
- return cmd;
169
- } catch {
170
- }
171
- }
172
- return null;
173
- }
174
-
175
- async function runUploadClient(workspace, sessionId, uploadClient, python) {
176
- const env = {
177
- ...process.env,
178
- REMOTE_UPLOAD_ENDPOINT: SAAS_ENDPOINTS.uploadEndpoint,
179
- CTXCE_AUTH_BACKEND_URL: SAAS_ENDPOINTS.authBackendUrl,
180
- CTXCE_SESSION_ID: sessionId,
181
- HOST_ROOT: workspace,
182
- CONTAINER_ROOT: "/work",
183
- };
184
-
185
- return new Promise((resolve) => {
186
- const args = [
187
- uploadClient,
188
- "--path", workspace,
189
- "--endpoint", SAAS_ENDPOINTS.uploadEndpoint,
190
- "--force",
191
- ];
192
-
193
- const proc = spawn(python, args, {
194
- env,
195
- stdio: ["ignore", "pipe", "pipe"],
196
- cwd: workspace,
197
- });
198
-
199
- proc.stdout.on("data", (data) => {
200
- const line = data.toString().trim();
201
- if (line) {
202
- console.error(`[upload] ${line}`);
203
- }
204
- });
205
-
206
- proc.stderr.on("data", (data) => {
207
- const line = data.toString().trim();
208
- if (line) {
209
- console.error(`[upload] ${line}`);
210
- }
211
- });
212
-
213
- proc.on("close", (code) => {
214
- resolve(code === 0);
215
- });
216
-
217
- proc.on("error", (err) => {
218
- console.error(`[ctxce] Upload process error: ${err}`);
219
- resolve(false);
220
- });
221
- });
222
- }
223
-
224
- async function triggerIndexing(workspace, sessionId) {
144
/**
 * Run a single full upload pass for the workspace against the SaaS
 * endpoints, logging progress to stderr.
 *
 * @param {string} workspace - Absolute workspace root to index.
 * @param {string} sessionId - Session identifier forwarded to the uploader.
 * @param {object} authEntry - Auth entry; org_id/org_slug are passed through when present.
 * @returns {Promise<boolean>} true when the upload succeeded.
 */
async function triggerIndexing(workspace, sessionId, authEntry) {
  console.error("[ctxce] Starting workspace indexing...");
  console.error(`[ctxce] Workspace: ${workspace}`);
  console.error(`[ctxce] Endpoint: ${SAAS_ENDPOINTS.uploadEndpoint}`);

  const outcome = await indexWorkspace(workspace, SAAS_ENDPOINTS.uploadEndpoint, sessionId, {
    log: console.error,
    orgId: authEntry?.org_id,
    orgSlug: authEntry?.org_slug,
  });

  return outcome.success;
}
240
162
 
241
- function startWatcher(workspace, sessionId, intervalMs) {
242
- const uploadClient = findUploadClient();
243
- const python = detectPython();
244
-
245
- if (!uploadClient || !python) {
246
- console.error("[ctxce] Cannot start watcher: Python upload client not available.");
247
- return null;
248
- }
249
-
163
+ function startWatcher(workspace, sessionId, authEntry, intervalMs) {
250
164
  console.error(`[ctxce] Starting file watcher (sync every ${intervalMs / 1000}s)...`);
251
165
  console.error("[ctxce] Press Ctrl+C to stop.");
252
166
 
253
167
  let isRunning = false;
254
168
  let pendingSync = false;
255
- let lastSyncTime = Date.now();
256
169
 
257
170
  const fileHashes = new Map();
258
171
 
@@ -329,18 +242,26 @@ function startWatcher(workspace, sessionId, intervalMs) {
329
242
  console.error(`[ctxce] [${now}] Syncing changes...`);
330
243
 
331
244
  try {
332
- const success = await runUploadClient(workspace, sessionId, uploadClient, python);
333
- if (success) {
245
+ const result = await indexWorkspace(
246
+ workspace,
247
+ SAAS_ENDPOINTS.uploadEndpoint,
248
+ sessionId,
249
+ {
250
+ log: console.error,
251
+ orgId: authEntry?.org_id,
252
+ orgSlug: authEntry?.org_slug,
253
+ }
254
+ );
255
+ if (result.success) {
334
256
  console.error(`[ctxce] [${now}] Sync complete.`);
335
257
  } else {
336
- console.error(`[ctxce] [${now}] Sync failed.`);
258
+ console.error(`[ctxce] [${now}] Sync failed: ${result.error}`);
337
259
  }
338
260
  } catch (err) {
339
261
  console.error(`[ctxce] [${now}] Sync error: ${err}`);
340
262
  }
341
263
 
342
264
  isRunning = false;
343
- lastSyncTime = Date.now();
344
265
 
345
266
  if (pendingSync) {
346
267
  pendingSync = false;
@@ -428,7 +349,7 @@ export async function runConnectCommand(args) {
428
349
  }
429
350
 
430
351
  if (!skipIndex) {
431
- const indexed = await triggerIndexing(resolvedWorkspace, authEntry.sessionId);
352
+ const indexed = await triggerIndexing(resolvedWorkspace, authEntry.sessionId, authEntry);
432
353
  if (!indexed) {
433
354
  console.error("[ctxce] Initial indexing failed, but will continue.");
434
355
  }
@@ -440,5 +361,5 @@ export async function runConnectCommand(args) {
440
361
  }
441
362
 
442
363
  printSuccess();
443
- startWatcher(resolvedWorkspace, authEntry.sessionId, watchInterval);
364
+ startWatcher(resolvedWorkspace, authEntry.sessionId, authEntry, watchInterval);
444
365
  }
@@ -0,0 +1,276 @@
1
+ import fs from "node:fs";
2
+ import os from "node:os";
3
+ import path from "node:path";
4
+ import { createHash } from "node:crypto";
5
+ import { create as tarCreate } from "tar";
6
+ import ignore from "ignore";
7
+
8
// Extensions of files considered indexable source/config/docs.
// NOTE(review): ".R" is unreachable here because isCodeFile lowercases the
// extension before lookup (".r" already covers it); kept for parity.
const CODE_EXTS = new Set([
  ".py", ".js", ".ts", ".tsx", ".jsx", ".java", ".go", ".rs", ".rb", ".php",
  ".c", ".h", ".cpp", ".cc", ".hpp", ".cs", ".csx", ".kt", ".swift", ".scala",
  ".sh", ".ps1", ".psm1", ".psd1", ".pl", ".lua",
  ".sql", ".md", ".yml", ".yaml", ".toml", ".ini", ".cfg", ".conf", ".json", ".xml",
  ".csproj", ".config", ".resx",
  ".html", ".htm", ".css", ".scss", ".sass", ".less", ".vue", ".svelte", ".cshtml", ".razor",
  ".tf", ".tfvars", ".hcl", ".dockerfile",
  ".ex", ".exs",
  ".elm", ".dart", ".r", ".R", ".m", ".cljs", ".clj", ".hs", ".ml",
  ".zig", ".nim", ".v", ".sv", ".vhdl", ".asm", ".s",
]);

// Well-known extensionless file names (compared lowercased).
const EXTENSIONLESS_FILES = new Set([
  "dockerfile", "makefile", "gemfile", "rakefile", "procfile", "vagrantfile",
  "jenkinsfile", ".gitignore", ".dockerignore", ".editorconfig",
]);

// Baseline ignore patterns applied before any workspace .gitignore:
// VCS/dependency/build directories, compiled artifacts, media, archives,
// office documents, and OS metadata files.
const DEFAULT_IGNORES = [
  ".git", "node_modules", "__pycache__", ".venv", "venv", ".env",
  "dist", "build", ".next", ".nuxt", "coverage", ".nyc_output",
  "*.pyc", "*.pyo", "*.so", "*.dylib", "*.dll", "*.exe",
  "*.jpg", "*.jpeg", "*.png", "*.gif", "*.ico", "*.svg", "*.webp",
  "*.mp3", "*.mp4", "*.wav", "*.avi", "*.mov",
  "*.zip", "*.tar", "*.gz", "*.rar", "*.7z",
  "*.pdf", "*.doc", "*.docx", "*.xls", "*.xlsx",
  ".DS_Store", "Thumbs.db",
];

// Files at or above this size (10 MiB) are excluded from bundles.
const MAX_FILE_SIZE = 10 * 1024 * 1024;
38
+
39
/**
 * Build an `ignore` matcher seeded with DEFAULT_IGNORES plus any
 * `.gitignore` found at the workspace root. Reading the `.gitignore` is
 * best-effort: an unreadable file is skipped silently.
 *
 * @param {string} workspacePath - Absolute workspace root.
 * @returns {import("ignore").Ignore} Configured ignore matcher.
 */
function loadGitignore(workspacePath) {
  const matcher = ignore().add(DEFAULT_IGNORES);

  const gitignoreFile = path.join(workspacePath, ".gitignore");
  if (fs.existsSync(gitignoreFile)) {
    try {
      matcher.add(fs.readFileSync(gitignoreFile, "utf-8"));
    } catch {
      // Best-effort: proceed with the defaults only.
    }
  }

  return matcher;
}
54
+
55
/**
 * Decide whether a file should be indexed: either its (lowercased)
 * extension is in CODE_EXTS, or its lowercased basename is a well-known
 * extensionless name (or any "dockerfile*" variant).
 *
 * @param {string} filePath - File name or path to test.
 * @returns {boolean}
 */
function isCodeFile(filePath) {
  if (CODE_EXTS.has(path.extname(filePath).toLowerCase())) {
    return true;
  }

  const base = path.basename(filePath).toLowerCase();
  return EXTENSIONLESS_FILES.has(base) || base.startsWith("dockerfile");
}
65
+
66
/**
 * Short content fingerprint for a file: the first 16 hex characters of its
 * SHA-256 digest. Reads the whole file into memory synchronously (callers
 * only pass files already size-capped by the scanner).
 *
 * @param {string} filePath - Path of the file to hash.
 * @returns {string} 16-hex-char truncated SHA-256.
 */
function computeFileHash(filePath) {
  const digest = createHash("sha256");
  digest.update(fs.readFileSync(filePath));
  return digest.digest("hex").slice(0, 16);
}
70
+
71
/**
 * Stable 12-hex-char identity for a workspace path: SHA-256 of the path
 * with backslashes normalized to "/" and trailing slashes stripped, so the
 * same directory yields the same id regardless of separator style.
 *
 * @param {string} workspacePath - Workspace path (any separator style).
 * @returns {string} 12-hex-char truncated SHA-256 of the canonical path.
 */
function computeLogicalRepoId(workspacePath) {
  const canonical = workspacePath
    .replace(/\\/g, "/")
    .replace(/\/+$/, "");
  return createHash("sha256").update(canonical).digest("hex").slice(0, 12);
}
75
+
76
/**
 * Recursively collect indexable files under `workspacePath`, honoring the
 * ignore matcher. Unreadable directories and unstat-able files are skipped
 * silently; only non-empty files smaller than MAX_FILE_SIZE are kept.
 * Returned `path` values are workspace-relative with "/" separators.
 *
 * @param {string} workspacePath - Absolute workspace root.
 * @param {{ignores: (p: string) => boolean}} ig - Ignore matcher (see loadGitignore).
 * @returns {Array<{path: string, fullPath: string, size: number, mtime: number}>}
 */
function scanWorkspace(workspacePath, ig) {
  const collected = [];

  const visit = (dir, rel = "") => {
    let dirEntries;
    try {
      dirEntries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      return; // Unreadable directory — skip its subtree.
    }

    for (const dirEntry of dirEntries) {
      const absolute = path.join(dir, dirEntry.name);
      const relative = rel ? `${rel}/${dirEntry.name}` : dirEntry.name;

      // Test both the plain and trailing-slash forms so directory-only
      // patterns (e.g. "build/") also match.
      if (ig.ignores(relative) || ig.ignores(`${relative}/`)) {
        continue;
      }

      if (dirEntry.isDirectory()) {
        visit(absolute, relative);
        continue;
      }

      if (!dirEntry.isFile() || !isCodeFile(dirEntry.name)) {
        continue;
      }

      try {
        const info = fs.statSync(absolute);
        if (info.size > 0 && info.size < MAX_FILE_SIZE) {
          collected.push({
            path: relative,
            fullPath: absolute,
            size: info.size,
            mtime: info.mtimeMs,
          });
        }
      } catch {
        // File vanished or is unreadable — skip it.
      }
    }
  };

  visit(workspacePath);
  return collected;
}
117
+
118
/**
 * Scan the workspace, build a manifest describing the matched files, and
 * pack them into a gzipped tarball inside a fresh temp directory.
 *
 * @param {string} workspacePath - Absolute workspace root.
 * @param {{log?: Function}} [options] - `log` receives progress lines (default console.error).
 * @returns {Promise<null | {bundlePath: string, manifest: object, tmpDir: string, cleanup: Function}>}
 *   null when no code files were found; otherwise the bundle path, its
 *   manifest, the temp dir, and a best-effort `cleanup()` that removes it.
 * @throws Re-throws tar failures after removing the temp directory.
 */
export async function createBundle(workspacePath, options = {}) {
  const { log = console.error } = options;

  const matcher = loadGitignore(workspacePath);
  const files = scanWorkspace(workspacePath, matcher);

  if (files.length === 0) {
    log("[uploader] No code files found in workspace");
    return null;
  }

  log(`[uploader] Found ${files.length} code files`);

  // Random-ish id used only for naming the bundle; not security-sensitive.
  const bundleId = createHash("sha256")
    .update(Date.now().toString() + Math.random().toString())
    .digest("hex")
    .slice(0, 16);

  const manifest = {
    bundle_id: bundleId,
    workspace_path: workspacePath,
    logical_repo_id: computeLogicalRepoId(workspacePath),
    // Monotonically increasing per wall clock; server uses it to order deltas.
    sequence_number: Date.now(),
    file_count: files.length,
    files: files.map((f) => ({
      path: f.path,
      size: f.size,
      hash: computeFileHash(f.fullPath),
    })),
    created_at: new Date().toISOString(),
  };

  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "ctxce-"));
  const bundlePath = path.join(tmpDir, `bundle-${bundleId}.tar.gz`);

  try {
    await tarCreate(
      {
        gzip: true,
        file: bundlePath,
        cwd: workspacePath,
        portable: true, // omit owner/mtime metadata for reproducibility
      },
      files.map((f) => f.path)
    );

    log(`[uploader] Created bundle: ${fs.statSync(bundlePath).size} bytes`);

    return {
      bundlePath,
      manifest,
      tmpDir,
      cleanup: () => {
        try {
          fs.rmSync(tmpDir, { recursive: true, force: true });
        } catch {
          // Best-effort cleanup; a leftover temp dir is harmless.
        }
      },
    };
  } catch (err) {
    fs.rmSync(tmpDir, { recursive: true, force: true });
    throw err;
  }
}
180
+
181
/**
 * Upload a bundle tarball plus manifest fields to the delta-upload endpoint
 * as a hand-rolled multipart/form-data POST.
 *
 * @param {string} bundlePath - Path to the .tar.gz produced by createBundle.
 * @param {object} manifest - Bundle manifest (workspace_path, logical_repo_id, sequence_number).
 * @param {string} uploadEndpoint - Base URL of the upload service.
 * @param {string} sessionId - Session identifier sent as the `session` field.
 * @param {{log?: Function, orgId?: string, orgSlug?: string}} [options]
 * @returns {Promise<{success: boolean, result?: object, error?: string}>}
 */
export async function uploadBundle(bundlePath, manifest, uploadEndpoint, sessionId, options = {}) {
  const { log = console.error, orgId, orgSlug } = options;

  const bundleData = fs.readFileSync(bundlePath);
  const boundary = `----ctxce${Date.now()}${Math.random().toString(36).slice(2)}`;

  const parts = [];

  // File part first, then the plain text form fields.
  parts.push(
    `--${boundary}\r\n`,
    `Content-Disposition: form-data; name="bundle"; filename="bundle.tar.gz"\r\n`,
    `Content-Type: application/gzip\r\n\r\n`,
  );
  parts.push(bundleData);
  parts.push(`\r\n`);

  const fields = {
    workspace_path: manifest.workspace_path,
    collection_name: manifest.logical_repo_id,
    sequence_number: String(manifest.sequence_number),
    force: "true",
    source_path: manifest.workspace_path,
    logical_repo_id: manifest.logical_repo_id,
    session: sessionId,
  };

  if (orgId) fields.org_id = orgId;
  if (orgSlug) fields.org_slug = orgSlug;

  for (const [key, value] of Object.entries(fields)) {
    if (value) {
      parts.push(
        `--${boundary}\r\n`,
        `Content-Disposition: form-data; name="${key}"\r\n\r\n`,
        `${value}\r\n`,
      );
    }
  }

  parts.push(`--${boundary}--\r\n`);

  const body = Buffer.concat(parts.map((p) => (typeof p === "string" ? Buffer.from(p) : p)));

  const url = `${uploadEndpoint}/api/v1/delta/upload`;
  log(`[uploader] Uploading to ${url}...`);

  const resp = await fetch(url, {
    method: "POST",
    headers: {
      "Content-Type": `multipart/form-data; boundary=${boundary}`,
      "Content-Length": String(body.length),
    },
    body,
  });

  // BUG FIX: a Response body may only be consumed once. The previous code
  // called resp.json() and, when parsing failed, called resp.text() on the
  // already-consumed stream, which throws "TypeError: Body is unusable"
  // instead of reporting the server's error text. Read the body once as
  // text, then parse it ourselves.
  const rawBody = await resp.text();
  let result;
  try {
    result = JSON.parse(rawBody);
  } catch {
    result = { success: false, error: { message: rawBody } };
  }

  if (!resp.ok || !result.success) {
    const errorMsg = result.error?.message || result.detail || `HTTP ${resp.status}`;
    log(`[uploader] Upload failed: ${errorMsg}`);
    return { success: false, error: errorMsg };
  }

  log(`[uploader] Upload successful!`);
  return { success: true, result };
}
253
+
254
/**
 * High-level entry point: bundle the workspace and upload the result,
 * always removing the temporary bundle directory afterwards.
 *
 * @param {string} workspacePath - Absolute workspace root.
 * @param {string} uploadEndpoint - Base URL of the upload service.
 * @param {string} sessionId - Session identifier for the upload.
 * @param {{log?: Function, orgId?: string, orgSlug?: string}} [options]
 * @returns {Promise<{success: boolean, result?: object, error?: string}>}
 */
export async function indexWorkspace(workspacePath, uploadEndpoint, sessionId, options = {}) {
  const { log = console.error, orgId, orgSlug } = options;

  log(`[uploader] Scanning workspace: ${workspacePath}`);

  const bundle = await createBundle(workspacePath, { log });
  if (!bundle) {
    return { success: false, error: "No code files found" };
  }

  try {
    return await uploadBundle(bundle.bundlePath, bundle.manifest, uploadEndpoint, sessionId, {
      log,
      orgId,
      orgSlug,
    });
  } finally {
    bundle.cleanup();
  }
}