@context-engine-bridge/context-engine-mcp-bridge 0.0.24 → 0.0.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@context-engine-bridge/context-engine-mcp-bridge",
3
- "version": "0.0.24",
3
+ "version": "0.0.27",
4
4
  "description": "Context Engine MCP bridge (http/stdio proxy combining indexer + memory servers)",
5
5
  "bin": {
6
6
  "ctxce": "bin/ctxce.js",
@@ -18,6 +18,8 @@
18
18
  },
19
19
  "dependencies": {
20
20
  "@modelcontextprotocol/sdk": "^1.24.3",
21
+ "ignore": "^7.0.5",
22
+ "tar": "^7.5.9",
21
23
  "zod": "^3.25.0"
22
24
  },
23
25
  "devDependencies": {
package/src/connectCli.js CHANGED
@@ -1,8 +1,8 @@
1
1
  import process from "node:process";
2
2
  import path from "node:path";
3
3
  import fs from "node:fs";
4
- import { execSync, spawn } from "node:child_process";
5
4
  import { saveAuthEntry } from "./authConfig.js";
5
+ import { indexWorkspace } from "./uploader.js";
6
6
 
7
7
  const SAAS_ENDPOINTS = {
8
8
  uploadEndpoint: "https://dev.context-engine.ai/upload",
@@ -141,114 +141,31 @@ async function authenticateWithApiKey(apiKey) {
141
141
  return entry;
142
142
  }
143
143
 
144
- function findUploadClient() {
145
- const candidates = [
146
- path.resolve(process.cwd(), "scripts", "standalone_upload_client.py"),
147
- path.resolve(process.cwd(), "..", "scripts", "standalone_upload_client.py"),
148
- path.resolve(__dirname, "..", "..", "scripts", "standalone_upload_client.py"),
149
- ];
150
-
151
- for (const candidate of candidates) {
152
- if (fs.existsSync(candidate)) {
153
- return candidate;
154
- }
155
- }
156
-
157
- return null;
158
- }
159
-
160
- function detectPython() {
161
- for (const cmd of ["python3", "python"]) {
162
- try {
163
- execSync(`${cmd} --version`, { stdio: "ignore" });
164
- return cmd;
165
- } catch {
166
- }
167
- }
168
- return null;
169
- }
170
-
171
- async function runUploadClient(workspace, sessionId, uploadClient, python) {
172
- const env = {
173
- ...process.env,
174
- REMOTE_UPLOAD_ENDPOINT: SAAS_ENDPOINTS.uploadEndpoint,
175
- CTXCE_AUTH_BACKEND_URL: SAAS_ENDPOINTS.authBackendUrl,
176
- CTXCE_SESSION_ID: sessionId,
177
- HOST_ROOT: workspace,
178
- CONTAINER_ROOT: "/work",
179
- };
180
-
181
- return new Promise((resolve) => {
182
- const args = [
183
- uploadClient,
184
- "--path", workspace,
185
- "--endpoint", SAAS_ENDPOINTS.uploadEndpoint,
186
- "--force",
187
- ];
188
-
189
- const proc = spawn(python, args, {
190
- env,
191
- stdio: ["ignore", "pipe", "pipe"],
192
- cwd: workspace,
193
- });
194
-
195
- proc.stdout.on("data", (data) => {
196
- const line = data.toString().trim();
197
- if (line) {
198
- console.error(`[upload] ${line}`);
199
- }
200
- });
201
-
202
- proc.stderr.on("data", (data) => {
203
- const line = data.toString().trim();
204
- if (line) {
205
- console.error(`[upload] ${line}`);
206
- }
207
- });
208
-
209
- proc.on("close", (code) => {
210
- resolve(code === 0);
211
- });
212
-
213
- proc.on("error", (err) => {
214
- console.error(`[ctxce] Upload process error: ${err}`);
215
- resolve(false);
216
- });
217
- });
218
- }
219
-
220
- async function triggerIndexing(workspace, sessionId) {
144
+ async function triggerIndexing(workspace, sessionId, authEntry) {
221
145
  console.error("[ctxce] Starting workspace indexing...");
222
146
  console.error(`[ctxce] Workspace: ${workspace}`);
223
147
  console.error(`[ctxce] Endpoint: ${SAAS_ENDPOINTS.uploadEndpoint}`);
224
148
 
225
- const uploadClient = findUploadClient();
226
- const python = detectPython();
227
-
228
- if (!uploadClient || !python) {
229
- console.error("[ctxce] Python upload client not available.");
230
- console.error("[ctxce] Install context-engine Python package or use VS Code extension.");
231
- return false;
232
- }
149
+ const result = await indexWorkspace(
150
+ workspace,
151
+ SAAS_ENDPOINTS.uploadEndpoint,
152
+ sessionId,
153
+ {
154
+ log: console.error,
155
+ orgId: authEntry?.org_id,
156
+ orgSlug: authEntry?.org_slug,
157
+ }
158
+ );
233
159
 
234
- return await runUploadClient(workspace, sessionId, uploadClient, python);
160
+ return result.success;
235
161
  }
236
162
 
237
- function startWatcher(workspace, sessionId, intervalMs) {
238
- const uploadClient = findUploadClient();
239
- const python = detectPython();
240
-
241
- if (!uploadClient || !python) {
242
- console.error("[ctxce] Cannot start watcher: Python upload client not available.");
243
- return null;
244
- }
245
-
163
+ function startWatcher(workspace, sessionId, authEntry, intervalMs) {
246
164
  console.error(`[ctxce] Starting file watcher (sync every ${intervalMs / 1000}s)...`);
247
165
  console.error("[ctxce] Press Ctrl+C to stop.");
248
166
 
249
167
  let isRunning = false;
250
168
  let pendingSync = false;
251
- let lastSyncTime = Date.now();
252
169
 
253
170
  const fileHashes = new Map();
254
171
 
@@ -325,18 +242,26 @@ function startWatcher(workspace, sessionId, intervalMs) {
325
242
  console.error(`[ctxce] [${now}] Syncing changes...`);
326
243
 
327
244
  try {
328
- const success = await runUploadClient(workspace, sessionId, uploadClient, python);
329
- if (success) {
245
+ const result = await indexWorkspace(
246
+ workspace,
247
+ SAAS_ENDPOINTS.uploadEndpoint,
248
+ sessionId,
249
+ {
250
+ log: console.error,
251
+ orgId: authEntry?.org_id,
252
+ orgSlug: authEntry?.org_slug,
253
+ }
254
+ );
255
+ if (result.success) {
330
256
  console.error(`[ctxce] [${now}] Sync complete.`);
331
257
  } else {
332
- console.error(`[ctxce] [${now}] Sync failed.`);
258
+ console.error(`[ctxce] [${now}] Sync failed: ${result.error}`);
333
259
  }
334
260
  } catch (err) {
335
261
  console.error(`[ctxce] [${now}] Sync error: ${err}`);
336
262
  }
337
263
 
338
264
  isRunning = false;
339
- lastSyncTime = Date.now();
340
265
 
341
266
  if (pendingSync) {
342
267
  pendingSync = false;
@@ -424,7 +349,7 @@ export async function runConnectCommand(args) {
424
349
  }
425
350
 
426
351
  if (!skipIndex) {
427
- const indexed = await triggerIndexing(resolvedWorkspace, authEntry.sessionId);
352
+ const indexed = await triggerIndexing(resolvedWorkspace, authEntry.sessionId, authEntry);
428
353
  if (!indexed) {
429
354
  console.error("[ctxce] Initial indexing failed, but will continue.");
430
355
  }
@@ -436,5 +361,5 @@ export async function runConnectCommand(args) {
436
361
  }
437
362
 
438
363
  printSuccess();
439
- startWatcher(resolvedWorkspace, authEntry.sessionId, watchInterval);
364
+ startWatcher(resolvedWorkspace, authEntry.sessionId, authEntry, watchInterval);
440
365
  }
@@ -0,0 +1,276 @@
1
+ import fs from "node:fs";
2
+ import os from "node:os";
3
+ import path from "node:path";
4
+ import { createHash } from "node:crypto";
5
+ import { create as tarCreate } from "tar";
6
+ import ignore from "ignore";
7
+
8
// File extensions treated as indexable source/config/docs.
// NOTE: isCodeFile() lowercases the extension before looking it up here,
// so every entry must be lowercase (the former ".R" entry was dead code —
// ".r" already matches R files after lowercasing, so it has been removed).
const CODE_EXTS = new Set([
  ".py", ".js", ".ts", ".tsx", ".jsx", ".java", ".go", ".rs", ".rb", ".php",
  ".c", ".h", ".cpp", ".cc", ".hpp", ".cs", ".csx", ".kt", ".swift", ".scala",
  ".sh", ".ps1", ".psm1", ".psd1", ".pl", ".lua",
  ".sql", ".md", ".yml", ".yaml", ".toml", ".ini", ".cfg", ".conf", ".json", ".xml",
  ".csproj", ".config", ".resx",
  ".html", ".htm", ".css", ".scss", ".sass", ".less", ".vue", ".svelte", ".cshtml", ".razor",
  ".tf", ".tfvars", ".hcl", ".dockerfile",
  ".ex", ".exs",
  ".elm", ".dart", ".r", ".m", ".cljs", ".clj", ".hs", ".ml",
  ".zig", ".nim", ".v", ".sv", ".vhdl", ".asm", ".s",
]);

// Well-known files that carry no extension but are still code/config.
// Compared against the lowercased basename in isCodeFile().
const EXTENSIONLESS_FILES = new Set([
  "dockerfile", "makefile", "gemfile", "rakefile", "procfile", "vagrantfile",
  "jenkinsfile", ".gitignore", ".dockerignore", ".editorconfig",
]);

// Baseline ignore patterns applied on top of the workspace's .gitignore:
// VCS metadata, dependency/build output, compiled artifacts, media/archives.
const DEFAULT_IGNORES = [
  ".git", "node_modules", "__pycache__", ".venv", "venv", ".env",
  "dist", "build", ".next", ".nuxt", "coverage", ".nyc_output",
  "*.pyc", "*.pyo", "*.so", "*.dylib", "*.dll", "*.exe",
  "*.jpg", "*.jpeg", "*.png", "*.gif", "*.ico", "*.svg", "*.webp",
  "*.mp3", "*.mp4", "*.wav", "*.avi", "*.mov",
  "*.zip", "*.tar", "*.gz", "*.rar", "*.7z",
  "*.pdf", "*.doc", "*.docx", "*.xls", "*.xlsx",
  ".DS_Store", "Thumbs.db",
];

// Files at or above this size (10 MiB) are skipped during the scan.
const MAX_FILE_SIZE = 10 * 1024 * 1024;
38
+
39
// Build an ignore matcher seeded with the built-in defaults plus the
// workspace's own .gitignore, when one exists and is readable.
function loadGitignore(workspacePath) {
  const matcher = ignore().add(DEFAULT_IGNORES);

  const gitignoreFile = path.join(workspacePath, ".gitignore");
  if (fs.existsSync(gitignoreFile)) {
    try {
      matcher.add(fs.readFileSync(gitignoreFile, "utf-8"));
    } catch {
      // Best effort: an unreadable .gitignore falls back to defaults only.
    }
  }

  return matcher;
}
54
+
55
// Decide whether a file should be indexed: either its (lowercased)
// extension is in CODE_EXTS, or it is a well-known extensionless file
// such as Makefile, or any Dockerfile variant (Dockerfile.prod, ...).
function isCodeFile(filePath) {
  if (CODE_EXTS.has(path.extname(filePath).toLowerCase())) {
    return true;
  }

  const base = path.basename(filePath).toLowerCase();
  return EXTENSIONLESS_FILES.has(base) || base.startsWith("dockerfile");
}
65
+
66
// Return the first 16 hex characters of the file's SHA-256 digest,
// used as a compact content fingerprint in the bundle manifest.
function computeFileHash(filePath) {
  const hasher = createHash("sha256");
  hasher.update(fs.readFileSync(filePath));
  return hasher.digest("hex").substring(0, 16);
}
70
+
71
// Derive a stable 12-hex-char repo identifier from the workspace path.
// Backslashes are normalized to forward slashes and trailing slashes are
// stripped, so equivalent spellings of one path map to the same id.
function computeLogicalRepoId(workspacePath) {
  let normalized = workspacePath.replace(/\\/g, "/");
  while (normalized.endsWith("/")) {
    normalized = normalized.slice(0, -1);
  }
  return createHash("sha256").update(normalized).digest("hex").slice(0, 12);
}
75
+
76
// Recursively collect indexable files under workspacePath, honouring the
// ignore matcher `ig`. Returns records of the form
// { path (posix-style relative), fullPath, size, mtime } for regular files
// that pass isCodeFile() and fall within the size limits.
function scanWorkspace(workspacePath, ig) {
  const collected = [];

  const walk = (dir, rel = "") => {
    let entries;
    try {
      entries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      // Unreadable directory: skip it rather than aborting the scan.
      return;
    }

    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);
      const relPath = rel === "" ? entry.name : `${rel}/${entry.name}`;

      // Test both the plain path and the directory form so patterns
      // written as "dist/" also match.
      if (ig.ignores(relPath) || ig.ignores(`${relPath}/`)) {
        continue;
      }

      if (entry.isDirectory()) {
        walk(fullPath, relPath);
        continue;
      }

      if (!entry.isFile() || !isCodeFile(entry.name)) {
        continue;
      }

      try {
        const stat = fs.statSync(fullPath);
        // Skip empty files and anything at or above the size cap.
        if (stat.size > 0 && stat.size < MAX_FILE_SIZE) {
          collected.push({
            path: relPath,
            fullPath,
            size: stat.size,
            mtime: stat.mtimeMs,
          });
        }
      } catch {
        // File vanished between readdir and stat; ignore it.
      }
    }
  };

  walk(workspacePath);
  return collected;
}
117
+
118
/**
 * Scan the workspace and pack all indexable files into a gzipped tarball
 * in a fresh temp directory, alongside a manifest describing the bundle.
 *
 * @param {string} workspacePath - Root directory to scan.
 * @param {object} [options] - { log } logger (defaults to console.error).
 * @returns {Promise<{bundlePath: string, manifest: object, tmpDir: string, cleanup: Function}|null>}
 *   null when no code files are found; otherwise bundle info plus a
 *   `cleanup()` that removes the temp directory (caller must invoke it).
 * @throws Propagates tar-creation errors after removing the temp directory.
 */
export async function createBundle(workspacePath, options = {}) {
  const { log = console.error } = options;

  const ig = loadGitignore(workspacePath);
  const files = scanWorkspace(workspacePath, ig);

  if (files.length === 0) {
    log("[uploader] No code files found in workspace");
    return null;
  }

  log(`[uploader] Found ${files.length} code files`);

  // Random-ish per-invocation id (hash of timestamp + Math.random).
  // NOTE(review): not cryptographically strong — presumably only used as a
  // filename/correlation id; confirm nothing security-relevant depends on it.
  const bundleId = createHash("sha256").update(Date.now().toString() + Math.random().toString()).digest("hex").slice(0, 16);

  const manifest = {
    bundle_id: bundleId,
    workspace_path: workspacePath,
    logical_repo_id: computeLogicalRepoId(workspacePath),
    // Date.now() gives a monotonically increasing sequence across runs.
    sequence_number: Date.now(),
    file_count: files.length,
    files: files.map(f => ({
      path: f.path,
      size: f.size,
      hash: computeFileHash(f.fullPath),
    })),
    created_at: new Date().toISOString(),
  };

  // Dedicated temp dir so cleanup can remove everything in one rmSync.
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "ctxce-"));
  const bundlePath = path.join(tmpDir, `bundle-${bundleId}.tar.gz`);

  try {
    // Paths are relative to cwd=workspacePath; portable strips OS-specific
    // metadata so the archive is reproducible across platforms.
    await tarCreate(
      {
        gzip: true,
        file: bundlePath,
        cwd: workspacePath,
        portable: true,
      },
      files.map(f => f.path)
    );

    const bundleSize = fs.statSync(bundlePath).size;
    log(`[uploader] Created bundle: ${bundleSize} bytes`);

    return {
      bundlePath,
      manifest,
      tmpDir,
      // Best-effort removal of the temp dir; errors are intentionally ignored.
      cleanup: () => {
        try {
          fs.rmSync(tmpDir, { recursive: true, force: true });
        } catch {
        }
      },
    };
  } catch (err) {
    // Tar failed: remove the temp dir before propagating the error.
    fs.rmSync(tmpDir, { recursive: true, force: true });
    throw err;
  }
}
180
+
181
/**
 * Upload a bundle tarball plus manifest metadata to the delta-upload
 * endpoint as multipart/form-data.
 *
 * @param {string} bundlePath - Path to the .tar.gz produced by createBundle().
 * @param {object} manifest - Bundle manifest (workspace_path, logical_repo_id, sequence_number).
 * @param {string} uploadEndpoint - Base URL of the upload service.
 * @param {string} sessionId - Session identifier, sent as the "session" form field.
 * @param {object} [options] - { log, orgId, orgSlug }.
 * @returns {Promise<{success: boolean, result?: object, error?: string}>}
 */
export async function uploadBundle(bundlePath, manifest, uploadEndpoint, sessionId, options = {}) {
  const { log = console.error, orgId, orgSlug } = options;

  const bundleData = fs.readFileSync(bundlePath);
  // Hand-rolled multipart body with a randomized per-request boundary.
  // NOTE(review): Node 18+ FormData/Blob could replace this, but the manual
  // encoding is kept to preserve the exact part ordering/headers sent today.
  const boundary = `----ctxce${Date.now()}${Math.random().toString(36).slice(2)}`;

  const parts = [];

  // File part first: the raw gzipped tarball.
  parts.push(
    `--${boundary}\r\n`,
    `Content-Disposition: form-data; name="bundle"; filename="bundle.tar.gz"\r\n`,
    `Content-Type: application/gzip\r\n\r\n`,
  );
  parts.push(bundleData);
  parts.push(`\r\n`);

  // Plain-text metadata fields; falsy values are skipped below.
  const fields = {
    workspace_path: manifest.workspace_path,
    collection_name: manifest.logical_repo_id,
    sequence_number: String(manifest.sequence_number),
    force: "true",
    source_path: manifest.workspace_path,
    logical_repo_id: manifest.logical_repo_id,
    session: sessionId,
  };

  if (orgId) fields.org_id = orgId;
  if (orgSlug) fields.org_slug = orgSlug;

  for (const [key, value] of Object.entries(fields)) {
    if (value) {
      parts.push(
        `--${boundary}\r\n`,
        `Content-Disposition: form-data; name="${key}"\r\n\r\n`,
        `${value}\r\n`,
      );
    }
  }

  parts.push(`--${boundary}--\r\n`);

  const body = Buffer.concat(parts.map(p => typeof p === "string" ? Buffer.from(p) : p));

  const url = `${uploadEndpoint}/api/v1/delta/upload`;
  log(`[uploader] Uploading to ${url}...`);

  const resp = await fetch(url, {
    method: "POST",
    headers: {
      "Content-Type": `multipart/form-data; boundary=${boundary}`,
      "Content-Length": String(body.length),
    },
    body,
  });

  // Read the response body ONCE as text, then attempt JSON parsing.
  // (The previous resp.json()-then-resp.text() fallback was broken: a fetch
  // body is single-use, so calling resp.text() after a failed resp.json()
  // throws "Body is unusable" instead of surfacing the server's error.)
  let result;
  const raw = await resp.text();
  try {
    result = JSON.parse(raw);
  } catch {
    // Non-JSON response (proxy error page, plain-text error): report as-is.
    result = { success: false, error: { message: raw } };
  }

  if (!resp.ok || !result.success) {
    const errorMsg = result.error?.message || result.detail || `HTTP ${resp.status}`;
    log(`[uploader] Upload failed: ${errorMsg}`);
    return { success: false, error: errorMsg };
  }

  log(`[uploader] Upload successful!`);
  return { success: true, result };
}
253
+
254
/**
 * Bundle the workspace and upload it in a single step.
 *
 * @param {string} workspacePath - Root directory to index.
 * @param {string} uploadEndpoint - Base URL of the upload service.
 * @param {string} sessionId - Session identifier forwarded to the upload.
 * @param {object} [options] - { log, orgId, orgSlug }.
 * @returns {Promise<{success: boolean, result?: object, error?: string}>}
 *   The temporary bundle directory is always cleaned up, even on failure.
 */
export async function indexWorkspace(workspacePath, uploadEndpoint, sessionId, options = {}) {
  const { log = console.error, orgId, orgSlug } = options;

  log(`[uploader] Scanning workspace: ${workspacePath}`);

  const bundle = await createBundle(workspacePath, { log });
  if (bundle === null) {
    return { success: false, error: "No code files found" };
  }

  try {
    return await uploadBundle(bundle.bundlePath, bundle.manifest, uploadEndpoint, sessionId, {
      log,
      orgId,
      orgSlug,
    });
  } finally {
    bundle.cleanup();
  }
}