@context-engine-bridge/context-engine-mcp-bridge 0.0.73 → 0.0.74

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,11 @@
1
+ {
2
+ "enableAllProjectMcpServers": true,
3
+ "enabledMcpjsonServers": [
4
+ "context-engine"
5
+ ],
6
+ "permissions": {
7
+ "allow": [
8
+ "mcp__context-engine__repo_search"
9
+ ]
10
+ }
11
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@context-engine-bridge/context-engine-mcp-bridge",
3
- "version": "0.0.73",
3
+ "version": "0.0.74",
4
4
  "description": "Context Engine MCP bridge (http/stdio proxy combining indexer + memory servers)",
5
5
  "bin": {
6
6
  "ctxce": "bin/ctxce.js",
package/src/authCli.js CHANGED
@@ -1,6 +1,11 @@
1
1
  import process from "node:process";
2
2
  import { loadAuthEntry, saveAuthEntry, deleteAuthEntry, loadAnyAuthEntry } from "./authConfig.js";
3
3
 
4
+ const SAAS_ENDPOINTS = {
5
+ uploadEndpoint: "https://dev.context-engine.ai/upload",
6
+ authBackendUrl: "https://dev.context-engine.ai",
7
+ };
8
+
4
9
  function parseAuthArgs(args) {
5
10
  let backendUrl = process.env.CTXCE_AUTH_BACKEND_URL || "";
6
11
  let token = process.env.CTXCE_AUTH_TOKEN || "";
@@ -49,8 +54,12 @@ function getBackendUrl(backendUrl) {
49
54
  }
50
55
 
51
56
/**
 * Resolve the default upload backend URL.
 * Resolution order: explicit auth backend env var, then upload-endpoint
 * overrides, and finally the hosted SaaS default. The SaaS fallback is only
 * read when no env var is set (lazy, via short-circuit).
 * @returns {string} trimmed backend base URL
 */
function getDefaultUploadBackend() {
  const fromEnv =
    process.env.CTXCE_AUTH_BACKEND_URL
    || process.env.CTXCE_UPLOAD_ENDPOINT
    || process.env.UPLOAD_ENDPOINT;
  const backend = fromEnv || SAAS_ENDPOINTS.authBackendUrl;
  return backend.trim();
}
55
64
 
56
65
  function requireBackendUrl(backendUrl) {
package/src/cli.js CHANGED
@@ -7,6 +7,11 @@ import { runMcpServer, runHttpMcpServer } from "./mcpServer.js";
7
7
  import { runAuthCommand } from "./authCli.js";
8
8
  import { runConnectCommand } from "./connectCli.js";
9
9
 
10
// Hosted SaaS MCP endpoints used when CTXCE_INDEXER_URL / CTXCE_MEMORY_URL
// are not set. Frozen so shared config cannot be mutated at runtime.
const SAAS_ENDPOINTS = Object.freeze({
  mcpIndexerUrl: "https://dev.context-engine.ai/indexer/mcp",
  mcpMemoryUrl: "https://dev.context-engine.ai/memory/mcp",
});
14
+
10
15
  export async function runCli() {
11
16
  const argv = process.argv.slice(2);
12
17
  const cmd = argv[0];
@@ -27,8 +32,8 @@ export async function runCli() {
27
32
  if (cmd === "mcp-http-serve") {
28
33
  const args = argv.slice(1);
29
34
  let workspace = process.cwd();
30
- let indexerUrl = process.env.CTXCE_INDEXER_URL || "http://localhost:8003/mcp";
31
- let memoryUrl = process.env.CTXCE_MEMORY_URL || null;
35
+ let indexerUrl = process.env.CTXCE_INDEXER_URL || SAAS_ENDPOINTS.mcpIndexerUrl;
36
+ let memoryUrl = process.env.CTXCE_MEMORY_URL || SAAS_ENDPOINTS.mcpMemoryUrl;
32
37
  let port = Number.parseInt(process.env.CTXCE_HTTP_PORT || "30810", 10) || 30810;
33
38
  let collection = null;
34
39
 
@@ -86,12 +91,12 @@ export async function runCli() {
86
91
  // Minimal flag parsing for PoC: allow passing workspace/root and indexer URL.
87
92
  // Supported flags:
88
93
  // --workspace / --path : workspace root (default: cwd)
89
- // --indexer-url : override MCP indexer URL (default env CTXCE_INDEXER_URL or http://localhost:8003/mcp)
94
+ // --indexer-url : override MCP indexer URL (default env CTXCE_INDEXER_URL or SaaS endpoint)
90
95
  // --collection : collection name to use for MCP calls
91
96
  const args = argv.slice(1);
92
97
  let workspace = process.cwd();
93
- let indexerUrl = process.env.CTXCE_INDEXER_URL || "http://localhost:8003/mcp";
94
- let memoryUrl = process.env.CTXCE_MEMORY_URL || null;
98
+ let indexerUrl = process.env.CTXCE_INDEXER_URL || SAAS_ENDPOINTS.mcpIndexerUrl;
99
+ let memoryUrl = process.env.CTXCE_MEMORY_URL || SAAS_ENDPOINTS.mcpMemoryUrl;
95
100
  let collection = null;
96
101
 
97
102
  for (let i = 0; i < args.length; i += 1) {
@@ -198,7 +198,7 @@ export function getLoginPage(redirectUri, clientId, state, codeChallenge, codeCh
198
198
  <form id="loginForm">
199
199
  <div class="form-group">
200
200
  <label>Backend URL</label>
201
- <input type="url" id="backendUrl" placeholder="http://localhost:8004" required>
201
+ <input type="url" id="backendUrl" placeholder="https://dev.context-engine.ai" required>
202
202
  </div>
203
203
  <div class="form-group">
204
204
  <label>Username (optional)</label>
package/src/uploader.js CHANGED
@@ -2,6 +2,7 @@ import fs from "node:fs";
2
2
  import os from "node:os";
3
3
  import path from "node:path";
4
4
  import { createHash } from "node:crypto";
5
+ import { execFileSync } from "node:child_process";
5
6
  import { create as tarCreate } from "tar";
6
7
  import ignore from "ignore";
7
8
 
@@ -36,6 +37,29 @@ const DEFAULT_IGNORES = [
36
37
 
37
38
  const MAX_FILE_SIZE = 10 * 1024 * 1024;
38
39
 
40
/**
 * Normalize a repository name into a safe lowercase slug: allowed chars are
 * [a-z0-9_.-], runs of other chars become a single hyphen, edge hyphens are
 * stripped. Falls back to "workspace" when nothing usable remains.
 * @param {string} repoName raw repo name (may be empty/nullish)
 * @returns {string} sanitized slug, never empty
 */
function sanitizeRepoName(repoName) {
  const slug = String(repoName || "")
    .toLowerCase()
    .trim()
    .replace(/[^a-z0-9_.-]+/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "");
  return slug || "workspace";
}

/**
 * Derive a repo name from the last path segment of the workspace directory.
 * @param {string} workspacePath workspace root (relative or absolute)
 * @returns {string} basename of the resolved path, or "workspace" on error
 */
function extractRepoNameFromPath(workspacePath) {
  try {
    const resolved = path.resolve(workspacePath);
    return path.basename(resolved);
  } catch {
    // path.resolve throws on non-string input; fall back to a safe default.
    return "workspace";
  }
}

/**
 * Build a collection name: sanitized slug plus an 8-hex-char sha256 suffix of
 * the raw name, so distinct raw names that sanitize identically stay separate.
 * @param {string} repoName raw repo name
 * @returns {string} "<slug>-<hash8>"
 */
function getCollectionName(repoName) {
  const suffix = createHash("sha256")
    .update(String(repoName || ""))
    .digest("hex")
    .slice(0, 8);
  return `${sanitizeRepoName(repoName)}-${suffix}`;
}
62
+
39
63
  export function loadGitignore(workspacePath) {
40
64
  const ig = ignore();
41
65
  ig.add(DEFAULT_IGNORES);
@@ -68,9 +92,73 @@ function computeFileHash(filePath) {
68
92
  return createHash("sha256").update(content).digest("hex").slice(0, 16);
69
93
  }
70
94
 
95
/**
 * Normalize a git remote URL to "host[:port]/owner/repo": lowercase host,
 * credentials dropped, surrounding slashes and a trailing ".git" removed.
 * Supports scp-like syntax (git@host:owner/repo.git) and real URLs.
 * Returns "" when the remote cannot be parsed.
 * @param {string} rawRemote remote URL as reported by git config
 * @returns {string} normalized identifier or ""
 */
function normalizeGitRemoteUrl(rawRemote) {
  const value = String(rawRemote || "").trim();
  if (value === "") return "";

  // Shared cleanup for the repo-path portion of either syntax.
  const cleanRepoPath = (p) =>
    String(p || "").trim().replace(/^\/+|\/+$/g, "").replace(/\.git$/i, "");

  // scp-like form has no scheme separator: [user@]host:path
  if (!value.includes("://")) {
    const scpMatch = /^(?:([^@/\s]+)@)?([^:/\s]+):(.+)$/.exec(value);
    if (scpMatch) {
      const host = String(scpMatch[2] || "").trim().toLowerCase();
      const repoPath = cleanRepoPath(scpMatch[3]);
      if (host && repoPath) return `${host}/${repoPath}`;
    }
  }

  try {
    const parsed = new URL(value);
    const host = String(parsed.hostname || "").trim().toLowerCase();
    const repoPath = cleanRepoPath(parsed.pathname);
    const portSuffix = parsed.port ? `:${parsed.port}` : "";
    if (host && repoPath) return `${host}${portSuffix}/${repoPath}`;
  } catch {
    // Not a parseable URL; fall through to the empty result.
  }

  return "";
}
121
+
122
/**
 * Compute a stable logical identity for a workspace, preferring git metadata.
 * Resolution order:
 *   1. normalized remote.origin URL  -> { id: "git:<sha1-16>", source: "remote_origin" }
 *   2. git common dir (worktrees share it) -> { id: "git:<sha1-16>", source: "git_common_dir" }
 *   3. resolved filesystem path      -> { id: "fs:<sha1-16>", source: "filesystem_path" }
 * git failures (no repo, no binary, bad cwd) are swallowed so the function
 * always returns an identity.
 * @param {string} workspacePath workspace root (relative or absolute)
 * @returns {{ id: string, source: string }}
 */
export function computeLogicalRepoIdentity(workspacePath) {
  const resolved = path.resolve(workspacePath);

  const runGit = (args) =>
    execFileSync("git", args, {
      cwd: resolved,
      encoding: "utf8",
      stdio: ["pipe", "pipe", "pipe"],
    }).trim();
  const shortSha1 = (input) =>
    createHash("sha1").update(input).digest("hex").slice(0, 16);

  // 1. Remote origin gives the same id across clones of one repository.
  try {
    const normalizedRemote = normalizeGitRemoteUrl(runGit(["config", "--get", "remote.origin.url"]));
    if (normalizedRemote) {
      return { id: `git:${shortSha1(normalizedRemote)}`, source: "remote_origin" };
    }
  } catch {
    // No remote / not a repo / git unavailable — try the next strategy.
  }

  // 2. The common git dir is shared by all worktrees of a local repo.
  try {
    let commonDir = runGit(["rev-parse", "--git-common-dir"]);
    if (commonDir) {
      if (!path.isAbsolute(commonDir)) {
        commonDir = path.resolve(resolved, commonDir);
      }
      return { id: `git:${shortSha1(commonDir)}`, source: "git_common_dir" };
    }
  } catch {
    // Not a git repo — fall back to the filesystem path.
  }

  // 3. Last resort: hash the normalized absolute path.
  const normalizedPath = resolved.replace(/\\/g, "/").replace(/\/+$/, "");
  return { id: `fs:${shortSha1(normalizedPath)}`, source: "filesystem_path" };
}
159
+
71
160
// Backward-compatible shim: existing callers only need the id string from the
// richer identity object.
function computeLogicalRepoId(workspacePath) {
  const { id } = computeLogicalRepoIdentity(workspacePath);
  return id;
}
75
163
 
76
164
  function scanWorkspace(workspacePath, ig) {
@@ -130,6 +218,7 @@ export async function createBundle(workspacePath, options = {}) {
130
218
 
131
219
  const bundleId = createHash("sha256").update(Date.now().toString() + Math.random().toString()).digest("hex").slice(0, 16);
132
220
  const createdAt = new Date().toISOString();
221
+ const repoName = extractRepoNameFromPath(workspacePath);
133
222
 
134
223
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "ctxce-"));
135
224
  const bundleDir = path.join(tmpDir, bundleId);
@@ -164,7 +253,7 @@ export async function createBundle(workspacePath, options = {}) {
164
253
  version: "1.0",
165
254
  bundle_id: bundleId,
166
255
  workspace_path: workspacePath,
167
- collection_name: computeLogicalRepoId(workspacePath),
256
+ collection_name: getCollectionName(repoName),
168
257
  created_at: createdAt,
169
258
  sequence_number: null,
170
259
  parent_sequence: null,
@@ -228,13 +317,16 @@ export async function uploadBundle(bundlePath, manifest, uploadEndpoint, session
228
317
  const boundary = `----ctxce${Date.now()}${Math.random().toString(36).slice(2)}`;
229
318
 
230
319
  // Build form fields (small metadata -- kept in memory)
231
- const logicalRepoId = computeLogicalRepoId(manifest.workspace_path);
320
+ const { id: logicalRepoId, source: logicalRepoSource } = computeLogicalRepoIdentity(
321
+ manifest.workspace_path
322
+ );
232
323
  const fields = {
233
324
  workspace_path: manifest.workspace_path,
234
325
  collection_name: manifest.collection_name || logicalRepoId,
235
326
  force: "true",
236
327
  source_path: manifest.workspace_path,
237
328
  logical_repo_id: logicalRepoId,
329
+ logical_repo_source: logicalRepoSource,
238
330
  session: sessionId,
239
331
  };
240
332
  if (orgId) fields.org_id = orgId;
@@ -263,29 +355,16 @@ export async function uploadBundle(bundlePath, manifest, uploadEndpoint, session
263
355
  // This prevents OOM for large repositories (hundreds of MB bundles).
264
356
  const totalLength = filePreamble.length + bundleSize + fileEpilogue.length + fieldsBuffer.length;
265
357
 
266
- const { Readable } = await import('node:stream');
267
- const bodyStream = new Readable({
268
- read() {
269
- // Push preamble
270
- this.push(filePreamble);
271
- // Push file data in chunks via sync read to keep it simple
272
- const CHUNK = 256 * 1024; // 256KB chunks
273
- const fd = fs.openSync(bundlePath, 'r');
274
- try {
275
- const buf = Buffer.allocUnsafe(CHUNK);
276
- let bytesRead;
277
- while ((bytesRead = fs.readSync(fd, buf, 0, CHUNK)) > 0) {
278
- this.push(bytesRead === CHUNK ? buf : buf.subarray(0, bytesRead));
279
- }
280
- } finally {
281
- fs.closeSync(fd);
282
- }
283
- // Push epilogue + fields + close
284
- this.push(fileEpilogue);
285
- this.push(fieldsBuffer);
286
- this.push(null); // EOF
358
+ const { Readable } = await import("node:stream");
359
+ const bodyStream = Readable.from((async function* buildMultipartStream() {
360
+ yield filePreamble;
361
+ const fileStream = fs.createReadStream(bundlePath, { highWaterMark: 256 * 1024 });
362
+ for await (const chunk of fileStream) {
363
+ yield chunk;
287
364
  }
288
- });
365
+ yield fileEpilogue;
366
+ yield fieldsBuffer;
367
+ })());
289
368
 
290
369
  const url = `${uploadEndpoint}/api/v1/delta/upload`;
291
370
  log(`[uploader] Uploading to ${url} (${(bundleSize / 1024).toFixed(0)}KB bundle, streaming)...`);
@@ -0,0 +1,84 @@
1
+ import test from "node:test";
2
+ import assert from "node:assert/strict";
3
+ import fs from "node:fs";
4
+ import os from "node:os";
5
+ import path from "node:path";
6
+ import { execFileSync } from "node:child_process";
7
+
8
+ import { computeLogicalRepoIdentity, uploadBundle } from "../src/uploader.js";
9
+
10
test("computeLogicalRepoIdentity prefers normalized remote origin", async (t) => {
  // Robustness fix: skip (rather than fail) on machines without a git binary,
  // since this test shells out to git.
  try {
    execFileSync("git", ["--version"], { stdio: "ignore" });
  } catch {
    t.skip("git is not installed");
    return;
  }

  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "ctxce-bridge-git-"));
  try {
    // Fresh repo with only a remote configured — no commits needed.
    execFileSync("git", ["init"], { cwd: tmpDir, stdio: "ignore" });
    execFileSync("git", ["remote", "add", "origin", "git@github.com:Context-Engine-AI/Context-Engine.git"], {
      cwd: tmpDir,
      stdio: "ignore",
    });

    // Identity must come from the remote, tagged with its source and a
    // 16-hex-char git-prefixed hash.
    const identity = computeLogicalRepoIdentity(tmpDir);
    assert.equal(identity.source, "remote_origin");
    assert.match(identity.id, /^git:[0-9a-f]{16}$/);
  } finally {
    fs.rmSync(tmpDir, { recursive: true, force: true });
  }
});
26
+
27
test("uploadBundle streams an exact multipart file payload", async () => {
  const sandboxDir = fs.mkdtempSync(path.join(os.tmpdir(), "ctxce-bridge-upload-"));
  const workspacePath = path.join(sandboxDir, "workspace");
  fs.mkdirSync(workspacePath, { recursive: true });

  // Deterministic 4KB payload so the byte-for-byte comparison is meaningful.
  const bundlePath = path.join(sandboxDir, "bundle.tar.gz");
  const expectedBytes = Buffer.from(Array.from({ length: 4096 }, (_, i) => i % 251));
  fs.writeFileSync(bundlePath, expectedBytes);

  // Stub fetch so the streamed multipart body is captured instead of sent.
  const captured = { body: null, boundary: "" };
  const realFetch = global.fetch;
  global.fetch = async (_url, options) => {
    const contentType = String(options.headers["Content-Type"] || "");
    captured.boundary = contentType.split("boundary=")[1] || "";
    const pieces = [];
    for await (const piece of options.body) {
      pieces.push(Buffer.isBuffer(piece) ? piece : Buffer.from(piece));
    }
    captured.body = Buffer.concat(pieces);
    return {
      ok: true,
      async json() {
        return { success: true };
      },
    };
  };

  try {
    const result = await uploadBundle(
      bundlePath,
      {
        workspace_path: workspacePath,
        collection_name: "demo-collection",
      },
      "http://example.invalid",
      "session-token",
      { log: () => {} }
    );
    assert.equal(result.success, true);
    assert.ok(captured.body);
    assert.ok(captured.boundary);

    // Locate the bundle part by its exact multipart header...
    const partHeader = Buffer.from('Content-Disposition: form-data; name="bundle"; filename="bundle.tar.gz"\r\nContent-Type: application/gzip\r\n\r\n');
    const headerAt = captured.body.indexOf(partHeader);
    assert.ok(headerAt >= 0, "bundle header not found in multipart body");

    // ...then slice its payload, which ends at the next boundary marker, and
    // verify the bytes survived streaming unchanged.
    const payloadFrom = headerAt + partHeader.length;
    const payloadTo = captured.body.indexOf(Buffer.from(`\r\n--${captured.boundary}\r\n`), payloadFrom);
    assert.ok(payloadTo > payloadFrom, "bundle payload terminator not found");
    assert.deepEqual(captured.body.subarray(payloadFrom, payloadTo), expectedBytes);
  } finally {
    global.fetch = realFetch;
    fs.rmSync(sandboxDir, { recursive: true, force: true });
  }
});