@context-engine-bridge/context-engine-mcp-bridge 0.0.77 → 0.0.79

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@context-engine-bridge/context-engine-mcp-bridge",
3
- "version": "0.0.77",
3
+ "version": "0.0.79",
4
4
  "description": "Context Engine MCP bridge (http/stdio proxy combining indexer + memory servers)",
5
5
  "bin": {
6
6
  "ctxce": "bin/ctxce.js",
package/src/authCli.js CHANGED
@@ -154,11 +154,19 @@ async function doLogin(args) {
154
154
  const sessionId = data.session_id || data.sessionId || null;
155
155
  const userId = data.user_id || data.userId || null;
156
156
  const expiresAt = data.expires_at || data.expiresAt || null;
157
+ const orgId = data.org_id || data.orgId || null;
158
+ const orgSlug = data.org_slug || data.orgSlug || null;
157
159
  if (!sessionId) {
158
160
  console.error("[ctxce] Auth login response missing session id.");
159
161
  process.exit(1);
160
162
  }
161
- saveAuthEntry(url, { sessionId, userId, expiresAt });
163
+ saveAuthEntry(url, {
164
+ sessionId,
165
+ userId,
166
+ expiresAt,
167
+ org_id: orgId,
168
+ org_slug: orgSlug,
169
+ });
162
170
  console.error("[ctxce] Auth login successful for", url);
163
171
  }
164
172
 
package/src/authConfig.js CHANGED
@@ -4,6 +4,7 @@ import path from "node:path";
4
4
 
5
5
  const CONFIG_DIR_NAME = ".ctxce";
6
6
  const CONFIG_BASENAME = "auth.json";
7
+ const RESOLVED_COLLECTIONS_KEY = "resolved_collections_v1";
7
8
 
8
9
  function getConfigPath() {
9
10
  const home = os.homedir() || process.cwd();
@@ -36,6 +37,38 @@ function writeConfig(data) {
36
37
  }
37
38
  }
38
39
 
40
+ function normalizeScopeValue(value) {
41
+ return typeof value === "string" ? value.trim() : "";
42
+ }
43
+
44
+ function buildResolvedCollectionScopeKey({ orgId, orgSlug, logicalRepoId } = {}) {
45
+ const repo = normalizeScopeValue(logicalRepoId);
46
+ if (!repo) {
47
+ return "";
48
+ }
49
+ const normalizedOrgId = normalizeScopeValue(orgId);
50
+ const normalizedOrgSlug = normalizeScopeValue(orgSlug);
51
+ const org = normalizedOrgId || (normalizedOrgSlug ? `slug:${normalizedOrgSlug}` : "org:none");
52
+ return `${org}::${repo}`;
53
+ }
54
+
55
+ function getResolvedCollections(entry) {
56
+ const map = entry && typeof entry === "object" ? entry[RESOLVED_COLLECTIONS_KEY] : null;
57
+ return map && typeof map === "object" ? map : {};
58
+ }
59
+
60
+ function preserveInternalEntryState(existingEntry, nextEntry) {
61
+ const preserved = {};
62
+ const existingCollections = getResolvedCollections(existingEntry);
63
+ if (Object.keys(existingCollections).length > 0) {
64
+ preserved[RESOLVED_COLLECTIONS_KEY] = existingCollections;
65
+ }
66
+ return {
67
+ ...preserved,
68
+ ...nextEntry,
69
+ };
70
+ }
71
+
39
72
  export function loadAuthEntry(backendUrl) {
40
73
  if (!backendUrl) {
41
74
  return null;
@@ -55,7 +88,8 @@ export function saveAuthEntry(backendUrl, entry) {
55
88
  }
56
89
  const all = readConfig();
57
90
  const key = String(backendUrl);
58
- all[key] = entry;
91
+ const existingEntry = all[key];
92
+ all[key] = preserveInternalEntryState(existingEntry, entry);
59
93
  writeConfig(all);
60
94
  }
61
95
 
@@ -82,3 +116,39 @@ export function loadAnyAuthEntry() {
82
116
  }
83
117
  return null;
84
118
  }
119
+
120
+ export function loadResolvedCollection(backendUrl, scope) {
121
+ if (!backendUrl) {
122
+ return null;
123
+ }
124
+ const scopeKey = buildResolvedCollectionScopeKey(scope);
125
+ if (!scopeKey) {
126
+ return null;
127
+ }
128
+ const entry = loadAuthEntry(backendUrl);
129
+ const resolved = getResolvedCollections(entry)[scopeKey];
130
+ return typeof resolved === "string" && resolved.trim() ? resolved.trim() : null;
131
+ }
132
+
133
+ export function saveResolvedCollection(backendUrl, scope, collection) {
134
+ if (!backendUrl) {
135
+ return;
136
+ }
137
+ const scopeKey = buildResolvedCollectionScopeKey(scope);
138
+ const nextCollection = typeof collection === "string" ? collection.trim() : "";
139
+ if (!scopeKey || !nextCollection) {
140
+ return;
141
+ }
142
+ const all = readConfig();
143
+ const key = String(backendUrl);
144
+ const existingEntry = all[key] && typeof all[key] === "object" ? all[key] : {};
145
+ const resolvedCollections = {
146
+ ...getResolvedCollections(existingEntry),
147
+ [scopeKey]: nextCollection,
148
+ };
149
+ all[key] = {
150
+ ...existingEntry,
151
+ [RESOLVED_COLLECTIONS_KEY]: resolvedCollections,
152
+ };
153
+ writeConfig(all);
154
+ }
package/src/connectCli.js CHANGED
@@ -2,7 +2,8 @@ import process from "node:process";
2
2
  import path from "node:path";
3
3
  import fs from "node:fs";
4
4
  import { saveAuthEntry } from "./authConfig.js";
5
- import { indexWorkspace, loadGitignore, isCodeFile, collectGitState } from "./uploader.js";
5
+ import { indexWorkspace } from "./uploader.js";
6
+ import { startSyncDaemon } from "./syncDaemon.js";
6
7
 
7
8
  const SAAS_ENDPOINTS = {
8
9
  uploadEndpoint: "https://dev.context-engine.ai/upload",
@@ -12,7 +13,6 @@ const SAAS_ENDPOINTS = {
12
13
  };
13
14
 
14
15
  const DEFAULT_WATCH_INTERVAL_MS = 30000;
15
- const DEFAULT_DEBOUNCE_MS = 2000;
16
16
 
17
17
  function parseConnectArgs(args) {
18
18
  let apiKey = "";
@@ -164,181 +164,20 @@ function startWatcher(workspace, initialSessionId, authEntry, intervalMs) {
164
164
  console.error(`[ctxce] Starting file watcher (sync every ${intervalMs / 1000}s)...`);
165
165
  console.error("[ctxce] Press Ctrl+C to stop.");
166
166
 
167
- let isRunning = false;
168
- let pendingSync = false;
169
- let sessionId = initialSessionId;
170
- let pendingHistoryOnly = false;
171
- let lastKnownHead = "";
172
-
173
- async function refreshSessionIfNeeded() {
174
- // If the auth entry has an expiry and we're within 5 minutes of it,
175
- // re-authenticate using the stored API key.
176
- if (!authEntry || !authEntry.apiKey) return;
177
- const expiresAt = authEntry.expiresAt;
178
- if (typeof expiresAt !== "number" || !Number.isFinite(expiresAt) || expiresAt <= 0) return;
179
- const nowSecs = Math.floor(Date.now() / 1000);
180
- const remainingSecs = expiresAt - nowSecs;
181
- if (remainingSecs > 300) return; // still valid for > 5 min
182
- console.error("[ctxce] Session approaching expiry, refreshing...");
183
- try {
184
- const refreshed = await authenticateWithApiKey(authEntry.apiKey);
185
- if (refreshed && refreshed.sessionId) {
186
- sessionId = refreshed.sessionId;
187
- authEntry = refreshed;
188
- console.error("[ctxce] Session refreshed successfully.");
189
- }
190
- } catch (err) {
191
- console.error(`[ctxce] Session refresh failed: ${err}`);
192
- }
193
- }
194
-
195
- const fileHashes = new Map();
196
-
197
- function getFileHash(filePath) {
198
- try {
199
- const stat = fs.statSync(filePath);
200
- return `${stat.mtime.getTime()}-${stat.size}`;
201
- } catch {
202
- return null;
203
- }
204
- }
205
-
206
- // Use gitignore from uploader.js so the watcher ignores the same files as
207
- // the bundle creator -- prevents redundant uploads for generated/ignored files.
208
- const _ig = loadGitignore(workspace);
209
-
210
- function scanDirectory(dir, files = []) {
211
- try {
212
- const entries = fs.readdirSync(dir, { withFileTypes: true });
213
- for (const entry of entries) {
214
- if (entry.isSymbolicLink()) continue;
215
- const fullPath = path.join(dir, entry.name);
216
- // Normalize to forward slashes for the `ignore` library (expects POSIX paths)
217
- const relPath = path.relative(workspace, fullPath).split(path.sep).join('/');
218
-
219
- // Use the same ignore rules as the bundle creator
220
- // Directories need a trailing slash for gitignore pattern matching
221
- if (entry.isDirectory()) {
222
- if (_ig.ignores(relPath + '/')) continue;
223
- scanDirectory(fullPath, files);
224
- } else if (entry.isFile()) {
225
- if (_ig.ignores(relPath)) continue;
226
- if (isCodeFile(fullPath)) {
227
- files.push(fullPath);
228
- }
229
- }
230
- }
231
- } catch {
232
- }
233
- return files;
234
- }
235
-
236
- function detectChanges() {
237
- const currentFiles = scanDirectory(workspace);
238
- let codeChanged = false;
239
-
240
- const currentPaths = new Set(currentFiles);
241
-
242
- for (const filePath of currentFiles) {
243
- const newHash = getFileHash(filePath);
244
- const oldHash = fileHashes.get(filePath);
245
-
246
- if (newHash !== oldHash) {
247
- codeChanged = true;
248
- fileHashes.set(filePath, newHash);
249
- }
250
- }
251
-
252
- for (const oldPath of fileHashes.keys()) {
253
- if (!currentPaths.has(oldPath)) {
254
- codeChanged = true;
255
- fileHashes.delete(oldPath);
256
- }
257
- }
167
+ const handle = startSyncDaemon({
168
+ workspace,
169
+ sessionId: initialSessionId,
170
+ authEntry,
171
+ uploadEndpoint: SAAS_ENDPOINTS.uploadEndpoint,
172
+ intervalMs,
173
+ log: console.error,
174
+ });
258
175
 
259
- let historyChanged = false;
176
+ const cleanup = () => {
260
177
  try {
261
- const gitState = collectGitState(workspace);
262
- const currentHead = gitState && gitState.head ? gitState.head : "";
263
- if (currentHead && currentHead !== lastKnownHead) {
264
- historyChanged = true;
265
- }
178
+ handle.cleanup();
266
179
  } catch {
267
180
  }
268
-
269
- return { codeChanged, historyChanged };
270
- }
271
-
272
- async function doSync(historyOnly = false) {
273
- if (isRunning) {
274
- pendingSync = true;
275
- pendingHistoryOnly = pendingHistoryOnly || historyOnly;
276
- return;
277
- }
278
-
279
- isRunning = true;
280
- const now = new Date().toLocaleTimeString();
281
- console.error(`[ctxce] [${now}] Syncing changes...`);
282
-
283
- try {
284
- // Refresh session before upload if approaching expiry
285
- await refreshSessionIfNeeded();
286
-
287
- const result = await indexWorkspace(
288
- workspace,
289
- SAAS_ENDPOINTS.uploadEndpoint,
290
- sessionId,
291
- {
292
- log: console.error,
293
- orgId: authEntry?.org_id,
294
- orgSlug: authEntry?.org_slug,
295
- historyOnly,
296
- }
297
- );
298
- if (result.success) {
299
- try {
300
- const gitState = collectGitState(workspace);
301
- lastKnownHead = gitState && gitState.head ? gitState.head : lastKnownHead;
302
- } catch {
303
- }
304
- console.error(`[ctxce] [${now}] Sync complete.`);
305
- } else {
306
- console.error(`[ctxce] [${now}] Sync failed: ${result.error}`);
307
- }
308
- } catch (err) {
309
- console.error(`[ctxce] [${now}] Sync error: ${err}`);
310
- }
311
-
312
- isRunning = false;
313
-
314
- if (pendingSync) {
315
- const nextHistoryOnly = pendingHistoryOnly;
316
- pendingSync = false;
317
- pendingHistoryOnly = false;
318
- setTimeout(() => {
319
- doSync(nextHistoryOnly);
320
- }, DEFAULT_DEBOUNCE_MS);
321
- }
322
- }
323
-
324
- scanDirectory(workspace).forEach(f => {
325
- fileHashes.set(f, getFileHash(f));
326
- });
327
- try {
328
- const gitState = collectGitState(workspace);
329
- lastKnownHead = gitState && gitState.head ? gitState.head : "";
330
- } catch {
331
- }
332
-
333
- const intervalId = setInterval(() => {
334
- const changeState = detectChanges();
335
- if (changeState.codeChanged || changeState.historyChanged) {
336
- doSync(changeState.historyChanged && !changeState.codeChanged);
337
- }
338
- }, intervalMs);
339
-
340
- const cleanup = () => {
341
- clearInterval(intervalId);
342
181
  console.error("\n[ctxce] Watcher stopped.");
343
182
  };
344
183
 
@@ -352,7 +191,7 @@ function startWatcher(workspace, initialSessionId, authEntry, intervalMs) {
352
191
  process.exit(0);
353
192
  });
354
193
 
355
- return { intervalId, cleanup };
194
+ return handle;
356
195
  }
357
196
 
358
197
  function printSuccess() {
package/src/mcpServer.js CHANGED
@@ -11,8 +11,17 @@ import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/
11
11
  import { Client } from "@modelcontextprotocol/sdk/client/index.js";
12
12
  import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
13
13
  import { CallToolRequestSchema, ListToolsRequestSchema } from "@modelcontextprotocol/sdk/types.js";
14
- import { loadAnyAuthEntry, loadAuthEntry, readConfig, saveAuthEntry } from "./authConfig.js";
14
+ import {
15
+ loadAnyAuthEntry,
16
+ loadAuthEntry,
17
+ loadResolvedCollection,
18
+ readConfig,
19
+ saveAuthEntry,
20
+ saveResolvedCollection,
21
+ } from "./authConfig.js";
15
22
  import { maybeRemapToolArgs, maybeRemapToolResult } from "./resultPathMapping.js";
23
+ import { startSyncDaemon } from "./syncDaemon.js";
24
+ import { computeLogicalRepoIdentity } from "./uploader.js";
16
25
  import * as oauthHandler from "./oauthHandler.js";
17
26
 
18
27
  const LSP_CONN_CACHE_TTL = 5000;
@@ -592,6 +601,7 @@ async function fetchBridgeCollectionState({
592
601
  collection,
593
602
  sessionId,
594
603
  repoName,
604
+ logicalRepoId,
595
605
  bridgeStateToken,
596
606
  backendHint,
597
607
  uploadServiceUrl,
@@ -611,6 +621,9 @@ async function fetchBridgeCollectionState({
611
621
  if (repoName && repoName.trim()) {
612
622
  url.searchParams.set("repo_name", repoName.trim());
613
623
  }
624
+ if (logicalRepoId && logicalRepoId.trim()) {
625
+ url.searchParams.set("logical_repo_id", logicalRepoId.trim());
626
+ }
614
627
 
615
628
  const headers = {
616
629
  Accept: "application/json",
@@ -645,6 +658,131 @@ async function fetchBridgeCollectionState({
645
658
  }
646
659
  }
647
660
 
661
+ export async function buildDefaultsPayload({
662
+ workspace,
663
+ sessionId,
664
+ explicitCollection,
665
+ defaultCollection,
666
+ defaultMode,
667
+ defaultUnder,
668
+ config,
669
+ backendHint,
670
+ uploadServiceUrl,
671
+ bridgeStateToken = BRIDGE_STATE_TOKEN,
672
+ authEntry = undefined,
673
+ fetchState = fetchBridgeCollectionState,
674
+ getLogicalRepoIdentity = computeLogicalRepoIdentity,
675
+ loadResolvedCollectionHint = loadResolvedCollection,
676
+ saveResolvedCollectionHint = saveResolvedCollection,
677
+ log = debugLog,
678
+ } = {}) {
679
+ const defaultsPayload = { session: sessionId };
680
+ const pinnedCollection =
681
+ typeof explicitCollection === "string" ? explicitCollection.trim() : "";
682
+ const configuredCollection =
683
+ !pinnedCollection && typeof defaultCollection === "string"
684
+ ? defaultCollection.trim()
685
+ : "";
686
+
687
+ if (pinnedCollection) {
688
+ defaultsPayload.collection = pinnedCollection;
689
+ } else if (configuredCollection) {
690
+ defaultsPayload.collection = configuredCollection;
691
+ }
692
+
693
+ const resolvedAuthEntry = authEntry === undefined
694
+ ? (backendHint ? loadAuthEntry(backendHint) : null)
695
+ : authEntry;
696
+
697
+ if (resolvedAuthEntry && (resolvedAuthEntry.org_id || resolvedAuthEntry.org_slug)) {
698
+ if (resolvedAuthEntry.org_id) {
699
+ defaultsPayload.org_id = resolvedAuthEntry.org_id;
700
+ }
701
+ if (resolvedAuthEntry.org_slug) {
702
+ defaultsPayload.org_slug = resolvedAuthEntry.org_slug;
703
+ }
704
+ }
705
+
706
+ const repoName = detectRepoName(workspace, config);
707
+ let logicalRepoId = "";
708
+ try {
709
+ logicalRepoId = getLogicalRepoIdentity(workspace)?.id || "";
710
+ } catch {
711
+ logicalRepoId = "";
712
+ }
713
+
714
+ const exactWorkspaceCollection = !pinnedCollection
715
+ ? _readExactWorkspaceCachedCollection(workspace)
716
+ : null;
717
+
718
+ if (!defaultsPayload.collection && exactWorkspaceCollection) {
719
+ defaultsPayload.collection = exactWorkspaceCollection;
720
+ log(`[ctxce] Using exact workspace cached collection: ${exactWorkspaceCollection}`);
721
+ }
722
+
723
+ if (!defaultsPayload.collection && backendHint && logicalRepoId) {
724
+ const cachedCollection = loadResolvedCollectionHint(backendHint, {
725
+ orgId: resolvedAuthEntry?.org_id,
726
+ orgSlug: resolvedAuthEntry?.org_slug,
727
+ logicalRepoId,
728
+ });
729
+ if (cachedCollection) {
730
+ defaultsPayload.collection = cachedCollection;
731
+ log(`[ctxce] Using cached resolved collection from auth store: ${cachedCollection}`);
732
+ }
733
+ }
734
+
735
+ if (!pinnedCollection) {
736
+ try {
737
+ const state = await fetchState({
738
+ workspace,
739
+ collection: configuredCollection,
740
+ sessionId,
741
+ repoName,
742
+ logicalRepoId,
743
+ bridgeStateToken,
744
+ backendHint,
745
+ uploadServiceUrl,
746
+ });
747
+ if (state) {
748
+ const servingCollection = typeof state.serving_collection === "string"
749
+ ? state.serving_collection.trim()
750
+ : "";
751
+ const activeCollection = typeof state.active_collection === "string"
752
+ ? state.active_collection.trim()
753
+ : "";
754
+ if (servingCollection) {
755
+ defaultsPayload.collection = servingCollection;
756
+ if (!configuredCollection || configuredCollection !== servingCollection) {
757
+ log(`[ctxce] Using serving collection from /bridge/state: ${servingCollection}`);
758
+ }
759
+ if (backendHint && logicalRepoId) {
760
+ saveResolvedCollectionHint(backendHint, {
761
+ orgId: resolvedAuthEntry?.org_id,
762
+ orgSlug: resolvedAuthEntry?.org_slug,
763
+ logicalRepoId,
764
+ }, servingCollection);
765
+ }
766
+ } else if (!defaultsPayload.collection && activeCollection) {
767
+ defaultsPayload.collection = activeCollection;
768
+ log(`[ctxce] Using active collection from /bridge/state fallback: ${activeCollection}`);
769
+ }
770
+ }
771
+ } catch (err) {
772
+ log("[ctxce] bridge/state lookup failed: " + String(err));
773
+ }
774
+ }
775
+
776
+ if (defaultMode) {
777
+ defaultsPayload.mode = defaultMode;
778
+ }
779
+ if (defaultUnder) {
780
+ defaultsPayload.under = defaultUnder;
781
+ }
782
+
783
+ return defaultsPayload;
784
+ }
785
+
648
786
  function _validateWorkspacePath(raw) {
649
787
  if (typeof raw !== "string" || raw.length === 0) return null;
650
788
  const resolved = path.resolve(raw);
@@ -658,6 +796,42 @@ function _validateWorkspacePath(raw) {
658
796
  return resolved;
659
797
  }
660
798
 
799
+ function _readExactWorkspaceCachedCollection(workspacePath) {
800
+ const resolvedWorkspace = _validateWorkspacePath(workspacePath);
801
+ if (!resolvedWorkspace) return null;
802
+
803
+ const wsDir = _computeWorkspaceDir(resolvedWorkspace);
804
+ const configPath = path.join(wsDir, "ctx_config.json");
805
+ const metaPath = path.join(wsDir, "meta.json");
806
+
807
+ if (!fs.existsSync(configPath)) {
808
+ return null;
809
+ }
810
+
811
+ if (fs.existsSync(metaPath)) {
812
+ try {
813
+ const meta = JSON.parse(fs.readFileSync(metaPath, "utf8"));
814
+ const metaWorkspace = _validateWorkspacePath(meta && meta.workspace_path);
815
+ if (!metaWorkspace || metaWorkspace !== resolvedWorkspace) {
816
+ return null;
817
+ }
818
+ } catch (_) {
819
+ return null;
820
+ }
821
+ }
822
+
823
+ try {
824
+ const parsed = JSON.parse(fs.readFileSync(configPath, "utf8"));
825
+ const collection =
826
+ parsed && typeof parsed.default_collection === "string"
827
+ ? parsed.default_collection.trim()
828
+ : "";
829
+ return collection || null;
830
+ } catch (_) {
831
+ return null;
832
+ }
833
+ }
834
+
661
835
  const MAX_WS_SCAN = 50;
662
836
 
663
837
  function _resolveWorkspace(providedWorkspace) {
@@ -846,57 +1020,47 @@ async function createBridgeServer(options) {
846
1020
  }
847
1021
  }
848
1022
 
849
- // Best-effort: inform the indexer of default collection and session.
850
- // If this fails we still proceed, falling back to per-call injection.
851
- const defaultsPayload = { session: sessionId };
852
- if (defaultCollection) {
853
- defaultsPayload.collection = defaultCollection;
854
- }
855
-
856
- // Include org context from auth entry if available (for org-scoped collection isolation)
1023
+ const syncDaemonEnabled = process.env.CTXCE_SYNC_DAEMON !== "0";
1024
+ const syncUploadEndpoint = uploadServiceUrl
1025
+ ? `${String(uploadServiceUrl).replace(/\/+$/, "")}/upload`
1026
+ : "";
1027
+ let syncAuthEntry = null;
857
1028
  try {
858
- const authEntry = backendHint ? loadAuthEntry(backendHint) : null;
859
- if (authEntry && authEntry.org_id) {
860
- defaultsPayload.org_id = authEntry.org_id;
861
- defaultsPayload.org_slug = authEntry.org_slug;
862
- }
1029
+ syncAuthEntry = backendHint ? loadAuthEntry(backendHint) : null;
863
1030
  } catch {
864
- // ignore auth entry lookup failures
865
- }
866
-
867
- const repoName = detectRepoName(workspace, config);
868
-
869
- try {
870
- const state = await fetchBridgeCollectionState({
871
- workspace,
872
- collection: defaultCollection,
873
- sessionId,
874
- repoName,
875
- bridgeStateToken: BRIDGE_STATE_TOKEN,
876
- backendHint,
877
- uploadServiceUrl,
878
- });
879
- if (state) {
880
- const serving = state.serving_collection || state.active_collection;
881
- if (serving) {
882
- defaultsPayload.collection = serving;
883
- if (!defaultCollection || defaultCollection !== serving) {
884
- debugLog(
885
- `[ctxce] Using serving collection from /bridge/state: ${serving}`,
886
- );
887
- }
888
- }
1031
+ syncAuthEntry = null;
1032
+ }
1033
+ if (
1034
+ syncDaemonEnabled &&
1035
+ syncUploadEndpoint &&
1036
+ sessionId &&
1037
+ !sessionId.startsWith("ctxce-")
1038
+ ) {
1039
+ try {
1040
+ startSyncDaemon({
1041
+ workspace,
1042
+ sessionId,
1043
+ authEntry: syncAuthEntry || { sessionId },
1044
+ uploadEndpoint: syncUploadEndpoint,
1045
+ });
1046
+ } catch (err) {
1047
+ debugLog("[ctxce] Failed to start sync daemon: " + String(err));
889
1048
  }
890
- } catch (err) {
891
- debugLog("[ctxce] bridge/state lookup failed: " + String(err));
892
1049
  }
893
1050
 
894
- if (defaultMode) {
895
- defaultsPayload.mode = defaultMode;
896
- }
897
- if (defaultUnder) {
898
- defaultsPayload.under = defaultUnder;
899
- }
1051
+ // Best-effort: inform the indexer of default collection and session.
1052
+ // If this fails we still proceed, falling back to per-call injection.
1053
+ const defaultsPayload = await buildDefaultsPayload({
1054
+ workspace,
1055
+ sessionId,
1056
+ explicitCollection,
1057
+ defaultCollection,
1058
+ defaultMode,
1059
+ defaultUnder,
1060
+ config,
1061
+ backendHint,
1062
+ uploadServiceUrl,
1063
+ });
900
1064
 
901
1065
  async function initializeRemoteClients(forceRecreate = false) {
902
1066
  if (!forceRecreate && indexerClient) {
@@ -2136,4 +2300,3 @@ function detectRepoName(workspace, config) {
2136
2300
  const leaf = workspace ? path.basename(workspace) : "";
2137
2301
  return leaf && SLUGGED_REPO_RE.test(leaf) ? leaf : null;
2138
2302
  }
2139
-
@@ -247,6 +247,8 @@ export function getLoginPage(redirectUri, clientId, state, codeChallenge, codeCh
247
247
 
248
248
  const data = await resp.json();
249
249
  const sessionId = data.session_id || data.sessionId;
250
+ const orgId = data.org_id || data.orgId || null;
251
+ const orgSlug = data.org_slug || data.orgSlug || null;
250
252
  if (!sessionId) {
251
253
  throw new Error('No session in response');
252
254
  }
@@ -258,6 +260,8 @@ export function getLoginPage(redirectUri, clientId, state, codeChallenge, codeCh
258
260
  body: JSON.stringify({
259
261
  session_id: sessionId,
260
262
  backend_url: backendUrl,
263
+ org_id: orgId,
264
+ org_slug: orgSlug,
261
265
  redirect_uri: params.redirect_uri,
262
266
  state: params.state,
263
267
  code_challenge: params.code_challenge,
@@ -443,7 +447,17 @@ export function handleOAuthStoreSession(req, res) {
443
447
  res.setHeader("Content-Type", "application/json");
444
448
  try {
445
449
  const data = JSON.parse(body);
446
- const { session_id, backend_url, redirect_uri, state, code_challenge, code_challenge_method, client_id } = data;
450
+ const {
451
+ session_id,
452
+ backend_url,
453
+ org_id,
454
+ org_slug,
455
+ redirect_uri,
456
+ state,
457
+ code_challenge,
458
+ code_challenge_method,
459
+ client_id,
460
+ } = data;
447
461
 
448
462
  if (!session_id || !backend_url) {
449
463
  res.statusCode = 400;
@@ -503,6 +517,8 @@ export function handleOAuthStoreSession(req, res) {
503
517
  sessionId: session_id,
504
518
  userId: "oauth-user",
505
519
  expiresAt: null,
520
+ org_id: org_id || null,
521
+ org_slug: org_slug || null,
506
522
  });
507
523
 
508
524
  // Generate auth code
@@ -0,0 +1,463 @@
1
+ /**
2
+ * syncDaemon.js -- process-level singleton file-watcher / upload sync daemon.
3
+ *
4
+ * Can be started from any entry point (connect CLI, mcp-serve stdio, mcp-http-serve).
5
+ * Only one watcher per resolved workspace path is allowed per process.
6
+ *
7
+ * Exports:
8
+ * startSyncDaemon(options) -> { intervalId, cleanup }
9
+ * stopSyncDaemon(workspace) -> void
10
+ */
11
+
12
+ import path from "node:path";
13
+ import fs from "node:fs";
14
+ import { saveAuthEntry } from "./authConfig.js";
15
+ import { indexWorkspace, loadGitignore, isCodeFile, collectGitState } from "./uploader.js";
16
+
17
+ // ---------------------------------------------------------------------------
18
+ // Constants
19
+ // ---------------------------------------------------------------------------
20
+
21
+ const DEFAULT_WATCH_INTERVAL_MS = 30000;
22
+ const DEFAULT_DEBOUNCE_MS = 2000;
23
+
24
+ // No-op logger used as the default in MCP mode so the daemon is silent.
25
+ const noop = () => {};
26
+
27
+ // ---------------------------------------------------------------------------
28
+ // Process-level singleton registry
29
+ // keyed by path.resolve(workspace) -> { intervalId, cleanup, updateRuntimeState }
30
+ // ---------------------------------------------------------------------------
31
+
32
+ const _activeDaemons = new Map();
33
+
34
+ // ---------------------------------------------------------------------------
35
+ // Session refresh helper (self-contained, no side-effects on the caller's env)
36
+ // ---------------------------------------------------------------------------
37
+
38
+ /**
39
+ * Derive the auth backend URL from the upload endpoint.
40
+ * Convention: the upload endpoint is `<authBackendUrl>/upload`.
41
+ * Strip the trailing `/upload` path segment if present, otherwise use as-is.
42
+ *
43
+ * @param {string} uploadEndpoint
44
+ * @returns {string}
45
+ */
46
+ function deriveAuthBackendUrl(uploadEndpoint) {
47
+ try {
48
+ const u = new URL(uploadEndpoint);
49
+ // Remove the last path segment only if it is "upload"
50
+ if (u.pathname.endsWith("/upload")) {
51
+ u.pathname = u.pathname.slice(0, -"/upload".length) || "/";
52
+ }
53
+ // Normalise trailing slash away
54
+ u.pathname = u.pathname.replace(/\/$/, "") || "/";
55
+ return u.origin + (u.pathname === "/" ? "" : u.pathname);
56
+ } catch {
57
+ // Fallback: simple string surgery
58
+ return uploadEndpoint.replace(/\/upload\/?$/, "");
59
+ }
60
+ }
61
+
62
+ /**
63
+ * Attempt to obtain a fresh session by posting to `<authBackendUrl>/auth/login`.
64
+ * Returns the new auth entry on success, or null on failure.
65
+ *
66
+ * @param {string} apiKey
67
+ * @param {string} authBackendUrl
68
+ * @param {string} workspace
69
+ * @param {function} log
70
+ * @returns {Promise<object|null>}
71
+ */
72
+ async function _refreshSession(apiKey, authBackendUrl, workspace, log) {
73
+ const authUrl = `${authBackendUrl}/auth/login`;
74
+ const body = {
75
+ client: "ctxce-cli",
76
+ token: apiKey,
77
+ workspace,
78
+ };
79
+
80
+ let resp;
81
+ try {
82
+ resp = await fetch(authUrl, {
83
+ method: "POST",
84
+ headers: { "Content-Type": "application/json" },
85
+ body: JSON.stringify(body),
86
+ });
87
+ } catch (err) {
88
+ log(`[syncDaemon] Session refresh request failed: ${err}`);
89
+ return null;
90
+ }
91
+
92
+ if (!resp || !resp.ok) {
93
+ const status = resp ? resp.status : "<no-response>";
94
+ log(`[syncDaemon] Session refresh HTTP error: ${status}`);
95
+ return null;
96
+ }
97
+
98
+ let data;
99
+ try {
100
+ data = await resp.json();
101
+ } catch {
102
+ data = {};
103
+ }
104
+
105
+ const sessionId = data.session_id || data.sessionId || null;
106
+ const userId = data.user_id || data.userId || null;
107
+ const expiresAt = data.expires_at || data.expiresAt || null;
108
+ const orgId = data.org_id || data.orgId || null;
109
+ const orgSlug = data.org_slug || data.orgSlug || null;
110
+
111
+ if (!sessionId) {
112
+ log("[syncDaemon] Session refresh response missing session ID.");
113
+ return null;
114
+ }
115
+
116
+ const entry = {
117
+ sessionId,
118
+ userId,
119
+ expiresAt,
120
+ org_id: orgId,
121
+ org_slug: orgSlug,
122
+ apiKey,
123
+ };
124
+
125
+ // Persist the refreshed entry so other callers can pick it up.
126
+ saveAuthEntry(authBackendUrl, entry);
127
+
128
+ return entry;
129
+ }
130
+
131
+ // ---------------------------------------------------------------------------
132
+ // Core daemon factory
133
+ // ---------------------------------------------------------------------------
134
+
135
+ /**
136
+ * Start the sync daemon for the given workspace.
137
+ *
138
+ * If a daemon is already running for this workspace (same process), this
139
+ * function returns the existing handle WITHOUT starting a second one.
140
+ *
141
+ * @param {object} options
142
+ * @param {string} options.workspace Absolute or relative path to the workspace root.
143
+ * @param {string} options.sessionId Initial session ID for uploads.
144
+ * @param {object} options.authEntry Auth entry object (may contain apiKey, expiresAt, org_id, org_slug).
145
+ * @param {string} options.uploadEndpoint Full upload endpoint URL (e.g. "https://dev.context-engine.ai/upload").
146
+ * @param {number} [options.intervalMs] Poll interval in milliseconds (default: 30000).
147
+ * @param {function} [options.log] Logger function. Defaults to a no-op (silent in MCP mode).
148
+ * Pass `console.error` for interactive CLI output.
149
+ * @returns {{ intervalId: NodeJS.Timeout, cleanup: function }}
150
+ */
151
+ export function startSyncDaemon(options) {
152
+ const {
153
+ workspace,
154
+ sessionId: initialSessionId,
155
+ authEntry: initialAuthEntry,
156
+ uploadEndpoint: initialUploadEndpoint,
157
+ intervalMs = DEFAULT_WATCH_INTERVAL_MS,
158
+ log = noop,
159
+ } = options;
160
+
161
+ const resolvedWorkspace = path.resolve(workspace);
162
+
163
+ // Singleton guard: return existing daemon if already running, but refresh
164
+ // any mutable runtime state supplied by the new caller.
165
+ if (_activeDaemons.has(resolvedWorkspace)) {
166
+ const existingHandle = _activeDaemons.get(resolvedWorkspace);
167
+ existingHandle?.updateRuntimeState?.({
168
+ sessionId: initialSessionId,
169
+ authEntry: initialAuthEntry,
170
+ uploadEndpoint: initialUploadEndpoint,
171
+ });
172
+ log(`[syncDaemon] Daemon already running for ${resolvedWorkspace}, reusing.`);
173
+ return existingHandle;
174
+ }
175
+
176
+ log(`[syncDaemon] Starting file watcher for ${resolvedWorkspace} (interval: ${intervalMs / 1000}s)`);
177
+
178
+ // Mutable state captured by the daemon closures.
179
+ let isRunning = false;
180
+ let pendingSync = false;
181
+ let pendingHistoryOnly = true;
182
+ let sessionId = initialSessionId;
183
+ let authEntry = initialAuthEntry;
184
+ let uploadEndpoint = initialUploadEndpoint;
185
+ let lastKnownHead = "";
186
+
187
+ let authBackendUrl = deriveAuthBackendUrl(uploadEndpoint);
188
+
189
+ function updateRuntimeState(nextState = {}) {
190
+ if (typeof nextState.sessionId === "string" && nextState.sessionId) {
191
+ sessionId = nextState.sessionId;
192
+ }
193
+
194
+ if (nextState.authEntry && typeof nextState.authEntry === "object") {
195
+ authEntry = nextState.authEntry;
196
+ if (typeof nextState.authEntry.sessionId === "string" && nextState.authEntry.sessionId) {
197
+ sessionId = nextState.authEntry.sessionId;
198
+ }
199
+ }
200
+
201
+ if (typeof nextState.uploadEndpoint === "string" && nextState.uploadEndpoint) {
202
+ uploadEndpoint = nextState.uploadEndpoint;
203
+ authBackendUrl = deriveAuthBackendUrl(uploadEndpoint);
204
+ }
205
+ }
206
+
207
+ // -------------------------------------------------------------------------
208
+ // Session refresh
209
+ // -------------------------------------------------------------------------
210
+
211
/**
 * Refresh the auth session when it is within 5 minutes of expiry.
 * Requires an API key on the current auth entry; failures are logged,
 * never thrown.
 */
async function refreshSessionIfNeeded() {
  // Without an API key there is nothing we can refresh with.
  if (!authEntry?.apiKey) return;

  const { expiresAt } = authEntry;
  if (typeof expiresAt !== "number" || !Number.isFinite(expiresAt) || expiresAt <= 0) {
    return;
  }

  const secondsRemaining = expiresAt - Math.floor(Date.now() / 1000);
  if (secondsRemaining > 300) return; // still valid for > 5 minutes

  log("[syncDaemon] Session approaching expiry, refreshing...");
  try {
    const refreshed = await _refreshSession(
      authEntry.apiKey,
      authBackendUrl,
      resolvedWorkspace,
      log
    );
    if (refreshed?.sessionId) {
      sessionId = refreshed.sessionId;
      authEntry = refreshed;
      log("[syncDaemon] Session refreshed successfully.");
    }
  } catch (err) {
    log(`[syncDaemon] Session refresh failed: ${err}`);
  }
}
236
+
237
// -------------------------------------------------------------------------
// File-system scanning
// -------------------------------------------------------------------------

// filePath -> "mtimeMs-size" signature recorded at the last successful sync.
const fileHashes = new Map();
// Gitignore matcher rooted at the workspace (expects POSIX-style rel paths).
const _ig = loadGitignore(resolvedWorkspace);
243
+
244
/**
 * Cheap change signature for a file: "mtimeMs-size".
 * Not a content hash — good enough to detect edits between polls.
 *
 * @param {string} filePath Absolute path to the file.
 * @returns {string|null} Signature, or null if the file cannot be stat'ed.
 */
function getFileHash(filePath) {
  let stat;
  try {
    stat = fs.statSync(filePath);
  } catch {
    return null; // Missing or unreadable file.
  }
  return `${stat.mtime.getTime()}-${stat.size}`;
}
252
+
253
/**
 * Recursively collect code files under `dir`, honouring the workspace
 * gitignore matcher and skipping symlinks. Unreadable directories are
 * silently skipped (best-effort scan).
 *
 * @param {string} dir Directory to scan.
 * @param {string[]} [files] Accumulator, also returned.
 * @returns {string[]} Absolute paths of matching code files.
 */
function scanDirectory(dir, files = []) {
  try {
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      if (entry.isSymbolicLink()) continue;

      const fullPath = path.join(dir, entry.name);
      // Normalise to forward slashes for the `ignore` library (expects POSIX paths).
      const relPath = path.relative(resolvedWorkspace, fullPath).split(path.sep).join("/");

      if (entry.isDirectory()) {
        // Directories are matched with a trailing slash.
        if (!_ig.ignores(relPath + "/")) {
          scanDirectory(fullPath, files);
        }
      } else if (entry.isFile() && !_ig.ignores(relPath) && isCodeFile(fullPath)) {
        files.push(fullPath);
      }
    }
  } catch {
    // Unreadable directory -- return whatever was collected so far.
  }
  return files;
}
277
+
278
/**
 * Build a fresh Map of filePath -> signature for every code file currently
 * in the workspace.
 *
 * @returns {Map<string, string|null>}
 */
function snapshotFileHashes() {
  const snapshot = new Map();
  for (const filePath of scanDirectory(resolvedWorkspace)) {
    snapshot.set(filePath, getFileHash(filePath));
  }
  return snapshot;
}
285
+
286
+ // -------------------------------------------------------------------------
287
+ // Change detection
288
+ // -------------------------------------------------------------------------
289
+
290
/**
 * Compare the current workspace state against the last synced snapshot.
 *
 * @returns {{codeChanged: boolean, historyChanged: boolean}}
 *   codeChanged: any file added, modified, or deleted since the last sync.
 *   historyChanged: git HEAD differs from the last successfully synced HEAD.
 */
function detectChanges() {
  const currentHashes = snapshotFileHashes();

  // Added or modified: signature differs from (or is absent in) the snapshot.
  const addedOrModified = [...currentHashes.entries()].some(
    ([filePath, hash]) => fileHashes.get(filePath) !== hash
  );
  // Deleted: previously tracked file no longer present.
  const deleted = [...fileHashes.keys()].some(
    (filePath) => !currentHashes.has(filePath)
  );
  const codeChanged = addedOrModified || deleted;

  let historyChanged = false;
  try {
    const gitState = collectGitState(resolvedWorkspace);
    const currentHead = gitState?.head ? gitState.head : "";
    if (currentHead && currentHead !== lastKnownHead) {
      historyChanged = true;
      // NOTE: lastKnownHead is updated only on a successful sync so that a
      // failed upload doesn't suppress the next attempt.
    }
  } catch {
    // Git not available or not a git repo -- ignore.
  }

  return { codeChanged, historyChanged };
}
322
+
323
+ // -------------------------------------------------------------------------
324
+ // Sync execution
325
+ // -------------------------------------------------------------------------
326
+
327
/**
 * Run one sync of the workspace to the upload endpoint.
 *
 * Concurrency: only one sync runs at a time. A request arriving while a
 * sync is in flight is coalesced into a single follow-up run, scheduled
 * after DEFAULT_DEBOUNCE_MS. A queued full sync is never downgraded to
 * history-only by a later history-only request.
 *
 * @param {boolean} [historyOnly=false] Sync only git history, skip code reindex.
 */
async function doSync(historyOnly = false) {
  if (isRunning) {
    // Another sync is in flight; queue one more run after it completes.
    pendingSync = true;
    // If the pending request is a full sync, don't downgrade it to history-only.
    pendingHistoryOnly = pendingHistoryOnly && historyOnly;
    return;
  }

  isRunning = true;
  const now = new Date().toLocaleTimeString();
  log(`[syncDaemon] [${now}] Syncing changes (historyOnly=${historyOnly})...`);

  try {
    await refreshSessionIfNeeded();

    const result = await indexWorkspace(
      resolvedWorkspace,
      uploadEndpoint,
      sessionId,
      {
        log,
        orgId: authEntry?.org_id,
        orgSlug: authEntry?.org_slug,
        historyOnly,
      }
    );

    if (result.success) {
      // Re-snapshot AFTER the upload so edits made during the sync are
      // picked up by the next change-detection pass.
      const latestHashes = snapshotFileHashes();
      fileHashes.clear();
      for (const [filePath, hash] of latestHashes.entries()) {
        fileHashes.set(filePath, hash);
      }

      // Update lastKnownHead only after a successful upload so a transient
      // network failure doesn't prevent retrying the history sync.
      try {
        const gitState = collectGitState(resolvedWorkspace);
        lastKnownHead = gitState && gitState.head ? gitState.head : lastKnownHead;
      } catch {
        // Ignore.
      }
      log(`[syncDaemon] [${now}] Sync complete.`);
    } else {
      log(`[syncDaemon] [${now}] Sync failed: ${result.error}`);
    }
  } catch (err) {
    log(`[syncDaemon] [${now}] Sync error: ${err}`);
  } finally {
    // FIX: reset isRunning (and drain the pending queue) in `finally` so an
    // exception escaping the catch handler (e.g. a throwing `log`) can never
    // leave isRunning stuck at true and deadlock the daemon permanently.
    isRunning = false;

    if (pendingSync) {
      const nextHistoryOnly = pendingHistoryOnly;
      pendingSync = false;
      pendingHistoryOnly = true;
      setTimeout(() => {
        doSync(nextHistoryOnly);
      }, DEFAULT_DEBOUNCE_MS);
    }
  }
}
389
+
390
// -------------------------------------------------------------------------
// Initialisation: snapshot current file state and git HEAD
// -------------------------------------------------------------------------

// Seed the hash snapshot so the first poll only reports files changed AFTER
// startup, not the entire workspace.
for (const [filePath, hash] of snapshotFileHashes().entries()) {
  fileHashes.set(filePath, hash);
}

try {
  const gitState = collectGitState(resolvedWorkspace);
  lastKnownHead = gitState && gitState.head ? gitState.head : "";
} catch {
  // Not a git repo or git unavailable.
}

// Perform an initial history-only sync so that commit history is up-to-date
// without triggering a potentially expensive full code reindex on every
// startup (important for MCP mode where the daemon may restart frequently).
if (lastKnownHead) {
  log("[syncDaemon] Performing initial git history sync...");
  // Fire-and-forget: doSync logs its own errors and never rejects.
  doSync(true /* historyOnly */);
}
412
+
413
// -------------------------------------------------------------------------
// Polling interval
// -------------------------------------------------------------------------

const intervalId = setInterval(() => {
  const changeState = detectChanges();
  if (changeState.codeChanged || changeState.historyChanged) {
    // historyOnly = true only when git HEAD changed but NO code files changed.
    doSync(changeState.historyChanged && !changeState.codeChanged);
  }
}, intervalMs);

// -------------------------------------------------------------------------
// Cleanup
// -------------------------------------------------------------------------

// Stops the poller and deregisters this workspace from the singleton map.
const cleanup = () => {
  clearInterval(intervalId);
  _activeDaemons.delete(resolvedWorkspace);
  log(`[syncDaemon] Watcher stopped for ${resolvedWorkspace}.`);
};

// Register in singleton map BEFORE returning so concurrent callers see it.
const handle = { intervalId, cleanup, updateRuntimeState };
_activeDaemons.set(resolvedWorkspace, handle);

return handle;
440
+ }
441
+
442
/**
 * Tear down the daemon registered for the given workspace, if any.
 * No-op when no daemon is running for that path.
 *
 * @param {string} workspace Workspace path (resolved internally).
 */
export function stopSyncDaemon(workspace) {
  _activeDaemons.get(path.resolve(workspace))?.cleanup();
}
454
+
455
/**
 * Check whether a sync daemon is currently registered for a workspace.
 *
 * @param {string} workspace Workspace path (resolved internally).
 * @returns {boolean} true when a daemon handle exists for the path.
 */
export function isSyncDaemonRunning(workspace) {
  const resolvedWorkspace = path.resolve(workspace);
  return _activeDaemons.has(resolvedWorkspace);
}