@contextstream/mcp-server 0.3.17 → 0.3.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -1
- package/dist/index.js +232 -24
- package/package.json +1 -1
package/README.md
CHANGED
@@ -23,7 +23,7 @@ One integration. Every AI editor. Persistent memory that never forgets.
 <div align="center">

   <a href="https://contextstream.io">
-    <img src="https://
+    <img src="https://raw.githubusercontent.com/contextstream/mcp-server/main/mcp.gif.gif" alt="ContextStream Demo - AI that remembers across sessions" width="600" />
   </a>

   <sub>Your AI remembers decisions, preferences, and context — across sessions and tools.</sub>
@@ -119,6 +119,8 @@ CONTEXTSTREAM_API_KEY = "your_api_key"

 > Codex expects snake_case `mcp_servers` keys. After editing, fully restart Codex.

+> For workspace-pooled rate limiting (Team/Enterprise), the MCP server sends `X-Workspace-Id` based on the active repo/session (or explicit `workspace_id` in tool calls). You can optionally set `CONTEXTSTREAM_WORKSPACE_ID` as a fallback default, but it’s not required and isn’t a good fit if you frequently switch workspaces.
+
 ### AI Rules Files (Recommended)

 Adding rules files ensures your AI automatically uses ContextStream for memory on every conversation.
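
The precedence described in that added note can be sketched as follows. This is an editorial illustration, not code from the package: `resolveWorkspaceId` and its arguments are hypothetical names, and the exact wiring of `CONTEXTSTREAM_WORKSPACE_ID` inside the server is not shown in this diff.

```js
// Hypothetical sketch of the fallback order the README note describes.
function resolveWorkspaceId(explicitToolCallId, activeSessionWorkspaceId) {
  // 1. An explicit workspace_id in the tool call wins.
  if (explicitToolCallId) return explicitToolCallId;
  // 2. Otherwise the workspace of the active repo/session is used.
  if (activeSessionWorkspaceId) return activeSessionWorkspaceId;
  // 3. CONTEXTSTREAM_WORKSPACE_ID is only an optional last-resort default.
  return process.env.CONTEXTSTREAM_WORKSPACE_ID || undefined;
}
```

Because step 2 tracks the active repo/session, users who switch workspaces frequently usually do not need the environment variable at all.
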
@@ -173,6 +175,19 @@ AI: "You prefer functional React components."

 ✨ **That's it. Your AI remembers now.**

+### 4. (Optional) Connect GitHub + Slack for richer context
+
+MCP gives your AI memory. Integrations make that memory richer by pulling in PRs, issues, and team conversations.
+
+- GitHub App setup + connect flow: https://contextstream.io/docs/integrations/github
+- Slack app setup + connect flow: https://contextstream.io/docs/integrations/slack
+
+After you connect, try prompts like:
+```
+"Search our Slack messages for the decision about rate limiting."
+"What did we decide in GitHub issues about the auth flow?"
+```
+
 ---

 ## Beyond Memory: Intelligence That Compounds
package/dist/index.js
CHANGED
@@ -4149,6 +4149,8 @@ async function request(config, path3, options = {}) {
   };
   if (apiKey) headers["X-API-Key"] = apiKey;
   if (jwt) headers["Authorization"] = `Bearer ${jwt}`;
+  const workspaceId = options.workspaceId || inferWorkspaceIdFromBody(options.body) || inferWorkspaceIdFromPath(apiPath) || config.defaultWorkspaceId;
+  if (workspaceId) headers["X-Workspace-Id"] = workspaceId;
   const fetchOptions = {
     method: options.method || (options.body ? "POST" : "GET"),
     headers
@@ -4192,8 +4194,12 @@ async function request(config, path3, options = {}) {
       payload = await response.text().catch(() => null);
     }
     if (!response.ok) {
-      const
-
+      const rateLimit = parseRateLimitHeaders(response.headers);
+      const enrichedPayload = attachRateLimit(payload, rateLimit);
+      const message = extractErrorMessage(enrichedPayload, response.statusText);
+      lastError = new HttpError(response.status, message, enrichedPayload);
+      const apiCode = extractErrorCode(enrichedPayload);
+      if (apiCode) lastError.code = apiCode;
       if (RETRYABLE_STATUSES.has(response.status) && attempt < maxRetries) {
         const retryAfter = response.headers.get("retry-after");
         const delay = retryAfter ? parseInt(retryAfter, 10) * 1e3 : baseDelay * Math.pow(2, attempt);
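
The new error path builds the `HttpError` from the rate-limit-enriched payload before the retry check; the retry delay itself is unchanged: it honors a server-provided `Retry-After` (in seconds) and otherwise backs off exponentially. A worked sketch of that delay formula, with an assumed `baseDelay` since its real value is defined outside this hunk:

```js
// Same formula as the hunk above; baseDelay is an assumed example value,
// not necessarily the package's actual default.
const baseDelay = 1e3; // ms
function retryDelay(retryAfterHeader, attempt) {
  return retryAfterHeader
    ? parseInt(retryAfterHeader, 10) * 1e3 // Retry-After is given in seconds
    : baseDelay * Math.pow(2, attempt);    // exponential backoff otherwise
}
retryDelay(null, 0); // 1000 ms
retryDelay(null, 2); // 4000 ms
retryDelay("30", 2); // 30000 ms: a server-provided Retry-After takes precedence
```
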
@@ -4206,6 +4212,72 @@ async function request(config, path3, options = {}) {
   }
   throw lastError || new HttpError(0, "Request failed after retries");
 }
+var UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
+function isUuid(value) {
+  return typeof value === "string" && UUID_RE.test(value);
+}
+function inferWorkspaceIdFromBody(body) {
+  if (!body || typeof body !== "object") return void 0;
+  const maybe = body.workspace_id;
+  return isUuid(maybe) ? maybe : void 0;
+}
+function inferWorkspaceIdFromPath(apiPath) {
+  const qIndex = apiPath.indexOf("?");
+  if (qIndex >= 0) {
+    try {
+      const query = apiPath.slice(qIndex + 1);
+      const params = new URLSearchParams(query);
+      const ws = params.get("workspace_id");
+      if (isUuid(ws)) return ws;
+    } catch {
+    }
+  }
+  const match = apiPath.match(
+    /\/(?:workspaces|workspace)\/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/i
+  );
+  return match?.[1];
+}
+function parseRateLimitHeaders(headers) {
+  const limit = headers.get("X-RateLimit-Limit");
+  if (!limit) return null;
+  const retryAfter = headers.get("Retry-After");
+  return {
+    limit: parseInt(limit, 10),
+    remaining: parseInt(headers.get("X-RateLimit-Remaining") || "0", 10),
+    reset: parseInt(headers.get("X-RateLimit-Reset") || "0", 10),
+    scope: headers.get("X-RateLimit-Scope") || "unknown",
+    plan: headers.get("X-RateLimit-Plan") || "unknown",
+    group: headers.get("X-RateLimit-Group") || "default",
+    retryAfter: retryAfter ? parseInt(retryAfter, 10) : void 0
+  };
+}
+function attachRateLimit(payload, rateLimit) {
+  if (!rateLimit) return payload;
+  if (payload && typeof payload === "object") {
+    return { ...payload, rate_limit: rateLimit };
+  }
+  return { error: payload, rate_limit: rateLimit };
+}
+function extractErrorMessage(payload, fallback) {
+  if (!payload) return fallback;
+  const nested = payload?.error;
+  if (nested && typeof nested === "object" && typeof nested.message === "string") {
+    return nested.message;
+  }
+  if (typeof payload.message === "string") return payload.message;
+  if (typeof payload.error === "string") return payload.error;
+  if (typeof payload.detail === "string") return payload.detail;
+  return fallback;
+}
+function extractErrorCode(payload) {
+  if (!payload) return null;
+  const nested = payload?.error;
+  if (nested && typeof nested === "object" && typeof nested.code === "string" && nested.code.trim()) {
+    return nested.code.trim();
+  }
+  if (typeof payload.code === "string" && payload.code.trim()) return payload.code.trim();
+  return null;
+}

 // src/files.ts
 import * as fs from "fs";
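
As a reading aid, here is a minimal sketch of how these helpers compose for a rate-limited response. It assumes the helpers above are in scope; the header values and the error body are invented for illustration.

```js
// Invented 429-style response headers and body, matching the header names read above.
const headers = new Headers({
  "X-RateLimit-Limit": "100",
  "X-RateLimit-Remaining": "0",
  "X-RateLimit-Scope": "workspace",
  "X-RateLimit-Plan": "team",
  "Retry-After": "30"
});
const payload = { error: { message: "Rate limit exceeded", code: "rate_limited" } };

const rateLimit = parseRateLimitHeaders(headers);
// { limit: 100, remaining: 0, reset: 0, scope: "workspace", plan: "team",
//   group: "default", retryAfter: 30 }
const enriched = attachRateLimit(payload, rateLimit);
// same payload plus a rate_limit field, which HttpError then carries
extractErrorMessage(enriched, "Too Many Requests"); // "Rate limit exceeded"
extractErrorCode(enriched);                         // "rate_limited"
```
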
@@ -4558,6 +4630,30 @@ var ContextStreamClient = class {
   constructor(config) {
     this.config = config;
   }
+  /**
+   * Update the client's default workspace/project IDs at runtime.
+   *
+   * This is useful for multi-workspace users: once a session is initialized
+   * (via repo mapping or explicit session_init), the MCP server can treat that
+   * workspace as the default for subsequent calls that don't explicitly include
+   * `workspace_id` in the request payload/path/query.
+   */
+  setDefaults(input) {
+    if (input.workspace_id) {
+      try {
+        uuidSchema.parse(input.workspace_id);
+        this.config.defaultWorkspaceId = input.workspace_id;
+      } catch {
+      }
+    }
+    if (input.project_id) {
+      try {
+        uuidSchema.parse(input.project_id);
+        this.config.defaultProjectId = input.project_id;
+      } catch {
+      }
+    }
+  }
   withDefaults(input) {
     const { defaultWorkspaceId, defaultProjectId } = this.config;
     return {
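
A usage sketch for the new `setDefaults`. The config object passed to the constructor is simplified here (its real shape is not shown in this diff); the point is that `uuidSchema.parse` runs inside a try/catch, so malformed IDs are silently ignored rather than throwing.

```js
// Illustrative client setup; only setDefaults' behavior is taken from the diff.
const client = new ContextStreamClient({ apiKey: process.env.CONTEXTSTREAM_API_KEY });

client.setDefaults({
  workspace_id: "123e4567-e89b-12d3-a456-426614174000", // valid UUID: becomes defaultWorkspaceId
  project_id: "not-a-uuid"                              // fails uuidSchema.parse: ignored
});
// Later calls that omit workspace_id inherit the default via withDefaults()
// and the X-Workspace-Id header logic added earlier in this diff.
```
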
@@ -5335,6 +5431,17 @@ var ContextStreamClient = class {
   // ============================================
   // Token-Saving Context Tools
   // ============================================
+  /**
+   * Record a token savings event for user-facing dashboard analytics.
+   * Best-effort: callers should not await this in latency-sensitive paths.
+   */
+  trackTokenSavings(body) {
+    const payload = this.withDefaults({
+      source: "mcp",
+      ...body
+    });
+    return request(this.config, "/analytics/token-savings", { body: payload });
+  }
   /**
    * Get a compact, token-efficient summary of workspace context.
    * Designed to be included in every AI prompt without consuming many tokens.
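
`trackTokenSavings` is invoked fire-and-forget by the summary/context tools later in this diff. A sketch of that calling pattern, assuming a client instance as above; the numbers are illustrative.

```js
// Not awaited, and errors are swallowed, so analytics can never block or fail
// a user-facing tool call.
client.trackTokenSavings({
  tool: "session_summary",  // the hunks below also use "ai_context_budget" and "context_smart"
  candidate_chars: 12000,   // everything that could have been returned
  context_chars: 3200,      // what actually fit within the budget
  max_tokens: 800
}).catch(() => {});
```
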
@@ -5423,7 +5530,31 @@ var ContextStreamClient = class {
     }
     parts.push("");
     parts.push('\u{1F4A1} Use session_recall("topic") for specific context');
-    const
+    const candidateSummary = parts.join("\n");
+    const maxChars = maxTokens * 4;
+    const candidateLines = candidateSummary.split("\n");
+    const finalLines = [];
+    let used = 0;
+    for (const line of candidateLines) {
+      const next = (finalLines.length ? "\n" : "") + line;
+      if (used + next.length > maxChars) break;
+      finalLines.push(line);
+      used += next.length;
+    }
+    const summary = finalLines.join("\n");
+    this.trackTokenSavings({
+      tool: "session_summary",
+      workspace_id: withDefaults.workspace_id,
+      project_id: withDefaults.project_id,
+      candidate_chars: candidateSummary.length,
+      context_chars: summary.length,
+      max_tokens: maxTokens,
+      metadata: {
+        decision_count: decisionCount,
+        memory_count: memoryCount
+      }
+    }).catch(() => {
+    });
     return {
       summary,
       workspace_name: workspaceName,
@@ -5558,6 +5689,7 @@ var ContextStreamClient = class {
     const charsPerToken = 4;
     const maxChars = maxTokens * charsPerToken;
     const parts = [];
+    const candidateParts = [];
     const sources = [];
     let currentChars = 0;
     if (params.include_decisions !== false && withDefaults.workspace_id) {
@@ -5569,14 +5701,22 @@
         });
         if (decisions.items) {
           parts.push("## Relevant Decisions\n");
+          candidateParts.push("## Relevant Decisions\n");
           currentChars += 25;
-
-          const
-
-
-
-
-
+          const decisionEntries = decisions.items.map((d) => {
+            const title = d.title || "Decision";
+            return { title, entry: `\u2022 ${title}
+` };
+          });
+          for (const d of decisionEntries) {
+            candidateParts.push(d.entry);
+          }
+          candidateParts.push("\n");
+          for (const d of decisionEntries) {
+            if (currentChars + d.entry.length > maxChars * 0.4) break;
+            parts.push(d.entry);
+            currentChars += d.entry.length;
+            sources.push({ type: "decision", title: d.title });
           }
           parts.push("\n");
         }
@@ -5593,16 +5733,23 @@
         });
         if (memory.results) {
           parts.push("## Related Context\n");
+          candidateParts.push("## Related Context\n");
           currentChars += 20;
-
+          const memoryEntries = memory.results.map((m) => {
            const title = m.title || "Context";
            const content = m.content?.slice(0, 200) || "";
-
-
-
-
-
-
+            return { title, entry: `\u2022 ${title}: ${content}...
+` };
+          });
+          for (const m of memoryEntries) {
+            candidateParts.push(m.entry);
+          }
+          candidateParts.push("\n");
+          for (const m of memoryEntries) {
+            if (currentChars + m.entry.length > maxChars * 0.7) break;
+            parts.push(m.entry);
+            currentChars += m.entry.length;
+            sources.push({ type: "memory", title: m.title });
           }
           parts.push("\n");
         }
@@ -5619,23 +5766,45 @@
         });
         if (code.results) {
           parts.push("## Relevant Code\n");
+          candidateParts.push("## Relevant Code\n");
           currentChars += 18;
-
+          const codeEntries = code.results.map((c) => {
            const path3 = c.file_path || "file";
            const content = c.content?.slice(0, 150) || "";
-
-
-
-
-
-
+            return { path: path3, entry: `\u2022 ${path3}: ${content}...
+` };
+          });
+          for (const c of codeEntries) {
+            candidateParts.push(c.entry);
+          }
+          for (const c of codeEntries) {
+            if (currentChars + c.entry.length > maxChars) break;
+            parts.push(c.entry);
+            currentChars += c.entry.length;
+            sources.push({ type: "code", title: c.path });
           }
         }
       } catch {
       }
     }
     const context = parts.join("");
+    const candidateContext = candidateParts.join("");
     const tokenEstimate = Math.ceil(context.length / charsPerToken);
+    this.trackTokenSavings({
+      tool: "ai_context_budget",
+      workspace_id: withDefaults.workspace_id,
+      project_id: withDefaults.project_id,
+      candidate_chars: candidateContext.length,
+      context_chars: context.length,
+      max_tokens: maxTokens,
+      metadata: {
+        include_decisions: params.include_decisions !== false,
+        include_memory: params.include_memory !== false,
+        include_code: !!params.include_code,
+        sources: sources.length
+      }
+    }).catch(() => {
+    });
     return {
       context,
       token_estimate: tokenEstimate,
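
The budgeting in this hunk works in characters at roughly 4 characters per token: decisions may use up to 40% of the budget, decisions plus memory up to 70%, and code may fill the rest, while the untrimmed candidate text is reported alongside the final context. A worked example with an assumed `maxTokens`:

```js
// Only the ratios and the 4-chars-per-token heuristic come from the code above;
// maxTokens here is an assumed example value.
const maxTokens = 800;
const maxChars = maxTokens * 4;          // 3200 characters in total
const decisionCutoff = maxChars * 0.4;   // decisions stop before exceeding 1280 chars
const memoryCutoff = maxChars * 0.7;     // decisions + memory stop before 2240 chars
const codeCutoff = maxChars;             // code entries may fill up to the full 3200
// candidate_chars (everything fetched) and context_chars (what fit) are both
// sent to trackTokenSavings so the dashboard can compare them.
```
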
@@ -5812,6 +5981,7 @@ var ContextStreamClient = class {
     let context;
     let charsUsed = 0;
     const maxChars = maxTokens * 4;
+    let candidateContext;
     if (format === "minified") {
       const parts = [];
       for (const item of items) {
@@ -5821,6 +5991,7 @@ var ContextStreamClient = class {
         charsUsed += entry.length + 1;
       }
       context = parts.join("|");
+      candidateContext = items.map((i) => `${i.type}:${i.value}`).join("|");
     } else if (format === "structured") {
       const grouped = {};
       for (const item of items) {
@@ -5830,6 +6001,12 @@ var ContextStreamClient = class {
         charsUsed += item.value.length + 5;
       }
       context = JSON.stringify(grouped);
+      const candidateGrouped = {};
+      for (const item of items) {
+        if (!candidateGrouped[item.type]) candidateGrouped[item.type] = [];
+        candidateGrouped[item.type].push(item.value);
+      }
+      candidateContext = JSON.stringify(candidateGrouped);
     } else {
       const lines = ["[CTX]"];
       for (const item of items) {
@@ -5840,6 +6017,12 @@ var ContextStreamClient = class {
       }
       lines.push("[/CTX]");
       context = lines.join("\n");
+      const candidateLines = ["[CTX]"];
+      for (const item of items) {
+        candidateLines.push(`${item.type}:${item.value}`);
+      }
+      candidateLines.push("[/CTX]");
+      candidateContext = candidateLines.join("\n");
     }
     if (context.length === 0 && withDefaults.workspace_id) {
       const wsHint = items.find((i) => i.type === "W")?.value || withDefaults.workspace_id.slice(0, 8);
@@ -5847,7 +6030,23 @@
 W:${wsHint}
 [NO_MATCHES]
 [/CTX]`;
+      candidateContext = context;
     }
+    this.trackTokenSavings({
+      tool: "context_smart",
+      workspace_id: withDefaults.workspace_id,
+      project_id: withDefaults.project_id,
+      candidate_chars: candidateContext.length,
+      context_chars: context.length,
+      max_tokens: maxTokens,
+      metadata: {
+        format,
+        items: items.length,
+        keywords: keywords.slice(0, 10),
+        errors: errors.length
+      }
+    }).catch(() => {
+    });
     return {
       context,
       token_estimate: Math.ceil(context.length / 4),
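
For reference, a sketch of the three `context_smart` output formats that the new candidate tracking mirrors. The items are invented for the example: the type `"W"` does appear in the code above as the workspace hint, while `"D"` is a made-up type here.

```js
const items = [
  { type: "W", value: "acme-api" },
  { type: "D", value: "Use cursor-based pagination" }
];

// "minified": entries joined with "|"
const minified = items.map((i) => `${i.type}:${i.value}`).join("|");
// -> W:acme-api|D:Use cursor-based pagination

// "structured": values grouped by type, then JSON-stringified
const grouped = {};
for (const item of items) {
  if (!grouped[item.type]) grouped[item.type] = [];
  grouped[item.type].push(item.value);
}
const structured = JSON.stringify(grouped);
// -> {"W":["acme-api"],"D":["Use cursor-based pagination"]}

// default: one entry per line between [CTX] and [/CTX] markers
const lined = ["[CTX]", ...items.map((i) => `${i.type}:${i.value}`), "[/CTX]"].join("\n");
```
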
@@ -8246,6 +8445,11 @@ var SessionManager = class {
   markInitialized(context) {
     this.initialized = true;
     this.context = context;
+    const workspaceId = typeof context.workspace_id === "string" ? context.workspace_id : void 0;
+    const projectId = typeof context.project_id === "string" ? context.project_id : void 0;
+    if (workspaceId || projectId) {
+      this.client.setDefaults({ workspace_id: workspaceId, project_id: projectId });
+    }
   }
   /**
    * Set the folder path hint (can be passed from tools that know the workspace path)
@@ -8359,6 +8563,10 @@
     );
     this.initialized = true;
     this.context = context;
+    this.client.setDefaults({
+      workspace_id: typeof context.workspace_id === "string" ? context.workspace_id : void 0,
+      project_id: typeof context.project_id === "string" ? context.project_id : void 0
+    });
     console.error("[ContextStream] Workspace resolved:", context.workspace_name, "(source:", context.workspace_source, ")");
     const summary = this.buildContextSummary(context);
     console.error("[ContextStream] Auto-initialization complete");
package/package.json
CHANGED