@contextstream/mcp-server 0.3.32 → 0.3.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +36 -2
- package/dist/index.js +281 -74
- package/package.json +1 -1
package/README.md
CHANGED
@@ -156,17 +156,21 @@ User scope (all projects):
 ```bash
 claude mcp add --transport stdio contextstream --scope user \
 --env CONTEXTSTREAM_API_URL=https://api.contextstream.io \
---env CONTEXTSTREAM_API_KEY=YOUR_KEY
+--env CONTEXTSTREAM_API_KEY=YOUR_KEY \
+--env CONTEXTSTREAM_TOOLSET=core -- \
 npx -y @contextstream/mcp-server
 ```

+Tip: Claude Code warns on large tool contexts. The default toolset is `core`.
+Set `CONTEXTSTREAM_TOOLSET=full` to expose everything.
+
 Windows caveat (native Windows, not WSL): if `npx` isn’t found, use `cmd /c npx -y @contextstream/mcp-server` after `--`.

 Alternative (JSON form):

 ```bash
 claude mcp add-json contextstream \
-'{"type":"stdio","command":"npx","args":["-y","@contextstream/mcp-server"],"env":{"CONTEXTSTREAM_API_URL":"https://api.contextstream.io","CONTEXTSTREAM_API_KEY":"your_api_key"}}'
+'{"type":"stdio","command":"npx","args":["-y","@contextstream/mcp-server"],"env":{"CONTEXTSTREAM_API_URL":"https://api.contextstream.io","CONTEXTSTREAM_API_KEY":"your_api_key","CONTEXTSTREAM_TOOLSET":"core"}}'
 ```

 ### Codex CLI (`~/.codex/config.toml`)
@@ -200,9 +204,39 @@ You can authenticate using either:
 | `CONTEXTSTREAM_WORKSPACE_ID` | No | Default workspace ID fallback |
 | `CONTEXTSTREAM_PROJECT_ID` | No | Default project ID fallback |
 | `CONTEXTSTREAM_USER_AGENT` | No | Custom user agent string |
+| `CONTEXTSTREAM_TOOLSET` | No | Tool bundle to expose (`core` default, or `full`) |
+| `CONTEXTSTREAM_TOOL_ALLOWLIST` | No | Comma-separated tool names to expose (overrides toolset) |
 | `CONTEXTSTREAM_PRO_TOOLS` | No | Comma-separated tool names treated as PRO (default: `ai_context,ai_enhanced_context,ai_context_budget,ai_embeddings,ai_plan,ai_tasks`) |
 | `CONTEXTSTREAM_UPGRADE_URL` | No | Upgrade link shown when Free users call PRO tools (default: `https://contextstream.io/pricing`) |

+### Server-side environment variables (API)
+
+The following environment variables are configured on the ContextStream API server (not in your MCP client config):
+
+| Variable | Required | Description |
+|----------|----------|-------------|
+| `QA_FILE_WRITE_ROOT` | No | Server-side root directory for `write_to_disk` file writes. When set, the API allows the `projects_ingest_local` tool to write ingested files to disk for testing/QA purposes. Files are written under `<QA_FILE_WRITE_ROOT>/<project_id>/<relative_path>`. If not set, `write_to_disk` requests are rejected. |
+
+#### File write parameters for `projects_ingest_local`
+
+The `projects_ingest_local` tool accepts two optional parameters for QA/testing scenarios:
+
+| Parameter | Type | Default | Description |
+|-----------|------|---------|-------------|
+| `write_to_disk` | boolean | `false` | When `true`, writes ingested files to disk on the API server under `QA_FILE_WRITE_ROOT` before indexing. Requires the API to have `QA_FILE_WRITE_ROOT` configured. |
+| `overwrite` | boolean | `false` | When `true` (and `write_to_disk` is enabled), allows overwriting existing files. Otherwise, existing files are skipped. |
+
+**Example usage:**
+```json
+{
+  "path": "/path/to/local/project",
+  "write_to_disk": true,
+  "overwrite": false
+}
+```
+
+**Note:** The `write_to_disk` feature is intended for testing, QA, and development scenarios where you need to materialize files on a test server. In production, `QA_FILE_WRITE_ROOT` should typically be unset to disable file writes.
+
 ## Usage patterns

 ### Recommended flow for AI tools
package/dist/index.js
CHANGED
@@ -4136,7 +4136,7 @@ var RETRYABLE_STATUSES = /* @__PURE__ */ new Set([408, 429, 500, 502, 503, 504])
 var MAX_RETRIES = 3;
 var BASE_DELAY = 1e3;
 async function sleep(ms) {
-  return new Promise((
+  return new Promise((resolve3) => setTimeout(resolve3, ms));
 }
 async function request(config, path7, options = {}) {
   const { apiUrl, apiKey, jwt, userAgent } = config;
@@ -4429,6 +4429,51 @@ async function* readAllFilesInBatches(rootPath, options = {}) {
     yield batch;
   }
 }
+async function countIndexableFiles(rootPath, options = {}) {
+  const maxFiles = options.maxFiles ?? 1;
+  const maxFileSize = options.maxFileSize ?? MAX_FILE_SIZE;
+  let count = 0;
+  let stopped = false;
+  async function walkDir(dir) {
+    if (count >= maxFiles) {
+      stopped = true;
+      return;
+    }
+    let entries;
+    try {
+      entries = await fs.promises.readdir(dir, { withFileTypes: true });
+    } catch {
+      return;
+    }
+    for (const entry of entries) {
+      if (count >= maxFiles) {
+        stopped = true;
+        return;
+      }
+      const fullPath = path.join(dir, entry.name);
+      if (entry.isDirectory()) {
+        if (IGNORE_DIRS.has(entry.name)) continue;
+        await walkDir(fullPath);
+      } else if (entry.isFile()) {
+        if (IGNORE_FILES.has(entry.name)) continue;
+        const ext = entry.name.split(".").pop()?.toLowerCase() ?? "";
+        if (!CODE_EXTENSIONS.has(ext)) continue;
+        try {
+          const stat2 = await fs.promises.stat(fullPath);
+          if (stat2.size > maxFileSize) continue;
+          count++;
+          if (count >= maxFiles) {
+            stopped = true;
+            return;
+          }
+        } catch {
+        }
+      }
+    }
+  }
+  await walkDir(rootPath);
+  return { count, stopped };
+}

 // src/workspace-config.ts
 import * as fs2 from "fs";
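Aside (not part of the published package diff): `countIndexableFiles` is a pre-flight helper; with `maxFiles: 1` it walks the tree only until the first indexable file is found. A minimal usage sketch, assuming the helper and its ignore lists are in scope as above (the wrapper name `assertHasIndexableFiles` is hypothetical):

```js
// Sketch only: reject empty or fully-ignored directories before starting ingestion,
// mirroring how the projects_ingest_local handler uses this helper later in the diff.
async function assertHasIndexableFiles(rootPath) {
  const { count, stopped } = await countIndexableFiles(rootPath, { maxFiles: 1 });
  if (count === 0) {
    throw new Error(`No indexable files found in directory: ${rootPath}`);
  }
  return stopped; // true means the walk stopped early after hitting maxFiles
}
```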
@@ -4964,11 +5009,20 @@ var ContextStreamClient = class {
   /**
    * Ingest files for indexing
    * This uploads files to the API for indexing
+   * @param projectId - Project UUID
+   * @param files - Array of files to ingest
+   * @param options - Optional ingest options
+   * @param options.write_to_disk - When true, write files to disk under QA_FILE_WRITE_ROOT before indexing
+   * @param options.overwrite - Allow overwriting existing files when write_to_disk is enabled
   */
-  ingestFiles(projectId, files) {
+  ingestFiles(projectId, files, options) {
     uuidSchema.parse(projectId);
     return request(this.config, `/projects/${projectId}/files/ingest`, {
-      body: {
+      body: {
+        files,
+        ...options?.write_to_disk !== void 0 && { write_to_disk: options.write_to_disk },
+        ...options?.overwrite !== void 0 && { overwrite: options.overwrite }
+      }
     });
   }
   // Workspace extended operations (with caching)
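Aside (not part of the published package diff): the new optional `options` argument only adds `write_to_disk`/`overwrite` to the request body when they are explicitly set. A hedged usage sketch; `projectId` and `batch` are placeholders:

```js
// Sketch only: calling the extended ingestFiles signature.
// Body sent: { files } by default; the QA flags are added only when provided.
const result = await client.ingestFiles(projectId, batch, {
  write_to_disk: true, // requires QA_FILE_WRITE_ROOT to be configured on the API server
  overwrite: false     // existing files are skipped rather than replaced
});
console.error(`Indexed ${result.data?.files_indexed ?? batch.length} files`);
```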
@@ -6567,6 +6621,7 @@ W:${wsHint}
 };

 // src/tools.ts
+import * as fs3 from "node:fs";
 import * as path4 from "node:path";

 // src/rules-templates.ts
@@ -6862,6 +6917,61 @@ var VERSION = getVersion();
 // src/tools.ts
 var LESSON_DEDUP_WINDOW_MS = 2 * 60 * 1e3;
 var recentLessonCaptures = /* @__PURE__ */ new Map();
+var CORE_TOOLSET = /* @__PURE__ */ new Set([
+  "session_init",
+  "context_smart",
+  "session_summary",
+  "session_capture",
+  "session_capture_lesson",
+  "session_get_lessons",
+  "session_recall",
+  "session_remember",
+  "session_get_user_context",
+  "session_smart_search",
+  "session_compress",
+  "session_delta",
+  "generate_editor_rules",
+  "workspace_associate",
+  "workspace_bootstrap",
+  "auth_me",
+  "mcp_server_version"
+]);
+var TOOLSET_ALIASES = {
+  core: CORE_TOOLSET,
+  minimal: CORE_TOOLSET,
+  essential: CORE_TOOLSET
+};
+function parseToolList(raw) {
+  return new Set(
+    raw.split(",").map((tool) => tool.trim()).filter(Boolean)
+  );
+}
+function resolveToolFilter() {
+  const defaultToolset = CORE_TOOLSET;
+  const allowlistRaw = process.env.CONTEXTSTREAM_TOOL_ALLOWLIST;
+  if (allowlistRaw) {
+    const allowlist = parseToolList(allowlistRaw);
+    if (allowlist.size === 0) {
+      console.error("[ContextStream] CONTEXTSTREAM_TOOL_ALLOWLIST is empty; using core tool list.");
+      return { allowlist: defaultToolset, source: "core" };
+    }
+    return { allowlist, source: "allowlist" };
+  }
+  const toolsetRaw = process.env.CONTEXTSTREAM_TOOLSET;
+  if (!toolsetRaw) {
+    return { allowlist: defaultToolset, source: "core" };
+  }
+  const key = toolsetRaw.trim().toLowerCase();
+  if (!key || key === "full" || key === "all") {
+    return { allowlist: null, source: "full" };
+  }
+  const resolved = TOOLSET_ALIASES[key];
+  if (resolved) {
+    return { allowlist: resolved, source: key };
+  }
+  console.error(`[ContextStream] Unknown CONTEXTSTREAM_TOOLSET "${toolsetRaw}". Using core tool list.`);
+  return { allowlist: defaultToolset, source: "core" };
+}
 function formatContent(data) {
   return JSON.stringify(data, null, 2);
 }
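Aside (not part of the published package diff): a quick sketch of how `resolveToolFilter()` above reacts to the two environment variables; outcomes are shown as comments and follow directly from the code in this hunk:

```js
// Sketch only: expected outcomes of resolveToolFilter() for different settings.
delete process.env.CONTEXTSTREAM_TOOL_ALLOWLIST;
delete process.env.CONTEXTSTREAM_TOOLSET;
resolveToolFilter(); // -> { allowlist: CORE_TOOLSET, source: "core" } (default)

process.env.CONTEXTSTREAM_TOOLSET = "minimal"; // "minimal"/"essential" alias "core"
resolveToolFilter(); // -> { allowlist: CORE_TOOLSET, source: "minimal" }

process.env.CONTEXTSTREAM_TOOLSET = "full"; // or "all"
resolveToolFilter(); // -> { allowlist: null, source: "full" } (no filtering)

process.env.CONTEXTSTREAM_TOOL_ALLOWLIST = "session_init,context_smart";
resolveToolFilter(); // -> { allowlist: Set {"session_init","context_smart"}, source: "allowlist" }
```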
@@ -6902,6 +7012,13 @@ function isDuplicateLessonCapture(signature) {
 }
 function registerTools(server, client, sessionManager) {
   const upgradeUrl = process.env.CONTEXTSTREAM_UPGRADE_URL || "https://contextstream.io/pricing";
+  const toolFilter = resolveToolFilter();
+  const toolAllowlist = toolFilter.allowlist;
+  if (toolAllowlist) {
+    const source = toolFilter.source ?? "custom";
+    const hint = source === "core" ? " Set CONTEXTSTREAM_TOOLSET=full to expose all tools." : "";
+    console.error(`[ContextStream] Toolset limited (${source}): ${toolAllowlist.size} tools.${hint}`);
+  }
   const defaultProTools = /* @__PURE__ */ new Set([
     // AI endpoints (typically paid/credit-metered)
     "ai_context",
@@ -6977,6 +7094,9 @@ function registerTools(server, client, sessionManager) {
   };
   }
   function registerTool(name, config, handler) {
+    if (toolAllowlist && !toolAllowlist.has(name)) {
+      return;
+    }
     const accessLabel = getToolAccessLabel(name);
     const labeledConfig = {
       ...config,
@@ -6997,10 +7117,20 @@ Access: ${accessLabel}${accessLabel === "PRO" ? ` (upgrade: ${upgradeUrl})` : ""
       const isPlanLimit = String(errorCode).toUpperCase() === "FORBIDDEN" && String(errorMessage).toLowerCase().includes("plan limit reached");
       const upgradeHint = isPlanLimit ? `
 Upgrade: ${upgradeUrl}` : "";
-      const
-
-
-
+      const errorPayload = {
+        success: false,
+        error: {
+          code: errorCode,
+          message: errorMessage,
+          details: errorDetails
+        }
+      };
+      const errorText = `[${errorCode}] ${errorMessage}${upgradeHint}${errorDetails ? `: ${JSON.stringify(errorDetails)}` : ""}`;
+      return {
+        content: [{ type: "text", text: errorText }],
+        structuredContent: errorPayload,
+        isError: true
+      };
     }
   };
   server.registerTool(
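Aside (not part of the published package diff): the rewritten error branch now returns both a human-readable text block and a `structuredContent` payload. A sketch of the resulting tool error result, using hypothetical error values:

```js
// Sketch only: shape of the error result an MCP client receives (values are made up).
const exampleErrorResult = {
  content: [{ type: "text", text: "[FORBIDDEN] Plan limit reached\nUpgrade: https://contextstream.io/pricing" }],
  structuredContent: {
    success: false,
    error: { code: "FORBIDDEN", message: "Plan limit reached", details: undefined }
  },
  isError: true
};
```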
@@ -7025,6 +7155,33 @@ Upgrade: ${upgradeUrl}` : "";
     const ctx = sessionManager?.getContext();
     return typeof ctx?.project_id === "string" ? ctx.project_id : void 0;
   }
+  async function validateReadableDirectory(inputPath) {
+    const resolvedPath = path4.resolve(inputPath);
+    let stats;
+    try {
+      stats = await fs3.promises.stat(resolvedPath);
+    } catch (error) {
+      if (error?.code === "ENOENT") {
+        return { ok: false, error: `Error: path does not exist: ${inputPath}` };
+      }
+      return {
+        ok: false,
+        error: `Error: unable to access path: ${inputPath}${error?.message ? ` (${error.message})` : ""}`
+      };
+    }
+    if (!stats.isDirectory()) {
+      return { ok: false, error: `Error: path is not a directory: ${inputPath}` };
+    }
+    try {
+      await fs3.promises.access(resolvedPath, fs3.constants.R_OK | fs3.constants.X_OK);
+    } catch (error) {
+      return {
+        ok: false,
+        error: `Error: path is not readable: ${inputPath}${error?.code ? ` (${error.code})` : ""}`
+      };
+    }
+    return { ok: true, resolvedPath };
+  }
   registerTool(
     "mcp_server_version",
     {
@@ -7053,7 +7210,7 @@
     "workspaces_list",
     {
       title: "List workspaces",
-      description: "List accessible workspaces",
+      description: "List accessible workspaces (paginated list: items, total, page, per_page, has_next, has_prev).",
       inputSchema: external_exports.object({ page: external_exports.number().optional(), page_size: external_exports.number().optional() })
     },
     async (input) => {
@@ -7065,7 +7222,7 @@
     "workspaces_create",
     {
       title: "Create workspace",
-      description: "Create a new workspace",
+      description: "Create a new workspace (returns ApiResponse with created workspace in data).",
       inputSchema: external_exports.object({
         name: external_exports.string(),
         description: external_exports.string().optional(),
@@ -7113,7 +7270,7 @@
     "projects_list",
     {
       title: "List projects",
-      description: "List projects (optionally by workspace)",
+      description: "List projects (optionally by workspace; paginated list: items, total, page, per_page, has_next, has_prev).",
       inputSchema: external_exports.object({ workspace_id: external_exports.string().uuid().optional(), page: external_exports.number().optional(), page_size: external_exports.number().optional() })
     },
     async (input) => {
@@ -7125,7 +7282,7 @@
     "projects_create",
     {
       title: "Create project",
-      description: "Create a project within a workspace",
+      description: "Create a project within a workspace (returns ApiResponse with created project in data).",
       inputSchema: external_exports.object({
         name: external_exports.string(),
         description: external_exports.string().optional(),
@@ -7400,9 +7557,14 @@
     "graph_dependencies",
     {
       title: "Code dependencies",
-      description:
+      description: `Dependency graph query
+
+Access: Free`,
       inputSchema: external_exports.object({
-        target: external_exports.object({
+        target: external_exports.object({
+          type: external_exports.string().describe("Code element type. Accepted values: module (aliases: file, path), function (alias: method), type (aliases: struct, enum, trait, class), variable (aliases: data, const, constant). For knowledge/memory nodes, use graph_path with UUID ids instead."),
+          id: external_exports.string().describe('Element identifier. For module type, use file path (e.g., "src/auth.rs"). For function/type/variable, use the element id.')
+        }),
         max_depth: external_exports.number().optional(),
         include_transitive: external_exports.boolean().optional()
       })
@@ -7416,10 +7578,18 @@
     "graph_call_path",
     {
       title: "Call path",
-      description:
+      description: `Find call path between two targets
+
+Access: Free`,
       inputSchema: external_exports.object({
-        source: external_exports.object({
-
+        source: external_exports.object({
+          type: external_exports.string().describe('Must be "function" (alias: method). Only function types are supported for call path analysis. For knowledge/memory nodes, use graph_path with UUID ids instead.'),
+          id: external_exports.string().describe("Source function identifier.")
+        }),
+        target: external_exports.object({
+          type: external_exports.string().describe('Must be "function" (alias: method). Only function types are supported for call path analysis.'),
+          id: external_exports.string().describe("Target function identifier.")
+        }),
         max_depth: external_exports.number().optional()
       })
     },
@@ -7432,8 +7602,16 @@
     "graph_impact",
     {
       title: "Impact analysis",
-      description:
-
+      description: `Analyze impact of a target node
+
+Access: Free`,
+      inputSchema: external_exports.object({
+        target: external_exports.object({
+          type: external_exports.string().describe("Code element type. Accepted values: module (aliases: file, path), function (alias: method), type (aliases: struct, enum, trait, class), variable (aliases: data, const, constant). For knowledge/memory nodes, use graph_path with UUID ids instead."),
+          id: external_exports.string().describe('Element identifier. For module type, use file path (e.g., "src/auth.rs"). For function/type/variable, use the element id.')
+        }),
+        max_depth: external_exports.number().optional()
+      })
     },
     async (input) => {
       const result = await client.graphImpact(input);
@@ -7614,7 +7792,9 @@ This indexes your entire project by reading files in batches.
 Automatically detects code files and skips ignored directories like node_modules, target, dist, etc.`,
       inputSchema: external_exports.object({
         project_id: external_exports.string().uuid().optional().describe("Project to ingest files into (defaults to current session project)"),
-        path: external_exports.string().describe("Local directory path to read files from")
+        path: external_exports.string().describe("Local directory path to read files from"),
+        write_to_disk: external_exports.boolean().optional().describe("When true, write files to disk under QA_FILE_WRITE_ROOT before indexing (for testing/QA)"),
+        overwrite: external_exports.boolean().optional().describe("Allow overwriting existing files when write_to_disk is enabled")
       })
     },
     async (input) => {
@@ -7622,13 +7802,27 @@ Automatically detects code files and skips ignored directories like node_modules
       if (!projectId) {
         return errorResult("Error: project_id is required. Please call session_init first or provide project_id explicitly.");
       }
+      const pathCheck = await validateReadableDirectory(input.path);
+      if (!pathCheck.ok) {
+        return errorResult(pathCheck.error);
+      }
+      const fileCheck = await countIndexableFiles(pathCheck.resolvedPath, { maxFiles: 1 });
+      if (fileCheck.count === 0) {
+        return errorResult(
+          `Error: no indexable files found in directory: ${input.path}. The directory may be empty or contain only ignored files/directories. Supported file types include: .ts, .js, .py, .rs, .go, .java, .md, .json, etc.`
+        );
+      }
+      const ingestOptions = {
+        ...input.write_to_disk !== void 0 && { write_to_disk: input.write_to_disk },
+        ...input.overwrite !== void 0 && { overwrite: input.overwrite }
+      };
       (async () => {
         try {
           let totalIndexed = 0;
           let batchCount = 0;
-          console.error(`[ContextStream] Starting background ingestion for project ${projectId} from ${
-          for await (const batch of readAllFilesInBatches(
-          const result = await client.ingestFiles(projectId, batch);
+          console.error(`[ContextStream] Starting background ingestion for project ${projectId} from ${pathCheck.resolvedPath}`);
+          for await (const batch of readAllFilesInBatches(pathCheck.resolvedPath, { batchSize: 50 })) {
+            const result = await client.ingestFiles(projectId, batch, ingestOptions);
             totalIndexed += result.data?.files_indexed ?? batch.length;
             batchCount++;
           }
@@ -7642,6 +7836,8 @@ Automatically detects code files and skips ignored directories like node_modules
         message: "Ingestion running in background",
         project_id: projectId,
         path: input.path,
+        ...input.write_to_disk && { write_to_disk: input.write_to_disk },
+        ...input.overwrite && { overwrite: input.overwrite },
         note: "Use 'projects_index_status' to monitor progress."
       };
       return {
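Aside (not part of the published package diff): with the new pre-flight checks, a `projects_ingest_local` call now fails fast on bad paths and returns immediately while ingestion continues in the background. A sketch of the call and the immediate response; the project UUID is a placeholder:

```js
// Sketch only: tool input and the immediate response before background batches finish.
const input = {
  project_id: "00000000-0000-0000-0000-000000000000",
  path: "/path/to/local/project",
  write_to_disk: true, // only honored when the API has QA_FILE_WRITE_ROOT set
  overwrite: false     // falsy flags are omitted from the response object
};
// Immediate response data:
// {
//   message: "Ingestion running in background",
//   project_id: input.project_id,
//   path: input.path,
//   write_to_disk: true,
//   note: "Use 'projects_index_status' to monitor progress."
// }
```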
@@ -7705,7 +7901,7 @@
     "workspaces_content",
     {
       title: "Workspace content",
-      description: "List content in a workspace",
+      description: "List content in a workspace (paginated list: items, total, page, per_page, has_next, has_prev).",
       inputSchema: external_exports.object({ workspace_id: external_exports.string().uuid().optional() })
     },
     async (input) => {
@@ -8051,7 +8247,7 @@ Optionally generates AI editor rules for automatic ContextStream usage.`,
       const result = await client.associateWorkspace(input);
       let rulesGenerated = [];
       if (input.generate_editor_rules) {
-        const
+        const fs6 = await import("fs");
         const path7 = await import("path");
         for (const editor of getAvailableEditors()) {
           const rule = generateRuleContent(editor, {
@@ -8063,14 +8259,14 @@
           try {
             let existingContent = "";
             try {
-              existingContent =
+              existingContent = fs6.readFileSync(filePath, "utf-8");
             } catch {
             }
             if (!existingContent) {
-
+              fs6.writeFileSync(filePath, rule.content);
               rulesGenerated.push(rule.filename);
             } else if (!existingContent.includes("ContextStream Integration")) {
-
+              fs6.writeFileSync(filePath, existingContent + "\n\n" + rule.content);
               rulesGenerated.push(rule.filename + " (appended)");
             }
           } catch {
@@ -8156,7 +8352,7 @@ Behavior:
       });
       let rulesGenerated = [];
       if (input.generate_editor_rules) {
-        const
+        const fs6 = await import("fs");
         const path7 = await import("path");
         for (const editor of getAvailableEditors()) {
           const rule = generateRuleContent(editor, {
@@ -8168,14 +8364,14 @@
           try {
             let existingContent = "";
             try {
-              existingContent =
+              existingContent = fs6.readFileSync(filePath, "utf-8");
             } catch {
             }
             if (!existingContent) {
-
+              fs6.writeFileSync(filePath, rule.content);
               rulesGenerated.push(rule.filename);
             } else if (!existingContent.includes("ContextStream Integration")) {
-
+              fs6.writeFileSync(filePath, existingContent + "\n\n" + rule.content);
               rulesGenerated.push(rule.filename + " (appended)");
             }
           } catch {
@@ -8594,7 +8790,7 @@ Supported editors: ${getAvailableEditors().join(", ")}`,
       })
     },
     async (input) => {
-      const
+      const fs6 = await import("fs");
       const path7 = await import("path");
       const editors = input.editors?.includes("all") || !input.editors ? getAvailableEditors() : input.editors.filter((e) => e !== "all");
       const results = [];
@@ -8622,15 +8818,15 @@
         try {
           let existingContent = "";
           try {
-            existingContent =
+            existingContent = fs6.readFileSync(filePath, "utf-8");
           } catch {
           }
           if (existingContent && !existingContent.includes("ContextStream Integration")) {
             const updatedContent = existingContent + "\n\n" + rule.content;
-
+            fs6.writeFileSync(filePath, updatedContent);
             results.push({ editor, filename: rule.filename, status: "appended to existing" });
           } else {
-
+            fs6.writeFileSync(filePath, rule.content);
             results.push({ editor, filename: rule.filename, status: "created" });
           }
         } catch (err) {
@@ -9368,11 +9564,11 @@ var SessionManager = class {
     }
     if (this.ideRoots.length === 0) {
       const cwd = process.cwd();
-      const
+      const fs6 = await import("fs");
      const projectIndicators = [".git", "package.json", "Cargo.toml", "pyproject.toml", ".contextstream"];
      const hasProjectIndicator = projectIndicators.some((f) => {
        try {
-          return
+          return fs6.existsSync(`${cwd}/${f}`);
        } catch {
          return false;
        }
@@ -9554,14 +9750,14 @@ import { homedir as homedir3 } from "os";
 import { join as join6 } from "path";

 // src/setup.ts
-import * as
+import * as fs5 from "node:fs/promises";
 import * as path6 from "node:path";
 import { homedir as homedir2 } from "node:os";
 import { stdin, stdout } from "node:process";
 import { createInterface } from "node:readline/promises";

 // src/credentials.ts
-import * as
+import * as fs4 from "node:fs/promises";
 import * as path5 from "node:path";
 import { homedir } from "node:os";
 function normalizeApiUrl(input) {
@@ -9576,7 +9772,7 @@ function isRecord(value) {
 async function readSavedCredentials() {
   const filePath = credentialsFilePath();
   try {
-    const raw = await
+    const raw = await fs4.readFile(filePath, "utf8");
     const parsed = JSON.parse(raw);
     if (!isRecord(parsed)) return null;
     const version = parsed.version;
@@ -9602,7 +9798,7 @@ async function readSavedCredentials() {
 }
 async function writeSavedCredentials(input) {
   const filePath = credentialsFilePath();
-  await
+  await fs4.mkdir(path5.dirname(filePath), { recursive: true });
   const now = (/* @__PURE__ */ new Date()).toISOString();
   const existing = await readSavedCredentials();
   const value = {
@@ -9614,9 +9810,9 @@ async function writeSavedCredentials(input) {
     updated_at: now
   };
   const body = JSON.stringify(value, null, 2) + "\n";
-  await
+  await fs4.writeFile(filePath, body, { encoding: "utf8", mode: 384 });
   try {
-    await
+    await fs4.chmod(filePath, 384);
   } catch {
   }
   return { path: filePath, value };
@@ -9660,23 +9856,23 @@ function parseNumberList(input, max) {
 }
 async function fileExists(filePath) {
   try {
-    await
+    await fs5.stat(filePath);
     return true;
   } catch {
     return false;
   }
 }
 async function upsertTextFile(filePath, content, marker) {
-  await
+  await fs5.mkdir(path6.dirname(filePath), { recursive: true });
   const exists = await fileExists(filePath);
   if (!exists) {
-    await
+    await fs5.writeFile(filePath, content, "utf8");
     return "created";
   }
-  const existing = await
+  const existing = await fs5.readFile(filePath, "utf8").catch(() => "");
   if (existing.includes(marker)) return "skipped";
   const joined = existing.trimEnd() + "\n\n" + content.trim() + "\n";
-  await
+  await fs5.writeFile(filePath, joined, "utf8");
   return "appended";
 }
 function globalRulesPathForEditor(editor) {
@@ -9703,24 +9899,32 @@ function globalRulesPathForEditor(editor) {
   }
 }
 function buildContextStreamMcpServer(params) {
+  const env = {
+    CONTEXTSTREAM_API_URL: params.apiUrl,
+    CONTEXTSTREAM_API_KEY: params.apiKey
+  };
+  if (params.toolset) {
+    env.CONTEXTSTREAM_TOOLSET = params.toolset;
+  }
   return {
     command: "npx",
     args: ["-y", "@contextstream/mcp-server"],
-    env
-    CONTEXTSTREAM_API_URL: params.apiUrl,
-    CONTEXTSTREAM_API_KEY: params.apiKey
-    }
+    env
   };
 }
 function buildContextStreamVsCodeServer(params) {
+  const env = {
+    CONTEXTSTREAM_API_URL: params.apiUrl,
+    CONTEXTSTREAM_API_KEY: params.apiKey
+  };
+  if (params.toolset) {
+    env.CONTEXTSTREAM_TOOLSET = params.toolset;
+  }
   return {
     type: "stdio",
     command: "npx",
     args: ["-y", "@contextstream/mcp-server"],
-    env
-    CONTEXTSTREAM_API_URL: params.apiUrl,
-    CONTEXTSTREAM_API_KEY: params.apiKey
-    }
+    env
   };
 }
 function stripJsonComments(input) {
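Aside (not part of the published package diff): `buildContextStreamMcpServer` now forwards an optional `toolset` into the generated env block; the setup flow later in this diff uses it to pin Claude clients to the `core` toolset. A sketch of the builder output, with placeholder credentials:

```js
// Sketch only: result of the builder when a toolset is supplied.
buildContextStreamMcpServer({ apiUrl: "https://api.contextstream.io", apiKey: "YOUR_KEY", toolset: "core" });
// -> {
//      command: "npx",
//      args: ["-y", "@contextstream/mcp-server"],
//      env: {
//        CONTEXTSTREAM_API_URL: "https://api.contextstream.io",
//        CONTEXTSTREAM_API_KEY: "YOUR_KEY",
//        CONTEXTSTREAM_TOOLSET: "core"
//      }
//    }
```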
@@ -9742,11 +9946,11 @@ function tryParseJsonLike(raw) {
   }
 }
 async function upsertJsonMcpConfig(filePath, server) {
-  await
+  await fs5.mkdir(path6.dirname(filePath), { recursive: true });
   const exists = await fileExists(filePath);
   let root = {};
   if (exists) {
-    const raw = await
+    const raw = await fs5.readFile(filePath, "utf8").catch(() => "");
     const parsed = tryParseJsonLike(raw);
     if (!parsed.ok) throw new Error(`Invalid JSON in ${filePath}: ${parsed.error}`);
     root = parsed.value;
@@ -9756,16 +9960,16 @@ async function upsertJsonMcpConfig(filePath, server) {
   const before = JSON.stringify(root.mcpServers.contextstream ?? null);
   root.mcpServers.contextstream = server;
   const after = JSON.stringify(root.mcpServers.contextstream ?? null);
-  await
+  await fs5.writeFile(filePath, JSON.stringify(root, null, 2) + "\n", "utf8");
   if (!exists) return "created";
   return before === after ? "skipped" : "updated";
 }
 async function upsertJsonVsCodeMcpConfig(filePath, server) {
-  await
+  await fs5.mkdir(path6.dirname(filePath), { recursive: true });
   const exists = await fileExists(filePath);
   let root = {};
   if (exists) {
-    const raw = await
+    const raw = await fs5.readFile(filePath, "utf8").catch(() => "");
     const parsed = tryParseJsonLike(raw);
     if (!parsed.ok) throw new Error(`Invalid JSON in ${filePath}: ${parsed.error}`);
     root = parsed.value;
@@ -9775,7 +9979,7 @@ async function upsertJsonVsCodeMcpConfig(filePath, server) {
   const before = JSON.stringify(root.servers.contextstream ?? null);
   root.servers.contextstream = server;
   const after = JSON.stringify(root.servers.contextstream ?? null);
-  await
+  await fs5.writeFile(filePath, JSON.stringify(root, null, 2) + "\n", "utf8");
   if (!exists) return "created";
   return before === after ? "skipped" : "updated";
 }
@@ -9791,9 +9995,9 @@ function claudeDesktopConfigPath() {
   return null;
 }
 async function upsertCodexTomlConfig(filePath, params) {
-  await
+  await fs5.mkdir(path6.dirname(filePath), { recursive: true });
   const exists = await fileExists(filePath);
-  const existing = exists ? await
+  const existing = exists ? await fs5.readFile(filePath, "utf8").catch(() => "") : "";
   const marker = "[mcp_servers.contextstream]";
   const envMarker = "[mcp_servers.contextstream.env]";
   const block = `
@@ -9808,15 +10012,15 @@ CONTEXTSTREAM_API_URL = "${params.apiUrl}"
 CONTEXTSTREAM_API_KEY = "${params.apiKey}"
 `;
   if (!exists) {
-    await
+    await fs5.writeFile(filePath, block.trimStart(), "utf8");
     return "created";
   }
   if (!existing.includes(marker)) {
-    await
+    await fs5.writeFile(filePath, existing.trimEnd() + block, "utf8");
     return "updated";
   }
   if (!existing.includes(envMarker)) {
-    await
+    await fs5.writeFile(filePath, existing.trimEnd() + "\n\n" + envMarker + `
 CONTEXTSTREAM_API_URL = "${params.apiUrl}"
 CONTEXTSTREAM_API_KEY = "${params.apiKey}"
 `, "utf8");
@@ -9857,11 +10061,11 @@ CONTEXTSTREAM_API_KEY = "${params.apiKey}"
   }
   const updated = out.join("\n");
   if (updated === existing) return "skipped";
-  await
+  await fs5.writeFile(filePath, updated, "utf8");
   return "updated";
 }
 async function discoverProjectsUnderFolder(parentFolder) {
-  const entries = await
+  const entries = await fs5.readdir(parentFolder, { withFileTypes: true });
   const candidates = entries.filter((e) => e.isDirectory() && !e.name.startsWith(".")).map((e) => path6.join(parentFolder, e.name));
   const projects = [];
   for (const dir of candidates) {
@@ -9970,10 +10174,10 @@ Code: ${device.user_code}`);
     if (poll && poll.status === "pending") {
       const intervalSeconds = typeof poll.interval === "number" ? poll.interval : 5;
       const waitMs = Math.max(1, intervalSeconds) * 1e3;
-      await new Promise((
+      await new Promise((resolve3) => setTimeout(resolve3, waitMs));
       continue;
     }
-    await new Promise((
+    await new Promise((resolve3) => setTimeout(resolve3, 1e3));
   }
   if (!accessToken) {
     throw new Error("Browser login expired or was not approved in time. Please run setup again.");
@@ -10106,6 +10310,7 @@ Created API key: ${maskApiKey(apiKey)}
   const mcpChoice = normalizeInput(await rl.question(`Choose [${hasCodex && !hasProjectMcpEditors ? "1/2" : "1/2/3/4"}] (default ${mcpChoiceDefault}): `)) || mcpChoiceDefault;
   const mcpScope = mcpChoice === "2" && hasCodex && !hasProjectMcpEditors ? "skip" : mcpChoice === "4" ? "skip" : mcpChoice === "1" ? "global" : mcpChoice === "2" ? "project" : "both";
   const mcpServer = buildContextStreamMcpServer({ apiUrl, apiKey });
+  const mcpServerClaude = buildContextStreamMcpServer({ apiUrl, apiKey, toolset: "core" });
   const vsCodeServer = buildContextStreamVsCodeServer({ apiUrl, apiKey });
   const needsGlobalMcpConfig = mcpScope === "global" || mcpScope === "both" || mcpScope === "project" && hasCodex;
   if (needsGlobalMcpConfig) {
@@ -10146,14 +10351,14 @@ Created API key: ${maskApiKey(apiKey)}
         writeActions.push({ kind: "mcp-config", target: desktopPath, status: "dry-run" });
         console.log(`- Claude Desktop: would update ${desktopPath}`);
       } else {
-        const status = await upsertJsonMcpConfig(desktopPath,
+        const status = await upsertJsonMcpConfig(desktopPath, mcpServerClaude);
         writeActions.push({ kind: "mcp-config", target: desktopPath, status });
         console.log(`- Claude Desktop: ${status} ${desktopPath}`);
       }
     }
   }
   console.log("- Claude Code: global MCP config is best done via `claude mcp add --transport stdio ...` (see docs).");
-  console.log(" macOS/Linux: claude mcp add --transport stdio contextstream --scope user --env CONTEXTSTREAM_API_URL=... --env CONTEXTSTREAM_API_KEY=... -- npx -y @contextstream/mcp-server");
+  console.log(" macOS/Linux: claude mcp add --transport stdio contextstream --scope user --env CONTEXTSTREAM_API_URL=... --env CONTEXTSTREAM_API_KEY=... --env CONTEXTSTREAM_TOOLSET=core -- npx -y @contextstream/mcp-server");
   console.log(" Windows (native): use `cmd /c npx -y @contextstream/mcp-server` after `--` if `npx` is not found.");
   continue;
   }
@@ -10294,7 +10499,7 @@ Applying to ${projects.length} project(s)...`);
   if (dryRun) {
     writeActions.push({ kind: "mcp-config", target: mcpPath, status: "dry-run" });
   } else {
-    const status = await upsertJsonMcpConfig(mcpPath,
+    const status = await upsertJsonMcpConfig(mcpPath, mcpServerClaude);
     writeActions.push({ kind: "mcp-config", target: mcpPath, status });
   }
   continue;
@@ -10408,6 +10613,8 @@ Environment variables:
   CONTEXTSTREAM_JWT            JWT for authentication (alternative to API key)
   CONTEXTSTREAM_WORKSPACE_ID   Optional default workspace ID
   CONTEXTSTREAM_PROJECT_ID     Optional default project ID
+  CONTEXTSTREAM_TOOLSET        Optional tool bundle (core|full). Defaults to core to reduce tool context size.
+  CONTEXTSTREAM_TOOL_ALLOWLIST Optional comma-separated tool names to expose (overrides toolset)
   CONTEXTSTREAM_PRO_TOOLS      Optional comma-separated PRO tool names (default: AI tools)
   CONTEXTSTREAM_UPGRADE_URL    Optional upgrade URL shown for PRO tools on Free plan

package/package.json
CHANGED