@zilliz/claude-context-mcp 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Zilliz
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,582 @@
1
+ # @zilliz/claude-context-mcp
2
+ ![](../../assets/code_context_logo_dark.png)
3
+ Model Context Protocol (MCP) integration for Code Context — a powerful MCP server that enables AI assistants and agents to index and search codebases using semantic search.
4
+
5
+ [![npm version](https://img.shields.io/npm/v/@zilliz/claude-context-mcp.svg)](https://www.npmjs.com/package/@zilliz/claude-context-mcp)
6
+ [![npm downloads](https://img.shields.io/npm/dm/@zilliz/claude-context-mcp.svg)](https://www.npmjs.com/package/@zilliz/claude-context-mcp)
7
+
8
+ > 📖 **New to Code Context?** Check out the [main project README](../../README.md) for an overview and setup instructions.
9
+
10
+
11
+ ## 🚀 Use Code Context as MCP in Claude Code and others
12
+
13
+ ![img](https://lh7-rt.googleusercontent.com/docsz/AD_4nXeUgHZrQT1xNXvPLa5DuPQLpnK5yhHk6yJvLwcq5ZBAaUWo69tcyqalcChWFF4sjQ1mjUSBZgKqLKtD1edKnCPq2af6D_jGRNvwyTEc2UcGnJbsFw1mu_uSmdZHxTLdLO6dFAa8kg?key=_L-CtW461S9w7NRqzdFOIg)
14
+
15
+ Model Context Protocol (MCP) allows you to integrate Code Context with your favorite AI coding assistants, e.g. Claude Code.
16
+
17
+
18
+ ## Quick Start
19
+
20
+ ### Prerequisites
21
+
22
+ Before using the MCP server, make sure you have:
23
+ - API key for your chosen embedding provider (OpenAI, VoyageAI, Gemini, or Ollama setup)
24
+ - Milvus vector database (local or cloud)
25
+
26
+ > 💡 **Setup Help:** See the [main project setup guide](../../README.md#-quick-start) for detailed installation instructions.
27
+
28
+ ### Prepare Environment Variables
29
+
30
+ #### Embedding Provider Configuration
31
+
32
+ Code Context MCP supports multiple embedding providers. Choose the one that best fits your needs:
33
+
34
+ > 💡 **Tip**: You can also use [global environment variables](../../docs/getting-started/environment-variables.md) for easier configuration management across different MCP clients.
35
+
36
+ ```bash
37
+ # Supported providers: OpenAI, VoyageAI, Gemini, Ollama
38
+ EMBEDDING_PROVIDER=OpenAI
39
+ ```
40
+
41
+ <details>
42
+ <summary><strong>1. OpenAI Configuration (Default)</strong></summary>
43
+
44
+ OpenAI provides high-quality embeddings with excellent performance for code understanding.
45
+
46
+ ```bash
47
+ # Required: Your OpenAI API key
48
+ OPENAI_API_KEY=sk-your-openai-api-key
49
+
50
+ # Optional: Specify embedding model (default: text-embedding-3-small)
51
+ EMBEDDING_MODEL=text-embedding-3-small
52
+
53
+ # Optional: Custom API base URL (for Azure OpenAI or other compatible services)
54
+ OPENAI_BASE_URL=https://api.openai.com/v1
55
+ ```
56
+
57
+ **Available Models:**
58
+ - `text-embedding-3-small` (1536 dimensions, faster, lower cost)
59
+ - `text-embedding-3-large` (3072 dimensions, higher quality)
60
+ - `text-embedding-ada-002` (1536 dimensions, legacy model)
61
+
62
+ **Getting API Key:**
63
+ 1. Visit [OpenAI Platform](https://platform.openai.com/api-keys)
64
+ 2. Sign in or create an account
65
+ 3. Generate a new API key
66
+ 4. Set up billing if needed
67
+
68
+ </details>
69
+
70
+ <details>
71
+ <summary><strong>2. VoyageAI Configuration</strong></summary>
72
+
73
+ VoyageAI offers specialized code embeddings optimized for programming languages.
74
+
75
+ ```bash
76
+ # Required: Your VoyageAI API key
77
+ VOYAGEAI_API_KEY=pa-your-voyageai-api-key
78
+
79
+ # Optional: Specify embedding model (default: voyage-code-3)
80
+ EMBEDDING_MODEL=voyage-code-3
81
+ ```
82
+
83
+ **Available Models:**
84
+ - `voyage-code-3` (1024 dimensions, optimized for code)
85
+ - `voyage-3` (1024 dimensions, general purpose)
86
+ - `voyage-3-lite` (512 dimensions, faster inference)
87
+
88
+ **Getting API Key:**
89
+ 1. Visit [VoyageAI Console](https://dash.voyageai.com/)
90
+ 2. Sign up for an account
91
+ 3. Navigate to API Keys section
92
+ 4. Create a new API key
93
+
94
+ </details>
95
+
96
+ <details>
97
+ <summary><strong>3. Gemini Configuration</strong></summary>
98
+
99
+ Google's Gemini provides competitive embeddings with good multilingual support.
100
+
101
+ ```bash
102
+ # Required: Your Gemini API key
103
+ GEMINI_API_KEY=your-gemini-api-key
104
+
105
+ # Optional: Specify embedding model (default: gemini-embedding-001)
106
+ EMBEDDING_MODEL=gemini-embedding-001
107
+ ```
108
+
109
+ **Available Models:**
110
+ - `gemini-embedding-001` (3072 dimensions, latest model)
111
+
112
+ **Getting API Key:**
113
+ 1. Visit [Google AI Studio](https://aistudio.google.com/)
114
+ 2. Sign in with your Google account
115
+ 3. Go to "Get API key" section
116
+ 4. Create a new API key
117
+
118
+ </details>
119
+
120
+ <details>
121
+ <summary><strong>4. Ollama Configuration (Local/Self-hosted)</strong></summary>
122
+
123
+ Ollama allows you to run embeddings locally without sending data to external services.
124
+
125
+ ```bash
126
+ # Required: Specify which Ollama model to use
127
+ EMBEDDING_MODEL=nomic-embed-text
128
+
129
+ # Optional: Specify Ollama host (default: http://127.0.0.1:11434)
130
+ OLLAMA_HOST=http://127.0.0.1:11434
131
+ ```
132
+
133
+ **Available Models:**
134
+ - `nomic-embed-text` (768 dimensions, recommended for code)
135
+ - `mxbai-embed-large` (1024 dimensions, higher quality)
136
+ - `all-minilm` (384 dimensions, lightweight)
137
+
138
+ **Setup Instructions:**
139
+ 1. Install Ollama from [ollama.ai](https://ollama.ai/)
140
+ 2. Pull the embedding model:
141
+ ```bash
142
+ ollama pull nomic-embed-text
143
+ ```
144
+ 3. Ensure Ollama is running:
145
+ ```bash
146
+ ollama serve
147
+ ```
148
+
149
+ </details>
150
+
151
+ #### Get a free vector database on Zilliz Cloud
152
+
153
+ Code Context needs a vector database. You can [sign up](https://cloud.zilliz.com/signup?utm_source=github&utm_medium=referral&utm_campaign=2507-codecontext-readme) on Zilliz Cloud to get an API key.
154
+
155
+ ![](../../assets/signup_and_get_apikey.png)
156
+
157
+ Copy your Personal Key to replace `your-zilliz-cloud-api-key` in the configuration examples.
158
+
159
+ ```bash
160
+ MILVUS_TOKEN=your-zilliz-cloud-api-key
161
+ ```
162
+
163
+
164
+ #### Embedding Batch Size
165
+ You can set the embedding batch size to optimize the performance of the MCP server, depending on your embedding model throughput. The default value is 100.
166
+ ```bash
167
+ EMBEDDING_BATCH_SIZE=512
168
+ ```
169
+
170
+ #### Custom File Processing (Optional)
171
+ You can configure custom file extensions and ignore patterns globally via environment variables:
172
+
173
+ ```bash
174
+ # Additional file extensions to include beyond defaults
175
+ CUSTOM_EXTENSIONS=.vue,.svelte,.astro,.twig
176
+
177
+ # Additional ignore patterns to exclude files/directories
178
+ CUSTOM_IGNORE_PATTERNS=temp/**,*.backup,private/**,uploads/**
179
+ ```
180
+
181
+ These settings work in combination with tool parameters - patterns from both sources will be merged together.
182
+
183
+ ## Usage with MCP Clients
184
+
185
+
186
+ <details>
187
+ <summary><strong>Qwen Code</strong></summary>
188
+
189
+ Create or edit the `~/.qwen/settings.json` file and add the following configuration:
190
+
191
+ ```json
192
+ {
193
+ "mcpServers": {
194
+ "claude-context": {
195
+ "command": "npx",
196
+ "args": ["@zilliz/claude-context-mcp@latest"],
197
+ "env": {
198
+ "OPENAI_API_KEY": "your-openai-api-key",
199
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
200
+ }
201
+ }
202
+ }
203
+ }
204
+ ```
205
+
206
+ </details>
207
+
208
+ <details>
209
+ <summary><strong>Cursor</strong></summary>
210
+
211
+ Go to: `Settings` -> `Cursor Settings` -> `MCP` -> `Add new global MCP server`
212
+
213
+ Pasting the following configuration into your Cursor `~/.cursor/mcp.json` file is the recommended approach. You may also install in a specific project by creating `.cursor/mcp.json` in your project folder. See [Cursor MCP docs](https://docs.cursor.com/context/model-context-protocol) for more info.
214
+
215
+ **OpenAI Configuration (Default):**
216
+ ```json
217
+ {
218
+ "mcpServers": {
219
+ "claude-context": {
220
+ "command": "npx",
221
+ "args": ["-y", "@zilliz/claude-context-mcp@latest"],
222
+ "env": {
223
+ "EMBEDDING_PROVIDER": "OpenAI",
224
+ "OPENAI_API_KEY": "your-openai-api-key",
225
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
226
+ }
227
+ }
228
+ }
229
+ }
230
+ ```
231
+
232
+ **VoyageAI Configuration:**
233
+ ```json
234
+ {
235
+ "mcpServers": {
236
+ "claude-context": {
237
+ "command": "npx",
238
+ "args": ["-y", "@zilliz/claude-context-mcp@latest"],
239
+ "env": {
240
+ "EMBEDDING_PROVIDER": "VoyageAI",
241
+ "VOYAGEAI_API_KEY": "your-voyageai-api-key",
242
+ "EMBEDDING_MODEL": "voyage-code-3",
243
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
244
+ }
245
+ }
246
+ }
247
+ }
248
+ ```
249
+
250
+ **Gemini Configuration:**
251
+ ```json
252
+ {
253
+ "mcpServers": {
254
+ "claude-context": {
255
+ "command": "npx",
256
+ "args": ["-y", "@zilliz/claude-context-mcp@latest"],
257
+ "env": {
258
+ "EMBEDDING_PROVIDER": "Gemini",
259
+ "GEMINI_API_KEY": "your-gemini-api-key",
260
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
261
+ }
262
+ }
263
+ }
264
+ }
265
+ ```
266
+
267
+ **Ollama Configuration:**
268
+ ```json
269
+ {
270
+ "mcpServers": {
271
+ "claude-context": {
272
+ "command": "npx",
273
+ "args": ["-y", "@zilliz/claude-context-mcp@latest"],
274
+ "env": {
275
+ "EMBEDDING_PROVIDER": "Ollama",
276
+ "EMBEDDING_MODEL": "nomic-embed-text",
277
+ "OLLAMA_HOST": "http://127.0.0.1:11434",
278
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
279
+ }
280
+ }
281
+ }
282
+ }
283
+ ```
284
+
285
+ </details>
286
+
287
+ <details>
288
+ <summary><strong>Claude Desktop</strong></summary>
289
+
290
+ Add to your Claude Desktop configuration:
291
+
292
+ ```json
293
+ {
294
+ "mcpServers": {
295
+ "claude-context": {
296
+ "command": "npx",
297
+ "args": ["@zilliz/claude-context-mcp@latest"],
298
+ "env": {
299
+ "OPENAI_API_KEY": "your-openai-api-key",
300
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
301
+ }
302
+ }
303
+ }
304
+ }
305
+ ```
306
+
307
+ </details>
308
+
309
+ <details>
310
+ <summary><strong>Claude Code</strong></summary>
311
+
312
+ Use the command line interface to add the Code Context MCP server:
313
+
314
+ ```bash
315
+ # Add the Code Context MCP server
316
+ claude mcp add claude-context -e OPENAI_API_KEY=your-openai-api-key -e MILVUS_TOKEN=your-zilliz-cloud-api-key -- npx @zilliz/claude-context-mcp@latest
317
+
318
+ ```
319
+
320
+ See the [Claude Code MCP documentation](https://docs.anthropic.com/en/docs/claude-code/mcp) for more details about MCP server management.
321
+
322
+ </details>
323
+
324
+ <details>
325
+ <summary><strong>Windsurf</strong></summary>
326
+
327
+ Windsurf supports MCP configuration through a JSON file. Add the following configuration to your Windsurf MCP settings:
328
+
329
+ ```json
330
+ {
331
+ "mcpServers": {
332
+ "claude-context": {
333
+ "command": "npx",
334
+ "args": ["-y", "@zilliz/claude-context-mcp@latest"],
335
+ "env": {
336
+ "OPENAI_API_KEY": "your-openai-api-key",
337
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
338
+ }
339
+ }
340
+ }
341
+ }
342
+ ```
343
+
344
+ </details>
345
+
346
+ <details>
347
+ <summary><strong>VS Code</strong></summary>
348
+
349
+ The Code Context MCP server can be used with VS Code through MCP-compatible extensions. Add the following configuration to your VS Code MCP settings:
350
+
351
+ ```json
352
+ {
353
+ "mcpServers": {
354
+ "claude-context": {
355
+ "command": "npx",
356
+ "args": ["-y", "@zilliz/claude-context-mcp@latest"],
357
+ "env": {
358
+ "OPENAI_API_KEY": "your-openai-api-key",
359
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
360
+ }
361
+ }
362
+ }
363
+ }
364
+ ```
365
+
366
+ </details>
367
+
368
+ <details>
369
+ <summary><strong>Cherry Studio</strong></summary>
370
+
371
+ Cherry Studio allows for visual MCP server configuration through its settings interface. While it doesn't directly support manual JSON configuration, you can add a new server via the GUI:
372
+
373
+ 1. Navigate to **Settings → MCP Servers → Add Server**.
374
+ 2. Fill in the server details:
375
+ - **Name**: `claude-context`
376
+ - **Type**: `STDIO`
377
+ - **Command**: `npx`
378
+ - **Arguments**: `["@zilliz/claude-context-mcp@latest"]`
379
+ - **Environment Variables**:
380
+ - `OPENAI_API_KEY`: `your-openai-api-key`
381
+ - `MILVUS_TOKEN`: `your-zilliz-cloud-api-key`
382
+ 3. Save the configuration to activate the server.
383
+
384
+ </details>
385
+
386
+ <details>
387
+ <summary><strong>Cline</strong></summary>
388
+
389
+ Cline uses a JSON configuration file to manage MCP servers. To integrate the provided MCP server configuration:
390
+
391
+ 1. Open Cline and click on the **MCP Servers** icon in the top navigation bar.
392
+
393
+ 2. Select the **Installed** tab, then click **Advanced MCP Settings**.
394
+
395
+ 3. In the `cline_mcp_settings.json` file, add the following configuration:
396
+
397
+ ```json
398
+ {
399
+ "mcpServers": {
400
+ "claude-context": {
401
+ "command": "npx",
402
+ "args": ["@zilliz/claude-context-mcp@latest"],
403
+ "env": {
404
+ "OPENAI_API_KEY": "your-openai-api-key",
405
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
406
+ }
407
+ }
408
+ }
409
+ }
410
+ ```
411
+
412
+ 4. Save the file.
413
+
414
+ </details>
415
+
416
+ <details>
417
+ <summary><strong>Augment</strong></summary>
418
+
419
+ To configure Code Context MCP in Augment Code, you can use either the graphical interface or manual configuration.
420
+
421
+ #### **A. Using the Augment Code UI**
422
+
423
+ 1. Click the hamburger menu.
424
+
425
+ 2. Select **Settings**.
426
+
427
+ 3. Navigate to the **Tools** section.
428
+
429
+ 4. Click the **+ Add MCP** button.
430
+
431
+ 5. Enter the following command:
432
+
433
+ ```
434
+ npx @zilliz/claude-context-mcp@latest
435
+ ```
436
+
437
+ 6. Name the MCP: **Code Context**.
438
+
439
+ 7. Click the **Add** button.
440
+
441
+ ------
442
+
443
+ #### **B. Manual Configuration**
444
+
445
+ 1. Press Cmd/Ctrl+Shift+P or go to the hamburger menu in the Augment panel.
446
+ 2. Select Edit Settings
447
+ 3. Under Advanced, click Edit in settings.json
448
+ 4. Add the server configuration to the `mcpServers` array in the `augment.advanced` object
449
+
450
+ ```json
451
+ "augment.advanced": {
452
+ "mcpServers": [
453
+ {
454
+ "name": "claude-context",
455
+ "command": "npx",
456
+ "args": ["-y", "@zilliz/claude-context-mcp@latest"]
457
+ }
458
+ ]
459
+ }
460
+ ```
461
+
462
+ </details>
463
+
464
+ <details>
465
+ <summary><strong>Gemini CLI</strong></summary>
466
+
467
+ Gemini CLI requires manual configuration through a JSON file:
468
+
469
+ 1. Create or edit the `~/.gemini/settings.json` file.
470
+
471
+ 2. Add the following configuration:
472
+
473
+ ```json
474
+ {
475
+ "mcpServers": {
476
+ "claude-context": {
477
+ "command": "npx",
478
+ "args": ["@zilliz/claude-context-mcp@latest"],
479
+ "env": {
480
+ "OPENAI_API_KEY": "your-openai-api-key",
481
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
482
+ }
483
+ }
484
+ }
485
+ }
486
+ ```
487
+
488
+ 3. Save the file and restart Gemini CLI to apply the changes.
489
+
490
+ </details>
491
+
492
+ <details>
493
+ <summary><strong>Roo Code</strong></summary>
494
+
495
+ Roo Code utilizes a JSON configuration file for MCP servers:
496
+
497
+ 1. Open Roo Code and navigate to **Settings → MCP Servers → Edit Global Config**.
498
+
499
+ 2. In the `mcp_settings.json` file, add the following configuration:
500
+
501
+ ```json
502
+ {
503
+ "mcpServers": {
504
+ "claude-context": {
505
+ "command": "npx",
506
+ "args": ["@zilliz/claude-context-mcp@latest"],
507
+ "env": {
508
+ "OPENAI_API_KEY": "your-openai-api-key",
509
+ "MILVUS_TOKEN": "your-zilliz-cloud-api-key"
510
+ }
511
+ }
512
+ }
513
+ }
514
+ ```
515
+
516
+ 3. Save the file to activate the server.
517
+
518
+ </details>
519
+
520
+ <details>
521
+ <summary><strong>Other MCP Clients</strong></summary>
522
+
523
+ The server uses stdio transport and follows the standard MCP protocol. It can be integrated with any MCP-compatible client by running:
524
+
525
+ ```bash
526
+ npx @zilliz/claude-context-mcp@latest
527
+ ```
528
+
529
+ </details>
530
+
531
+ ## Features
532
+
533
+ - 🔌 MCP Protocol Compliance: Full compatibility with MCP-enabled AI assistants and agents
534
+ - 🔍 Semantic Code Search: Natural language queries to find relevant code snippets
535
+ - 📁 Codebase Indexing: Index entire codebases for fast semantic search
536
+ - 🔄 Auto-Sync: Automatically detects and synchronizes file changes to keep index up-to-date
537
+ - 🧠 AI-Powered: Uses OpenAI embeddings and Milvus vector database
538
+ - ⚡ Real-time: Interactive indexing and searching with progress feedback
539
+ - 🛠️ Tool-based: Exposes three main tools via the MCP protocol
540
+
541
+ ## Available Tools
542
+
543
+ ### 1. `index_codebase`
544
+ Index a codebase directory for semantic search.
545
+
546
+ **Parameters:**
547
+ - `path` (required): Absolute path to the codebase directory to index
548
+ - `force` (optional): Force re-indexing even if already indexed (default: false)
549
+ - `splitter` (optional): Code splitter to use - 'ast' for syntax-aware splitting with automatic fallback, 'langchain' for character-based splitting (default: "ast")
550
+ - `customExtensions` (optional): Additional file extensions to include beyond defaults (e.g., ['.vue', '.svelte', '.astro']). Extensions should include the dot prefix; if omitted, it will be added automatically (default: [])
551
+ - `ignorePatterns` (optional): Additional ignore patterns to exclude specific files/directories beyond defaults (e.g., ['static/**', '*.tmp', 'private/**']) (default: [])
552
+
553
+ ### 2. `search_code`
554
+ Search the indexed codebase using natural language queries.
555
+
556
+ **Parameters:**
557
+ - `path` (required): Absolute path to the codebase directory to search in
558
+ - `query` (required): Natural language query to search for in the codebase
559
+ - `limit` (optional): Maximum number of results to return (default: 10, max: 50)
560
+
561
+ ### 3. `clear_index`
562
+ Clear the search index for a specific codebase.
563
+
564
+ **Parameters:**
565
+ - `path` (required): Absolute path to the codebase directory to clear index for
566
+
567
+
568
+ ## Contributing
569
+
570
+ This package is part of the Code Context monorepo. Please see:
571
+ - [Main Contributing Guide](../../CONTRIBUTING.md) - General contribution guidelines
572
+ - [MCP Package Contributing](CONTRIBUTING.md) - Specific development guide for this package
573
+
574
+ ## Related Projects
575
+
576
+ - **[@zilliz/claude-context-core](../core)** - Core indexing engine used by this MCP server
577
+ - **[VSCode Extension](../vscode-extension)** - Alternative VSCode integration
578
+ - [Model Context Protocol](https://modelcontextprotocol.io/) - Official MCP documentation
579
+
580
+ ## License
581
+
582
+ MIT - See [LICENSE](../../LICENSE) for details
@@ -0,0 +1,25 @@
1
+ export interface CodeContextMcpConfig {
2
+ name: string;
3
+ version: string;
4
+ embeddingProvider: 'OpenAI' | 'VoyageAI' | 'Gemini' | 'Ollama';
5
+ embeddingModel: string;
6
+ openaiApiKey?: string;
7
+ openaiBaseUrl?: string;
8
+ voyageaiApiKey?: string;
9
+ geminiApiKey?: string;
10
+ ollamaModel?: string;
11
+ ollamaHost?: string;
12
+ milvusAddress?: string;
13
+ milvusToken?: string;
14
+ }
15
+ export interface CodebaseSnapshot {
16
+ indexedCodebases: string[];
17
+ indexingCodebases: string[];
18
+ lastUpdated: string;
19
+ }
20
+ export declare function getDefaultModelForProvider(provider: string): string;
21
+ export declare function getEmbeddingModelForProvider(provider: string): string;
22
+ export declare function createMcpConfig(): CodeContextMcpConfig;
23
+ export declare function logConfigurationSummary(config: CodeContextMcpConfig): void;
24
+ export declare function showHelpMessage(): void;
25
+ //# sourceMappingURL=config.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAEA,MAAM,WAAW,oBAAoB;IACjC,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAEhB,iBAAiB,EAAE,QAAQ,GAAG,UAAU,GAAG,QAAQ,GAAG,QAAQ,CAAC;IAC/D,cAAc,EAAE,MAAM,CAAC;IAEvB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,WAAW,CAAC,EAAE,MAAM,CAAC;CACxB;AAED,MAAM,WAAW,gBAAgB;IAC7B,gBAAgB,EAAE,MAAM,EAAE,CAAC;IAC3B,iBAAiB,EAAE,MAAM,EAAE,CAAC;IAC5B,WAAW,EAAE,MAAM,CAAC;CACvB;AAGD,wBAAgB,0BAA0B,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,CAanE;AAGD,wBAAgB,4BAA4B,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,CAcrE;AAED,wBAAgB,eAAe,IAAI,oBAAoB,CA+BtD;AAED,wBAAgB,uBAAuB,CAAC,MAAM,EAAE,oBAAoB,GAAG,IAAI,CA8B1E;AAED,wBAAgB,eAAe,IAAI,IAAI,CA+CtC"}