mcp-code-indexer 3.1.3-py3-none-any.whl → 3.1.5-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only.
- mcp_code_indexer/__init__.py +8 -6
- mcp_code_indexer/ask_handler.py +105 -75
- mcp_code_indexer/claude_api_handler.py +125 -82
- mcp_code_indexer/cleanup_manager.py +107 -81
- mcp_code_indexer/database/connection_health.py +212 -161
- mcp_code_indexer/database/database.py +529 -415
- mcp_code_indexer/database/exceptions.py +167 -118
- mcp_code_indexer/database/models.py +54 -19
- mcp_code_indexer/database/retry_executor.py +139 -103
- mcp_code_indexer/deepask_handler.py +178 -140
- mcp_code_indexer/error_handler.py +88 -76
- mcp_code_indexer/file_scanner.py +163 -141
- mcp_code_indexer/git_hook_handler.py +352 -261
- mcp_code_indexer/logging_config.py +76 -94
- mcp_code_indexer/main.py +406 -320
- mcp_code_indexer/middleware/error_middleware.py +106 -71
- mcp_code_indexer/query_preprocessor.py +40 -40
- mcp_code_indexer/server/mcp_server.py +785 -469
- mcp_code_indexer/token_counter.py +54 -47
- {mcp_code_indexer-3.1.3.dist-info → mcp_code_indexer-3.1.5.dist-info}/METADATA +3 -3
- mcp_code_indexer-3.1.5.dist-info/RECORD +37 -0
- mcp_code_indexer-3.1.3.dist-info/RECORD +0 -37
- {mcp_code_indexer-3.1.3.dist-info → mcp_code_indexer-3.1.5.dist-info}/WHEEL +0 -0
- {mcp_code_indexer-3.1.3.dist-info → mcp_code_indexer-3.1.5.dist-info}/entry_points.txt +0 -0
- {mcp_code_indexer-3.1.3.dist-info → mcp_code_indexer-3.1.5.dist-info}/licenses/LICENSE +0 -0
- {mcp_code_indexer-3.1.3.dist-info → mcp_code_indexer-3.1.5.dist-info}/top_level.txt +0 -0
mcp_code_indexer/token_counter.py

@@ -22,15 +22,15 @@ logger = logging.getLogger(__name__)
class TokenCounter:
    """
    Handles token counting using tiktoken with offline cache support.
-
+
    Automatically configures tiktoken to use bundled cache file for offline
    operation and provides methods to count tokens in various data structures.
    """
-
+
    def __init__(self, token_limit: int = 32000):
        """
        Initialize token counter with specified limit.
-
+
        Args:
            token_limit: Maximum tokens before recommending search over overview
        """

@@ -38,36 +38,36 @@
        self._encoder: Optional[tiktoken.Encoding] = None
        self._setup_offline_tiktoken()
        self._init_encoder()
-
+
    def _setup_offline_tiktoken(self) -> None:
        """Configure tiktoken to use bundled encoding file for offline operation."""
        # Get path to bundled cache directory
        base_dir = Path(__file__).parent.absolute()
        cache_dir = base_dir / "tiktoken_cache"
-
+
        # Ensure cache directory exists
        if not cache_dir.exists():
            raise FileNotFoundError(
                f"Tiktoken cache directory not found at {cache_dir}. "
                "Please ensure the tiktoken_cache directory exists in the src folder."
            )
-
+
        # Set tiktoken to use our bundled cache
        os.environ["TIKTOKEN_CACHE_DIR"] = str(cache_dir)
-
+
        # Verify the encoding file exists
        cache_file = "9b5ad71b2ce5302211f9c61530b329a4922fc6a4"
        cache_path = cache_dir / cache_file
-
+
        if not cache_path.exists():
            raise FileNotFoundError(
                f"Tiktoken cache file not found at {cache_path}. "
                "Please ensure the cl100k_base.tiktoken file is properly "
                f"renamed to {cache_file} and placed in the tiktoken_cache directory."
            )
-
+
        logger.debug(f"Configured tiktoken to use cache at {cache_dir}")
-
+
    def _init_encoder(self) -> None:
        """Initialize tiktoken encoder with fallback options."""
        try:

@@ -85,27 +85,27 @@
                "Failed to initialize tiktoken encoder. "
                "Check that the cache file is properly configured and accessible."
            ) from fallback_error
-
+
    @property
    def encoder(self) -> tiktoken.Encoding:
        """Get the tiktoken encoder instance."""
        if self._encoder is None:
            raise RuntimeError("Token encoder not properly initialized")
        return self._encoder
-
+
    def count_tokens(self, text: str) -> int:
        """
        Count tokens in a text string.
-
+
        Args:
            text: Input text to count tokens for
-
+
        Returns:
            Number of tokens in the text
        """
        if not text:
            return 0
-
+
        try:
            tokens = self.encoder.encode(text)
            return len(tokens)

@@ -113,98 +113,102 @@
            logger.error(f"Failed to count tokens for text: {e}")
            # Fallback to rough approximation (4 chars per token)
            return len(text) // 4
-
+
    def count_file_description_tokens(self, file_desc: FileDescription) -> int:
        """
        Count tokens for a file description in overview format.
-
+
        Args:
            file_desc: File description to count tokens for
-
+
        Returns:
            Number of tokens for formatted file description
        """
        # Format matches what would be shown in codebase overview
        formatted_content = f"{file_desc.file_path}\n{file_desc.description}\n"
        return self.count_tokens(formatted_content)
-
+
    def count_folder_structure_tokens(self, folder: FolderNode) -> int:
        """
        Count tokens for a complete folder structure.
-
+
        Args:
            folder: Root folder node to count tokens for
-
+
        Returns:
            Total number of tokens for the folder structure
        """
        total_tokens = 0
-
+
        # Count tokens for folder name and path
        folder_header = f"{folder.name}/\n"
        total_tokens += self.count_tokens(folder_header)
-
+
        # Count tokens for all files in this folder
        for file_node in folder.files:
            file_content = f"{file_node.path}\n{file_node.description}\n"
            total_tokens += self.count_tokens(file_content)
-
+
        # Recursively count tokens for subfolders
        for subfolder in folder.folders:
            total_tokens += self.count_folder_structure_tokens(subfolder)
-
+
        return total_tokens
-
-    def calculate_codebase_tokens(self, file_descriptions: List[FileDescription]) -> int:
+
+    def calculate_codebase_tokens(
+        self, file_descriptions: List[FileDescription]
+    ) -> int:
        """
        Calculate total tokens for a list of file descriptions.
-
+
        Args:
            file_descriptions: List of file descriptions to count
-
+
        Returns:
            Total token count for all file descriptions
        """
        total_tokens = 0
-
+
        for file_desc in file_descriptions:
            total_tokens += self.count_file_description_tokens(file_desc)
-
+
        return total_tokens
-
+
    def is_large_codebase(self, total_tokens: int) -> bool:
        """
        Check if codebase exceeds configured token limit.
-
+
        Args:
            total_tokens: Total token count to check
-
+
        Returns:
            True if codebase exceeds token limit
        """
        return total_tokens > self.token_limit
-
+
    def get_recommendation(self, total_tokens: int) -> str:
        """
        Get recommendation for codebase navigation approach.
-
+
        Args:
            total_tokens: Total token count
-
+
        Returns:
            "use_search" or "use_overview" based on token count
        """
        return "use_search" if self.is_large_codebase(total_tokens) else "use_overview"
-
-    def generate_cache_key(self, project_id: str, branch: str, content_hash: str) -> str:
+
+    def generate_cache_key(
+        self, project_id: str, branch: str, content_hash: str
+    ) -> str:
        """
        Generate a cache key for token count caching.
-
+
        Args:
            project_id: Project identifier
            branch: Git branch name
            content_hash: Hash of file contents or descriptions
-
+
        Returns:
            Cache key string
        """

@@ -215,29 +219,32 @@
def verify_tiktoken_setup() -> bool:
    """
    Verify that tiktoken is properly configured for offline operation.
-
+
    Returns:
        True if tiktoken setup is working correctly
    """
    try:
        counter = TokenCounter()
-
+
        # Test with a known string
        test_string = "Hello, world!"
        token_count = counter.count_tokens(test_string)
-
+
        # cl100k_base should encode "Hello, world!" to 4 tokens
        expected_count = 4
-
+
        if token_count == expected_count:
            logger.info("Tiktoken offline setup verified successfully")
            return True
        else:
            logger.warning(
-                f"Tiktoken token count mismatch: expected {expected_count}, got {token_count}"
+                (
+                    f"Tiktoken token count mismatch: expected {expected_count}, "
+                    f"got {token_count}"
+                )
            )
            return False
-
+
    except Exception as e:
        logger.error(f"Tiktoken setup verification failed: {e}")
        return False
{mcp_code_indexer-3.1.3.dist-info → mcp_code_indexer-3.1.5.dist-info}/METADATA

@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: mcp-code-indexer
-Version: 3.1.3
+Version: 3.1.5
Summary: MCP server that tracks file descriptions across codebases, enabling AI agents to efficiently navigate and understand code through searchable summaries and token-aware overviews.
Author: MCP Code Indexer Contributors
Maintainer: MCP Code Indexer Contributors

@@ -59,8 +59,8 @@ Dynamic: requires-python

# MCP Code Indexer 🚀

-[](https://badge.fury.io/py/mcp-code-indexer)
+[](https://pypi.org/project/mcp-code-indexer/)
[](https://opensource.org/licenses/MIT)

A production-ready **Model Context Protocol (MCP) server** that revolutionizes how AI agents navigate and understand codebases. Built for high-concurrency environments with advanced database resilience, the server provides instant access to intelligent descriptions, semantic search, and context-aware recommendations while maintaining 800+ writes/sec throughput.
mcp_code_indexer-3.1.5.dist-info/RECORD

@@ -0,0 +1,37 @@
+mcp_code_indexer/__init__.py,sha256=likkTMBFIcnaRqrvESq4WDqEIx1edUL74zilaQeF-Zc,1660
+mcp_code_indexer/__main__.py,sha256=4Edinoe0ug43hobuLYcjTmGp2YJnlFYN4_8iKvUBJ0Q,213
+mcp_code_indexer/ask_handler.py,sha256=cy7gVFyXF0c10GZ3Aquktvgw1A8e4_NtBsbjlE1Bc84,9106
+mcp_code_indexer/claude_api_handler.py,sha256=uZF6P64Cac9AHfO2Q3Whe4exhZyZmqZ1grWT1nHw-Wc,13616
+mcp_code_indexer/cleanup_manager.py,sha256=qjIAMiJ-F1pfgCwVbNaNE0dfs8Wh9aaWh51DBMCWFuI,9491
+mcp_code_indexer/deepask_handler.py,sha256=wpKMYnlsOGiaKLvuXIb62jeEb4xnYOmIcvvXjvbgdnc,18475
+mcp_code_indexer/error_handler.py,sha256=XBjjEriq1diPTGKpHcaBh9fj88_qhuNMwPeLiTWxrds,11431
+mcp_code_indexer/file_scanner.py,sha256=smY1Yfxfyqb_J5RQz5ETaSgE2_syC2SUUwzJxby3Bg8,11432
+mcp_code_indexer/git_hook_handler.py,sha256=bEsmoGZJfBxJuVYYzXp-XhIdUmYgxdUptHWq05va9Fo,34453
+mcp_code_indexer/logging_config.py,sha256=hexJWw7-6QQkH_2BwtKGO1CDOtQnP8F3Yss_yHKnzE4,9816
+mcp_code_indexer/main.py,sha256=GjwUtfQCxNxsCNtYpEBfYgOC0G5Q0Bcci4keVtNY3Cc,31888
+mcp_code_indexer/query_preprocessor.py,sha256=PLFR1T9mSn2Mkxw6-GB4GkxyfzjJ2ia3dgLPcziHfVA,5483
+mcp_code_indexer/token_counter.py,sha256=e6WsyCEWMMSkMwLbcVtr5e8vEqh-kFqNmiJErCNdqHE,8220
+mcp_code_indexer/data/stop_words_english.txt,sha256=7Zdd9ameVgA6tN_zuXROvHXD4hkWeELVywPhb7FJEkw,6343
+mcp_code_indexer/database/__init__.py,sha256=aPq_aaRp0aSwOBIq9GkuMNjmLxA411zg2vhdrAuHm-w,38
+mcp_code_indexer/database/connection_health.py,sha256=D0SqeYc1kunRkg1zObOqoaJ2qDw-_iq3IXRux-JQjgE,24688
+mcp_code_indexer/database/database.py,sha256=BpsWoy5qXqLQpEZ42dt5efOGSrLhokQyvAc9ZK0afc4,46895
+mcp_code_indexer/database/exceptions.py,sha256=Cs9_qc-6724DPJc25fPMvNfO3JQCHrOQ80y8Q55w_3Y,10389
+mcp_code_indexer/database/models.py,sha256=ITF5dMSBCuaunQ3YeaVQOZ5Kb8y59I5Fg0EU7O9Ez3A,7017
+mcp_code_indexer/database/retry_executor.py,sha256=wBIIbkU1bwQMrjM9AmDWNEQ-cw8IPNobfdeUOLhQVjQ,13528
+mcp_code_indexer/middleware/__init__.py,sha256=p-mP0pMsfiU2yajCPvokCUxUEkh_lu4XJP1LyyMW2ug,220
+mcp_code_indexer/middleware/error_middleware.py,sha256=YHd7sm4PdNPIMKD8Nub_N7WaOH2JtiqkHBbTOGyxTno,11685
+mcp_code_indexer/migrations/001_initial.sql,sha256=hIXkCP4LA_4A9HJ1CHU0a1DD-a6EN6u-uJPMqW0c2Yo,4120
+mcp_code_indexer/migrations/002_performance_indexes.sql,sha256=FlKbmcJyKAHTKmjxmpk8ABe6eMcQahz8RciRYcREY_E,2846
+mcp_code_indexer/migrations/003_project_overviews.sql,sha256=pPzn7UmJ_Bda9mJ1nYTN1GeuYwdQHC7Fva6PvWaucUw,891
+mcp_code_indexer/migrations/004_remove_branch_dependency.sql,sha256=whZvj2qfba1-Xq7Vg4IfpCpIrRKN21AdtG0gZbFSRi4,6466
+mcp_code_indexer/migrations/005_remove_git_remotes.sql,sha256=vT84AaV1hyN4zq5W67hR14TgAwhW7_RNtBHrCoksxA4,1299
+mcp_code_indexer/server/__init__.py,sha256=16xMcuriUOBlawRqWNBk6niwrvtv_JD5xvI36X1Vsmk,41
+mcp_code_indexer/server/mcp_server.py,sha256=IIFXUNvOWlMW-lnpgWwMuzTikYi0PDuiQu---MRTpMM,65761
+mcp_code_indexer/tiktoken_cache/9b5ad71b2ce5302211f9c61530b329a4922fc6a4,sha256=Ijkht27pm96ZW3_3OFE-7xAPtR0YyTWXoRO8_-hlsqc,1681126
+mcp_code_indexer/tools/__init__.py,sha256=m01mxML2UdD7y5rih_XNhNSCMzQTz7WQ_T1TeOcYlnE,49
+mcp_code_indexer-3.1.5.dist-info/licenses/LICENSE,sha256=JN9dyPPgYwH9C-UjYM7FLNZjQ6BF7kAzpF3_4PwY4rY,1086
+mcp_code_indexer-3.1.5.dist-info/METADATA,sha256=kYlk6qJ9bUna23EQSI2u3Fk9jbn3Jct7Suv6Z9oc834,19849
+mcp_code_indexer-3.1.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+mcp_code_indexer-3.1.5.dist-info/entry_points.txt,sha256=8HqWOw1Is7jOP1bvIgaSwouvT9z_Boe-9hd4NzyJOhY,68
+mcp_code_indexer-3.1.5.dist-info/top_level.txt,sha256=yKYCM-gMGt-cnupGfAhnZaoEsROLB6DQ1KFUuyKx4rw,17
+mcp_code_indexer-3.1.5.dist-info/RECORD,,
mcp_code_indexer-3.1.3.dist-info/RECORD

@@ -1,37 +0,0 @@
-mcp_code_indexer/__init__.py,sha256=GhY2NLQ6lH3n5mxqw0t8T1gmZGKhM6KvjhZH8xW5O-A,1686
-mcp_code_indexer/__main__.py,sha256=4Edinoe0ug43hobuLYcjTmGp2YJnlFYN4_8iKvUBJ0Q,213
-mcp_code_indexer/ask_handler.py,sha256=rFljJtqP_YL3E9H2Hgk04yURzHw_sqm6muB5RTlP_-o,8397
-mcp_code_indexer/claude_api_handler.py,sha256=4lgp-KsDMOmjrln3QhdJuM5pHvaB3kUwHOUk9l5adi4,13628
-mcp_code_indexer/cleanup_manager.py,sha256=1x2de8Mr9dL92q4ubEebsWSF_2n8Yxk549ZohYHNIkU,9358
-mcp_code_indexer/deepask_handler.py,sha256=iAFA1pKfAnurHBprIyP1TaecPzZ5YhBs-oR8Eccxoe4,18323
-mcp_code_indexer/error_handler.py,sha256=x6dHezVeKcD2ealNLBndt-3SiPiMfh9VOUNoqQSk3rI,11660
-mcp_code_indexer/file_scanner.py,sha256=ctXeZMROgDThEtjzsANTK9TbK-fhTScMBd4iyuleBT4,11734
-mcp_code_indexer/git_hook_handler.py,sha256=N0lnDcM7KNmOz2VwBf_fsy4jrPwOVTTYI38y1bDYiq4,34614
-mcp_code_indexer/logging_config.py,sha256=R5R50xFYy-flgHHh5uVGKV7JIPYKohk2RYa1eEn8kYM,10212
-mcp_code_indexer/main.py,sha256=NThiusK1ZPgxP_ZYlms4sxifcviupHa1-oH6ytbFVwQ,31122
-mcp_code_indexer/query_preprocessor.py,sha256=uHYy8FO4FTs7MFKsXoueYIafWDKOIirRgdUzwh8upb4,5773
-mcp_code_indexer/token_counter.py,sha256=WrifOkbF99nWWHlRlhCHAB2KN7qr83GOHl7apE-hJcE,8460
-mcp_code_indexer/data/stop_words_english.txt,sha256=7Zdd9ameVgA6tN_zuXROvHXD4hkWeELVywPhb7FJEkw,6343
-mcp_code_indexer/database/__init__.py,sha256=aPq_aaRp0aSwOBIq9GkuMNjmLxA411zg2vhdrAuHm-w,38
-mcp_code_indexer/database/connection_health.py,sha256=s2r9L_KipH5NlemAUDnhBQO90Dn4b_0Ht9UDs7F6QPk,24432
-mcp_code_indexer/database/database.py,sha256=1DxjTlSI-Pdzz5mLqHffS_lYjXamdu7u5OGu78Tp46k,46792
-mcp_code_indexer/database/exceptions.py,sha256=AgpRA9Z5R-GoWYdQSPeSdYvAXDopFCQkLGN3jD7Ha4E,10215
-mcp_code_indexer/database/models.py,sha256=t4HJ2HJfRzMWt0kHjfLEh8p_ecqdQIdej5LyQYUqpsI,6858
-mcp_code_indexer/database/retry_executor.py,sha256=QUayjkCk8OsckVMYiJ_HBQ9NTUss-H8GQeUIUbbw4_U,13419
-mcp_code_indexer/middleware/__init__.py,sha256=p-mP0pMsfiU2yajCPvokCUxUEkh_lu4XJP1LyyMW2ug,220
-mcp_code_indexer/middleware/error_middleware.py,sha256=5agJTAkkPogfPGnja1V9JtG9RG-BiOALIJYctK3byJQ,11730
-mcp_code_indexer/migrations/001_initial.sql,sha256=hIXkCP4LA_4A9HJ1CHU0a1DD-a6EN6u-uJPMqW0c2Yo,4120
-mcp_code_indexer/migrations/002_performance_indexes.sql,sha256=FlKbmcJyKAHTKmjxmpk8ABe6eMcQahz8RciRYcREY_E,2846
-mcp_code_indexer/migrations/003_project_overviews.sql,sha256=pPzn7UmJ_Bda9mJ1nYTN1GeuYwdQHC7Fva6PvWaucUw,891
-mcp_code_indexer/migrations/004_remove_branch_dependency.sql,sha256=whZvj2qfba1-Xq7Vg4IfpCpIrRKN21AdtG0gZbFSRi4,6466
-mcp_code_indexer/migrations/005_remove_git_remotes.sql,sha256=vT84AaV1hyN4zq5W67hR14TgAwhW7_RNtBHrCoksxA4,1299
-mcp_code_indexer/server/__init__.py,sha256=16xMcuriUOBlawRqWNBk6niwrvtv_JD5xvI36X1Vsmk,41
-mcp_code_indexer/server/mcp_server.py,sha256=TUB8huyUddYrA8HKQnL3JN1ff2-WjomCnrKiEx-AJh0,57584
-mcp_code_indexer/tiktoken_cache/9b5ad71b2ce5302211f9c61530b329a4922fc6a4,sha256=Ijkht27pm96ZW3_3OFE-7xAPtR0YyTWXoRO8_-hlsqc,1681126
-mcp_code_indexer/tools/__init__.py,sha256=m01mxML2UdD7y5rih_XNhNSCMzQTz7WQ_T1TeOcYlnE,49
-mcp_code_indexer-3.1.3.dist-info/licenses/LICENSE,sha256=JN9dyPPgYwH9C-UjYM7FLNZjQ6BF7kAzpF3_4PwY4rY,1086
-mcp_code_indexer-3.1.3.dist-info/METADATA,sha256=L9IpeZLqo4vvEow-_gFErBcq--a45SKrTrjdTAHvzIA,19849
-mcp_code_indexer-3.1.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-mcp_code_indexer-3.1.3.dist-info/entry_points.txt,sha256=8HqWOw1Is7jOP1bvIgaSwouvT9z_Boe-9hd4NzyJOhY,68
-mcp_code_indexer-3.1.3.dist-info/top_level.txt,sha256=yKYCM-gMGt-cnupGfAhnZaoEsROLB6DQ1KFUuyKx4rw,17
-mcp_code_indexer-3.1.3.dist-info/RECORD,,
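
Each RECORD row above follows the standard wheel manifest format `path,hash,size`, where the hash field is `sha256=` plus the unpadded URL-safe base64 digest of the file, and the RECORD file lists itself with empty hash and size fields. A small, hypothetical verification sketch (the site-packages path is illustrative, not taken from the package):

```python
import base64
import csv
import hashlib
from pathlib import Path

# Hypothetical sketch: re-check installed files against their RECORD rows.
# The site-packages location below is an illustrative assumption.
site_packages = Path("/usr/lib/python3/site-packages")
record_path = site_packages / "mcp_code_indexer-3.1.5.dist-info" / "RECORD"

with record_path.open(newline="") as fh:
    for path, hash_spec, size in csv.reader(fh):
        if not hash_spec:  # RECORD lists itself with empty hash and size
            continue
        algo, _, expected = hash_spec.partition("=")
        data = (site_packages / path).read_bytes()
        digest = hashlib.new(algo, data).digest()
        actual = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
        ok = actual == expected and len(data) == int(size)
        print(f"{path}: {'OK' if ok else 'MISMATCH'}")
```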
The remaining files (WHEEL, entry_points.txt, licenses/LICENSE, and top_level.txt) are unchanged between 3.1.3 and 3.1.5.