mcp-proxy-adapter 2.1.2__py3-none-any.whl → 2.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {examples → mcp_proxy_adapter/examples}/openapi_server.py +35 -7
- {mcp_proxy_adapter-2.1.2.dist-info → mcp_proxy_adapter-2.1.4.dist-info}/METADATA +98 -2
- mcp_proxy_adapter-2.1.4.dist-info/RECORD +28 -0
- mcp_proxy_adapter-2.1.4.dist-info/top_level.txt +1 -0
- docs/README.md +0 -172
- docs/README_ru.md +0 -172
- docs/architecture.md +0 -251
- docs/architecture_ru.md +0 -343
- docs/command_development.md +0 -250
- docs/command_development_ru.md +0 -593
- docs/deployment.md +0 -251
- docs/deployment_ru.md +0 -1298
- docs/examples.md +0 -254
- docs/examples_ru.md +0 -401
- docs/mcp_proxy_adapter.md +0 -251
- docs/mcp_proxy_adapter_ru.md +0 -405
- docs/quickstart.md +0 -251
- docs/quickstart_ru.md +0 -397
- docs/testing.md +0 -255
- docs/testing_ru.md +0 -469
- docs/validation_ru.md +0 -287
- examples/mcp_proxy_config.json +0 -175
- mcp_proxy_adapter-2.1.2.dist-info/RECORD +0 -61
- mcp_proxy_adapter-2.1.2.dist-info/top_level.txt +0 -5
- scripts/code_analyzer/code_analyzer.py +0 -328
- scripts/code_analyzer/register_commands.py +0 -446
- scripts/publish.py +0 -85
- tests/conftest.py +0 -12
- tests/test_adapter.py +0 -529
- tests/test_adapter_coverage.py +0 -274
- tests/test_basic_dispatcher.py +0 -169
- tests/test_command_registry.py +0 -328
- tests/test_examples.py +0 -32
- tests/test_mcp_proxy_adapter.py +0 -568
- tests/test_mcp_proxy_adapter_basic.py +0 -262
- tests/test_part1.py +0 -348
- tests/test_part2.py +0 -524
- tests/test_schema.py +0 -358
- tests/test_simple_adapter.py +0 -251
- {examples → mcp_proxy_adapter/examples}/analyze_config.py +0 -0
- {examples → mcp_proxy_adapter/examples}/basic_integration.py +0 -0
- {examples → mcp_proxy_adapter/examples}/docstring_and_schema_example.py +0 -0
- {examples → mcp_proxy_adapter/examples}/extension_example.py +0 -0
- {examples → mcp_proxy_adapter/examples}/help_best_practices.py +0 -0
- {examples → mcp_proxy_adapter/examples}/help_usage.py +0 -0
- {examples → mcp_proxy_adapter/examples}/mcp_proxy_client.py +0 -0
- {examples → mcp_proxy_adapter/examples}/project_structure_example.py +0 -0
- {examples → mcp_proxy_adapter/examples}/testing_example.py +0 -0
- {mcp_proxy_adapter-2.1.2.dist-info → mcp_proxy_adapter-2.1.4.dist-info}/WHEEL +0 -0
- {mcp_proxy_adapter-2.1.2.dist-info → mcp_proxy_adapter-2.1.4.dist-info}/licenses/LICENSE +0 -0
@@ -1,328 +0,0 @@
|
|
1
|
-
#!/usr/bin/env python3
|
2
|
-
"""
|
3
|
-
Code Analyzer Tool
|
4
|
-
|
5
|
-
This script performs three main functions:
|
6
|
-
1. Lists Python files with more than 350 lines of code
|
7
|
-
2. Creates a call graph showing which function calls which other function
|
8
|
-
3. Creates an index of all functions with their file paths
|
9
|
-
"""
|
10
|
-
|
11
|
-
import os
|
12
|
-
import ast
|
13
|
-
import sys
|
14
|
-
from pathlib import Path
|
15
|
-
import networkx as nx
|
16
|
-
import matplotlib.pyplot as plt
|
17
|
-
from collections import defaultdict
|
18
|
-
|
19
|
-
# Directories that must be excluded from the analysis.
EXCLUDED_DIRS = [
    '.venv',
    'site-packages',
    'dist-packages',
    '__pycache__',
    'node_modules',
    '.git'
]

def should_skip_dir(path):
    """Return True if *path* lies inside one of the excluded directories.

    The original implementation used raw substring matching
    (``excluded in path``), which falsely excluded unrelated names such
    as ``my.venv2`` (contains ``.venv``).  This version compares whole
    path components instead, so only real ``.venv``/``.git``/... path
    segments trigger a skip.
    """
    parts = os.path.normpath(path).split(os.sep)
    return any(excluded in parts for excluded in EXCLUDED_DIRS)
|
35
|
-
|
36
|
-
class FunctionCallVisitor(ast.NodeVisitor):
    """AST visitor to collect function calls within each function definition"""

    def __init__(self):
        # Qualified name of the function currently being visited (or None).
        self.current_function = None
        # Maps each visited function to the set of names it calls.
        self.call_graph = defaultdict(set)
        # Maps simple function names to their class-qualified names.
        self.functions = {}
        # Name of the enclosing class while inside a ClassDef (or None).
        self.current_class = None

    def visit_ClassDef(self, node):
        """Track the enclosing class name while visiting the class body."""
        enclosing = self.current_class
        self.current_class = node.name
        self.generic_visit(node)
        self.current_class = enclosing

    def visit_FunctionDef(self, node):
        """Record the function under its qualified name and walk its body."""
        enclosing = self.current_function
        if self.current_class is None:
            qualified = node.name
        else:
            qualified = f"{self.current_class}.{node.name}"
        self.current_function = qualified
        # Remember the mapping from the simple name to the qualified one.
        self.functions[node.name] = qualified
        self.generic_visit(node)
        # Restore the enclosing function when leaving this definition.
        self.current_function = enclosing

    def visit_AsyncFunctionDef(self, node):
        """Async definitions are handled exactly like synchronous ones."""
        self.visit_FunctionDef(node)

    def visit_Call(self, node):
        """Record a call edge from the enclosing function, if any."""
        if self.current_function:
            target = node.func
            if isinstance(target, ast.Name):
                # Plain call such as "foo()".
                callee = target.id
            elif isinstance(target, ast.Attribute):
                # Method/attribute call such as "obj.foo()".
                callee = target.attr
            else:
                callee = None
            if callee:
                self.call_graph[self.current_function].add(callee)
        self.generic_visit(node)
|
94
|
-
|
95
|
-
def count_lines(file_path):
    """Count the number of non-empty, non-comment lines in a file"""
    with open(file_path, 'r', encoding='utf-8', errors='ignore') as handle:
        stripped = (raw.strip() for raw in handle)
        # A line counts only when it has content and is not a '#' comment.
        return sum(1 for text in stripped if text and not text.startswith('#'))
|
108
|
-
|
109
|
-
def find_large_files(root_dir, min_lines=350):
    """Find Python files with more than min_lines lines of code"""
    results = []

    for current, subdirs, filenames in os.walk(root_dir):
        # Prune excluded directories in place so os.walk never descends
        # into them.
        subdirs[:] = [d for d in subdirs
                      if not should_skip_dir(os.path.join(current, d))]

        for filename in filenames:
            if not filename.endswith('.py'):
                continue

            full_path = os.path.join(current, filename)

            # Skip files that live inside excluded directories.
            if should_skip_dir(full_path):
                continue

            total = count_lines(full_path)
            if total > min_lines:
                results.append((os.path.relpath(full_path, root_dir), total))

    return results
|
132
|
-
|
133
|
-
def create_function_index(root_dir, output_file='function_index.txt'):
    """Create an index of all functions with their file paths.

    Top-level (and nested) functions are listed under their bare name,
    classes as ``class <Name>``, and methods as ``<Class>.<method>``.

    Fixes over the original implementation:
    - methods are no longer indexed twice (once under their bare name by
      the generic ``ast.walk`` loop and again qualified by the class loop);
    - the unused local ``module_name`` was removed.

    Returns the list of ``(name, relative_path)`` tuples and writes the
    sorted index to *output_file*.
    """
    function_index = []
    modules_analyzed = 0

    # Process each Python file under root_dir.
    for root, dirs, files in os.walk(root_dir):
        # Prune excluded directories so os.walk never descends into them.
        dirs[:] = [d for d in dirs if not should_skip_dir(os.path.join(root, d))]

        for file in files:
            if file.endswith('.py'):
                file_path = os.path.join(root, file)

                # Skip files located inside excluded directories.
                if should_skip_dir(file_path):
                    continue

                try:
                    with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
                        module_content = f.read()

                    # Parse the file.
                    module = ast.parse(module_content, filename=file_path)

                    rel_path = os.path.relpath(file_path, root_dir)

                    # Collect the identities of direct class members up
                    # front so the generic walk below does not index
                    # methods a second time under their bare name.
                    method_ids = {
                        id(member)
                        for node in ast.walk(module)
                        if isinstance(node, ast.ClassDef)
                        for member in node.body
                        if isinstance(member, (ast.FunctionDef, ast.AsyncFunctionDef))
                    }

                    # Find functions and classes.
                    for node in ast.walk(module):
                        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
                            if id(node) not in method_ids:
                                function_index.append((node.name, rel_path))
                        elif isinstance(node, ast.ClassDef):
                            function_index.append((f"class {node.name}", rel_path))
                            # Index each method under its qualified name.
                            for member in node.body:
                                if isinstance(member, (ast.FunctionDef, ast.AsyncFunctionDef)):
                                    function_index.append((f"{node.name}.{member.name}", rel_path))

                    modules_analyzed += 1

                except (SyntaxError, UnicodeDecodeError) as e:
                    print(f"Error analyzing {file_path}: {e}")

    # Save function index to file.
    with open(output_file, 'w', encoding='utf-8') as f:
        f.write(f"# Function Index (analyzed {modules_analyzed} modules)\n\n")

        for function_name, file_path in sorted(function_index, key=lambda x: x[0].lower()):
            f.write(f"{function_name}: {file_path}\n")

    print(f"Function index saved to {output_file}")

    return function_index
|
191
|
-
|
192
|
-
def create_call_graph(root_dir, output_file='call_graph.txt'):
    """Create a graph of function calls and save it to a file"""
    call_graph = defaultdict(set)
    modules_analyzed = 0

    # Walk the tree and analyze every Python module found.
    for current, subdirs, filenames in os.walk(root_dir):
        # Prune excluded directories so os.walk never descends into them.
        subdirs[:] = [d for d in subdirs
                      if not should_skip_dir(os.path.join(current, d))]

        for filename in filenames:
            if not filename.endswith('.py'):
                continue

            file_path = os.path.join(current, filename)

            # Skip files located inside excluded directories.
            if should_skip_dir(file_path):
                continue

            try:
                with open(file_path, 'r', encoding='utf-8', errors='ignore') as handle:
                    source = handle.read()

                tree = ast.parse(source, filename=file_path)

                # Derive a dotted module name from the relative path.
                rel_path = os.path.relpath(file_path, root_dir)
                module_name = os.path.splitext(rel_path)[0].replace(os.path.sep, '.')

                # Collect per-module call edges.
                visitor = FunctionCallVisitor()
                visitor.visit(tree)

                # Merge into the global graph, prefixing callers with the
                # module name; callees defined in the same module get the
                # same prefix plus their class-qualified name.
                for caller, callees in visitor.call_graph.items():
                    full_caller = f"{module_name}::{caller}"
                    for callee in callees:
                        if callee in visitor.functions:
                            qualified = f"{module_name}::{visitor.functions[callee]}"
                        else:
                            qualified = callee
                        call_graph[full_caller].add(qualified)

                modules_analyzed += 1

            except (SyntaxError, UnicodeDecodeError) as e:
                print(f"Error analyzing {file_path}: {e}")

    # Write the textual report, sorted for stable output.
    with open(output_file, 'w', encoding='utf-8') as report:
        report.write(f"# Function Call Graph (analyzed {modules_analyzed} modules)\n\n")

        for caller in sorted(call_graph):
            report.write(f"{caller}:\n")
            for callee in sorted(call_graph[caller]):
                report.write(f"  - {callee}\n")
            report.write("\n")

    print(f"Call graph saved to {output_file}")

    return call_graph
|
255
|
-
|
256
|
-
def visualize_call_graph(call_graph, output_file='call_graph.png'):
    """Create a visual representation of the call graph using NetworkX"""
    graph = nx.DiGraph()

    # Populate nodes and directed edges from the adjacency mapping.
    for caller, callees in call_graph.items():
        graph.add_node(caller)
        for callee in callees:
            graph.add_node(callee)
            graph.add_edge(caller, callee)

    # Large graphs render poorly with a spring layout; warn the user.
    if len(graph.nodes) > 100:
        print(f"Warning: Graph is very large ({len(graph.nodes)} nodes), visualization may be cluttered")
        print("Consider filtering the graph or using a specialized tool like pyan for better visualization")

    try:
        # Render with a spring layout and save the figure.
        plt.figure(figsize=(20, 20))
        layout = nx.spring_layout(graph, k=0.3, iterations=50)
        nx.draw(graph, layout, with_labels=True, node_size=100, node_color="skyblue",
                font_size=8, font_weight="bold", arrows=True,
                connectionstyle='arc3, rad=0.1', arrowsize=10)

        plt.tight_layout()
        plt.savefig(output_file, dpi=300, bbox_inches='tight')
        plt.close()

        print(f"Call graph visualization saved to {output_file}")
    except Exception as e:
        # Visualization is best-effort; the text report already exists.
        print(f"Error creating visualization: {e}")
        print("Text-based call graph is still available")
|
289
|
-
|
290
|
-
def main():
    """Entry point: run all three analyses on the target directory."""
    # Target directory comes from argv, defaulting to the working directory.
    root_dir = sys.argv[1] if len(sys.argv) > 1 else os.getcwd()

    print(f"Analyzing code in {root_dir} (excluding library files)")

    # Report files that exceed the line-count threshold.
    print("\n=== Files with more than 350 lines ===")
    large_files = find_large_files(root_dir)

    if large_files:
        with open('large_files.txt', 'w', encoding='utf-8') as f:
            f.write("# Files with more than 350 lines of code\n\n")
            for file_path, line_count in sorted(large_files, key=lambda x: x[1], reverse=True):
                info = f"{file_path}: {line_count} lines"
                print(info)
                f.write(f"{info}\n")
        print(f"\nList of large files saved to large_files.txt")
    else:
        print("No files with more than 350 lines found.")

    # Build the function index.
    print("\n=== Creating function index ===")
    create_function_index(root_dir)

    # Build the textual call graph.
    print("\n=== Creating function call graph ===")
    call_graph = create_call_graph(root_dir)

    # Visualization is best-effort: it needs a working matplotlib backend.
    try:
        visualize_call_graph(call_graph)
    except Exception as e:
        print(f"Could not create visual graph: {e}")

if __name__ == "__main__":
    main()
|