fishertools 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. fishertools/__init__.py +82 -0
  2. fishertools/config/__init__.py +24 -0
  3. fishertools/config/manager.py +247 -0
  4. fishertools/config/models.py +96 -0
  5. fishertools/config/parser.py +265 -0
  6. fishertools/decorators.py +93 -0
  7. fishertools/documentation/__init__.py +38 -0
  8. fishertools/documentation/api.py +242 -0
  9. fishertools/documentation/generator.py +502 -0
  10. fishertools/documentation/models.py +126 -0
  11. fishertools/documentation/visual.py +583 -0
  12. fishertools/errors/__init__.py +29 -0
  13. fishertools/errors/exceptions.py +191 -0
  14. fishertools/errors/explainer.py +303 -0
  15. fishertools/errors/formatters.py +386 -0
  16. fishertools/errors/models.py +228 -0
  17. fishertools/errors/patterns.py +119 -0
  18. fishertools/errors/recovery.py +467 -0
  19. fishertools/examples/__init__.py +22 -0
  20. fishertools/examples/models.py +118 -0
  21. fishertools/examples/repository.py +770 -0
  22. fishertools/helpers.py +116 -0
  23. fishertools/integration.py +451 -0
  24. fishertools/learn/__init__.py +18 -0
  25. fishertools/learn/examples.py +550 -0
  26. fishertools/learn/tips.py +281 -0
  27. fishertools/learning/__init__.py +32 -0
  28. fishertools/learning/core.py +349 -0
  29. fishertools/learning/models.py +112 -0
  30. fishertools/learning/progress.py +314 -0
  31. fishertools/learning/session.py +500 -0
  32. fishertools/learning/tutorial.py +626 -0
  33. fishertools/legacy/__init__.py +76 -0
  34. fishertools/legacy/deprecated.py +261 -0
  35. fishertools/legacy/deprecation.py +149 -0
  36. fishertools/safe/__init__.py +16 -0
  37. fishertools/safe/collections.py +242 -0
  38. fishertools/safe/files.py +240 -0
  39. fishertools/safe/strings.py +15 -0
  40. fishertools/utils.py +57 -0
  41. fishertools-0.2.1.dist-info/METADATA +256 -0
  42. fishertools-0.2.1.dist-info/RECORD +81 -0
  43. fishertools-0.2.1.dist-info/WHEEL +5 -0
  44. fishertools-0.2.1.dist-info/licenses/LICENSE +21 -0
  45. fishertools-0.2.1.dist-info/top_level.txt +2 -0
  46. tests/__init__.py +6 -0
  47. tests/conftest.py +25 -0
  48. tests/test_config/__init__.py +3 -0
  49. tests/test_config/test_basic_config.py +57 -0
  50. tests/test_config/test_config_error_handling.py +287 -0
  51. tests/test_config/test_config_properties.py +435 -0
  52. tests/test_documentation/__init__.py +3 -0
  53. tests/test_documentation/test_documentation_properties.py +253 -0
  54. tests/test_documentation/test_visual_documentation_properties.py +444 -0
  55. tests/test_errors/__init__.py +3 -0
  56. tests/test_errors/test_api.py +301 -0
  57. tests/test_errors/test_error_handling.py +354 -0
  58. tests/test_errors/test_explainer.py +173 -0
  59. tests/test_errors/test_formatters.py +338 -0
  60. tests/test_errors/test_models.py +248 -0
  61. tests/test_errors/test_patterns.py +270 -0
  62. tests/test_examples/__init__.py +3 -0
  63. tests/test_examples/test_example_repository_properties.py +204 -0
  64. tests/test_examples/test_specific_examples.py +303 -0
  65. tests/test_integration.py +298 -0
  66. tests/test_integration_enhancements.py +462 -0
  67. tests/test_learn/__init__.py +3 -0
  68. tests/test_learn/test_examples.py +221 -0
  69. tests/test_learn/test_tips.py +285 -0
  70. tests/test_learning/__init__.py +3 -0
  71. tests/test_learning/test_interactive_learning_properties.py +337 -0
  72. tests/test_learning/test_learning_system_properties.py +194 -0
  73. tests/test_learning/test_progress_tracking_properties.py +279 -0
  74. tests/test_legacy/__init__.py +3 -0
  75. tests/test_legacy/test_backward_compatibility.py +236 -0
  76. tests/test_legacy/test_deprecation_warnings.py +208 -0
  77. tests/test_safe/__init__.py +3 -0
  78. tests/test_safe/test_collections_properties.py +189 -0
  79. tests/test_safe/test_files.py +104 -0
  80. tests/test_structure.py +58 -0
  81. tests/test_structure_enhancements.py +115 -0
fishertools/config/parser.py
@@ -0,0 +1,265 @@
+"""
+Configuration file parser supporting multiple formats.
+"""
+
+import json
+import os
+from typing import Dict, Any, Optional
+from dataclasses import asdict
+from .models import LearningConfig, ValidationResult, ConfigError, ErrorSeverity, ConfigFormat
+
+try:
+    import yaml
+    YAML_AVAILABLE = True
+except ImportError:
+    YAML_AVAILABLE = False
+
+
+class ConfigurationParser:
+    """
+    Parses configuration files in various formats with validation.
+
+    Supports parsing JSON and YAML (with TOML format detection) and provides
+    comprehensive error reporting and validation.
+    """
+
+    def __init__(self):
+        """Initialize the configuration parser."""
+        pass
+
+    def parse_file(self, config_path: str) -> Dict[str, Any]:
+        """
+        Parse a configuration file based on its extension.
+
+        Args:
+            config_path: Path to the configuration file
+
+        Returns:
+            Dict[str, Any]: Parsed configuration data
+
+        Raises:
+            ValueError: If file format is unsupported
+            FileNotFoundError: If file doesn't exist
+        """
+        if not os.path.exists(config_path):
+            raise FileNotFoundError(f"Configuration file not found: {config_path}")
+
+        format_type = self.detect_format(config_path)
+
+        with open(config_path, 'r', encoding='utf-8') as f:
+            content = f.read()
+
+        if format_type == ConfigFormat.JSON:
+            return self.parse_json(content)
+        elif format_type == ConfigFormat.YAML:
+            return self.parse_yaml(content)
+        else:
+            raise ValueError(f"Unsupported configuration format: {format_type}")
+
+    def parse_json(self, content: str) -> Dict[str, Any]:
+        """
+        Parse JSON configuration content.
+
+        Args:
+            content: JSON content to parse
+
+        Returns:
+            Dict[str, Any]: Parsed configuration data
+
+        Raises:
+            ValueError: If JSON is invalid
+        """
+        try:
+            return json.loads(content)
+        except json.JSONDecodeError as e:
+            raise ValueError(f"Invalid JSON configuration: {e}")
+
+    def parse_yaml(self, content: str) -> Dict[str, Any]:
+        """
+        Parse YAML configuration content.
+
+        Args:
+            content: YAML content to parse
+
+        Returns:
+            Dict[str, Any]: Parsed configuration data
+
+        Raises:
+            ValueError: If YAML is invalid
+        """
+        if not YAML_AVAILABLE:
+            raise ValueError("YAML support not available. Install PyYAML to use YAML configurations.")
+
+        try:
+            return yaml.safe_load(content) or {}
+        except yaml.YAMLError as e:
+            raise ValueError(f"Invalid YAML configuration: {e}")
+
+    def format_to_json(self, config: LearningConfig) -> str:
+        """
+        Format configuration as JSON string.
+
+        Args:
+            config: Configuration to format
+
+        Returns:
+            str: JSON formatted configuration
+        """
+        config_dict = asdict(config)
+        return json.dumps(config_dict, indent=2, ensure_ascii=False)
+
+    def format_to_yaml(self, config: LearningConfig) -> str:
+        """
+        Format configuration as YAML string.
+
+        Args:
+            config: Configuration to format
+
+        Returns:
+            str: YAML formatted configuration
+        """
+        if not YAML_AVAILABLE:
+            raise ValueError("YAML support not available. Install PyYAML to use YAML configurations.")
+
+        config_dict = asdict(config)
+        return yaml.dump(config_dict, default_flow_style=False, allow_unicode=True, indent=2)
+
+    def validate_structure(self, config_data: Dict[str, Any]) -> ValidationResult:
+        """
+        Validate configuration structure and types.
+
+        Args:
+            config_data: Configuration data to validate
+
+        Returns:
+            ValidationResult: Validation result with errors/warnings
+        """
+        errors = []
+        warnings = []
+
+        # Define expected fields and their types
+        expected_fields = {
+            'default_level': str,
+            'explanation_verbosity': str,
+            'visual_aids_enabled': bool,
+            'diagram_style': str,
+            'color_scheme': str,
+            'progress_tracking_enabled': bool,
+            'save_progress_locally': bool,
+            'suggested_topics_count': int,
+            'max_examples_per_topic': int,
+            'exercise_difficulty_progression': list,
+            'readthedocs_project': (str, type(None)),
+            'sphinx_theme': str,
+            'enable_interactive_sessions': bool,
+            'session_timeout_minutes': int,
+            'max_hint_count': int
+        }
+
+        # Valid values for enum-like fields
+        valid_values = {
+            'default_level': ['beginner', 'intermediate', 'advanced'],
+            'explanation_verbosity': ['brief', 'detailed', 'comprehensive']
+        }
+
+        # Check for missing required fields
+        required_fields = ['default_level', 'explanation_verbosity']
+        for field in required_fields:
+            if field not in config_data:
+                errors.append(ConfigError(
+                    message=f"Required field '{field}' is missing",
+                    field_path=field,
+                    severity=ErrorSeverity.ERROR,
+                    suggested_fix=f"Add '{field}' field with a valid value"
+                ))
+
+        # Check field types and values
+        for field, expected_type in expected_fields.items():
+            if field in config_data:
+                value = config_data[field]
+
+                # Handle nullable fields
+                if isinstance(expected_type, tuple):
+                    if value is not None and not isinstance(value, expected_type[0]):
+                        errors.append(ConfigError(
+                            message=f"Field '{field}' has invalid type. Expected {expected_type[0].__name__} or None, got {type(value).__name__}",
+                            field_path=field,
+                            severity=ErrorSeverity.ERROR,
+                            suggested_fix=f"Change '{field}' to a {expected_type[0].__name__} value or null"
+                        ))
+                else:
+                    if not isinstance(value, expected_type):
+                        errors.append(ConfigError(
+                            message=f"Field '{field}' has invalid type. Expected {expected_type.__name__}, got {type(value).__name__}",
+                            field_path=field,
+                            severity=ErrorSeverity.ERROR,
+                            suggested_fix=f"Change '{field}' to a {expected_type.__name__} value"
+                        ))
+
+                # Check valid values for enum-like fields
+                if field in valid_values and value not in valid_values[field]:
+                    errors.append(ConfigError(
+                        message=f"Field '{field}' has invalid value '{value}'. Valid values: {valid_values[field]}",
+                        field_path=field,
+                        severity=ErrorSeverity.ERROR,
+                        suggested_fix=f"Set '{field}' to one of: {', '.join(valid_values[field])}"
+                    ))
+
+        # Check for unknown fields (warnings)
+        for field in config_data:
+            if field not in expected_fields:
+                warnings.append(ConfigError(
+                    message=f"Unknown field '{field}' will be ignored",
+                    field_path=field,
+                    severity=ErrorSeverity.WARNING,
+                    suggested_fix=f"Remove '{field}' field or check for typos"
+                ))
+
+        # Validate numeric ranges
+        if 'suggested_topics_count' in config_data:
+            value = config_data['suggested_topics_count']
+            if isinstance(value, int) and (value < 1 or value > 10):
+                warnings.append(ConfigError(
+                    message=f"Field 'suggested_topics_count' value {value} is outside recommended range (1-10)",
+                    field_path='suggested_topics_count',
+                    severity=ErrorSeverity.WARNING,
+                    suggested_fix="Set 'suggested_topics_count' to a value between 1 and 10"
+                ))
+
+        if 'max_examples_per_topic' in config_data:
+            value = config_data['max_examples_per_topic']
+            if isinstance(value, int) and (value < 1 or value > 20):
+                warnings.append(ConfigError(
+                    message=f"Field 'max_examples_per_topic' value {value} is outside recommended range (1-20)",
+                    field_path='max_examples_per_topic',
+                    severity=ErrorSeverity.WARNING,
+                    suggested_fix="Set 'max_examples_per_topic' to a value between 1 and 20"
+                ))
+
+        return ValidationResult(
+            is_valid=len(errors) == 0,
+            errors=errors,
+            warnings=warnings
+        )
+
+    def detect_format(self, file_path: str) -> ConfigFormat:
+        """
+        Detect configuration file format from extension.
+
+        Args:
+            file_path: Path to the configuration file
+
+        Returns:
+            ConfigFormat: Detected file format
+        """
+        _, ext = os.path.splitext(file_path.lower())
+
+        if ext == '.json':
+            return ConfigFormat.JSON
+        elif ext in ['.yaml', '.yml']:
+            return ConfigFormat.YAML
+        elif ext == '.toml':
+            return ConfigFormat.TOML
+        else:
+            # Default to JSON if extension is unknown
+            return ConfigFormat.JSON
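
The parser only raises for hard failures (missing file, unparseable content); structural problems are reported through ValidationResult instead. A minimal usage sketch, not part of the package; the file name is hypothetical and the import path is assumed from the file layout above:

    from fishertools.config.parser import ConfigurationParser

    parser = ConfigurationParser()

    # detect_format() is used internally; unknown extensions fall back to JSON.
    data = parser.parse_file("learning_config.json")  # hypothetical file

    # validate_structure() returns a ValidationResult rather than raising.
    result = parser.validate_structure(data)
    if not result.is_valid:
        for error in result.errors:
            print(f"{error.field_path}: {error.message} ({error.suggested_fix})")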
fishertools/decorators.py
@@ -0,0 +1,93 @@
+"""
+Useful decorators for debugging, profiling, and other tasks
+"""
+
+import time
+import functools
+from typing import Any, Callable
+
+
+def timer(func: Callable) -> Callable:
+    """Decorator that measures a function's execution time"""
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        start_time = time.time()
+        result = func(*args, **kwargs)
+        end_time = time.time()
+        print(f"{func.__name__} finished in {end_time - start_time:.4f} seconds")
+        return result
+    return wrapper
+
+
+def debug(func: Callable) -> Callable:
+    """Debugging decorator - prints a function's arguments and result"""
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        print(f"Calling {func.__name__} with arguments: args={args}, kwargs={kwargs}")
+        result = func(*args, **kwargs)
+        print(f"{func.__name__} returned: {result}")
+        return result
+    return wrapper
+
+
+def retry(max_attempts: int = 3, delay: float = 1.0):
+    """Decorator that retries a function call when it raises an error"""
+    def decorator(func: Callable) -> Callable:
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            for attempt in range(max_attempts):
+                try:
+                    return func(*args, **kwargs)
+                except Exception as e:
+                    if attempt == max_attempts - 1:
+                        raise e
+                    print(f"Attempt {attempt + 1} failed: {e}. Retrying in {delay} s...")
+                    time.sleep(delay)
+        return wrapper
+    return decorator
+
+
+def cache_result(func: Callable) -> Callable:
+    """Simple decorator for caching function results"""
+    cache = {}
+
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        # Build a key from the arguments
+        key = str(args) + str(sorted(kwargs.items()))
+
+        if key in cache:
+            print(f"Result of {func.__name__} taken from cache")
+            return cache[key]
+
+        result = func(*args, **kwargs)
+        cache[key] = result
+        return result
+
+    return wrapper
+
+
+def validate_types(**expected_types):
+    """Decorator that validates the types of function arguments"""
+    def decorator(func: Callable) -> Callable:
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            # Get the function's parameter names
+            import inspect
+            sig = inspect.signature(func)
+            bound_args = sig.bind(*args, **kwargs)
+            bound_args.apply_defaults()
+
+            # Check the types
+            for param_name, expected_type in expected_types.items():
+                if param_name in bound_args.arguments:
+                    value = bound_args.arguments[param_name]
+                    if not isinstance(value, expected_type):
+                        raise TypeError(
+                            f"Parameter '{param_name}' must be of type {expected_type.__name__}, "
+                            f"got {type(value).__name__}"
+                        )
+
+            return func(*args, **kwargs)
+        return wrapper
+    return decorator
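
These decorators are plain wrappers with no configuration beyond their arguments, so they compose by simple stacking. A short sketch, assuming the module is importable as fishertools.decorators (per the file list above); the example functions are made up:

    from fishertools.decorators import retry, timer, validate_types

    @timer                      # prints how long each call took
    @validate_types(name=str)   # raises TypeError if name is not a str
    def greet(name: str) -> str:
        return f"Hello, {name}!"

    greet("Ada")

    @retry(max_attempts=3, delay=0.1)
    def flaky():
        raise RuntimeError("still failing")  # re-raised after the final attempt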
fishertools/documentation/__init__.py
@@ -0,0 +1,38 @@
+"""
+Documentation Generation Module
+
+Provides automatic API documentation generation with Sphinx integration
+and ReadTheDocs publishing capabilities.
+"""
+
+from .generator import DocumentationGenerator
+from .visual import VisualDocumentation
+from .api import APIGenerator
+from .models import (
+    APIInfo,
+    FunctionInfo,
+    SphinxDocuments,
+    NavigationTree,
+    ExampleCode,
+    PublishResult,
+    MermaidDiagram,
+    FlowDiagram,
+    Flowchart,
+    StructureDiagram
+)
+
+__all__ = [
+    "DocumentationGenerator",
+    "VisualDocumentation",
+    "APIGenerator",
+    "APIInfo",
+    "FunctionInfo",
+    "SphinxDocuments",
+    "NavigationTree",
+    "ExampleCode",
+    "PublishResult",
+    "MermaidDiagram",
+    "FlowDiagram",
+    "Flowchart",
+    "StructureDiagram"
+]
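
A brief sketch of consuming these re-exports (the import path is assumed from the package layout; only names listed in __all__ above are used):

    from fishertools.documentation import APIGenerator, DocumentationGenerator, VisualDocumentation

    api_generator = APIGenerator()  # takes no arguments, per api.py below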
fishertools/documentation/api.py
@@ -0,0 +1,242 @@
+"""
+API documentation generator with Sphinx AutoAPI integration.
+"""
+
+import ast
+import os
+import inspect
+from typing import List, Dict, Any, Optional
+from .models import APIInfo, FunctionInfo
+
+
+class APIGenerator:
+    """
+    Generates API documentation using Sphinx AutoAPI.
+
+    Extracts docstrings, parameter types, and function signatures
+    to create comprehensive API documentation.
+    """
+
+    def __init__(self):
+        """Initialize the API generator."""
+        pass
+
+    def parse_module(self, module_path: str) -> APIInfo:
+        """
+        Parse a Python module and extract API information.
+
+        Args:
+            module_path: Path to the Python module file
+
+        Returns:
+            APIInfo: Extracted API information
+        """
+        with open(module_path, 'r', encoding='utf-8') as f:
+            source_code = f.read()
+
+        tree = ast.parse(source_code)
+        module_name = os.path.splitext(os.path.basename(module_path))[0]
+
+        functions = []
+        classes = []
+        constants = {}
+        imports = []
+
+        # Extract module-level docstring
+        module_docstring = self.extract_docstring(tree)
+
+        for node in ast.walk(tree):
+            if isinstance(node, ast.FunctionDef):
+                # Only include top-level functions (not nested or class methods)
+                if node in tree.body:
+                    func_info = self.extract_function_info(node, module_path)
+                    functions.append(func_info)
+
+            elif isinstance(node, ast.ClassDef):
+                class_info = {
+                    'name': node.name,
+                    'docstring': self.extract_docstring(node),
+                    'methods': [],
+                    'line_number': node.lineno
+                }
+
+                # Extract class methods
+                for item in node.body:
+                    if isinstance(item, ast.FunctionDef):
+                        method_info = self.extract_function_info(item, module_path)
+                        class_info['methods'].append(method_info)
+
+                classes.append(class_info)
+
+            elif isinstance(node, ast.Assign):
+                # Extract module-level constants
+                for target in node.targets:
+                    if isinstance(target, ast.Name) and target.id.isupper():
+                        constants[target.id] = ast.unparse(node.value) if hasattr(ast, 'unparse') else str(node.value)
+
+            elif isinstance(node, (ast.Import, ast.ImportFrom)):
+                if isinstance(node, ast.Import):
+                    for alias in node.names:
+                        imports.append(f"import {alias.name}")
+                else:
+                    module = node.module or ""
+                    for alias in node.names:
+                        imports.append(f"from {module} import {alias.name}")
+
+        return APIInfo(
+            module_name=module_name,
+            functions=functions,
+            classes=classes,
+            constants=constants,
+            imports=imports,
+            docstring=module_docstring
+        )
+
+    def extract_function_info(self, func_node: ast.FunctionDef, module_path: str) -> FunctionInfo:
+        """
+        Extract information from a function AST node.
+
+        Args:
+            func_node: AST node representing a function
+            module_path: Path to the module containing the function
+
+        Returns:
+            FunctionInfo: Extracted function information
+        """
+        docstring = self.extract_docstring(func_node)
+        parameters = self.extract_type_annotations(func_node)
+
+        # Extract return type annotation
+        return_type = None
+        if func_node.returns:
+            if hasattr(ast, 'unparse'):
+                return_type = ast.unparse(func_node.returns)
+            else:
+                return_type = str(func_node.returns)
+
+        return FunctionInfo(
+            name=func_node.name,
+            docstring=docstring,
+            parameters=parameters,
+            return_type=return_type,
+            module_path=module_path,
+            line_number=func_node.lineno
+        )
+
+    def extract_docstring(self, node: ast.AST) -> Optional[str]:
+        """
+        Extract docstring from an AST node.
+
+        Args:
+            node: AST node (function, class, or module)
+
+        Returns:
+            Optional[str]: Extracted docstring or None
+        """
+        if not hasattr(node, 'body') or not node.body:
+            return None
+
+        first_stmt = node.body[0]
+        if isinstance(first_stmt, ast.Expr) and isinstance(first_stmt.value, ast.Constant):
+            if isinstance(first_stmt.value.value, str):
+                return first_stmt.value.value
+        elif isinstance(first_stmt, ast.Expr) and isinstance(first_stmt.value, ast.Str):
+            # For older Python versions
+            return first_stmt.value.s
+
+        return None
+
+    def extract_type_annotations(self, func_node: ast.FunctionDef) -> Dict[str, str]:
+        """
+        Extract type annotations from a function.
+
+        Args:
+            func_node: Function AST node
+
+        Returns:
+            Dict[str, str]: Parameter names mapped to type annotations
+        """
+        parameters = {}
+
+        for arg in func_node.args.args:
+            param_name = arg.arg
+            if arg.annotation:
+                if hasattr(ast, 'unparse'):
+                    param_type = ast.unparse(arg.annotation)
+                else:
+                    param_type = str(arg.annotation)
+                parameters[param_name] = param_type
+            else:
+                parameters[param_name] = "Any"
+
+        # Handle keyword-only arguments
+        for arg in func_node.args.kwonlyargs:
+            param_name = arg.arg
+            if arg.annotation:
+                if hasattr(ast, 'unparse'):
+                    param_type = ast.unparse(arg.annotation)
+                else:
+                    param_type = str(arg.annotation)
+                parameters[param_name] = param_type
+            else:
+                parameters[param_name] = "Any"
+
+        return parameters
+
+    def generate_sphinx_rst(self, api_info: APIInfo) -> str:
+        """
+        Generate Sphinx RST documentation from API information.
+
+        Args:
+            api_info: API information to document
+
+        Returns:
+            str: Generated RST content
+        """
+        rst_content = []
+
+        # Module header
+        module_title = f"{api_info.module_name} Module"
+        rst_content.append(module_title)
+        rst_content.append("=" * len(module_title))
+        rst_content.append("")
+
+        # Module docstring
+        if api_info.docstring:
+            rst_content.append(api_info.docstring)
+            rst_content.append("")
+
+        # Functions section
+        if api_info.functions:
+            rst_content.append("Functions")
+            rst_content.append("-" * 9)
+            rst_content.append("")
+
+            for func in api_info.functions:
+                rst_content.append(f".. autofunction:: {api_info.module_name}.{func.name}")
+                rst_content.append("")
+
+        # Classes section
+        if api_info.classes:
+            rst_content.append("Classes")
+            rst_content.append("-" * 7)
+            rst_content.append("")
+
+            for cls in api_info.classes:
+                rst_content.append(f".. autoclass:: {api_info.module_name}.{cls['name']}")
+                rst_content.append(" :members:")
+                rst_content.append(" :undoc-members:")
+                rst_content.append(" :show-inheritance:")
+                rst_content.append("")
+
+        # Constants section
+        if api_info.constants:
+            rst_content.append("Constants")
+            rst_content.append("-" * 9)
+            rst_content.append("")
+
+            for name, value in api_info.constants.items():
+                rst_content.append(f".. autodata:: {api_info.module_name}.{name}")
+                rst_content.append("")
+
+        return "\n".join(rst_content)
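
Taken together, parse_module() and generate_sphinx_rst() form a small pipeline from a source file to an RST page. A hedged end-to-end sketch; the input path points at a file from this package, the output path is illustrative, and neither is prescribed by the package itself:

    from fishertools.documentation.api import APIGenerator

    generator = APIGenerator()
    api_info = generator.parse_module("fishertools/helpers.py")

    print(api_info.module_name)             # "helpers"
    for func in api_info.functions:
        print(func.name, func.parameters)   # name -> {parameter: annotation}

    rst = generator.generate_sphinx_rst(api_info)
    with open("helpers.rst", "w", encoding="utf-8") as f:
        f.write(rst)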