drf-to-mkdoc 0.2.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of drf-to-mkdoc might be problematic.

Files changed (33)
  1. drf_to_mkdoc/conf/defaults.py +5 -0
  2. drf_to_mkdoc/conf/settings.py +123 -9
  3. drf_to_mkdoc/management/commands/build_docs.py +8 -7
  4. drf_to_mkdoc/management/commands/build_endpoint_docs.py +69 -0
  5. drf_to_mkdoc/management/commands/build_model_docs.py +50 -0
  6. drf_to_mkdoc/management/commands/{generate_model_docs.py → extract_model_data.py} +14 -19
  7. drf_to_mkdoc/utils/ai_tools/__init__.py +0 -0
  8. drf_to_mkdoc/utils/ai_tools/enums.py +13 -0
  9. drf_to_mkdoc/utils/ai_tools/exceptions.py +19 -0
  10. drf_to_mkdoc/utils/ai_tools/providers/__init__.py +0 -0
  11. drf_to_mkdoc/utils/ai_tools/providers/base_provider.py +123 -0
  12. drf_to_mkdoc/utils/ai_tools/providers/gemini_provider.py +80 -0
  13. drf_to_mkdoc/utils/ai_tools/types.py +81 -0
  14. drf_to_mkdoc/utils/commons/__init__.py +0 -0
  15. drf_to_mkdoc/utils/commons/code_extractor.py +22 -0
  16. drf_to_mkdoc/utils/commons/file_utils.py +35 -0
  17. drf_to_mkdoc/utils/commons/model_utils.py +83 -0
  18. drf_to_mkdoc/utils/commons/operation_utils.py +83 -0
  19. drf_to_mkdoc/utils/commons/path_utils.py +78 -0
  20. drf_to_mkdoc/utils/commons/schema_utils.py +230 -0
  21. drf_to_mkdoc/utils/endpoint_detail_generator.py +7 -34
  22. drf_to_mkdoc/utils/endpoint_list_generator.py +1 -1
  23. drf_to_mkdoc/utils/extractors/query_parameter_extractors.py +33 -30
  24. drf_to_mkdoc/utils/model_detail_generator.py +21 -15
  25. drf_to_mkdoc/utils/model_list_generator.py +25 -15
  26. drf_to_mkdoc/utils/schema.py +259 -0
  27. {drf_to_mkdoc-0.2.0.dist-info → drf_to_mkdoc-0.2.1.dist-info}/METADATA +16 -5
  28. {drf_to_mkdoc-0.2.0.dist-info → drf_to_mkdoc-0.2.1.dist-info}/RECORD +31 -16
  29. drf_to_mkdoc/management/commands/generate_docs.py +0 -113
  30. drf_to_mkdoc/utils/common.py +0 -353
  31. {drf_to_mkdoc-0.2.0.dist-info → drf_to_mkdoc-0.2.1.dist-info}/WHEEL +0 -0
  32. {drf_to_mkdoc-0.2.0.dist-info → drf_to_mkdoc-0.2.1.dist-info}/licenses/LICENSE +0 -0
  33. {drf_to_mkdoc-0.2.0.dist-info → drf_to_mkdoc-0.2.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,81 @@
+ from dataclasses import dataclass, field
+ from typing import Any
+
+ from drf_to_mkdoc.utils.ai_tools.enums import MessageRole
+
+
+ @dataclass
+ class TokenUsage:
+     """Standardized token usage across all providers"""
+
+     request_tokens: int
+     response_tokens: int
+     total_tokens: int
+     provider: str
+     model: str
+
+     def to_dict(self) -> dict[str, Any]:
+         return {
+             "request_tokens": self.request_tokens,
+             "response_tokens": self.response_tokens,
+             "total_tokens": self.total_tokens,
+             "provider": self.provider,
+             "model": self.model,
+         }
+
+
+ @dataclass
+ class Message:
+     """Chat message"""
+
+     role: MessageRole
+     content: str
+     metadata: dict[str, Any] | None = None
+
+     def to_dict(self) -> dict[str, Any]:
+         result = {"role": self.role.value, "content": self.content}
+         if self.metadata:
+             result["metadata"] = self.metadata
+         return result
+
+
+ @dataclass
+ class ChatResponse:
+     """Standardized response from AI providers"""
+
+     content: str
+     usage: TokenUsage | None = None
+     model: str | None = None
+     metadata: dict[str, Any] | None = None
+
+     def to_dict(self) -> dict[str, Any]:
+         result = {
+             "content": self.content,
+         }
+         if self.usage:
+             result["usage"] = self.usage.to_dict()
+         if self.metadata:
+             result["metadata"] = self.metadata
+         if self.model is not None:
+             result["model"] = self.model
+         return result
+
+
+ @dataclass
+ class ProviderConfig:
+     """Configuration for AI providers"""
+
+     model_name: str
+     api_key: str
+     temperature: float = 0.7
+     max_tokens: int | None = None
+     extra_params: dict[str, Any] = field(default_factory=dict)
+
+     def to_dict(self) -> dict[str, Any]:
+         return {
+             "api_key": self.api_key,
+             "model_name": self.model_name,
+             "temperature": self.temperature,
+             "max_tokens": self.max_tokens,
+             "extra_params": self.extra_params,
+         }
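
A minimal usage sketch of the new ai_tools dataclasses (not taken from the package; the model name, API key, and the assumption that MessageRole defines a USER member are illustrative):

    from drf_to_mkdoc.utils.ai_tools.enums import MessageRole
    from drf_to_mkdoc.utils.ai_tools.types import Message, ProviderConfig, TokenUsage

    # Placeholder values; ProviderConfig ships no default model or key.
    config = ProviderConfig(model_name="gemini-1.5-flash", api_key="YOUR_KEY", temperature=0.2)
    prompt = Message(role=MessageRole.USER, content="Summarize this endpoint.")  # assumes a USER member
    usage = TokenUsage(
        request_tokens=120, response_tokens=45, total_tokens=165,
        provider="gemini", model=config.model_name,
    )
    print(prompt.to_dict(), usage.to_dict())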
File without changes
@@ -0,0 +1,22 @@
+ """Code extraction utilities."""
+
+ from pathlib import Path
+
+ from drf_to_mkdoc.conf.settings import drf_to_mkdoc_settings
+
+
+ def create_ai_code_directories() -> None:
+     """Create the directory structure for AI-generated code files."""
+     # Get base config directory
+     config_dir = Path(drf_to_mkdoc_settings.CONFIG_DIR)
+
+     # Create AI code directory
+     ai_code_dir = config_dir / drf_to_mkdoc_settings.AI_CONFIG_DIR_NAME
+
+     # Create subdirectories
+     subdirs = ["serializers", "views", "permissions"]
+
+     # Create all directories
+     for subdir in subdirs:
+         dir_path = ai_code_dir / subdir
+         Path.mkdir(dir_path, parents=True, exist_ok=True)
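
A sketch of what the helper produces, assuming CONFIG_DIR resolves to docs_config/ and AI_CONFIG_DIR_NAME to ai_code (both are project settings, not fixed values):

    from drf_to_mkdoc.utils.commons.code_extractor import create_ai_code_directories

    create_ai_code_directories()
    # Creates (idempotently, thanks to exist_ok=True):
    #   docs_config/ai_code/serializers/
    #   docs_config/ai_code/views/
    #   docs_config/ai_code/permissions/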
@@ -0,0 +1,35 @@
+ """File operation utilities."""
+
+ import json
+ from pathlib import Path
+ from typing import Any
+
+ from drf_to_mkdoc.conf.settings import drf_to_mkdoc_settings
+
+
+ def write_file(file_path: str, content: str) -> None:
+     full_path = Path(drf_to_mkdoc_settings.DOCS_DIR) / file_path
+     try:
+         full_path.parent.mkdir(parents=True, exist_ok=True)
+         tmp_path = full_path.with_suffix(full_path.suffix + ".tmp")
+
+         with tmp_path.open("w", encoding="utf-8") as f:
+             # Use atomic writes to avoid partially written docs.
+             f.write(content)
+         tmp_path.replace(full_path)
+     except OSError as e:
+         raise OSError(f"Failed to write file {full_path}: {e}") from e
+
+
+ def load_json_data(file_path: str, raise_not_found: bool = True) -> dict[str, Any] | None:
+     json_file = Path(file_path)
+     if not json_file.exists():
+         if raise_not_found:
+             raise FileNotFoundError(f"File not found: {json_file}")
+         return None
+
+     with json_file.open("r", encoding="utf-8") as f:
+         try:
+             return json.load(f)
+         except json.JSONDecodeError as e:
+             raise ValueError(f"Invalid JSON in {json_file}: {e}") from e
@@ -0,0 +1,83 @@
+ """Model-related utilities."""
+
+ import importlib
+
+ from django.apps import apps
+ from django.core.exceptions import AppRegistryNotReady
+
+ from drf_to_mkdoc.conf.settings import drf_to_mkdoc_settings
+ from drf_to_mkdoc.utils.commons.file_utils import load_json_data
+
+
+ def get_model_docstring(class_name: str) -> str | None:
+     """Extract docstring from Django model class"""
+     try:
+         # Check if Django is properly initialized
+         apps.check_apps_ready()
+
+         # Common Django app names to search
+         app_names = drf_to_mkdoc_settings.DJANGO_APPS
+
+         for app_name in app_names:
+             try:
+                 # Try to import the models module
+                 models_module = importlib.import_module(f"{app_name}.models")
+
+                 # Check if the class exists in this module
+                 if hasattr(models_module, class_name):
+                     model_class = getattr(models_module, class_name)
+
+                     # Get the docstring
+                     docstring = getattr(model_class, "__doc__", None)
+
+                     if docstring:
+                         # Clean up the docstring
+                         docstring = docstring.strip()
+
+                         # Filter out auto-generated or generic docstrings
+                         if (
+                             docstring
+                             and not docstring.startswith(class_name + "(")
+                             and not docstring.startswith("str(object=")
+                             and not docstring.startswith("Return repr(self)")
+                             and "django.db.models" not in docstring.lower()
+                             and len(docstring) > 10
+                         ):  # Minimum meaningful length
+                             return docstring
+
+             except (ImportError, AttributeError):
+                 continue
+
+     except (ImportError, AppRegistryNotReady):
+         # Django not initialized or not available - skip docstring extraction
+         pass
+
+     return None
+
+
+ def get_model_description(class_name: str) -> str:
+     """Get a brief description for a model with priority-based selection"""
+     # Priority 1: Description from config file
+     config = load_json_data(drf_to_mkdoc_settings.DOC_CONFIG_FILE, raise_not_found=False)
+     if config and "model_descriptions" in config:
+         config_description = config["model_descriptions"].get(class_name, "").strip()
+         if config_description:
+             return config_description
+
+     # Priority 2: Extract docstring from model class
+     docstring = get_model_docstring(class_name)
+     if docstring:
+         return docstring
+
+     # Priority 3: static value
+     return "Not provided"
+
+
+ def get_app_descriptions() -> dict[str, str]:
+     """Get descriptions for Django apps from config file"""
+     config = load_json_data(drf_to_mkdoc_settings.DOC_CONFIG_FILE, raise_not_found=False)
+     if config and "app_descriptions" in config:
+         return config["app_descriptions"]
+
+     # Fallback to empty dict if config not available
+     return {}
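
A sketch of the description lookup order; the DOC_CONFIG_FILE contents in the comment are illustrative (only the model_descriptions and app_descriptions keys are read), and the model names are made up:

    from drf_to_mkdoc.utils.commons.model_utils import get_model_description

    # DOC_CONFIG_FILE (illustrative contents):
    #   {"model_descriptions": {"Order": "A customer purchase."},
    #    "app_descriptions":   {"shop": "Storefront endpoints."}}
    print(get_model_description("Order"))    # config entry wins
    print(get_model_description("Invoice"))  # falls back to the model docstring, then "Not provided"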
@@ -0,0 +1,83 @@
+ """Operation ID and viewset utilities."""
+
+ import logging
+ from functools import lru_cache
+ from typing import Any
+
+ from django.urls import Resolver404, resolve
+
+ from drf_to_mkdoc.utils.commons.path_utils import substitute_path_params
+ from drf_to_mkdoc.utils.commons.schema_utils import get_schema
+
+ logger = logging.getLogger(__name__)
+
+
+ @lru_cache
+ def get_operation_id_path_map() -> dict[str, tuple[str, list[dict[str, Any]]]]:
+     schema = get_schema()
+     paths = schema.get("paths", {})
+     mapping = {}
+
+     for path, actions in paths.items():
+         for http_method_name, action_data in actions.items():
+             if http_method_name.lower() == "parameters" or not isinstance(action_data, dict):
+                 # Skip path-level parameters entries (e.g., "parameters": [...] in OpenAPI schema)
+                 continue
+             operation_id = action_data.get("operationId")
+             if operation_id:
+                 mapping[operation_id] = (path, action_data.get("parameters", []))
+
+     return mapping
+
+
+ def extract_viewset_from_operation_id(operation_id: str):
+     """Extract the ViewSet class from an OpenAPI operation ID."""
+     operation_map = get_operation_id_path_map()
+     entry = operation_map.get(operation_id)
+     if not entry:
+         raise ValueError(f"Unknown operationId: {operation_id!r}")
+     path, parameters = entry
+
+     resolved_path = substitute_path_params(path, parameters)
+     try:
+         match = resolve(resolved_path)
+         view_func = match.func
+         if hasattr(view_func, "view_class"):
+             # For generic class-based views
+             return view_func.view_class
+
+         if hasattr(view_func, "cls"):
+             # For viewsets
+             return view_func.cls
+
+     except Resolver404:
+         logger.exception(
+             "Failed to resolve path. schema_path=%s tried_path=%s",
+             path,
+             resolved_path,
+         )
+     else:
+         return view_func
+
+
+ def extract_viewset_name_from_operation_id(operation_id: str):
+     view_cls = extract_viewset_from_operation_id(operation_id)
+     return view_cls.__name__ if hasattr(view_cls, "__name__") else str(view_cls)
+
+
+ def extract_app_from_operation_id(operation_id: str) -> str:
+     view = extract_viewset_from_operation_id(operation_id)
+
+     if isinstance(view, type):
+         module = view.__module__
+     elif hasattr(view, "__class__"):
+         module = view.__class__.__module__
+     else:
+         raise TypeError("Expected a view class or instance")
+
+     return module.split(".")[0]
+
+
+ def format_method_badge(method: str) -> str:
+     """Create a colored badge for HTTP method"""
+     return f'<span class="method-badge method-{method.lower()}">{method.upper()}</span>'
@@ -0,0 +1,78 @@
+ """Path manipulation utilities."""
+
+ import logging
+ import re
+ from typing import Any
+
+ from django.utils.module_loading import import_string
+
+ from drf_to_mkdoc.conf.settings import drf_to_mkdoc_settings
+
+ logger = logging.getLogger(__name__)
+
+
+ def substitute_path_params(path: str, parameters: list[dict[str, Any]]) -> str:
+     django_path = convert_to_django_path(path, parameters)
+
+     django_path = re.sub(r"\{[^}]+\}", "1", django_path)
+     django_path = re.sub(r"<int:[^>]+>", "1", django_path)
+     django_path = re.sub(r"<uuid:[^>]+>", "12345678-1234-5678-9abc-123456789012", django_path)
+     django_path = re.sub(r"<float:[^>]+>", "1.0", django_path)
+     django_path = re.sub(r"<(?:string|str):[^>]+>", "dummy", django_path)
+     django_path = re.sub(r"<path:[^>]+>", "dummy/path", django_path)
+     django_path = re.sub(r"<[^:>]+>", "dummy", django_path)  # Catch remaining simple params
+
+     return django_path  # noqa: RET504
+
+
+ def convert_to_django_path(path: str, parameters: list[dict[str, Any]]) -> str:
+     """
+     Convert a path with {param} to a Django-style path with <type:param>.
+     If PATH_PARAM_SUBSTITUTE_FUNCTION is set, call it and merge its returned mapping.
+     """
+     function = None
+     func_path = drf_to_mkdoc_settings.PATH_PARAM_SUBSTITUTE_FUNCTION
+
+     if func_path:
+         try:
+             function = import_string(func_path)
+         except ImportError:
+             logger.warning("Invalid PATH_PARAM_SUBSTITUTE_FUNCTION import path: %r", func_path)
+
+     # If custom function exists and returns a valid value, use it
+     mapping = dict(drf_to_mkdoc_settings.PATH_PARAM_SUBSTITUTE_MAPPING or {})
+     if callable(function):
+         try:
+             result = function(path, parameters)
+             if result and isinstance(result, dict):
+                 mapping.update(result)
+         except Exception:
+             logger.exception("Error in custom path substitutor %r for path %r", func_path, path)
+
+     # Default Django path conversion
+     def replacement(match):
+         param_name = match.group(1)
+         custom_param_type = mapping.get(param_name)
+         if custom_param_type and custom_param_type in ("int", "uuid", "str"):
+             converter = custom_param_type
+         else:
+             param_info = next((p for p in parameters if p.get("name") == param_name), {})
+             param_type = param_info.get("schema", {}).get("type")
+             param_format = param_info.get("schema", {}).get("format")
+
+             if param_type == "integer":
+                 converter = "int"
+             elif param_type == "string" and param_format == "uuid":
+                 converter = "uuid"
+             else:
+                 converter = "str"
+
+         return f"<{converter}:{param_name}>"
+
+     return re.sub(r"{(\w+)}", replacement, path)
+
+
+ def create_safe_filename(path: str, method: str) -> str:
+     """Create a safe filename from path and method"""
+     safe_path = re.sub(r"[^a-zA-Z0-9_-]", "_", path.strip("/"))
+     return f"{method.lower()}_{safe_path}.md"
@@ -0,0 +1,230 @@
+ import json
+ from pathlib import Path
+ from typing import Any
+
+ import yaml
+ from drf_spectacular.generators import SchemaGenerator
+
+ from drf_to_mkdoc.conf.settings import drf_to_mkdoc_settings
+ from drf_to_mkdoc.utils.commons.file_utils import load_json_data
+
+
+ class SchemaValidationError(Exception):
+     """Custom exception for schema validation errors."""
+
+     pass
+
+
+ class QueryParamTypeError(Exception):
+     """Custom exception for query parameter type errors."""
+
+     pass
+
+
+ def load_schema() -> dict[str, Any] | None:
+     """Load the OpenAPI schema from doc-schema.yaml"""
+     schema_file = Path(drf_to_mkdoc_settings.CONFIG_DIR) / "doc-schema.yaml"
+     if not schema_file.exists():
+         return None
+
+     with schema_file.open(encoding="utf-8") as f:
+         return yaml.safe_load(f)
+
+
+ def get_custom_schema():
+     custom_schema_data = load_json_data(
+         drf_to_mkdoc_settings.CUSTOM_SCHEMA_FILE, raise_not_found=False
+     )
+     if not custom_schema_data:
+         return {}
+
+     for _operation_id, overrides in custom_schema_data.items():
+         parameters = overrides.get("parameters", [])
+         if not parameters:
+             continue
+         for parameter in parameters:
+             if {"name", "in", "description", "required", "schema"} - set(parameter.keys()):
+                 raise SchemaValidationError("Required keys are not passed")
+
+             if parameter["in"] == "query":
+                 queryparam_type = parameter.get("queryparam_type")
+                 if not queryparam_type:
+                     raise QueryParamTypeError("queryparam_type is required for query")
+
+                 if queryparam_type not in (
+                     {
+                         "search_fields",
+                         "filter_fields",
+                         "ordering_fields",
+                         "filter_backends",
+                         "pagination_fields",
+                     }
+                 ):
+                     raise QueryParamTypeError("Invalid queryparam_type")
+
+     return custom_schema_data
+
+
+ def _merge_parameters(
+     base_parameters: list[dict[str, Any]], custom_parameters: list[dict[str, Any]]
+ ) -> list[dict[str, Any]]:
+     """
+     Merge parameters from base and custom schemas, avoiding duplicates.
+
+     Parameters are considered duplicates if they have the same 'name' and 'in' values.
+     Custom parameters will override base parameters with the same (name, in) key.
+     """
+
+     def _get_param_key(param: dict[str, Any]) -> tuple[str, str] | None:
+         """Extract (name, in) tuple from parameter, return None if invalid."""
+         name = param.get("name")
+         location = param.get("in")
+         return (name, location) if name and location else None
+
+     param_index = {}
+     for param in base_parameters:
+         key = _get_param_key(param)
+         if key:
+             param_index[key] = param
+
+     for param in custom_parameters:
+         key = _get_param_key(param)
+         if key:
+             param_index[key] = param
+
+     return list(param_index.values())
+
+
+ def _build_operation_map(base_schema: dict) -> dict[str, tuple[str, str]]:
+     """Build a mapping from operationId → (path, method)."""
+     op_map = {}
+     HTTP_METHODS = {"get", "post", "put", "patch", "delete", "options", "head", "trace"}
+
+     for path, actions in base_schema.get("paths", {}).items():
+         for method, op_data in actions.items():
+             if method.lower() not in HTTP_METHODS or not isinstance(op_data, dict):
+                 continue
+             if not op_data.get("x-metadata"):
+                 raise ValueError(
+                     "Missing x-metadata in OpenAPI schema. Please ensure you're using the custom AutoSchema in your REST_FRAMEWORK settings:\n"
+                     "REST_FRAMEWORK = {\n"
+                     "    'DEFAULT_SCHEMA_CLASS': 'drf_to_mkdoc.utils.schema.AutoSchema',\n"
+                     "}\n"
+                 )
+             operation_id = op_data.get("operationId")
+             if operation_id:
+                 op_map[operation_id] = (path, method)
+
+     return op_map
+
+
+ def _apply_custom_overrides(
+     base_schema: dict,
+     op_map: dict[str, tuple[str, str]],
+     custom_data: dict,
+ ) -> None:
+     """Apply custom overrides to the base schema."""
+     allowed_keys = {"description", "parameters", "requestBody", "responses"}
+
+     for operation_id, overrides in custom_data.items():
+         if operation_id not in op_map:
+             continue
+
+         append_fields = set(overrides.get("append_fields", []))
+         path, method = op_map[operation_id]
+         target_schema = base_schema["paths"][path][method]
+
+         for key in allowed_keys:
+             if key not in overrides:
+                 continue
+
+             custom_value = overrides[key]
+             base_value = target_schema.get(key)
+
+             if key in append_fields:
+                 if isinstance(base_value, list) and isinstance(custom_value, list):
+                     if key == "parameters":
+                         target_schema[key] = _merge_parameters(base_value, custom_value)
+                     else:
+                         target_schema[key].extend(custom_value)
+                 else:
+                     target_schema[key] = custom_value
+             else:
+                 target_schema[key] = custom_value
+
+
+ def get_schema():
+     base_schema = SchemaGenerator().get_schema(request=None, public=True)
+     custom_data = get_custom_schema()
+     if not custom_data:
+         return base_schema
+
+     operation_map = _build_operation_map(base_schema)
+     _apply_custom_overrides(base_schema, operation_map, custom_data)
+
+     return base_schema
+
+
+ class OperationExtractor:
+     """Extracts operation IDs and metadata from OpenAPI schema."""
+
+     _instance = None
+
+     def __new__(cls):
+         if cls._instance is None:
+             cls._instance = super().__new__(cls)
+             cls._instance._initialized = False
+         return cls._instance
+
+     def __init__(self):
+         if not self._initialized:
+             self.schema = get_schema()
+             self._operation_map = None
+             self._initialized = True
+
+     def save_operation_map(self) -> None:
+         """Save operation map to file."""
+         if not self._operation_map:
+             self._operation_map = self._build_operation_map()
+
+         operation_map_path = Path(drf_to_mkdoc_settings.AI_OPERATION_MAP_FILE)
+         # Create parent directories if they don't exist
+         operation_map_path.parent.mkdir(parents=True, exist_ok=True)
+
+         with operation_map_path.open("w", encoding="utf-8") as f:
+             json.dump(self._operation_map, f, indent=2)
+
+     @property
+     def operation_map(self) -> dict[str, dict[str, Any]] | None:
+         """
+         Cache and return operation ID mapping.
+         Returns dict: operation_id -> {"path": str, ...metadata}
+         """
+         if self._operation_map is None:
+             # Try to load from file first
+             self._operation_map = load_json_data(
+                 drf_to_mkdoc_settings.AI_OPERATION_MAP_FILE, raise_not_found=False
+             )
+
+             # If not found or invalid, build and save
+             if self._operation_map is None:
+                 self._operation_map = self._build_operation_map()
+                 self.save_operation_map()
+
+         return self._operation_map
+
+     def _build_operation_map(self) -> dict[str, dict[str, Any]] | None:
+         """Build mapping of operation IDs to paths and metadata."""
+         mapping = {}
+         paths = self.schema.get("paths", {})
+
+         for path, methods in paths.items():
+             for _method, operation in methods.items():
+                 operation_id = operation.get("operationId")
+                 if not operation_id:
+                     continue
+
+                 metadata = operation.get("x-metadata", {})
+                 mapping[operation_id] = {"path": path, **metadata}
+
+         return mapping
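
A sketch of a CUSTOM_SCHEMA_FILE entry that get_custom_schema() would accept; the operationId and values are illustrative, and every query parameter must carry one of the five allowed queryparam_type values:

    # CUSTOM_SCHEMA_FILE (illustrative JSON):
    # {
    #   "users_list": {
    #     "append_fields": ["parameters"],
    #     "description": "List users with cursor pagination.",
    #     "parameters": [{
    #       "name": "search", "in": "query", "description": "Free-text search",
    #       "required": false, "schema": {"type": "string"},
    #       "queryparam_type": "search_fields"
    #     }]
    #   }
    # }
    from drf_to_mkdoc.utils.commons.schema_utils import get_schema

    schema = get_schema()  # drf-spectacular base schema with the overrides above merged in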
@@ -10,14 +10,14 @@ from django.templatetags.static import static
  from rest_framework import serializers
 
  from drf_to_mkdoc.conf.settings import drf_to_mkdoc_settings
- from drf_to_mkdoc.utils.common import (
-     create_safe_filename,
+ from drf_to_mkdoc.utils.commons.file_utils import write_file
+ from drf_to_mkdoc.utils.commons.operation_utils import (
      extract_app_from_operation_id,
      extract_viewset_name_from_operation_id,
      format_method_badge,
-     get_custom_schema,
-     write_file,
  )
+ from drf_to_mkdoc.utils.commons.path_utils import create_safe_filename
+ from drf_to_mkdoc.utils.commons.schema_utils import get_custom_schema
  from drf_to_mkdoc.utils.extractors.query_parameter_extractors import (
      extract_query_parameters_from_view,
  )
@@ -32,17 +32,17 @@ def analyze_serializer_method_field_schema(serializer_class, field_name: str) ->
      """Analyze a SerializerMethodField to determine its actual return type schema."""
      method_name = f"get_{field_name}"
 
-     # Strategy 2: Check type annotations
+     # Strategy 1: Check type annotations
      schema_from_annotations = _extract_schema_from_type_hints(serializer_class, method_name)
      if schema_from_annotations:
          return schema_from_annotations
 
-     # Strategy 3: Analyze method source code
+     # Strategy 2: Analyze method source code
      schema_from_source = _analyze_method_source_code(serializer_class, method_name)
      if schema_from_source:
          return schema_from_source
 
-     # Strategy 4: Runtime analysis (sample execution)
+     # Strategy 3: Runtime analysis (sample execution)
      schema_from_runtime = _analyze_method_runtime(serializer_class, method_name)
      if schema_from_runtime:
          return schema_from_runtime
@@ -51,33 +51,6 @@ def analyze_serializer_method_field_schema(serializer_class, field_name: str) ->
      return {"type": "string"}
 
 
- def _extract_schema_from_decorator(serializer_class, method_name: str) -> dict:
-     """Extract schema from @extend_schema_field decorator if present."""
-     try:
-         method = getattr(serializer_class, method_name, None)
-         if not method:
-             return {}
-
-         # Check if method has the decorator attribute (drf-spectacular)
-         if hasattr(method, "_spectacular_annotation"):
-             annotation = method._spectacular_annotation
-             # Handle OpenApiTypes
-             if hasattr(annotation, "type"):
-                 return {"type": annotation.type}
-             if isinstance(annotation, dict):
-                 return annotation
-
-         # Check for drf-yasg decorator
-         if hasattr(method, "_swagger_serializer_method"):
-             swagger_info = method._swagger_serializer_method
-             if hasattr(swagger_info, "many") and hasattr(swagger_info, "child"):
-                 return {"type": "array", "items": {"type": "object"}}
-
-     except Exception:
-         logger.exception("Failed to extract schema from decorator")
-     return {}
-
-
  def _extract_schema_from_type_hints(serializer_class, method_name: str) -> dict:
      """Extract schema from method type annotations."""
      try: