otterapi 0.0.5__py3-none-any.whl → 0.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- README.md +581 -8
- otterapi/__init__.py +73 -0
- otterapi/cli.py +327 -29
- otterapi/codegen/__init__.py +115 -0
- otterapi/codegen/ast_utils.py +134 -5
- otterapi/codegen/client.py +1271 -0
- otterapi/codegen/codegen.py +1736 -0
- otterapi/codegen/dataframes.py +392 -0
- otterapi/codegen/emitter.py +473 -0
- otterapi/codegen/endpoints.py +2597 -343
- otterapi/codegen/pagination.py +1026 -0
- otterapi/codegen/schema.py +593 -0
- otterapi/codegen/splitting.py +1397 -0
- otterapi/codegen/types.py +1345 -0
- otterapi/codegen/utils.py +180 -1
- otterapi/config.py +1017 -24
- otterapi/exceptions.py +231 -0
- otterapi/openapi/__init__.py +46 -0
- otterapi/openapi/v2/__init__.py +86 -0
- otterapi/openapi/v2/spec.json +1607 -0
- otterapi/openapi/v2/v2.py +1776 -0
- otterapi/openapi/v3/__init__.py +131 -0
- otterapi/openapi/v3/spec.json +1651 -0
- otterapi/openapi/v3/v3.py +1557 -0
- otterapi/openapi/v3_1/__init__.py +133 -0
- otterapi/openapi/v3_1/spec.json +1411 -0
- otterapi/openapi/v3_1/v3_1.py +798 -0
- otterapi/openapi/v3_2/__init__.py +133 -0
- otterapi/openapi/v3_2/spec.json +1666 -0
- otterapi/openapi/v3_2/v3_2.py +777 -0
- otterapi/tests/__init__.py +3 -0
- otterapi/tests/fixtures/__init__.py +455 -0
- otterapi/tests/test_ast_utils.py +680 -0
- otterapi/tests/test_codegen.py +610 -0
- otterapi/tests/test_dataframe.py +1038 -0
- otterapi/tests/test_exceptions.py +493 -0
- otterapi/tests/test_openapi_support.py +616 -0
- otterapi/tests/test_openapi_upgrade.py +215 -0
- otterapi/tests/test_pagination.py +1101 -0
- otterapi/tests/test_splitting_config.py +319 -0
- otterapi/tests/test_splitting_integration.py +427 -0
- otterapi/tests/test_splitting_resolver.py +512 -0
- otterapi/tests/test_splitting_tree.py +525 -0
- otterapi-0.0.6.dist-info/METADATA +627 -0
- otterapi-0.0.6.dist-info/RECORD +48 -0
- {otterapi-0.0.5.dist-info → otterapi-0.0.6.dist-info}/WHEEL +1 -1
- otterapi/codegen/generator.py +0 -358
- otterapi/codegen/openapi_processor.py +0 -27
- otterapi/codegen/type_generator.py +0 -559
- otterapi-0.0.5.dist-info/METADATA +0 -54
- otterapi-0.0.5.dist-info/RECORD +0 -16
- {otterapi-0.0.5.dist-info → otterapi-0.0.6.dist-info}/entry_points.txt +0 -0
otterapi/config.py
CHANGED
|
@@ -1,74 +1,1067 @@
|
|
|
1
|
+
"""Configuration management for OtterAPI.
|
|
2
|
+
|
|
3
|
+
This module provides configuration loading and validation for OtterAPI,
|
|
4
|
+
supporting multiple configuration formats (YAML, JSON, TOML) and
|
|
5
|
+
environment variable overrides.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import json
|
|
1
11
|
import os
|
|
12
|
+
import re
|
|
13
|
+
from enum import Enum
|
|
2
14
|
from pathlib import Path
|
|
15
|
+
from typing import Any, Literal
|
|
3
16
|
|
|
4
|
-
from pydantic import BaseModel, Field
|
|
17
|
+
from pydantic import BaseModel, Field, field_validator
|
|
5
18
|
from pydantic_settings import BaseSettings
|
|
6
19
|
|
|
7
|
-
DEFAULT_FILENAMES = ['otter.yaml', 'otter.yml']
|
|
20
|
+
DEFAULT_FILENAMES = ['otter.yaml', 'otter.yml', 'otter.json']
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def _expand_env_vars(value: str) -> str:
|
|
24
|
+
"""Expand environment variables in a string.
|
|
25
|
+
|
|
26
|
+
Supports both ${VAR} and ${VAR:-default} syntax.
|
|
27
|
+
|
|
28
|
+
Args:
|
|
29
|
+
value: String potentially containing environment variables.
|
|
30
|
+
|
|
31
|
+
Returns:
|
|
32
|
+
String with environment variables expanded.
|
|
33
|
+
"""
|
|
34
|
+
# Pattern matches ${VAR} or ${VAR:-default}
|
|
35
|
+
pattern = r'\$\{([^}:]+)(?::-([^}]*))?\}'
|
|
36
|
+
|
|
37
|
+
def replacer(match: re.Match) -> str:
|
|
38
|
+
var_name = match.group(1)
|
|
39
|
+
default = match.group(2)
|
|
40
|
+
return os.environ.get(var_name, default if default is not None else '')
|
|
41
|
+
|
|
42
|
+
return re.sub(pattern, replacer, value)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def _expand_env_vars_recursive(obj: Any) -> Any:
    """Walk a nested structure and expand env vars in every string.

    Args:
        obj: Data structure (dict, list, or scalar).

    Returns:
        A new structure with all string values having env vars expanded;
        non-string scalars are returned unchanged.
    """
    if isinstance(obj, str):
        return _expand_env_vars(obj)
    if isinstance(obj, dict):
        return {key: _expand_env_vars_recursive(val) for key, val in obj.items()}
    if isinstance(obj, list):
        return [_expand_env_vars_recursive(element) for element in obj]
    # Scalars (int, bool, None, ...) pass through untouched.
    return obj
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class SplitStrategy(str, Enum):
    """How generated endpoints are partitioned into modules.

    Inherits from ``str`` so members compare equal to their raw values.
    """

    NONE = 'none'      # everything lands in a single file
    PATH = 'path'      # group by URL path segments
    TAG = 'tag'        # group by the endpoints' OpenAPI tags
    HYBRID = 'hybrid'  # combination of tag and path grouping
    CUSTOM = 'custom'  # rely solely on a user-supplied module_map
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
class EndpointDataFrameConfig(BaseModel):
    """Per-endpoint DataFrame configuration.

    Allows overriding the default DataFrame settings for specific endpoints.

    Attributes:
        enabled: Override whether to generate DataFrame methods for this endpoint.
        path: JSON path to extract data from response (e.g., "data.users").
        pandas: Override whether to generate _df method (pandas).
        polars: Override whether to generate _pl method (polars).
    """

    # None on any field means "inherit from the global DataFrameConfig";
    # an explicit True/False forces that behavior for this endpoint.
    enabled: bool | None = Field(
        default=None,
        description='Override whether to generate DataFrame methods.',
    )

    path: str | None = Field(
        default=None,
        description='JSON path to extract data from response.',
    )

    pandas: bool | None = Field(
        default=None,
        description='Override whether to generate _df method (pandas).',
    )

    polars: bool | None = Field(
        default=None,
        description='Override whether to generate _pl method (polars).',
    )

    # Reject unknown keys so config-file typos fail fast at load time.
    model_config = {'extra': 'forbid'}
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class PaginationStyle(str, Enum):
    """The pagination scheme an API endpoint exposes.

    Inherits from ``str`` so members compare equal to their raw values.
    """

    OFFSET = 'offset'  # offset/limit query parameters
    CURSOR = 'cursor'  # opaque cursor token pagination
    PAGE = 'page'      # page-number based pagination
    LINK = 'link'      # Link header pagination (RFC 5988)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
class EndpointPaginationConfig(BaseModel):
    """Per-endpoint pagination configuration.

    Allows configuring pagination behavior for specific endpoints.

    Attributes:
        enabled: Override whether to generate pagination methods for this endpoint.
        style: Pagination style for this endpoint.
        offset_param: Name of offset parameter (for offset style).
        limit_param: Name of limit parameter.
        cursor_param: Name of cursor parameter (for cursor style).
        page_param: Name of page parameter (for page style).
        per_page_param: Name of per_page parameter (for page style).
        data_path: JSON path to items array in response.
        total_path: JSON path to total count in response.
        next_cursor_path: JSON path to next cursor in response (for cursor style).
        total_pages_path: JSON path to total pages in response (for page style).
        default_page_size: Default page size for this endpoint.
        max_page_size: Maximum page size for this endpoint.
    """

    # None on any field below means "fall back to the global PaginationConfig
    # default" when the configuration is resolved.
    enabled: bool | None = Field(
        default=None,
        description='Override whether to generate pagination methods.',
    )

    style: PaginationStyle | Literal['offset', 'cursor', 'page', 'link'] | None = Field(
        default=None,
        description='Pagination style for this endpoint.',
    )

    # Parameter mappings
    offset_param: str | None = Field(
        default=None,
        description='Name of offset parameter.',
    )

    limit_param: str | None = Field(
        default=None,
        description='Name of limit parameter.',
    )

    cursor_param: str | None = Field(
        default=None,
        description='Name of cursor parameter.',
    )

    page_param: str | None = Field(
        default=None,
        description='Name of page parameter.',
    )

    per_page_param: str | None = Field(
        default=None,
        description='Name of per_page parameter.',
    )

    # Response mappings
    data_path: str | None = Field(
        default=None,
        description='JSON path to items array in response.',
    )

    total_path: str | None = Field(
        default=None,
        description='JSON path to total count in response.',
    )

    next_cursor_path: str | None = Field(
        default=None,
        description='JSON path to next cursor in response.',
    )

    total_pages_path: str | None = Field(
        default=None,
        description='JSON path to total pages in response.',
    )

    # Limits
    default_page_size: int | None = Field(
        default=None,
        description='Default page size for this endpoint.',
    )

    max_page_size: int | None = Field(
        default=None,
        description='Maximum page size for this endpoint.',
    )

    # Reject unknown keys so config-file typos fail fast at load time.
    model_config = {'extra': 'forbid'}

    @field_validator('style', mode='before')
    @classmethod
    def normalize_style(cls, v: Any) -> PaginationStyle | None:
        """Convert string style to enum."""
        # mode='before' runs prior to pydantic's type coercion, so raw
        # YAML/JSON strings are accepted case-insensitively here.
        if v is None:
            return None
        if isinstance(v, str):
            return PaginationStyle(v.lower())
        return v
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
class PaginationConfig(BaseModel):
    """Global pagination configuration.

    When enabled, pagination methods are generated for endpoints that are
    either explicitly configured or (with ``auto_detect``) recognised by
    their parameter names.

    Attributes:
        enabled: Enable pagination method generation.
        auto_detect: Automatically detect and enable pagination for endpoints
            that have pagination parameters (offset/limit, cursor, page/per_page).
        default_style: Default pagination style when not explicitly configured.
        default_page_size: Default page size for iteration.
        default_data_path: Default JSON path to items array.
        endpoints: Per-endpoint pagination configuration.
    """

    enabled: bool = Field(
        default=False,
        description='Enable pagination method generation.',
    )

    auto_detect: bool = Field(
        default=True,
        description=(
            'Automatically detect and enable pagination for endpoints '
            'that have pagination parameters (offset/limit, cursor, page/per_page). '
            'When enabled, endpoints with matching parameters will automatically '
            'get pagination methods generated without explicit configuration.'
        ),
    )

    default_style: PaginationStyle | Literal['offset', 'cursor', 'page', 'link'] = (
        Field(
            default=PaginationStyle.OFFSET,
            description='Default pagination style.',
        )
    )

    default_page_size: int = Field(
        default=100,
        description='Default page size for iteration.',
    )

    default_data_path: str | None = Field(
        default=None,
        description='Default JSON path to items array.',
    )

    default_total_path: str | None = Field(
        default=None,
        description='Default JSON path to total count in response.',
    )

    # Default parameter names
    default_offset_param: str = Field(
        default='offset',
        description='Default name of offset parameter.',
    )

    default_limit_param: str = Field(
        default='limit',
        description='Default name of limit parameter.',
    )

    default_cursor_param: str = Field(
        default='cursor',
        description='Default name of cursor parameter.',
    )

    default_page_param: str = Field(
        default='page',
        description='Default name of page parameter.',
    )

    default_per_page_param: str = Field(
        default='per_page',
        description='Default name of per_page parameter.',
    )

    # Per-endpoint configuration
    endpoints: dict[str, EndpointPaginationConfig] = Field(
        default_factory=dict,
        description='Per-endpoint pagination configuration.',
    )

    # Reject unknown keys so config-file typos fail fast at load time.
    model_config = {'extra': 'forbid'}

    @field_validator('default_style', mode='before')
    @classmethod
    def normalize_default_style(cls, v: Any) -> PaginationStyle:
        """Coerce a raw string style into a PaginationStyle member."""
        return PaginationStyle(v.lower()) if isinstance(v, str) else v

    def should_generate_for_endpoint(
        self,
        endpoint_name: str,
        endpoint_parameters: list | None = None,
    ) -> tuple[bool, ResolvedPaginationConfig | None]:
        """Decide whether an endpoint gets pagination methods.

        Args:
            endpoint_name: The name of the endpoint function.
            endpoint_parameters: Optional list of endpoint parameters for
                auto-detection.

        Returns:
            A tuple of (should_generate, resolved_config); the config is
            None whenever should_generate is False.
        """
        if not self.enabled:
            return False, None

        override = self.endpoints.get(endpoint_name)

        if override is None:
            # No explicit entry: fall back to parameter-based auto-detection.
            if not self.auto_detect or endpoint_parameters is None:
                return False, None

            detected = self._detect_pagination_style(endpoint_parameters)
            if detected is None:
                return False, None

            # Auto-detected endpoints use the global defaults wholesale.
            return True, ResolvedPaginationConfig(
                style=detected,
                offset_param=self.default_offset_param,
                limit_param=self.default_limit_param,
                cursor_param=self.default_cursor_param,
                page_param=self.default_page_param,
                per_page_param=self.default_per_page_param,
                data_path=self.default_data_path,
                total_path=self.default_total_path,
                next_cursor_path=None,
                total_pages_path=None,
                default_page_size=self.default_page_size,
                max_page_size=None,
            )

        # Explicitly opted out.
        if override.enabled is False:
            return False, None

        style = override.style or self.default_style
        if isinstance(style, str):
            style = PaginationStyle(style.lower())

        # Merge the per-endpoint overrides on top of the global defaults.
        return True, ResolvedPaginationConfig(
            style=style,
            offset_param=override.offset_param or self.default_offset_param,
            limit_param=override.limit_param or self.default_limit_param,
            cursor_param=override.cursor_param or self.default_cursor_param,
            page_param=override.page_param or self.default_page_param,
            per_page_param=override.per_page_param or self.default_per_page_param,
            data_path=override.data_path or self.default_data_path,
            total_path=override.total_path or self.default_total_path,
            next_cursor_path=override.next_cursor_path,
            total_pages_path=override.total_pages_path,
            default_page_size=override.default_page_size or self.default_page_size,
            max_page_size=override.max_page_size,
        )

    def _detect_pagination_style(self, parameters: list) -> PaginationStyle | None:
        """Infer a pagination style from an endpoint's parameter names.

        Args:
            parameters: List of endpoint parameter objects.

        Returns:
            Detected PaginationStyle, or None when no known parameter
            pair is present.
        """
        names = {param.name for param in parameters if hasattr(param, 'name')}

        # Checked in priority order; offset/limit wins over cursor/limit.
        candidates = (
            (PaginationStyle.OFFSET, self.default_offset_param, self.default_limit_param),
            (PaginationStyle.CURSOR, self.default_cursor_param, self.default_limit_param),
            (PaginationStyle.PAGE, self.default_page_param, self.default_per_page_param),
        )
        for style, first, second in candidates:
            if first in names and second in names:
                return style
        return None
|
|
423
|
+
|
|
424
|
+
|
|
425
|
+
class ResolvedPaginationConfig(BaseModel):
    """Resolved pagination configuration with all defaults applied.

    This is the configuration used during code generation after
    merging endpoint-specific config with global defaults.
    """

    # Unlike EndpointPaginationConfig, the parameter-name fields here are
    # required and concrete; only the response paths and max_page_size
    # remain genuinely optional after resolution.
    style: PaginationStyle
    offset_param: str
    limit_param: str
    cursor_param: str
    page_param: str
    per_page_param: str
    data_path: str | None
    total_path: str | None
    next_cursor_path: str | None
    total_pages_path: str | None
    default_page_size: int
    max_page_size: int | None

    # Reject unknown keys so construction mistakes fail fast.
    model_config = {'extra': 'forbid'}
|
|
446
|
+
|
|
447
|
+
|
|
448
|
+
class DataFrameConfig(BaseModel):
    """Configuration for DataFrame conversion methods.

    When enabled, generates additional endpoint methods that return
    pandas DataFrames (_df suffix) and/or polars DataFrames (_pl suffix).

    Attributes:
        enabled: Enable DataFrame method generation.
        pandas: Generate _df methods returning pandas DataFrames.
        polars: Generate _pl methods returning polars DataFrames.
        default_path: Default JSON path for extracting list data from responses.
        include_all: Generate DataFrame methods for all list-returning endpoints.
        endpoints: Per-endpoint configuration overrides.
    """

    enabled: bool = Field(
        default=False,
        description='Enable DataFrame method generation.',
    )

    pandas: bool = Field(
        default=True,
        description='Generate _df methods (pandas DataFrames).',
    )

    polars: bool = Field(
        default=False,
        description='Generate _pl methods (polars DataFrames).',
    )

    default_path: str | None = Field(
        default=None,
        description='Default JSON path for extracting list data.',
    )

    include_all: bool = Field(
        default=True,
        description='Generate DataFrame methods for all list-returning endpoints.',
    )

    endpoints: dict[str, EndpointDataFrameConfig] = Field(
        default_factory=dict,
        description='Per-endpoint configuration overrides.',
    )

    # Reject unknown keys so config-file typos fail fast at load time.
    model_config = {'extra': 'forbid'}

    def should_generate_for_endpoint(
        self,
        endpoint_name: str,
        returns_list: bool = True,
    ) -> tuple[bool, bool, str | None]:
        """Decide which DataFrame methods an endpoint should get.

        Args:
            endpoint_name: The name of the endpoint function.
            returns_list: Whether the endpoint returns a list type.

        Returns:
            A tuple of (generate_pandas, generate_polars, path) indicating
            which methods to generate and what JSON path to use.
        """
        disabled = (False, False, None)

        if not self.enabled:
            return disabled

        override = self.endpoints.get(endpoint_name)

        if override is None:
            # Defaults only: require include_all and a list-shaped response.
            if self.include_all and returns_list:
                return self.pandas, self.polars, self.default_path
            return disabled

        # Per-endpoint opt-out beats everything else.
        if override.enabled is False:
            return disabled

        # None on an override field means "use the global setting".
        gen_pandas = self.pandas if override.pandas is None else override.pandas
        gen_polars = self.polars if override.polars is None else override.polars
        path = self.default_path if override.path is None else override.path

        # An explicit enabled=True opt-in bypasses the returns_list check;
        # otherwise the usual include_all/returns_list gate applies.
        if override.enabled is True or (self.include_all and returns_list):
            return gen_pandas, gen_polars, path
        return disabled
|
|
560
|
+
|
|
561
|
+
|
|
562
|
+
class ModuleDefinition(BaseModel):
    """Definition for a single module or module group.

    Supports both flat modules (with just paths) and nested module hierarchies.

    Attributes:
        paths: List of glob patterns to match endpoint paths.
        modules: Nested submodules (recursive structure).
        strip_prefix: Prefix to strip from paths in this group.
        package_prefix: Prefix for generated imports.
        file_name: Override for the generated file name.
        description: Module docstring.
    """

    paths: list[str] = Field(default_factory=list)
    # Self-referential annotation: resolved by the model_rebuild() call
    # that follows the class definition.
    modules: dict[str, ModuleDefinition] = Field(default_factory=dict)
    strip_prefix: str | None = None
    package_prefix: str | None = None
    file_name: str | None = None
    description: str | None = None

    # Reject unknown keys so config-file typos fail fast at load time.
    model_config = {'extra': 'forbid'}
|
|
584
|
+
|
|
585
|
+
|
|
586
|
+
# Rebuild model to resolve forward references
# (ModuleDefinition.modules refers to ModuleDefinition itself).
ModuleDefinition.model_rebuild()


# Type alias for module_map values which can be:
# - A list of path patterns (shorthand)
# - A single path pattern string (shorthand)
# - A full ModuleDefinition
# Raw dicts are accepted too and normalized by _normalize_module_map.
ModuleMapValue = ModuleDefinition | list[str] | str | dict
|
|
595
|
+
|
|
596
|
+
|
|
597
|
+
class ModuleSplitConfig(BaseModel):
    """Configuration for splitting endpoints into submodules.

    Attributes:
        enabled: Whether module splitting is enabled.
        strategy: The splitting strategy to use.
        global_strip_prefixes: Prefixes to strip from all paths before matching.
        path_depth: Number of path segments to use for path-based strategy.
        min_endpoints: Minimum endpoints required per module before consolidating.
        fallback_module: Module name for endpoints that don't match any rule.
        module_map: Custom mapping of module names to path patterns or definitions.
        flat_structure: If True, generate flat file structure instead of directories.
        split_models: Whether to split models per module (advanced).
        shared_models_module: Module name for shared models when split_models is True.
    """

    enabled: bool = False
    strategy: SplitStrategy | Literal['none', 'path', 'tag', 'hybrid', 'custom'] = (
        SplitStrategy.HYBRID
    )
    # Common versioned API prefixes stripped before any path matching.
    global_strip_prefixes: list[str] = Field(
        default_factory=lambda: ['/api', '/api/v1', '/api/v2', '/api/v3']
    )
    # Constrained: between 1 and 5 path segments.
    path_depth: int = Field(default=1, ge=1, le=5)
    min_endpoints: int = Field(default=2, ge=1)
    fallback_module: str = 'common'
    module_map: dict[str, ModuleMapValue] = Field(default_factory=dict)
    flat_structure: bool = False
    split_models: bool = False
    shared_models_module: str = '_models'

    # Reject unknown keys so config-file typos fail fast at load time.
    model_config = {'extra': 'forbid'}

    @field_validator('strategy', mode='before')
    @classmethod
    def normalize_strategy(cls, v: Any) -> SplitStrategy:
        """Convert string strategy to enum."""
        # mode='before' runs prior to pydantic's type coercion, so raw
        # YAML/JSON strings are accepted case-insensitively here.
        if isinstance(v, str):
            return SplitStrategy(v.lower())
        return v

    @field_validator('module_map', mode='before')
    @classmethod
    def normalize_module_map_before(cls, v: Any) -> dict[str, ModuleDefinition]:
        """Normalize shorthand module_map syntax to full ModuleDefinition objects.

        Handles:
        - {"users": ["/user/*"]} → ModuleDefinition(paths=["/user/*"])
        - {"users": "/user/*"} → ModuleDefinition(paths=["/user/*"])
        - Nested dicts without paths/modules → nested ModuleDefinition
        """
        if isinstance(v, dict):
            return _normalize_module_map(v)
        return v
|
|
651
|
+
|
|
652
|
+
|
|
653
|
+
def _is_module_definition_dict(value: dict) -> bool:
|
|
654
|
+
"""Check if a dict looks like a ModuleDefinition (has known keys)."""
|
|
655
|
+
known_keys = {
|
|
656
|
+
'paths',
|
|
657
|
+
'modules',
|
|
658
|
+
'strip_prefix',
|
|
659
|
+
'package_prefix',
|
|
660
|
+
'file_name',
|
|
661
|
+
'description',
|
|
662
|
+
}
|
|
663
|
+
return bool(set(value.keys()) & known_keys)
|
|
664
|
+
|
|
665
|
+
|
|
666
|
+
def _normalize_module_map(
    module_map: dict[str, ModuleMapValue],
) -> dict[str, ModuleDefinition]:
    """Recursively coerce every module_map entry into a ModuleDefinition.

    Accepts the shorthand forms (single pattern string, list of patterns,
    raw dict) as well as existing ModuleDefinition instances, whose nested
    ``modules`` are normalized in turn.

    Raises:
        ValueError: If a value is none of str, list, dict, or ModuleDefinition.
    """
    result: dict[str, ModuleDefinition] = {}

    for key, value in module_map.items():
        if isinstance(value, ModuleDefinition):
            # Already the right type; still normalize any nested modules.
            if value.modules:
                value = value.model_copy(
                    update={'modules': _normalize_module_map(value.modules)}
                )
            result[key] = value
        elif isinstance(value, str):
            # Shorthand: a single glob pattern.
            result[key] = ModuleDefinition(paths=[value])
        elif isinstance(value, list):
            # Shorthand: a list of glob patterns.
            result[key] = ModuleDefinition(paths=value)
        elif isinstance(value, dict):
            if _is_module_definition_dict(value):
                # Dict spelled out with ModuleDefinition keys: parse it,
                # then normalize any nested modules it declares.
                parsed = ModuleDefinition.model_validate(value)
                if parsed.modules:
                    parsed = parsed.model_copy(
                        update={'modules': _normalize_module_map(parsed.modules)}
                    )
                result[key] = parsed
            else:
                # Dict of nested modules, e.g.
                # {"identity": {"users": [...], "auth": [...]}}.
                result[key] = ModuleDefinition(
                    modules=_normalize_module_map(value)
                )
        else:
            raise ValueError(
                f"Invalid module_map value for '{key}': expected str, list, dict, "
                f'or ModuleDefinition, got {type(value).__name__}'
            )

    return result
|
|
8
710
|
|
|
9
711
|
|
|
10
712
|
class DocumentConfig(BaseModel):
    """Configuration for a single OpenAPI document to be processed.

    Attributes:
        source: Path or URL to the OpenAPI document.
        output: Output directory for the generated code.
        base_url: Optional base URL override for the API.
        include_paths: Optional glob patterns selecting which endpoint paths
            to generate; all paths are included when None.
        exclude_paths: Optional glob patterns for endpoint paths to skip,
            applied after include_paths filtering.
        models_file: Name of the generated models file.
        models_import_path: Optional import path for models in endpoints.
        endpoints_file: Name of the generated endpoints file.
        generate_async: Whether to generate async endpoint functions.
        generate_sync: Whether to generate sync endpoint functions.
        client_class_name: Optional name for a generated client class.
        module_split: Configuration for splitting endpoints into submodules.
        dataframe: Configuration for DataFrame conversion methods.
        pagination: Configuration for automatic pagination.
    """

    source: str = Field(..., description='Path or URL to the OpenAPI document.')

    base_url: str | None = Field(
        None,
        description='Optional base URL to override servers defined in the OpenAPI document.',
    )

    output: str = Field(..., description='Output directory for the generated code.')

    include_paths: list[str] | None = Field(
        default=None,
        description=(
            'List of path patterns to include. Only endpoints matching these patterns '
            'will be generated. Supports glob patterns (e.g., "/api/v1/users/*"). '
            'If None, all paths are included.'
        ),
    )

    exclude_paths: list[str] | None = Field(
        default=None,
        description=(
            'List of path patterns to exclude. Endpoints matching these patterns '
            'will be skipped. Supports glob patterns (e.g., "/internal/*"). '
            'Applied after include_paths filtering.'
        ),
    )

    models_file: str = Field('models.py', description='File name for generated models.')

    models_import_path: str | None = Field(
        None, description='Optional import path for generated models.'
    )

    endpoints_file: str = Field(
        'endpoints.py', description='File name for generated endpoints.'
    )

    generate_async: bool = Field(
        True, description='Whether to generate async endpoint functions.'
    )

    generate_sync: bool = Field(
        True, description='Whether to generate sync endpoint functions.'
    )

    client_class_name: str | None = Field(
        None, description='Optional name for a generated client class.'
    )

    # Nested configuration objects use default_factory so each document gets
    # its own instance rather than a shared mutable default.
    module_split: ModuleSplitConfig = Field(
        default_factory=ModuleSplitConfig,
        description='Configuration for splitting endpoints into submodules.',
    )

    dataframe: DataFrameConfig = Field(
        default_factory=DataFrameConfig,
        description='Configuration for DataFrame conversion methods.',
    )

    pagination: PaginationConfig = Field(
        default_factory=PaginationConfig,
        description='Configuration for automatic pagination.',
    )

    @field_validator('source')
    @classmethod
    def validate_source(cls, v: str) -> str:
        """Validate that source is a non-empty string."""
        if not v or not v.strip():
            raise ValueError('source cannot be empty')
        return v.strip()

    @field_validator('output')
    @classmethod
    def validate_output(cls, v: str) -> str:
        """Validate that output is a non-empty string."""
        if not v or not v.strip():
            raise ValueError('output cannot be empty')
        return v.strip()

    @field_validator('models_file', 'endpoints_file')
    @classmethod
    def validate_filename(cls, v: str) -> str:
        """Validate that file names end with .py."""
        if not v.endswith('.py'):
            raise ValueError(f'File name must end with .py, got: {v}')
        return v
|
|
815
|
+
|
|
34
816
|
|
|
35
817
|
class CodegenConfig(BaseSettings):
    """Main configuration for OtterAPI code generation.

    Attributes:
        documents: List of OpenAPI documents to process.
        generate_endpoints: Whether to generate endpoint functions.
        format_output: Whether to format generated code with black/ruff.
        validate_output: Whether to validate generated code syntax.
        create_py_typed: Whether to create py.typed marker files.
    """

    documents: list[DocumentConfig] = Field(
        ..., description='List of OpenAPI documents to process.'
    )

    generate_endpoints: bool = Field(
        True, description='Whether to generate endpoint functions.'
    )

    format_output: bool = Field(
        True, description='Whether to format generated code with black/ruff.'
    )

    validate_output: bool = Field(
        True, description='Whether to validate generated code syntax.'
    )

    create_py_typed: bool = Field(
        True, description='Whether to create py.typed marker files.'
    )

    @field_validator('documents')
    @classmethod
    def validate_documents(cls, v: list[DocumentConfig]) -> list[DocumentConfig]:
        """Validate that at least one document is configured."""
        # An empty list would make code generation a silent no-op, so fail loudly.
        if not v:
            raise ValueError('At least one document must be configured')
        return v

    # Settings may also be supplied via environment variables, e.g.
    # OTTER_FORMAT_OUTPUT, with '__' delimiting nested fields.
    model_config = {
        'env_prefix': 'OTTER_',
        'env_nested_delimiter': '__',
    }
|
|
860
|
+
|
|
44
861
|
|
|
45
862
|
def load_yaml(path: str | Path) -> dict:
    """Read a YAML configuration file and return its parsed contents.

    Args:
        path: Location of the YAML file on disk.

    Returns:
        The configuration dictionary, with environment variables expanded.

    Raises:
        FileNotFoundError: If the file doesn't exist.
        yaml.YAMLError: If the file is not valid YAML.
    """
    # Imported lazily so PyYAML is only required when YAML configs are used.
    import yaml

    path = Path(path)
    if not path.exists():
        raise FileNotFoundError(f'Configuration file not found: {path}')

    parsed = yaml.safe_load(path.read_text(encoding='utf-8'))

    # Substitute ${VAR}-style environment references before handing back.
    return _expand_env_vars_recursive(parsed)
|
|
886
|
+
|
|
887
|
+
|
|
888
|
+
def load_json(path: str | Path) -> dict:
    """Read a JSON configuration file and return its parsed contents.

    Args:
        path: Location of the JSON file on disk.

    Returns:
        The configuration dictionary, with environment variables expanded.

    Raises:
        FileNotFoundError: If the file doesn't exist.
        json.JSONDecodeError: If the file is not valid JSON.
    """
    path = Path(path)
    if not path.exists():
        raise FileNotFoundError(f'Configuration file not found: {path}')

    parsed = json.loads(path.read_text(encoding='utf-8'))

    # Substitute ${VAR}-style environment references before handing back.
    return _expand_env_vars_recursive(parsed)
|
|
910
|
+
|
|
911
|
+
|
|
912
|
+
def load_toml(path: str | Path) -> dict:
    """Read otterapi settings out of a TOML file (typically pyproject.toml).

    Args:
        path: Location of the TOML file on disk.

    Returns:
        The [tool.otterapi] table, with environment variables expanded.

    Raises:
        FileNotFoundError: If the file doesn't exist.
        KeyError: If the file doesn't contain otterapi configuration.
    """
    import tomllib

    path = Path(path)
    if not path.exists():
        raise FileNotFoundError(f'Configuration file not found: {path}')

    document = tomllib.loads(path.read_text(encoding='utf-8'))

    # Only the [tool.otterapi] table is relevant; everything else is ignored.
    tool_tables = document.get('tool', {})
    if 'otterapi' not in tool_tables:
        raise KeyError(f'No [tool.otterapi] section found in {path}')

    # Substitute ${VAR}-style environment references before handing back.
    return _expand_env_vars_recursive(tool_tables['otterapi'])
|
|
941
|
+
|
|
942
|
+
|
|
943
|
+
def load_config_file(path: str | Path) -> dict:
    """Load a configuration file, choosing the parser from its extension.

    Files with an unrecognized suffix are sniffed by their first character:
    '{' is treated as JSON, '[' as TOML, anything else as YAML.

    Args:
        path: Path to the configuration file.

    Returns:
        Parsed configuration dictionary.

    Raises:
        FileNotFoundError: If the file doesn't exist.
        ValueError: If the file format is not supported.
    """
    path = Path(path)

    if not path.exists():
        raise FileNotFoundError(f'Configuration file not found: {path}')

    # Extension-based dispatch; comparison is case-insensitive.
    loaders = {
        '.yaml': load_yaml,
        '.yml': load_yaml,
        '.json': load_json,
        '.toml': load_toml,
    }
    loader = loaders.get(path.suffix.lower())
    if loader is not None:
        return loader(path)

    # Unknown extension: fall back to sniffing the leading character.
    content = path.read_text(encoding='utf-8').strip()
    if content.startswith('{'):
        return load_json(path)
    if content.startswith('['):
        return load_toml(path)
    return load_yaml(path)
|
|
49
978
|
|
|
50
979
|
|
|
51
980
|
def get_config(path: str | None = None) -> CodegenConfig:
    """Load OtterAPI configuration from a file or environment.

    This function attempts to load configuration in the following order:
    1. From the specified path (if provided)
    2. From default config files in the current directory
    3. From pyproject.toml [tool.otterapi] section
    4. From environment variables

    Args:
        path: Optional path to a configuration file.

    Returns:
        Validated CodegenConfig object.

    Raises:
        FileNotFoundError: If no configuration can be found.
        pydantic.ValidationError: If the configuration is invalid.
    """
    # An explicit path short-circuits all discovery.
    if path:
        return CodegenConfig.model_validate(load_config_file(path))

    working_dir = Path.cwd()

    # Look for one of the well-known config file names in the current directory.
    for candidate_name in DEFAULT_FILENAMES:
        candidate = working_dir / candidate_name
        if candidate.exists():
            return CodegenConfig.model_validate(load_config_file(candidate))

    # Fall back to a [tool.otterapi] table inside pyproject.toml, if any.
    pyproject = working_dir / 'pyproject.toml'
    if pyproject.exists():
        try:
            return CodegenConfig.model_validate(load_toml(pyproject))
        except KeyError:
            pass  # No otterapi section, continue looking

    # Last resort: assemble a minimal single-document config from env vars.
    env_source = os.environ.get('OTTER_SOURCE')
    env_output = os.environ.get('OTTER_OUTPUT')
    if env_source and env_output:
        document = DocumentConfig(
            source=env_source,
            output=env_output,
            base_url=os.environ.get('OTTER_BASE_URL'),
            models_file=os.environ.get('OTTER_MODELS_FILE', 'models.py'),
            endpoints_file=os.environ.get('OTTER_ENDPOINTS_FILE', 'endpoints.py'),
        )
        return CodegenConfig(documents=[document])

    raise FileNotFoundError(
        'No configuration found. Create an otter.yml file, add [tool.otterapi] '
        'to pyproject.toml, or set OTTER_SOURCE and OTTER_OUTPUT environment variables.'
    )
|
|
67
1045
|
|
|
68
|
-
pyproject = tomllib.loads(path.read_text())
|
|
69
|
-
tools = pyproject.get('tool', {})
|
|
70
1046
|
|
|
71
|
-
|
|
72
|
-
|
|
1047
|
+
def create_default_config() -> dict:
    """Build a starter configuration dictionary.

    Returns:
        Dictionary with default configuration values, pointing at the public
        Petstore demo API so a fresh project works out of the box.
    """
    # Single example document; generation of both sync and async clients is on.
    default_document = {
        'source': 'https://petstore3.swagger.io/api/v3/openapi.json',
        'output': './client',
        'models_file': 'models.py',
        'endpoints_file': 'endpoints.py',
        'generate_async': True,
        'generate_sync': True,
    }
    return {
        'documents': [default_document],
        'format_output': True,
        'validate_output': True,
        'create_py_typed': True,
    }
|