otterapi 0.0.5__py3-none-any.whl → 0.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. README.md +581 -8
  2. otterapi/__init__.py +73 -0
  3. otterapi/cli.py +327 -29
  4. otterapi/codegen/__init__.py +115 -0
  5. otterapi/codegen/ast_utils.py +134 -5
  6. otterapi/codegen/client.py +1271 -0
  7. otterapi/codegen/codegen.py +1736 -0
  8. otterapi/codegen/dataframes.py +392 -0
  9. otterapi/codegen/emitter.py +473 -0
  10. otterapi/codegen/endpoints.py +2597 -343
  11. otterapi/codegen/pagination.py +1026 -0
  12. otterapi/codegen/schema.py +593 -0
  13. otterapi/codegen/splitting.py +1397 -0
  14. otterapi/codegen/types.py +1345 -0
  15. otterapi/codegen/utils.py +180 -1
  16. otterapi/config.py +1017 -24
  17. otterapi/exceptions.py +231 -0
  18. otterapi/openapi/__init__.py +46 -0
  19. otterapi/openapi/v2/__init__.py +86 -0
  20. otterapi/openapi/v2/spec.json +1607 -0
  21. otterapi/openapi/v2/v2.py +1776 -0
  22. otterapi/openapi/v3/__init__.py +131 -0
  23. otterapi/openapi/v3/spec.json +1651 -0
  24. otterapi/openapi/v3/v3.py +1557 -0
  25. otterapi/openapi/v3_1/__init__.py +133 -0
  26. otterapi/openapi/v3_1/spec.json +1411 -0
  27. otterapi/openapi/v3_1/v3_1.py +798 -0
  28. otterapi/openapi/v3_2/__init__.py +133 -0
  29. otterapi/openapi/v3_2/spec.json +1666 -0
  30. otterapi/openapi/v3_2/v3_2.py +777 -0
  31. otterapi/tests/__init__.py +3 -0
  32. otterapi/tests/fixtures/__init__.py +455 -0
  33. otterapi/tests/test_ast_utils.py +680 -0
  34. otterapi/tests/test_codegen.py +610 -0
  35. otterapi/tests/test_dataframe.py +1038 -0
  36. otterapi/tests/test_exceptions.py +493 -0
  37. otterapi/tests/test_openapi_support.py +616 -0
  38. otterapi/tests/test_openapi_upgrade.py +215 -0
  39. otterapi/tests/test_pagination.py +1101 -0
  40. otterapi/tests/test_splitting_config.py +319 -0
  41. otterapi/tests/test_splitting_integration.py +427 -0
  42. otterapi/tests/test_splitting_resolver.py +512 -0
  43. otterapi/tests/test_splitting_tree.py +525 -0
  44. otterapi-0.0.6.dist-info/METADATA +627 -0
  45. otterapi-0.0.6.dist-info/RECORD +48 -0
  46. {otterapi-0.0.5.dist-info → otterapi-0.0.6.dist-info}/WHEEL +1 -1
  47. otterapi/codegen/generator.py +0 -358
  48. otterapi/codegen/openapi_processor.py +0 -27
  49. otterapi/codegen/type_generator.py +0 -559
  50. otterapi-0.0.5.dist-info/METADATA +0 -54
  51. otterapi-0.0.5.dist-info/RECORD +0 -16
  52. {otterapi-0.0.5.dist-info → otterapi-0.0.6.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,392 @@
1
+ """DataFrame utilities for OtterAPI code generation.
2
+
3
+ This module provides utilities for:
4
+ - Generating the _dataframe.py utility file for runtime DataFrame conversion
5
+ - Determining DataFrame generation configuration for endpoints
6
+ - Checking if endpoints return list types suitable for DataFrame conversion
7
+ """
8
+
9
+ import ast
10
+ from dataclasses import dataclass
11
+ from pathlib import Path
12
+ from typing import TYPE_CHECKING
13
+
14
+ from upath import UPath
15
+
16
+ if TYPE_CHECKING:
17
+ from otterapi.codegen.types import Endpoint, Type
18
+ from otterapi.config import DataFrameConfig
19
+
20
# Public API of this module; keep this list in sync with the definitions below.
__all__ = [
    'DATAFRAME_MODULE_CONTENT',
    'DataFrameMethodConfig',
    'generate_dataframe_module',
    'get_dataframe_imports',
    'get_dataframe_type_checking_imports',
    'get_dataframe_config_for_endpoint',
    'get_dataframe_config_from_parts',
    'endpoint_returns_list',
    'response_type_returns_list',
]
31
+
32
+
33
+ # =============================================================================
34
+ # DataFrame Module Content
35
+ # =============================================================================
36
+
37
# Literal source of the `_dataframe.py` helper module that is written into each
# generated client by `generate_dataframe_module` below. The emitted module is
# self-contained: pandas/polars are imported lazily inside `to_pandas` /
# `to_polars`, so neither library is a hard dependency of generated code.
# NOTE: this string is runtime output — do not reformat or reword it.
DATAFRAME_MODULE_CONTENT = '''\
"""DataFrame conversion utilities for OtterAPI generated clients."""

from typing import Any


def _to_dict(obj: Any) -> Any:
    """Convert an object to a dictionary if it has a model_dump method (Pydantic).

    Args:
        obj: The object to convert.

    Returns:
        A dictionary if the object is a Pydantic model, otherwise the original object.
    """
    if hasattr(obj, 'model_dump'):
        return obj.model_dump()
    elif hasattr(obj, 'dict'):
        # Pydantic v1 compatibility
        return obj.dict()
    return obj


def _normalize_data(data: list | dict) -> list[dict]:
    """Normalize data to a list of dictionaries for DataFrame conversion.

    Handles:
    - Lists of Pydantic models
    - Lists of dictionaries
    - Single dictionaries
    - Single Pydantic models

    Args:
        data: The data to normalize.

    Returns:
        A list of dictionaries.
    """
    if isinstance(data, dict):
        return [data]

    if isinstance(data, list):
        if not data:
            return []
        # Check if items need conversion (Pydantic models)
        first = data[0]
        if hasattr(first, 'model_dump') or hasattr(first, 'dict'):
            return [_to_dict(item) for item in data]
        return data

    # Single Pydantic model
    if hasattr(data, 'model_dump') or hasattr(data, 'dict'):
        return [_to_dict(data)]

    raise TypeError(
        f"Cannot convert {type(data).__name__} to DataFrame. "
        f"Expected list, dict, or Pydantic model."
    )


def extract_path(data: dict | list, path: str | None) -> list | dict:
    """Extract nested data using dot notation path.

    Args:
        data: The JSON response data
        path: Dot notation path (e.g., "data.users", "response.items")

    Returns:
        The extracted data at the specified path

    Raises:
        KeyError: If the path does not exist in the data

    Examples:
        >>> extract_path({"data": {"users": [1, 2, 3]}}, "data.users")
        [1, 2, 3]
        >>> extract_path([1, 2, 3], None)
        [1, 2, 3]
    """
    if path is None:
        return data

    current = data
    for key in path.split("."):
        if isinstance(current, dict):
            if key not in current:
                raise KeyError(
                    f"Key '{key}' not found in response. "
                    f"Available keys: {list(current.keys())}. Full path: {path}"
                )
            current = current[key]
        elif isinstance(current, list) and key.isdigit():
            current = current[int(key)]
        else:
            raise KeyError(
                f"Cannot access '{key}' on {type(current).__name__}. "
                f"Full path: {path}"
            )

    return current


def to_pandas(data: list | dict, path: str | None = None):
    """Convert JSON data to a pandas DataFrame.

    Args:
        data: The JSON data to convert (dict or list)
        path: Optional dot notation path to extract data first

    Returns:
        pandas.DataFrame

    Raises:
        ImportError: If pandas is not installed
        TypeError: If data cannot be converted to DataFrame
    """
    try:
        import pandas as pd
    except ImportError:
        raise ImportError(
            "pandas is required for DataFrame conversion. "
            "Install with: pip install pandas"
        )

    # Extract nested data if path specified
    target_data = extract_path(data, path)

    # Normalize data to list of dicts (handles Pydantic models)
    normalized = _normalize_data(target_data)

    # Use json_normalize for nested structure support
    return pd.json_normalize(normalized)


def to_polars(data: list | dict, path: str | None = None):
    """Convert JSON data to a polars DataFrame.

    Args:
        data: The JSON data to convert (dict or list)
        path: Optional dot notation path to extract data first

    Returns:
        polars.DataFrame

    Raises:
        ImportError: If polars is not installed
        TypeError: If data cannot be converted to DataFrame
    """
    try:
        import polars as pl
    except ImportError:
        raise ImportError(
            "polars is required for DataFrame conversion. "
            "Install with: pip install polars"
        )

    # Extract nested data if path specified
    target_data = extract_path(data, path)

    # Normalize data to list of dicts (handles Pydantic models)
    normalized = _normalize_data(target_data)

    return pl.DataFrame(normalized)
'''
201
+
202
+
203
+ # =============================================================================
204
+ # DataFrame Module Generation
205
+ # =============================================================================
206
+
207
+
208
def generate_dataframe_module(output_dir: Path | UPath) -> Path | UPath:
    """Write the ``_dataframe.py`` runtime helper into *output_dir*.

    The target directory (including any missing parents) is created on
    demand, then ``DATAFRAME_MODULE_CONTENT`` is written verbatim to a
    file named ``_dataframe.py`` inside it.

    Args:
        output_dir: The output directory where the module should be written.

    Returns:
        The path to the generated file.
    """
    target_dir = UPath(output_dir)
    target_dir.mkdir(parents=True, exist_ok=True)

    module_path = target_dir / '_dataframe.py'
    module_path.write_text(DATAFRAME_MODULE_CONTENT)
    return module_path
224
+
225
+
226
def get_dataframe_imports() -> dict[str, set[str]]:
    """Return the module → names mapping DataFrame methods require.

    Only ``TYPE_CHECKING`` is needed at runtime; pandas/polars imports are
    deferred to the TYPE_CHECKING block (see
    ``get_dataframe_type_checking_imports``).

    Returns:
        A dictionary mapping module names to sets of imported names.
    """
    required: dict[str, set[str]] = {'typing': {'TYPE_CHECKING'}}
    return required
235
+
236
+
237
def get_dataframe_type_checking_imports() -> list[str]:
    """Return import statements for the ``TYPE_CHECKING`` block.

    pandas and polars are optional dependencies of generated clients, so
    they are imported only for static type checking, never at runtime.

    Returns:
        List of import statements to include inside TYPE_CHECKING block.
    """
    return ['import pandas as pd', 'import polars as pl']
247
+
248
+
249
+ # =============================================================================
250
+ # DataFrame Configuration
251
+ # =============================================================================
252
+
253
+
254
+ @dataclass
255
+ class DataFrameMethodConfig:
256
+ """Configuration for DataFrame method generation.
257
+
258
+ Attributes:
259
+ generate_pandas: Whether to generate pandas DataFrame methods.
260
+ generate_polars: Whether to generate polars DataFrame methods.
261
+ path: Default JSONPath for extracting data from responses.
262
+ """
263
+
264
+ generate_pandas: bool = False
265
+ generate_polars: bool = False
266
+ path: str | None = None
267
+
268
+
269
def endpoint_returns_list(endpoint: 'Endpoint') -> bool:
    """Check if an endpoint returns a list type.

    Guards against endpoints with no response type, then delegates the AST
    inspection to ``response_type_returns_list``.

    Args:
        endpoint: The endpoint to check.

    Returns:
        True if the endpoint returns a list, False otherwise.

    Example:
        >>> # Endpoint with response_type annotation of list[Pet]
        >>> endpoint_returns_list(endpoint)
        True
    """
    response = endpoint.response_type
    return bool(response) and response_type_returns_list(response)
290
+
291
+
292
def response_type_returns_list(response_type: 'Type | None') -> bool:
    """Check if a response type represents a list.

    Lower-level companion of ``endpoint_returns_list`` that works directly
    with Type objects: the type's annotation AST must be a subscript whose
    base is the bare name ``list`` (e.g. ``list[Pet]``).

    Args:
        response_type: The Type object to check, or None.

    Returns:
        True if the type represents a list, False otherwise.
    """
    if not response_type:
        return False

    annotation = response_type.annotation_ast
    # A falsy/None annotation fails the isinstance check, matching the
    # original truthiness guard.
    return (
        isinstance(annotation, ast.Subscript)
        and isinstance(annotation.value, ast.Name)
        and annotation.value.id == 'list'
    )
314
+
315
+
316
def get_dataframe_config_for_endpoint(
    endpoint: 'Endpoint',
    dataframe_config: 'DataFrameConfig',
) -> DataFrameMethodConfig:
    """Get the DataFrame method configuration for an endpoint.

    Decides whether pandas and/or polars DataFrame methods should be
    generated for *endpoint*, combining the global configuration with
    whether the endpoint's response type is a list.

    Args:
        endpoint: The endpoint to get configuration for.
        dataframe_config: The global DataFrame configuration.

    Returns:
        DataFrameMethodConfig with generation flags and default path.

    Example:
        >>> config = get_dataframe_config_for_endpoint(endpoint, df_config)
        >>> if config.generate_pandas:
        ...     # Generate pandas method
    """
    # Disabled globally: generate nothing.
    if not dataframe_config.enabled:
        return DataFrameMethodConfig()

    # Delegate the per-endpoint decision to the config object, keyed by the
    # endpoint's sync function name and whether it returns a list.
    pandas_flag, polars_flag, default_path = (
        dataframe_config.should_generate_for_endpoint(
            endpoint_name=endpoint.fn.name,
            returns_list=endpoint_returns_list(endpoint),
        )
    )

    return DataFrameMethodConfig(
        generate_pandas=pandas_flag,
        generate_polars=polars_flag,
        path=default_path,
    )
358
+
359
+
360
def get_dataframe_config_from_parts(
    endpoint_name: str,
    response_type: 'Type | None',
    dataframe_config: 'DataFrameConfig',
) -> DataFrameMethodConfig:
    """Get DataFrame configuration using individual endpoint parts.

    Variant of ``get_dataframe_config_for_endpoint`` for callers that hold
    the endpoint name and response type separately rather than a full
    Endpoint object.

    Args:
        endpoint_name: The name of the endpoint function.
        response_type: The response Type object, or None.
        dataframe_config: The global DataFrame configuration.

    Returns:
        DataFrameMethodConfig with generation flags and default path.
    """
    # Disabled globally: generate nothing.
    if not dataframe_config.enabled:
        return DataFrameMethodConfig()

    pandas_flag, polars_flag, default_path = (
        dataframe_config.should_generate_for_endpoint(
            endpoint_name=endpoint_name,
            returns_list=response_type_returns_list(response_type),
        )
    )

    return DataFrameMethodConfig(
        generate_pandas=pandas_flag,
        generate_polars=polars_flag,
        path=default_path,
    )