otterapi 0.0.5__py3-none-any.whl → 0.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- README.md +581 -8
- otterapi/__init__.py +73 -0
- otterapi/cli.py +327 -29
- otterapi/codegen/__init__.py +115 -0
- otterapi/codegen/ast_utils.py +134 -5
- otterapi/codegen/client.py +1271 -0
- otterapi/codegen/codegen.py +1736 -0
- otterapi/codegen/dataframes.py +392 -0
- otterapi/codegen/emitter.py +473 -0
- otterapi/codegen/endpoints.py +2597 -343
- otterapi/codegen/pagination.py +1026 -0
- otterapi/codegen/schema.py +593 -0
- otterapi/codegen/splitting.py +1397 -0
- otterapi/codegen/types.py +1345 -0
- otterapi/codegen/utils.py +180 -1
- otterapi/config.py +1017 -24
- otterapi/exceptions.py +231 -0
- otterapi/openapi/__init__.py +46 -0
- otterapi/openapi/v2/__init__.py +86 -0
- otterapi/openapi/v2/spec.json +1607 -0
- otterapi/openapi/v2/v2.py +1776 -0
- otterapi/openapi/v3/__init__.py +131 -0
- otterapi/openapi/v3/spec.json +1651 -0
- otterapi/openapi/v3/v3.py +1557 -0
- otterapi/openapi/v3_1/__init__.py +133 -0
- otterapi/openapi/v3_1/spec.json +1411 -0
- otterapi/openapi/v3_1/v3_1.py +798 -0
- otterapi/openapi/v3_2/__init__.py +133 -0
- otterapi/openapi/v3_2/spec.json +1666 -0
- otterapi/openapi/v3_2/v3_2.py +777 -0
- otterapi/tests/__init__.py +3 -0
- otterapi/tests/fixtures/__init__.py +455 -0
- otterapi/tests/test_ast_utils.py +680 -0
- otterapi/tests/test_codegen.py +610 -0
- otterapi/tests/test_dataframe.py +1038 -0
- otterapi/tests/test_exceptions.py +493 -0
- otterapi/tests/test_openapi_support.py +616 -0
- otterapi/tests/test_openapi_upgrade.py +215 -0
- otterapi/tests/test_pagination.py +1101 -0
- otterapi/tests/test_splitting_config.py +319 -0
- otterapi/tests/test_splitting_integration.py +427 -0
- otterapi/tests/test_splitting_resolver.py +512 -0
- otterapi/tests/test_splitting_tree.py +525 -0
- otterapi-0.0.6.dist-info/METADATA +627 -0
- otterapi-0.0.6.dist-info/RECORD +48 -0
- {otterapi-0.0.5.dist-info → otterapi-0.0.6.dist-info}/WHEEL +1 -1
- otterapi/codegen/generator.py +0 -358
- otterapi/codegen/openapi_processor.py +0 -27
- otterapi/codegen/type_generator.py +0 -559
- otterapi-0.0.5.dist-info/METADATA +0 -54
- otterapi-0.0.5.dist-info/RECORD +0 -16
- {otterapi-0.0.5.dist-info → otterapi-0.0.6.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,1397 @@
|
|
|
1
|
+
"""Module splitting utilities for organizing endpoints into a hierarchy.
|
|
2
|
+
|
|
3
|
+
This module provides utilities for:
|
|
4
|
+
- Building a hierarchical structure of modules from endpoints (ModuleTree)
|
|
5
|
+
- Resolving endpoint paths to target modules (ModuleMapResolver)
|
|
6
|
+
- Emitting organized endpoint modules to the filesystem (SplitModuleEmitter)
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import ast
|
|
12
|
+
import fnmatch
|
|
13
|
+
import re
|
|
14
|
+
from collections.abc import Iterator
|
|
15
|
+
from dataclasses import dataclass, field
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
from typing import TYPE_CHECKING
|
|
18
|
+
|
|
19
|
+
from upath import UPath
|
|
20
|
+
|
|
21
|
+
from otterapi.codegen.ast_utils import ImportCollector, _all, _name
|
|
22
|
+
from otterapi.codegen.utils import write_mod
|
|
23
|
+
|
|
24
|
+
if TYPE_CHECKING:
|
|
25
|
+
from otterapi.codegen.types import Endpoint, Type
|
|
26
|
+
from otterapi.config import (
|
|
27
|
+
DataFrameConfig,
|
|
28
|
+
ModuleDefinition,
|
|
29
|
+
ModuleSplitConfig,
|
|
30
|
+
PaginationConfig,
|
|
31
|
+
)
|
|
32
|
+
|
|
33
|
+
__all__ = [
|
|
34
|
+
# Tree classes
|
|
35
|
+
'ModuleTree',
|
|
36
|
+
'ModuleTreeBuilder',
|
|
37
|
+
'build_module_tree',
|
|
38
|
+
# Resolver classes
|
|
39
|
+
'ResolvedModule',
|
|
40
|
+
'ModuleMapResolver',
|
|
41
|
+
# Emitter classes
|
|
42
|
+
'EmittedModule',
|
|
43
|
+
'SplitModuleEmitter',
|
|
44
|
+
]
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
# =============================================================================
|
|
48
|
+
# Module Tree
|
|
49
|
+
# =============================================================================
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
@dataclass
class ModuleTree:
    """A node in the generated-module hierarchy.

    Every node may hold endpoints directly and/or nested child modules.
    The root node conventionally has the name ``'__root__'`` and holds the
    top-level modules as children.

    Attributes:
        name: The name of this module node.
        endpoints: Endpoints that belong directly to this module.
        children: Child modules keyed by their name.
        definition: The ModuleDefinition attached to this node, if any.
        description: Optional human-readable description for this module.
    """

    name: str
    endpoints: list[Endpoint] = field(default_factory=list)
    children: dict[str, ModuleTree] = field(default_factory=dict)
    definition: ModuleDefinition | None = None
    description: str | None = None

    def add_endpoint(self, module_path: list[str], endpoint: Endpoint) -> None:
        """Attach *endpoint* under the node addressed by *module_path*.

        Missing intermediate nodes are created on the way down; an empty
        path attaches the endpoint to this node itself.

        Args:
            module_path: Module path components, e.g. ``["api", "v1", "users"]``.
            endpoint: The endpoint to attach.
        """
        node = self
        for segment in module_path:
            node = node.children.setdefault(segment, ModuleTree(name=segment))
        node.endpoints.append(endpoint)

    def get_node(self, module_path: list[str]) -> ModuleTree | None:
        """Look up the node addressed by *module_path*.

        Args:
            module_path: Module path components; an empty path addresses self.

        Returns:
            The matching ModuleTree node, or None when any segment is missing.
        """
        node = self
        for segment in module_path:
            child = node.children.get(segment)
            if child is None:
                return None
            node = child
        return node

    def walk(self) -> Iterator[tuple[list[str], ModuleTree]]:
        """Yield every node depth-first in pre-order.

        Children are visited in sorted-name order for deterministic output.

        Yields:
            ``(module_path, node)`` pairs for each node in the subtree.
        """
        yield from self._walk_recursive([])

    def _walk_recursive(
        self, current_path: list[str]
    ) -> Iterator[tuple[list[str], ModuleTree]]:
        """Pre-order traversal helper carrying the accumulated path."""
        yield current_path, self
        # Sorted iteration keeps emission order stable across runs.
        for key in sorted(self.children):
            yield from self.children[key]._walk_recursive(current_path + [key])

    def walk_leaves(self) -> Iterator[tuple[list[str], ModuleTree]]:
        """Yield only the nodes that carry endpoints directly.

        Yields:
            ``(module_path, node)`` pairs for nodes whose endpoint list is
            non-empty (interior nodes may also qualify).
        """
        yield from ((path, node) for path, node in self.walk() if node.endpoints)

    def count_endpoints(self) -> int:
        """Return the total number of endpoints in this subtree."""
        return len(self.endpoints) + sum(
            child.count_endpoints() for child in self.children.values()
        )

    def is_empty(self) -> bool:
        """Return True when this subtree carries no endpoints at all."""
        return self.count_endpoints() == 0

    def flatten(self) -> dict[str, list[Endpoint]]:
        """Collapse the tree into ``{dotted_module_name: endpoints}``.

        Endpoints stored on the root itself appear under ``'__root__'``.
        """
        flat: dict[str, list[Endpoint]] = {}
        for path, node in self.walk():
            if node.endpoints:
                key = '.'.join(path) if path else '__root__'
                flat[key] = node.endpoints
        return flat
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
class ModuleTreeBuilder:
    """Constructs a :class:`ModuleTree` from endpoints and a split config.

    Example:
        >>> from otterapi.config import ModuleSplitConfig
        >>> config = ModuleSplitConfig(enabled=True, strategy="tag")
        >>> builder = ModuleTreeBuilder(config)
        >>> tree = builder.build(endpoints)
    """

    def __init__(self, config: ModuleSplitConfig):
        """Store the configuration and create the path resolver.

        Args:
            config: The module split configuration.
        """
        self.config = config
        self.resolver = ModuleMapResolver(config)

    def build(self, endpoints: list[Endpoint]) -> ModuleTree:
        """Organize *endpoints* into a module tree.

        Args:
            endpoints: Endpoint objects to place into modules.

        Returns:
            A ModuleTree laid out according to the configuration.
        """
        root = ModuleTree(name='__root__')

        for ep in endpoints:
            # Not every Endpoint carries tags; fall back to None quietly.
            resolved = self.resolver.resolve(
                path=ep.path,
                method=ep.method,
                tags=getattr(ep, 'tags', None),
            )

            root.add_endpoint(resolved.module_path, ep)

            # Record the matching definition (first match wins per node).
            if resolved.definition:
                target = root.get_node(resolved.module_path)
                if target and not target.definition:
                    target.definition = resolved.definition
                    if resolved.definition.description:
                        target.description = resolved.definition.description

        if self.config.min_endpoints > 1:
            self._consolidate_small_modules(root)

        return root

    def _consolidate_small_modules(self, root: ModuleTree) -> None:
        """Move under-populated leaf modules into the fallback module."""
        # Collect first, mutate after: add_endpoint() reshapes the tree.
        too_small = [
            (path, node)
            for path, node in root.walk()
            if path
            and path != [self.config.fallback_module]
            and node.endpoints
            and not node.children
            and len(node.endpoints) < self.config.min_endpoints
        ]

        for _path, node in too_small:
            for ep in node.endpoints:
                root.add_endpoint([self.config.fallback_module], ep)
            node.endpoints = []

        self._remove_empty_nodes(root)

    def _remove_empty_nodes(self, node: ModuleTree) -> bool:
        """Prune empty subtrees; return True when *node* itself is empty."""
        prunable = [
            name
            for name, child in node.children.items()
            if self._remove_empty_nodes(child)
        ]
        for name in prunable:
            del node.children[name]

        return not (node.endpoints or node.children)
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def build_module_tree(
    endpoints: list[Endpoint],
    config: ModuleSplitConfig,
) -> ModuleTree:
    """Build a module tree in one call.

    Thin convenience wrapper around :class:`ModuleTreeBuilder`.

    Args:
        endpoints: Endpoint objects to organize.
        config: The module split configuration.

    Returns:
        A ModuleTree laid out according to the configuration.
    """
    return ModuleTreeBuilder(config).build(endpoints)
|
|
265
|
+
|
|
266
|
+
|
|
267
|
+
# =============================================================================
|
|
268
|
+
# Module Map Resolver
|
|
269
|
+
# =============================================================================
|
|
270
|
+
|
|
271
|
+
|
|
272
|
+
@dataclass
class ResolvedModule:
    """Outcome of resolving one endpoint to its target module.

    Attributes:
        module_path: Module path components, e.g. ``["api", "v1", "users"]``.
        definition: The ModuleDefinition that matched, if any.
        resolution: How the module was chosen: "custom", "tag", "path",
            or "fallback".
        stripped_path: The endpoint path after strip_prefix transformations.
    """

    module_path: list[str]
    definition: ModuleDefinition | None = None
    resolution: str = 'fallback'
    stripped_path: str = ''

    @property
    def module_name(self) -> str:
        """Dotted module name, e.g. ``'api.v1.users'``."""
        return '.'.join(self.module_path)

    @property
    def file_path(self) -> str:
        """Relative nested file path, e.g. ``'api/v1/users.py'``."""
        parts = self.module_path
        if len(parts) == 1:
            return parts[0] + '.py'
        return '{}/{}.py'.format('/'.join(parts[:-1]), parts[-1])

    @property
    def flat_file_path(self) -> str:
        """Flat single-file path, e.g. ``'api_v1_users.py'``."""
        return '{}.py'.format('_'.join(self.module_path))
|
|
304
|
+
|
|
305
|
+
|
|
306
|
+
class ModuleMapResolver:
    """Resolves endpoint paths to target modules based on configuration.

    The resolver uses the following priority:
    1. Custom module_map patterns (if strategy is 'custom' or 'hybrid')
    2. OpenAPI tags (if strategy is 'tag' or 'hybrid')
    3. Path segments (if strategy is 'path' or 'hybrid')
    4. Fallback module

    Example:
        >>> from otterapi.config import ModuleSplitConfig
        >>> config = ModuleSplitConfig(
        ...     enabled=True,
        ...     strategy="custom",
        ...     module_map={"users": ["/users/*", "/user/*"]}
        ... )
        >>> resolver = ModuleMapResolver(config)
        >>> result = resolver.resolve("/users/123", "GET", tags=None)
        >>> result.module_name
        'users'
    """

    def __init__(self, config: ModuleSplitConfig):
        """Initialize the resolver with configuration."""
        self.config = config
        # NOTE(review): never populated in this module — presumably reserved
        # for caching compiled glob patterns; confirm before removing.
        self._compiled_patterns: dict[str, list[re.Pattern]] = {}

    def resolve(
        self,
        path: str,
        method: str,
        tags: list[str] | None = None,
    ) -> ResolvedModule:
        """Resolve an endpoint path to a target module.

        Args:
            path: The API endpoint path, e.g., "/users/{id}".
            method: The HTTP method (GET, POST, etc.). Currently unused by
                the resolution rules but kept for interface stability.
            tags: Optional list of OpenAPI tags for this operation.

        Returns:
            A ResolvedModule indicating where the endpoint should be placed.
        """
        stripped_path = self._strip_global_prefixes(path)

        # 1. Explicit module_map patterns take precedence.
        if self.config.module_map and self._should_use_module_map():
            result = self._match_module_map(stripped_path, self.config.module_map, [])
            if result:
                result.stripped_path = stripped_path
                return result

        # 2. First OpenAPI tag, sanitized to a valid identifier.
        if self._should_use_tags() and tags:
            return ResolvedModule(
                module_path=[self._sanitize(tags[0])],
                resolution='tag',
                stripped_path=stripped_path,
            )

        # 3. Leading path segment(s), per configured path_depth.
        if self._should_use_path():
            module_name = self._extract_from_path(stripped_path)
            if module_name:
                return ResolvedModule(
                    module_path=[module_name],
                    resolution='path',
                    stripped_path=stripped_path,
                )

        # 4. Nothing matched: park the endpoint in the fallback module.
        return ResolvedModule(
            module_path=[self.config.fallback_module],
            resolution='fallback',
            stripped_path=stripped_path,
        )

    def _normalized_strategy(self):
        """Return the configured strategy as a SplitStrategy member.

        Accepts either a SplitStrategy or its string value; shared by the
        _should_use_* predicates so the coercion lives in one place.
        """
        from otterapi.config import SplitStrategy

        strategy = self.config.strategy
        if isinstance(strategy, str):
            strategy = SplitStrategy(strategy)
        return strategy

    def _should_use_module_map(self) -> bool:
        """Check if module_map should be used based on strategy."""
        from otterapi.config import SplitStrategy

        return self._normalized_strategy() in (
            SplitStrategy.CUSTOM,
            SplitStrategy.HYBRID,
        )

    def _should_use_tags(self) -> bool:
        """Check if tags should be used based on strategy."""
        from otterapi.config import SplitStrategy

        return self._normalized_strategy() in (
            SplitStrategy.TAG,
            SplitStrategy.HYBRID,
        )

    def _should_use_path(self) -> bool:
        """Check if path extraction should be used based on strategy."""
        from otterapi.config import SplitStrategy

        return self._normalized_strategy() in (
            SplitStrategy.PATH,
            SplitStrategy.HYBRID,
        )

    def _strip_global_prefixes(self, path: str) -> str:
        """Strip the first matching configured global prefix from *path*.

        The result always keeps a leading '/'. Only the first matching
        prefix is applied.
        """
        for prefix in self.config.global_strip_prefixes:
            if path.startswith(prefix):
                stripped = path[len(prefix) :]
                if not stripped.startswith('/'):
                    stripped = '/' + stripped
                return stripped
        return path

    def _match_module_map(
        self,
        path: str,
        module_map: dict[str, ModuleDefinition],
        parent_path: list[str],
        parent_definition: ModuleDefinition | None = None,
    ) -> ResolvedModule | None:
        """Recursively match *path* against the module_map.

        Returns the deepest matching ResolvedModule, or None if no entry
        (at this level or below) matches.
        """
        # Apply the parent's strip_prefix before matching this level.
        working_path = path
        if parent_definition and parent_definition.strip_prefix:
            if working_path.startswith(parent_definition.strip_prefix):
                working_path = working_path[len(parent_definition.strip_prefix) :]
                if not working_path.startswith('/'):
                    working_path = '/' + working_path

        for module_name, definition in module_map.items():
            current_path = parent_path + [module_name]

            # match_path is what children see; the definition's own patterns
            # are still tested against working_path (pre-strip).
            match_path = working_path
            if definition.strip_prefix:
                if match_path.startswith(definition.strip_prefix):
                    match_path = match_path[len(definition.strip_prefix) :]
                    if not match_path.startswith('/'):
                        match_path = '/' + match_path

            if definition.paths:
                for pattern in definition.paths:
                    if self._path_matches(working_path, pattern):
                        # Prefer a deeper nested match; fall back to this
                        # definition when no child matches.
                        if definition.modules:
                            nested_result = self._match_module_map(
                                match_path,
                                definition.modules,
                                current_path,
                                definition,
                            )
                            if nested_result:
                                return nested_result

                        return ResolvedModule(
                            module_path=current_path,
                            definition=definition,
                            resolution='custom',
                        )

            elif definition.modules:
                # No patterns at this level: descend unconditionally.
                nested_result = self._match_module_map(
                    match_path,
                    definition.modules,
                    current_path,
                    definition,
                )
                if nested_result:
                    return nested_result

        return None

    def _path_matches(self, path: str, pattern: str) -> bool:
        """Check if a path matches a glob pattern.

        Trailing slashes are ignored on both sides. Patterns containing
        '**' are translated so that a single '*' stops at '/', while
        patterns without '**' go through fnmatch.

        NOTE(review): under fnmatch a lone '*' matches across '/' (so
        '/users/*' matches '/users/1/posts'), whereas the '**' branch
        confines '*' to one segment — confirm this asymmetry is intended
        before relying on single-'*' patterns for segment-exact matching.
        """
        path = path.rstrip('/')
        pattern = pattern.rstrip('/')

        if '**' in pattern:
            regex_pattern = self._glob_to_regex(pattern)
            return bool(re.match(regex_pattern, path))

        return fnmatch.fnmatch(path, pattern)

    def _glob_to_regex(self, pattern: str) -> str:
        """Convert a glob pattern to an anchored regex pattern.

        '**' becomes '.*' (crosses segments), '*' becomes '[^/]*' and '?'
        becomes '[^/]' (single segment); regex metacharacters are escaped.
        """
        result = ''
        i = 0
        while i < len(pattern):
            if pattern[i : i + 2] == '**':
                result += '.*'
                i += 2
            elif pattern[i] == '*':
                result += '[^/]*'
                i += 1
            elif pattern[i] == '?':
                result += '[^/]'
                i += 1
            else:
                if pattern[i] in r'\.^$+{}[]|()':
                    result += '\\' + pattern[i]
                else:
                    result += pattern[i]
                i += 1

        return f'^{result}$'

    def _extract_from_path(self, path: str) -> str | None:
        """Extract a module name from the path based on path_depth.

        Template segments like '{id}' are skipped. Multiple segments are
        joined with '_'. Returns None when no usable segment remains.
        """
        segments = [s for s in path.split('/') if s and not s.startswith('{')]

        if not segments:
            return None

        depth = min(self.config.path_depth, len(segments))
        if depth == 1:
            return self._sanitize(segments[0])
        else:
            return '_'.join(self._sanitize(s) for s in segments[:depth])

    def _sanitize(self, name: str) -> str:
        """Sanitize a name to be a valid Python identifier.

        Lowercases, converts runs of dashes/whitespace to '_', drops other
        invalid characters, prefixes digits with '_' and suffixes Python
        keywords with '_'. Falls back to 'module' when nothing survives.
        """
        import keyword

        sanitized = re.sub(r'[-\s]+', '_', name.lower())
        sanitized = re.sub(r'[^a-z0-9_]', '', sanitized)

        if sanitized and sanitized[0].isdigit():
            sanitized = '_' + sanitized

        if keyword.iskeyword(sanitized):
            sanitized = sanitized + '_'

        return sanitized or 'module'
|
|
534
|
+
|
|
535
|
+
|
|
536
|
+
# =============================================================================
|
|
537
|
+
# Split Module Emitter
|
|
538
|
+
# =============================================================================
|
|
539
|
+
|
|
540
|
+
|
|
541
|
+
@dataclass
class EmittedModule:
    """Information about an emitted module.

    Produced by SplitModuleEmitter for every endpoint file it writes, so
    callers can see what was generated and where.

    Attributes:
        path: The file path where the module was written.
        module_path: The module path components, e.g., ["api", "v1", "users"].
        endpoint_names: Names of endpoints (functions) in this module.
    """

    # Filesystem location of the generated .py file.
    path: Path
    # Hierarchical components; a flat layout joins these with '_' in the filename.
    module_path: list[str]
    # Function names the emitter generated into this module.
    endpoint_names: list[str] = field(default_factory=list)
|
|
554
|
+
|
|
555
|
+
|
|
556
|
+
class SplitModuleEmitter:
|
|
557
|
+
"""Emits split modules based on a ModuleTree structure.
|
|
558
|
+
|
|
559
|
+
This emitter generates:
|
|
560
|
+
- Endpoint files organized by module hierarchy
|
|
561
|
+
- __init__.py files with proper exports
|
|
562
|
+
- Handles both flat and nested directory structures
|
|
563
|
+
|
|
564
|
+
Example:
|
|
565
|
+
>>> from otterapi.config import ModuleSplitConfig
|
|
566
|
+
>>> config = ModuleSplitConfig(enabled=True)
|
|
567
|
+
>>> emitter = SplitModuleEmitter(config, output_dir, models_file)
|
|
568
|
+
>>> emitter.emit(tree, base_url)
|
|
569
|
+
"""
|
|
570
|
+
|
|
571
|
+
def __init__(
    self,
    config: ModuleSplitConfig,
    output_dir: Path | UPath,
    models_file: Path | UPath,
    models_import_path: str | None = None,
    client_class_name: str = 'APIClient',
    dataframe_config: DataFrameConfig | None = None,
    pagination_config: PaginationConfig | None = None,
):
    """Initialize the split module emitter.

    Args:
        config: The module split configuration.
        output_dir: Root directory that generated files are written under.
        models_file: Path to the models file, used for import generation.
        models_import_path: Optional custom import path for models.
        client_class_name: Name of the client class.
        dataframe_config: Optional DataFrame configuration.
        pagination_config: Optional pagination configuration.
    """
    # Configuration inputs, stored as-is.
    self.config = config
    self.client_class_name = client_class_name
    self.dataframe_config = dataframe_config
    self.pagination_config = pagination_config

    # Filesystem locations, normalized to UPath for uniform handling.
    self.output_dir = UPath(output_dir)
    self.models_file = UPath(models_file)
    self.models_import_path = models_import_path

    # Per-run working state; emit() resets these before each run.
    self._emitted_modules: list[EmittedModule] = []
    self._typegen_types: dict[str, Type] = {}
    self._is_flat: bool = False  # Track if we're emitting flat structure
|
|
602
|
+
|
|
603
|
+
def emit(
    self,
    tree: ModuleTree,
    base_url: str,
    typegen_types: dict[str, Type] | None = None,
) -> list[EmittedModule]:
    """Emit all modules from the tree.

    Args:
        tree: The ModuleTree containing organized endpoints.
        base_url: The base URL for API requests.
        typegen_types: Optional dict of types for collecting model imports.

    Returns:
        List of EmittedModule objects describing what was written.
    """
    # Reset per-run state so the emitter is reusable across calls.
    self._emitted_modules = []
    self._typegen_types = typegen_types or {}

    # Record the layout choice, then dispatch to the matching writer.
    self._is_flat = bool(self.config.flat_structure)
    if self._is_flat:
        self._emit_flat(tree, base_url)
    else:
        self._emit_nested(tree, base_url)

    return self._emitted_modules
|
|
630
|
+
|
|
631
|
+
def _emit_flat(self, tree: ModuleTree, base_url: str) -> None:
    """Emit every populated module as a single flat file (no subdirectories)."""
    exports_by_module: dict[str, list[str]] = {}

    for module_path, node in tree.walk_leaves():
        if not node.endpoints:
            # walk_leaves() only yields populated nodes; defensive guard.
            continue

        # Flat layout collapses the hierarchy into one filename: a/b -> a_b.py
        flat_name = '_'.join(module_path) if module_path else 'endpoints'
        target = self.output_dir / (flat_name + '.py')

        names = self._emit_module_file(
            file_path=target,
            endpoints=node.endpoints,
            base_url=base_url,
            description=node.description,
            module_path=module_path,
        )

        exports_by_module[flat_name] = names
        self._emitted_modules.append(
            EmittedModule(
                path=target,
                module_path=module_path,
                endpoint_names=names,
            )
        )

    # Write the package __init__ from the collected per-module exports.
    self._emit_flat_init(exports_by_module)
|
|
661
|
+
|
|
662
|
+
def _emit_nested(self, tree: ModuleTree, base_url: str) -> None:
    """Emit modules as a directory hierarchy mirroring the module tree."""
    package_dirs: set[tuple[str, ...]] = set()

    for module_path, node in tree.walk_leaves():
        if not node.endpoints:
            # walk_leaves() only yields populated nodes; defensive guard.
            continue

        if len(module_path) > 1:
            # Deep module: create its parent directory chain on disk.
            parent_dir = self.output_dir / '/'.join(module_path[:-1])
            parent_dir.mkdir(parents=True, exist_ok=True)

            # Record every ancestor package (all proper prefixes) so that
            # __init__ files can be generated for each level.
            for depth in range(1, len(module_path)):
                package_dirs.add(tuple(module_path[:depth]))

            target = parent_dir / (module_path[-1] + '.py')
        else:
            # Top-level module: file sits directly in the output directory.
            stem = module_path[0] if module_path else 'endpoints'
            target = self.output_dir / (stem + '.py')

        names = self._emit_module_file(
            file_path=target,
            endpoints=node.endpoints,
            base_url=base_url,
            description=node.description,
            module_path=module_path,
        )

        self._emitted_modules.append(
            EmittedModule(
                path=target,
                module_path=module_path,
                endpoint_names=names,
            )
        )

    self._emit_nested_inits(tree, package_dirs)
|
|
700
|
+
|
|
701
|
+
def _emit_module_file(
    self,
    file_path: Path | UPath,
    endpoints: list[Endpoint],
    base_url: str,
    description: str | None = None,
    module_path: list[str] | None = None,
) -> list[str]:
    """Emit a single endpoint module file.

    Builds the module body as a list of AST statements — default client
    setup, then per endpoint either paginated variants (sync/async call,
    sync/async iterator, optional paginated DataFrame helpers) or plain
    sync/async functions plus optional DataFrame helpers — then prepends
    ``__all__``, model/client imports, conditional TYPE_CHECKING and
    ``_dataframe``/``_pagination`` imports, and writes the file.

    Args:
        file_path: Destination ``.py`` file for the module.
        endpoints: Endpoints rendered into this module.
        base_url: NOTE(review): accepted but never referenced in this
            method — confirm whether callers still need to pass it.
        description: Optional module docstring text.
        module_path: Logical module path; used to compute relative import
            levels for ``client`` and ``models``.

    Returns:
        The generated public function names, in emission order.
    """
    # Imported lazily — presumably to avoid circular imports between
    # codegen submodules; TODO confirm.
    from otterapi.codegen.dataframes import get_dataframe_config_for_endpoint
    from otterapi.codegen.endpoints import (
        build_default_client_code,
        build_standalone_dataframe_fn,
        build_standalone_endpoint_fn,
        build_standalone_paginated_dataframe_fn,
        build_standalone_paginated_fn,
        build_standalone_paginated_iter_fn,
    )
    from otterapi.codegen.pagination import get_pagination_config_for_endpoint

    body: list[ast.stmt] = []

    # A bare string expression at module top becomes the module docstring.
    if description:
        body.append(ast.Expr(value=ast.Constant(value=description)))

    import_collector = ImportCollector()

    client_stmts, client_imports = build_default_client_code()
    body.extend(client_stmts)
    import_collector.add_imports(client_imports)

    has_dataframe_methods = False
    has_pagination_methods = False
    endpoint_names: list[str] = []

    for endpoint in endpoints:
        # Track whether we generated paginated DataFrame methods for this endpoint
        generated_paginated_df = False

        # Check if this endpoint has pagination configured
        pag_config = None
        if self.pagination_config and self.pagination_config.enabled:
            pag_config = get_pagination_config_for_endpoint(
                endpoint.sync_fn_name,
                self.pagination_config,
                endpoint.parameters,
            )

        # Generate pagination methods if configured, otherwise regular functions
        if pag_config:
            has_pagination_methods = True

            # Get item type from response type if it's a list
            item_type_ast = self._get_item_type_ast(endpoint)

            # Build pagination config dict
            pag_dict = {
                'offset_param': pag_config.offset_param,
                'limit_param': pag_config.limit_param,
                'cursor_param': pag_config.cursor_param,
                'page_param': pag_config.page_param,
                'per_page_param': pag_config.per_page_param,
                'data_path': pag_config.data_path,
                'total_path': pag_config.total_path,
                'next_cursor_path': pag_config.next_cursor_path,
                'total_pages_path': pag_config.total_pages_path,
                'default_page_size': pag_config.default_page_size,
            }

            # Sync paginated function
            pag_fn, pag_imports = build_standalone_paginated_fn(
                fn_name=endpoint.sync_fn_name,
                method=endpoint.method,
                path=endpoint.path,
                parameters=endpoint.parameters,
                request_body_info=endpoint.request_body,
                response_type=endpoint.response_type,
                pagination_style=pag_config.style,
                pagination_config=pag_dict,
                item_type_ast=item_type_ast,
                docs=endpoint.description,
                is_async=False,
            )
            endpoint_names.append(endpoint.sync_fn_name)
            body.append(pag_fn)
            import_collector.add_imports(pag_imports)

            # Async paginated function
            async_pag_fn, async_pag_imports = build_standalone_paginated_fn(
                fn_name=endpoint.async_fn_name,
                method=endpoint.method,
                path=endpoint.path,
                parameters=endpoint.parameters,
                request_body_info=endpoint.request_body,
                response_type=endpoint.response_type,
                pagination_style=pag_config.style,
                pagination_config=pag_dict,
                item_type_ast=item_type_ast,
                docs=endpoint.description,
                is_async=True,
            )
            endpoint_names.append(endpoint.async_fn_name)
            body.append(async_pag_fn)
            import_collector.add_imports(async_pag_imports)

            # Sync iterator function
            iter_fn_name = f'{endpoint.sync_fn_name}_iter'
            iter_fn, iter_imports = build_standalone_paginated_iter_fn(
                fn_name=iter_fn_name,
                method=endpoint.method,
                path=endpoint.path,
                parameters=endpoint.parameters,
                request_body_info=endpoint.request_body,
                response_type=endpoint.response_type,
                pagination_style=pag_config.style,
                pagination_config=pag_dict,
                item_type_ast=item_type_ast,
                docs=endpoint.description,
                is_async=False,
            )
            endpoint_names.append(iter_fn_name)
            body.append(iter_fn)
            import_collector.add_imports(iter_imports)

            # Async iterator function
            async_iter_fn_name = f'{endpoint.async_fn_name}_iter'
            async_iter_fn, async_iter_imports = build_standalone_paginated_iter_fn(
                fn_name=async_iter_fn_name,
                method=endpoint.method,
                path=endpoint.path,
                parameters=endpoint.parameters,
                request_body_info=endpoint.request_body,
                response_type=endpoint.response_type,
                pagination_style=pag_config.style,
                pagination_config=pag_dict,
                item_type_ast=item_type_ast,
                docs=endpoint.description,
                is_async=True,
            )
            endpoint_names.append(async_iter_fn_name)
            body.append(async_iter_fn)
            import_collector.add_imports(async_iter_imports)

            # Generate paginated DataFrame methods if dataframe is enabled
            # For paginated endpoints, we know they return lists, so check config directly

            if self.dataframe_config and self.dataframe_config.enabled:
                # Check if endpoint is explicitly disabled
                endpoint_df_config = self.dataframe_config.endpoints.get(
                    endpoint.sync_fn_name
                )
                if endpoint_df_config and endpoint_df_config.enabled is False:
                    pass  # Skip DataFrame generation for this endpoint
                elif self.dataframe_config.pandas:
                    generated_paginated_df = True
                    has_dataframe_methods = True
                    # Sync pandas paginated method
                    pandas_fn_name = f'{endpoint.sync_fn_name}_df'
                    pandas_fn, pandas_imports = (
                        build_standalone_paginated_dataframe_fn(
                            fn_name=pandas_fn_name,
                            method=endpoint.method,
                            path=endpoint.path,
                            parameters=endpoint.parameters,
                            request_body_info=endpoint.request_body,
                            response_type=endpoint.response_type,
                            pagination_style=pag_config.style,
                            pagination_config=pag_dict,
                            library='pandas',
                            item_type_ast=item_type_ast,
                            docs=endpoint.description,
                            is_async=False,
                        )
                    )
                    endpoint_names.append(pandas_fn_name)
                    body.append(pandas_fn)
                    import_collector.add_imports(pandas_imports)

                    # Async pandas paginated method
                    async_pandas_fn_name = f'{endpoint.async_fn_name}_df'
                    async_pandas_fn, async_pandas_imports = (
                        build_standalone_paginated_dataframe_fn(
                            fn_name=async_pandas_fn_name,
                            method=endpoint.method,
                            path=endpoint.path,
                            parameters=endpoint.parameters,
                            request_body_info=endpoint.request_body,
                            response_type=endpoint.response_type,
                            pagination_style=pag_config.style,
                            pagination_config=pag_dict,
                            library='pandas',
                            item_type_ast=item_type_ast,
                            docs=endpoint.description,
                            is_async=True,
                        )
                    )
                    endpoint_names.append(async_pandas_fn_name)
                    body.append(async_pandas_fn)
                    import_collector.add_imports(async_pandas_imports)

                # NOTE(review): this is a plain `if`, not part of the
                # disabled/pandas `elif` chain above, so polars helpers are
                # still emitted even when the endpoint's DataFrame config is
                # explicitly disabled — confirm this is intended.
                if self.dataframe_config.polars:
                    generated_paginated_df = True
                    has_dataframe_methods = True
                    # Sync polars paginated method
                    polars_fn_name = f'{endpoint.sync_fn_name}_pl'
                    polars_fn, polars_imports = (
                        build_standalone_paginated_dataframe_fn(
                            fn_name=polars_fn_name,
                            method=endpoint.method,
                            path=endpoint.path,
                            parameters=endpoint.parameters,
                            request_body_info=endpoint.request_body,
                            response_type=endpoint.response_type,
                            pagination_style=pag_config.style,
                            pagination_config=pag_dict,
                            library='polars',
                            item_type_ast=item_type_ast,
                            docs=endpoint.description,
                            is_async=False,
                        )
                    )
                    endpoint_names.append(polars_fn_name)
                    body.append(polars_fn)
                    import_collector.add_imports(polars_imports)

                    # Async polars paginated method
                    async_polars_fn_name = f'{endpoint.async_fn_name}_pl'
                    async_polars_fn, async_polars_imports = (
                        build_standalone_paginated_dataframe_fn(
                            fn_name=async_polars_fn_name,
                            method=endpoint.method,
                            path=endpoint.path,
                            parameters=endpoint.parameters,
                            request_body_info=endpoint.request_body,
                            response_type=endpoint.response_type,
                            pagination_style=pag_config.style,
                            pagination_config=pag_dict,
                            library='polars',
                            item_type_ast=item_type_ast,
                            docs=endpoint.description,
                            is_async=True,
                        )
                    )
                    endpoint_names.append(async_polars_fn_name)
                    body.append(async_polars_fn)
                    import_collector.add_imports(async_polars_imports)
        else:
            # Build sync standalone function
            sync_fn, sync_imports = build_standalone_endpoint_fn(
                fn_name=endpoint.sync_fn_name,
                method=endpoint.method,
                path=endpoint.path,
                parameters=endpoint.parameters,
                request_body_info=endpoint.request_body,
                response_type=endpoint.response_type,
                response_infos=endpoint.response_infos,
                docs=endpoint.description,
                is_async=False,
            )
            endpoint_names.append(endpoint.sync_fn_name)
            body.append(sync_fn)
            import_collector.add_imports(sync_imports)

            # Build async standalone function
            async_fn, async_imports = build_standalone_endpoint_fn(
                fn_name=endpoint.async_fn_name,
                method=endpoint.method,
                path=endpoint.path,
                parameters=endpoint.parameters,
                request_body_info=endpoint.request_body,
                response_type=endpoint.response_type,
                response_infos=endpoint.response_infos,
                docs=endpoint.description,
                is_async=True,
            )
            endpoint_names.append(endpoint.async_fn_name)
            body.append(async_fn)
            import_collector.add_imports(async_imports)

        # Generate DataFrame methods if configured
        # Skip if paginated DataFrame methods were already generated for this endpoint
        if (
            self.dataframe_config
            and self.dataframe_config.enabled
            and not generated_paginated_df
        ):
            df_config = get_dataframe_config_for_endpoint(
                endpoint, self.dataframe_config
            )

            if df_config.generate_pandas:
                has_dataframe_methods = True
                pandas_fn_name = f'{endpoint.sync_fn_name}_df'
                pandas_fn, pandas_imports = build_standalone_dataframe_fn(
                    fn_name=pandas_fn_name,
                    method=endpoint.method,
                    path=endpoint.path,
                    parameters=endpoint.parameters,
                    request_body_info=endpoint.request_body,
                    library='pandas',
                    default_path=df_config.path,
                    docs=endpoint.description,
                    is_async=False,
                )
                endpoint_names.append(pandas_fn_name)
                body.append(pandas_fn)
                import_collector.add_imports(pandas_imports)

                async_pandas_fn_name = f'{endpoint.async_fn_name}_df'
                async_pandas_fn, async_pandas_imports = (
                    build_standalone_dataframe_fn(
                        fn_name=async_pandas_fn_name,
                        method=endpoint.method,
                        path=endpoint.path,
                        parameters=endpoint.parameters,
                        request_body_info=endpoint.request_body,
                        library='pandas',
                        default_path=df_config.path,
                        docs=endpoint.description,
                        is_async=True,
                    )
                )
                endpoint_names.append(async_pandas_fn_name)
                body.append(async_pandas_fn)
                import_collector.add_imports(async_pandas_imports)

            if df_config.generate_polars:
                has_dataframe_methods = True
                polars_fn_name = f'{endpoint.sync_fn_name}_pl'
                polars_fn, polars_imports = build_standalone_dataframe_fn(
                    fn_name=polars_fn_name,
                    method=endpoint.method,
                    path=endpoint.path,
                    parameters=endpoint.parameters,
                    request_body_info=endpoint.request_body,
                    library='polars',
                    default_path=df_config.path,
                    docs=endpoint.description,
                    is_async=False,
                )
                endpoint_names.append(polars_fn_name)
                body.append(polars_fn)
                import_collector.add_imports(polars_imports)

                async_polars_fn_name = f'{endpoint.async_fn_name}_pl'
                async_polars_fn, async_polars_imports = (
                    build_standalone_dataframe_fn(
                        fn_name=async_polars_fn_name,
                        method=endpoint.method,
                        path=endpoint.path,
                        parameters=endpoint.parameters,
                        request_body_info=endpoint.request_body,
                        library='polars',
                        default_path=df_config.path,
                        docs=endpoint.description,
                        is_async=True,
                    )
                )
                endpoint_names.append(async_polars_fn_name)
                body.append(async_polars_fn)
                import_collector.add_imports(async_polars_imports)

    # Add __all__ export
    body.insert(0, _all(sorted(endpoint_names)))

    # Add model imports
    model_names = self._collect_used_model_names(endpoints)
    if model_names:
        model_import = self._create_model_import(model_names, module_path)
        body.insert(0, model_import)

    # Add Client import
    client_import = self._create_client_import(module_path)
    body.insert(0, client_import)

    # Add TYPE_CHECKING block for DataFrame type hints if needed
    if has_dataframe_methods:
        import_collector.add_imports({'typing': {'TYPE_CHECKING'}})
        type_checking_block = ast.If(
            test=_name('TYPE_CHECKING'),
            body=[
                ast.Import(names=[ast.alias(name='pandas', asname='pd')]),
                ast.Import(names=[ast.alias(name='polars', asname='pl')]),
            ],
            orelse=[],
        )
        body.insert(0, type_checking_block)

        # NOTE(review): level=1 assumes this module sits directly next to
        # _dataframe.py; nested modules would need a deeper level (compare
        # _create_client_import) — confirm nested layouts are handled.
        dataframe_import = ast.ImportFrom(
            module='_dataframe',
            names=[
                ast.alias(name='to_pandas', asname=None),
                ast.alias(name='to_polars', asname=None),
            ],
            level=1,
        )
        body.insert(0, dataframe_import)

    # Add pagination imports if needed
    if has_pagination_methods:
        import_collector.add_imports(
            {'collections.abc': {'Iterator', 'AsyncIterator'}}
        )
        # NOTE(review): same fixed level=1 concern as the _dataframe import.
        pagination_import = ast.ImportFrom(
            module='_pagination',
            names=[
                ast.alias(name='paginate_offset', asname=None),
                ast.alias(name='paginate_offset_async', asname=None),
                ast.alias(name='paginate_cursor', asname=None),
                ast.alias(name='paginate_cursor_async', asname=None),
                ast.alias(name='paginate_page', asname=None),
                ast.alias(name='paginate_page_async', asname=None),
                ast.alias(name='iterate_offset', asname=None),
                ast.alias(name='iterate_offset_async', asname=None),
                ast.alias(name='iterate_cursor', asname=None),
                ast.alias(name='iterate_cursor_async', asname=None),
                ast.alias(name='iterate_page', asname=None),
                ast.alias(name='iterate_page_async', asname=None),
                ast.alias(name='extract_path', asname=None),
            ],
            level=1,
        )
        body.insert(0, pagination_import)

    # Add all other imports at the beginning
    for import_stmt in import_collector.to_ast():
        body.insert(0, import_stmt)

    # Write the file
    file_path = UPath(file_path)
    file_path.parent.mkdir(parents=True, exist_ok=True)
    write_mod(body, file_path)

    return endpoint_names
|
|
1135
|
+
|
|
1136
|
+
def _get_item_type_ast(self, endpoint: Endpoint) -> ast.expr | None:
|
|
1137
|
+
"""Extract the item type AST from a list response type.
|
|
1138
|
+
|
|
1139
|
+
For example, if response_type is list[User], returns the AST for User.
|
|
1140
|
+
|
|
1141
|
+
Args:
|
|
1142
|
+
endpoint: The endpoint to check.
|
|
1143
|
+
|
|
1144
|
+
Returns:
|
|
1145
|
+
The AST expression for the item type, or None if not a list type.
|
|
1146
|
+
"""
|
|
1147
|
+
if not endpoint.response_type or not endpoint.response_type.annotation_ast:
|
|
1148
|
+
return None
|
|
1149
|
+
|
|
1150
|
+
ann = endpoint.response_type.annotation_ast
|
|
1151
|
+
if isinstance(ann, ast.Subscript):
|
|
1152
|
+
if isinstance(ann.value, ast.Name) and ann.value.id == 'list':
|
|
1153
|
+
return ann.slice
|
|
1154
|
+
|
|
1155
|
+
return None
|
|
1156
|
+
|
|
1157
|
+
def _create_client_import(
|
|
1158
|
+
self, module_path: list[str] | None = None
|
|
1159
|
+
) -> ast.ImportFrom:
|
|
1160
|
+
"""Create import statement for the Client class.
|
|
1161
|
+
|
|
1162
|
+
For flat structure, all files are in the same directory as client.py,
|
|
1163
|
+
so we always use level=1 (single dot relative import).
|
|
1164
|
+
|
|
1165
|
+
For nested structure, we need to go up the directory tree based on
|
|
1166
|
+
the actual file location:
|
|
1167
|
+
- module_path=['loot'] -> file at output_dir/loot.py -> level=1
|
|
1168
|
+
- module_path=['api', 'users'] -> file at output_dir/api/users.py -> level=2
|
|
1169
|
+
"""
|
|
1170
|
+
if self._is_flat:
|
|
1171
|
+
# Flat structure: all files in same directory as client.py
|
|
1172
|
+
level = 1
|
|
1173
|
+
else:
|
|
1174
|
+
# Nested structure: actual directory depth is len(module_path) - 1
|
|
1175
|
+
# because the last element is the filename, not a directory
|
|
1176
|
+
# e.g., ['loot'] -> depth 0 -> level 1
|
|
1177
|
+
# e.g., ['api', 'users'] -> depth 1 -> level 2
|
|
1178
|
+
depth = len(module_path) - 1 if module_path and len(module_path) > 1 else 0
|
|
1179
|
+
level = depth + 1
|
|
1180
|
+
|
|
1181
|
+
return ast.ImportFrom(
|
|
1182
|
+
module='client',
|
|
1183
|
+
names=[ast.alias(name='Client', asname=None)],
|
|
1184
|
+
level=level,
|
|
1185
|
+
)
|
|
1186
|
+
|
|
1187
|
+
def _collect_used_model_names(self, endpoints: list[Endpoint]) -> set[str]:
    """Collect model names used in the given endpoints.

    Thin wrapper that delegates to
    :func:`otterapi.codegen.types.collect_used_model_names`, scoped to
    this emitter's generated-type registry (``self._typegen_types``).
    """
    # Imported lazily — presumably to avoid a circular import between
    # codegen submodules; TODO confirm.
    from otterapi.codegen.types import collect_used_model_names

    return collect_used_model_names(endpoints, self._typegen_types)
|
|
1192
|
+
|
|
1193
|
+
def _create_model_import(
|
|
1194
|
+
self, model_names: set[str], module_path: list[str] | None = None
|
|
1195
|
+
) -> ast.ImportFrom:
|
|
1196
|
+
"""Create import statement for models.
|
|
1197
|
+
|
|
1198
|
+
For flat structure, all files are in the same directory as models.py,
|
|
1199
|
+
so we always use level=1 (single dot relative import).
|
|
1200
|
+
|
|
1201
|
+
For nested structure, we need to go up the directory tree based on
|
|
1202
|
+
the actual file location:
|
|
1203
|
+
- module_path=['loot'] -> file at output_dir/loot.py -> level=1
|
|
1204
|
+
- module_path=['api', 'users'] -> file at output_dir/api/users.py -> level=2
|
|
1205
|
+
"""
|
|
1206
|
+
if self._is_flat:
|
|
1207
|
+
# Flat structure: all files in same directory as models.py
|
|
1208
|
+
level = 1
|
|
1209
|
+
else:
|
|
1210
|
+
# Nested structure: actual directory depth is len(module_path) - 1
|
|
1211
|
+
# because the last element is the filename, not a directory
|
|
1212
|
+
# e.g., ['loot'] -> depth 0 -> level 1
|
|
1213
|
+
# e.g., ['api', 'users'] -> depth 1 -> level 2
|
|
1214
|
+
depth = len(module_path) - 1 if module_path and len(module_path) > 1 else 0
|
|
1215
|
+
level = depth + 1
|
|
1216
|
+
|
|
1217
|
+
return ast.ImportFrom(
|
|
1218
|
+
module='models',
|
|
1219
|
+
names=[ast.alias(name=name, asname=None) for name in sorted(model_names)],
|
|
1220
|
+
level=level,
|
|
1221
|
+
)
|
|
1222
|
+
|
|
1223
|
+
def _emit_flat_init(self, all_exports: dict[str, list[str]]) -> None:
    """Emit ``__init__.py`` for the flat layout.

    Re-exports every endpoint function from its sibling module via
    single-dot relative imports, sorted by module name and then by
    function name for deterministic output, and delegates the shared
    tail (client/model exports, ``__all__``, writing) to
    :meth:`_emit_root_init_base`.
    """
    stmts: list[ast.stmt] = []
    exported: list[str] = []

    for module_name, fn_names in sorted(all_exports.items()):
        aliases = [ast.alias(name=n, asname=None) for n in sorted(fn_names)]
        stmts.append(ast.ImportFrom(module=module_name, names=aliases, level=1))
        exported.extend(fn_names)

    if exported:
        stmts.insert(0, _all(sorted(exported)))

    self._emit_root_init_base(stmts)
|
|
1245
|
+
|
|
1246
|
+
def _emit_nested_inits(
    self, tree: ModuleTree, directories: set[tuple[str, ...]]
) -> None:
    """Emit ``__init__.py`` files for the nested layout.

    Deepest directories are processed first (sorted by path length,
    descending), then the root ``__init__.py`` is written last.
    """
    for parts in sorted(directories, key=len, reverse=True):
        package_dir = self.output_dir / '/'.join(parts)
        self._emit_directory_init(package_dir, list(parts))

    self._emit_root_init(tree)
|
|
1255
|
+
|
|
1256
|
+
def _emit_directory_init(
    self, dir_path: Path | UPath, module_path: list[str]
) -> None:
    """Emit ``__init__.py`` for one nested package directory.

    Re-exports only the *direct child* modules of this directory
    (deeper descendants are re-exported by their own packages). If the
    directory has no direct children, an empty ``__init__.py`` is
    touched so the package stays importable.
    """
    prefix_len = len(module_path)
    stmts: list[ast.stmt] = []
    exported: list[str] = []

    for emitted in self._emitted_modules:
        emitted_path = emitted.module_path
        # A direct child is exactly one element longer and shares the
        # directory's full path as a prefix.
        is_direct_child = (
            len(emitted_path) == prefix_len + 1
            and emitted_path[:prefix_len] == module_path
        )
        if is_direct_child:
            aliases = [
                ast.alias(name=n, asname=None)
                for n in sorted(emitted.endpoint_names)
            ]
            stmts.append(
                ast.ImportFrom(
                    module=emitted_path[prefix_len],
                    names=aliases,
                    level=1,
                )
            )
            exported.extend(emitted.endpoint_names)

    if exported:
        stmts.insert(0, _all(sorted(exported)))

    init_path = UPath(dir_path) / '__init__.py'
    if stmts:
        write_mod(stmts, init_path)
    else:
        # Keep the package importable even when it exports nothing.
        init_path.touch()
|
|
1290
|
+
|
|
1291
|
+
def _emit_root_init(self, tree: ModuleTree) -> None:
    """Emit the root __init__.py file.

    Top-level modules (module_path of length 1) are re-exported by
    explicit name; nested packages (length > 1) are star-imported from
    their top-level package. Each module/package is imported at most
    once (tracked via ``imported_modules``). A second pass collects the
    endpoint names of nested modules into ``all_names`` so they appear
    in ``__all__`` even though they arrive via star imports.
    """
    body: list[ast.stmt] = []
    all_names: list[str] = []
    # Guards against emitting duplicate imports for the same module name.
    # NOTE(review): a flat module and a nested package sharing the same
    # top-level name would collide in this set — confirm that cannot occur.
    imported_modules: set[str] = set()

    for emitted in self._emitted_modules:
        if len(emitted.module_path) == 1:
            module_name = emitted.module_path[0]
            if module_name not in imported_modules:
                body.append(
                    ast.ImportFrom(
                        module=module_name,
                        names=[
                            ast.alias(name=name, asname=None)
                            for name in sorted(emitted.endpoint_names)
                        ],
                        level=1,
                    )
                )
                all_names.extend(emitted.endpoint_names)
                imported_modules.add(module_name)
        elif len(emitted.module_path) > 1:
            top_module = emitted.module_path[0]
            if top_module not in imported_modules:
                # The leading dot is embedded in the module string with
                # level=0 (unparses to `from .pkg import *`), unlike the
                # level=1 style used elsewhere — NOTE(review): quirky but
                # apparently equivalent; confirm ast.unparse output.
                body.append(
                    ast.ImportFrom(
                        module=f'.{top_module}',
                        names=[ast.alias(name='*', asname=None)],
                        level=0,
                    )
                )
                imported_modules.add(top_module)

    # Second pass: star-imported names still need to be listed in __all__.
    for emitted in self._emitted_modules:
        if len(emitted.module_path) > 1:
            all_names.extend(emitted.endpoint_names)

    self._emit_root_init_base(body, all_names)
|
|
1330
|
+
|
|
1331
|
+
def _emit_root_init_base(
    self, body: list[ast.stmt], all_names: list[str] | None = None
) -> None:
    """Write the shared tail of the root ``__init__.py``.

    Appends re-exports for the ``Client`` class, the generated base
    client, and every generated model to *body*, prepends a
    deduplicated, sorted ``__all__``, and writes the file to
    ``output_dir/__init__.py``.

    Args:
        body: Statements accumulated by the caller; extended in place.
        all_names: Names already exported by the caller, if any.
    """
    exported = [] if all_names is None else all_names

    def _relative_import(module: str, names: list[str]) -> ast.ImportFrom:
        # Single-dot relative import from a sibling module.
        return ast.ImportFrom(
            module=module,
            names=[ast.alias(name=n, asname=None) for n in names],
            level=1,
        )

    # Re-export the user-facing Client class.
    # NOTE(review): exports the literal name 'Client' while the base
    # client below derives from self.client_class_name — confirm these
    # stay consistent when client_class_name != 'Client'.
    body.append(_relative_import('client', ['Client']))
    exported.append('Client')

    # Re-export the generated base client.
    base_client_name = f'Base{self.client_class_name}'
    body.append(_relative_import('_client', [base_client_name]))
    exported.append(base_client_name)

    # Re-export every generated model, alphabetically.
    model_names = self._get_model_names()
    if model_names:
        body.append(_relative_import('models', sorted(model_names)))
        exported.extend(model_names)

    # Deduplicated, sorted __all__ goes first.
    public_names = sorted(set(exported))
    if public_names:
        body.insert(0, _all(public_names))

    init_path = self.output_dir / '__init__.py'
    if body:
        write_mod(body, init_path)
|
|
1383
|
+
|
|
1384
|
+
def _get_model_names(self) -> list[str]:
|
|
1385
|
+
"""Get model names from the typegen_types."""
|
|
1386
|
+
if not self._typegen_types:
|
|
1387
|
+
return []
|
|
1388
|
+
|
|
1389
|
+
return [
|
|
1390
|
+
type_.name
|
|
1391
|
+
for type_ in self._typegen_types.values()
|
|
1392
|
+
if type_.name and type_.implementation_ast
|
|
1393
|
+
]
|
|
1394
|
+
|
|
1395
|
+
def get_emitted_modules(self) -> list['EmittedModule']:
    """Return a shallow copy of every module emitted so far.

    A copy is returned so callers cannot mutate the emitter's internal
    bookkeeping list.
    """
    return list(self._emitted_modules)
|