otterapi 0.0.5__py3-none-any.whl → 0.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- README.md +581 -8
- otterapi/__init__.py +73 -0
- otterapi/cli.py +327 -29
- otterapi/codegen/__init__.py +115 -0
- otterapi/codegen/ast_utils.py +134 -5
- otterapi/codegen/client.py +1271 -0
- otterapi/codegen/codegen.py +1736 -0
- otterapi/codegen/dataframes.py +392 -0
- otterapi/codegen/emitter.py +473 -0
- otterapi/codegen/endpoints.py +2597 -343
- otterapi/codegen/pagination.py +1026 -0
- otterapi/codegen/schema.py +593 -0
- otterapi/codegen/splitting.py +1397 -0
- otterapi/codegen/types.py +1345 -0
- otterapi/codegen/utils.py +180 -1
- otterapi/config.py +1017 -24
- otterapi/exceptions.py +231 -0
- otterapi/openapi/__init__.py +46 -0
- otterapi/openapi/v2/__init__.py +86 -0
- otterapi/openapi/v2/spec.json +1607 -0
- otterapi/openapi/v2/v2.py +1776 -0
- otterapi/openapi/v3/__init__.py +131 -0
- otterapi/openapi/v3/spec.json +1651 -0
- otterapi/openapi/v3/v3.py +1557 -0
- otterapi/openapi/v3_1/__init__.py +133 -0
- otterapi/openapi/v3_1/spec.json +1411 -0
- otterapi/openapi/v3_1/v3_1.py +798 -0
- otterapi/openapi/v3_2/__init__.py +133 -0
- otterapi/openapi/v3_2/spec.json +1666 -0
- otterapi/openapi/v3_2/v3_2.py +777 -0
- otterapi/tests/__init__.py +3 -0
- otterapi/tests/fixtures/__init__.py +455 -0
- otterapi/tests/test_ast_utils.py +680 -0
- otterapi/tests/test_codegen.py +610 -0
- otterapi/tests/test_dataframe.py +1038 -0
- otterapi/tests/test_exceptions.py +493 -0
- otterapi/tests/test_openapi_support.py +616 -0
- otterapi/tests/test_openapi_upgrade.py +215 -0
- otterapi/tests/test_pagination.py +1101 -0
- otterapi/tests/test_splitting_config.py +319 -0
- otterapi/tests/test_splitting_integration.py +427 -0
- otterapi/tests/test_splitting_resolver.py +512 -0
- otterapi/tests/test_splitting_tree.py +525 -0
- otterapi-0.0.6.dist-info/METADATA +627 -0
- otterapi-0.0.6.dist-info/RECORD +48 -0
- {otterapi-0.0.5.dist-info → otterapi-0.0.6.dist-info}/WHEEL +1 -1
- otterapi/codegen/generator.py +0 -358
- otterapi/codegen/openapi_processor.py +0 -27
- otterapi/codegen/type_generator.py +0 -559
- otterapi-0.0.5.dist-info/METADATA +0 -54
- otterapi-0.0.5.dist-info/RECORD +0 -16
- {otterapi-0.0.5.dist-info → otterapi-0.0.6.dist-info}/entry_points.txt +0 -0
otterapi/codegen/pagination.py
@@ -0,0 +1,1026 @@
"""Pagination utilities for OtterAPI code generation.

This module provides utilities for:
- Generating the _pagination.py utility file for runtime pagination
- Determining pagination configuration for endpoints
- Building paginated endpoint functions
"""

import ast
from dataclasses import dataclass
from pathlib import Path
from typing import TYPE_CHECKING

from upath import UPath

if TYPE_CHECKING:
    from otterapi.codegen.types import Type
    from otterapi.config import PaginationConfig

__all__ = [
    'PAGINATION_MODULE_CONTENT',
    'PaginationMethodConfig',
    'generate_pagination_module',
    'get_pagination_imports',
    'get_pagination_type_checking_imports',
    'get_pagination_config_for_endpoint',
    'endpoint_is_paginated',
]


# =============================================================================
# Pagination Module Content
# =============================================================================

PAGINATION_MODULE_CONTENT = '''\
"""Pagination utilities for OtterAPI generated clients."""

from collections.abc import AsyncIterator, Callable, Iterator
from dataclasses import dataclass
from typing import Any, TypeVar

T = TypeVar("T")
PageT = TypeVar("PageT")


@dataclass
class OffsetPaginationConfig:
    """Configuration for offset-based pagination."""

    offset_param: str = "offset"
    limit_param: str = "limit"
    data_path: str | None = None
    total_path: str | None = None


@dataclass
class CursorPaginationConfig:
    """Configuration for cursor-based pagination."""

    cursor_param: str = "cursor"
    limit_param: str = "limit"
    data_path: str | None = None
    next_cursor_path: str | None = None


@dataclass
class PagePaginationConfig:
    """Configuration for page-based pagination."""

    page_param: str = "page"
    per_page_param: str = "per_page"
    data_path: str | None = None
    total_pages_path: str | None = None


def extract_path(data: dict | list, path: str | None) -> Any:
    """Extract nested data using dot notation path.

    Args:
        data: The response data (dict or list).
        path: Dot notation path (e.g., "data.users").

    Returns:
        The extracted data at the specified path.

    Raises:
        KeyError: If the path does not exist in the data.

    Examples:
        >>> extract_path({"data": {"users": [1, 2, 3]}}, "data.users")
        [1, 2, 3]
        >>> extract_path([1, 2, 3], None)
        [1, 2, 3]
    """
    if path is None:
        return data

    current = data
    for key in path.split("."):
        if isinstance(current, dict):
            if key not in current:
                raise KeyError(
                    f"Key '{key}' not found in response. "
                    f"Available keys: {list(current.keys())}. Full path: {path}"
                )
            current = current[key]
        elif isinstance(current, list) and key.isdigit():
            current = current[int(key)]
        else:
            raise KeyError(
                f"Cannot access '{key}' on {type(current).__name__}. "
                f"Full path: {path}"
            )

    return current


def paginate_offset(
    fetch_page: Callable[[int, int], PageT],
    extract_items: Callable[[PageT], list[T]],
    get_total: Callable[[PageT], int | None] | None = None,
    *,
    start_offset: int = 0,
    page_size: int = 100,
    max_items: int | None = None,
) -> list[T]:
    """Generic offset-based pagination that returns all items.

    Args:
        fetch_page: Function that fetches a page given (offset, limit).
        extract_items: Function that extracts items from a page response.
        get_total: Optional function to get total count from response.
        start_offset: Starting offset (default: 0).
        page_size: Items per page (default: 100).
        max_items: Maximum items to return (default: unlimited).

    Returns:
        List of all items.
    """
    all_items: list[T] = []
    current_offset = start_offset

    while True:
        if max_items is not None and len(all_items) >= max_items:
            break

        request_limit = page_size
        if max_items is not None:
            remaining = max_items - len(all_items)
            request_limit = min(page_size, remaining)

        page = fetch_page(current_offset, request_limit)
        items = extract_items(page)

        if not items:
            break

        all_items.extend(items)

        if len(items) < request_limit:
            break

        if get_total is not None:
            total = get_total(page)
            if total is not None and current_offset + len(items) >= total:
                break

        current_offset += len(items)

    if max_items is not None and len(all_items) > max_items:
        return all_items[:max_items]

    return all_items


async def paginate_offset_async(
    fetch_page: Callable[[int, int], Any],  # Returns Awaitable[PageT]
    extract_items: Callable[[PageT], list[T]],
    get_total: Callable[[PageT], int | None] | None = None,
    *,
    start_offset: int = 0,
    page_size: int = 100,
    max_items: int | None = None,
) -> list[T]:
    """Async version of paginate_offset."""
    all_items: list[T] = []
    current_offset = start_offset

    while True:
        if max_items is not None and len(all_items) >= max_items:
            break

        request_limit = page_size
        if max_items is not None:
            remaining = max_items - len(all_items)
            request_limit = min(page_size, remaining)

        page = await fetch_page(current_offset, request_limit)
        items = extract_items(page)

        if not items:
            break

        all_items.extend(items)

        if len(items) < request_limit:
            break

        if get_total is not None:
            total = get_total(page)
            if total is not None and current_offset + len(items) >= total:
                break

        current_offset += len(items)

    if max_items is not None and len(all_items) > max_items:
        return all_items[:max_items]

    return all_items


def iterate_offset(
    fetch_page: Callable[[int, int], PageT],
    extract_items: Callable[[PageT], list[T]],
    get_total: Callable[[PageT], int | None] | None = None,
    *,
    start_offset: int = 0,
    page_size: int = 100,
    max_items: int | None = None,
) -> Iterator[T]:
    """Generic offset-based pagination iterator (streaming).

    Yields items one at a time for memory-efficient processing.

    Args:
        fetch_page: Function that fetches a page given (offset, limit).
        extract_items: Function that extracts items from a page response.
        get_total: Optional function to get total count from response.
        start_offset: Starting offset (default: 0).
        page_size: Items per page (default: 100).
        max_items: Maximum items to yield (default: unlimited).

    Yields:
        Items one at a time.
    """
    current_offset = start_offset
    items_yielded = 0

    while True:
        if max_items is not None and items_yielded >= max_items:
            return

        request_limit = page_size
        if max_items is not None:
            remaining = max_items - items_yielded
            request_limit = min(page_size, remaining)

        page = fetch_page(current_offset, request_limit)
        items = extract_items(page)

        if not items:
            return

        for item in items:
            yield item
            items_yielded += 1

            if max_items is not None and items_yielded >= max_items:
                return

        if len(items) < request_limit:
            return

        if get_total is not None:
            total = get_total(page)
            if total is not None and current_offset + len(items) >= total:
                return

        current_offset += len(items)


async def iterate_offset_async(
    fetch_page: Callable[[int, int], Any],  # Returns Awaitable[PageT]
    extract_items: Callable[[PageT], list[T]],
    get_total: Callable[[PageT], int | None] | None = None,
    *,
    start_offset: int = 0,
    page_size: int = 100,
    max_items: int | None = None,
) -> AsyncIterator[T]:
    """Async version of iterate_offset."""
    current_offset = start_offset
    items_yielded = 0

    while True:
        if max_items is not None and items_yielded >= max_items:
            return

        request_limit = page_size
        if max_items is not None:
            remaining = max_items - items_yielded
            request_limit = min(page_size, remaining)

        page = await fetch_page(current_offset, request_limit)
        items = extract_items(page)

        if not items:
            return

        for item in items:
            yield item
            items_yielded += 1

            if max_items is not None and items_yielded >= max_items:
                return

        if len(items) < request_limit:
            return

        if get_total is not None:
            total = get_total(page)
            if total is not None and current_offset + len(items) >= total:
                return

        current_offset += len(items)


def paginate_cursor(
    fetch_page: Callable[[str | None, int], PageT],
    extract_items: Callable[[PageT], list[T]],
    get_next_cursor: Callable[[PageT], str | None],
    *,
    start_cursor: str | None = None,
    page_size: int = 100,
    max_items: int | None = None,
) -> list[T]:
    """Generic cursor-based pagination that returns all items.

    Args:
        fetch_page: Function that fetches a page given (cursor, limit).
        extract_items: Function that extracts items from a page response.
        get_next_cursor: Function to get next cursor from response.
        start_cursor: Starting cursor (default: None for first page).
        page_size: Items per page (default: 100).
        max_items: Maximum items to return (default: unlimited).

    Returns:
        List of all items.
    """
    all_items: list[T] = []
    current_cursor = start_cursor

    while True:
        if max_items is not None and len(all_items) >= max_items:
            break

        page = fetch_page(current_cursor, page_size)
        items = extract_items(page)

        if not items:
            break

        all_items.extend(items)

        if max_items is not None and len(all_items) >= max_items:
            break

        current_cursor = get_next_cursor(page)
        if not current_cursor:
            break

    if max_items is not None and len(all_items) > max_items:
        return all_items[:max_items]

    return all_items


async def paginate_cursor_async(
    fetch_page: Callable[[str | None, int], Any],  # Returns Awaitable[PageT]
    extract_items: Callable[[PageT], list[T]],
    get_next_cursor: Callable[[PageT], str | None],
    *,
    start_cursor: str | None = None,
    page_size: int = 100,
    max_items: int | None = None,
) -> list[T]:
    """Async version of paginate_cursor."""
    all_items: list[T] = []
    current_cursor = start_cursor

    while True:
        if max_items is not None and len(all_items) >= max_items:
            break

        page = await fetch_page(current_cursor, page_size)
        items = extract_items(page)

        if not items:
            break

        all_items.extend(items)

        if max_items is not None and len(all_items) >= max_items:
            break

        current_cursor = get_next_cursor(page)
        if not current_cursor:
            break

    if max_items is not None and len(all_items) > max_items:
        return all_items[:max_items]

    return all_items


def iterate_cursor(
    fetch_page: Callable[[str | None, int], PageT],
    extract_items: Callable[[PageT], list[T]],
    get_next_cursor: Callable[[PageT], str | None],
    *,
    start_cursor: str | None = None,
    page_size: int = 100,
    max_items: int | None = None,
) -> Iterator[T]:
    """Generic cursor-based pagination iterator (streaming).

    Args:
        fetch_page: Function that fetches a page given (cursor, limit).
        extract_items: Function that extracts items from a page response.
        get_next_cursor: Function to get next cursor from response.
        start_cursor: Starting cursor (default: None for first page).
        page_size: Items per page (default: 100).
        max_items: Maximum items to yield (default: unlimited).

    Yields:
        Items one at a time.
    """
    current_cursor = start_cursor
    items_yielded = 0

    while True:
        if max_items is not None and items_yielded >= max_items:
            return

        page = fetch_page(current_cursor, page_size)
        items = extract_items(page)

        if not items:
            return

        for item in items:
            yield item
            items_yielded += 1

            if max_items is not None and items_yielded >= max_items:
                return

        current_cursor = get_next_cursor(page)
        if not current_cursor:
            return


async def iterate_cursor_async(
    fetch_page: Callable[[str | None, int], Any],  # Returns Awaitable[PageT]
    extract_items: Callable[[PageT], list[T]],
    get_next_cursor: Callable[[PageT], str | None],
    *,
    start_cursor: str | None = None,
    page_size: int = 100,
    max_items: int | None = None,
) -> AsyncIterator[T]:
    """Async version of iterate_cursor."""
    current_cursor = start_cursor
    items_yielded = 0

    while True:
        if max_items is not None and items_yielded >= max_items:
            return

        page = await fetch_page(current_cursor, page_size)
        items = extract_items(page)

        if not items:
            return

        for item in items:
            yield item
            items_yielded += 1

            if max_items is not None and items_yielded >= max_items:
                return

        current_cursor = get_next_cursor(page)
        if not current_cursor:
            return


def iterate_page(
    fetch_page: Callable[[int, int], PageT],
    extract_items: Callable[[PageT], list[T]],
    get_total_pages: Callable[[PageT], int | None] | None = None,
    *,
    start_page: int = 1,
    page_size: int = 100,
    max_items: int | None = None,
    max_pages: int | None = None,
) -> Iterator[T]:
    """Generic page-based pagination iterator (streaming).

    Yields items one at a time for memory-efficient processing.

    Args:
        fetch_page: Function that fetches a page given (page, per_page).
        extract_items: Function that extracts items from a page response.
        get_total_pages: Optional function to get total pages from response.
        start_page: Starting page number (default: 1).
        page_size: Items per page (default: 100).
        max_items: Maximum items to yield (default: unlimited).
        max_pages: Maximum pages to fetch (default: unlimited).

    Yields:
        Items one at a time.
    """
    current_page = start_page
    items_yielded = 0
    pages_fetched = 0

    while True:
        if max_items is not None and items_yielded >= max_items:
            return

        if max_pages is not None and pages_fetched >= max_pages:
            return

        page = fetch_page(current_page, page_size)
        items = extract_items(page)
        pages_fetched += 1

        if not items:
            return

        for item in items:
            yield item
            items_yielded += 1

            if max_items is not None and items_yielded >= max_items:
                return

        if len(items) < page_size:
            return

        if get_total_pages is not None:
            total_pages = get_total_pages(page)
            if total_pages is not None and current_page >= total_pages:
                return

        current_page += 1


async def iterate_page_async(
    fetch_page: Callable[[int, int], Any],  # Returns Awaitable[PageT]
    extract_items: Callable[[PageT], list[T]],
    get_total_pages: Callable[[PageT], int | None] | None = None,
    *,
    start_page: int = 1,
    page_size: int = 100,
    max_items: int | None = None,
    max_pages: int | None = None,
) -> AsyncIterator[T]:
    """Async version of iterate_page."""
    current_page = start_page
    items_yielded = 0
    pages_fetched = 0

    while True:
        if max_items is not None and items_yielded >= max_items:
            return

        if max_pages is not None and pages_fetched >= max_pages:
            return

        page = await fetch_page(current_page, page_size)
        items = extract_items(page)
        pages_fetched += 1

        if not items:
            return

        for item in items:
            yield item
            items_yielded += 1

            if max_items is not None and items_yielded >= max_items:
                return

        if len(items) < page_size:
            return

        if get_total_pages is not None:
            total_pages = get_total_pages(page)
            if total_pages is not None and current_page >= total_pages:
                return

        current_page += 1


def paginate_page(
    fetch_page: Callable[[int, int], PageT],
    extract_items: Callable[[PageT], list[T]],
    get_total_pages: Callable[[PageT], int | None] | None = None,
    *,
    start_page: int = 1,
    page_size: int = 100,
    max_items: int | None = None,
    max_pages: int | None = None,
) -> list[T]:
    """Generic page-based pagination that returns all items.

    Args:
        fetch_page: Function that fetches a page given (page, per_page).
        extract_items: Function that extracts items from a page response.
        get_total_pages: Optional function to get total pages from response.
        start_page: Starting page number (default: 1).
        page_size: Items per page (default: 100).
        max_items: Maximum items to return (default: unlimited).
        max_pages: Maximum pages to fetch (default: unlimited).

    Returns:
        List of all items.
    """
    all_items: list[T] = []
    current_page = start_page
    pages_fetched = 0

    while True:
        if max_items is not None and len(all_items) >= max_items:
            break

        if max_pages is not None and pages_fetched >= max_pages:
            break

        page = fetch_page(current_page, page_size)
        items = extract_items(page)
        pages_fetched += 1

        if not items:
            break

        all_items.extend(items)

        if len(items) < page_size:
            break

        if get_total_pages is not None:
            total_pages = get_total_pages(page)
            if total_pages is not None and current_page >= total_pages:
                break

        current_page += 1

    if max_items is not None and len(all_items) > max_items:
        return all_items[:max_items]

    return all_items


async def paginate_page_async(
    fetch_page: Callable[[int, int], Any],  # Returns Awaitable[PageT]
    extract_items: Callable[[PageT], list[T]],
    get_total_pages: Callable[[PageT], int | None] | None = None,
    *,
    start_page: int = 1,
    page_size: int = 100,
    max_items: int | None = None,
    max_pages: int | None = None,
) -> list[T]:
    """Async version of paginate_page."""
    all_items: list[T] = []
    current_page = start_page
    pages_fetched = 0

    while True:
        if max_items is not None and len(all_items) >= max_items:
            break

        if max_pages is not None and pages_fetched >= max_pages:
            break

        page = await fetch_page(current_page, page_size)
        items = extract_items(page)
        pages_fetched += 1

        if not items:
            break

        all_items.extend(items)

        if len(items) < page_size:
            break

        if get_total_pages is not None:
            total_pages = get_total_pages(page)
            if total_pages is not None and current_page >= total_pages:
                break

        current_page += 1

    if max_items is not None and len(all_items) > max_items:
        return all_items[:max_items]

    return all_items
'''


# =============================================================================
# Pagination Module Generation
# =============================================================================


def generate_pagination_module(output_dir: Path | UPath) -> Path | UPath:
    """Generate the _pagination.py utility module.

    Args:
        output_dir: The output directory where the module should be written.

    Returns:
        The path to the generated file.
    """
    output_dir = UPath(output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)

    file_path = output_dir / '_pagination.py'
    file_path.write_text(PAGINATION_MODULE_CONTENT)

    return file_path


def get_pagination_imports() -> dict[str, set[str]]:
    """Get the imports needed for pagination method generation.

    Returns:
        A dictionary mapping module names to sets of imported names.
    """
    return {
        'typing': {'TYPE_CHECKING'},
        'collections.abc': {'Iterator', 'AsyncIterator'},
    }


def get_pagination_type_checking_imports() -> list[str]:
    """Get the TYPE_CHECKING imports for pagination.

    Returns:
        List of import statements to include inside TYPE_CHECKING block.
    """
    return []


# =============================================================================
# Pagination Configuration
# =============================================================================


@dataclass
class PaginationMethodConfig:
    """Configuration for pagination method generation.

    Attributes:
        style: The pagination style to use.
        offset_param: Name of offset parameter (for offset style).
        limit_param: Name of limit parameter.
        cursor_param: Name of cursor parameter (for cursor style).
        page_param: Name of page parameter (for page style).
        per_page_param: Name of per_page parameter (for page style).
        data_path: JSON path to items array in response.
        total_path: JSON path to total count in response.
        next_cursor_path: JSON path to next cursor in response.
        total_pages_path: JSON path to total pages in response.
        default_page_size: Default page size.
        max_page_size: Maximum page size.
    """

    style: str = 'offset'
    offset_param: str = 'offset'
    limit_param: str = 'limit'
    cursor_param: str = 'cursor'
    page_param: str = 'page'
    per_page_param: str = 'per_page'
    data_path: str | None = None
    total_path: str | None = None
    next_cursor_path: str | None = None
    total_pages_path: str | None = None
    default_page_size: int = 100
    max_page_size: int | None = None


def endpoint_is_paginated(
    endpoint_name: str,
    pagination_config: 'PaginationConfig',
    endpoint_parameters: list | None = None,
) -> bool:
    """Check if an endpoint is configured for pagination.

    Args:
        endpoint_name: The name of the endpoint function.
        pagination_config: The global pagination configuration.
        endpoint_parameters: Optional list of endpoint parameters for auto-detection.

    Returns:
        True if the endpoint should have pagination methods generated.
    """
    if not pagination_config.enabled:
        return False

    should_generate, _ = pagination_config.should_generate_for_endpoint(
        endpoint_name, endpoint_parameters
    )
    return should_generate


def get_pagination_config_for_endpoint(
    endpoint_name: str,
    pagination_config: 'PaginationConfig',
    endpoint_parameters: list | None = None,
) -> PaginationMethodConfig | None:
    """Get the pagination method configuration for an endpoint.

    Args:
        endpoint_name: The name of the endpoint function.
        pagination_config: The global pagination configuration.
        endpoint_parameters: Optional list of endpoint parameters for auto-detection.

    Returns:
        PaginationMethodConfig if pagination is configured, None otherwise.
    """
    if not pagination_config.enabled:
        return None

    should_generate, resolved = pagination_config.should_generate_for_endpoint(
        endpoint_name, endpoint_parameters
    )

    if not should_generate or resolved is None:
        return None

    return PaginationMethodConfig(
        style=resolved.style.value,
        offset_param=resolved.offset_param,
        limit_param=resolved.limit_param,
        cursor_param=resolved.cursor_param,
        page_param=resolved.page_param,
        per_page_param=resolved.per_page_param,
        data_path=resolved.data_path,
        total_path=resolved.total_path,
        next_cursor_path=resolved.next_cursor_path,
        total_pages_path=resolved.total_pages_path,
        default_page_size=resolved.default_page_size,
        max_page_size=resolved.max_page_size,
    )


# =============================================================================
# Pagination Function Building Utilities
# =============================================================================


def get_item_type_from_list_type(response_type: 'Type | None') -> ast.expr | None:
    """Extract the item type from a list response type.

    For example, if response_type is list[User], returns the AST for User.

    Args:
        response_type: The response Type object.

    Returns:
        The AST expression for the item type, or None if not a list type.
    """
    if not response_type or not response_type.annotation_ast:
        return None

    ann = response_type.annotation_ast
    if isinstance(ann, ast.Subscript):
        if isinstance(ann.value, ast.Name) and ann.value.id == 'list':
            return ann.slice

    return None


def build_extract_items_lambda(
    data_path: str | None, attr_name: str | None
) -> ast.expr:
    """Build a lambda expression for extracting items from a page response.

    Args:
        data_path: Optional JSON path to items (e.g., "data.users").
        attr_name: Optional attribute name on response model (e.g., "users").

    Returns:
        AST for a lambda expression like `lambda page: page.users` or
        `lambda page: extract_path(page, "data.users")`.
    """
    page_arg = ast.arg(arg='page', annotation=None)

    if attr_name:
        # lambda page: page.attr_name
        body = ast.Attribute(
            value=ast.Name(id='page', ctx=ast.Load()),
            attr=attr_name,
            ctx=ast.Load(),
        )
    elif data_path:
        # lambda page: extract_path(page, "data_path")
        body = ast.Call(
            func=ast.Name(id='extract_path', ctx=ast.Load()),
            args=[
                ast.Name(id='page', ctx=ast.Load()),
                ast.Constant(value=data_path),
            ],
            keywords=[],
        )
    else:
        # lambda page: page (assume response is the list itself)
        body = ast.Name(id='page', ctx=ast.Load())

    return ast.Lambda(
        args=ast.arguments(
            posonlyargs=[],
            args=[page_arg],
            kwonlyargs=[],
            kw_defaults=[],
            defaults=[],
        ),
        body=body,
    )


def build_get_total_lambda(
    total_path: str | None, attr_name: str | None
) -> ast.expr | None:
    """Build a lambda expression for getting total count from a page response.

    Args:
        total_path: Optional JSON path to total (e.g., "meta.total").
        attr_name: Optional attribute name on response model (e.g., "total").

    Returns:
        AST for a lambda expression or None if no total is available.
    """
    if not total_path and not attr_name:
        return None

    page_arg = ast.arg(arg='page', annotation=None)

    if attr_name:
        # lambda page: page.attr_name
        body = ast.Attribute(
            value=ast.Name(id='page', ctx=ast.Load()),
            attr=attr_name,
            ctx=ast.Load(),
        )
    else:
        # lambda page: extract_path(page, "total_path")
        body = ast.Call(
            func=ast.Name(id='extract_path', ctx=ast.Load()),
            args=[
                ast.Name(id='page', ctx=ast.Load()),
                ast.Constant(value=total_path),
            ],
            keywords=[],
        )

    return ast.Lambda(
        args=ast.arguments(
            posonlyargs=[],
            args=[page_arg],
            kwonlyargs=[],
            kw_defaults=[],
            defaults=[],
        ),
        body=body,
    )


def build_get_next_cursor_lambda(
    next_cursor_path: str | None, attr_name: str | None
) -> ast.expr | None:
    """Build a lambda expression for getting next cursor from a page response.

    Args:
        next_cursor_path: Optional JSON path to next cursor.
        attr_name: Optional attribute name on response model.

    Returns:
        AST for a lambda expression or None if no cursor path is available.
    """
    if not next_cursor_path and not attr_name:
        return None

    page_arg = ast.arg(arg='page', annotation=None)

    if attr_name:
        body = ast.Attribute(
            value=ast.Name(id='page', ctx=ast.Load()),
            attr=attr_name,
            ctx=ast.Load(),
        )
    else:
        body = ast.Call(
            func=ast.Name(id='extract_path', ctx=ast.Load()),
            args=[
                ast.Name(id='page', ctx=ast.Load()),
                ast.Constant(value=next_cursor_path),
            ],
            keywords=[],
        )

    return ast.Lambda(
        args=ast.arguments(
            posonlyargs=[],
            args=[page_arg],
            kwonlyargs=[],
            kw_defaults=[],
            defaults=[],
        ),
        body=body,
    )
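
For context on how the new module is meant to be used: everything between the triple-quoted markers above is written verbatim into a generated client as _pagination.py, and the generated endpoint wrappers drive it with small fetch/extract callbacks. The sketch below reproduces that pattern by hand. Only paginate_offset, iterate_cursor, extract_path and their signatures come from the diff; the httpx calls, the api.example.com endpoints, the data.users/meta.total/next_cursor response shapes, and the my_generated_client package name are illustrative assumptions, not part of otterapi.

# Hand-written sketch of driving the generated _pagination.py helpers.
# API shapes and package names below are made up for illustration.
import httpx

from my_generated_client._pagination import extract_path, iterate_cursor, paginate_offset

BASE_URL = "https://api.example.com"  # hypothetical API


def fetch_users_page(offset: int, limit: int) -> dict:
    """Fetch one page of users via offset/limit query parameters."""
    response = httpx.get(f"{BASE_URL}/users", params={"offset": offset, "limit": limit})
    response.raise_for_status()
    return response.json()


# Collect users across pages, stopping at 500 items even if more exist.
all_users = paginate_offset(
    fetch_users_page,
    extract_items=lambda page: extract_path(page, "data.users"),
    get_total=lambda page: extract_path(page, "meta.total"),
    page_size=100,
    max_items=500,
)


def fetch_events_page(cursor: str | None, limit: int) -> dict:
    """Fetch one page of events via cursor-based pagination."""
    params: dict[str, int | str] = {"limit": limit}
    if cursor is not None:
        params["cursor"] = cursor
    response = httpx.get(f"{BASE_URL}/events", params=params)
    response.raise_for_status()
    return response.json()


# Stream events lazily instead of materialising the whole list in memory.
for event in iterate_cursor(
    fetch_events_page,
    extract_items=lambda page: page["events"],
    get_next_cursor=lambda page: page.get("next_cursor"),
    page_size=100,
):
    print(event["id"])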
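
The module-level helpers (generate_pagination_module, get_pagination_imports, PaginationMethodConfig) sit on the code-generation side and can also be exercised directly. A minimal sketch, assuming otterapi 0.0.6 is installed; the my_client output directory and the config values are made up for illustration.

# Sketch of the codegen-side entry points shown in the diff above.
from pathlib import Path

from otterapi.codegen.pagination import (
    PaginationMethodConfig,
    generate_pagination_module,
    get_pagination_imports,
)

# Write the runtime helper module into a (hypothetical) output package.
output_dir = Path("my_client")
pagination_path = generate_pagination_module(output_dir)
print(pagination_path)  # my_client/_pagination.py

# Extra imports the generated endpoint modules need once pagination is enabled.
print(get_pagination_imports())
# {'typing': {'TYPE_CHECKING'}, 'collections.abc': {'Iterator', 'AsyncIterator'}} (set order may vary)

# Per-endpoint settings travel in PaginationMethodConfig; defaults mirror
# common offset/limit APIs, and unspecified fields keep their defaults.
config = PaginationMethodConfig(style="cursor", cursor_param="after", data_path="data")
print(config.default_page_size)  # 100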