pulse-framework 0.1.62 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pulse/__init__.py +1493 -0
- pulse/_examples.py +29 -0
- pulse/app.py +1086 -0
- pulse/channel.py +607 -0
- pulse/cli/__init__.py +0 -0
- pulse/cli/cmd.py +575 -0
- pulse/cli/dependencies.py +181 -0
- pulse/cli/folder_lock.py +134 -0
- pulse/cli/helpers.py +271 -0
- pulse/cli/logging.py +102 -0
- pulse/cli/models.py +35 -0
- pulse/cli/packages.py +262 -0
- pulse/cli/processes.py +292 -0
- pulse/cli/secrets.py +39 -0
- pulse/cli/uvicorn_log_config.py +87 -0
- pulse/code_analysis.py +38 -0
- pulse/codegen/__init__.py +0 -0
- pulse/codegen/codegen.py +359 -0
- pulse/codegen/templates/__init__.py +0 -0
- pulse/codegen/templates/layout.py +106 -0
- pulse/codegen/templates/route.py +345 -0
- pulse/codegen/templates/routes_ts.py +42 -0
- pulse/codegen/utils.py +20 -0
- pulse/component.py +237 -0
- pulse/components/__init__.py +0 -0
- pulse/components/for_.py +83 -0
- pulse/components/if_.py +86 -0
- pulse/components/react_router.py +94 -0
- pulse/context.py +108 -0
- pulse/cookies.py +322 -0
- pulse/decorators.py +344 -0
- pulse/dom/__init__.py +0 -0
- pulse/dom/elements.py +1024 -0
- pulse/dom/events.py +445 -0
- pulse/dom/props.py +1250 -0
- pulse/dom/svg.py +0 -0
- pulse/dom/tags.py +328 -0
- pulse/dom/tags.pyi +480 -0
- pulse/env.py +178 -0
- pulse/form.py +538 -0
- pulse/helpers.py +541 -0
- pulse/hooks/__init__.py +0 -0
- pulse/hooks/core.py +452 -0
- pulse/hooks/effects.py +88 -0
- pulse/hooks/init.py +668 -0
- pulse/hooks/runtime.py +464 -0
- pulse/hooks/setup.py +254 -0
- pulse/hooks/stable.py +138 -0
- pulse/hooks/state.py +192 -0
- pulse/js/__init__.py +125 -0
- pulse/js/__init__.pyi +115 -0
- pulse/js/_types.py +299 -0
- pulse/js/array.py +339 -0
- pulse/js/console.py +50 -0
- pulse/js/date.py +119 -0
- pulse/js/document.py +145 -0
- pulse/js/error.py +140 -0
- pulse/js/json.py +66 -0
- pulse/js/map.py +97 -0
- pulse/js/math.py +69 -0
- pulse/js/navigator.py +79 -0
- pulse/js/number.py +57 -0
- pulse/js/obj.py +81 -0
- pulse/js/object.py +172 -0
- pulse/js/promise.py +172 -0
- pulse/js/pulse.py +115 -0
- pulse/js/react.py +495 -0
- pulse/js/regexp.py +57 -0
- pulse/js/set.py +124 -0
- pulse/js/string.py +38 -0
- pulse/js/weakmap.py +53 -0
- pulse/js/weakset.py +48 -0
- pulse/js/window.py +205 -0
- pulse/messages.py +202 -0
- pulse/middleware.py +471 -0
- pulse/plugin.py +96 -0
- pulse/proxy.py +242 -0
- pulse/py.typed +0 -0
- pulse/queries/__init__.py +0 -0
- pulse/queries/client.py +609 -0
- pulse/queries/common.py +101 -0
- pulse/queries/effect.py +55 -0
- pulse/queries/infinite_query.py +1418 -0
- pulse/queries/mutation.py +295 -0
- pulse/queries/protocol.py +136 -0
- pulse/queries/query.py +1314 -0
- pulse/queries/store.py +120 -0
- pulse/react_component.py +88 -0
- pulse/reactive.py +1208 -0
- pulse/reactive_extensions.py +1172 -0
- pulse/render_session.py +768 -0
- pulse/renderer.py +584 -0
- pulse/request.py +205 -0
- pulse/routing.py +598 -0
- pulse/serializer.py +279 -0
- pulse/state.py +556 -0
- pulse/test_helpers.py +15 -0
- pulse/transpiler/__init__.py +111 -0
- pulse/transpiler/assets.py +81 -0
- pulse/transpiler/builtins.py +1029 -0
- pulse/transpiler/dynamic_import.py +130 -0
- pulse/transpiler/emit_context.py +49 -0
- pulse/transpiler/errors.py +96 -0
- pulse/transpiler/function.py +611 -0
- pulse/transpiler/id.py +18 -0
- pulse/transpiler/imports.py +341 -0
- pulse/transpiler/js_module.py +336 -0
- pulse/transpiler/modules/__init__.py +33 -0
- pulse/transpiler/modules/asyncio.py +57 -0
- pulse/transpiler/modules/json.py +24 -0
- pulse/transpiler/modules/math.py +265 -0
- pulse/transpiler/modules/pulse/__init__.py +5 -0
- pulse/transpiler/modules/pulse/tags.py +250 -0
- pulse/transpiler/modules/typing.py +63 -0
- pulse/transpiler/nodes.py +1987 -0
- pulse/transpiler/py_module.py +135 -0
- pulse/transpiler/transpiler.py +1100 -0
- pulse/transpiler/vdom.py +256 -0
- pulse/types/__init__.py +0 -0
- pulse/types/event_handler.py +50 -0
- pulse/user_session.py +386 -0
- pulse/version.py +69 -0
- pulse_framework-0.1.62.dist-info/METADATA +198 -0
- pulse_framework-0.1.62.dist-info/RECORD +126 -0
- pulse_framework-0.1.62.dist-info/WHEEL +4 -0
- pulse_framework-0.1.62.dist-info/entry_points.txt +3 -0
pulse/queries/infinite_query.py
@@ -0,0 +1,1418 @@
import asyncio
import datetime as dt
import inspect
import time
from collections import deque
from collections.abc import Awaitable, Callable
from dataclasses import dataclass, field
from typing import (
    Any,
    Generic,
    NamedTuple,
    TypeVar,
    cast,
    overload,
    override,
)

from pulse.context import PulseContext
from pulse.helpers import (
    MISSING,
    Disposable,
    call_flexible,
    later,
    maybe_await,
)
from pulse.queries.common import (
    ActionError,
    ActionResult,
    ActionSuccess,
    OnErrorFn,
    OnSuccessFn,
    QueryKey,
    QueryStatus,
    bind_state,
)
from pulse.queries.query import RETRY_DELAY_DEFAULT, QueryConfig
from pulse.reactive import Computed, Effect, Signal, Untrack
from pulse.reactive_extensions import ReactiveList, unwrap
from pulse.state import InitializableProperty, State

T = TypeVar("T")
TParam = TypeVar("TParam")
TState = TypeVar("TState", bound=State)


class Page(NamedTuple, Generic[T, TParam]):
    """Named tuple representing a page in an infinite query.

    Each page contains the fetched data and the parameter used to fetch it,
    enabling cursor-based or offset-based pagination.

    Attributes:
        data: The fetched page data of type T.
        param: The page parameter (cursor, offset, etc.) used to fetch this page.

    Example:

    ```python
    # Access pages from infinite query result
    for page in state.posts.data:
        print(f"Page param: {page.param}")
        for post in page.data:
            print(post.title)
    ```
    """

    data: T
    param: TParam


# ─────────────────────────────────────────────────────────────────────────────
# Action types for the task queue (pure data)
# ─────────────────────────────────────────────────────────────────────────────


@dataclass
class FetchNext(Generic[T, TParam]):
    """Fetch the next page."""

    fetch_fn: Callable[[TParam], Awaitable[T]]
    observer: "InfiniteQueryResult[T, TParam] | None" = None
    future: "asyncio.Future[ActionResult[Page[T, TParam] | None]]" = field(
        default_factory=asyncio.Future
    )


@dataclass
class FetchPrevious(Generic[T, TParam]):
    """Fetch the previous page."""

    fetch_fn: Callable[[TParam], Awaitable[T]]
    observer: "InfiniteQueryResult[T, TParam] | None" = None
    future: "asyncio.Future[ActionResult[Page[T, TParam] | None]]" = field(
        default_factory=asyncio.Future
    )


@dataclass
class Refetch(Generic[T, TParam]):
    """Refetch all pages."""

    fetch_fn: Callable[[TParam], Awaitable[T]]
    observer: "InfiniteQueryResult[T, TParam] | None" = None
    refetch_page: Callable[[T, int, list[T]], bool] | None = None
    future: "asyncio.Future[ActionResult[list[Page[T, TParam]]]]" = field(
        default_factory=asyncio.Future
    )


@dataclass
class RefetchPage(Generic[T, TParam]):
    """Refetch a single page by param."""

    fetch_fn: Callable[[TParam], Awaitable[T]]
    param: TParam
    observer: "InfiniteQueryResult[T, TParam] | None" = None
    future: "asyncio.Future[ActionResult[T | None]]" = field(
        default_factory=asyncio.Future
    )


Action = (
    FetchNext[T, TParam]
    | FetchPrevious[T, TParam]
    | Refetch[T, TParam]
    | RefetchPage[T, TParam]
)
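# The action classes above are pure data: each one records the fetch function
# to run, the observer (if any) that requested it, and an asyncio.Future that
# the queue processor (_process_queue in InfiniteQuery below) resolves with an
# ActionSuccess or ActionError once the action has been executed. Tagging each
# action with its observer is what lets _cancel_observer_actions drop pending
# work when that observer is disposed.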

@dataclass(slots=True)
class InfiniteQueryConfig(QueryConfig[list[Page[T, TParam]]], Generic[T, TParam]):
    """Configuration for InfiniteQuery. Contains all QueryConfig fields plus infinite query specific options."""

    initial_page_param: TParam
    get_next_page_param: Callable[[list[Page[T, TParam]]], TParam | None]
    get_previous_page_param: Callable[[list[Page[T, TParam]]], TParam | None] | None
    max_pages: int


class InfiniteQuery(Generic[T, TParam], Disposable):
    """Paginated query that stores data as a list of Page(data, param)."""

    key: QueryKey
    cfg: InfiniteQueryConfig[T, TParam]

    @property
    def fn(self) -> Callable[[TParam], Awaitable[T]]:
        """Get the fetch function from the first observer."""
        if len(self._observers) == 0:
            raise RuntimeError(
                f"InfiniteQuery '{self.key}' has no observers. Cannot access fetch function."
            )
        return self._observers[0]._fetch_fn  # pyright: ignore[reportPrivateUsage]

    # Reactive state
    pages: ReactiveList[Page[T, TParam]]
    error: Signal[Exception | None]
    last_updated: Signal[float]
    status: Signal[QueryStatus]
    is_fetching: Signal[bool]
    retries: Signal[int]
    retry_reason: Signal[Exception | None]

    has_next_page: Signal[bool]
    has_previous_page: Signal[bool]
    current_action: "Signal[Action[T, TParam] | None]"

    # Task queue
    _queue: deque[Action[T, TParam]]
    _queue_task: asyncio.Task[None] | None

    _observers: "list[InfiniteQueryResult[T, TParam]]"
    _gc_handle: asyncio.TimerHandle | None

    def __init__(
        self,
        key: QueryKey,
        *,
        initial_page_param: TParam,
        get_next_page_param: Callable[[list[Page[T, TParam]]], TParam | None],
        get_previous_page_param: (
            Callable[[list[Page[T, TParam]]], TParam | None] | None
        ) = None,
        max_pages: int = 0,
        retries: int = 3,
        retry_delay: float = RETRY_DELAY_DEFAULT,
        initial_data: list[Page[T, TParam]] | None | Any = MISSING,
        initial_data_updated_at: float | dt.datetime | None = None,
        gc_time: float = 300.0,
        on_dispose: Callable[[Any], None] | None = None,
    ):
        self.key = key

        self.cfg = InfiniteQueryConfig(
            retries=retries,
            retry_delay=retry_delay,
            initial_data=initial_data,
            initial_data_updated_at=initial_data_updated_at,
            gc_time=gc_time,
            on_dispose=on_dispose,
            initial_page_param=initial_page_param,
            get_next_page_param=get_next_page_param,
            get_previous_page_param=get_previous_page_param,
            max_pages=max_pages,
        )

        initial_pages: list[Page[T, TParam]]
        if initial_data is MISSING:
            initial_pages = []
        else:
            initial_pages = cast(list[Page[T, TParam]], initial_data) or []

        self.pages = ReactiveList(initial_pages)
        self.error = Signal(None, name=f"inf_query.error({key})")
        self.last_updated = Signal(0.0, name=f"inf_query.last_updated({key})")
        if initial_data_updated_at:
            self.set_updated_at(initial_data_updated_at)

        self.status = Signal(
            "loading" if len(initial_pages) == 0 else "success",
            name=f"inf_query.status({key})",
        )
        self.is_fetching = Signal(False, name=f"inf_query.is_fetching({key})")
        self.retries = Signal(0, name=f"inf_query.retries({key})")
        self.retry_reason = Signal(None, name=f"inf_query.retry_reason({key})")

        self.has_next_page = Signal(False, name=f"inf_query.has_next({key})")
        self.has_previous_page = Signal(False, name=f"inf_query.has_prev({key})")
        self.current_action = Signal(None, name=f"inf_query.current_action({key})")

        self._queue = deque()
        self._queue_task = None
        self._observers = []
        self._gc_handle = None

    # ─────────────────────────────────────────────────────────────────────────
    # Commit functions - update state after pages have been modified
    # ─────────────────────────────────────────────────────────────────────────

    async def commit(self):
        """Commit current pages state and run success callbacks."""
        self._commit_sync()

        for obs in self._observers:
            if obs._on_success is not None:  # pyright: ignore[reportPrivateUsage]
                await maybe_await(call_flexible(obs._on_success, self.pages))  # pyright: ignore[reportPrivateUsage]

    async def _commit_error(self, error: Exception):
        """Commit error state and run error callbacks."""
        self._commit_error_sync(error)

        for obs in self._observers:
            if obs._on_error is not None:  # pyright: ignore[reportPrivateUsage]
                await maybe_await(call_flexible(obs._on_error, error))  # pyright: ignore[reportPrivateUsage]

    def _commit_sync(self):
        """Synchronous commit - updates state based on current pages."""
        self._update_has_more()
        self.last_updated.write(time.time())
        self.error.write(None)
        self.status.write("success")
        self.retries.write(0)
        self.retry_reason.write(None)

    def _commit_error_sync(self, error: Exception):
        """Synchronous error commit for set_error (no callbacks)."""
        self.error.write(error)
        self.last_updated.write(time.time())
        self.status.write("error")
        self.is_fetching.write(False)

    def _record_retry(self, reason: Exception):
        """Record a failed retry attempt."""
        self.retries.write(self.retries.read() + 1)
        self.retry_reason.write(reason)

    def _reset_retries(self):
        """Reset retry state at start of operation."""
        self.retries.write(0)
        self.retry_reason.write(None)

    # ─────────────────────────────────────────────────────────────────────────
    # Public API
    # ─────────────────────────────────────────────────────────────────────────

    def set_updated_at(self, updated_at: float | dt.datetime):
        if isinstance(updated_at, dt.datetime):
            updated_at = updated_at.timestamp()
        self.last_updated.write(updated_at)

    def set_initial_data(
        self,
        pages: list[Page[T, TParam]] | Callable[[], list[Page[T, TParam]]],
        updated_at: float | dt.datetime | None = None,
    ):
        """Set initial pages while the query is still loading."""
        if self.status() != "loading":
            return
        value = pages() if callable(pages) else pages
        self.set_data(value, updated_at=updated_at)

    def set_data(
        self,
        pages: list[Page[T, TParam]]
        | Callable[[list[Page[T, TParam]]], list[Page[T, TParam]]],
        updated_at: float | dt.datetime | None = None,
    ):
        """Set pages manually, keeping has_next/prev in sync."""
        new_pages = pages(self.pages) if callable(pages) else pages
        self.pages.clear()
        self.pages.extend(new_pages)
        self._trim_back()
        self._commit_sync()
        if updated_at is not None:
            self.set_updated_at(updated_at)

    def set_error(
        self, error: Exception, *, updated_at: float | dt.datetime | None = None
    ):
        self._commit_error_sync(error)
        if updated_at is not None:
            self.set_updated_at(updated_at)

    async def wait(
        self,
        fetch_fn: Callable[[TParam], Awaitable[T]] | None = None,
        observer: "InfiniteQueryResult[T, TParam] | None" = None,
    ) -> ActionResult[list[Page[T, TParam]]]:
        """Wait for initial data or until queue is empty."""
        # If no data and loading, enqueue initial fetch (unless already processing)
        if len(self.pages) == 0 and self.status() == "loading":
            if self._queue_task is None or self._queue_task.done():
                # Use provided fetch_fn or fall back to first observer's fetch_fn
                fn = fetch_fn if fetch_fn is not None else self.fn
                self._enqueue(Refetch(fetch_fn=fn, observer=observer))
        # Wait for any in-progress queue processing
        if self._queue_task and not self._queue_task.done():
            await self._queue_task
        # Return result based on current state
        if self.status() == "error":
            return ActionError(cast(Exception, self.error()))
        return ActionSuccess(list(self.pages))

    def observe(self, observer: Any):
        self._observers.append(observer)
        self.cancel_gc()
        gc_time = getattr(observer, "_gc_time", 0)
        if gc_time and gc_time > 0:
            self.cfg.gc_time = max(self.cfg.gc_time, gc_time)

    def unobserve(self, observer: "InfiniteQueryResult[T, TParam]"):
        """Unregister an observer. Cancels pending actions. Schedules GC if no observers remain."""
        if observer in self._observers:
            self._observers.remove(observer)

        # Cancel pending actions from this observer
        self._cancel_observer_actions(observer)

        if len(self._observers) == 0:
            self.schedule_gc()

    def invalidate(
        self,
        *,
        cancel_fetch: bool = False,
        refetch_page: Callable[[T, int, list[T]], bool] | None = None,
        fetch_fn: Callable[[TParam], Awaitable[T]] | None = None,
        observer: "InfiniteQueryResult[T, TParam] | None" = None,
    ):
        """Enqueue a refetch. Synchronous - does not wait for completion."""
        if cancel_fetch:
            self._cancel_queue()
        if len(self._observers) > 0:
            # Use provided fetch_fn or fall back to first observer's fetch_fn
            fn = fetch_fn if fetch_fn is not None else self.fn
            self._enqueue(
                Refetch(fetch_fn=fn, observer=observer, refetch_page=refetch_page)
            )

    def schedule_gc(self):
        self.cancel_gc()
        if self.cfg.gc_time > 0:
            self._gc_handle = later(self.cfg.gc_time, self.dispose)
        else:
            self.dispose()

    def cancel_gc(self):
        if self._gc_handle:
            self._gc_handle.cancel()
            self._gc_handle = None

    # ─────────────────────────────────────────────────────────────────────────
    # Page param computation
    # ─────────────────────────────────────────────────────────────────────────

    def compute_next_param(self) -> TParam | None:
        if len(self.pages) == 0:
            return self.cfg.initial_page_param
        return self.cfg.get_next_page_param(self.pages)

    def compute_previous_param(self) -> TParam | None:
        if self.cfg.get_previous_page_param is None:
            return None
        if len(self.pages) == 0:
            return None
        return self.cfg.get_previous_page_param(self.pages)

    def _update_has_more(self):
        if len(self.pages) == 0:
            self.has_next_page.write(False)
            self.has_previous_page.write(self.cfg.get_previous_page_param is not None)
            return
        next_param = self.cfg.get_next_page_param(self.pages)
        prev_param = None
        if self.cfg.get_previous_page_param:
            prev_param = self.cfg.get_previous_page_param(self.pages)
        self.has_next_page.write(next_param is not None)
        self.has_previous_page.write(prev_param is not None)

    # ─────────────────────────────────────────────────────────────────────────
    # Trimming helpers
    # ─────────────────────────────────────────────────────────────────────────

    def _trim_front(self):
        """Trim pages from front when over max_pages."""
        if self.cfg.max_pages and self.cfg.max_pages > 0:
            while len(self.pages) > self.cfg.max_pages:
                self.pages.pop(0)

    def _trim_back(self):
        """Trim pages from back when over max_pages."""
        if self.cfg.max_pages and self.cfg.max_pages > 0:
            while len(self.pages) > self.cfg.max_pages:
                self.pages.pop()

    # ─────────────────────────────────────────────────────────────────────────
    # Task Queue
    # ─────────────────────────────────────────────────────────────────────────

    def _cancel_queue(self):
        """Cancel all pending and in-flight actions."""
        # Cancel pending actions in the queue
        while self._queue:
            action = self._queue.popleft()
            if not action.future.done():
                action.future.cancel()

        # Cancel the currently executing action and task
        current = self.current_action.read()
        if current is not None and not current.future.done():
            current.future.cancel()

        if self._queue_task and not self._queue_task.done():
            self._queue_task.cancel()
            self._queue_task = None

    def _cancel_observer_actions(
        self, observer: "InfiniteQueryResult[T, TParam]"
    ) -> None:
        """Cancel pending actions from a specific observer.

        Note: Does not cancel the currently executing action to avoid disrupting the
        queue processor. The fetch will complete but results will be ignored since
        the observer is disposed.
        """
        # Cancel pending actions from this observer (not the currently executing one)
        remaining: deque[Action[T, TParam]] = deque()
        while self._queue:
            action = self._queue.popleft()
            if action.observer is observer:
                if not action.future.done():
                    action.future.cancel()
            else:
                remaining.append(action)
        self._queue = remaining

    def _enqueue(
        self,
        action: "FetchNext[T, TParam] | FetchPrevious[T, TParam] | Refetch[T, TParam] | RefetchPage[T, TParam]",
        *,
        cancel_fetch: bool = False,
    ) -> asyncio.Future[Any]:
        """Enqueue an action and ensure the processor is running."""
        if cancel_fetch:
            self._cancel_queue()

        self._queue.append(action)
        self._ensure_processor()
        return action.future

    def _ensure_processor(self):
        """Ensure the queue processor task is running."""
        if self._queue_task is None or self._queue_task.done():
            # Create task with no reactive scope to avoid inheriting deps from caller
            with Untrack():
                self._queue_task = asyncio.create_task(self._process_queue())
        return self._queue_task

    async def _process_queue(self):
        """Process queued actions sequentially with retry logic."""
        while self._queue:
            action = self._queue.popleft()

            if action.future.cancelled():
                continue

            # Reset state for new action
            self._reset_retries()
            self.is_fetching.write(True)
            self.current_action.write(action)

            try:
                while True:
                    try:
                        result = await self._execute_action(action)
                        if not action.future.done():
                            action.future.set_result(ActionSuccess(result))
                        break
                    except asyncio.CancelledError:
                        raise
                    except Exception as e:
                        if self.retries.read() < self.cfg.retries:
                            self._record_retry(e)
                            await asyncio.sleep(self.cfg.retry_delay)
                            continue
                        raise
            except asyncio.CancelledError:
                if not action.future.done():
                    action.future.cancel()
                raise
            except Exception as e:
                self.retry_reason.write(e)
                await self._commit_error(e)
                if not action.future.done():
                    action.future.set_result(ActionError(e))
            finally:
                # Only reset state if we're still the current action
                # (not replaced by another action via cancel_fetch)
                if self.current_action.read() is action:
                    self.is_fetching.write(False)
                    self.current_action.write(None)

    async def _execute_action(
        self,
        action: "FetchNext[T, TParam] | FetchPrevious[T, TParam] | Refetch[T, TParam] | RefetchPage[T, TParam]",
    ) -> Any:
        """Execute a single action."""
        if isinstance(action, FetchNext):
            return await self._execute_fetch_next(action)
        elif isinstance(action, FetchPrevious):
            return await self._execute_fetch_previous(action)
        elif isinstance(action, Refetch):
            return await self._execute_refetch_all(action)
        elif isinstance(action, RefetchPage):
            return await self._execute_refetch_one(action)
        else:
            raise TypeError(f"Unknown action type: {type(action)}")

    async def _execute_fetch_next(
        self, action: "FetchNext[T, TParam]"
    ) -> Page[T, TParam] | None:
        next_param = self.compute_next_param()
        if next_param is None:
            self.has_next_page.write(False)
            return None

        page = await action.fetch_fn(next_param)
        page = Page(page, next_param)
        self.pages.append(page)
        self._trim_front()
        await self.commit()
        return page

    async def _execute_fetch_previous(
        self, action: "FetchPrevious[T, TParam]"
    ) -> Page[T, TParam] | None:
        prev_param = self.compute_previous_param()
        if prev_param is None:
            self.has_previous_page.write(False)
            return None

        data = await action.fetch_fn(prev_param)
        page = Page(data, prev_param)
        self.pages.insert(0, page)
        self._trim_back()
        await self.commit()
        return page

    async def _execute_refetch_all(
        self, action: "Refetch[T, TParam]"
    ) -> list[Page[T, TParam]]:
        if len(self.pages) == 0:
            page = await action.fetch_fn(self.cfg.initial_page_param)
            self.pages.append(Page(page, self.cfg.initial_page_param))
            await self.commit()
            return self.pages

        page_param: TParam = self.pages[0].param
        num_existing = len(self.pages)

        for idx in range(num_existing):
            old_page = self.pages[idx]
            should_refetch = True
            if action.refetch_page is not None:
                should_refetch = bool(
                    action.refetch_page(
                        old_page.data, idx, [p.data for p in self.pages]
                    )
                )

            if should_refetch:
                page = await action.fetch_fn(page_param)
            else:
                page = old_page.data
            self.pages[idx] = Page(page, page_param)

            next_param = self.cfg.get_next_page_param(self.pages[: idx + 1])
            if next_param is None:
                # Trim remaining pages if we ended early
                while len(self.pages) > idx + 1:
                    self.pages.pop()
                break
            page_param = next_param

        await self.commit()
        return self.pages

    async def _execute_refetch_one(self, action: "RefetchPage[T, TParam]") -> T | None:
        idx = next(
            (i for i, p in enumerate(self.pages) if p.param == action.param),
            None,
        )
        if idx is None:
            return None

        page = await action.fetch_fn(action.param)
        self.pages[idx] = Page(page, action.param)
        await self.commit()
        return page

    # ─────────────────────────────────────────────────────────────────────────
    # Public fetch API
    # ─────────────────────────────────────────────────────────────────────────

    async def fetch_next_page(
        self,
        fetch_fn: Callable[[TParam], Awaitable[T]] | None = None,
        *,
        observer: "InfiniteQueryResult[T, TParam] | None" = None,
        cancel_fetch: bool = False,
    ) -> ActionResult[Page[T, TParam] | None]:
        """
        Fetch the next page. Queued for sequential execution.

        Note: Prefer calling fetch_next_page() on InfiniteQueryResult to ensure the
        correct fetch function is used. When called directly on InfiniteQuery, uses
        the first observer's fetch function if not provided.
        """
        fn = fetch_fn if fetch_fn is not None else self.fn
        action: FetchNext[T, TParam] = FetchNext(fetch_fn=fn, observer=observer)
        return await self._enqueue(action, cancel_fetch=cancel_fetch)

    async def fetch_previous_page(
        self,
        fetch_fn: Callable[[TParam], Awaitable[T]] | None = None,
        *,
        observer: "InfiniteQueryResult[T, TParam] | None" = None,
        cancel_fetch: bool = False,
    ) -> ActionResult[Page[T, TParam] | None]:
        """
        Fetch the previous page. Queued for sequential execution.

        Note: Prefer calling fetch_previous_page() on InfiniteQueryResult to ensure
        the correct fetch function is used. When called directly on InfiniteQuery,
        uses the first observer's fetch function if not provided.
        """
        fn = fetch_fn if fetch_fn is not None else self.fn
        action: FetchPrevious[T, TParam] = FetchPrevious(fetch_fn=fn, observer=observer)
        return await self._enqueue(action, cancel_fetch=cancel_fetch)

    async def refetch(
        self,
        fetch_fn: Callable[[TParam], Awaitable[T]] | None = None,
        *,
        observer: "InfiniteQueryResult[T, TParam] | None" = None,
        cancel_fetch: bool = False,
        refetch_page: Callable[[T, int, list[T]], bool] | None = None,
    ) -> ActionResult[list[Page[T, TParam]]]:
        """
        Refetch all pages. Queued for sequential execution.

        Note: Prefer calling refetch() on InfiniteQueryResult to ensure the correct
        fetch function is used. When called directly on InfiniteQuery, uses the first
        observer's fetch function if not provided.
        """
        fn = fetch_fn if fetch_fn is not None else self.fn
        action: Refetch[T, TParam] = Refetch(
            fetch_fn=fn, observer=observer, refetch_page=refetch_page
        )
        return await self._enqueue(action, cancel_fetch=cancel_fetch)

    async def refetch_page(
        self,
        param: TParam,
        fetch_fn: Callable[[TParam], Awaitable[T]] | None = None,
        *,
        observer: "InfiniteQueryResult[T, TParam] | None" = None,
        cancel_fetch: bool = False,
    ) -> ActionResult[T | None]:
        """
        Refetch an existing page by its param. Queued for sequential execution.

        Note: Prefer calling refetch_page() on InfiniteQueryResult to ensure the
        correct fetch function is used. When called directly on InfiniteQuery, uses
        the first observer's fetch function if not provided.
        """
        fn = fetch_fn if fetch_fn is not None else self.fn
        action: RefetchPage[T, TParam] = RefetchPage(
            fetch_fn=fn, param=param, observer=observer
        )
        return await self._enqueue(action, cancel_fetch=cancel_fetch)

    @override
    def dispose(self):
        self._cancel_queue()
        if self._queue_task and not self._queue_task.done():
            self._queue_task.cancel()
        if self.cfg.on_dispose:
            self.cfg.on_dispose(self)


def none_if_missing(value: Any):
    return None if value is MISSING else value


class InfiniteQueryResult(Generic[T, TParam], Disposable):
    """Observer wrapper for InfiniteQuery with lifecycle and stale tracking.

    InfiniteQueryResult provides the interface for interacting with paginated
    queries. It manages observation lifecycle, staleness tracking, and exposes
    reactive properties and methods for pagination.

    Attributes:
        data: List of Page objects or None if not loaded.
        pages: List of page data only (without params) or None.
        page_params: List of page parameters only or None.
        error: The last error encountered, or None.
        status: Current QueryStatus ("loading", "success", "error").
        is_loading: Whether status is "loading".
        is_success: Whether status is "success".
        is_error: Whether status is "error".
        is_fetching: Whether any fetch is in progress.
        has_next_page: Whether more pages are available forward.
        has_previous_page: Whether previous pages are available.
        is_fetching_next_page: Whether fetching the next page.
        is_fetching_previous_page: Whether fetching the previous page.

    Example:

    ```python
    # Access infinite query result
    if state.posts.is_loading:
        show_skeleton()
    elif state.posts.data:
        for page in state.posts.data:
            render_posts(page.data)
    if state.posts.has_next_page:
        Button("Load More", on_click=state.posts.fetch_next_page)
    ```
    """

    _query: Computed[InfiniteQuery[T, TParam]]
    _fetch_fn: Callable[[TParam], Awaitable[T]]
    _stale_time: float
    _gc_time: float
    _refetch_interval: float | None
    _keep_previous_data: bool
    _on_success: Callable[[list[Page[T, TParam]]], Awaitable[None] | None] | None
    _on_error: Callable[[Exception], Awaitable[None] | None] | None
    _observe_effect: Effect
    _interval_effect: Effect | None
    _data_computed: Computed[list[Page[T, TParam]] | None]
    _enabled: Signal[bool]
    _fetch_on_mount: bool

    def __init__(
        self,
        query: Computed[InfiniteQuery[T, TParam]],
        fetch_fn: Callable[[TParam], Awaitable[T]],
        stale_time: float = 0.0,
        gc_time: float = 300.0,
        refetch_interval: float | None = None,
        keep_previous_data: bool = False,
        on_success: Callable[[list[Page[T, TParam]]], Awaitable[None] | None]
        | None = None,
        on_error: Callable[[Exception], Awaitable[None] | None] | None = None,
        enabled: bool = True,
        fetch_on_mount: bool = True,
    ):
        self._query = query
        self._fetch_fn = fetch_fn
        self._stale_time = stale_time
        self._gc_time = gc_time
        self._refetch_interval = refetch_interval
        self._keep_previous_data = keep_previous_data
        self._on_success = on_success
        self._on_error = on_error
        self._enabled = Signal(enabled, name=f"inf_query.enabled({query().key})")
        self._fetch_on_mount = fetch_on_mount
        self._interval_effect = None

        def observe_effect():
            q = self._query()
            enabled = self._enabled()

            with Untrack():
                q.observe(self)

                if enabled and fetch_on_mount and self.is_stale():
                    q.invalidate()

            # Return cleanup function that captures the query (old query on key change)
            def cleanup():
                q.unobserve(self)

            return cleanup

        self._observe_effect = Effect(
            observe_effect,
            name=f"inf_query_observe({self._query().key})",
            immediate=True,
        )
        self._data_computed = Computed(
            self._data_computed_fn, name=f"inf_query_data({self._query().key})"
        )

        # Set up interval effect if interval is specified
        if refetch_interval is not None and refetch_interval > 0:
            self._setup_interval_effect(refetch_interval)

    def _setup_interval_effect(self, interval: float):
        """Create an effect that invalidates the query at the specified interval."""

        def interval_fn():
            # Read enabled to make this effect reactive to enabled changes
            if self._enabled():
                self._query().invalidate()

        self._interval_effect = Effect(
            interval_fn,
            name=f"inf_query_interval({self._query().key})",
            interval=interval,
            immediate=True,
        )

    @property
    def status(self) -> QueryStatus:
        return self._query().status()

    @property
    def is_loading(self) -> bool:
        return self.status == "loading"

    @property
    def is_success(self) -> bool:
        return self.status == "success"

    @property
    def is_error(self) -> bool:
        return self.status == "error"

    @property
    def is_fetching(self) -> bool:
        return self._query().is_fetching()

    @property
    def error(self) -> Exception | None:
        return self._query().error.read()

    def _data_computed_fn(
        self, prev: list[Page[T, TParam]] | None
    ) -> list[Page[T, TParam]] | None:
        query = self._query()
        if self._keep_previous_data and query.status() != "success":
            return prev
        # Access pages.version to subscribe to structural changes
        result = unwrap(query.pages) if len(query.pages) > 0 else None
        return result

    @property
    def data(self) -> list[Page[T, TParam]] | None:
        return self._data_computed()

    @property
    def pages(self) -> list[T] | None:
        d = self.data
        return [p.data for p in d] if d else None

    @property
    def page_params(self) -> list[TParam] | None:
        d = self.data
        return [p.param for p in d] if d else None

    @property
    def has_next_page(self) -> bool:
        return self._query().has_next_page()

    @property
    def has_previous_page(self) -> bool:
        return self._query().has_previous_page()

    @property
    def is_fetching_next_page(self) -> bool:
        return isinstance(self._query().current_action(), FetchNext)

    @property
    def is_fetching_previous_page(self) -> bool:
        return isinstance(self._query().current_action(), FetchPrevious)

    def is_stale(self) -> bool:
        if self._stale_time <= 0:
            return False
        query = self._query()
        return (time.time() - query.last_updated.read()) > self._stale_time

    async def fetch_next_page(
        self,
        *,
        cancel_fetch: bool = False,
    ) -> ActionResult[Page[T, TParam] | None]:
        return await self._query().fetch_next_page(
            self._fetch_fn, observer=self, cancel_fetch=cancel_fetch
        )

    async def fetch_previous_page(
        self,
        *,
        cancel_fetch: bool = False,
    ) -> ActionResult[Page[T, TParam] | None]:
        return await self._query().fetch_previous_page(
            self._fetch_fn, observer=self, cancel_fetch=cancel_fetch
        )

    async def fetch_page(
        self,
        page_param: TParam,
        *,
        cancel_fetch: bool = False,
    ) -> ActionResult[T | None]:
        return await self._query().refetch_page(
            page_param,
            fetch_fn=self._fetch_fn,
            observer=self,
            cancel_fetch=cancel_fetch,
        )

    def set_initial_data(
        self,
        pages: list[Page[T, TParam]] | Callable[[], list[Page[T, TParam]]],
        updated_at: float | dt.datetime | None = None,
    ):
        return self._query().set_initial_data(pages, updated_at=updated_at)

    def set_data(
        self,
        pages: list[Page[T, TParam]]
        | Callable[[list[Page[T, TParam]] | None], list[Page[T, TParam]]],
        updated_at: float | dt.datetime | None = None,
    ):
        return self._query().set_data(pages, updated_at=updated_at)

    async def refetch(
        self,
        *,
        cancel_fetch: bool = False,
        refetch_page: Callable[[T, int, list[T]], bool] | None = None,
    ) -> ActionResult[list[Page[T, TParam]]]:
        return await self._query().refetch(
            self._fetch_fn,
            observer=self,
            cancel_fetch=cancel_fetch,
            refetch_page=refetch_page,
        )

    async def wait(self) -> ActionResult[list[Page[T, TParam]]]:
        return await self._query().wait(fetch_fn=self._fetch_fn, observer=self)

    def invalidate(self):
        query = self._query()
        query.invalidate(fetch_fn=self._fetch_fn, observer=self)

    def enable(self):
        self._enabled.write(True)

    def disable(self):
        self._enabled.write(False)

    def set_error(self, error: Exception):
        query = self._query()
        query.set_error(error)

    @override
    def dispose(self):
        """Clean up the result and its observe effect."""
        if self._interval_effect is not None:
            self._interval_effect.dispose()
        self._observe_effect.dispose()


class InfiniteQueryProperty(Generic[T, TParam, TState], InitializableProperty):
    """Descriptor for state-bound infinite queries created by the @infinite_query decorator.

    InfiniteQueryProperty is the return type of the ``@infinite_query`` decorator.
    It acts as a descriptor that creates and manages InfiniteQueryResult instances
    for each State object.

    When accessed on a State instance, returns an InfiniteQueryResult with reactive
    properties for pagination state and methods for fetching pages.

    Required decorators:
        - ``@infinite_query_prop.key``: Define the query key (required).
        - ``@infinite_query_prop.get_next_page_param``: Define how to get next page param.

    Optional decorators:
        - ``@infinite_query_prop.get_previous_page_param``: For bi-directional pagination.
        - ``@infinite_query_prop.on_success``: Handle successful fetch.
        - ``@infinite_query_prop.on_error``: Handle fetch errors.

    Example:

    ```python
    class FeedState(ps.State):
        feed_type: str = "home"

        @ps.infinite_query(initial_page_param=None)
        async def posts(self, cursor: str | None) -> list[Post]:
            return await api.get_posts(cursor=cursor)

        @posts.key
        def _posts_key(self):
            return ("feed", self.feed_type)

        @posts.get_next_page_param
        def _next_cursor(self, pages: list[Page]) -> str | None:
            if not pages:
                return None
            last = pages[-1]
            return last.data[-1].id if last.data else None
    ```
    """

    name: str
    _fetch_fn: "Callable[[TState, TParam], Awaitable[T]]"
    _keep_alive: bool
    _keep_previous_data: bool
    _stale_time: float
    _gc_time: float
    _refetch_interval: float | None
    _retries: int
    _retry_delay: float
    _initial_page_param: TParam
    _get_next_page_param: (
        Callable[[TState, list[Page[T, TParam]]], TParam | None] | None
    )
    _get_previous_page_param: (
        Callable[[TState, list[Page[T, TParam]]], TParam | None] | None
    )
    _max_pages: int
    _key: QueryKey | Callable[[TState], QueryKey] | None
    # Not using OnSuccessFn and OnErrorFn since unions of callables are not well
    # supported in the type system. We just need to be careful to use
    # call_flexible to invoke these functions.
    _on_success_fn: Callable[[TState, list[T]], Any] | None
    _on_error_fn: Callable[[TState, Exception], Any] | None
    _initial_data_updated_at: float | dt.datetime | None
    _enabled: bool
    _fetch_on_mount: bool
    _priv_result: str

    def __init__(
        self,
        name: str,
        fetch_fn: "Callable[[TState, TParam], Awaitable[T]]",
        *,
        initial_page_param: TParam,
        max_pages: int,
        stale_time: float,
        gc_time: float,
        refetch_interval: float | None = None,
        keep_previous_data: bool,
        retries: int,
        retry_delay: float,
        initial_data_updated_at: float | dt.datetime | None = None,
        enabled: bool = True,
        fetch_on_mount: bool = True,
        key: QueryKey | Callable[[TState], QueryKey] | None = None,
    ):
        self.name = name
        self._fetch_fn = fetch_fn
        self._initial_page_param = initial_page_param
        self._get_next_page_param = None
        self._get_previous_page_param = None
        self._max_pages = max_pages
        self._keep_previous_data = keep_previous_data
        self._stale_time = stale_time
        self._gc_time = gc_time
        self._refetch_interval = refetch_interval
        self._retries = retries
        self._retry_delay = retry_delay
        self._on_success_fn = None
        self._on_error_fn = None
        self._key = key
        self._initial_data_updated_at = initial_data_updated_at
        self._enabled = enabled
        self._fetch_on_mount = fetch_on_mount
        self._priv_result = f"__inf_query_{name}"

    def key(self, fn: Callable[[TState], QueryKey]):
        if self._key is not None:
            raise RuntimeError(
                f"Cannot use @{self.name}.key decorator when a key is already provided to @infinite_query(key=...)."
            )
        self._key = fn
        return fn

    def on_success(self, fn: OnSuccessFn[TState, list[T]]):
        if self._on_success_fn is not None:
            raise RuntimeError(
                f"Duplicate on_success() decorator for infinite query '{self.name}'. Only one is allowed."
            )
        self._on_success_fn = fn  # pyright: ignore[reportAttributeAccessIssue]
        return fn

    def on_error(self, fn: OnErrorFn[TState]):
        if self._on_error_fn is not None:
            raise RuntimeError(
                f"Duplicate on_error() decorator for infinite query '{self.name}'. Only one is allowed."
            )
        self._on_error_fn = fn  # pyright: ignore[reportAttributeAccessIssue]
        return fn

    def get_next_page_param(
        self,
        fn: Callable[[TState, list[Page[T, TParam]]], TParam | None],
    ) -> Callable[[TState, list[Page[T, TParam]]], TParam | None]:
        if self._get_next_page_param is not None:
            raise RuntimeError(
                f"Duplicate get_next_page_param() decorator for infinite query '{self.name}'. Only one is allowed."
            )
        self._get_next_page_param = fn
        return fn

    def get_previous_page_param(
        self,
        fn: Callable[[TState, list[Page[T, TParam]]], TParam | None],
    ) -> Callable[[TState, list[Page[T, TParam]]], TParam | None]:
        if self._get_previous_page_param is not None:
            raise RuntimeError(
                f"Duplicate get_previous_page_param() decorator for infinite query '{self.name}'. Only one is allowed."
            )
        self._get_previous_page_param = fn
        return fn

    @override
    def initialize(self, state: Any, name: str) -> InfiniteQueryResult[T, TParam]:
        result: InfiniteQueryResult[T, TParam] | None = getattr(
            state, self._priv_result, None
        )
        if result:
            return result

        if self._get_next_page_param is None:
            raise RuntimeError(
                f"get_next_page_param must be set via @{self.name}.get_next_page_param decorator"
            )

        fetch_fn = bind_state(state, self._fetch_fn)

        next_fn = bind_state(state, self._get_next_page_param)
        prev_fn = (
            bind_state(state, self._get_previous_page_param)
            if self._get_previous_page_param
            else None
        )

        if self._key is None:
            raise RuntimeError(
                f"key is required for infinite query '{self.name}'. Provide a key via @infinite_query(key=...) or @{self.name}.key decorator."
            )
        query = self._resolve_keyed(
            state, fetch_fn, next_fn, prev_fn, self._initial_data_updated_at
        )

        on_success = None
        if self._on_success_fn:
            bound_fn = bind_state(state, self._on_success_fn)

            async def on_success_wrapper(data: list[Page[T, TParam]]):
                await maybe_await(call_flexible(bound_fn, [p.data for p in data]))

            on_success = on_success_wrapper

        result = InfiniteQueryResult(
            query=query,
            fetch_fn=fetch_fn,
            stale_time=self._stale_time,
            keep_previous_data=self._keep_previous_data,
            gc_time=self._gc_time,
            refetch_interval=self._refetch_interval,
            on_success=on_success,
            on_error=bind_state(state, self._on_error_fn)
            if self._on_error_fn
            else None,
            enabled=self._enabled,
            fetch_on_mount=self._fetch_on_mount,
        )

        setattr(state, self._priv_result, result)
        return result

    def _resolve_keyed(
        self,
        state: TState,
        fetch_fn: Callable[[TParam], Awaitable[T]],
        next_fn: Callable[[list[Page[T, TParam]]], TParam | None],
        prev_fn: Callable[[list[Page[T, TParam]]], TParam | None] | None,
        initial_data_updated_at: float | dt.datetime | None,
    ) -> Computed[InfiniteQuery[T, TParam]]:
        assert self._key is not None

        # Create a Computed for the key - passthrough for constant keys, reactive for function keys
        if callable(self._key):
            key_computed = Computed(
                bind_state(state, self._key), name=f"inf_query.key.{self.name}"
            )
        else:
            constant_key = self._key  # ensure a constant reference
            key_computed = Computed(
                lambda: constant_key, name=f"inf_query.key.{self.name}"
            )

        render = PulseContext.get().render
        if render is None:
            raise RuntimeError("No render session available")
        store = render.query_store

        def query() -> InfiniteQuery[T, TParam]:
            key = key_computed()
            return cast(
                InfiniteQuery[T, TParam],
                store.ensure_infinite(
                    key,
                    initial_page_param=self._initial_page_param,
                    get_next_page_param=next_fn,
                    get_previous_page_param=prev_fn,
                    max_pages=self._max_pages,
                    gc_time=self._gc_time,
                    retries=self._retries,
                    retry_delay=self._retry_delay,
                    initial_data_updated_at=initial_data_updated_at,
                ),
            )

        return Computed(query, name=f"inf_query.{self.name}")

    def __get__(self, obj: Any, objtype: Any = None) -> InfiniteQueryResult[T, TParam]:
        if obj is None:
            return self  # pyright: ignore[reportReturnType]
        return self.initialize(obj, self.name)


@overload
def infinite_query(
    fn: Callable[[TState, TParam], Awaitable[T]],
    *,
    initial_page_param: TParam,
    max_pages: int = 0,
    stale_time: float = 0.0,
    gc_time: float | None = 300.0,
    refetch_interval: float | None = None,
    keep_previous_data: bool = False,
    retries: int = 3,
    retry_delay: float | None = None,
    initial_data_updated_at: float | dt.datetime | None = None,
    enabled: bool = True,
    fetch_on_mount: bool = True,
    key: QueryKey | None = None,
) -> InfiniteQueryProperty[T, TParam, TState]: ...


@overload
def infinite_query(
    fn: None = None,
    *,
    initial_page_param: TParam,
    max_pages: int = 0,
    stale_time: float = 0.0,
    gc_time: float | None = 300.0,
    refetch_interval: float | None = None,
    keep_previous_data: bool = False,
    retries: int = 3,
    retry_delay: float | None = None,
    initial_data_updated_at: float | dt.datetime | None = None,
    enabled: bool = True,
    fetch_on_mount: bool = True,
    key: QueryKey | None = None,
) -> Callable[
    [Callable[[TState, Any], Awaitable[T]]],
    InfiniteQueryProperty[T, TParam, TState],
]: ...


def infinite_query(
    fn: Callable[[TState, TParam], Awaitable[T]] | None = None,
    *,
    initial_page_param: TParam,
    max_pages: int = 0,
    stale_time: float = 0.0,
    gc_time: float | None = 300.0,
    refetch_interval: float | None = None,
    keep_previous_data: bool = False,
    retries: int = 3,
    retry_delay: float | None = None,
    initial_data_updated_at: float | dt.datetime | None = None,
    enabled: bool = True,
    fetch_on_mount: bool = True,
    key: QueryKey | None = None,
) -> (
    InfiniteQueryProperty[T, TParam, TState]
    | Callable[
        [Callable[[TState, Any], Awaitable[T]]],
        InfiniteQueryProperty[T, TParam, TState],
    ]
):
    """Decorator for paginated queries on State methods.

    Creates a reactive infinite query that supports cursor-based or offset-based
    pagination. Data is stored as a list of pages, each with its data and the
    parameter used to fetch it.

    Requires ``@query_prop.key`` and ``@query_prop.get_next_page_param`` decorators.

    Args:
        fn: The async method to decorate (when used without parentheses).
        initial_page_param: The parameter for fetching the first page (required).
        max_pages: Maximum pages to keep in memory (0 = unlimited).
        stale_time: Seconds before data is considered stale (default 0.0).
        gc_time: Seconds to keep unused query in cache (default 300.0).
        refetch_interval: Auto-refetch interval in seconds (default None).
        keep_previous_data: Keep previous data while refetching (default False).
        retries: Number of retry attempts on failure (default 3).
        retry_delay: Delay between retries in seconds (default 2.0).
        initial_data_updated_at: Timestamp for initial data staleness.
        enabled: Whether query is enabled (default True).
        fetch_on_mount: Fetch when component mounts (default True).
        key: Static query key for sharing across instances.

    Returns:
        InfiniteQueryProperty that creates InfiniteQueryResult instances when accessed.

    Example:

    ```python
    class FeedState(ps.State):
        @ps.infinite_query(initial_page_param=None)
        async def posts(self, cursor: str | None) -> list[Post]:
            return await api.get_posts(cursor=cursor)

        @posts.key
        def _posts_key(self):
            return ("feed", self.feed_type)

        @posts.get_next_page_param
        def _next_cursor(self, pages: list[Page]) -> str | None:
            if not pages:
                return None
            last = pages[-1]
            return last.data[-1].id if last.data else None

        @posts.get_previous_page_param
        def _prev_cursor(self, pages: list[Page]) -> str | None:
            if not pages:
                return None
            first = pages[0]
            return first.data[0].id if first.data else None
    ```
    """

    def decorator(
        func: Callable[[TState, TParam], Awaitable[T]], /
    ) -> InfiniteQueryProperty[T, TParam, TState]:
        sig = inspect.signature(func)
        params = list(sig.parameters.values())
        if not (len(params) == 2 and params[0].name == "self"):
            raise TypeError(
                "@infinite_query must be applied to a state method with signature (self, page_param)"
            )

        return InfiniteQueryProperty(
            func.__name__,
            func,
            initial_page_param=initial_page_param,
            max_pages=max_pages,
            stale_time=stale_time,
            gc_time=gc_time if gc_time is not None else 300.0,
            refetch_interval=refetch_interval,
            keep_previous_data=keep_previous_data,
            retries=retries,
            retry_delay=RETRY_DELAY_DEFAULT if retry_delay is None else retry_delay,
            initial_data_updated_at=initial_data_updated_at,
            enabled=enabled,
            fetch_on_mount=fetch_on_mount,
            key=key,
        )

    if fn:
        return decorator(fn)
    return decorator