pulse-framework 0.1.62__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pulse/__init__.py +1493 -0
- pulse/_examples.py +29 -0
- pulse/app.py +1086 -0
- pulse/channel.py +607 -0
- pulse/cli/__init__.py +0 -0
- pulse/cli/cmd.py +575 -0
- pulse/cli/dependencies.py +181 -0
- pulse/cli/folder_lock.py +134 -0
- pulse/cli/helpers.py +271 -0
- pulse/cli/logging.py +102 -0
- pulse/cli/models.py +35 -0
- pulse/cli/packages.py +262 -0
- pulse/cli/processes.py +292 -0
- pulse/cli/secrets.py +39 -0
- pulse/cli/uvicorn_log_config.py +87 -0
- pulse/code_analysis.py +38 -0
- pulse/codegen/__init__.py +0 -0
- pulse/codegen/codegen.py +359 -0
- pulse/codegen/templates/__init__.py +0 -0
- pulse/codegen/templates/layout.py +106 -0
- pulse/codegen/templates/route.py +345 -0
- pulse/codegen/templates/routes_ts.py +42 -0
- pulse/codegen/utils.py +20 -0
- pulse/component.py +237 -0
- pulse/components/__init__.py +0 -0
- pulse/components/for_.py +83 -0
- pulse/components/if_.py +86 -0
- pulse/components/react_router.py +94 -0
- pulse/context.py +108 -0
- pulse/cookies.py +322 -0
- pulse/decorators.py +344 -0
- pulse/dom/__init__.py +0 -0
- pulse/dom/elements.py +1024 -0
- pulse/dom/events.py +445 -0
- pulse/dom/props.py +1250 -0
- pulse/dom/svg.py +0 -0
- pulse/dom/tags.py +328 -0
- pulse/dom/tags.pyi +480 -0
- pulse/env.py +178 -0
- pulse/form.py +538 -0
- pulse/helpers.py +541 -0
- pulse/hooks/__init__.py +0 -0
- pulse/hooks/core.py +452 -0
- pulse/hooks/effects.py +88 -0
- pulse/hooks/init.py +668 -0
- pulse/hooks/runtime.py +464 -0
- pulse/hooks/setup.py +254 -0
- pulse/hooks/stable.py +138 -0
- pulse/hooks/state.py +192 -0
- pulse/js/__init__.py +125 -0
- pulse/js/__init__.pyi +115 -0
- pulse/js/_types.py +299 -0
- pulse/js/array.py +339 -0
- pulse/js/console.py +50 -0
- pulse/js/date.py +119 -0
- pulse/js/document.py +145 -0
- pulse/js/error.py +140 -0
- pulse/js/json.py +66 -0
- pulse/js/map.py +97 -0
- pulse/js/math.py +69 -0
- pulse/js/navigator.py +79 -0
- pulse/js/number.py +57 -0
- pulse/js/obj.py +81 -0
- pulse/js/object.py +172 -0
- pulse/js/promise.py +172 -0
- pulse/js/pulse.py +115 -0
- pulse/js/react.py +495 -0
- pulse/js/regexp.py +57 -0
- pulse/js/set.py +124 -0
- pulse/js/string.py +38 -0
- pulse/js/weakmap.py +53 -0
- pulse/js/weakset.py +48 -0
- pulse/js/window.py +205 -0
- pulse/messages.py +202 -0
- pulse/middleware.py +471 -0
- pulse/plugin.py +96 -0
- pulse/proxy.py +242 -0
- pulse/py.typed +0 -0
- pulse/queries/__init__.py +0 -0
- pulse/queries/client.py +609 -0
- pulse/queries/common.py +101 -0
- pulse/queries/effect.py +55 -0
- pulse/queries/infinite_query.py +1418 -0
- pulse/queries/mutation.py +295 -0
- pulse/queries/protocol.py +136 -0
- pulse/queries/query.py +1314 -0
- pulse/queries/store.py +120 -0
- pulse/react_component.py +88 -0
- pulse/reactive.py +1208 -0
- pulse/reactive_extensions.py +1172 -0
- pulse/render_session.py +768 -0
- pulse/renderer.py +584 -0
- pulse/request.py +205 -0
- pulse/routing.py +598 -0
- pulse/serializer.py +279 -0
- pulse/state.py +556 -0
- pulse/test_helpers.py +15 -0
- pulse/transpiler/__init__.py +111 -0
- pulse/transpiler/assets.py +81 -0
- pulse/transpiler/builtins.py +1029 -0
- pulse/transpiler/dynamic_import.py +130 -0
- pulse/transpiler/emit_context.py +49 -0
- pulse/transpiler/errors.py +96 -0
- pulse/transpiler/function.py +611 -0
- pulse/transpiler/id.py +18 -0
- pulse/transpiler/imports.py +341 -0
- pulse/transpiler/js_module.py +336 -0
- pulse/transpiler/modules/__init__.py +33 -0
- pulse/transpiler/modules/asyncio.py +57 -0
- pulse/transpiler/modules/json.py +24 -0
- pulse/transpiler/modules/math.py +265 -0
- pulse/transpiler/modules/pulse/__init__.py +5 -0
- pulse/transpiler/modules/pulse/tags.py +250 -0
- pulse/transpiler/modules/typing.py +63 -0
- pulse/transpiler/nodes.py +1987 -0
- pulse/transpiler/py_module.py +135 -0
- pulse/transpiler/transpiler.py +1100 -0
- pulse/transpiler/vdom.py +256 -0
- pulse/types/__init__.py +0 -0
- pulse/types/event_handler.py +50 -0
- pulse/user_session.py +386 -0
- pulse/version.py +69 -0
- pulse_framework-0.1.62.dist-info/METADATA +198 -0
- pulse_framework-0.1.62.dist-info/RECORD +126 -0
- pulse_framework-0.1.62.dist-info/WHEEL +4 -0
- pulse_framework-0.1.62.dist-info/entry_points.txt +3 -0
pulse/queries/query.py
ADDED
|
@@ -0,0 +1,1314 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import datetime as dt
|
|
3
|
+
import inspect
|
|
4
|
+
import time
|
|
5
|
+
from collections.abc import Awaitable, Callable
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from typing import (
|
|
8
|
+
TYPE_CHECKING,
|
|
9
|
+
Any,
|
|
10
|
+
Generic,
|
|
11
|
+
TypeVar,
|
|
12
|
+
cast,
|
|
13
|
+
overload,
|
|
14
|
+
override,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
from pulse.context import PulseContext
|
|
18
|
+
from pulse.helpers import (
|
|
19
|
+
MISSING,
|
|
20
|
+
Disposable,
|
|
21
|
+
call_flexible,
|
|
22
|
+
is_pytest,
|
|
23
|
+
later,
|
|
24
|
+
maybe_await,
|
|
25
|
+
)
|
|
26
|
+
from pulse.queries.common import (
|
|
27
|
+
ActionError,
|
|
28
|
+
ActionResult,
|
|
29
|
+
ActionSuccess,
|
|
30
|
+
OnErrorFn,
|
|
31
|
+
OnSuccessFn,
|
|
32
|
+
QueryKey,
|
|
33
|
+
QueryStatus,
|
|
34
|
+
bind_state,
|
|
35
|
+
)
|
|
36
|
+
from pulse.queries.effect import AsyncQueryEffect
|
|
37
|
+
from pulse.reactive import Computed, Effect, Signal, Untrack
|
|
38
|
+
from pulse.state import InitializableProperty, State
|
|
39
|
+
|
|
40
|
+
if TYPE_CHECKING:
|
|
41
|
+
from pulse.queries.protocol import QueryResult
|
|
42
|
+
|
|
43
|
+
T = TypeVar("T")
|
|
44
|
+
TState = TypeVar("TState", bound=State)
|
|
45
|
+
|
|
46
|
+
RETRY_DELAY_DEFAULT = 2.0 if not is_pytest() else 0.01
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
@dataclass(slots=True)
class QueryConfig(Generic[T]):
    """Configuration options for a query.

    Stores configuration that controls query behavior including
    retry logic, caching, and lifecycle callbacks.

    Attributes:
        retries: Number of retry attempts on failure (default 3).
        retry_delay: Delay in seconds between retry attempts (default 2.0).
        initial_data: Initial data value or factory function.
        initial_data_updated_at: Timestamp for initial data staleness calculation.
        gc_time: Seconds to keep unused query in cache before garbage collection.
        on_dispose: Callback invoked when query is disposed.

    NOTE(review): although described as immutable, ``gc_time`` is mutated in
    place by ``KeyedQuery.observe`` (grown to the largest observer gc window).
    """

    retries: int
    retry_delay: float
    # Either a plain value or a zero-arg factory producing it.
    initial_data: T | Callable[[], T] | None
    # float is interpreted as an epoch timestamp; datetime is converted via .timestamp().
    initial_data_updated_at: float | dt.datetime | None
    gc_time: float
    # Receives the query object being disposed.
    on_dispose: Callable[[Any], None] | None
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
class QueryState(Generic[T]):
    """Reactive state container shared by keyed and unkeyed queries.

    Owns the signals describing a query's lifecycle (data, error, status,
    fetch/retry bookkeeping) together with its immutable configuration, and
    provides the mutation helpers that both query flavors delegate to.

    Attributes:
        cfg: Query configuration options.
        data: Signal holding the fetched data or None.
        error: Signal holding the last error or None.
        last_updated: Signal with the epoch timestamp of the last update.
        status: Signal with the current QueryStatus ("loading", "success", "error").
        is_fetching: Signal set while a fetch is in flight.
        retries: Signal counting retry attempts for the current fetch.
        retry_reason: Signal holding the exception from the last failed attempt.
    """

    cfg: QueryConfig[T]

    # Reactive signals for query state
    data: Signal[T | None]
    error: Signal[Exception | None]
    last_updated: Signal[float]
    status: Signal[QueryStatus]
    is_fetching: Signal[bool]
    retries: Signal[int]
    retry_reason: Signal[Exception | None]

    def __init__(
        self,
        name: str,
        retries: int = 3,
        retry_delay: float = RETRY_DELAY_DEFAULT,
        initial_data: T | None = MISSING,
        initial_data_updated_at: float | dt.datetime | None = None,
        gc_time: float = 300.0,
        on_dispose: Callable[[Any], None] | None = None,
    ):
        self.cfg = QueryConfig(
            retries=retries,
            retry_delay=retry_delay,
            initial_data=initial_data,
            initial_data_updated_at=initial_data_updated_at,
            gc_time=gc_time,
            on_dispose=on_dispose,
        )

        # MISSING (a sentinel) means "no initial data": start empty + loading.
        seeded = initial_data is not MISSING
        self.data = Signal(
            initial_data if seeded else None,
            name=f"query.data({name})",
        )
        self.error = Signal(None, name=f"query.error({name})")

        self.last_updated = Signal(
            0.0,
            name=f"query.last_updated({name})",
        )
        if initial_data_updated_at:
            self.set_updated_at(initial_data_updated_at)

        self.status = Signal(
            "success" if seeded else "loading",
            name=f"query.status({name})",
        )
        self.is_fetching = Signal(False, name=f"query.is_fetching({name})")
        self.retries = Signal(0, name=f"query.retries({name})")
        self.retry_reason = Signal(None, name=f"query.retry_reason({name})")

    def set_data(
        self,
        data: T | Callable[[T | None], T],
        *,
        updated_at: float | dt.datetime | None = None,
    ):
        """Manually write data; *data* may be a value or an updater function."""
        if callable(data):
            resolved = cast(T, data(self.data.read()))
        else:
            resolved = cast(T, data)
        # manual=True keeps is_fetching/retry bookkeeping untouched.
        self.set_success(resolved, manual=True)
        if updated_at is not None:
            self.set_updated_at(updated_at)

    def set_updated_at(self, updated_at: float | dt.datetime):
        """Record when the data was last refreshed (datetime → epoch seconds)."""
        if isinstance(updated_at, dt.datetime):
            ts = updated_at.timestamp()
        else:
            ts = updated_at
        self.last_updated.write(ts)

    def set_initial_data(
        self,
        data: T | Callable[[], T],
        *,
        updated_at: float | dt.datetime | None = None,
    ):
        """
        Set data as if it were provided as initial_data.
        Optionally supply an updated_at timestamp to seed staleness calculations.

        A no-op unless the query is still in the "loading" state.
        """
        if self.status() != "loading":
            return
        value = cast(T, data() if callable(data) else data)
        self.set_data(value, updated_at=updated_at)

    def set_error(
        self, error: Exception, *, updated_at: float | dt.datetime | None = None
    ):
        """Manually put the query into the error state."""
        self.apply_error(error, manual=True)
        if updated_at is not None:
            self.set_updated_at(updated_at)

    def set_success(self, data: T, manual: bool = False):
        """Record a successful result, clearing error state."""
        self.data.write(data)
        self.last_updated.write(time.time())
        self.error.write(None)
        self.status.write("success")
        if manual:
            # Manual writes leave an in-flight fetch's bookkeeping alone.
            return
        self.is_fetching.write(False)
        self.retries.write(0)
        self.retry_reason.write(None)

    def apply_error(self, error: Exception, manual: bool = False):
        """Record a failed result on the query."""
        self.error.write(error)
        self.last_updated.write(time.time())
        self.status.write("error")
        if not manual:
            self.is_fetching.write(False)
            # Retry counters are intentionally preserved on final error
            # so the failure history remains inspectable.

    def failed_retry(self, reason: Exception):
        """Bump the retry counter and remember why this attempt failed."""
        self.retries.write(self.retries.read() + 1)
        self.retry_reason.write(reason)

    def reset_retries(self):
        """Clear retry bookkeeping at the start of a fresh fetch."""
        self.retries.write(0)
        self.retry_reason.write(None)
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
async def run_fetch_with_retries(
    state: QueryState[T],
    fetch_fn: Callable[[], Awaitable[T]],
    on_success: Callable[[T], Awaitable[None] | None] | None = None,
    on_error: Callable[[Exception], Awaitable[None] | None] | None = None,
    untrack: bool = False,
) -> None:
    """
    Execute a fetch with retry logic, updating QueryState.

    Args:
        state: The QueryState to update.
        fetch_fn: Async function to fetch data.
        on_success: Optional callback on success.
        on_error: Optional callback on error.
        untrack: If True, wrap fetch_fn in Untrack() to prevent dependency
            tracking. Use for keyed queries where the fetch is triggered via
            asyncio.create_task.

    Note: success handling runs inside the try block, so an exception raised
    by ``on_success`` also consumes retry budget (matching prior behavior).
    """
    state.reset_retries()

    while True:
        try:
            if untrack:
                with Untrack():
                    result = await fetch_fn()
            else:
                result = await fetch_fn()
            state.set_success(result)
            if on_success:
                await maybe_await(call_flexible(on_success, result))
            return
        except asyncio.CancelledError:
            # Cancellation must propagate untouched.
            raise
        except Exception as exc:
            if state.retries.read() >= state.cfg.retries:
                # Retry budget exhausted: latch the error and notify.
                state.retry_reason.write(exc)
                state.apply_error(exc)
                if on_error:
                    await maybe_await(call_flexible(on_error, exc))
                return
            # Budget remains: record the failure and back off before retrying.
            state.failed_retry(exc)
            await asyncio.sleep(state.cfg.retry_delay)
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
class KeyedQuery(Generic[T], Disposable):
    """
    Query for keyed queries (shared across observers).
    Uses direct task management without dependency tracking.
    Multiple observers can share the same query.

    Lifecycle: observers attach via observe()/unobserve(); when the last
    observer leaves, disposal is scheduled after cfg.gc_time seconds.
    Fetches run as asyncio tasks; the observer that started a fetch is
    remembered so its departure cancels that fetch.
    """

    key: QueryKey  # cache key identifying this query
    state: QueryState[T]  # reactive signals + configuration
    observers: "list[KeyedQueryResult[T]]"  # currently-subscribed observers
    _task: asyncio.Task[None] | None  # in-flight fetch task, if any
    _task_initiator: "KeyedQueryResult[T] | None"  # observer that started _task
    _gc_handle: asyncio.TimerHandle | None  # pending garbage-collection timer

    def __init__(
        self,
        key: QueryKey,
        retries: int = 3,
        retry_delay: float = RETRY_DELAY_DEFAULT,
        initial_data: T | None = MISSING,
        initial_data_updated_at: float | dt.datetime | None = None,
        gc_time: float = 300.0,
        on_dispose: Callable[[Any], None] | None = None,
    ):
        """Create the shared query container for *key*.

        Parameters mirror QueryState/QueryConfig; see QueryConfig for details.
        """
        self.key = key
        self.state = QueryState(
            name=str(key),
            retries=retries,
            retry_delay=retry_delay,
            initial_data=initial_data,
            initial_data_updated_at=initial_data_updated_at,
            gc_time=gc_time,
            on_dispose=on_dispose,
        )
        self.observers = []
        self._task = None
        self._task_initiator = None
        self._gc_handle = None

    # --- Delegate signal access to state ---
    @property
    def data(self) -> Signal[T | None]:
        return self.state.data

    @property
    def error(self) -> Signal[Exception | None]:
        return self.state.error

    @property
    def last_updated(self) -> Signal[float]:
        return self.state.last_updated

    @property
    def status(self) -> Signal[QueryStatus]:
        return self.state.status

    @property
    def is_fetching(self) -> Signal[bool]:
        return self.state.is_fetching

    @property
    def retries(self) -> Signal[int]:
        return self.state.retries

    @property
    def retry_reason(self) -> Signal[Exception | None]:
        return self.state.retry_reason

    @property
    def cfg(self) -> QueryConfig[T]:
        return self.state.cfg

    # --- Delegate state methods ---
    def set_data(
        self,
        data: T | Callable[[T | None], T],
        *,
        updated_at: float | dt.datetime | None = None,
    ):
        """Manually write data (value or updater fn); see QueryState.set_data."""
        self.state.set_data(data, updated_at=updated_at)

    def set_initial_data(
        self,
        data: T | Callable[[], T],
        *,
        updated_at: float | dt.datetime | None = None,
    ):
        """Seed data only if the query is still loading; see QueryState.set_initial_data."""
        self.state.set_initial_data(data, updated_at=updated_at)

    def set_error(
        self, error: Exception, *, updated_at: float | dt.datetime | None = None
    ):
        """Manually put the query into the error state."""
        self.state.set_error(error, updated_at=updated_at)

    # --- Query-specific methods ---
    @property
    def is_scheduled(self) -> bool:
        """Check if a fetch is currently scheduled/running."""
        return self._task is not None and not self._task.done()

    async def _run_fetch(
        self,
        fetch_fn: Callable[[], Awaitable[T]],
        observers: "list[KeyedQueryResult[T]]",
    ) -> None:
        """Execute the fetch with retry logic.

        *observers* is the snapshot captured at fetch start; their
        on_success/on_error callbacks are fanned out on completion.
        """

        async def on_success(result: T):
            for obs in observers:
                if obs._on_success:  # pyright: ignore[reportPrivateUsage]
                    await maybe_await(call_flexible(obs._on_success, result))  # pyright: ignore[reportPrivateUsage]

        async def on_error(e: Exception):
            for obs in observers:
                if obs._on_error:  # pyright: ignore[reportPrivateUsage]
                    await maybe_await(call_flexible(obs._on_error, e))  # pyright: ignore[reportPrivateUsage]

        await run_fetch_with_retries(
            self.state,
            fetch_fn,
            on_success=on_success,
            on_error=on_error,
            untrack=True,  # Keyed queries use asyncio.create_task, need to untrack
        )

    def run_fetch(
        self,
        fetch_fn: Callable[[], Awaitable[T]],
        cancel_previous: bool = True,
        initiator: "KeyedQueryResult[T] | None" = None,
    ) -> asyncio.Task[None]:
        """
        Start a fetch with the given fetch function.
        Cancels any in-flight fetch if cancel_previous is True.

        Args:
            fetch_fn: The async function to fetch data.
            cancel_previous: If True, cancels any in-flight fetch before starting.
            initiator: The KeyedQueryResult observer that initiated this fetch (for cancellation tracking).
        """
        if cancel_previous and self._task and not self._task.done():
            self._task.cancel()

        self.state.is_fetching.write(True)
        # Capture current observers at fetch start
        observers = list(self.observers)
        self._task = asyncio.create_task(self._run_fetch(fetch_fn, observers))
        self._task_initiator = initiator
        return self._task

    async def wait(self) -> ActionResult[T]:
        """Wait for the current fetch to complete.

        Returns ActionSuccess with the data, or ActionError if the query
        ended in the error state.
        """
        while self._task and not self._task.done():
            try:
                await self._task
            except asyncio.CancelledError:
                # Task was cancelled (probably by a new refetch).
                # If there's a new task, wait for that one instead.
                # If no new task, re-raise the cancellation.
                # Note: self._task may have been reassigned by run_fetch() after await
                if self._task is None or self._task.done():  # pyright: ignore[reportUnnecessaryComparison]
                    raise
                # Otherwise, loop and wait for the new task
        # Return result based on current state
        if self.state.status() == "error":
            return ActionError(cast(Exception, self.state.error.read()))
        return ActionSuccess(cast(T, self.state.data.read()))

    def cancel(self) -> None:
        """Cancel the current fetch if running."""
        if self._task and not self._task.done():
            self._task.cancel()
        self._task = None
        self._task_initiator = None

    def _get_first_observer_fetch_fn(self) -> Callable[[], Awaitable[T]]:
        """Get the fetch function from the first observer.

        Raises:
            RuntimeError: if no observers are registered.
        """
        if len(self.observers) == 0:
            raise RuntimeError(
                f"Query '{self.key}' has no observers. Cannot access fetch function."
            )
        return self.observers[0]._fetch_fn  # pyright: ignore[reportPrivateUsage]

    async def refetch(self, cancel_refetch: bool = True) -> ActionResult[T]:
        """
        Reruns the query and returns the result.
        Uses the first observer's fetch function.

        Note: Prefer calling refetch() on KeyedQueryResult to ensure the correct fetch function is used.
        """
        fetch_fn = self._get_first_observer_fetch_fn()
        # If a fetch is already in flight and we aren't cancelling it,
        # just wait on the existing one.
        if cancel_refetch or not self.is_fetching():
            self.run_fetch(fetch_fn, cancel_previous=cancel_refetch)
        return await self.wait()

    def invalidate(self, cancel_refetch: bool = False):
        """
        Marks query as stale. If there are active observers, triggers a refetch.
        Uses the first observer's fetch function.

        Note: Prefer calling invalidate() on KeyedQueryResult to ensure the correct fetch function is used.
        """
        if len(self.observers) > 0:
            fetch_fn = self._get_first_observer_fetch_fn()
            if not self.is_scheduled or cancel_refetch:
                self.run_fetch(fetch_fn, cancel_previous=cancel_refetch)

    def observe(self, observer: "KeyedQueryResult[T]"):
        """Register an observer."""
        self.observers.append(observer)
        # A live observer keeps the query alive: abort any pending GC.
        self.cancel_gc()
        if observer._gc_time > 0:  # pyright: ignore[reportPrivateUsage]
            # Grow (never shrink) the GC window to the largest observer's.
            self.cfg.gc_time = max(self.cfg.gc_time, observer._gc_time)  # pyright: ignore[reportPrivateUsage]

    def unobserve(self, observer: "KeyedQueryResult[T]"):
        """Unregister an observer. Schedules GC if no observers remain."""
        if observer in self.observers:
            self.observers.remove(observer)

        # If the departing observer initiated the ongoing fetch, cancel it
        if self._task_initiator is observer and self._task and not self._task.done():
            self._task.cancel()
            self._task = None
            self._task_initiator = None
            # Reschedule from another observer if any remain
            if len(self.observers) > 0:
                fetch_fn = self._get_first_observer_fetch_fn()
                self.run_fetch(
                    fetch_fn, cancel_previous=False, initiator=self.observers[0]
                )

        if len(self.observers) == 0:
            self.schedule_gc()

    def schedule_gc(self):
        """Arrange disposal after cfg.gc_time seconds (immediately if <= 0)."""
        self.cancel_gc()
        if self.cfg.gc_time > 0:
            self._gc_handle = later(self.cfg.gc_time, self.dispose)
        else:
            self.dispose()

    def cancel_gc(self):
        """Cancel any pending garbage-collection timer."""
        if self._gc_handle:
            self._gc_handle.cancel()
            self._gc_handle = None

    @override
    def dispose(self):
        """Clean up the query, cancelling any in-flight fetch."""
        self.cancel()
        if self.cfg.on_dispose:
            self.cfg.on_dispose(self)
|
|
510
|
+
|
|
511
|
+
|
|
512
|
+
class UnkeyedQueryResult(Generic[T], Disposable):
|
|
513
|
+
"""
|
|
514
|
+
Query for unkeyed queries (single observer with dependency tracking).
|
|
515
|
+
Uses an AsyncEffect to track dependencies and re-run on changes.
|
|
516
|
+
|
|
517
|
+
Unlike KeyedQuery which separates the query from its observer (KeyedQueryResult),
|
|
518
|
+
UnkeyedQuery combines both since there's always exactly one observer.
|
|
519
|
+
"""
|
|
520
|
+
|
|
521
|
+
state: QueryState[T]
|
|
522
|
+
_effect: AsyncQueryEffect
|
|
523
|
+
_fetch_fn: Callable[[], Awaitable[T]]
|
|
524
|
+
_on_success: Callable[[T], Awaitable[None] | None] | None
|
|
525
|
+
_on_error: Callable[[Exception], Awaitable[None] | None] | None
|
|
526
|
+
_stale_time: float
|
|
527
|
+
_refetch_interval: float | None
|
|
528
|
+
_keep_previous_data: bool
|
|
529
|
+
_enabled: Signal[bool]
|
|
530
|
+
_interval_effect: Effect | None
|
|
531
|
+
_data_computed: Computed[T | None]
|
|
532
|
+
|
|
533
|
+
def __init__(
|
|
534
|
+
self,
|
|
535
|
+
fetch_fn: Callable[[], Awaitable[T]],
|
|
536
|
+
on_success: Callable[[T], Awaitable[None] | None] | None = None,
|
|
537
|
+
on_error: Callable[[Exception], Awaitable[None] | None] | None = None,
|
|
538
|
+
retries: int = 3,
|
|
539
|
+
retry_delay: float = RETRY_DELAY_DEFAULT,
|
|
540
|
+
initial_data: T | None = MISSING,
|
|
541
|
+
initial_data_updated_at: float | dt.datetime | None = None,
|
|
542
|
+
gc_time: float = 300.0,
|
|
543
|
+
stale_time: float = 0.0,
|
|
544
|
+
refetch_interval: float | None = None,
|
|
545
|
+
keep_previous_data: bool = False,
|
|
546
|
+
enabled: bool = True,
|
|
547
|
+
fetch_on_mount: bool = True,
|
|
548
|
+
):
|
|
549
|
+
self.state = QueryState(
|
|
550
|
+
name="unkeyed",
|
|
551
|
+
retries=retries,
|
|
552
|
+
retry_delay=retry_delay,
|
|
553
|
+
initial_data=initial_data,
|
|
554
|
+
initial_data_updated_at=initial_data_updated_at,
|
|
555
|
+
gc_time=gc_time,
|
|
556
|
+
on_dispose=None,
|
|
557
|
+
)
|
|
558
|
+
self._fetch_fn = fetch_fn
|
|
559
|
+
self._on_success = on_success
|
|
560
|
+
self._on_error = on_error
|
|
561
|
+
self._stale_time = stale_time
|
|
562
|
+
self._refetch_interval = refetch_interval
|
|
563
|
+
self._keep_previous_data = keep_previous_data
|
|
564
|
+
self._enabled = Signal(enabled, name="query.enabled(unkeyed)")
|
|
565
|
+
self._interval_effect = None
|
|
566
|
+
|
|
567
|
+
# Create effect with auto-tracking (deps=None)
|
|
568
|
+
# Pass state as fetcher since it has the Signal attributes directly
|
|
569
|
+
self._effect = AsyncQueryEffect(
|
|
570
|
+
self._run,
|
|
571
|
+
fetcher=self.state,
|
|
572
|
+
name="unkeyed_query_effect",
|
|
573
|
+
deps=None, # Auto-track dependencies
|
|
574
|
+
lazy=True,
|
|
575
|
+
)
|
|
576
|
+
|
|
577
|
+
# Computed for keep_previous_data logic
|
|
578
|
+
self._data_computed = Computed(
|
|
579
|
+
self._data_computed_fn, name="query_data(unkeyed)"
|
|
580
|
+
)
|
|
581
|
+
|
|
582
|
+
# Schedule initial fetch if stale (untracked to avoid reactive loop)
|
|
583
|
+
with Untrack():
|
|
584
|
+
if enabled and fetch_on_mount and self.is_stale():
|
|
585
|
+
self.schedule()
|
|
586
|
+
|
|
587
|
+
# Set up interval effect if interval is specified
|
|
588
|
+
if refetch_interval is not None and refetch_interval > 0:
|
|
589
|
+
self._setup_interval_effect(refetch_interval)
|
|
590
|
+
|
|
591
|
+
def _setup_interval_effect(self, interval: float):
|
|
592
|
+
"""Create an effect that invalidates the query at the specified interval."""
|
|
593
|
+
|
|
594
|
+
def interval_fn():
|
|
595
|
+
if self._enabled():
|
|
596
|
+
self.schedule()
|
|
597
|
+
|
|
598
|
+
self._interval_effect = Effect(
|
|
599
|
+
interval_fn,
|
|
600
|
+
name="query_interval(unkeyed)",
|
|
601
|
+
interval=interval,
|
|
602
|
+
immediate=True,
|
|
603
|
+
)
|
|
604
|
+
|
|
605
|
+
def _data_computed_fn(self, prev: T | None) -> T | None:
|
|
606
|
+
if self._keep_previous_data and self.state.status() != "success":
|
|
607
|
+
return prev
|
|
608
|
+
raw = self.state.data()
|
|
609
|
+
if raw is None:
|
|
610
|
+
return None
|
|
611
|
+
return raw
|
|
612
|
+
|
|
613
|
+
# --- Status properties ---
|
|
614
|
+
@property
|
|
615
|
+
def status(self) -> QueryStatus:
|
|
616
|
+
return self.state.status()
|
|
617
|
+
|
|
618
|
+
@property
|
|
619
|
+
def is_loading(self) -> bool:
|
|
620
|
+
return self.status == "loading"
|
|
621
|
+
|
|
622
|
+
@property
|
|
623
|
+
def is_success(self) -> bool:
|
|
624
|
+
return self.status == "success"
|
|
625
|
+
|
|
626
|
+
@property
|
|
627
|
+
def is_error(self) -> bool:
|
|
628
|
+
return self.status == "error"
|
|
629
|
+
|
|
630
|
+
@property
|
|
631
|
+
def is_fetching(self) -> bool:
|
|
632
|
+
return self.state.is_fetching()
|
|
633
|
+
|
|
634
|
+
@property
|
|
635
|
+
def error(self) -> Exception | None:
|
|
636
|
+
return self.state.error.read()
|
|
637
|
+
|
|
638
|
+
@property
|
|
639
|
+
def data(self) -> T | None:
|
|
640
|
+
return self._data_computed()
|
|
641
|
+
|
|
642
|
+
# --- State methods ---
|
|
643
|
+
def set_data(self, data: T | Callable[[T | None], T]):
|
|
644
|
+
"""Optimistically set data without changing loading/error state."""
|
|
645
|
+
self.state.set_data(data)
|
|
646
|
+
|
|
647
|
+
def set_initial_data(
|
|
648
|
+
self,
|
|
649
|
+
data: T | Callable[[], T],
|
|
650
|
+
*,
|
|
651
|
+
updated_at: float | dt.datetime | None = None,
|
|
652
|
+
):
|
|
653
|
+
"""Seed initial data and optional freshness timestamp."""
|
|
654
|
+
self.state.set_initial_data(data, updated_at=updated_at)
|
|
655
|
+
|
|
656
|
+
def set_error(self, error: Exception):
|
|
657
|
+
"""Set error state on the query."""
|
|
658
|
+
self.state.set_error(error)
|
|
659
|
+
|
|
660
|
+
def enable(self):
|
|
661
|
+
"""Enable the query."""
|
|
662
|
+
self._enabled.write(True)
|
|
663
|
+
|
|
664
|
+
def disable(self):
|
|
665
|
+
"""Disable the query, preventing it from fetching."""
|
|
666
|
+
self._enabled.write(False)
|
|
667
|
+
|
|
668
|
+
# --- Query operations ---
|
|
669
|
+
def is_stale(self) -> bool:
|
|
670
|
+
"""Check if the query data is stale based on stale_time."""
|
|
671
|
+
return (time.time() - self.state.last_updated.read()) > self._stale_time
|
|
672
|
+
|
|
673
|
+
async def _run(self):
|
|
674
|
+
"""Run the fetch through the effect (for dependency tracking)."""
|
|
675
|
+
# Unkeyed queries run inside AsyncEffect which has its own scope,
|
|
676
|
+
# so we don't need untrack=True here - deps should be tracked
|
|
677
|
+
await run_fetch_with_retries(
|
|
678
|
+
self.state,
|
|
679
|
+
self._fetch_fn,
|
|
680
|
+
on_success=self._on_success,
|
|
681
|
+
on_error=self._on_error,
|
|
682
|
+
untrack=False,
|
|
683
|
+
)
|
|
684
|
+
|
|
685
|
+
def schedule(self):
|
|
686
|
+
"""Schedule the effect to run."""
|
|
687
|
+
self._effect.schedule()
|
|
688
|
+
|
|
689
|
+
@property
|
|
690
|
+
def is_scheduled(self) -> bool:
|
|
691
|
+
"""Check if a fetch is currently scheduled/running."""
|
|
692
|
+
return self._effect.is_scheduled
|
|
693
|
+
|
|
694
|
+
async def refetch(self, cancel_refetch: bool = True) -> ActionResult[T]:
|
|
695
|
+
"""Refetch the query data through the effect."""
|
|
696
|
+
if cancel_refetch:
|
|
697
|
+
self.cancel()
|
|
698
|
+
self.schedule()
|
|
699
|
+
return await self.wait()
|
|
700
|
+
|
|
701
|
+
async def wait(self) -> ActionResult[T]:
    """Wait for the current query to complete."""
    state = self.state
    # A loading query with no task in flight needs a kick-start first.
    if state.status() == "loading" and not state.is_fetching():
        self.schedule()
    await self._effect.wait()
    if state.status() == "error":
        return ActionError(cast(Exception, state.error.read()))
    return ActionSuccess(cast(T, state.data.read()))
|
|
710
|
+
|
|
711
|
+
def invalidate(self):
    """Mark the query as stale and refetch through the effect."""
    if self.is_scheduled:
        # A fetch is already pending; nothing to do.
        return
    self.schedule()
|
|
715
|
+
|
|
716
|
+
def cancel(self) -> None:
    """Abort the in-flight fetch, if any, leaving interval scheduling intact."""
    effect = self._effect
    effect.cancel(cancel_interval=False)
|
|
719
|
+
|
|
720
|
+
@override
|
|
721
|
+
def dispose(self):
|
|
722
|
+
"""Clean up the query and its effect."""
|
|
723
|
+
if self._interval_effect is not None:
|
|
724
|
+
self._interval_effect.dispose()
|
|
725
|
+
self._effect.dispose()
|
|
726
|
+
|
|
727
|
+
|
|
728
|
+
class KeyedQueryResult(Generic[T], Disposable):
    """
    Observer wrapper for keyed queries.
    Handles observation lifecycle, staleness tracking, and provides query operations.
    """

    # Computed resolving the KeyedQuery for the currently active key; it
    # re-points at a different query object when the key changes.
    _query: Computed[KeyedQuery[T]]
    # Zero-argument coroutine factory used for fetches initiated by this observer.
    _fetch_fn: Callable[[], Awaitable[T]]
    # Seconds before cached data counts as stale.
    _stale_time: float
    # Seconds an unobserved query is kept cached before collection.
    _gc_time: float
    # Auto-refetch period in seconds; None disables interval refetching.
    _refetch_interval: float | None
    # When True, keep exposing the previous data while a new fetch runs.
    _keep_previous_data: bool
    # Optional callback invoked with freshly fetched data.
    _on_success: Callable[[T], Awaitable[None] | None] | None
    # Optional callback invoked with the fetch error.
    _on_error: Callable[[Exception], Awaitable[None] | None] | None
    # Effect that (un)registers this observer on the active query.
    _observe_effect: Effect
    # Effect driving periodic invalidation; None when no interval is configured.
    _interval_effect: Effect | None
    # Computed wrapping _data_computed_fn (implements keep_previous_data).
    _data_computed: Computed[T | None]
    # Reactive on/off switch gating fetches.
    _enabled: Signal[bool]
    # Whether to fetch immediately when the query is first observed.
    _fetch_on_mount: bool
|
|
747
|
+
|
|
748
|
+
def __init__(
    self,
    query: Computed[KeyedQuery[T]],
    fetch_fn: Callable[[], Awaitable[T]],
    stale_time: float = 0.0,
    gc_time: float = 300.0,
    refetch_interval: float | None = None,
    keep_previous_data: bool = False,
    on_success: Callable[[T], Awaitable[None] | None] | None = None,
    on_error: Callable[[Exception], Awaitable[None] | None] | None = None,
    enabled: bool = True,
    fetch_on_mount: bool = True,
):
    """Bind this observer to the reactive query and start observing.

    ``query`` is a Computed so a key change transparently swaps the
    underlying KeyedQuery: the observe effect below re-runs on that swap,
    its cleanup unobserves the old query, and the new one is observed.
    """
    self._query = query
    self._fetch_fn = fetch_fn
    self._stale_time = stale_time
    self._gc_time = gc_time
    self._refetch_interval = refetch_interval
    self._keep_previous_data = keep_previous_data
    self._on_success = on_success
    self._on_error = on_error
    self._enabled = Signal(enabled, name=f"query.enabled({query().key})")
    self._interval_effect = None

    def observe_effect():
        # Tracked reads: the effect re-runs when the key (and thus the
        # query object) or the enabled flag changes.
        q = self._query()
        enabled = self._enabled()

        with Untrack():
            q.observe(self)

            # If stale or loading, schedule refetch (only when enabled).
            # NOTE(review): is_stale() reads last_updated — kept inside
            # Untrack so data updates don't re-trigger this effect.
            if enabled and fetch_on_mount and self.is_stale():
                self.invalidate()

        # Return cleanup function that captures the query (old query on key change)
        def cleanup():
            q.unobserve(self)

        return cleanup

    self._observe_effect = Effect(
        observe_effect,
        name=f"query_observe({self._query().key})",
        immediate=True,
    )
    self._data_computed = Computed(
        self._data_computed_fn, name=f"query_data({self._query().key})"
    )

    # Set up interval effect if interval is specified
    if refetch_interval is not None and refetch_interval > 0:
        self._setup_interval_effect(refetch_interval)
|
|
801
|
+
|
|
802
|
+
def _setup_interval_effect(self, interval: float):
    """Create an effect that invalidates the query at the specified interval."""

    def interval_fn():
        # Reading _enabled makes this effect reactive to enabled changes.
        if not self._enabled():
            return
        self.invalidate()

    self._interval_effect = Effect(
        interval_fn,
        name=f"query_interval({self._query().key})",
        interval=interval,
        immediate=True,
    )
|
|
816
|
+
|
|
817
|
+
@property
def status(self) -> QueryStatus:
    """Current lifecycle status of the underlying query."""
    query = self._query()
    return query.status()
|
|
820
|
+
|
|
821
|
+
@property
def is_loading(self) -> bool:
    """True while the query has produced neither data nor an error."""
    current = self.status
    return current == "loading"
|
|
824
|
+
|
|
825
|
+
@property
def is_success(self) -> bool:
    """True once the query has successfully produced data."""
    current = self.status
    return current == "success"
|
|
828
|
+
|
|
829
|
+
@property
def is_error(self) -> bool:
    """True when the last fetch ended in an error."""
    current = self.status
    return current == "error"
|
|
832
|
+
|
|
833
|
+
@property
def is_fetching(self) -> bool:
    """True while a fetch task is in flight on the underlying query."""
    query = self._query()
    return query.is_fetching()
|
|
836
|
+
|
|
837
|
+
@property
def is_scheduled(self) -> bool:
    """True while a fetch is scheduled on the underlying query."""
    query = self._query()
    return query.is_scheduled
|
|
840
|
+
|
|
841
|
+
@property
|
|
842
|
+
def error(self) -> Exception | None:
|
|
843
|
+
return self._query().error.read()
|
|
844
|
+
|
|
845
|
+
def _data_computed_fn(self, prev: T | None) -> T | None:
    """Derive the exposed data value, optionally holding the previous one."""
    query = self._query()
    if self._keep_previous_data and query.status() != "success":
        # Not successful yet: keep surfacing the last known value.
        return prev
    raw = query.data()
    return None if raw is None else raw
|
|
853
|
+
|
|
854
|
+
@property
def data(self) -> T | None:
    """Latest data value (possibly the retained previous value)."""
    computed = self._data_computed
    return computed()
|
|
857
|
+
|
|
858
|
+
def is_stale(self) -> bool:
    """Check if the query data is stale based on stale_time."""
    last = self._query().last_updated.read()
    return (time.time() - last) > self._stale_time
|
|
862
|
+
|
|
863
|
+
async def refetch(self, cancel_refetch: bool = True) -> ActionResult[T]:
    """
    Refetch the query data using this observer's fetch function.
    If cancel_refetch is True (default), cancels any in-flight request and starts a new one.
    If cancel_refetch is False, deduplicates requests if one is already in flight.
    """
    query = self._query()
    should_start = cancel_refetch or not query.is_fetching()
    if should_start:
        query.run_fetch(
            self._fetch_fn, cancel_previous=cancel_refetch, initiator=self
        )
    return await self.wait()
|
|
875
|
+
|
|
876
|
+
async def wait(self) -> ActionResult[T]:
    """Wait for the current query to complete."""
    query = self._query()
    # A loading query with no task in flight needs a kick-start first.
    if query.status() == "loading" and not query.is_fetching():
        query.run_fetch(self._fetch_fn, initiator=self)
    return await query.wait()
|
|
883
|
+
|
|
884
|
+
def invalidate(self):
    """Mark the query as stale and refetch using this observer's fetch function."""
    query = self._query()
    # Only refetch when nothing is scheduled and someone is observing.
    if query.is_scheduled or not query.observers:
        return
    query.run_fetch(self._fetch_fn, cancel_previous=False, initiator=self)
|
|
889
|
+
|
|
890
|
+
def set_data(self, data: T | Callable[[T | None], T]):
    """Optimistically set data without changing loading/error state."""
    self._query().set_data(data)
|
|
894
|
+
|
|
895
|
+
def set_initial_data(
    self,
    data: T | Callable[[], T],
    *,
    updated_at: float | dt.datetime | None = None,
):
    """Seed initial data and optional freshness timestamp."""
    target = self._query()
    target.set_initial_data(data, updated_at=updated_at)
|
|
904
|
+
|
|
905
|
+
def set_error(self, error: Exception):
    """Set error state on the underlying query."""
    self._query().set_error(error)
|
|
909
|
+
|
|
910
|
+
def enable(self):
    """Allow the query to fetch again."""
    switch = self._enabled
    switch.write(True)
|
|
913
|
+
|
|
914
|
+
def disable(self):
    """Stop the query from fetching."""
    switch = self._enabled
    switch.write(False)
|
|
917
|
+
|
|
918
|
+
@override
|
|
919
|
+
def dispose(self):
|
|
920
|
+
"""Clean up the result and its observe effect."""
|
|
921
|
+
if self._interval_effect is not None and not self._interval_effect.__disposed__:
|
|
922
|
+
self._interval_effect.dispose()
|
|
923
|
+
if not self._observe_effect.__disposed__:
|
|
924
|
+
self._observe_effect.dispose()
|
|
925
|
+
|
|
926
|
+
|
|
927
|
+
class QueryProperty(Generic[T, TState], InitializableProperty):
    """Descriptor for state-bound queries created by the @query decorator.

    QueryProperty is the return type of the ``@query`` decorator. It acts as a
    descriptor that creates and manages query instances for each State object.

    When accessed on a State instance, returns a QueryResult with reactive
    properties (data, status, error) and methods (refetch, invalidate, etc.).

    Supports additional decorators for customization:
    - ``@query_prop.key``: Define dynamic query key for sharing.
    - ``@query_prop.initial_data``: Provide initial/placeholder data.
    - ``@query_prop.on_success``: Handle successful fetch.
    - ``@query_prop.on_error``: Handle fetch errors.

    Example:

    ```python
    class UserState(ps.State):
        user_id: str = ""

        @ps.query
        async def user(self) -> User:
            return await api.get_user(self.user_id)

        @user.key
        def _user_key(self):
            return ("user", self.user_id)

        @user.on_success
        def _on_user_loaded(self, data: User):
            print(f"Loaded user: {data.name}")
    ```
    """

    # Attribute name on the owning State class (set by the decorator).
    name: str
    # The decorated async method; bound to a state instance at initialize time.
    _fetch_fn: "Callable[[TState], Awaitable[T]]"
    _keep_alive: bool
    _keep_previous_data: bool
    # Seconds before data counts as stale.
    _stale_time: float
    # Seconds an unused query is retained in the cache.
    _gc_time: float
    # Auto-refetch period in seconds; None disables.
    _refetch_interval: float | None
    # Retry attempts on fetch failure.
    _retries: int
    # Delay between retries, seconds.
    _retry_delay: float
    # Timestamp used to judge staleness of configured initial data.
    _initial_data_updated_at: float | dt.datetime | None
    _enabled: bool
    # Value or per-state factory for initial data; MISSING when unset.
    _initial_data: T | Callable[[TState], T] | None
    # Static key, per-state key factory, or None for an unkeyed query.
    _key: QueryKey | Callable[[TState], QueryKey] | None
    # Not using OnSuccessFn and OnErrorFn since unions of callables are not well
    # supported in the type system. We just need to be careful to use
    # call_flexible to invoke these functions.
    _on_success_fn: Callable[[TState, T], Any] | None
    _on_error_fn: Callable[[TState, Exception], Any] | None
    _fetch_on_mount: bool
    # Name of the private per-state attribute caching the created result.
    _priv_result: str
|
|
982
|
+
|
|
983
|
+
def __init__(
    self,
    name: str,
    fetch_fn: "Callable[[TState], Awaitable[T]]",
    keep_previous_data: bool = False,
    stale_time: float = 0.0,
    gc_time: float = 300.0,
    refetch_interval: float | None = None,
    retries: int = 3,
    retry_delay: float = RETRY_DELAY_DEFAULT,
    initial_data_updated_at: float | dt.datetime | None = None,
    enabled: bool = True,
    fetch_on_mount: bool = True,
    key: QueryKey | Callable[[TState], QueryKey] | None = None,
):
    """Store the query configuration captured by the ``@query`` decorator."""
    self.name = name
    self._fetch_fn = fetch_fn
    self._key = key
    self._on_success_fn = None
    self._on_error_fn = None
    self._keep_previous_data = keep_previous_data
    self._stale_time = stale_time
    self._gc_time = gc_time
    self._refetch_interval = refetch_interval
    self._retries = retries
    self._retry_delay = retry_delay
    self._initial_data_updated_at = initial_data_updated_at
    # MISSING distinguishes "no initial data configured" from an explicit None.
    self._initial_data = MISSING  # pyright: ignore[reportAttributeAccessIssue]
    self._enabled = enabled
    self._fetch_on_mount = fetch_on_mount
    # Per-state attribute under which the created result is cached.
    self._priv_result = f"__query_{name}"
|
|
1014
|
+
|
|
1015
|
+
# Decorator to attach a key function
def key(self, fn: Callable[[TState], QueryKey]):
    """Register *fn* as the dynamic key factory for this query."""
    if self._key is not None:
        raise RuntimeError(
            f"Cannot use @{self.name}.key decorator when a key is already provided to @query(key=...)."
        )
    self._key = fn
    return fn
|
|
1023
|
+
|
|
1024
|
+
# Decorator to attach a function providing initial data
def initial_data(self, fn: Callable[[TState], T]):
    """Register *fn* as the per-state initial data factory."""
    if self._initial_data is not MISSING:
        raise RuntimeError(
            f"Duplicate initial_data() decorator for query '{self.name}'. Only one is allowed."
        )
    self._initial_data = fn
    return fn
|
|
1032
|
+
|
|
1033
|
+
# Decorator to attach an on-success handler (sync or async)
def on_success(self, fn: OnSuccessFn[TState, T]):
    """Register *fn* to be invoked with fetched data on success."""
    if self._on_success_fn is not None:
        raise RuntimeError(
            f"Duplicate on_success() decorator for query '{self.name}'. Only one is allowed."
        )
    self._on_success_fn = fn  # pyright: ignore[reportAttributeAccessIssue]
    return fn
|
|
1041
|
+
|
|
1042
|
+
# Decorator to attach an on-error handler (sync or async)
def on_error(self, fn: OnErrorFn[TState]):
    """Register *fn* to be invoked with the fetch error on failure."""
    if self._on_error_fn is not None:
        raise RuntimeError(
            f"Duplicate on_error() decorator for query '{self.name}'. Only one is allowed."
        )
    self._on_error_fn = fn  # pyright: ignore[reportAttributeAccessIssue]
    return fn
|
|
1050
|
+
|
|
1051
|
+
@override
def initialize(
    self, state: Any, name: str
) -> KeyedQueryResult[T] | UnkeyedQueryResult[T]:
    """Create (or return the cached) query result bound to *state*.

    The result is memoized on the state instance under a private
    attribute, so repeated descriptor access is cheap and stable.
    """
    # Return cached query instance if present
    result: KeyedQueryResult[T] | UnkeyedQueryResult[T] | None = getattr(
        state, self._priv_result, None
    )
    if result:
        # Don't re-initialize, just return the cached instance
        return result

    # Bind methods to this instance
    fetch_fn = bind_state(state, self._fetch_fn)
    # Initial data may be a per-state factory or a plain value.
    initial_data = cast(
        T | None,
        (
            call_flexible(self._initial_data, state)
            if callable(self._initial_data)
            else self._initial_data
        ),
    )

    if self._key is None:
        # Unkeyed query: create UnkeyedQuery with single observer
        result = self._create_unkeyed(
            fetch_fn,
            initial_data,
            self._initial_data_updated_at,
            state,
        )
    else:
        # Keyed query: use session-wide QueryStore
        result = self._create_keyed(
            state,
            fetch_fn,
            initial_data,
            self._initial_data_updated_at,
        )

    # Store result on the instance
    setattr(state, self._priv_result, result)
    return result
|
|
1094
|
+
|
|
1095
|
+
def _create_keyed(
    self,
    state: TState,
    fetch_fn: Callable[[], Awaitable[T]],
    initial_data: T | None,
    initial_data_updated_at: float | dt.datetime | None,
) -> KeyedQueryResult[T]:
    """Create or get a keyed query from the session store."""
    assert self._key is not None

    # Create a Computed for the key - passthrough for constant keys, reactive for function keys
    if callable(self._key):
        key_computed = Computed(
            bind_state(state, self._key), name=f"query.key.{self.name}"
        )
    else:
        const_key = self._key  # ensure a constant reference
        key_computed = Computed(lambda: const_key, name=f"query.key.{self.name}")

    # The shared query store lives on the current render session.
    render = PulseContext.get().render
    if render is None:
        raise RuntimeError("No render session available")
    store = render.query_store

    def query() -> KeyedQuery[T]:
        # Tracked read: the computed re-resolves when the key changes.
        key = key_computed()
        # Use Untrack to avoid an error due to creating an Effect within a computed
        with Untrack():
            return store.ensure(
                key,
                initial_data,
                initial_data_updated_at=initial_data_updated_at,
                gc_time=self._gc_time,
                retries=self._retries,
                retry_delay=self._retry_delay,
            )

    query_computed = Computed(query, name=f"query.{self.name}")

    return KeyedQueryResult[T](
        query=query_computed,
        fetch_fn=fetch_fn,
        stale_time=self._stale_time,
        keep_previous_data=self._keep_previous_data,
        gc_time=self._gc_time,
        refetch_interval=self._refetch_interval,
        on_success=bind_state(state, self._on_success_fn)
        if self._on_success_fn
        else None,
        on_error=bind_state(state, self._on_error_fn)
        if self._on_error_fn
        else None,
        enabled=self._enabled,
        fetch_on_mount=self._fetch_on_mount,
    )
|
|
1150
|
+
|
|
1151
|
+
def _create_unkeyed(
    self,
    fetch_fn: Callable[[], Awaitable[T]],
    initial_data: T | None,
    initial_data_updated_at: float | dt.datetime | None,
    state: TState,
) -> UnkeyedQueryResult[T]:
    """Create a private unkeyed query."""
    # Bind the optional handlers to the state instance up front.
    success_cb = (
        bind_state(state, self._on_success_fn) if self._on_success_fn else None
    )
    error_cb = (
        bind_state(state, self._on_error_fn) if self._on_error_fn else None
    )
    return UnkeyedQueryResult[T](
        fetch_fn=fetch_fn,
        on_success=success_cb,
        on_error=error_cb,
        retries=self._retries,
        retry_delay=self._retry_delay,
        initial_data=initial_data,
        initial_data_updated_at=initial_data_updated_at,
        gc_time=self._gc_time,
        stale_time=self._stale_time,
        keep_previous_data=self._keep_previous_data,
        refetch_interval=self._refetch_interval,
        enabled=self._enabled,
        fetch_on_mount=self._fetch_on_mount,
    )
|
|
1178
|
+
|
|
1179
|
+
def __get__(self, obj: Any, objtype: Any = None) -> "QueryResult[T]":
    """Descriptor access: materialize the per-state query result."""
    if obj is None:
        # Accessed on the class itself; expose the descriptor.
        return self  # pyright: ignore[reportReturnType]
    return self.initialize(obj, self.name)
|
|
1183
|
+
|
|
1184
|
+
|
|
1185
|
+
# Overload: bare decoration (``@query``) — the method is passed directly.
@overload
def query(
    fn: Callable[[TState], Awaitable[T]],
    *,
    stale_time: float = 0.0,
    gc_time: float | None = 300.0,
    refetch_interval: float | None = None,
    keep_previous_data: bool = False,
    retries: int = 3,
    retry_delay: float | None = None,
    initial_data_updated_at: float | dt.datetime | None = None,
    enabled: bool = True,
    fetch_on_mount: bool = True,
    key: QueryKey | None = None,
) -> QueryProperty[T, TState]: ...
|
|
1200
|
+
|
|
1201
|
+
|
|
1202
|
+
# Overload: parameterized form (``@query(...)``) — returns the decorator.
@overload
def query(
    fn: None = None,
    *,
    stale_time: float = 0.0,
    gc_time: float | None = 300.0,
    refetch_interval: float | None = None,
    keep_previous_data: bool = False,
    retries: int = 3,
    retry_delay: float | None = None,
    initial_data_updated_at: float | dt.datetime | None = None,
    enabled: bool = True,
    fetch_on_mount: bool = True,
    key: QueryKey | None = None,
) -> Callable[[Callable[[TState], Awaitable[T]]], QueryProperty[T, TState]]: ...
|
|
1217
|
+
|
|
1218
|
+
|
|
1219
|
+
def query(
    fn: Callable[[TState], Awaitable[T]] | None = None,
    *,
    stale_time: float = 0.0,
    gc_time: float | None = 300.0,
    refetch_interval: float | None = None,
    keep_previous_data: bool = False,
    retries: int = 3,
    retry_delay: float | None = None,
    initial_data_updated_at: float | dt.datetime | None = None,
    enabled: bool = True,
    fetch_on_mount: bool = True,
    key: QueryKey | None = None,
) -> (
    QueryProperty[T, TState]
    | Callable[[Callable[[TState], Awaitable[T]]], QueryProperty[T, TState]]
):
    """Decorator for async data fetching on State methods.

    Creates a reactive query that automatically fetches data, handles loading
    states, retries on failure, and caches results. Queries can be shared
    across components using keys.

    Args:
        fn: The async method to decorate (when used without parentheses).
        stale_time: Seconds before data is considered stale (default 0.0).
        gc_time: Seconds to keep unused query in cache (default 300.0, None to disable).
        refetch_interval: Auto-refetch interval in seconds (default None, disabled).
        keep_previous_data: Keep previous data while refetching (default False).
        retries: Number of retry attempts on failure (default 3).
        retry_delay: Delay between retries in seconds (default 2.0).
        initial_data_updated_at: Timestamp for initial data staleness calculation.
        enabled: Whether query is enabled (default True).
        fetch_on_mount: Fetch when component mounts (default True).
        key: Static query key for sharing across instances.

    Returns:
        QueryProperty that creates QueryResult instances when accessed.

    Example:

        Basic usage:

        ```python
        class UserState(ps.State):
            user_id: str = ""

            @ps.query
            async def user(self) -> User:
                return await api.get_user(self.user_id)
        ```

        With options:

        ```python
        @ps.query(stale_time=60, refetch_interval=300)
        async def user(self) -> User:
            return await api.get_user(self.user_id)
        ```

        Keyed query (shared across instances):

        ```python
        @ps.query(key=("users", "current"))
        async def current_user(self) -> User:
            return await api.get_current_user()
        ```
    """

    def decorator(
        func: Callable[[TState], Awaitable[T]], /
    ) -> QueryProperty[T, TState]:
        sig = inspect.signature(func)
        params = list(sig.parameters.values())
        # Only state-method form supported for now (single 'self')
        if not (len(params) == 1 and params[0].name == "self"):
            raise TypeError("@query currently only supports state methods (self)")

        return QueryProperty(
            func.__name__,
            func,
            stale_time=stale_time,
            # None means "use the default retention window".
            gc_time=gc_time if gc_time is not None else 300.0,
            refetch_interval=refetch_interval,
            keep_previous_data=keep_previous_data,
            retries=retries,
            retry_delay=RETRY_DELAY_DEFAULT if retry_delay is None else retry_delay,
            initial_data_updated_at=initial_data_updated_at,
            enabled=enabled,
            fetch_on_mount=fetch_on_mount,
            key=key,
        )

    # Identity comparison against None (not truthiness): functions are always
    # truthy, but `is not None` states the bare-vs-parameterized intent precisely.
    if fn is not None:
        return decorator(fn)
    return decorator
|