pulse-framework 0.1.42__py3-none-any.whl → 0.1.44__py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their public registry; it is provided for informational purposes only.
- pulse/__init__.py +12 -3
- pulse/decorators.py +8 -172
- pulse/helpers.py +39 -23
- pulse/queries/client.py +462 -0
- pulse/queries/common.py +28 -0
- pulse/queries/effect.py +39 -0
- pulse/queries/infinite_query.py +1157 -0
- pulse/queries/mutation.py +47 -0
- pulse/queries/query.py +560 -53
- pulse/queries/store.py +81 -18
- pulse/reactive.py +95 -20
- pulse/reactive_extensions.py +19 -7
- pulse/state.py +5 -0
- pulse/user_session.py +7 -3
- {pulse_framework-0.1.42.dist-info → pulse_framework-0.1.44.dist-info}/METADATA +1 -1
- {pulse_framework-0.1.42.dist-info → pulse_framework-0.1.44.dist-info}/RECORD +18 -16
- pulse/queries/query_observer.py +0 -365
- {pulse_framework-0.1.42.dist-info → pulse_framework-0.1.44.dist-info}/WHEEL +0 -0
- {pulse_framework-0.1.42.dist-info → pulse_framework-0.1.44.dist-info}/entry_points.txt +0 -0
pulse/queries/query.py
CHANGED
@@ -1,18 +1,19 @@
 import asyncio
+import datetime as dt
+import inspect
 import time
-from collections.abc import Awaitable, Callable
+from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from typing import (
-    TYPE_CHECKING,
     Any,
     Generic,
-    Literal,
-    TypeAlias,
     TypeVar,
     cast,
+    overload,
    override,
 )
 
+from pulse.context import PulseContext
 from pulse.helpers import (
     MISSING,
     Disposable,
@@ -21,41 +22,24 @@ from pulse.helpers import (
     later,
     maybe_await,
 )
-from pulse.
-
-
-
+from pulse.queries.common import (
+    ActionError,
+    ActionResult,
+    ActionSuccess,
+    OnErrorFn,
+    OnSuccessFn,
+    QueryKey,
+    QueryStatus,
+    bind_state,
+)
+from pulse.queries.effect import AsyncQueryEffect
+from pulse.reactive import AsyncEffect, Computed, Effect, Signal, Untrack
+from pulse.state import InitializableProperty, State
 
 T = TypeVar("T")
-
-QueryStatus: TypeAlias = Literal["loading", "success", "error"]
-QueryFetchStatus: TypeAlias = Literal["idle", "fetching", "paused"]
-
-
-class AsyncQueryEffect(AsyncEffect):
-    """
-    Specialized AsyncEffect for queries that synchronously sets loading state
-    when rescheduled/run.
-    """
+TState = TypeVar("TState", bound=State)
 
-
-
-    def __init__(
-        self,
-        fn: Callable[[], Awaitable[None]],
-        query: "Query[Any]",
-        name: str | None = None,
-        lazy: bool = False,
-        deps: list[Signal[Any] | Computed[Any]] | None = None,
-    ):
-        self.query = query
-        super().__init__(fn, name=name, lazy=lazy, deps=deps)
-
-    @override
-    def run(self) -> asyncio.Task[Any]:
-        # Immediately set loading state before running the effect
-        self.query.fetch_status.write("fetching")
-        return super().run()
+RETRY_DELAY_DEFAULT = 2.0 if not is_pytest() else 0.01
 
 
 @dataclass(slots=True)
@@ -63,13 +47,11 @@ class QueryConfig(Generic[T]):
     retries: int
     retry_delay: float
     initial_data: T | Callable[[], T] | None
+    initial_data_updated_at: float | dt.datetime | None
     gc_time: float
     on_dispose: Callable[[Any], None] | None
 
 
-RETRY_DELAY_DEFAULT = 2.0 if not is_pytest() else 0.01
-
-
 class Query(Generic[T], Disposable):
     """
     Represents a single query instance in a store.
@@ -85,7 +67,7 @@ class Query(Generic[T], Disposable):
     error: Signal[Exception | None]
     last_updated: Signal[float]
     status: Signal[QueryStatus]
-
+    is_fetching: Signal[bool]
     retries: Signal[int]
     retry_reason: Signal[Exception | None]
 
@@ -100,6 +82,7 @@ class Query(Generic[T], Disposable):
         retries: int = 3,
         retry_delay: float = RETRY_DELAY_DEFAULT,
         initial_data: T | None = MISSING,
+        initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         on_dispose: Callable[[Any], None] | None = None,
     ):
@@ -109,6 +92,7 @@ class Query(Generic[T], Disposable):
             retries=retries,
             retry_delay=retry_delay,
             initial_data=initial_data,
+            initial_data_updated_at=initial_data_updated_at,
             gc_time=gc_time,
             on_dispose=on_dispose,
         )
@@ -118,15 +102,19 @@ class Query(Generic[T], Disposable):
             None if initial_data is MISSING else initial_data, name=f"query.data({key})"
         )
         self.error = Signal(None, name=f"query.error({key})")
+
         self.last_updated = Signal(
-
+            0.0,
             name=f"query.last_updated({key})",
         )
+        if initial_data_updated_at:
+            self.set_updated_at(initial_data_updated_at)
+
         self.status = Signal(
             "loading" if initial_data is MISSING else "success",
             name=f"query.status({key})",
         )
-        self.
+        self.is_fetching = Signal(False, name=f"query.is_fetching({key})")
         self.retries = Signal(0, name=f"query.retries({key})")
         self.retry_reason = Signal(None, name=f"query.retry_reason({key})")
 
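The hunks above add an `initial_data_updated_at` option so seeded data carries a freshness timestamp: `last_updated` starts from the supplied value (converted from `datetime` via `set_updated_at`) instead of 0.0. A minimal sketch of constructing a seeded query, assuming a hypothetical fetcher and key; the keyword arguments mirror the ones `_resolve_unkeyed` passes further below:

    import datetime as dt
    from pulse.queries.query import Query

    async def fetch_user() -> dict:
        # Hypothetical fetcher, for illustration only.
        return {"id": 1, "name": "Ada"}

    q = Query(
        key=("user", 1),
        fn=fetch_user,
        initial_data={"id": 1, "name": "Ada"},
        # A float UNIX timestamp or a datetime is accepted; datetimes are
        # converted with .timestamp() inside set_updated_at().
        initial_data_updated_at=dt.datetime(2024, 1, 1, tzinfo=dt.timezone.utc),
    )
    # Because initial_data is present, the query starts in "success" rather
    # than "loading", and staleness is measured from the seeded timestamp.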
@@ -135,11 +123,44 @@ class Query(Generic[T], Disposable):
         # Effect is created lazily on first observation
         self._effect = None
 
-    def set_data(
-        self
+    def set_data(
+        self,
+        data: T | Callable[[T | None], T],
+        *,
+        updated_at: float | dt.datetime | None = None,
+    ):
+        """Set data manually, accepting a value or updater function."""
+        current = self.data.read()
+        new_value = cast(T, data(current) if callable(data) else data)
+        self._set_success(new_value, manual=True)
+        if updated_at is not None:
+            self.set_updated_at(updated_at)
+
+    def set_updated_at(self, updated_at: float | dt.datetime):
+        if isinstance(updated_at, dt.datetime):
+            updated_at = updated_at.timestamp()
+        self.last_updated.write(updated_at)
+
+    def set_initial_data(
+        self,
+        data: T | Callable[[], T],
+        *,
+        updated_at: float | dt.datetime | None = None,
+    ):
+        """
+        Set data as if it were provided as initial_data.
+        Optionally supply an updated_at timestamp to seed staleness calculations.
+        """
+        if self.status() == "loading":
+            value = cast(T, data() if callable(data) else data)
+            self.set_data(value, updated_at=updated_at)
 
-    def set_error(
+    def set_error(
+        self, error: Exception, *, updated_at: float | dt.datetime | None = None
+    ):
         self._set_error(error, manual=True)
+        if updated_at is not None:
+            self.set_updated_at(updated_at)
 
     def _set_success(self, data: T, manual: bool = False):
         self.data.write(data)
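A short sketch of the new manual-update helpers, continuing the hypothetical `q` from the previous example; `updated_at` is optional everywhere and feeds the same `set_updated_at` path:

    import time

    # Functional update: receives the current value (or None) and returns the new one.
    q.set_data(lambda prev: {**(prev or {}), "name": "Grace"})

    # Plain value plus an explicit freshness timestamp (here: one minute ago).
    q.set_data({"id": 1, "name": "Grace"}, updated_at=time.time() - 60)

    # Only applies while the query is still "loading"; it will not overwrite
    # data that has already been fetched or set manually.
    q.set_initial_data({"id": 1}, updated_at=time.time())

    # Manually record a failure; status becomes "error".
    q.set_error(RuntimeError("backend unavailable"))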
@@ -147,7 +168,7 @@ class Query(Generic[T], Disposable):
         self.error.write(None)
         self.status.write("success")
         if not manual:
-            self.
+            self.is_fetching.write(False)
         self.retries.write(0)
         self.retry_reason.write(None)
 
@@ -156,7 +177,7 @@ class Query(Generic[T], Disposable):
         self.last_updated.write(time.time())
         self.status.write("error")
         if not manual:
-            self.
+            self.is_fetching.write(False)
         # Don't reset retries on final error - preserve for debugging
         # retry_reason is updated to the final error in _run
 
@@ -170,7 +191,7 @@ class Query(Generic[T], Disposable):
         if self._effect is None:
             self._effect = AsyncQueryEffect(
                 self._run,
-
+                fetcher=self,
                 name=f"query_effect({self.key})",
                 deps=[] if self.key is not None else None,
             )
@@ -207,19 +228,25 @@ class Query(Generic[T], Disposable):
                 await maybe_await(call_flexible(obs._on_error, e)) # pyright: ignore[reportPrivateUsage]
             return
 
-    async def refetch(self, cancel_refetch: bool = True) -> T:
+    async def refetch(self, cancel_refetch: bool = True) -> ActionResult[T]:
         """
         Reruns the query and returns the result.
         If cancel_refetch is True (default), cancels any in-flight request and starts a new one.
         If cancel_refetch is False, deduplicates requests if one is already in flight.
         """
-        if cancel_refetch:
-            self.effect.
+        if cancel_refetch or not self.is_fetching():
+            self.effect.schedule()
         return await self.wait()
 
-    async def wait(self) -> T:
+    async def wait(self) -> ActionResult[T]:
+        # If loading and no task, schedule a refetch
+        if self.status() == "loading" and not self.is_fetching():
+            self.effect.schedule()
         await self.effect.wait()
-
+        # Return result based on current state
+        if self.status() == "error":
+            return ActionError(cast(Exception, self.error.read()))
+        return ActionSuccess(cast(T, self.data.read()))
 
     def invalidate(self, cancel_refetch: bool = False):
         """
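`refetch()` and `wait()` now return an `ActionResult[T]` (either `ActionSuccess` or `ActionError`, imported from `pulse.queries.common` above) instead of the bare value. A minimal sketch of consuming it; the diff does not show the attribute names on the result variants, so only an `isinstance` check is used, and `handle_failure`/`render_user` are hypothetical application callbacks:

    result = await q.refetch()
    if isinstance(result, ActionError):
        # The query ended in the "error" state; the wrapped exception mirrors q.error.
        handle_failure(result)
    else:
        # An ActionSuccess wrapping the value that q.data holds after a successful run.
        render_user(result)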
@@ -268,3 +295,483 @@ class Query(Generic[T], Disposable):
 
         if self.cfg.on_dispose:
             self.cfg.on_dispose(self)
+
+
+class QueryResult(Generic[T], Disposable):
+    """
+    Thin wrapper around Query that adds callbacks, staleness tracking,
+    and observation lifecycle.
+
+    For keyed queries, uses a Computed to resolve the correct query based on the key.
+    """
+
+    _query: Computed[Query[T]]
+    _stale_time: float
+    _gc_time: float
+    _refetch_interval: float | None
+    _keep_previous_data: bool
+    _on_success: Callable[[T], Awaitable[None] | None] | None
+    _on_error: Callable[[Exception], Awaitable[None] | None] | None
+    _callback_effect: Effect
+    _observe_effect: Effect
+    _interval_effect: Effect | None
+    _data_computed: Computed[T | None]
+    _disposed_data: T | None
+    _enabled: Signal[bool]
+    _fetch_on_mount: bool
+    _is_observing: bool
+
+    def __init__(
+        self,
+        query: Computed[Query[T]],
+        stale_time: float = 0.0,
+        gc_time: float = 300.0,
+        refetch_interval: float | None = None,
+        keep_previous_data: bool = False,
+        on_success: Callable[[T], Awaitable[None] | None] | None = None,
+        on_error: Callable[[Exception], Awaitable[None] | None] | None = None,
+        enabled: bool = True,
+        fetch_on_mount: bool = True,
+    ):
+        self._query = query
+        self._stale_time = stale_time
+        self._gc_time = gc_time
+        self._refetch_interval = refetch_interval
+        self._keep_previous_data = keep_previous_data
+        self._on_success = on_success
+        self._on_error = on_error
+        self._disposed_data = None
+        self._enabled = Signal(enabled, name=f"query.enabled({query().key})")
+        self._interval_effect = None
+
+        def observe_effect():
+            query = self._query()
+            enabled = self._enabled()
+            with Untrack():
+                query.observe(self)
+
+                # If stale or loading, schedule refetch (only when enabled)
+                if enabled and fetch_on_mount and self.is_stale():
+                    query.invalidate()
+
+            # Return cleanup function that captures the observer
+            def cleanup():
+                query.unobserve(self)
+
+            return cleanup
+
+        self._observe_effect = Effect(
+            observe_effect,
+            name=f"query_observe({self._query().key})",
+            immediate=True,
+        )
+        self._data_computed = Computed(
+            self._data_computed_fn, name=f"query_data({self._query().key})"
+        )
+
+        # Set up interval effect if interval is specified
+        if refetch_interval is not None and refetch_interval > 0:
+            self._setup_interval_effect(refetch_interval)
+
+    def _setup_interval_effect(self, interval: float):
+        """Create an effect that invalidates the query at the specified interval."""
+
+        def interval_fn():
+            # Read enabled to make this effect reactive to enabled changes
+            if self._enabled():
+                self._query().invalidate()
+
+        self._interval_effect = Effect(
+            interval_fn,
+            name=f"query_interval({self._query().key})",
+            interval=interval,
+            immediate=True,
+        )
+
+    @property
+    def status(self) -> QueryStatus:
+        return self._query().status()
+
+    # Forward property reads to the query's signals (with automatic reactive tracking)
+    @property
+    def is_loading(self) -> bool:
+        return self.status == "loading"
+
+    @property
+    def is_success(self) -> bool:
+        return self.status == "success"
+
+    @property
+    def is_error(self) -> bool:
+        return self.status == "error"
+
+    @property
+    def is_fetching(self) -> bool:
+        return self._query().is_fetching()
+
+    @property
+    def error(self) -> Exception | None:
+        return self._query().error.read()
+
+    def _data_computed_fn(self, prev: T | None) -> T | None:
+        query = self._query()
+        if self._keep_previous_data and query.status() != "success":
+            return prev
+        raw = query.data()
+        if raw is None:
+            return None
+        return raw
+
+    @property
+    def data(self) -> T | None:
+        return self._data_computed()
+
+    def is_stale(self) -> bool:
+        """Check if the query data is stale based on stale_time."""
+        query = self._query()
+        return (time.time() - query.last_updated.read()) > self._stale_time
+
+    async def refetch(self, cancel_refetch: bool = True) -> ActionResult[T]:
+        """Refetch the query data."""
+        return await self._query().refetch(cancel_refetch=cancel_refetch)
+
+    async def wait(self) -> ActionResult[T]:
+        return await self._query().wait()
+
+    def invalidate(self):
+        """Mark the query as stale and refetch if there are observers."""
+        query = self._query()
+        query.invalidate()
+
+    def set_data(self, data: T | Callable[[T | None], T]):
+        """Optimistically set data without changing loading/error state."""
+        query = self._query()
+        query.set_data(data)
+
+    def set_initial_data(
+        self,
+        data: T | Callable[[], T],
+        *,
+        updated_at: float | dt.datetime | None = None,
+    ):
+        """Seed initial data and optional freshness timestamp."""
+        query = self._query()
+        query.set_initial_data(data, updated_at=updated_at)
+
+    def set_error(self, error: Exception):
+        """Set error state on the query."""
+        query = self._query()
+        query.set_error(error)
+
+    def enable(self):
+        """Enable the query."""
+        self._enabled.write(True)
+
+    def disable(self):
+        """Disable the query, preventing it from fetching."""
+        self._enabled.write(False)
+
+    @override
+    def dispose(self):
+        """Clean up the result and its observe effect."""
+        if self._interval_effect is not None:
+            self._interval_effect.dispose()
+        self._observe_effect.dispose()
+
+
+class QueryProperty(Generic[T, TState], InitializableProperty):
+    """
+    Descriptor for state-bound queries.
+
+    Usage:
+        class S(ps.State):
+            @ps.query()
+            async def user(self) -> User: ...
+
+            @user.key
+            def _user_key(self):
+                return ("user", self.user_id)
+    """
+
+    name: str
+    _fetch_fn: "Callable[[TState], Awaitable[T]]"
+    _keep_alive: bool
+    _keep_previous_data: bool
+    _stale_time: float
+    _gc_time: float
+    _refetch_interval: float | None
+    _retries: int
+    _retry_delay: float
+    _initial_data_updated_at: float | dt.datetime | None
+    _enabled: bool
+    _initial_data: T | Callable[[TState], T] | None
+    _key: QueryKey | Callable[[TState], QueryKey] | None
+    # Not using OnSuccessFn and OnErrorFn since unions of callables are not well
+    # supported in the type system. We just need to be careful to use
+    # call_flexible to invoke these functions.
+    _on_success_fn: Callable[[TState, T], Any] | None
+    _on_error_fn: Callable[[TState, Exception], Any] | None
+    _fetch_on_mount: bool
+    _priv_result: str
+
+    def __init__(
+        self,
+        name: str,
+        fetch_fn: "Callable[[TState], Awaitable[T]]",
+        keep_previous_data: bool = False,
+        stale_time: float = 0.0,
+        gc_time: float = 300.0,
+        refetch_interval: float | None = None,
+        retries: int = 3,
+        retry_delay: float = RETRY_DELAY_DEFAULT,
+        initial_data_updated_at: float | dt.datetime | None = None,
+        enabled: bool = True,
+        fetch_on_mount: bool = True,
+        key: QueryKey | Callable[[TState], QueryKey] | None = None,
+    ):
+        self.name = name
+        self._fetch_fn = fetch_fn
+        self._key = key
+        self._on_success_fn = None
+        self._on_error_fn = None
+        self._keep_previous_data = keep_previous_data
+        self._stale_time = stale_time
+        self._gc_time = gc_time
+        self._refetch_interval = refetch_interval
+        self._retries = retries
+        self._retry_delay = retry_delay
+        self._initial_data_updated_at = initial_data_updated_at
+        self._initial_data = MISSING  # pyright: ignore[reportAttributeAccessIssue]
+        self._enabled = enabled
+        self._fetch_on_mount = fetch_on_mount
+        self._priv_result = f"__query_{name}"
+
+    # Decorator to attach a key function
+    def key(self, fn: Callable[[TState], QueryKey]):
+        if self._key is not None:
+            raise RuntimeError(
+                f"Cannot use @{self.name}.key decorator when a key is already provided to @query(key=...)."
+            )
+        self._key = fn
+        return fn
+
+    # Decorator to attach a function providing initial data
+    def initial_data(self, fn: Callable[[TState], T]):
+        if self._initial_data is not MISSING:
+            raise RuntimeError(
+                f"Duplicate initial_data() decorator for query '{self.name}'. Only one is allowed."
+            )
+        self._initial_data = fn
+        return fn
+
+    # Decorator to attach an on-success handler (sync or async)
+    def on_success(self, fn: OnSuccessFn[TState, T]):
+        if self._on_success_fn is not None:
+            raise RuntimeError(
+                f"Duplicate on_success() decorator for query '{self.name}'. Only one is allowed."
+            )
+        self._on_success_fn = fn  # pyright: ignore[reportAttributeAccessIssue]
+        return fn
+
+    # Decorator to attach an on-error handler (sync or async)
+    def on_error(self, fn: OnErrorFn[TState]):
+        if self._on_error_fn is not None:
+            raise RuntimeError(
+                f"Duplicate on_error() decorator for query '{self.name}'. Only one is allowed."
+            )
+        self._on_error_fn = fn  # pyright: ignore[reportAttributeAccessIssue]
+        return fn
+
+    @override
+    def initialize(self, state: Any, name: str) -> QueryResult[T]:
+        # Return cached query instance if present
+        result: QueryResult[T] | None = getattr(state, self._priv_result, None)
+        if result:
+            # Don't re-initialize, just return the cached instance
+            return result
+
+        # Bind methods to this instance
+        fetch_fn = bind_state(state, self._fetch_fn)
+        initial_data = cast(
+            T | None,
+            (
+                call_flexible(self._initial_data, state)
+                if callable(self._initial_data)
+                else self._initial_data
+            ),
+        )
+
+        if self._key is None:
+            # Unkeyed query: create private Query
+            query = self._resolve_unkeyed(
+                fetch_fn,
+                initial_data,
+                self._initial_data_updated_at,
+            )
+        else:
+            # Keyed query: use session-wide QueryStore
+            query = self._resolve_keyed(
+                state,
+                fetch_fn,
+                initial_data,
+                self._initial_data_updated_at,
+            )
+
+        # Wrap query in QueryResult
+        result = QueryResult[T](
+            query=query,
+            stale_time=self._stale_time,
+            keep_previous_data=self._keep_previous_data,
+            gc_time=self._gc_time,
+            refetch_interval=self._refetch_interval,
+            on_success=bind_state(state, self._on_success_fn)
+            if self._on_success_fn
+            else None,
+            on_error=bind_state(state, self._on_error_fn)
+            if self._on_error_fn
+            else None,
+            enabled=self._enabled,
+            fetch_on_mount=self._fetch_on_mount,
+        )
+
+        # Store result on the instance
+        setattr(state, self._priv_result, result)
+        return result
+
+    def _resolve_keyed(
+        self,
+        state: TState,
+        fetch_fn: Callable[[], Awaitable[T]],
+        initial_data: T | None,
+        initial_data_updated_at: float | dt.datetime | None,
+    ) -> Computed[Query[T]]:
+        """Create or get a keyed query from the session store using a Computed."""
+        assert self._key is not None
+
+        # Create a Computed for the key - passthrough for constant keys, reactive for function keys
+        if callable(self._key):
+            key_computed = Computed(
+                bind_state(state, self._key), name=f"query.key.{self.name}"
+            )
+        else:
+            const_key = self._key  # ensure a constant reference
+            key_computed = Computed(lambda: const_key, name=f"query.key.{self.name}")
+
+        render = PulseContext.get().render
+        if render is None:
+            raise RuntimeError("No render session available")
+        store = render.query_store
+
+        def query() -> Query[T]:
+            key = key_computed()
+            return store.ensure(
+                key,
+                fetch_fn,
+                initial_data,
+                initial_data_updated_at=initial_data_updated_at,
+                gc_time=self._gc_time,
+                retries=self._retries,
+                retry_delay=self._retry_delay,
+            )
+
+        return Computed(query, name=f"query.{self.name}")
+
+    def _resolve_unkeyed(
+        self,
+        fetch_fn: Callable[[], Awaitable[T]],
+        initial_data: T | None,
+        initial_data_updated_at: float | dt.datetime | None,
+    ) -> Computed[Query[T]]:
+        """Create a private unkeyed query."""
+        query = Query[T](
+            key=None,
+            fn=fetch_fn,
+            initial_data=initial_data,
+            initial_data_updated_at=initial_data_updated_at,
+            gc_time=self._gc_time,
+            retries=self._retries,
+            retry_delay=self._retry_delay,
+        )
+        return Computed(lambda: query, name=f"query.{self.name}")
+
+    def __get__(self, obj: Any, objtype: Any = None) -> QueryResult[T]:
+        if obj is None:
+            return self  # pyright: ignore[reportReturnType]
+        return self.initialize(obj, self.name)
+
+
+@overload
+def query(
+    fn: Callable[[TState], Awaitable[T]],
+    *,
+    stale_time: float = 0.0,
+    gc_time: float | None = 300.0,
+    refetch_interval: float | None = None,
+    keep_previous_data: bool = False,
+    retries: int = 3,
+    retry_delay: float | None = None,
+    initial_data_updated_at: float | dt.datetime | None = None,
+    enabled: bool = True,
+    fetch_on_mount: bool = True,
+    key: QueryKey | None = None,
+) -> QueryProperty[T, TState]: ...
+
+
+@overload
+def query(
+    fn: None = None,
+    *,
+    stale_time: float = 0.0,
+    gc_time: float | None = 300.0,
+    refetch_interval: float | None = None,
+    keep_previous_data: bool = False,
+    retries: int = 3,
+    retry_delay: float | None = None,
+    initial_data_updated_at: float | dt.datetime | None = None,
+    enabled: bool = True,
+    fetch_on_mount: bool = True,
+    key: QueryKey | None = None,
+) -> Callable[[Callable[[TState], Awaitable[T]]], QueryProperty[T, TState]]: ...
+
+
+def query(
+    fn: Callable[[TState], Awaitable[T]] | None = None,
+    *,
+    stale_time: float = 0.0,
+    gc_time: float | None = 300.0,
+    refetch_interval: float | None = None,
+    keep_previous_data: bool = False,
+    retries: int = 3,
+    retry_delay: float | None = None,
+    initial_data_updated_at: float | dt.datetime | None = None,
+    enabled: bool = True,
+    fetch_on_mount: bool = True,
+    key: QueryKey | None = None,
+):
+    def decorator(
+        func: Callable[[TState], Awaitable[T]], /
+    ) -> QueryProperty[T, TState]:
+        sig = inspect.signature(func)
+        params = list(sig.parameters.values())
+        # Only state-method form supported for now (single 'self')
+        if not (len(params) == 1 and params[0].name == "self"):
+            raise TypeError("@query currently only supports state methods (self)")
+
+        return QueryProperty(
+            func.__name__,
+            func,
+            stale_time=stale_time,
+            gc_time=gc_time if gc_time is not None else 300.0,
+            refetch_interval=refetch_interval,
+            keep_previous_data=keep_previous_data,
+            retries=retries,
+            retry_delay=RETRY_DELAY_DEFAULT if retry_delay is None else retry_delay,
+            initial_data_updated_at=initial_data_updated_at,
+            enabled=enabled,
+            fetch_on_mount=fetch_on_mount,
+            key=key,
+        )
+
+    if fn:
+        return decorator(fn)
+    return decorator