pulse-framework 0.1.63-py3-none-any.whl → 0.1.65-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- pulse/__init__.py +16 -10
- pulse/app.py +30 -11
- pulse/channel.py +3 -3
- pulse/{form.py → forms.py} +2 -2
- pulse/helpers.py +9 -212
- pulse/proxy.py +10 -3
- pulse/queries/client.py +5 -1
- pulse/queries/effect.py +2 -1
- pulse/queries/infinite_query.py +164 -54
- pulse/queries/protocol.py +9 -0
- pulse/queries/query.py +164 -81
- pulse/queries/store.py +10 -2
- pulse/reactive.py +18 -7
- pulse/render_session.py +61 -12
- pulse/scheduling.py +448 -0
- {pulse_framework-0.1.63.dist-info → pulse_framework-0.1.65.dist-info}/METADATA +1 -1
- {pulse_framework-0.1.63.dist-info → pulse_framework-0.1.65.dist-info}/RECORD +19 -18
- {pulse_framework-0.1.63.dist-info → pulse_framework-0.1.65.dist-info}/WHEEL +0 -0
- {pulse_framework-0.1.63.dist-info → pulse_framework-0.1.65.dist-info}/entry_points.txt +0 -0
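
The largest structural change in this release is the new `pulse/scheduling.py` module (+448 lines), which absorbs the event-loop helpers that previously lived in `pulse/helpers.py` (-212 lines). The sketch below illustrates the import migration visible in the diffs that follow; only the imported names are confirmed by those diffs, and everything else about the `pulse.scheduling` surface (additional exports, exact signatures) is an assumption.

```python
import asyncio

# pulse-framework 0.1.63 -- these helpers were imported from pulse.helpers:
# from pulse.helpers import create_task, is_pytest, later, schedule_on_loop

# pulse-framework 0.1.65 -- the same utilities come from pulse.scheduling:
from pulse.scheduling import (
    TimerHandleLike,  # timer-handle type; `later` returns something assignable to it
    call_soon,        # run a callback on the next loop tick (likely replaces schedule_on_loop)
    create_task,      # spawn an asyncio task
    is_pytest,        # detect pytest runs
    later,            # run a callback after a delay in seconds
)


def poll() -> None:
    print("tick")


async def main() -> None:
    # Mirrors Effect._schedule_interval in reactive.py: schedule a delayed
    # callback and keep the handle around for later cancellation.
    handle: TimerHandleLike = later(5.0, poll)
    await asyncio.sleep(6)


asyncio.run(main())
```
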
pulse/queries/query.py (CHANGED)

@@ -18,9 +18,8 @@ from pulse.context import PulseContext
 from pulse.helpers import (
     MISSING,
     Disposable,
+    Missing,
     call_flexible,
-    is_pytest,
-    later,
     maybe_await,
 )
 from pulse.queries.common import (
@@ -35,6 +34,7 @@ from pulse.queries.common import (
 )
 from pulse.queries.effect import AsyncQueryEffect
 from pulse.reactive import Computed, Effect, Signal, Untrack
+from pulse.scheduling import TimerHandleLike, create_task, is_pytest, later
 from pulse.state import InitializableProperty, State

 if TYPE_CHECKING:
@@ -64,7 +64,7 @@ class QueryConfig(Generic[T]):

     retries: int
     retry_delay: float
-    initial_data: T | Callable[[], T] | None
+    initial_data: T | Callable[[], T] | Missing | None
     initial_data_updated_at: float | dt.datetime | None
     gc_time: float
     on_dispose: Callable[[Any], None] | None
@@ -90,7 +90,7 @@ class QueryState(Generic[T]):
     cfg: QueryConfig[T]

     # Reactive signals for query state
-    data: Signal[T | None]
+    data: Signal[T | None | Missing]
     error: Signal[Exception | None]
     last_updated: Signal[float]
     status: Signal[QueryStatus]
@@ -103,7 +103,7 @@ class QueryState(Generic[T]):
         name: str,
         retries: int = 3,
         retry_delay: float = RETRY_DELAY_DEFAULT,
-        initial_data: T | None = MISSING,
+        initial_data: T | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         on_dispose: Callable[[Any], None] | None = None,
@@ -119,7 +119,7 @@ class QueryState(Generic[T]):

         # Initialize reactive signals
         self.data = Signal(
-
+            MISSING if initial_data is MISSING else initial_data,
             name=f"query.data({name})",
         )
         self.error = Signal(None, name=f"query.error({name})")
@@ -147,7 +147,8 @@ class QueryState(Generic[T]):
     ):
         """Set data manually, accepting a value or updater function."""
         current = self.data.read()
-
+        current_value = cast(T | None, None if current is MISSING else current)
+        new_value = cast(T, data(current_value) if callable(data) else data)
         self.set_success(new_value, manual=True)
         if updated_at is not None:
             self.set_updated_at(updated_at)
@@ -225,7 +226,7 @@ async def run_fetch_with_retries(
         on_success: Optional callback on success
         on_error: Optional callback on error
         untrack: If True, wrap fetch_fn in Untrack() to prevent dependency tracking.
-                 Use for keyed queries where fetch is triggered via
+                 Use for keyed queries where fetch is triggered via create_task().
     """
     state.reset_retries()

@@ -267,14 +268,17 @@ class KeyedQuery(Generic[T], Disposable):
     observers: "list[KeyedQueryResult[T]]"
     _task: asyncio.Task[None] | None
     _task_initiator: "KeyedQueryResult[T] | None"
-    _gc_handle:
+    _gc_handle: TimerHandleLike | None
+    _interval_effect: Effect | None
+    _interval: float | None
+    _interval_observer: "KeyedQueryResult[T] | None"

     def __init__(
         self,
         key: QueryKey,
         retries: int = 3,
         retry_delay: float = RETRY_DELAY_DEFAULT,
-        initial_data: T | None = MISSING,
+        initial_data: T | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         on_dispose: Callable[[Any], None] | None = None,
@@ -293,10 +297,13 @@ class KeyedQuery(Generic[T], Disposable):
         self._task = None
         self._task_initiator = None
         self._gc_handle = None
+        self._interval_effect = None
+        self._interval = None
+        self._interval_observer = None

     # --- Delegate signal access to state ---
     @property
-    def data(self) -> Signal[T | None]:
+    def data(self) -> Signal[T | None | Missing]:
         return self.state.data

     @property
@@ -377,7 +384,7 @@ class KeyedQuery(Generic[T], Disposable):
             fetch_fn,
             on_success=on_success,
             on_error=on_error,
-            untrack=True,  # Keyed queries use
+            untrack=True,  # Keyed queries use create_task(), need to untrack
         )

     def run_fetch(
@@ -401,7 +408,7 @@ class KeyedQuery(Generic[T], Disposable):
         self.state.is_fetching.write(True)
         # Capture current observers at fetch start
         observers = list(self.observers)
-        self._task =
+        self._task = create_task(self._run_fetch(fetch_fn, observers))
         self._task_initiator = initiator
         return self._task

@@ -421,7 +428,10 @@ class KeyedQuery(Generic[T], Disposable):
         # Return result based on current state
         if self.state.status() == "error":
             return ActionError(cast(Exception, self.state.error.read()))
-
+        data = self.state.data.read()
+        if data is MISSING:
+            return ActionSuccess(cast(T, None))
+        return ActionSuccess(cast(T, data))

     def cancel(self) -> None:
         """Cancel the current fetch if running."""
@@ -438,6 +448,66 @@ class KeyedQuery(Generic[T], Disposable):
         )
         return self.observers[0]._fetch_fn  # pyright: ignore[reportPrivateUsage]

+    @property
+    def has_interval(self) -> bool:
+        return self._interval is not None
+
+    def _select_interval_observer(
+        self,
+    ) -> tuple[float | None, "KeyedQueryResult[T] | None"]:
+        min_interval: float | None = None
+        selected: "KeyedQueryResult[T] | None" = None
+
+        for obs in reversed(self.observers):
+            interval = obs._refetch_interval  # pyright: ignore[reportPrivateUsage]
+            if interval is None:
+                continue
+            if not obs._enabled.value:  # pyright: ignore[reportPrivateUsage]
+                continue
+            if min_interval is None or interval < min_interval:
+                min_interval = interval
+                selected = obs
+
+        return min_interval, selected
+
+    def _create_interval_effect(self, interval: float) -> Effect:
+        def interval_fn():
+            observer = self._interval_observer
+            if observer is None:
+                return
+            if not self.is_scheduled and len(self.observers) > 0:
+                self.run_fetch(
+                    observer._fetch_fn,  # pyright: ignore[reportPrivateUsage]
+                    cancel_previous=False,
+                    initiator=observer,
+                )
+
+        return Effect(
+            interval_fn,
+            name=f"query_interval({self.key})",
+            interval=interval,
+            immediate=True,
+        )
+
+    def _update_interval(self) -> None:
+        new_interval, new_observer = self._select_interval_observer()
+        interval_changed = new_interval != self._interval
+
+        self._interval = new_interval
+        self._interval_observer = new_observer
+
+        if not interval_changed:
+            if self._interval_effect is None and new_interval is not None:
+                self._interval_effect = self._create_interval_effect(new_interval)
+            return
+
+        if self._interval_effect is not None:
+            self._interval_effect.dispose()
+            self._interval_effect = None
+
+        if new_interval is not None:
+            self._interval_effect = self._create_interval_effect(new_interval)
+
     async def refetch(self, cancel_refetch: bool = True) -> ActionResult[T]:
         """
         Reruns the query and returns the result.
@@ -468,11 +538,13 @@ class KeyedQuery(Generic[T], Disposable):
         self.cancel_gc()
         if observer._gc_time > 0:  # pyright: ignore[reportPrivateUsage]
             self.cfg.gc_time = max(self.cfg.gc_time, observer._gc_time)  # pyright: ignore[reportPrivateUsage]
+        self._update_interval()

     def unobserve(self, observer: "KeyedQueryResult[T]"):
         """Unregister an observer. Schedules GC if no observers remain."""
         if observer in self.observers:
             self.observers.remove(observer)
+        self._update_interval()

         # If the departing observer initiated the ongoing fetch, cancel it
         if self._task_initiator is observer and self._task and not self._task.done():
@@ -505,6 +577,9 @@ class KeyedQuery(Generic[T], Disposable):
     def dispose(self):
         """Clean up the query, cancelling any in-flight fetch."""
         self.cancel()
+        if self._interval_effect is not None:
+            self._interval_effect.dispose()
+            self._interval_effect = None
         if self.cfg.on_dispose:
             self.cfg.on_dispose(self)

@@ -528,7 +603,7 @@ class UnkeyedQueryResult(Generic[T], Disposable):
     _keep_previous_data: bool
     _enabled: Signal[bool]
     _interval_effect: Effect | None
-    _data_computed: Computed[T | None]
+    _data_computed: Computed[T | None | Missing]

     def __init__(
         self,
@@ -537,7 +612,7 @@ class UnkeyedQueryResult(Generic[T], Disposable):
         on_error: Callable[[Exception], Awaitable[None] | None] | None = None,
         retries: int = 3,
         retry_delay: float = RETRY_DELAY_DEFAULT,
-        initial_data: T | None = MISSING,
+        initial_data: T | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         stale_time: float = 0.0,
@@ -559,7 +634,12 @@ class UnkeyedQueryResult(Generic[T], Disposable):
         self._on_success = on_success
         self._on_error = on_error
         self._stale_time = stale_time
-
+        interval = (
+            refetch_interval
+            if refetch_interval is not None and refetch_interval > 0
+            else None
+        )
+        self._refetch_interval = interval
         self._keep_previous_data = keep_previous_data
         self._enabled = Signal(enabled, name="query.enabled(unkeyed)")
         self._interval_effect = None
@@ -576,17 +656,20 @@ class UnkeyedQueryResult(Generic[T], Disposable):

         # Computed for keep_previous_data logic
         self._data_computed = Computed(
-            self._data_computed_fn,
+            self._data_computed_fn,
+            name="query_data(unkeyed)",
+            initial_value=MISSING,
         )

         # Schedule initial fetch if stale (untracked to avoid reactive loop)
         with Untrack():
-            if
+            # Skip if refetch_interval is active - interval effect handles initial fetch
+            if enabled and fetch_on_mount and interval is None and self.is_stale():
                 self.schedule()

         # Set up interval effect if interval is specified
-        if
-            self._setup_interval_effect(
+        if interval is not None:
+            self._setup_interval_effect(interval)

     def _setup_interval_effect(self, interval: float):
         """Create an effect that invalidates the query at the specified interval."""
@@ -602,12 +685,12 @@ class UnkeyedQueryResult(Generic[T], Disposable):
             immediate=True,
         )

-    def _data_computed_fn(self, prev: T | None) -> T | None:
-        if self._keep_previous_data and self.state.status()
+    def _data_computed_fn(self, prev: T | None | Missing) -> T | None | Missing:
+        if self._keep_previous_data and self.state.status() == "loading":
             return prev
         raw = self.state.data()
-        if raw is
-            return
+        if raw is MISSING:
+            return MISSING
         return raw

     # --- Status properties ---
@@ -637,7 +720,10 @@ class UnkeyedQueryResult(Generic[T], Disposable):

     @property
     def data(self) -> T | None:
-
+        value = self._data_computed()
+        if value is MISSING:
+            return None
+        return cast(T | None, value)

     # --- State methods ---
     def set_data(self, data: T | Callable[[T | None], T]):
@@ -699,14 +785,20 @@ class UnkeyedQueryResult(Generic[T], Disposable):
         return await self.wait()

     async def wait(self) -> ActionResult[T]:
-        """Wait for the current
-        # If loading and no task, schedule a fetch
-        if self.state.status() == "loading" and not self.state.is_fetching():
-            self.schedule()
+        """Wait for the current in-flight fetch to complete."""
         await self._effect.wait()
         if self.state.status() == "error":
             return ActionError(cast(Exception, self.state.error.read()))
-
+        data = self.state.data.read()
+        if data is MISSING:
+            return ActionSuccess(cast(T, None))
+        return ActionSuccess(cast(T, data))
+
+    async def ensure(self) -> ActionResult[T]:
+        """Ensure an initial fetch has started, then wait for completion."""
+        if self.state.status() == "loading" and not self.state.is_fetching():
+            self.schedule()
+        return await self.wait()

     def invalidate(self):
         """Mark the query as stale and refetch through the effect."""
@@ -740,8 +832,7 @@ class KeyedQueryResult(Generic[T], Disposable):
     _on_success: Callable[[T], Awaitable[None] | None] | None
     _on_error: Callable[[Exception], Awaitable[None] | None] | None
     _observe_effect: Effect
-
-    _data_computed: Computed[T | None]
+    _data_computed: Computed[T | None | Missing]
     _enabled: Signal[bool]
     _fetch_on_mount: bool

@@ -762,12 +853,16 @@ class KeyedQueryResult(Generic[T], Disposable):
         self._fetch_fn = fetch_fn
         self._stale_time = stale_time
         self._gc_time = gc_time
-
+        interval = (
+            refetch_interval
+            if refetch_interval is not None and refetch_interval > 0
+            else None
+        )
+        self._refetch_interval = interval
         self._keep_previous_data = keep_previous_data
         self._on_success = on_success
         self._on_error = on_error
         self._enabled = Signal(enabled, name=f"query.enabled({query().key})")
-        self._interval_effect = None

         def observe_effect():
             q = self._query()
@@ -776,9 +871,11 @@ class KeyedQueryResult(Generic[T], Disposable):
             with Untrack():
                 q.observe(self)

-            #
-            if enabled and fetch_on_mount and
-
+            # Skip if query interval is active - interval effect handles initial fetch
+            if enabled and fetch_on_mount and not q.has_interval:
+                # If stale, schedule refetch (only when enabled)
+                if not q.is_fetching() and self.is_stale():
+                    self.invalidate()

             # Return cleanup function that captures the query (old query on key change)
             def cleanup():
@@ -792,26 +889,9 @@ class KeyedQueryResult(Generic[T], Disposable):
             immediate=True,
         )
         self._data_computed = Computed(
-            self._data_computed_fn,
-
-
-        # Set up interval effect if interval is specified
-        if refetch_interval is not None and refetch_interval > 0:
-            self._setup_interval_effect(refetch_interval)
-
-    def _setup_interval_effect(self, interval: float):
-        """Create an effect that invalidates the query at the specified interval."""
-
-        def interval_fn():
-            # Read enabled to make this effect reactive to enabled changes
-            if self._enabled():
-                self.invalidate()
-
-        self._interval_effect = Effect(
-            interval_fn,
-            name=f"query_interval({self._query().key})",
-            interval=interval,
-            immediate=True,
+            self._data_computed_fn,
+            name=f"query_data({self._query().key})",
+            initial_value=MISSING,
         )

     @property
@@ -842,18 +922,18 @@ class KeyedQueryResult(Generic[T], Disposable):
     def error(self) -> Exception | None:
         return self._query().error.read()

-    def _data_computed_fn(self, prev: T | None) -> T | None:
+    def _data_computed_fn(self, prev: T | None | Missing) -> T | None | Missing:
         query = self._query()
-        if self._keep_previous_data and query.status()
+        if self._keep_previous_data and query.status() == "loading":
             return prev
-
-        if raw is None:
-            return None
-        return raw
+        return query.data()

     @property
     def data(self) -> T | None:
-
+        value = self._data_computed()
+        if value is MISSING:
+            return None
+        return cast(T | None, value)

     def is_stale(self) -> bool:
         """Check if the query data is stale based on stale_time."""
@@ -874,9 +954,12 @@ class KeyedQueryResult(Generic[T], Disposable):
         return await self.wait()

     async def wait(self) -> ActionResult[T]:
-        """Wait for the current
+        """Wait for the current in-flight fetch to complete."""
+        return await self._query().wait()
+
+    async def ensure(self) -> ActionResult[T]:
+        """Ensure an initial fetch has started, then wait for completion."""
         query = self._query()
-        # If loading and no task, start a fetch with this observer's fetch function
         if query.status() == "loading" and not query.is_fetching():
             query.run_fetch(self._fetch_fn, initiator=self)
         return await query.wait()
@@ -910,16 +993,16 @@ class KeyedQueryResult(Generic[T], Disposable):
     def enable(self):
         """Enable the query."""
         self._enabled.write(True)
+        self._query()._update_interval()  # pyright: ignore[reportPrivateUsage]

     def disable(self):
         """Disable the query, preventing it from fetching."""
         self._enabled.write(False)
+        self._query()._update_interval()  # pyright: ignore[reportPrivateUsage]

     @override
     def dispose(self):
         """Clean up the result and its observe effect."""
-        if self._interval_effect is not None and not self._interval_effect.__disposed__:
-            self._interval_effect.dispose()
         if not self._observe_effect.__disposed__:
             self._observe_effect.dispose()

@@ -970,7 +1053,7 @@ class QueryProperty(Generic[T, TState], InitializableProperty):
     _retry_delay: float
     _initial_data_updated_at: float | dt.datetime | None
     _enabled: bool
-    _initial_data: T | Callable[[TState], T] | None
+    _initial_data: T | Callable[[TState], T] | Missing | None
     _key: QueryKey | Callable[[TState], QueryKey] | None
     # Not using OnSuccessFn and OnErrorFn since unions of callables are not well
     # supported in the type system. We just need to be careful to use
@@ -1007,7 +1090,7 @@ class QueryProperty(Generic[T, TState], InitializableProperty):
         self._retries = retries
         self._retry_delay = retry_delay
         self._initial_data_updated_at = initial_data_updated_at
-        self._initial_data = MISSING
+        self._initial_data = MISSING
         self._enabled = enabled
         self._fetch_on_mount = fetch_on_mount
         self._priv_result = f"__query_{name}"
@@ -1062,13 +1145,13 @@ class QueryProperty(Generic[T, TState], InitializableProperty):

         # Bind methods to this instance
         fetch_fn = bind_state(state, self._fetch_fn)
-
-
-        (
-
-
-
-        )
+        raw_initial = (
+            call_flexible(self._initial_data, state)
+            if callable(self._initial_data)
+            else self._initial_data
+        )
+        initial_data = (
+            MISSING if raw_initial is MISSING else cast(T | None, raw_initial)
         )

         if self._key is None:
@@ -1096,7 +1179,7 @@ class QueryProperty(Generic[T, TState], InitializableProperty):
         self,
         state: TState,
         fetch_fn: Callable[[], Awaitable[T]],
-        initial_data: T | None,
+        initial_data: T | Missing | None,
         initial_data_updated_at: float | dt.datetime | None,
     ) -> KeyedQueryResult[T]:
         """Create or get a keyed query from the session store."""
@@ -1151,7 +1234,7 @@ class QueryProperty(Generic[T, TState], InitializableProperty):
     def _create_unkeyed(
         self,
         fetch_fn: Callable[[], Awaitable[T]],
-        initial_data: T | None,
+        initial_data: T | Missing | None,
         initial_data_updated_at: float | dt.datetime | None,
         state: TState,
     ) -> UnkeyedQueryResult[T]:
@@ -1244,7 +1327,7 @@ def query(
         stale_time: Seconds before data is considered stale (default 0.0).
         gc_time: Seconds to keep unused query in cache (default 300.0, None to disable).
         refetch_interval: Auto-refetch interval in seconds (default None, disabled).
-        keep_previous_data: Keep previous data while
+        keep_previous_data: Keep previous data while loading (default False).
         retries: Number of retry attempts on failure (default 3).
         retry_delay: Delay between retries in seconds (default 2.0).
         initial_data_updated_at: Timestamp for initial data staleness calculation.

pulse/queries/store.py (CHANGED)

@@ -2,7 +2,7 @@ import datetime as dt
 from collections.abc import Callable
 from typing import Any, TypeVar, cast

-from pulse.helpers import MISSING
+from pulse.helpers import MISSING, Missing
 from pulse.queries.common import QueryKey
 from pulse.queries.infinite_query import InfiniteQuery, Page
 from pulse.queries.query import RETRY_DELAY_DEFAULT, KeyedQuery
@@ -29,7 +29,7 @@ class QueryStore:
     def ensure(
         self,
         key: QueryKey,
-        initial_data: T | None = MISSING,
+        initial_data: T | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         retries: int = 3,
@@ -87,6 +87,7 @@ class QueryStore:
         get_previous_page_param: Callable[[list[Page[Any, Any]]], Any | None]
         | None = None,
         max_pages: int = 0,
+        initial_data: list[Page[Any, Any]] | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         retries: int = 3,
@@ -110,6 +111,7 @@ class QueryStore:
                 get_next_page_param=get_next_page_param,
                 get_previous_page_param=get_previous_page_param,
                 max_pages=max_pages,
+                initial_data=initial_data,
                 initial_data_updated_at=initial_data_updated_at,
                 gc_time=gc_time,
                 retries=retries,
@@ -118,3 +120,9 @@ class QueryStore:
             )
             self._entries[key] = entry
             return entry
+
+    def dispose_all(self) -> None:
+        """Dispose all queries and clear the store."""
+        for entry in list(self._entries.values()):
+            entry.dispose()
+        self._entries.clear()

pulse/reactive.py (CHANGED)

@@ -15,11 +15,14 @@ from typing import (

 from pulse.helpers import (
     Disposable,
-    create_task,
     maybe_await,
-    schedule_on_loop,
     values_equal,
 )
+from pulse.scheduling import (
+    TimerHandleLike,
+    call_soon,
+    create_task,
+)

 T = TypeVar("T")
 T_co = TypeVar("T_co", covariant=True)
@@ -151,6 +154,7 @@ class Computed(Generic[T_co]):
         fn: Function computing the value. May optionally accept prev_value
             as first positional argument for incremental computation.
         name: Debug name for the computed.
+        initial_value: Seed value used as prev_value on first compute.

     Attributes:
         value: Cached computed value.
@@ -171,13 +175,20 @@ class Computed(Generic[T_co]):

     fn: Callable[..., T_co]
     name: str | None
+    value: Any
     dirty: bool
     on_stack: bool
     accepts_prev_value: bool

-    def __init__(
+    def __init__(
+        self,
+        fn: Callable[..., T_co],
+        name: str | None = None,
+        *,
+        initial_value: Any = None,
+    ):
         self.fn = fn
-        self.value
+        self.value = initial_value
         self.name = name
         self.dirty = False
         self.on_stack = False
@@ -384,7 +395,7 @@ class Effect(Disposable):
     immediate: bool
     _lazy: bool
     _interval: float | None
-    _interval_handle:
+    _interval_handle: TimerHandleLike | None
     update_deps: bool
     batch: "Batch | None"
     paused: bool
@@ -461,7 +472,7 @@ class Effect(Disposable):
     def _schedule_interval(self):
         """Schedule the next interval run if interval is set."""
         if self._interval is not None and self._interval > 0:
-            from pulse.
+            from pulse.scheduling import later

             self._interval_handle = later(self._interval, self._on_interval)

@@ -995,7 +1006,7 @@ class GlobalBatch(Batch):
     @override
     def register_effect(self, effect: Effect):
         if not self.is_scheduled:
-
+            call_soon(self.flush)
             self.is_scheduled = True
         return super().register_effect(effect)

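In `reactive.py`, besides switching its scheduling imports to `pulse.scheduling`, `Computed` gains an explicit `initial_value` keyword that seeds `prev_value` on the first compute, which is what lets the query code above start its `_data_computed` at `MISSING` instead of `None`. The sketch below illustrates that prev-value pattern under stated assumptions: the `Signal`/`Computed` call conventions follow what appears elsewhere in this diff, the concrete numbers are illustrative, and exactly when the recompute happens depends on Pulse's dirty-tracking.

```python
from pulse.reactive import Computed, Signal

# A signal holding the latest delta to fold into a running total.
delta: Signal[int] = Signal(0, name="delta")


def running_total(prev: int) -> int:
    # `prev` is `initial_value` on the first compute, the cached value after.
    return prev + delta()


total = Computed(running_total, name="running_total", initial_value=100)

print(total())   # expected 100: the seed of 100 plus the initial delta of 0
delta.write(7)
print(total())   # expected 107 once the computed re-evaluates
```
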