pulse-framework 0.1.64__py3-none-any.whl → 0.1.66a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pulse/queries/client.py CHANGED
@@ -3,6 +3,7 @@ from collections.abc import Callable
 from typing import Any, TypeVar, overload

 from pulse.context import PulseContext
+from pulse.helpers import MISSING
 from pulse.queries.common import ActionResult, QueryKey
 from pulse.queries.infinite_query import InfiniteQuery, Page
 from pulse.queries.query import KeyedQuery
@@ -203,7 +204,10 @@ class QueryClient:
         query = self.get(key)
         if query is None:
             return None
-        return query.data.read()
+        value = query.data.read()
+        if value is MISSING:
+            return None
+        return value

     def get_infinite_data(self, key: QueryKey) -> list[Page[Any, Any]] | None:
         """Get the pages for an infinite query by key.
pulse/queries/effect.py CHANGED
@@ -7,6 +7,7 @@ from typing import (
     override,
 )

+from pulse.helpers import MISSING
 from pulse.reactive import AsyncEffect, Computed, Signal


@@ -49,7 +50,7 @@ class AsyncQueryEffect(AsyncEffect):
         # For unkeyed queries on re-run (dependency changed), reset data/status
         # to behave like keyed queries when key changes (new Query with data=None)
         if self._is_unkeyed and self.runs > 0:
-            self.fetcher.data.write(None)
+            self.fetcher.data.write(MISSING)
             self.fetcher.status.write("loading")

         return super().run()
pulse/queries/infinite_query.py CHANGED
@@ -19,8 +19,8 @@ from pulse.context import PulseContext
 from pulse.helpers import (
     MISSING,
     Disposable,
+    Missing,
     call_flexible,
-    later,
     maybe_await,
 )
 from pulse.queries.common import (
@@ -36,6 +36,7 @@ from pulse.queries.common import (
 from pulse.queries.query import RETRY_DELAY_DEFAULT, QueryConfig
 from pulse.reactive import Computed, Effect, Signal, Untrack
 from pulse.reactive_extensions import ReactiveList, unwrap
+from pulse.scheduling import TimerHandleLike, create_task, later
 from pulse.state import InitializableProperty, State

 T = TypeVar("T")
@@ -225,7 +226,7 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
     _queue_task: asyncio.Task[None] | None

     _observers: "list[InfiniteQueryResult[T, TParam]]"
-    _gc_handle: asyncio.TimerHandle | None
+    _gc_handle: TimerHandleLike | None
     _interval_effect: Effect | None
     _interval: float | None
     _interval_observer: "InfiniteQueryResult[T, TParam] | None"
@@ -242,7 +243,7 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
         max_pages: int = 0,
         retries: int = 3,
         retry_delay: float = RETRY_DELAY_DEFAULT,
-        initial_data: list[Page[T, TParam]] | None | Any = MISSING,
+        initial_data: list[Page[T, TParam]] | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         on_dispose: Callable[[Any], None] | None = None,
@@ -266,7 +267,7 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
         if initial_data is MISSING:
             initial_pages = []
         else:
-            initial_pages = cast(list[Page[T, TParam]], initial_data) or []
+            initial_pages = cast(list[Page[T, TParam]] | None, initial_data) or []

         self.pages = ReactiveList(initial_pages)
         self.error = Signal(None, name=f"inf_query.error({key})")
@@ -562,7 +563,7 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
         if self._queue_task is None or self._queue_task.done():
             # Create task with no reactive scope to avoid inheriting deps from caller
             with Untrack():
-                self._queue_task = asyncio.create_task(self._process_queue())
+                self._queue_task = create_task(self._process_queue())
         return self._queue_task

     async def _process_queue(self):
@@ -859,7 +860,7 @@ class InfiniteQueryResult(Generic[T, TParam], Disposable):
     _on_success: Callable[[list[Page[T, TParam]]], Awaitable[None] | None] | None
     _on_error: Callable[[Exception], Awaitable[None] | None] | None
     _observe_effect: Effect
-    _data_computed: Computed[list[Page[T, TParam]] | None]
+    _data_computed: Computed[list[Page[T, TParam]] | None | Missing]
     _enabled: Signal[bool]
     _fetch_on_mount: bool

@@ -920,7 +921,9 @@ class InfiniteQueryResult(Generic[T, TParam], Disposable):
             immediate=True,
         )
         self._data_computed = Computed(
-            self._data_computed_fn, name=f"inf_query_data({self._query().key})"
+            self._data_computed_fn,
+            name=f"inf_query_data({self._query().key})",
+            initial_value=MISSING,
         )

     @property
@@ -948,18 +951,19 @@ class InfiniteQueryResult(Generic[T, TParam], Disposable):
         return self._query().error.read()

     def _data_computed_fn(
-        self, prev: list[Page[T, TParam]] | None
-    ) -> list[Page[T, TParam]] | None:
+        self, prev: list[Page[T, TParam]] | None | Missing
+    ) -> list[Page[T, TParam]] | None | Missing:
         query = self._query()
-        if self._keep_previous_data and query.status() != "success":
+        if self._keep_previous_data and query.status() == "loading":
             return prev
         # Access pages.version to subscribe to structural changes
-        result = unwrap(query.pages) if len(query.pages) > 0 else None
-        return result
+        if len(query.pages) == 0:
+            return MISSING
+        return unwrap(query.pages)

     @property
     def data(self) -> list[Page[T, TParam]] | None:
-        return self._data_computed()
+        return none_if_missing(self._data_computed())

     @property
     def pages(self) -> list[T] | None:
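
The none_if_missing helper used above is not itself part of this diff; presumably it collapses the sentinel back to None at the public boundary, roughly:

def none_if_missing(value):
    # Assumed behaviour: public accessors report None while the computed
    # still holds the internal MISSING sentinel.
    return None if value is MISSING else value
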
@@ -988,8 +992,6 @@ class InfiniteQueryResult(Generic[T, TParam], Disposable):
         return isinstance(self._query().current_action(), FetchPrevious)

     def is_stale(self) -> bool:
-        if self._stale_time <= 0:
-            return False
         query = self._query()
         return (time.time() - query.last_updated.read()) > self._stale_time

@@ -1096,6 +1098,7 @@ class InfiniteQueryProperty(Generic[T, TParam, TState], InitializableProperty):

     Optional decorators:
     - ``@infinite_query_prop.get_previous_page_param``: For bi-directional pagination.
+    - ``@infinite_query_prop.initial_data``: Provide initial pages.
     - ``@infinite_query_prop.on_success``: Handle successful fetch.
     - ``@infinite_query_prop.on_error``: Handle fetch errors.

@@ -1131,6 +1134,12 @@ class InfiniteQueryProperty(Generic[T, TParam, TState], InitializableProperty):
     _refetch_interval: float | None
     _retries: int
     _retry_delay: float
+    _initial_data: (
+        list[Page[T, TParam]]
+        | Callable[[TState], list[Page[T, TParam]]]
+        | Missing
+        | None
+    )
     _initial_page_param: TParam
     _get_next_page_param: (
         Callable[[TState, list[Page[T, TParam]]], TParam | None] | None
@@ -1182,6 +1191,7 @@ class InfiniteQueryProperty(Generic[T, TParam, TState], InitializableProperty):
         self._retry_delay = retry_delay
         self._on_success_fn = None
         self._on_error_fn = None
+        self._initial_data = MISSING
         self._key = key
         self._initial_data_updated_at = initial_data_updated_at
         self._enabled = enabled
@@ -1212,6 +1222,16 @@ class InfiniteQueryProperty(Generic[T, TParam, TState], InitializableProperty):
         self._on_error_fn = fn  # pyright: ignore[reportAttributeAccessIssue]
         return fn

+    def initial_data(
+        self, fn: Callable[[TState], list[Page[T, TParam]]]
+    ) -> Callable[[TState], list[Page[T, TParam]]]:
+        if self._initial_data is not MISSING:
+            raise RuntimeError(
+                f"Duplicate initial_data() decorator for infinite query '{self.name}'. Only one is allowed."
+            )
+        self._initial_data = fn
+        return fn
+
     def get_next_page_param(
         self,
         fn: Callable[[TState, list[Page[T, TParam]]], TParam | None],
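
The new initial_data decorator follows the same register-once convention as the existing on_success/on_error decorators: the slot starts out as MISSING and a second registration raises. A reduced, self-contained sketch of that guard (the property class below is hypothetical and reuses the MISSING sentinel sketched earlier):

class HypotheticalQueryProperty:
    def __init__(self, name: str):
        self.name = name
        self._initial_data = MISSING  # "not configured" until a decorator runs

    def initial_data(self, fn):
        if self._initial_data is not MISSING:
            raise RuntimeError(f"Duplicate initial_data() decorator for '{self.name}'.")
        self._initial_data = fn
        return fn
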
@@ -1260,8 +1280,23 @@ class InfiniteQueryProperty(Generic[T, TParam, TState], InitializableProperty):
             raise RuntimeError(
                 f"key is required for infinite query '{self.name}'. Provide a key via @infinite_query(key=...) or @{self.name}.key decorator."
             )
+        raw_initial = (
+            call_flexible(self._initial_data, state)
+            if callable(self._initial_data)
+            else self._initial_data
+        )
+        initial_data = (
+            MISSING
+            if raw_initial is MISSING
+            else cast(list[Page[T, TParam]] | None, raw_initial)
+        )
         query = self._resolve_keyed(
-            state, fetch_fn, next_fn, prev_fn, self._initial_data_updated_at
+            state,
+            fetch_fn,
+            next_fn,
+            prev_fn,
+            initial_data,
+            self._initial_data_updated_at,
         )

         on_success = None
@@ -1297,6 +1332,7 @@ class InfiniteQueryProperty(Generic[T, TParam, TState], InitializableProperty):
         fetch_fn: Callable[[TParam], Awaitable[T]],
         next_fn: Callable[[list[Page[T, TParam]]], TParam | None],
         prev_fn: Callable[[list[Page[T, TParam]]], TParam | None] | None,
+        initial_data: list[Page[T, TParam]] | Missing | None,
         initial_data_updated_at: float | dt.datetime | None,
     ) -> Computed[InfiniteQuery[T, TParam]]:
         assert self._key is not None
@@ -1327,6 +1363,7 @@ class InfiniteQueryProperty(Generic[T, TParam, TState], InitializableProperty):
             get_next_page_param=next_fn,
             get_previous_page_param=prev_fn,
             max_pages=self._max_pages,
+            initial_data=initial_data,
             gc_time=self._gc_time,
             retries=self._retries,
             retry_delay=self._retry_delay,
@@ -1420,7 +1457,7 @@ def infinite_query(
         stale_time: Seconds before data is considered stale (default 0.0).
         gc_time: Seconds to keep unused query in cache (default 300.0).
         refetch_interval: Auto-refetch interval in seconds (default None).
-        keep_previous_data: Keep previous data while refetching (default False).
+        keep_previous_data: Keep previous data while loading (default False).
         retries: Number of retry attempts on failure (default 3).
         retry_delay: Delay between retries in seconds (default 2.0).
         initial_data_updated_at: Timestamp for initial data staleness.
pulse/queries/query.py CHANGED
@@ -18,9 +18,8 @@ from pulse.context import PulseContext
 from pulse.helpers import (
     MISSING,
     Disposable,
+    Missing,
     call_flexible,
-    is_pytest,
-    later,
     maybe_await,
 )
 from pulse.queries.common import (
@@ -35,6 +34,7 @@ from pulse.queries.common import (
 )
 from pulse.queries.effect import AsyncQueryEffect
 from pulse.reactive import Computed, Effect, Signal, Untrack
+from pulse.scheduling import TimerHandleLike, create_task, is_pytest, later
 from pulse.state import InitializableProperty, State

 if TYPE_CHECKING:
@@ -64,7 +64,7 @@ class QueryConfig(Generic[T]):

     retries: int
     retry_delay: float
-    initial_data: T | Callable[[], T] | None
+    initial_data: T | Callable[[], T] | Missing | None
     initial_data_updated_at: float | dt.datetime | None
     gc_time: float
     on_dispose: Callable[[Any], None] | None
@@ -90,7 +90,7 @@ class QueryState(Generic[T]):
     cfg: QueryConfig[T]

     # Reactive signals for query state
-    data: Signal[T | None]
+    data: Signal[T | None | Missing]
     error: Signal[Exception | None]
     last_updated: Signal[float]
     status: Signal[QueryStatus]
@@ -103,7 +103,7 @@ class QueryState(Generic[T]):
         name: str,
         retries: int = 3,
         retry_delay: float = RETRY_DELAY_DEFAULT,
-        initial_data: T | None = MISSING,
+        initial_data: T | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         on_dispose: Callable[[Any], None] | None = None,
@@ -119,7 +119,7 @@ class QueryState(Generic[T]):

         # Initialize reactive signals
         self.data = Signal(
-            None if initial_data is MISSING else initial_data,
+            MISSING if initial_data is MISSING else initial_data,
             name=f"query.data({name})",
         )
         self.error = Signal(None, name=f"query.error({name})")
@@ -147,7 +147,8 @@ class QueryState(Generic[T]):
     ):
         """Set data manually, accepting a value or updater function."""
         current = self.data.read()
-        new_value = cast(T, data(current) if callable(data) else data)
+        current_value = cast(T | None, None if current is MISSING else current)
+        new_value = cast(T, data(current_value) if callable(data) else data)
         self.set_success(new_value, manual=True)
         if updated_at is not None:
             self.set_updated_at(updated_at)
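
Because the data signal now starts at MISSING, set_data normalizes the previous value before invoking an updater callable, so user code never sees the sentinel. A hypothetical call site (the surrounding query/state wiring is assumed, not shown in this diff):

# prev is None until a first value exists; it is never the internal MISSING marker.
result.set_data(lambda prev: (prev or []) + ["new item"])
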
@@ -225,7 +226,7 @@ async def run_fetch_with_retries(
        on_success: Optional callback on success
        on_error: Optional callback on error
        untrack: If True, wrap fetch_fn in Untrack() to prevent dependency tracking.
-            Use for keyed queries where fetch is triggered via asyncio.create_task.
+            Use for keyed queries where fetch is triggered via create_task().
    """
    state.reset_retries()

@@ -267,7 +268,7 @@ class KeyedQuery(Generic[T], Disposable):
     observers: "list[KeyedQueryResult[T]]"
     _task: asyncio.Task[None] | None
     _task_initiator: "KeyedQueryResult[T] | None"
-    _gc_handle: asyncio.TimerHandle | None
+    _gc_handle: TimerHandleLike | None
     _interval_effect: Effect | None
     _interval: float | None
     _interval_observer: "KeyedQueryResult[T] | None"
@@ -277,7 +278,7 @@ class KeyedQuery(Generic[T], Disposable):
         key: QueryKey,
         retries: int = 3,
         retry_delay: float = RETRY_DELAY_DEFAULT,
-        initial_data: T | None = MISSING,
+        initial_data: T | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         on_dispose: Callable[[Any], None] | None = None,
@@ -302,7 +303,7 @@ class KeyedQuery(Generic[T], Disposable):

     # --- Delegate signal access to state ---
     @property
-    def data(self) -> Signal[T | None]:
+    def data(self) -> Signal[T | None | Missing]:
         return self.state.data

     @property
@@ -383,7 +384,7 @@ class KeyedQuery(Generic[T], Disposable):
            fetch_fn,
            on_success=on_success,
            on_error=on_error,
-            untrack=True,  # Keyed queries use asyncio.create_task, need to untrack
+            untrack=True,  # Keyed queries use create_task(), need to untrack
        )

    def run_fetch(
@@ -407,7 +408,7 @@ class KeyedQuery(Generic[T], Disposable):
         self.state.is_fetching.write(True)
         # Capture current observers at fetch start
         observers = list(self.observers)
-        self._task = asyncio.create_task(self._run_fetch(fetch_fn, observers))
+        self._task = create_task(self._run_fetch(fetch_fn, observers))
         self._task_initiator = initiator
         return self._task

@@ -427,7 +428,10 @@ class KeyedQuery(Generic[T], Disposable):
         # Return result based on current state
         if self.state.status() == "error":
             return ActionError(cast(Exception, self.state.error.read()))
-        return ActionSuccess(cast(T, self.state.data.read()))
+        data = self.state.data.read()
+        if data is MISSING:
+            return ActionSuccess(cast(T, None))
+        return ActionSuccess(cast(T, data))

     def cancel(self) -> None:
         """Cancel the current fetch if running."""
@@ -599,7 +603,7 @@ class UnkeyedQueryResult(Generic[T], Disposable):
     _keep_previous_data: bool
     _enabled: Signal[bool]
     _interval_effect: Effect | None
-    _data_computed: Computed[T | None]
+    _data_computed: Computed[T | None | Missing]

     def __init__(
         self,
@@ -608,7 +612,7 @@ class UnkeyedQueryResult(Generic[T], Disposable):
         on_error: Callable[[Exception], Awaitable[None] | None] | None = None,
         retries: int = 3,
         retry_delay: float = RETRY_DELAY_DEFAULT,
-        initial_data: T | None = MISSING,
+        initial_data: T | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         stale_time: float = 0.0,
@@ -652,7 +656,9 @@ class UnkeyedQueryResult(Generic[T], Disposable):

         # Computed for keep_previous_data logic
         self._data_computed = Computed(
-            self._data_computed_fn, name="query_data(unkeyed)"
+            self._data_computed_fn,
+            name="query_data(unkeyed)",
+            initial_value=MISSING,
         )

         # Schedule initial fetch if stale (untracked to avoid reactive loop)
@@ -679,12 +685,12 @@ class UnkeyedQueryResult(Generic[T], Disposable):
             immediate=True,
         )

-    def _data_computed_fn(self, prev: T | None) -> T | None:
-        if self._keep_previous_data and self.state.status() != "success":
+    def _data_computed_fn(self, prev: T | None | Missing) -> T | None | Missing:
+        if self._keep_previous_data and self.state.status() == "loading":
             return prev
         raw = self.state.data()
-        if raw is None:
-            return None
+        if raw is MISSING:
+            return MISSING
         return raw

     # --- Status properties ---
@@ -714,7 +720,10 @@ class UnkeyedQueryResult(Generic[T], Disposable):

     @property
     def data(self) -> T | None:
-        return self._data_computed()
+        value = self._data_computed()
+        if value is MISSING:
+            return None
+        return cast(T | None, value)

     # --- State methods ---
     def set_data(self, data: T | Callable[[T | None], T]):
@@ -780,7 +789,10 @@ class UnkeyedQueryResult(Generic[T], Disposable):
         await self._effect.wait()
         if self.state.status() == "error":
             return ActionError(cast(Exception, self.state.error.read()))
-        return ActionSuccess(cast(T, self.state.data.read()))
+        data = self.state.data.read()
+        if data is MISSING:
+            return ActionSuccess(cast(T, None))
+        return ActionSuccess(cast(T, data))

     async def ensure(self) -> ActionResult[T]:
         """Ensure an initial fetch has started, then wait for completion."""
@@ -820,7 +832,7 @@ class KeyedQueryResult(Generic[T], Disposable):
     _on_success: Callable[[T], Awaitable[None] | None] | None
     _on_error: Callable[[Exception], Awaitable[None] | None] | None
     _observe_effect: Effect
-    _data_computed: Computed[T | None]
+    _data_computed: Computed[T | None | Missing]
     _enabled: Signal[bool]
     _fetch_on_mount: bool

@@ -877,7 +889,9 @@ class KeyedQueryResult(Generic[T], Disposable):
             immediate=True,
         )
         self._data_computed = Computed(
-            self._data_computed_fn, name=f"query_data({self._query().key})"
+            self._data_computed_fn,
+            name=f"query_data({self._query().key})",
+            initial_value=MISSING,
         )

     @property
@@ -908,18 +922,18 @@ class KeyedQueryResult(Generic[T], Disposable):
     def error(self) -> Exception | None:
         return self._query().error.read()

-    def _data_computed_fn(self, prev: T | None) -> T | None:
+    def _data_computed_fn(self, prev: T | None | Missing) -> T | None | Missing:
         query = self._query()
-        if self._keep_previous_data and query.status() != "success":
+        if self._keep_previous_data and query.status() == "loading":
             return prev
-        raw = query.data()
-        if raw is None:
-            return None
-        return raw
+        return query.data()

     @property
     def data(self) -> T | None:
-        return self._data_computed()
+        value = self._data_computed()
+        if value is MISSING:
+            return None
+        return cast(T | None, value)

     def is_stale(self) -> bool:
         """Check if the query data is stale based on stale_time."""
@@ -1039,7 +1053,7 @@ class QueryProperty(Generic[T, TState], InitializableProperty):
     _retry_delay: float
     _initial_data_updated_at: float | dt.datetime | None
     _enabled: bool
-    _initial_data: T | Callable[[TState], T] | None
+    _initial_data: T | Callable[[TState], T] | Missing | None
     _key: QueryKey | Callable[[TState], QueryKey] | None
     # Not using OnSuccessFn and OnErrorFn since unions of callables are not well
     # supported in the type system. We just need to be careful to use
@@ -1076,7 +1090,7 @@ class QueryProperty(Generic[T, TState], InitializableProperty):
         self._retries = retries
         self._retry_delay = retry_delay
         self._initial_data_updated_at = initial_data_updated_at
-        self._initial_data = MISSING  # pyright: ignore[reportAttributeAccessIssue]
+        self._initial_data = MISSING
         self._enabled = enabled
         self._fetch_on_mount = fetch_on_mount
         self._priv_result = f"__query_{name}"
@@ -1131,13 +1145,13 @@ class QueryProperty(Generic[T, TState], InitializableProperty):

         # Bind methods to this instance
         fetch_fn = bind_state(state, self._fetch_fn)
-        initial_data = cast(
-            T | None,
-            (
-                call_flexible(self._initial_data, state)
-                if callable(self._initial_data)
-                else self._initial_data
-            ),
+        raw_initial = (
+            call_flexible(self._initial_data, state)
+            if callable(self._initial_data)
+            else self._initial_data
+        )
+        initial_data = (
+            MISSING if raw_initial is MISSING else cast(T | None, raw_initial)
         )

         if self._key is None:
@@ -1165,7 +1179,7 @@ class QueryProperty(Generic[T, TState], InitializableProperty):
         self,
         state: TState,
         fetch_fn: Callable[[], Awaitable[T]],
-        initial_data: T | None,
+        initial_data: T | Missing | None,
         initial_data_updated_at: float | dt.datetime | None,
     ) -> KeyedQueryResult[T]:
         """Create or get a keyed query from the session store."""
@@ -1220,7 +1234,7 @@ class QueryProperty(Generic[T, TState], InitializableProperty):
     def _create_unkeyed(
         self,
         fetch_fn: Callable[[], Awaitable[T]],
-        initial_data: T | None,
+        initial_data: T | Missing | None,
         initial_data_updated_at: float | dt.datetime | None,
         state: TState,
     ) -> UnkeyedQueryResult[T]:
@@ -1313,7 +1327,7 @@ def query(
        stale_time: Seconds before data is considered stale (default 0.0).
        gc_time: Seconds to keep unused query in cache (default 300.0, None to disable).
        refetch_interval: Auto-refetch interval in seconds (default None, disabled).
-        keep_previous_data: Keep previous data while refetching (default False).
+        keep_previous_data: Keep previous data while loading (default False).
        retries: Number of retry attempts on failure (default 3).
        retry_delay: Delay between retries in seconds (default 2.0).
        initial_data_updated_at: Timestamp for initial data staleness calculation.
pulse/queries/store.py CHANGED
@@ -2,7 +2,7 @@ import datetime as dt
 from collections.abc import Callable
 from typing import Any, TypeVar, cast

-from pulse.helpers import MISSING
+from pulse.helpers import MISSING, Missing
 from pulse.queries.common import QueryKey
 from pulse.queries.infinite_query import InfiniteQuery, Page
 from pulse.queries.query import RETRY_DELAY_DEFAULT, KeyedQuery
@@ -29,7 +29,7 @@ class QueryStore:
     def ensure(
         self,
         key: QueryKey,
-        initial_data: T | None = MISSING,
+        initial_data: T | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         retries: int = 3,
@@ -87,6 +87,7 @@ class QueryStore:
         get_previous_page_param: Callable[[list[Page[Any, Any]]], Any | None]
         | None = None,
         max_pages: int = 0,
+        initial_data: list[Page[Any, Any]] | Missing | None = MISSING,
         initial_data_updated_at: float | dt.datetime | None = None,
         gc_time: float = 300.0,
         retries: int = 3,
@@ -110,6 +111,7 @@ class QueryStore:
            get_next_page_param=get_next_page_param,
            get_previous_page_param=get_previous_page_param,
            max_pages=max_pages,
+            initial_data=initial_data,
            initial_data_updated_at=initial_data_updated_at,
            gc_time=gc_time,
            retries=retries,
@@ -118,3 +120,9 @@ class QueryStore:
         )
         self._entries[key] = entry
         return entry
+
+    def dispose_all(self) -> None:
+        """Dispose all queries and clear the store."""
+        for entry in list(self._entries.values()):
+            entry.dispose()
+        self._entries.clear()
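
dispose_all gives callers a one-shot teardown of every cached query. A hypothetical teardown hook (the session wiring is assumed; only dispose_all itself comes from this diff):

def on_session_closed(store: QueryStore) -> None:
    # Disposes every cached KeyedQuery/InfiniteQuery entry, then empties the cache.
    store.dispose_all()
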
pulse/reactive.py CHANGED
@@ -15,11 +15,14 @@ from typing import (

 from pulse.helpers import (
     Disposable,
-    create_task,
     maybe_await,
-    schedule_on_loop,
     values_equal,
 )
+from pulse.scheduling import (
+    TimerHandleLike,
+    call_soon,
+    create_task,
+)

 T = TypeVar("T")
 T_co = TypeVar("T_co", covariant=True)
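
Several files now take their event-loop helpers from a new pulse.scheduling module instead of pulse.helpers. The module itself is not included in this diff; based only on how its names are used here, it appears to wrap the asyncio loop roughly along these lines (a sketch under that assumption):

import asyncio
from typing import Any, Callable, Coroutine, Protocol

class TimerHandleLike(Protocol):
    def cancel(self) -> None: ...

def create_task(coro: Coroutine[Any, Any, Any]) -> "asyncio.Task[Any]":
    # assumed: thin wrapper over the running loop's create_task
    return asyncio.get_running_loop().create_task(coro)

def call_soon(fn: Callable[[], None]) -> None:
    # assumed: schedule a callback on the running loop (was schedule_on_loop)
    asyncio.get_running_loop().call_soon(fn)

def later(delay: float, fn: Callable[[], None]) -> TimerHandleLike:
    # assumed: delayed callback returning a cancellable handle
    return asyncio.get_running_loop().call_later(delay, fn)
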
@@ -151,6 +154,7 @@ class Computed(Generic[T_co]):
        fn: Function computing the value. May optionally accept prev_value
            as first positional argument for incremental computation.
        name: Debug name for the computed.
+        initial_value: Seed value used as prev_value on first compute.

    Attributes:
        value: Cached computed value.
@@ -171,13 +175,20 @@ class Computed(Generic[T_co]):

     fn: Callable[..., T_co]
     name: str | None
+    value: Any
     dirty: bool
     on_stack: bool
     accepts_prev_value: bool

-    def __init__(self, fn: Callable[..., T_co], name: str | None = None):
+    def __init__(
+        self,
+        fn: Callable[..., T_co],
+        name: str | None = None,
+        *,
+        initial_value: Any = None,
+    ):
         self.fn = fn
-        self.value: T_co = None  # pyright: ignore[reportAttributeAccessIssue]
+        self.value = initial_value
         self.name = name
         self.dirty = False
         self.on_stack = False
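
initial_value gives prev-value-aware computeds a well-defined seed instead of the previous hard-coded None. A small usage sketch based on the docstring above (Signal/Computed call patterns follow those visible elsewhere in this diff):

from pulse.reactive import Computed, Signal

counter = Signal(0, name="counter")

def running_total(prev: int) -> int:
    # prev receives initial_value (0) on the first compute,
    # then the last cached value on subsequent recomputes.
    return prev + counter()

total = Computed(running_total, name="total", initial_value=0)
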
@@ -384,7 +395,7 @@ class Effect(Disposable):
     immediate: bool
     _lazy: bool
     _interval: float | None
-    _interval_handle: asyncio.TimerHandle | None
+    _interval_handle: TimerHandleLike | None
     update_deps: bool
     batch: "Batch | None"
     paused: bool
@@ -461,7 +472,7 @@ class Effect(Disposable):
     def _schedule_interval(self):
         """Schedule the next interval run if interval is set."""
         if self._interval is not None and self._interval > 0:
-            from pulse.helpers import later
+            from pulse.scheduling import later

             self._interval_handle = later(self._interval, self._on_interval)

@@ -995,7 +1006,7 @@ class GlobalBatch(Batch):
     @override
     def register_effect(self, effect: Effect):
         if not self.is_scheduled:
-            schedule_on_loop(self.flush)
+            call_soon(self.flush)
         self.is_scheduled = True
         return super().register_effect(effect)