pulse-framework 0.1.39__py3-none-any.whl → 0.1.41__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pulse/__init__.py +14 -4
- pulse/app.py +176 -126
- pulse/channel.py +7 -7
- pulse/cli/cmd.py +81 -45
- pulse/cli/models.py +2 -0
- pulse/cli/processes.py +67 -22
- pulse/cli/uvicorn_log_config.py +1 -1
- pulse/codegen/codegen.py +14 -1
- pulse/codegen/templates/layout.py +10 -2
- pulse/decorators.py +132 -40
- pulse/form.py +9 -9
- pulse/helpers.py +75 -11
- pulse/hooks/core.py +4 -3
- pulse/hooks/states.py +91 -54
- pulse/messages.py +1 -1
- pulse/middleware.py +170 -119
- pulse/plugin.py +0 -3
- pulse/proxy.py +168 -147
- pulse/queries/__init__.py +0 -0
- pulse/queries/common.py +24 -0
- pulse/queries/mutation.py +142 -0
- pulse/queries/query.py +270 -0
- pulse/queries/query_observer.py +365 -0
- pulse/queries/store.py +60 -0
- pulse/reactive.py +146 -50
- pulse/render_session.py +5 -2
- pulse/routing.py +68 -10
- pulse/state.py +8 -7
- pulse/types/event_handler.py +2 -3
- pulse/user_session.py +3 -2
- {pulse_framework-0.1.39.dist-info → pulse_framework-0.1.41.dist-info}/METADATA +1 -1
- {pulse_framework-0.1.39.dist-info → pulse_framework-0.1.41.dist-info}/RECORD +34 -29
- pulse/query.py +0 -408
- {pulse_framework-0.1.39.dist-info → pulse_framework-0.1.41.dist-info}/WHEEL +0 -0
- {pulse_framework-0.1.39.dist-info → pulse_framework-0.1.41.dist-info}/entry_points.txt +0 -0
pulse/queries/query.py
ADDED
|
@@ -0,0 +1,270 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import time
|
|
3
|
+
from collections.abc import Awaitable, Callable, Hashable
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from typing import (
|
|
6
|
+
TYPE_CHECKING,
|
|
7
|
+
Any,
|
|
8
|
+
Generic,
|
|
9
|
+
Literal,
|
|
10
|
+
TypeAlias,
|
|
11
|
+
TypeVar,
|
|
12
|
+
cast,
|
|
13
|
+
override,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
from pulse.helpers import (
|
|
17
|
+
MISSING,
|
|
18
|
+
Disposable,
|
|
19
|
+
call_flexible,
|
|
20
|
+
is_pytest,
|
|
21
|
+
later,
|
|
22
|
+
maybe_await,
|
|
23
|
+
)
|
|
24
|
+
from pulse.reactive import AsyncEffect, Computed, Signal
|
|
25
|
+
|
|
26
|
+
if TYPE_CHECKING:
|
|
27
|
+
from pulse.queries.query_observer import QueryResult
|
|
28
|
+
|
|
29
|
+
T = TypeVar("T")
# Cache key identifying a query: a tuple of hashable parts, e.g. ("user", user_id).
QueryKey: TypeAlias = tuple[Hashable, ...]
# Lifecycle of the query *data*: "loading" until a first result, then success/error.
QueryStatus: TypeAlias = Literal["loading", "success", "error"]
# Lifecycle of the fetch *activity*, tracked independently of the data status.
QueryFetchStatus: TypeAlias = Literal["idle", "fetching", "paused"]
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class AsyncQueryEffect(AsyncEffect):
    """
    Specialized AsyncEffect for queries that synchronously marks the query as
    in-flight when rescheduled/run.

    Concretely: every ``run()`` writes ``"fetching"`` to the owning query's
    ``fetch_status`` signal before delegating to the base effect, so observers
    see the in-flight state immediately rather than after the task starts.
    """

    # Back-reference to the query whose fetch_status this effect drives.
    query: "Query[Any]"

    def __init__(
        self,
        fn: Callable[[], Awaitable[None]],
        query: "Query[Any]",
        name: str | None = None,
        lazy: bool = False,
        deps: list[Signal[Any] | Computed[Any]] | None = None,
    ):
        # `run()` reads self.query, so it must be assigned before the base
        # initializer (which, presumably, may schedule a first run when not lazy
        # — TODO confirm against AsyncEffect semantics).
        self.query = query
        super().__init__(fn, name=name, lazy=lazy, deps=deps)

    @override
    def run(self) -> asyncio.Task[Any]:
        # Immediately set loading state before running the effect
        self.query.fetch_status.write("fetching")
        return super().run()
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@dataclass(slots=True)
class QueryConfig(Generic[T]):
    """Static configuration carried by a Query instance (see Query.__init__)."""

    # Number of retry attempts allowed after the first failure.
    retries: int
    # Delay in seconds between retry attempts.
    retry_delay: float
    # Initial data value (or a zero-arg factory for it); MISSING-sentinel
    # handling is performed by Query itself.
    initial_data: T | Callable[[], T] | None
    # Seconds to keep an unobserved query alive before disposal; a value <= 0
    # makes Query.schedule_gc dispose immediately.
    gc_time: float
    # Callback invoked with the Query when it is disposed.
    on_dispose: Callable[[Any], None] | None
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
# Default delay between retry attempts (seconds). Shortened under pytest so
# retry paths don't slow down the test suite.
RETRY_DELAY_DEFAULT = 2.0 if not is_pytest() else 0.01
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
class Query(Generic[T], Disposable):
    """
    Represents a single query instance in a store.
    Manages the async effect, data/status signals, and observer tracking.

    State is exposed through reactive signals so UI code tracking them
    re-renders on changes. The fetch itself runs inside an AsyncQueryEffect
    (created lazily on first observation) that retries up to ``cfg.retries``
    times with ``cfg.retry_delay`` between attempts.
    """

    # Store key for keyed queries; None for private/unkeyed queries.
    key: QueryKey | None
    # Zero-arg async fetch function producing the query data.
    fn: Callable[[], Awaitable[T]]
    cfg: QueryConfig[T]

    # Reactive signals for query state
    data: Signal[T | None]
    error: Signal[Exception | None]
    last_updated: Signal[float]
    status: Signal[QueryStatus]
    fetch_status: Signal[QueryFetchStatus]
    retries: Signal[int]
    retry_reason: Signal[Exception | None]

    # Observers currently subscribed; callbacks on them fire after fetches.
    _observers: "list[QueryResult[T]]"
    _effect: AsyncEffect | None
    # Pending garbage-collection timer (armed when the last observer leaves).
    _gc_handle: asyncio.TimerHandle | None

    def __init__(
        self,
        key: QueryKey | None,
        fn: Callable[[], Awaitable[T]],
        retries: int = 3,
        retry_delay: float = RETRY_DELAY_DEFAULT,
        initial_data: T | None = MISSING,
        gc_time: float = 300.0,
        on_dispose: Callable[[Any], None] | None = None,
    ):
        self.key = key
        self.fn = fn
        self.cfg = QueryConfig(
            retries=retries,
            retry_delay=retry_delay,
            initial_data=initial_data,
            gc_time=gc_time,
            on_dispose=on_dispose,
        )

        # Initialize reactive signals. With initial data present the query
        # starts in "success" with last_updated = now; otherwise "loading".
        self.data = Signal(
            None if initial_data is MISSING else initial_data, name=f"query.data({key})"
        )
        self.error = Signal(None, name=f"query.error({key})")
        self.last_updated = Signal(
            time.time() if initial_data is not MISSING else 0.0,
            name=f"query.last_updated({key})",
        )
        self.status = Signal(
            "loading" if initial_data is MISSING else "success",
            name=f"query.status({key})",
        )
        self.fetch_status = Signal("idle", name=f"query.fetch_status({key})")
        self.retries = Signal(0, name=f"query.retries({key})")
        self.retry_reason = Signal(None, name=f"query.retry_reason({key})")

        self._observers = []
        self._gc_handle = None
        # Effect is created lazily on first observation
        self._effect = None

    def set_data(self, data: T):
        """Manually set query data (marks success; leaves fetch state untouched)."""
        self._set_success(data, manual=True)

    def set_error(self, error: Exception):
        """Manually put the query into the error state (fetch state untouched)."""
        self._set_error(error, manual=True)

    def _set_success(self, data: T, manual: bool = False):
        # Record a successful result. When driven by a real fetch (manual=False)
        # also settle the fetch/retry bookkeeping.
        self.data.write(data)
        self.last_updated.write(time.time())
        self.error.write(None)
        self.status.write("success")
        if not manual:
            self.fetch_status.write("idle")
            self.retries.write(0)
            self.retry_reason.write(None)

    def _set_error(self, error: Exception, manual: bool = False):
        # Record a terminal error. Fetch state is settled only for real fetches.
        self.error.write(error)
        self.last_updated.write(time.time())
        self.status.write("error")
        if not manual:
            self.fetch_status.write("idle")
            # Don't reset retries on final error - preserve for debugging
            # retry_reason is updated to the final error in _run

    def _failed_retry(self, reason: Exception):
        # Bump the retry counter and remember why the last attempt failed.
        self.retries.write(self.retries.read() + 1)
        self.retry_reason.write(reason)

    @property
    def effect(self) -> AsyncEffect:
        """Lazy property that creates the query effect on first access."""
        if self._effect is None:
            # Keyed queries pass deps=[] (explicit scheduling only); unkeyed
            # queries pass None — presumably enabling automatic dependency
            # tracking in AsyncEffect. TODO confirm against AsyncEffect.
            self._effect = AsyncQueryEffect(
                self._run,
                query=self,
                name=f"query_effect({self.key})",
                deps=[] if self.key is not None else None,
            )
        return self._effect

    async def _run(self):
        """Fetch loop: run ``fn`` and retry on failure up to cfg.retries times."""
        # Reset retries at start of run
        self.retries.write(0)
        self.retry_reason.write(None)

        while True:
            try:
                result = await self.fn()
                self._set_success(result)
                # Notify observer success callbacks (sync or async).
                for obs in self._observers:
                    if obs._on_success:  # pyright: ignore[reportPrivateUsage]
                        await maybe_await(call_flexible(obs._on_success, result))  # pyright: ignore[reportPrivateUsage]
                return
            except asyncio.CancelledError:
                # Never swallow cancellation.
                raise
            except Exception as e:
                current_retries = self.retries.read()
                if current_retries < self.cfg.retries:
                    # Record failed retry attempt and retry
                    self._failed_retry(e)
                    # Wait before retrying
                    await asyncio.sleep(self.cfg.retry_delay)
                else:
                    # All retries exhausted - update retry_reason to final error
                    self.retry_reason.write(e)
                    self._set_error(e)
                    for obs in self._observers:
                        if obs._on_error:  # pyright: ignore[reportPrivateUsage]
                            await maybe_await(call_flexible(obs._on_error, e))  # pyright: ignore[reportPrivateUsage]
                    return

    async def refetch(self, cancel_refetch: bool = True) -> T:
        """
        Reruns the query and returns the result.
        If cancel_refetch is True (default), cancels any in-flight request and starts a new one.
        If cancel_refetch is False, deduplicates requests if one is already in flight.

        NOTE(review): relies on AsyncEffect.cancel()/wait() semantics — i.e.
        that wait() after cancel runs a fresh fetch — confirm in pulse.reactive.
        """
        if cancel_refetch:
            self.effect.cancel()
        return await self.wait()

    async def wait(self) -> T:
        """Await the current/next effect run and return the latest data."""
        await self.effect.wait()
        # Cast is safe only after a successful run; callers should check status
        # when an error outcome is possible.
        return cast(T, self.data.read())

    def invalidate(self, cancel_refetch: bool = False):
        """
        Marks query as stale. If there are active observers, triggers a refetch.
        """
        # Avoid double-scheduling unless the caller explicitly wants to restart.
        should_schedule = not self.effect.is_scheduled or cancel_refetch
        if should_schedule and len(self._observers) > 0:
            self.effect.schedule()

    def observe(
        self,
        observer: "QueryResult[T]",
    ):
        """Register an observer; cancels any pending GC and widens gc_time."""
        _ = self.effect  # ensure effect is created
        self._observers.append(observer)
        self.cancel_gc()
        # Keep the query alive at least as long as the longest-lived observer.
        if observer._gc_time > 0:  # pyright: ignore[reportPrivateUsage]
            self.cfg.gc_time = max(self.cfg.gc_time, observer._gc_time)  # pyright: ignore[reportPrivateUsage]

    def unobserve(self, observer: "QueryResult[T]"):
        """Unregister an observer. Schedules GC if no observers remain."""
        if observer in self._observers:
            self._observers.remove(observer)
        if len(self._observers) == 0:
            self.schedule_gc()

    def schedule_gc(self):
        """Arm the GC timer; with gc_time <= 0 dispose immediately."""
        self.cancel_gc()
        if self.cfg.gc_time > 0:
            self._gc_handle = later(self.cfg.gc_time, self.dispose)
        else:
            self.dispose()

    def cancel_gc(self):
        """Disarm any pending GC timer."""
        if self._gc_handle:
            self._gc_handle.cancel()
            self._gc_handle = None

    @override
    def dispose(self):
        """
        Cleans up the query entry, removing it from the store.
        """
        if self._effect:
            self._effect.dispose()

        # on_dispose is how QueryStore evicts its mapping for this query.
        if self.cfg.on_dispose:
            self.cfg.on_dispose(self)
|
|
@@ -0,0 +1,365 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
import time
|
|
3
|
+
from collections.abc import Awaitable, Callable
|
|
4
|
+
from typing import (
|
|
5
|
+
Any,
|
|
6
|
+
Generic,
|
|
7
|
+
TypeVar,
|
|
8
|
+
cast,
|
|
9
|
+
override,
|
|
10
|
+
)
|
|
11
|
+
|
|
12
|
+
from pulse.context import PulseContext
|
|
13
|
+
from pulse.helpers import MISSING, Disposable
|
|
14
|
+
from pulse.queries.common import OnErrorFn, OnSuccessFn, bind_state
|
|
15
|
+
from pulse.queries.query import (
|
|
16
|
+
RETRY_DELAY_DEFAULT,
|
|
17
|
+
Query,
|
|
18
|
+
QueryFetchStatus,
|
|
19
|
+
QueryKey,
|
|
20
|
+
QueryStatus,
|
|
21
|
+
)
|
|
22
|
+
from pulse.reactive import Computed, Effect, Untrack
|
|
23
|
+
from pulse.state import InitializableProperty, State
|
|
24
|
+
|
|
25
|
+
T = TypeVar("T")
# The State subclass that owns the query property; bound as `self` in the
# user-supplied fetch/key/callback functions.
TState = TypeVar("TState", bound=State)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class QueryResult(Generic[T], Disposable):
    """
    Thin wrapper around Query that adds callbacks, staleness tracking,
    and observation lifecycle.

    For keyed queries, uses a Computed to resolve the correct query based on the key.
    Reading ``self._query()`` inside an effect therefore re-subscribes this
    result to whichever Query the current key resolves to.
    """

    _query: Computed[Query[T]]
    # Seconds before cached data counts as stale (see is_stale).
    _stale_time: float
    # GC grace period forwarded to the underlying Query via observe().
    _gc_time: float
    _keep_previous_data: bool
    # Per-observer callbacks invoked by Query._run after a fetch settles.
    _on_success: Callable[[T], Awaitable[None] | None] | None
    _on_error: Callable[[Exception], Awaitable[None] | None] | None
    # NOTE(review): declared but never assigned anywhere in this class —
    # possibly vestigial; confirm before relying on it.
    _callback_effect: Effect
    _observe_effect: Effect
    _data_computed: Computed[T | None]
    # NOTE(review): set to None in __init__ and never read here — confirm use.
    _disposed_data: T | None

    def __init__(
        self,
        query: Computed[Query[T]],
        stale_time: float = 0.0,
        gc_time: float = 300.0,
        keep_previous_data: bool = False,
        on_success: Callable[[T], Awaitable[None] | None] | None = None,
        on_error: Callable[[Exception], Awaitable[None] | None] | None = None,
    ):
        self._query = query
        self._stale_time = stale_time
        self._gc_time = gc_time
        self._keep_previous_data = keep_previous_data
        self._on_success = on_success
        self._on_error = on_error
        self._disposed_data = None

        def observe_effect():
            # Reading the Computed here (tracked) makes this effect re-run
            # whenever the resolved Query changes (e.g. the key changed).
            query = self._query()
            with Untrack():
                # This may create an effect, which should live independently of our observe effect
                query.observe(self)

                # If stale or loading, schedule refetch
                if self.is_stale():
                    query.invalidate()

            # Return cleanup function that captures the observer
            return lambda: query.unobserve(self)

        self._observe_effect = Effect(
            observe_effect,
            name=f"query_observe({self._query().key})",
            immediate=True,
        )
        self._data_computed = Computed(
            self._data_computed_fn, name=f"query_data({self._query().key})"
        )

    @property
    def status(self) -> QueryStatus:
        """Current data status ("loading" | "success" | "error"), tracked."""
        return self._query().status()

    @property
    def fetch_status(self) -> QueryFetchStatus:
        """Current fetch activity ("idle" | "fetching" | "paused"), tracked."""
        return self._query().fetch_status()

    # Forward property reads to the query's signals (with automatic reactive tracking)
    @property
    def is_loading(self) -> bool:
        return self.status == "loading"

    @property
    def is_success(self) -> bool:
        return self.status == "success"

    @property
    def is_error(self) -> bool:
        return self.status == "error"

    @property
    def is_fetching(self) -> bool:
        return self.fetch_status == "fetching"

    @property
    def error(self) -> Exception | None:
        return self._query().error.read()

    def _data_computed_fn(self, prev: T | None) -> T | None:
        # Computed body; `prev` is the previously computed value, which is
        # held onto while a keyed query switches/reloads when
        # keep_previous_data is enabled.
        query = self._query()
        if self._keep_previous_data and query.status() != "success":
            return prev
        return query.data()

    @property
    def data(self) -> T | None:
        """Latest data (or previous data while reloading, if configured)."""
        return self._data_computed()

    @property
    def has_loaded(self) -> bool:
        """True once the query has settled at least once (success or error)."""
        return self.status in ("success", "error")

    def is_stale(self) -> bool:
        """Check if the query data is stale based on stale_time."""
        query = self._query()
        return (time.time() - query.last_updated.read()) > self._stale_time

    async def refetch(self, cancel_refetch: bool = True) -> T:
        """Refetch the query data."""
        return await self._query().refetch(cancel_refetch=cancel_refetch)

    async def wait(self) -> T:
        """Await the in-flight/next fetch and return the latest data."""
        return await self._query().wait()

    def invalidate(self):
        """Mark the query as stale and refetch if there are observers."""
        query = self._query()
        query.invalidate()

    def set_data(self, data: T):
        """Optimistically set data without changing loading/error state."""
        query = self._query()
        query.data.write(data)

    @override
    def dispose(self):
        """Clean up the result and its observe effect."""
        # Disposing the effect runs its cleanup, which unobserves the query.
        self._observe_effect.dispose()
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
class QueryProperty(Generic[T, TState], InitializableProperty):
    """
    Descriptor for state-bound queries.

    Usage:
        class S(ps.State):
            @ps.query()
            async def user(self) -> User: ...

            @user.key
            def _user_key(self):
                return ("user", self.user_id)

    On attribute access the descriptor lazily builds (and caches on the state
    instance) a QueryResult wrapping either a session-store keyed query or a
    private unkeyed one.
    """

    name: str
    # User's async fetch method, unbound (takes the state instance).
    _fetch_fn: "Callable[[TState], Awaitable[T]]"
    # NOTE(review): declared but never assigned in __init__ — possibly vestigial.
    _keep_alive: bool
    _keep_previous_data: bool
    _stale_time: float
    _gc_time: float
    _retries: int
    _retry_delay: float
    # Initial value, or a one-arg factory taking the state instance; MISSING
    # means "no initial data".
    _initial_data: T | Callable[[TState], T] | None
    # Key factory; presence of a key routes the query through the session store.
    _key_fn: Callable[[TState], QueryKey] | None
    # Not using OnSuccessFn and OnErrorFn since unions of callables are not well
    # supported in the type system. We just need to be careful to use
    # call_flexible to invoke these functions.
    _on_success_fn: Callable[[TState, T], Any] | None
    _on_error_fn: Callable[[TState, Exception], Any] | None
    # Name of the per-instance attribute caching the QueryResult.
    _priv_result: str

    def __init__(
        self,
        name: str,
        fetch_fn: "Callable[[TState], Awaitable[T]]",
        keep_previous_data: bool = False,
        stale_time: float = 0.0,
        gc_time: float = 300.0,
        retries: int = 3,
        retry_delay: float = RETRY_DELAY_DEFAULT,
        initial: T | Callable[[TState], T] | None = MISSING,
        key: Callable[[TState], QueryKey] | None = None,
        on_success: OnSuccessFn[TState, T] | None = None,
        on_error: OnErrorFn[TState] | None = None,
    ):
        self.name = name
        self._fetch_fn = fetch_fn
        # BUGFIX: previously `self._key_fn = None`, which silently discarded a
        # key function passed to the constructor. Honoring it here also makes
        # the duplicate-check in the key() decorator meaningful for this path.
        self._key_fn = key
        self._on_success_fn = on_success  # pyright: ignore[reportAttributeAccessIssue]
        self._on_error_fn = on_error  # pyright: ignore[reportAttributeAccessIssue]
        self._keep_previous_data = keep_previous_data
        self._stale_time = stale_time
        self._gc_time = gc_time
        self._retries = retries
        self._retry_delay = retry_delay
        self._initial_data = initial
        self._priv_result = f"__query_{name}"

    # Decorator to attach a key function
    def key(self, fn: Callable[[TState], QueryKey]):
        if self._key_fn is not None:
            raise RuntimeError(
                f"Duplicate key() decorator for query '{self.name}'. Only one is allowed."
            )
        self._key_fn = fn
        return fn

    # Decorator to attach a function providing initial data
    def initial_data(self, fn: Callable[[TState], T]):
        if self._initial_data is not MISSING:
            raise RuntimeError(
                f"Duplicate initial_data() decorator for query '{self.name}'. Only one is allowed."
            )
        self._initial_data = fn
        return fn

    # Decorator to attach an on-success handler (sync or async)
    def on_success(self, fn: OnSuccessFn[TState, T]):
        if self._on_success_fn is not None:
            raise RuntimeError(
                f"Duplicate on_success() decorator for query '{self.name}'. Only one is allowed."
            )
        self._on_success_fn = fn  # pyright: ignore[reportAttributeAccessIssue]
        return fn

    # Decorator to attach an on-error handler (sync or async)
    def on_error(self, fn: OnErrorFn[TState]):
        if self._on_error_fn is not None:
            raise RuntimeError(
                f"Duplicate on_error() decorator for query '{self.name}'. Only one is allowed."
            )
        self._on_error_fn = fn  # pyright: ignore[reportAttributeAccessIssue]
        return fn

    @override
    def initialize(self, state: Any, name: str) -> QueryResult[T]:
        """Build (or return the cached) QueryResult for this state instance.

        Note: the `name` parameter is part of the InitializableProperty
        interface but is unused here; self.name is authoritative.
        """
        # Return cached query instance if present
        result: QueryResult[T] | None = getattr(state, self._priv_result, None)
        if result:
            # Don't re-initialize, just return the cached instance
            return result

        # Bind methods to this instance
        fetch_fn = bind_state(state, self._fetch_fn)
        # Resolve initial data: call one-arg factories with the state; any
        # other value (including MISSING) passes through unchanged.
        initial_data = cast(
            T | None,
            (
                self._initial_data(state)
                if callable(self._initial_data)
                and len(inspect.signature(self._initial_data).parameters) == 1
                else self._initial_data
            ),
        )

        if self._key_fn:
            # Keyed query: use session-wide QueryStore
            query = self._resolve_keyed(state, fetch_fn, initial_data)
        else:
            # Unkeyed query: create private Query
            query = self._resolve_unkeyed(fetch_fn, initial_data)

        # Wrap query in QueryResult
        result = QueryResult[T](
            query=query,
            stale_time=self._stale_time,
            keep_previous_data=self._keep_previous_data,
            gc_time=self._gc_time,
            on_success=bind_state(state, self._on_success_fn)
            if self._on_success_fn
            else None,
            on_error=bind_state(state, self._on_error_fn)
            if self._on_error_fn
            else None,
        )

        # Store result on the instance
        setattr(state, self._priv_result, result)
        return result

    def _resolve_keyed(
        self,
        state: TState,
        fetch_fn: Callable[[], Awaitable[T]],
        initial_data: T | None,
    ) -> Computed[Query[T]]:
        """Create or get a keyed query from the session store using a Computed."""
        assert self._key_fn is not None

        key_computed = Computed(
            bind_state(state, self._key_fn), name=f"query.key.{self.name}"
        )
        render = PulseContext.get().render
        if render is None:
            raise RuntimeError("No render session available")
        store = render.query_store

        def query() -> Query[T]:
            # Reading key_computed inside the Computed makes the resolved
            # Query track key changes.
            key = key_computed()
            return store.ensure(
                key,
                fetch_fn,
                initial_data,
                gc_time=self._gc_time,
                retries=self._retries,
                retry_delay=self._retry_delay,
            )

        return Computed(query, name=f"query.{self.name}")

    def _resolve_unkeyed(
        self,
        fetch_fn: Callable[[], Awaitable[T]],
        initial_data: T | None,
    ) -> Computed[Query[T]]:
        """Create a private unkeyed query."""
        query = Query[T](
            key=None,
            fn=fetch_fn,
            initial_data=initial_data,
            gc_time=self._gc_time,
            retries=self._retries,
            retry_delay=self._retry_delay,
        )
        # Constant Computed keeps the interface uniform with the keyed path.
        return Computed(lambda: query, name=f"query.{self.name}")

    def __get__(self, obj: Any, objtype: Any = None) -> QueryResult[T]:
        if obj is None:
            # Class-level access returns the descriptor itself.
            return self  # pyright: ignore[reportReturnType]
        return self.initialize(obj, self.name)
|
|
347
|
+
|
|
348
|
+
|
|
349
|
+
class QueryResultWithInitial(QueryResult[T]):
    """
    QueryResult variant whose `data` is typed as `T` rather than `T | None`.

    The cast assumes this variant is only constructed for queries that are
    guaranteed initial data (see QueryPropertyWithInitial) — the narrowing is
    type-level only; no runtime check is performed.
    """

    @property
    @override
    def data(self) -> T:
        return cast(T, super().data)

    @property
    @override
    def has_loaded(self) -> bool:  # mirror base for completeness
        return super().has_loaded
|
|
359
|
+
|
|
360
|
+
|
|
361
|
+
class QueryPropertyWithInitial(QueryProperty[T, TState]):
    """QueryProperty variant whose __get__ is typed to return a non-None data
    result; behavior is identical to the base descriptor."""

    @override
    def __get__(self, obj: Any, objtype: Any = None) -> QueryResultWithInitial[T]:
        # Reuse base initialization but narrow the return type for type-checkers
        return cast(QueryResultWithInitial[T], super().__get__(obj, objtype))
|
pulse/queries/store.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
from collections.abc import Awaitable, Callable
|
|
2
|
+
from typing import Any, TypeVar
|
|
3
|
+
|
|
4
|
+
from pulse.queries.query import RETRY_DELAY_DEFAULT, Query, QueryKey
|
|
5
|
+
|
|
6
|
+
T = TypeVar("T")
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class QueryStore:
    """
    Store for query entries. Manages creation, retrieval, and disposal of queries.

    Entries live in a dict keyed by QueryKey. Each created query is wired with
    an on_dispose hook that evicts its own mapping, so disposal (manual or via
    GC) keeps the store consistent automatically.
    """

    _entries: dict[QueryKey, Query[Any]]

    def __init__(self):
        self._entries = {}

    def get(self, key: QueryKey) -> Query[Any] | None:
        """Return the query registered under `key`, or None if absent."""
        return self._entries.get(key)

    def ensure(
        self,
        key: QueryKey,
        fetch_fn: Callable[[], Awaitable[T]],
        initial_data: T | None = None,
        gc_time: float = 300.0,
        retries: int = 3,
        retry_delay: float = RETRY_DELAY_DEFAULT,
    ) -> Query[T]:
        """Return the query for `key`, creating and registering it if needed."""
        existing = self._entries.get(key)
        if existing is not None:
            return existing

        def _evict(disposed: Query[Any]) -> None:
            # Only drop the mapping if it still points at this exact query;
            # a replacement may already have been registered under the key.
            if self._entries.get(disposed.key) is disposed:
                del self._entries[disposed.key]

        created = Query(
            key,
            fetch_fn,
            initial_data=initial_data,
            gc_time=gc_time,
            retries=retries,
            retry_delay=retry_delay,
            on_dispose=_evict,
        )
        self._entries[key] = created
        return created

    def remove(self, key: QueryKey):
        """Dispose the query under `key` (its on_dispose hook evicts it)."""
        target = self._entries.get(key)
        if target:
            target.dispose()

    def get_queries(
        self, predicate: Callable[[Query[Any]], bool] | None = None
    ) -> list[Query[Any]]:
        """Get all queries matching the predicate."""
        all_entries = self._entries.values()
        if predicate is None:
            return list(all_entries)
        return [query for query in all_entries if predicate(query)]
|