pulse-framework 0.1.62__py3-none-any.whl → 0.1.64__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pulse/__init__.py +1 -0
- pulse/cli/dependencies.py +7 -16
- pulse/component.py +1 -1
- pulse/queries/infinite_query.py +110 -37
- pulse/queries/protocol.py +9 -0
- pulse/queries/query.py +106 -37
- pulse/requirements.py +47 -0
- pulse/transpiler/imports.py +22 -1
- pulse/transpiler/nodes.py +55 -12
- {pulse_framework-0.1.62.dist-info → pulse_framework-0.1.64.dist-info}/METADATA +1 -1
- {pulse_framework-0.1.62.dist-info → pulse_framework-0.1.64.dist-info}/RECORD +13 -12
- {pulse_framework-0.1.62.dist-info → pulse_framework-0.1.64.dist-info}/WHEEL +0 -0
- {pulse_framework-0.1.62.dist-info → pulse_framework-0.1.64.dist-info}/entry_points.txt +0 -0
pulse/__init__.py
CHANGED
@@ -1410,6 +1410,7 @@ from pulse.render_session import run_js as run_js
 
 # Request
 from pulse.request import PulseRequest as PulseRequest
+from pulse.requirements import require as require
 from pulse.routing import Layout as Layout
 from pulse.routing import Route as Route
 from pulse.routing import RouteInfo as RouteInfo
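With this re-export, `require` is reachable straight from the top-level package. A minimal usage sketch (the npm package name below is illustrative, not taken from the diff):

```python
import pulse

# Registers an npm version requirement that the CLI dependency sync picks up.
pulse.require({"dayjs": "^1.11.10"})
```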
pulse/cli/dependencies.py
CHANGED
@@ -11,11 +11,10 @@ from pulse.cli.packages import (
     is_workspace_spec,
     load_package_json,
     parse_dependency_spec,
-    parse_install_spec,
     resolve_versions,
     spec_satisfies,
 )
-from pulse.
+from pulse.requirements import get_requirements
 
 
 def convert_pep440_to_semver(python_version: str) -> str:
@@ -98,20 +97,12 @@ def get_required_dependencies(
         "pulse-ui-client": [pulse_version],
     }
 
-
-
-    if
-
-
-
-        # We might want to be more lenient here or at least log it,
-        # but following existing pattern of raising DependencyError
-        raise DependencyError(str(exc)) from None
-        if spec:
-            name_only, ver = parse_dependency_spec(spec)
-            constraints.setdefault(name_only, []).append(ver)
-        if imp.version:
-            constraints.setdefault(name_only, []).append(imp.version)
+    for src, version in get_requirements():
+        name_only, ver_in_src = parse_dependency_spec(src)
+        if ver_in_src:
+            constraints.setdefault(name_only, []).append(ver_in_src)
+        if version:
+            constraints.setdefault(name_only, []).append(version)
 
     try:
         resolved = resolve_versions(constraints)
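The removed block built constraints from parsed import specs; the new loop instead folds the process-level requirements registry into the same constraint map. A minimal sketch of that flow, with a made-up package name:

```python
from pulse.requirements import require, get_requirements

require({"lodash": "^4.17.21"})
assert ("lodash", "^4.17.21") in get_requirements()
# get_required_dependencies() appends each such version to
# constraints[name] and then calls resolve_versions(constraints).
```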
pulse/component.py
CHANGED
@@ -112,7 +112,7 @@ class Component(Generic[P]):
         flattened = flatten_children(
             args,  # pyright: ignore[reportArgumentType]
             parent_name=f"<{self.name}>",
-            warn_stacklevel=
+            warn_stacklevel=None,
         )
         args = tuple(flattened)  # pyright: ignore[reportAssignmentType]
 
pulse/queries/infinite_query.py
CHANGED
@@ -152,6 +152,61 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
         )
         return self._observers[0]._fetch_fn  # pyright: ignore[reportPrivateUsage]
 
+    @property
+    def has_interval(self) -> bool:
+        return self._interval is not None
+
+    def _select_interval_observer(
+        self,
+    ) -> tuple[float | None, "InfiniteQueryResult[T, TParam] | None"]:
+        min_interval: float | None = None
+        selected: "InfiniteQueryResult[T, TParam] | None" = None
+
+        for obs in reversed(self._observers):
+            interval = obs._refetch_interval  # pyright: ignore[reportPrivateUsage]
+            if interval is None:
+                continue
+            if not obs._enabled.value:  # pyright: ignore[reportPrivateUsage]
+                continue
+            if min_interval is None or interval < min_interval:
+                min_interval = interval
+                selected = obs
+
+        return min_interval, selected
+
+    def _create_interval_effect(self, interval: float) -> Effect:
+        def interval_fn():
+            observer = self._interval_observer
+            if observer is None:
+                return
+            self.invalidate(fetch_fn=observer._fetch_fn, observer=observer)  # pyright: ignore[reportPrivateUsage]
+
+        return Effect(
+            interval_fn,
+            name=f"inf_query_interval({self.key})",
+            interval=interval,
+            immediate=True,
+        )
+
+    def _update_interval(self) -> None:
+        new_interval, new_observer = self._select_interval_observer()
+        interval_changed = new_interval != self._interval
+
+        self._interval = new_interval
+        self._interval_observer = new_observer
+
+        if not interval_changed:
+            if self._interval_effect is None and new_interval is not None:
+                self._interval_effect = self._create_interval_effect(new_interval)
+            return
+
+        if self._interval_effect is not None:
+            self._interval_effect.dispose()
+            self._interval_effect = None
+
+        if new_interval is not None:
+            self._interval_effect = self._create_interval_effect(new_interval)
+
     # Reactive state
     pages: ReactiveList[Page[T, TParam]]
     error: Signal[Exception | None]
@@ -171,6 +226,9 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
 
     _observers: "list[InfiniteQueryResult[T, TParam]]"
     _gc_handle: asyncio.TimerHandle | None
+    _interval_effect: Effect | None
+    _interval: float | None
+    _interval_observer: "InfiniteQueryResult[T, TParam] | None"
 
     def __init__(
         self,
@@ -232,6 +290,9 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
         self._queue_task = None
         self._observers = []
         self._gc_handle = None
+        self._interval_effect = None
+        self._interval = None
+        self._interval_observer = None
 
    # ─────────────────────────────────────────────────────────────────────────
    # Commit functions - update state after pages have been modified
@@ -326,13 +387,7 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
         fetch_fn: Callable[[TParam], Awaitable[T]] | None = None,
         observer: "InfiniteQueryResult[T, TParam] | None" = None,
     ) -> ActionResult[list[Page[T, TParam]]]:
-        """Wait for
-        # If no data and loading, enqueue initial fetch (unless already processing)
-        if len(self.pages) == 0 and self.status() == "loading":
-            if self._queue_task is None or self._queue_task.done():
-                # Use provided fetch_fn or fall back to first observer's fetch_fn
-                fn = fetch_fn if fetch_fn is not None else self.fn
-                self._enqueue(Refetch(fetch_fn=fn, observer=observer))
+        """Wait for any in-flight queue processing to complete."""
         # Wait for any in-progress queue processing
         if self._queue_task and not self._queue_task.done():
             await self._queue_task
@@ -341,17 +396,31 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
             return ActionError(cast(Exception, self.error()))
         return ActionSuccess(list(self.pages))
 
+    async def ensure(
+        self,
+        fetch_fn: Callable[[TParam], Awaitable[T]] | None = None,
+        observer: "InfiniteQueryResult[T, TParam] | None" = None,
+    ) -> ActionResult[list[Page[T, TParam]]]:
+        """Ensure an initial fetch has started, then wait for completion."""
+        if len(self.pages) == 0 and self.status() == "loading":
+            if self._queue_task is None or self._queue_task.done():
+                fn = fetch_fn if fetch_fn is not None else self.fn
+                self._enqueue(Refetch(fetch_fn=fn, observer=observer))
+        return await self.wait()
+
     def observe(self, observer: Any):
         self._observers.append(observer)
         self.cancel_gc()
         gc_time = getattr(observer, "_gc_time", 0)
         if gc_time and gc_time > 0:
             self.cfg.gc_time = max(self.cfg.gc_time, gc_time)
+        self._update_interval()
 
     def unobserve(self, observer: "InfiniteQueryResult[T, TParam]"):
         """Unregister an observer. Cancels pending actions. Schedules GC if no observers remain."""
         if observer in self._observers:
             self._observers.remove(observer)
+            self._update_interval()
 
         # Cancel pending actions from this observer
         self._cancel_observer_actions(observer)
@@ -630,11 +699,17 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
             (i for i, p in enumerate(self.pages) if p.param == action.param),
             None,
         )
-        if idx is None:
-            return None
 
         page = await action.fetch_fn(action.param)
-
+
+        if idx is None:
+            # Page doesn't exist - jump to this page, clearing existing pages
+            self.pages.clear()
+            self.pages.append(Page(page, action.param))
+        else:
+            # Page exists, update it
+            self.pages[idx] = Page(page, action.param)
+
         await self.commit()
         return page
 
@@ -708,7 +783,10 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
         cancel_fetch: bool = False,
     ) -> ActionResult[T | None]:
         """
-        Refetch
+        Refetch a page by its param. Queued for sequential execution.
+
+        If the page doesn't exist, clears existing pages and loads the requested
+        page as the new starting point.
 
         Note: Prefer calling refetch_page() on InfiniteQueryResult to ensure the
         correct fetch function is used. When called directly on InfiniteQuery, uses
@@ -725,6 +803,9 @@ class InfiniteQuery(Generic[T, TParam], Disposable):
         self._cancel_queue()
         if self._queue_task and not self._queue_task.done():
             self._queue_task.cancel()
+        if self._interval_effect is not None:
+            self._interval_effect.dispose()
+            self._interval_effect = None
         if self.cfg.on_dispose:
             self.cfg.on_dispose(self)
 
@@ -778,7 +859,6 @@ class InfiniteQueryResult(Generic[T, TParam], Disposable):
     _on_success: Callable[[list[Page[T, TParam]]], Awaitable[None] | None] | None
     _on_error: Callable[[Exception], Awaitable[None] | None] | None
     _observe_effect: Effect
-    _interval_effect: Effect | None
     _data_computed: Computed[list[Page[T, TParam]] | None]
     _enabled: Signal[bool]
     _fetch_on_mount: bool
@@ -801,13 +881,17 @@ class InfiniteQueryResult(Generic[T, TParam], Disposable):
         self._fetch_fn = fetch_fn
         self._stale_time = stale_time
         self._gc_time = gc_time
-
+        interval = (
+            refetch_interval
+            if refetch_interval is not None and refetch_interval > 0
+            else None
+        )
+        self._refetch_interval = interval
         self._keep_previous_data = keep_previous_data
         self._on_success = on_success
         self._on_error = on_error
         self._enabled = Signal(enabled, name=f"inf_query.enabled({query().key})")
         self._fetch_on_mount = fetch_on_mount
-        self._interval_effect = None
 
         def observe_effect():
             q = self._query()
@@ -816,8 +900,13 @@ class InfiniteQueryResult(Generic[T, TParam], Disposable):
             with Untrack():
                 q.observe(self)
 
-            if
-
+            # Skip if query interval is active - interval effect handles initial fetch
+            if enabled and fetch_on_mount and not q.has_interval:
+                # Fetch if no data loaded yet or if existing data is stale
+                if not q.is_fetching() and (
+                    q.status() == "loading" or self.is_stale()
+                ):
+                    q.invalidate()
 
             # Return cleanup function that captures the query (old query on key change)
             def cleanup():
@@ -834,25 +923,6 @@ class InfiniteQueryResult(Generic[T, TParam], Disposable):
             self._data_computed_fn, name=f"inf_query_data({self._query().key})"
         )
 
-        # Set up interval effect if interval is specified
-        if refetch_interval is not None and refetch_interval > 0:
-            self._setup_interval_effect(refetch_interval)
-
-    def _setup_interval_effect(self, interval: float):
-        """Create an effect that invalidates the query at the specified interval."""
-
-        def interval_fn():
-            # Read enabled to make this effect reactive to enabled changes
-            if self._enabled():
-                self._query().invalidate()
-
-        self._interval_effect = Effect(
-            interval_fn,
-            name=f"inf_query_interval({self._query().key})",
-            interval=interval,
-            immediate=True,
-        )
-
     @property
     def status(self) -> QueryStatus:
         return self._query().status()
@@ -985,15 +1055,20 @@ class InfiniteQueryResult(Generic[T, TParam], Disposable):
     async def wait(self) -> ActionResult[list[Page[T, TParam]]]:
         return await self._query().wait(fetch_fn=self._fetch_fn, observer=self)
 
+    async def ensure(self) -> ActionResult[list[Page[T, TParam]]]:
+        return await self._query().ensure(fetch_fn=self._fetch_fn, observer=self)
+
     def invalidate(self):
         query = self._query()
         query.invalidate(fetch_fn=self._fetch_fn, observer=self)
 
     def enable(self):
         self._enabled.write(True)
+        self._query()._update_interval()  # pyright: ignore[reportPrivateUsage]
 
     def disable(self):
         self._enabled.write(False)
+        self._query()._update_interval()  # pyright: ignore[reportPrivateUsage]
 
     def set_error(self, error: Exception):
         query = self._query()
@@ -1002,8 +1077,6 @@ class InfiniteQueryResult(Generic[T, TParam], Disposable):
     @override
     def dispose(self):
         """Clean up the result and its observe effect."""
-        if self._interval_effect is not None:
-            self._interval_effect.dispose()
        self._observe_effect.dispose()
 
 
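The refetch interval now lives on the shared query rather than on each result: a single effect per query, driven by the smallest interval among currently enabled observers. A standalone sketch of that selection rule (the observer objects below are stand-ins, not the real `InfiniteQueryResult`):

```python
from dataclasses import dataclass

@dataclass
class FakeObserver:
    refetch_interval: float | None
    enabled: bool

def select_interval(observers: list[FakeObserver]) -> tuple[float | None, FakeObserver | None]:
    # Mirrors _select_interval_observer: only enabled observers that set an
    # interval compete, and the smallest interval (with its observer) wins.
    min_interval: float | None = None
    selected: FakeObserver | None = None
    for obs in reversed(observers):
        if obs.refetch_interval is None or not obs.enabled:
            continue
        if min_interval is None or obs.refetch_interval < min_interval:
            min_interval, selected = obs.refetch_interval, obs
    return min_interval, selected

observers = [FakeObserver(30.0, True), FakeObserver(5.0, True), FakeObserver(1.0, False)]
assert select_interval(observers)[0] == 5.0  # the disabled 1.0s observer is ignored
```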
pulse/queries/protocol.py
CHANGED
@@ -90,6 +90,15 @@ class QueryResult(Protocol[T]):
         """
         ...
 
+    async def ensure(self) -> ActionResult[T]:
+        """
+        Ensure an initial fetch has started, then wait for completion.
+
+        Returns:
+            ActionResult containing either the data or an error.
+        """
+        ...
+
     def invalidate(self) -> None:
         """Mark the query as stale and trigger a refetch if observed."""
         ...
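The protocol now distinguishes waiting from ensuring. A minimal sketch, assuming `result` is any object satisfying `QueryResult[T]`:

```python
async def load(result):
    # wait() only awaits a fetch that is already in flight; ensure() first
    # starts the initial fetch when the query is still "loading", then waits
    # and returns the ActionResult carrying either the data or an error.
    return await result.ensure()
```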
pulse/queries/query.py
CHANGED
@@ -268,6 +268,9 @@ class KeyedQuery(Generic[T], Disposable):
     _task: asyncio.Task[None] | None
     _task_initiator: "KeyedQueryResult[T] | None"
     _gc_handle: asyncio.TimerHandle | None
+    _interval_effect: Effect | None
+    _interval: float | None
+    _interval_observer: "KeyedQueryResult[T] | None"
 
     def __init__(
         self,
@@ -293,6 +296,9 @@ class KeyedQuery(Generic[T], Disposable):
         self._task = None
         self._task_initiator = None
         self._gc_handle = None
+        self._interval_effect = None
+        self._interval = None
+        self._interval_observer = None
 
    # --- Delegate signal access to state ---
    @property
@@ -438,6 +444,66 @@ class KeyedQuery(Generic[T], Disposable):
         )
         return self.observers[0]._fetch_fn  # pyright: ignore[reportPrivateUsage]
 
+    @property
+    def has_interval(self) -> bool:
+        return self._interval is not None
+
+    def _select_interval_observer(
+        self,
+    ) -> tuple[float | None, "KeyedQueryResult[T] | None"]:
+        min_interval: float | None = None
+        selected: "KeyedQueryResult[T] | None" = None
+
+        for obs in reversed(self.observers):
+            interval = obs._refetch_interval  # pyright: ignore[reportPrivateUsage]
+            if interval is None:
+                continue
+            if not obs._enabled.value:  # pyright: ignore[reportPrivateUsage]
+                continue
+            if min_interval is None or interval < min_interval:
+                min_interval = interval
+                selected = obs
+
+        return min_interval, selected
+
+    def _create_interval_effect(self, interval: float) -> Effect:
+        def interval_fn():
+            observer = self._interval_observer
+            if observer is None:
+                return
+            if not self.is_scheduled and len(self.observers) > 0:
+                self.run_fetch(
+                    observer._fetch_fn,  # pyright: ignore[reportPrivateUsage]
+                    cancel_previous=False,
+                    initiator=observer,
+                )
+
+        return Effect(
+            interval_fn,
+            name=f"query_interval({self.key})",
+            interval=interval,
+            immediate=True,
+        )
+
+    def _update_interval(self) -> None:
+        new_interval, new_observer = self._select_interval_observer()
+        interval_changed = new_interval != self._interval
+
+        self._interval = new_interval
+        self._interval_observer = new_observer
+
+        if not interval_changed:
+            if self._interval_effect is None and new_interval is not None:
+                self._interval_effect = self._create_interval_effect(new_interval)
+            return
+
+        if self._interval_effect is not None:
+            self._interval_effect.dispose()
+            self._interval_effect = None
+
+        if new_interval is not None:
+            self._interval_effect = self._create_interval_effect(new_interval)
+
     async def refetch(self, cancel_refetch: bool = True) -> ActionResult[T]:
         """
         Reruns the query and returns the result.
@@ -468,11 +534,13 @@ class KeyedQuery(Generic[T], Disposable):
         self.cancel_gc()
         if observer._gc_time > 0:  # pyright: ignore[reportPrivateUsage]
             self.cfg.gc_time = max(self.cfg.gc_time, observer._gc_time)  # pyright: ignore[reportPrivateUsage]
+        self._update_interval()
 
     def unobserve(self, observer: "KeyedQueryResult[T]"):
         """Unregister an observer. Schedules GC if no observers remain."""
         if observer in self.observers:
             self.observers.remove(observer)
+            self._update_interval()
 
         # If the departing observer initiated the ongoing fetch, cancel it
         if self._task_initiator is observer and self._task and not self._task.done():
@@ -505,6 +573,9 @@ class KeyedQuery(Generic[T], Disposable):
     def dispose(self):
         """Clean up the query, cancelling any in-flight fetch."""
         self.cancel()
+        if self._interval_effect is not None:
+            self._interval_effect.dispose()
+            self._interval_effect = None
         if self.cfg.on_dispose:
             self.cfg.on_dispose(self)
 
@@ -559,7 +630,12 @@ class UnkeyedQueryResult(Generic[T], Disposable):
         self._on_success = on_success
         self._on_error = on_error
         self._stale_time = stale_time
-
+        interval = (
+            refetch_interval
+            if refetch_interval is not None and refetch_interval > 0
+            else None
+        )
+        self._refetch_interval = interval
         self._keep_previous_data = keep_previous_data
         self._enabled = Signal(enabled, name="query.enabled(unkeyed)")
         self._interval_effect = None
@@ -581,12 +657,13 @@ class UnkeyedQueryResult(Generic[T], Disposable):
 
         # Schedule initial fetch if stale (untracked to avoid reactive loop)
         with Untrack():
-            if
+            # Skip if refetch_interval is active - interval effect handles initial fetch
+            if enabled and fetch_on_mount and interval is None and self.is_stale():
                 self.schedule()
 
         # Set up interval effect if interval is specified
-        if
-            self._setup_interval_effect(
+        if interval is not None:
+            self._setup_interval_effect(interval)
 
     def _setup_interval_effect(self, interval: float):
         """Create an effect that invalidates the query at the specified interval."""
@@ -699,15 +776,18 @@ class UnkeyedQueryResult(Generic[T], Disposable):
         return await self.wait()
 
     async def wait(self) -> ActionResult[T]:
-        """Wait for the current
-        # If loading and no task, schedule a fetch
-        if self.state.status() == "loading" and not self.state.is_fetching():
-            self.schedule()
+        """Wait for the current in-flight fetch to complete."""
         await self._effect.wait()
         if self.state.status() == "error":
             return ActionError(cast(Exception, self.state.error.read()))
         return ActionSuccess(cast(T, self.state.data.read()))
 
+    async def ensure(self) -> ActionResult[T]:
+        """Ensure an initial fetch has started, then wait for completion."""
+        if self.state.status() == "loading" and not self.state.is_fetching():
+            self.schedule()
+        return await self.wait()
+
     def invalidate(self):
         """Mark the query as stale and refetch through the effect."""
         if not self.is_scheduled:
@@ -740,7 +820,6 @@ class KeyedQueryResult(Generic[T], Disposable):
     _on_success: Callable[[T], Awaitable[None] | None] | None
     _on_error: Callable[[Exception], Awaitable[None] | None] | None
     _observe_effect: Effect
-    _interval_effect: Effect | None
     _data_computed: Computed[T | None]
     _enabled: Signal[bool]
     _fetch_on_mount: bool
@@ -762,12 +841,16 @@ class KeyedQueryResult(Generic[T], Disposable):
         self._fetch_fn = fetch_fn
         self._stale_time = stale_time
         self._gc_time = gc_time
-
+        interval = (
+            refetch_interval
+            if refetch_interval is not None and refetch_interval > 0
+            else None
+        )
+        self._refetch_interval = interval
         self._keep_previous_data = keep_previous_data
         self._on_success = on_success
         self._on_error = on_error
         self._enabled = Signal(enabled, name=f"query.enabled({query().key})")
-        self._interval_effect = None
 
         def observe_effect():
             q = self._query()
@@ -776,9 +859,11 @@ class KeyedQueryResult(Generic[T], Disposable):
             with Untrack():
                 q.observe(self)
 
-            #
-            if enabled and fetch_on_mount and
-
+            # Skip if query interval is active - interval effect handles initial fetch
+            if enabled and fetch_on_mount and not q.has_interval:
+                # If stale, schedule refetch (only when enabled)
+                if not q.is_fetching() and self.is_stale():
+                    self.invalidate()
 
             # Return cleanup function that captures the query (old query on key change)
             def cleanup():
@@ -795,25 +880,6 @@ class KeyedQueryResult(Generic[T], Disposable):
             self._data_computed_fn, name=f"query_data({self._query().key})"
         )
 
-        # Set up interval effect if interval is specified
-        if refetch_interval is not None and refetch_interval > 0:
-            self._setup_interval_effect(refetch_interval)
-
-    def _setup_interval_effect(self, interval: float):
-        """Create an effect that invalidates the query at the specified interval."""
-
-        def interval_fn():
-            # Read enabled to make this effect reactive to enabled changes
-            if self._enabled():
-                self.invalidate()
-
-        self._interval_effect = Effect(
-            interval_fn,
-            name=f"query_interval({self._query().key})",
-            interval=interval,
-            immediate=True,
-        )
-
     @property
     def status(self) -> QueryStatus:
         return self._query().status()
@@ -874,9 +940,12 @@ class KeyedQueryResult(Generic[T], Disposable):
         return await self.wait()
 
     async def wait(self) -> ActionResult[T]:
-        """Wait for the current
+        """Wait for the current in-flight fetch to complete."""
+        return await self._query().wait()
+
+    async def ensure(self) -> ActionResult[T]:
+        """Ensure an initial fetch has started, then wait for completion."""
         query = self._query()
-        # If loading and no task, start a fetch with this observer's fetch function
         if query.status() == "loading" and not query.is_fetching():
             query.run_fetch(self._fetch_fn, initiator=self)
         return await query.wait()
@@ -910,16 +979,16 @@ class KeyedQueryResult(Generic[T], Disposable):
     def enable(self):
         """Enable the query."""
         self._enabled.write(True)
+        self._query()._update_interval()  # pyright: ignore[reportPrivateUsage]
 
     def disable(self):
         """Disable the query, preventing it from fetching."""
         self._enabled.write(False)
+        self._query()._update_interval()  # pyright: ignore[reportPrivateUsage]
 
     @override
     def dispose(self):
         """Clean up the result and its observe effect."""
-        if self._interval_effect is not None and not self._interval_effect.__disposed__:
-            self._interval_effect.dispose()
         if not self._observe_effect.__disposed__:
             self._observe_effect.dispose()
 
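All three result classes now normalize `refetch_interval` the same way before storing it. A sketch of the rule in isolation:

```python
def normalize_interval(refetch_interval: float | None) -> float | None:
    # Zero and negative values are treated as "no interval", matching the
    # `refetch_interval > 0` guard used in the constructors above.
    if refetch_interval is not None and refetch_interval > 0:
        return refetch_interval
    return None

assert normalize_interval(0) is None
assert normalize_interval(-5) is None
assert normalize_interval(2.5) == 2.5
```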
pulse/requirements.py
ADDED
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+from collections.abc import Mapping
+
+_REQUIREMENTS: list[tuple[str, str]] = []
+
+
+def add_requirement(name: str, version: str) -> None:
+    if not name or not version:
+        return
+    _REQUIREMENTS.append((name, version))
+
+
+def register_requirements(packages: Mapping[str, str]) -> None:
+    for name, version in packages.items():
+        if not name or not version:
+            continue
+        add_requirement(name, version)
+
+
+def get_requirements() -> list[tuple[str, str]]:
+    return list(_REQUIREMENTS)
+
+
+def clear_requirements() -> None:
+    _REQUIREMENTS.clear()
+
+
+def require(packages: Mapping[str, str]) -> None:
+    """Register npm package version requirements for dependency syncing."""
+    if not isinstance(packages, Mapping):
+        raise TypeError("require expects a mapping of package names to versions")
+    if not packages:
+        return
+
+    normalized: dict[str, str] = {}
+    for name, version in packages.items():
+        if not isinstance(name, str) or not name.strip():
+            raise TypeError("require expects non-empty package names")
+        if not isinstance(version, str) or not version.strip():
+            raise TypeError(f"require expects a version string for {name!r}")
+        normalized[name.strip()] = version.strip()
+
+    register_requirements(normalized)
+
+
+__all__ = ["require"]
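A short usage sketch of the new module (the package name is illustrative):

```python
from pulse.requirements import require, get_requirements, clear_requirements

require({"clsx": "^2.1.0"})
print(get_requirements())  # [('clsx', '^2.1.0')]

# The transpiler clears this registry alongside its import registry
# (see clear_import_registry in pulse/transpiler/imports.py below).
clear_requirements()
```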
pulse/transpiler/imports.py
CHANGED
@@ -14,7 +14,8 @@ from typing import (
 )
 from typing import Literal as Lit
 
-from pulse.cli.packages import pick_more_specific
+from pulse.cli.packages import parse_dependency_spec, pick_more_specific
+from pulse.requirements import add_requirement, clear_requirements
 from pulse.transpiler.assets import LocalAsset, register_local_asset
 from pulse.transpiler.errors import TranspileError
 from pulse.transpiler.id import next_id
@@ -131,6 +132,14 @@ _ImportKey: TypeAlias = tuple[str, str, str, bool]
 _IMPORT_REGISTRY: dict[_ImportKey, "Import"] = {}
 
 
+def _is_alias_path(path: str) -> bool:
+    return path.startswith("@/") or path.startswith("~/")
+
+
+def _is_url(path: str) -> bool:
+    return path.startswith("http://") or path.startswith("https://")
+
+
 def get_registered_imports() -> list["Import"]:
     """Get all registered imports."""
     return list(_IMPORT_REGISTRY.values())
@@ -139,6 +148,7 @@ def get_registered_imports() -> list["Import"]:
 def clear_import_registry() -> None:
     """Clear the import registry."""
     _IMPORT_REGISTRY.clear()
+    clear_requirements()
 
 
 @dataclass(slots=True, init=False)
@@ -240,6 +250,17 @@ class Import(Expr):
             asset = register_local_asset(resolved)
             import_src = str(resolved)
 
+        if (
+            not is_local_path(import_src)
+            and not _is_alias_path(import_src)
+            and not _is_url(import_src)
+        ):
+            name_only, ver_in_src = parse_dependency_spec(import_src)
+            if ver_in_src:
+                add_requirement(name_only, ver_in_src)
+            if version:
+                add_requirement(name_only, version)
+
         self.name = name
         self.src = import_src
         self.kind = kind
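The two new helpers only classify import sources, so that local files, path aliases, and URLs are not recorded as npm requirements. A quick illustration (the sample paths are made up):

```python
def _is_alias_path(path: str) -> bool:
    return path.startswith("@/") or path.startswith("~/")

def _is_url(path: str) -> bool:
    return path.startswith("http://") or path.startswith("https://")

assert _is_alias_path("@/components/button")
assert _is_url("https://example.com/widget.js")
assert not _is_alias_path("@mantine/core")  # a scoped npm package is not an alias path
```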
pulse/transpiler/nodes.py
CHANGED
@@ -7,7 +7,7 @@ import warnings
 from abc import ABC, abstractmethod
 from collections.abc import Callable, Iterable, Sequence
 from dataclasses import dataclass, field
-from inspect import isfunction, signature
+from inspect import currentframe, isfunction, signature
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -540,15 +540,13 @@ class Element(Expr):
             self.children = None
         else:
             if isinstance(tag, str):
-                parent_name = tag[2:] if tag.startswith("$$") else tag
+                parent_name: str | Expr = tag[2:] if tag.startswith("$$") else tag
             else:
-
-                tag.emit(tag_out)
-                parent_name = "".join(tag_out)
+                parent_name = tag
             self.children = flatten_children(
                 children,
                 parent_name=parent_name,
-                warn_stacklevel=
+                warn_stacklevel=None,
             )
         self.key = key
 
@@ -787,7 +785,7 @@ class PulseNode:
         flat = flatten_children(
             children_arg,
             parent_name=parent_name,
-            warn_stacklevel=
+            warn_stacklevel=None,
         )
         return PulseNode(
             fn=self.fn,
@@ -804,8 +802,8 @@ class PulseNode:
 def flatten_children(
     children: Sequence[Node | Iterable[Node]],
     *,
-    parent_name: str,
-    warn_stacklevel: int =
+    parent_name: str | Expr,
+    warn_stacklevel: int | None = None,
 ) -> list[Node]:
     if env.pulse_env == "dev":
         return _flatten_children_dev(
@@ -835,13 +833,14 @@ def _flatten_children_prod(children: Sequence[Node | Iterable[Node]]) -> list[Node]:
 def _flatten_children_dev(
     children: Sequence[Node | Iterable[Node]],
     *,
-    parent_name: str,
-    warn_stacklevel: int =
+    parent_name: str | Expr,
+    warn_stacklevel: int | None = None,
 ) -> list[Node]:
     flat: list[Node] = []
     seen_keys: set[str] = set()
 
     def visit(item: Node | Iterable[Node]) -> None:
+        nonlocal warn_stacklevel
         if isinstance(item, dict):
             raise TypeError("Dict is not a valid child")
         if isinstance(item, Iterable) and not isinstance(item, str):
@@ -853,6 +852,32 @@ def _flatten_children_dev(
                     missing_key = True
                 visit(sub)  # type: ignore[arg-type]
             if missing_key:
+                if warn_stacklevel is None:
+                    stacklevel = 1
+                    frame = currentframe()
+                    if frame is not None:
+                        frame = frame.f_back
+                    internal_prefixes = (
+                        "pulse",
+                        "pulse_mantine",
+                        "pulse_ag_grid",
+                        "pulse_recharts",
+                        "pulse_lucide",
+                        "pulse_msal",
+                        "pulse_aws",
+                    )
+                    while frame is not None:
+                        module = frame.f_globals.get("__name__", "")
+                        if module and not any(
+                            module == prefix or module.startswith(f"{prefix}.")
+                            for prefix in internal_prefixes
+                        ):
+                            break
+                        stacklevel += 1
+                        frame = frame.f_back
+                    if frame is not None:
+                        stacklevel += 1
+                    warn_stacklevel = stacklevel
                 clean_name = clean_element_name(parent_name)
                 warnings.warn(
                     (
@@ -888,7 +913,25 @@ def _flatten_children_dev(
     return flat
 
 
-def clean_element_name(parent_name: str) -> str:
+def clean_element_name(parent_name: str | Expr) -> str:
+    def expr_name(expr: Expr) -> str:
+        while isinstance(expr, ExprWrapper):
+            expr = expr.expr
+        if isinstance(expr, Member):
+            base = expr_name(expr.obj)
+            return f"{base}.{expr.prop}" if base else expr.prop
+        if isinstance(expr, Identifier):
+            return expr.name
+        if expr.__class__.__name__ == "Import":
+            name = getattr(expr, "name", None)
+            if isinstance(name, str) and name:
+                return name
+        out: list[str] = []
+        expr.emit(out)
+        return "".join(out)
+
+    if isinstance(parent_name, Expr):
+        parent_name = expr_name(parent_name)
     if parent_name.startswith("<") and parent_name.endswith(">"):
         return parent_name
     return f"<{parent_name}>"
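For plain string parents, `clean_element_name` still just wraps the name in angle brackets; a tiny sketch of that unchanged rule:

```python
def wrap(parent_name: str) -> str:
    # Mirrors the final two branches of clean_element_name for str input.
    if parent_name.startswith("<") and parent_name.endswith(">"):
        return parent_name
    return f"<{parent_name}>"

assert wrap("div") == "<div>"
assert wrap("<MyComponent>") == "<MyComponent>"
```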
{pulse_framework-0.1.62.dist-info → pulse_framework-0.1.64.dist-info}/RECORD
CHANGED
@@ -1,10 +1,10 @@
-pulse/__init__.py,sha256=
+pulse/__init__.py,sha256=hf1FFA4EDRxFNiPQoKwQfNHtnCf4UT7STqzgbjJ7dgI,32043
 pulse/_examples.py,sha256=dFuhD2EVXsbvAeexoG57s4VuN4gWLaTMOEMNYvlPm9A,561
 pulse/app.py,sha256=KnP6U8uHgfBbFMguDcVk0KgjakY1UC1NJk2rS5l6Sas,35145
 pulse/channel.py,sha256=sQrDLh3k9Z8CyJQkEHzKu4h-yR4XSTgAA3OCQax3Ciw,15766
 pulse/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pulse/cli/cmd.py,sha256=zh3Ah6c16cNg3o_v_If_S58Qe8rvxNe5M2VrTkwvDU8,15957
-pulse/cli/dependencies.py,sha256=
+pulse/cli/dependencies.py,sha256=qU-rF7QyP0Rl1Fl0YKQubrGNBzj84BAbH1uUT3ehxik,4283
 pulse/cli/folder_lock.py,sha256=-AKld2iM91G0uHB3F5ARD0QAjOw0TmsYYGaFgy_V350,3477
 pulse/cli/helpers.py,sha256=XXRRXeGFgeq-jbp0QGFFVq_aGg_Kp7_AkYsTK8LfSdg,7810
 pulse/cli/logging.py,sha256=3uuB1dqI-lHJkodNUURN6UMWdKF5UQ9spNG-hBG7bA4,2516
@@ -21,7 +21,7 @@ pulse/codegen/templates/layout.py,sha256=nmWPQcO9SRXc3mCCVLCmykreSF96TqQfdDY7dvU
 pulse/codegen/templates/route.py,sha256=UjBrb3e_8tMkd1OjBjEsnYmK6PCQqOYZBWDuU59FcrI,9234
 pulse/codegen/templates/routes_ts.py,sha256=nPgKCvU0gzue2k6KlOL1TJgrBqqRLmyy7K_qKAI8zAE,1129
 pulse/codegen/utils.py,sha256=QoXcV-h-DLLmq_t03hDNUePS0fNnofUQLoR-TXzDFCY,539
-pulse/component.py,sha256=
+pulse/component.py,sha256=mY4ZTX7XKXGXAiVwTec1YR3_HOJf6uTdZcxCT_WX5Gs,6230
 pulse/components/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pulse/components/for_.py,sha256=lrt1JHegf4OkBbL9nrMOy7zxmbuD8Kn11x32ZGS72lY,2390
 pulse/components/if_.py,sha256=5IOq3R70B-JdI-fvDNYDyAaSEtO8L5OaiqHp-jUn-Kw,2153
@@ -80,10 +80,10 @@ pulse/queries/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pulse/queries/client.py,sha256=AW42ZPdZJfDkCERpoxQnsiU5cYLfOlZX0sIb9BdIL4E,18495
 pulse/queries/common.py,sha256=TYhn6LyldfmOKYYurxINgCEr3C3WSEwB0cIki1a5iBM,2488
 pulse/queries/effect.py,sha256=7KvV_yK7OHTWhfQbZFGzg_pRhyI2mn25pKIF9AmSmcU,1471
-pulse/queries/infinite_query.py,sha256=
+pulse/queries/infinite_query.py,sha256=3_gv_pDTShNDf_y2yQ-QLDskJOxXhg1dOmQbU2_QxhE,48101
 pulse/queries/mutation.py,sha256=fhEpOZ7CuHImH4Y02QapYdTJrwe6K52-keb0d67wmms,8274
-pulse/queries/protocol.py,sha256=
-pulse/queries/query.py,sha256=
+pulse/queries/protocol.py,sha256=TOrUiI4QK55xuh0i4ch1u96apNl12QeYafkf6RVDd08,3544
+pulse/queries/query.py,sha256=bKtsbsYGy53s8Fq-33OY0od8p9GKbXmiKR0Ob-NRcPs,40735
 pulse/queries/store.py,sha256=Ct7a-h1-Cq07zEfe9vw-LM85Fm7jIJx7CLAIlsiznlU,3444
 pulse/react_component.py,sha256=8RLg4Bi7IcjqbnbEnp4hJpy8t1UsE7mG0UR1Q655LDk,2332
 pulse/reactive.py,sha256=FxxpH7NBtQr7G89iCVN7y1EG21f23GcRi1M-XIxcRQA,31280
@@ -91,6 +91,7 @@ pulse/reactive_extensions.py,sha256=yQ1PpdAh4kMvll7R15T72FOg8NFdG_HGBsGc63dawYk,
 pulse/render_session.py,sha256=9gfwuBZRCWuQMN_nFuaAi__1UPN3I3C1mKWtAXyA3-A,21340
 pulse/renderer.py,sha256=fjSsUvCqV12jyN7Y5XspKUfjQJJzKX-Chha5oF5PrAk,16001
 pulse/request.py,sha256=N0oFOLiGxpbgSgxznjvu64lG3YyOcZPKC8JFyKx6X7w,6023
+pulse/requirements.py,sha256=nMnE25Uu-TUuQd88jW7m2xwus6fD-HvXxQ9UNb7OOGc,1254
 pulse/routing.py,sha256=LzTITvGgaLI1w7qTDZjFwoBcWAb4O8Dz7AmXeTNYrFU,16903
 pulse/serializer.py,sha256=HmQZgxQiaCx2SL2XwmEQLd_xsk_P8XfLtGciLLLOxx0,7616
 pulse/state.py,sha256=VMphVpYNU1CyHMMg1_kNJO3cfqLXJPAuq9gr9RYyUAw,15922
@@ -103,7 +104,7 @@ pulse/transpiler/emit_context.py,sha256=GyK6VdsBSTVIewQRhBagaV0hlqLTlPZ1i8EAZGi8
 pulse/transpiler/errors.py,sha256=LSBjLBnMglbl2D94p9JR4y-3jDefk6iHSlUVBaBOTu4,2823
 pulse/transpiler/function.py,sha256=a871LZFergCmjs1vr-XlOx4eU1FQKAuYxSLJej-LHHc,17036
 pulse/transpiler/id.py,sha256=CdgA1NndBpZjv0Hp4XiYbKn7wi-x4zWsFSjEiViKxVk,434
-pulse/transpiler/imports.py,sha256=
+pulse/transpiler/imports.py,sha256=gWLjRr9jakbUzBGDEepE2RI5Xn_UZwOD4TmlqjNIapM,10302
 pulse/transpiler/js_module.py,sha256=OcIgmrfiA6Hh6aukzgkyX63KsVSHdLzx5ezdKiJFUaQ,11093
 pulse/transpiler/modules/__init__.py,sha256=JGi3CuZoF4sug4dNhQg3MFhpEQqnXec4xRJM2cHNP3c,1184
 pulse/transpiler/modules/asyncio.py,sha256=kWMuFU2vZbqutCM_EXJMvy5SdlB66XiT0czs8lELj_o,1584
@@ -112,7 +113,7 @@ pulse/transpiler/modules/math.py,sha256=8gjvdYTMqtuOnXrvX_Lwuo0ywAdSl7cpss4TMk6m
 pulse/transpiler/modules/pulse/__init__.py,sha256=TfMsiiB53ZFlxdNl7jfCAiMZs-vSRUTxUmqzkLTj-po,91
 pulse/transpiler/modules/pulse/tags.py,sha256=FMN1mWMlnsXa2qO6VmXxUAhFn1uOfGoKPQOjH4ZPlRE,6218
 pulse/transpiler/modules/typing.py,sha256=J9QCkXE6zzwMjiprX2q1BtK-iKLIiS21sQ78JH4RSMc,1716
-pulse/transpiler/nodes.py,sha256=
+pulse/transpiler/nodes.py,sha256=xJSUb0PfqyfvG85ZnUwZMzUs4JVeCJTw87biI2K2UC8,51742
 pulse/transpiler/py_module.py,sha256=um4BYLrbs01bpgv2LEBHTbhXXh8Bs174c3ygv5tHHOg,4410
 pulse/transpiler/transpiler.py,sha256=28diEp1yZTs3RsUEJZZdCv1DfzgO9WyOGI-xSHe7y_4,32562
 pulse/transpiler/vdom.py,sha256=Ie36iHa2bkUbui5iMClbMSFDGlKaNxI98Ux0JLPCGT4,6399
@@ -120,7 +121,7 @@ pulse/types/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pulse/types/event_handler.py,sha256=psQCydj-WEtBcFU5JU4mDwvyzkW8V2O0g_VFRU2EOHI,1618
 pulse/user_session.py,sha256=nsnsMgqq2xGJZLpbHRMHUHcLrElMP8WcA4gjGMrcoBk,10208
 pulse/version.py,sha256=711vaM1jVIQPgkisGgKZqwmw019qZIsc_QTae75K2pg,1895
-pulse_framework-0.1.
-pulse_framework-0.1.
-pulse_framework-0.1.
-pulse_framework-0.1.
+pulse_framework-0.1.64.dist-info/WHEEL,sha256=eh7sammvW2TypMMMGKgsM83HyA_3qQ5Lgg3ynoecH3M,79
+pulse_framework-0.1.64.dist-info/entry_points.txt,sha256=i7aohd3QaPu5IcuGKKvsQQEiMYMe5HcF56QEsaLVO64,46
+pulse_framework-0.1.64.dist-info/METADATA,sha256=vngWZz-tQp2X0ijnpp71j-4W1pRv8_JZCYlU0smMZE4,8300
+pulse_framework-0.1.64.dist-info/RECORD,,
{pulse_framework-0.1.62.dist-info → pulse_framework-0.1.64.dist-info}/WHEEL
File without changes
{pulse_framework-0.1.62.dist-info → pulse_framework-0.1.64.dist-info}/entry_points.txt
File without changes