reflex 0.6.1a2__py3-none-any.whl → 0.6.2a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of reflex might be problematic.

reflex/state.py CHANGED
@@ -9,6 +9,7 @@ import dataclasses
  import functools
  import inspect
  import os
+ import pickle
  import uuid
  from abc import ABC, abstractmethod
  from collections import defaultdict
@@ -19,6 +20,7 @@ from typing import (
      TYPE_CHECKING,
      Any,
      AsyncIterator,
+     BinaryIO,
      Callable,
      ClassVar,
      Dict,
@@ -33,11 +35,11 @@ from typing import (
      get_type_hints,
  )

- import dill
  from sqlalchemy.orm import DeclarativeBase
  from typing_extensions import Self

  from reflex.config import get_config
+ from reflex.istate.data import RouterData
  from reflex.vars.base import (
      ComputedVar,
      DynamicRouteVar,
@@ -74,6 +76,8 @@ from reflex.utils.exceptions import (
      EventHandlerShadowsBuiltInStateMethod,
      ImmutableStateError,
      LockExpiredError,
+     SetUndefinedStateVarError,
+     StateSchemaMismatchError,
  )
  from reflex.utils.exec import is_testing_env
  from reflex.utils.serializers import serializer
@@ -92,125 +96,6 @@ var = computed_var
  TOO_LARGE_SERIALIZED_STATE = 100 * 1024  # 100kb


- @dataclasses.dataclass(frozen=True)
- class HeaderData:
-     """An object containing headers data."""
-
-     host: str = ""
-     origin: str = ""
-     upgrade: str = ""
-     connection: str = ""
-     cookie: str = ""
-     pragma: str = ""
-     cache_control: str = ""
-     user_agent: str = ""
-     sec_websocket_version: str = ""
-     sec_websocket_key: str = ""
-     sec_websocket_extensions: str = ""
-     accept_encoding: str = ""
-     accept_language: str = ""
-
-     def __init__(self, router_data: Optional[dict] = None):
-         """Initalize the HeaderData object based on router_data.
-
-         Args:
-             router_data: the router_data dict.
-         """
-         if router_data:
-             for k, v in router_data.get(constants.RouteVar.HEADERS, {}).items():
-                 object.__setattr__(self, format.to_snake_case(k), v)
-         else:
-             for k in dataclasses.fields(self):
-                 object.__setattr__(self, k.name, "")
-
-
- @dataclasses.dataclass(frozen=True)
- class PageData:
-     """An object containing page data."""
-
-     host: str = ""  # repeated with self.headers.origin (remove or keep the duplicate?)
-     path: str = ""
-     raw_path: str = ""
-     full_path: str = ""
-     full_raw_path: str = ""
-     params: dict = dataclasses.field(default_factory=dict)
-
-     def __init__(self, router_data: Optional[dict] = None):
-         """Initalize the PageData object based on router_data.
-
-         Args:
-             router_data: the router_data dict.
-         """
-         if router_data:
-             object.__setattr__(
-                 self,
-                 "host",
-                 router_data.get(constants.RouteVar.HEADERS, {}).get("origin", ""),
-             )
-             object.__setattr__(
-                 self, "path", router_data.get(constants.RouteVar.PATH, "")
-             )
-             object.__setattr__(
-                 self, "raw_path", router_data.get(constants.RouteVar.ORIGIN, "")
-             )
-             object.__setattr__(self, "full_path", f"{self.host}{self.path}")
-             object.__setattr__(self, "full_raw_path", f"{self.host}{self.raw_path}")
-             object.__setattr__(
-                 self, "params", router_data.get(constants.RouteVar.QUERY, {})
-             )
-         else:
-             object.__setattr__(self, "host", "")
-             object.__setattr__(self, "path", "")
-             object.__setattr__(self, "raw_path", "")
-             object.__setattr__(self, "full_path", "")
-             object.__setattr__(self, "full_raw_path", "")
-             object.__setattr__(self, "params", {})
-
-
- @dataclasses.dataclass(frozen=True, init=False)
- class SessionData:
-     """An object containing session data."""
-
-     client_token: str = ""
-     client_ip: str = ""
-     session_id: str = ""
-
-     def __init__(self, router_data: Optional[dict] = None):
-         """Initalize the SessionData object based on router_data.
-
-         Args:
-             router_data: the router_data dict.
-         """
-         if router_data:
-             client_token = router_data.get(constants.RouteVar.CLIENT_TOKEN, "")
-             client_ip = router_data.get(constants.RouteVar.CLIENT_IP, "")
-             session_id = router_data.get(constants.RouteVar.SESSION_ID, "")
-         else:
-             client_token = client_ip = session_id = ""
-         object.__setattr__(self, "client_token", client_token)
-         object.__setattr__(self, "client_ip", client_ip)
-         object.__setattr__(self, "session_id", session_id)
-
-
- @dataclasses.dataclass(frozen=True, init=False)
- class RouterData:
-     """An object containing RouterData."""
-
-     session: SessionData = dataclasses.field(default_factory=SessionData)
-     headers: HeaderData = dataclasses.field(default_factory=HeaderData)
-     page: PageData = dataclasses.field(default_factory=PageData)
-
-     def __init__(self, router_data: Optional[dict] = None):
-         """Initialize the RouterData object.
-
-         Args:
-             router_data: the router_data dict.
-         """
-         object.__setattr__(self, "session", SessionData(router_data))
-         object.__setattr__(self, "headers", HeaderData(router_data))
-         object.__setattr__(self, "page", PageData(router_data))
-
-
  def _no_chain_background_task(
      state_cls: Type["BaseState"], name: str, fn: Callable
  ) -> Callable:
@@ -698,11 +583,14 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
          )

      @classmethod
-     def _evaluate(cls, f: Callable[[Self], Any]) -> Var:
+     def _evaluate(
+         cls, f: Callable[[Self], Any], of_type: Union[type, None] = None
+     ) -> Var:
          """Evaluate a function to a ComputedVar. Experimental.

          Args:
              f: The function to evaluate.
+             of_type: The type of the ComputedVar. Defaults to Component.

          Returns:
              The ComputedVar.
@@ -710,14 +598,23 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
          console.warn(
              "The _evaluate method is experimental and may be removed in future versions."
          )
-         from reflex.components.base.fragment import fragment
          from reflex.components.component import Component

+         of_type = of_type or Component
+
          unique_var_name = get_unique_variable_name()

-         @computed_var(_js_expr=unique_var_name, return_type=Component)
+         @computed_var(_js_expr=unique_var_name, return_type=of_type)
          def computed_var_func(state: Self):
-             return fragment(f(state))
+             result = f(state)
+
+             if not isinstance(result, of_type):
+                 console.warn(
+                     f"Inline ComputedVar {f} expected type {of_type}, got {type(result)}. "
+                     "You can specify expected type with `of_type` argument."
+                 )
+
+             return result

          setattr(cls, unique_var_name, computed_var_func)
          cls.computed_vars[unique_var_name] = computed_var_func
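A rough sketch of what the new `of_type` argument allows (the state class and lambda are hypothetical, and `_evaluate` is itself flagged experimental). Previously the result was always wrapped in a fragment component; with `of_type`, a non-component value such as a str can be evaluated without triggering the type warning.

    import reflex as rx

    class MyState(rx.State):
        count: int = 0

    # Hypothetical usage: evaluate a plain-string computed value instead of a Component.
    label_var = MyState._evaluate(lambda state: f"count = {state.count}", of_type=str)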
@@ -1260,6 +1157,9 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
          Args:
              name: The name of the attribute.
              value: The value of the attribute.
+
+         Raises:
+             SetUndefinedStateVarError: If a value of a var is set without first defining it.
          """
          if isinstance(value, MutableProxy):
              # unwrap proxy objects when assigning back to the state
@@ -1277,6 +1177,17 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
              self._mark_dirty()
              return

+         if (
+             name not in self.vars
+             and name not in self.get_skip_vars()
+             and not name.startswith("__")
+             and not name.startswith(f"_{type(self).__name__}__")
+         ):
+             raise SetUndefinedStateVarError(
+                 f"The state variable '{name}' has not been defined in '{type(self).__name__}'. "
+                 f"All state variables must be declared before they can be set."
+             )
+
          # Set the attribute.
          super().__setattr__(name, value)

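For context, a minimal sketch of what the new guard in __setattr__ means for application code (the state class below is hypothetical):

    import reflex as rx
    from reflex.utils.exceptions import SetUndefinedStateVarError

    class CounterState(rx.State):
        count: int = 0

        def bump(self):
            self.count += 1       # fine: `count` is a declared var
            try:
                self.conut = 1    # typo: now raises SetUndefinedStateVarError
            except SetUndefinedStateVarError:
                pass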
@@ -2005,7 +1916,7 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
      def __getstate__(self):
          """Get the state for redis serialization.

-         This method is called by cloudpickle to serialize the object.
+         This method is called by pickle to serialize the object.

          It explicitly removes parent_state and substates because those are serialized separately
          by the StateManagerRedis to allow for better horizontal scaling as state size increases.
@@ -2021,6 +1932,43 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
          state["__dict__"].pop("_was_touched", None)
          return state

+     def _serialize(self) -> bytes:
+         """Serialize the state for redis.
+
+         Returns:
+             The serialized state.
+         """
+         return pickle.dumps((state_to_schema(self), self))
+
+     @classmethod
+     def _deserialize(
+         cls, data: bytes | None = None, fp: BinaryIO | None = None
+     ) -> BaseState:
+         """Deserialize the state from redis/disk.
+
+         data and fp are mutually exclusive, but one must be provided.
+
+         Args:
+             data: The serialized state data.
+             fp: The file pointer to the serialized state data.
+
+         Returns:
+             The deserialized state.
+
+         Raises:
+             ValueError: If both data and fp are provided, or neither are provided.
+             StateSchemaMismatchError: If the state schema does not match the expected schema.
+         """
+         if data is not None and fp is None:
+             (substate_schema, state) = pickle.loads(data)
+         elif fp is not None and data is None:
+             (substate_schema, state) = pickle.load(fp)
+         else:
+             raise ValueError("Only one of `data` or `fp` must be provided")
+         if substate_schema != state_to_schema(state):
+             raise StateSchemaMismatchError()
+         return state
+

  class State(BaseState):
      """The app Base State."""
@@ -2177,7 +2125,11 @@ class ComponentState(State, mixin=True):
          """
          cls._per_component_state_instance_count += 1
          state_cls_name = f"{cls.__name__}_n{cls._per_component_state_instance_count}"
-         component_state = type(state_cls_name, (cls, State), {}, mixin=False)
+         component_state = type(
+             state_cls_name, (cls, State), {"__module__": __name__}, mixin=False
+         )
+         # Save a reference to the dynamic state for pickle/unpickle.
+         globals()[state_cls_name] = component_state
          component = component_state.get_component(*children, **props)
          component.State = component_state
          return component
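The `__module__`/`globals()` registration matters because pickle locates classes by module and qualified name. A standalone sketch of the same technique, with illustrative names:

    import pickle

    def make_dynamic_state(name: str) -> type:
        # pickle resolves classes via module + qualname, so the dynamically
        # created class must be reachable as an attribute of its module.
        cls = type(name, (object,), {"__module__": __name__})
        globals()[name] = cls
        return cls

    Dyn = make_dynamic_state("MyComponentState_n1")
    assert type(pickle.loads(pickle.dumps(Dyn()))) is Dyn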
@@ -2643,7 +2595,7 @@ def is_serializable(value: Any) -> bool:
          Whether the value is serializable.
      """
      try:
-         return bool(dill.dumps(value))
+         return bool(pickle.dumps(value))
      except Exception:
          return False

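One practical effect of swapping dill for the stdlib pickle here: values such as lambdas or locally defined functions are no longer reported as serializable. A standalone illustration:

    import pickle

    def is_serializable(value) -> bool:
        try:
            return bool(pickle.dumps(value))
        except Exception:
            return False

    assert is_serializable([1, 2, 3]) is True
    assert is_serializable(lambda: 1) is False  # dill could pickle a lambda; stdlib pickle cannot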
@@ -2779,8 +2731,7 @@ class StateManagerDisk(StateManager):
          if token_path.exists():
              try:
                  with token_path.open(mode="rb") as file:
-                     (substate_schema, substate) = dill.load(file)
-                 if substate_schema == state_to_schema(substate):
+                     substate = BaseState._deserialize(fp=file)
                      await self.populate_substates(client_token, substate, root_state)
                      return substate
              except Exception:
@@ -2822,10 +2773,12 @@
          client_token, substate_address = _split_substate_key(token)

          root_state_token = _substate_key(client_token, substate_address.split(".")[0])
+         root_state = self.states.get(root_state_token)
+         if root_state is None:
+             # Create a new root state which will be persisted in the next set_state call.
+             root_state = self.state(_reflex_internal_init=True)

-         return await self.load_state(
-             root_state_token, self.state(_reflex_internal_init=True)
-         )
+         return await self.load_state(root_state_token, root_state)

      async def set_state_for_substate(self, client_token: str, substate: BaseState):
          """Set the state for a substate.
@@ -2838,7 +2791,7 @@

          self.states[substate_token] = substate

-         state_dilled = dill.dumps((state_to_schema(substate), substate))
+         state_dilled = substate._serialize()
          if not self.states_directory.exists():
              self.states_directory.mkdir(parents=True, exist_ok=True)
          self.token_path(substate_token).write_bytes(state_dilled)
@@ -2881,25 +2834,6 @@
              await self.set_state(token, state)


- # Workaround https://github.com/cloudpipe/cloudpickle/issues/408 for dynamic pydantic classes
- if not isinstance(State.validate.__func__, FunctionType):
-     cython_function_or_method = type(State.validate.__func__)
-
-     @dill.register(cython_function_or_method)
-     def _dill_reduce_cython_function_or_method(pickler, obj):
-         # Ignore cython function when pickling.
-         pass
-
-
- @dill.register(type(State))
- def _dill_reduce_state(pickler, obj):
-     if obj is not State and issubclass(obj, State):
-         # Avoid serializing subclasses of State, instead get them by reference from the State class.
-         pickler.save_reduce(State.get_class_substate, (obj.get_full_name(),), obj=obj)
-     else:
-         dill.Pickler.dispatch[type](pickler, obj)
-
-
  def _default_lock_expiration() -> int:
      """Get the default lock expiration time.

@@ -3039,7 +2973,7 @@

          if redis_state is not None:
              # Deserialize the substate.
-             state = dill.loads(redis_state)
+             state = BaseState._deserialize(data=redis_state)

              # Populate parent state if missing and requested.
              if parent_state is None:
@@ -3151,7 +3085,7 @@
          )
          # Persist only the given state (parents or substates are excluded by BaseState.__getstate__).
          if state._get_was_touched():
-             pickle_state = dill.dumps(state, byref=True)
+             pickle_state = state._serialize()
              self._warn_if_too_large(state, len(pickle_state))
              await self.redis.set(
                  _substate_key(client_token, state),
reflex/utils/build.py CHANGED
@@ -61,8 +61,8 @@ def generate_sitemap_config(deploy_url: str, export=False):


  def _zip(
      component_name: constants.ComponentName,
-     target: str,
-     root_dir: str,
+     target: str | Path,
+     root_dir: str | Path,
      exclude_venv_dirs: bool,
      upload_db_file: bool = False,
      dirs_to_exclude: set[str] | None = None,
@@ -82,22 +82,22 @@ def _zip(
          top_level_dirs_to_exclude: The top level directory names immediately under root_dir to exclude. Do not exclude folders by these names further in the sub-directories.

      """
+     target = Path(target)
+     root_dir = Path(root_dir)
      dirs_to_exclude = dirs_to_exclude or set()
      files_to_exclude = files_to_exclude or set()
      files_to_zip: list[str] = []
      # Traverse the root directory in a top-down manner. In this traversal order,
      # we can modify the dirs list in-place to remove directories we don't want to include.
      for root, dirs, files in os.walk(root_dir, topdown=True):
+         root = Path(root)
          # Modify the dirs in-place so excluded and hidden directories are skipped in next traversal.
          dirs[:] = [
              d
              for d in dirs
-             if (basename := os.path.basename(os.path.normpath(d)))
-             not in dirs_to_exclude
+             if (basename := Path(d).resolve().name) not in dirs_to_exclude
              and not basename.startswith(".")
-             and (
-                 not exclude_venv_dirs or not _looks_like_venv_dir(os.path.join(root, d))
-             )
+             and (not exclude_venv_dirs or not _looks_like_venv_dir(root / d))
          ]
          # If we are at the top level with root_dir, exclude the top level dirs.
          if top_level_dirs_to_exclude and root == root_dir:
@@ -109,7 +109,7 @@ def _zip(
              if not f.startswith(".") and (upload_db_file or not f.endswith(".db"))
          ]
          files_to_zip += [
-             os.path.join(root, file) for file in files if file not in files_to_exclude
+             str(root / file) for file in files if file not in files_to_exclude
          ]

      # Create a progress bar for zipping the component.
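A quick standalone look at the pathlib idioms this refactor leans on (paths are made up):

    from pathlib import Path

    root = Path("/tmp/project")                      # hypothetical root_dir
    target = root / "backend.zip"                    # replaces os.path.join(...)
    member = root / "assets" / "logo.png"
    print(member.relative_to(root))                  # assets/logo.png, replaces os.path.relpath
    print(Path("node_modules").resolve().name)       # basename of the normalized directory
    print((root / ".venv" / "pyvenv.cfg").exists())  # the Path-based _looks_like_venv_dir check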
@@ -126,13 +126,13 @@ def _zip(
          for file in files_to_zip:
              console.debug(f"{target}: {file}", progress=progress)
              progress.advance(task)
-             zipf.write(file, os.path.relpath(file, root_dir))
+             zipf.write(file, Path(file).relative_to(root_dir))


  def zip_app(
      frontend: bool = True,
      backend: bool = True,
-     zip_dest_dir: str = os.getcwd(),
+     zip_dest_dir: str | Path = Path.cwd(),
      upload_db_file: bool = False,
  ):
      """Zip up the app.
@@ -143,6 +143,7 @@
          zip_dest_dir: The directory to export the zip file to.
          upload_db_file: Whether to upload the database file.
      """
+     zip_dest_dir = Path(zip_dest_dir)
      files_to_exclude = {
          constants.ComponentName.FRONTEND.zip(),
          constants.ComponentName.BACKEND.zip(),
@@ -151,8 +152,8 @@
      if frontend:
          _zip(
              component_name=constants.ComponentName.FRONTEND,
-             target=os.path.join(zip_dest_dir, constants.ComponentName.FRONTEND.zip()),
-             root_dir=str(prerequisites.get_web_dir() / constants.Dirs.STATIC),
+             target=zip_dest_dir / constants.ComponentName.FRONTEND.zip(),
+             root_dir=prerequisites.get_web_dir() / constants.Dirs.STATIC,
              files_to_exclude=files_to_exclude,
              exclude_venv_dirs=False,
          )
@@ -160,8 +161,8 @@
      if backend:
          _zip(
              component_name=constants.ComponentName.BACKEND,
-             target=os.path.join(zip_dest_dir, constants.ComponentName.BACKEND.zip()),
-             root_dir=".",
+             target=zip_dest_dir / constants.ComponentName.BACKEND.zip(),
+             root_dir=Path("."),
              dirs_to_exclude={"__pycache__"},
              files_to_exclude=files_to_exclude,
              top_level_dirs_to_exclude={"assets"},
@@ -236,6 +237,9 @@
      # Set the environment variables in client (env.json).
      set_env_json()

+     # update the last reflex run time.
+     prerequisites.set_last_reflex_run_time()
+
      # Disable the Next telemetry.
      if disable_telemetry:
          processes.new_process(
@@ -266,5 +270,6 @@
      build(deploy_url=get_config().deploy_url)


- def _looks_like_venv_dir(dir_to_check: str) -> bool:
-     return os.path.exists(os.path.join(dir_to_check, "pyvenv.cfg"))
+ def _looks_like_venv_dir(dir_to_check: str | Path) -> bool:
+     dir_to_check = Path(dir_to_check)
+     return (dir_to_check / "pyvenv.cfg").exists()
reflex/utils/compat.py CHANGED
@@ -69,3 +69,21 @@ def pydantic_v1_patch():

  with pydantic_v1_patch():
      import sqlmodel as sqlmodel
+
+
+ def sqlmodel_field_has_primary_key(field) -> bool:
+     """Determines if a field is a priamary.
+
+     Args:
+         field: a rx.model field
+
+     Returns:
+         If field is a primary key (Bool)
+     """
+     if getattr(field.field_info, "primary_key", None) is True:
+         return True
+     if getattr(field.field_info, "sa_column", None) is None:
+         return False
+     if getattr(field.field_info.sa_column, "primary_key", None) is True:
+         return True
+     return False
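A rough usage sketch for the new helper (the Customer model is hypothetical; it assumes SQLModel's pydantic-v1 style `__fields__`/`field_info` attributes, which the patch above preserves):

    from typing import Optional

    from sqlmodel import Field, SQLModel

    from reflex.utils.compat import sqlmodel_field_has_primary_key

    class Customer(SQLModel, table=True):
        id: Optional[int] = Field(default=None, primary_key=True)
        name: str

    for name, field in Customer.__fields__.items():
        print(name, sqlmodel_field_has_primary_key(field))  # id -> True, name -> False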
reflex/utils/exceptions.py CHANGED
@@ -115,3 +115,15 @@ class PrimitiveUnserializableToJSON(ReflexError, ValueError):

  class InvalidLifespanTaskType(ReflexError, TypeError):
      """Raised when an invalid task type is registered as a lifespan task."""
+
+
+ class DynamicComponentMissingLibrary(ReflexError, ValueError):
+     """Raised when a dynamic component is missing a library."""
+
+
+ class SetUndefinedStateVarError(ReflexError, AttributeError):
+     """Raised when setting the value of a var without first declaring it."""
+
+
+ class StateSchemaMismatchError(ReflexError, TypeError):
+     """Raised when the serialized schema of a state class does not match the current schema."""
reflex/utils/exec.py CHANGED
@@ -284,7 +284,7 @@ def run_granian_backend(host, port, loglevel: LogLevel):
          ).serve()
      except ImportError:
          console.error(
-             'InstallError: REFLEX_USE_GRANIAN is set but `granian` is not installed. (run `pip install "granian>=1.6.0"`)'
+             'InstallError: REFLEX_USE_GRANIAN is set but `granian` is not installed. (run `pip install "granian[reload]>=1.6.0"`)'
          )
          os._exit(1)

@@ -410,7 +410,7 @@ def run_granian_backend_prod(host, port, loglevel):
          )
      except ImportError:
          console.error(
-             'InstallError: REFLEX_USE_GRANIAN is set but `granian` is not installed. (run `pip install "granian>=1.6.0"`)'
+             'InstallError: REFLEX_USE_GRANIAN is set but `granian` is not installed. (run `pip install "granian[reload]>=1.6.0"`)'
          )

reflex/utils/format.py CHANGED
@@ -359,19 +359,7 @@ def format_prop(

      # Handle event props.
      if isinstance(prop, EventChain):
-         sig = inspect.signature(prop.args_spec)  # type: ignore
-         if sig.parameters:
-             arg_def = ",".join(f"_{p}" for p in sig.parameters)
-             arg_def_expr = f"[{arg_def}]"
-         else:
-             # add a default argument for addEvents if none were specified in prop.args_spec
-             # used to trigger the preventDefault() on the event.
-             arg_def = "...args"
-             arg_def_expr = "args"
-
-         chain = ",".join([format_event(event) for event in prop.events])
-         event = f"addEvents([{chain}], {arg_def_expr}, {json_dumps(prop.event_actions)})"
-         prop = f"({arg_def}) => {event}"
+         return str(Var.create(prop))

      # Handle other types.
      elif isinstance(prop, str):
reflex/utils/path_ops.py CHANGED
@@ -164,7 +164,7 @@ def use_system_bun() -> bool:
      return use_system_install(constants.Bun.USE_SYSTEM_VAR)


- def get_node_bin_path() -> str | None:
+ def get_node_bin_path() -> Path | None:
      """Get the node binary dir path.

      Returns:
@@ -173,8 +173,8 @@ def get_node_bin_path() -> str | None:
      bin_path = Path(constants.Node.BIN_PATH)
      if not bin_path.exists():
          str_path = which("node")
-         return str(Path(str_path).parent.resolve()) if str_path else str_path
-     return str(bin_path.resolve())
+         return Path(str_path).parent.resolve() if str_path else None
+     return bin_path.resolve()


  def get_node_path() -> str | None:
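A small sketch of what the changed return type means for callers (illustrative only):

    from reflex.utils.path_ops import get_node_bin_path

    node_bin = get_node_bin_path()  # now a pathlib.Path, or None when node is not found
    if node_bin is not None:
        print(node_bin / "node")    # join with pathlib instead of os.path.join on a str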