reflex 0.7.5__py3-none-any.whl → 0.7.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of reflex might be problematic.

@@ -1,16 +1,32 @@
- """Module to implement lazy loading in reflex."""
+ """Module to implement lazy loading in reflex.
+
+ BSD 3-Clause License
+
+ Copyright (c) 2022--2023, Scientific Python project All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+ Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+ Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+ Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ """

  from __future__ import annotations

  import copy
-
- import lazy_loader as lazy
+ import importlib
+ import os
+ import sys


  def attach(
      package_name: str,
-     submodules: set | None = None,
-     submod_attrs: dict | None = None,
+     submodules: set[str] | None = None,
+     submod_attrs: dict[str, list[str]] | None = None,
  ):
      """Replaces a package's __getattr__, __dir__, and __all__ attributes using lazy.attach.
      The lazy loader __getattr__ doesn't support tuples as list values. We needed to add
@@ -27,14 +43,49 @@ def attach(
      Returns:
          __getattr__, __dir__, __all__
      """
-     _submod_attrs = copy.deepcopy(submod_attrs)
-     if _submod_attrs:
-         for k, v in _submod_attrs.items():
+     submod_attrs = copy.deepcopy(submod_attrs)
+     if submod_attrs:
+         for k, v in submod_attrs.items():
              # when flattening the list, only keep the alias in the tuple(mod[1])
-             _submod_attrs[k] = [
+             submod_attrs[k] = [
                  mod if not isinstance(mod, tuple) else mod[1] for mod in v
              ]

-     return lazy.attach(
-         package_name=package_name, submodules=submodules, submod_attrs=_submod_attrs
-     )
+     if submod_attrs is None:
+         submod_attrs = {}
+
+     submodules = set(submodules) if submodules is not None else set()
+
+     attr_to_modules = {
+         attr: mod for mod, attrs in submod_attrs.items() for attr in attrs
+     }
+
+     __all__ = sorted(submodules | attr_to_modules.keys())
+
+     def __getattr__(name: str):  # noqa: N807
+         if name in submodules:
+             return importlib.import_module(f"{package_name}.{name}")
+         elif name in attr_to_modules:
+             submod_path = f"{package_name}.{attr_to_modules[name]}"
+             submod = importlib.import_module(submod_path)
+             attr = getattr(submod, name)
+
+             # If the attribute lives in a file (module) with the same
+             # name as the attribute, ensure that the attribute and *not*
+             # the module is accessible on the package.
+             if name == attr_to_modules[name]:
+                 pkg = sys.modules[package_name]
+                 pkg.__dict__[name] = attr
+
+             return attr
+         else:
+             raise AttributeError(f"No {package_name} attribute {name}")
+
+     def __dir__():  # noqa: N807
+         return __all__
+
+     if os.environ.get("EAGER_IMPORT", ""):
+         for attr in set(attr_to_modules.keys()) | submodules:
+             __getattr__(attr)
+
+     return __getattr__, __dir__, list(__all__)
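The vendored implementation keeps the same calling convention the lazy-loader dependency used to provide. A sketch of how a package __init__ would typically consume it; the submodule and attribute names are made up for illustration, and the import path assumes this module is reflex.utils.lazy_loader (the file name is not shown in this diff):

# Hypothetical __init__.py wiring (illustrative names only).
from reflex.utils import lazy_loader

_SUBMODULES = {"components"}
_SUBMOD_ATTRS = {
    # exposes `package.State` lazily, imported from `package.state` on first access
    "state": ["State"],
}

__getattr__, __dir__, __all__ = lazy_loader.attach(
    __name__,
    submodules=_SUBMODULES,
    submod_attrs=_SUBMOD_ATTRS,
)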
@@ -254,7 +254,7 @@ def get_nodejs_compatible_package_managers(

      package_managers = list(filter(None, package_managers))

-     if not package_managers and not raise_on_none:
+     if not package_managers and raise_on_none:
          raise FileNotFoundError(
              "Bun or npm not found. You might need to rerun `reflex init` or install either."
          )
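This one-word change fixes an inverted guard: previously the FileNotFoundError fired exactly when the caller had not asked for it (raise_on_none=False) and was skipped when they had. A minimal sketch of the intended contract, using a hypothetical helper rather than the reflex function:

def find_tools(candidates: list[str | None], raise_on_none: bool = False) -> list[str]:
    """Return the available tools; optionally raise if none were found."""
    found = list(filter(None, candidates))
    if not found and raise_on_none:
        # Only raise when the caller explicitly opted in.
        raise FileNotFoundError("No compatible package manager found.")
    return found

assert find_tools([None, None]) == []            # silent by default
# find_tools([None, None], raise_on_none=True)   # would raise FileNotFoundError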
@@ -831,44 +831,48 @@ def initialize_gitignore(
          gitignore_file.write_text("\n".join(files_to_ignore) + "\n")


- def initialize_requirements_txt():
+ def initialize_requirements_txt() -> bool:
      """Initialize the requirements.txt file.
      If absent, generate one for the user.
      If the requirements.txt does not have reflex as dependency,
      generate a requirement pinning current version and append to
      the requirements.txt file.
+
+     Returns:
+         True if the requirements.txt file was created or updated, False otherwise.
+
+     Raises:
+         Exit: If the requirements.txt file cannot be read or written to.
      """
-     fp = Path(constants.RequirementsTxt.FILE)
-     encoding = "utf-8"
-     if not fp.exists():
-         fp.touch()
+     requirements_file_path = Path(constants.RequirementsTxt.FILE)
+     requirements_file_path.touch(exist_ok=True)
+
+     for encoding in [None, "utf-8"]:
+         try:
+             content = requirements_file_path.read_text(encoding)
+             break
+         except UnicodeDecodeError:
+             continue
+         except Exception as e:
+             console.error(f"Failed to read {requirements_file_path}.")
+             raise typer.Exit(1) from e
      else:
-         # Detect the encoding of the original file
-         import charset_normalizer
+         return False

-         charset_matches = charset_normalizer.from_path(fp)
-         maybe_charset_match = charset_matches.best()
-         if maybe_charset_match is None:
-             console.debug(f"Unable to detect encoding for {fp}, exiting.")
-             return
-         encoding = maybe_charset_match.encoding
-         console.debug(f"Detected encoding for {fp} as {encoding}.")
-     try:
-         other_requirements_exist = False
-         with fp.open("r", encoding=encoding) as f:
-             for req in f:
-                 # Check if we have a package name that is reflex
-                 if re.match(r"^reflex[^a-zA-Z0-9]", req):
-                     console.debug(f"{fp} already has reflex as dependency.")
-                     return
-                 other_requirements_exist = True
-         with fp.open("a", encoding=encoding) as f:
-             preceding_newline = "\n" if other_requirements_exist else ""
-             f.write(
-                 f"{preceding_newline}{constants.RequirementsTxt.DEFAULTS_STUB}{constants.Reflex.VERSION}\n"
-             )
-     except Exception:
-         console.info(f"Unable to check {fp} for reflex dependency.")
+     for line in content.splitlines():
+         if re.match(r"^reflex[^a-zA-Z0-9]", line):
+             console.debug(f"{requirements_file_path} already has reflex as dependency.")
+             return True
+
+     console.debug(
+         f"Appending {constants.RequirementsTxt.DEFAULTS_STUB} to {requirements_file_path}"
+     )
+     with requirements_file_path.open("a", encoding=encoding) as f:
+         f.write(
+             "\n" + constants.RequirementsTxt.DEFAULTS_STUB + constants.Reflex.VERSION
+         )
+
+     return True


  def initialize_app_directory(
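The rewrite above drops the charset-normalizer dependency and leans on a for/else loop instead: read_text is attempted with the platform default encoding and then UTF-8, break exits on the first success, and the else branch (reached only when no break fired) reports failure. A standalone sketch of that pattern under the same assumptions (not the reflex code):

from pathlib import Path

def read_with_fallback(path: Path) -> str | None:
    """Try the platform default encoding first, then UTF-8; return None if both fail."""
    for encoding in [None, "utf-8"]:
        try:
            content = path.read_text(encoding)
            break  # success: the else clause is skipped
        except UnicodeDecodeError:
            continue
    else:
        return None  # every candidate encoding failed to decode
    return content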
@@ -1087,8 +1091,10 @@ def _update_next_config(
          "compress": config.next_compression,
          "trailingSlash": True,
          "staticPageGenerationTimeout": config.static_page_generation_timeout,
-         "devIndicators": config.next_dev_indicators,
      }
+     if not config.next_dev_indicators:
+         next_config["devIndicators"] = False
+
      if transpile_packages:
          next_config["transpilePackages"] = list(
              {format_library_name(p) for p in transpile_packages}
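With this change the devIndicators key is only written when the indicators are being turned off; when config.next_dev_indicators is left enabled, the key is omitted and Next.js keeps its own default. A tiny sketch of the resulting behaviour (hypothetical helper, not the reflex function):

def build_next_config(next_dev_indicators: bool) -> dict:
    """Mirror of the new logic: omit devIndicators unless explicitly disabled."""
    next_config = {"trailingSlash": True}
    if not next_dev_indicators:
        next_config["devIndicators"] = False
    return next_config

assert "devIndicators" not in build_next_config(True)
assert build_next_config(False)["devIndicators"] is False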
@@ -1409,7 +1415,7 @@ def validate_bun():
          raise typer.Exit(1)
      elif bun_version < version.parse(constants.Bun.MIN_VERSION):
          console.error(
-             f"Reflex requires bun version {constants.Bun.VERSION} or higher to run, but the detected version is "
+             f"Reflex requires bun version {constants.Bun.MIN_VERSION} or higher to run, but the detected version is "
              f"{bun_version}. If you have specified a custom bun path in your config, make sure to provide one "
              f"that satisfies the minimum version requirement."
          )
reflex/utils/processes.py CHANGED
@@ -294,6 +294,7 @@ def stream_logs(

      Raises:
          Exit: If the process failed.
+         ValueError: If the process stdout pipe is closed, but the process remains running.
      """
      from reflex.utils import telemetry

@@ -303,10 +304,18 @@
          console.debug(message, progress=progress)
          if process.stdout is None:
              return
-         for line in process.stdout:
-             console.debug(line, end="", progress=progress)
-             logs.append(line)
-             yield line
+         try:
+             for line in process.stdout:
+                 console.debug(line, end="", progress=progress)
+                 logs.append(line)
+                 yield line
+         except ValueError:
+             # The stream we were reading has been closed,
+             if process.poll() is None:
+                 # But if the process is still running that is weird.
+                 raise
+             # If the process exited, break out of the loop for post processing.
+             pass

      # Check if the process failed (not printing the logs for SIGINT).

@@ -6,11 +6,13 @@ import ast
  import contextlib
  import importlib
  import inspect
+ import json
  import logging
  import re
  import subprocess
  import typing
  from fileinput import FileInput
+ from hashlib import md5
  from inspect import getfullargspec
  from itertools import chain
  from multiprocessing import Pool, cpu_count
@@ -1058,9 +1060,9 @@ class PyiGenerator:
      modules: list = []
      root: str = ""
      current_module: Any = {}
-     written_files: list[str] = []
+     written_files: list[tuple[str, str]] = []

-     def _write_pyi_file(self, module_path: Path, source: str):
+     def _write_pyi_file(self, module_path: Path, source: str) -> str:
          relpath = str(_relative_to_pwd(module_path)).replace("\\", "/")
          pyi_content = (
              "\n".join(
@@ -1078,6 +1080,7 @@ class PyiGenerator:
          pyi_path = module_path.with_suffix(".pyi")
          pyi_path.write_text(pyi_content)
          logger.info(f"Wrote {relpath}")
+         return md5(pyi_content.encode()).hexdigest()

      def _get_init_lazy_imports(self, mod: tuple | ModuleType, new_tree: ast.AST):
          # retrieve the _SUBMODULES and _SUBMOD_ATTRS from an init file if present.
@@ -1118,7 +1121,7 @@ class PyiGenerator:
          text += ast.unparse(new_tree) + "\n"
          return text

-     def _scan_file(self, module_path: Path) -> str | None:
+     def _scan_file(self, module_path: Path) -> tuple[str, str] | None:
          module_import = (
              _relative_to_pwd(module_path)
              .with_suffix("")
@@ -1132,7 +1135,10 @@
              name: obj
              for name, obj in vars(module).items()
              if inspect.isclass(obj)
-             and (issubclass(obj, Component) or issubclass(obj, SimpleNamespace))
+             and (
+                 rx_types.safe_issubclass(obj, Component)
+                 or rx_types.safe_issubclass(obj, SimpleNamespace)
+             )
              and obj != Component
              and inspect.getmodule(obj) == module
          }
@@ -1147,13 +1153,13 @@
              init_imports = self._get_init_lazy_imports(module, new_tree)
              if not init_imports:
                  return
-             self._write_pyi_file(module_path, init_imports)
+             content_hash = self._write_pyi_file(module_path, init_imports)
          else:
              new_tree = StubGenerator(module, class_names).visit(
                  ast.parse(inspect.getsource(module))
              )
-             self._write_pyi_file(module_path, ast.unparse(new_tree))
-         return str(module_path.with_suffix(".pyi").resolve())
+             content_hash = self._write_pyi_file(module_path, ast.unparse(new_tree))
+         return str(module_path.with_suffix(".pyi").resolve()), content_hash

      def _scan_files_multiprocess(self, files: list[Path]):
          with Pool(processes=cpu_count()) as pool:
@@ -1165,12 +1171,18 @@
              if pyi_path:
                  self.written_files.append(pyi_path)

-     def scan_all(self, targets: list, changed_files: list[Path] | None = None):
+     def scan_all(
+         self,
+         targets: list,
+         changed_files: list[Path] | None = None,
+         use_json: bool = False,
+     ):
          """Scan all targets for class inheriting Component and generate the .pyi files.

          Args:
              targets: the list of file/folders to scan.
              changed_files (optional): the list of changed files since the last run.
+             use_json: whether to use json to store the hashes.
          """
          file_targets = []
          for target in targets:
@@ -1212,17 +1224,82 @@
          else:
              self._scan_files_multiprocess(file_targets)

+         file_paths, hashes = (
+             [f[0] for f in self.written_files],
+             [f[1] for f in self.written_files],
+         )
+
          # Fix generated pyi files with ruff.
-         if self.written_files:
-             subprocess.run(["ruff", "format", *self.written_files])
-             subprocess.run(["ruff", "check", "--fix", *self.written_files])
+         if file_paths:
+             subprocess.run(["ruff", "format", *file_paths])
+             subprocess.run(["ruff", "check", "--fix", *file_paths])

          # For some reason, we need to format the __init__.pyi files again after fixing...
-         init_files = [f for f in self.written_files if "/__init__.pyi" in f]
+         init_files = [f for f in file_paths if "/__init__.pyi" in f]
          subprocess.run(["ruff", "format", *init_files])

+         if use_json:
+             if file_paths and changed_files is None:
+                 file_paths = list(map(Path, file_paths))
+                 top_dir = file_paths[0].parent
+                 for file_path in file_paths:
+                     file_parent = file_path.parent
+                     while len(file_parent.parts) > len(top_dir.parts):
+                         file_parent = file_parent.parent
+                     while not file_parent.samefile(top_dir):
+                         file_parent = file_parent.parent
+                         top_dir = top_dir.parent
+
+                 pyi_hashes_file = top_dir / "pyi_hashes.json"
+                 if not pyi_hashes_file.exists():
+                     while top_dir.parent and not (top_dir / "pyi_hashes.json").exists():
+                         top_dir = top_dir.parent
+                     another_pyi_hashes_file = top_dir / "pyi_hashes.json"
+                     if another_pyi_hashes_file.exists():
+                         pyi_hashes_file = another_pyi_hashes_file
+
+                 pyi_hashes_file.write_text(
+                     json.dumps(
+                         dict(
+                             zip(
+                                 [
+                                     str(f.relative_to(pyi_hashes_file.parent))
+                                     for f in file_paths
+                                 ],
+                                 hashes,
+                                 strict=True,
+                             )
+                         ),
+                         indent=2,
+                         sort_keys=True,
+                     )
+                     + "\n",
+                 )
+             elif file_paths:
+                 file_paths = list(map(Path, file_paths))
+                 pyi_hashes_parent = file_paths[0].parent
+                 while (
+                     pyi_hashes_parent.parent
+                     and not (pyi_hashes_parent / "pyi_hashes.json").exists()
+                 ):
+                     pyi_hashes_parent = pyi_hashes_parent.parent
+
+                 pyi_hashes_file = pyi_hashes_parent / "pyi_hashes.json"
+                 if pyi_hashes_file.exists():
+                     pyi_hashes = json.loads(pyi_hashes_file.read_text())
+                     for file_path, hashed_content in zip(
+                         file_paths, hashes, strict=False
+                     ):
+                         pyi_hashes[str(file_path.relative_to(pyi_hashes_parent))] = (
+                             hashed_content
+                         )
+
+                     pyi_hashes_file.write_text(
+                         json.dumps(pyi_hashes, indent=2, sort_keys=True) + "\n"
+                     )
+
          # Post-process the generated pyi files to add hacky type: ignore comments
-         for file_path in self.written_files:
+         for file_path in file_paths:
              with FileInput(file_path, inplace=True) as f:
                  for line in f:
                      # Hack due to ast not supporting comments in the tree.
@@ -1233,3 +1310,15 @@ class PyiGenerator:
                      ):
                          line = line.rstrip() + " # type: ignore\n"
                      print(line, end="")  # noqa: T201
+
+
+ if __name__ == "__main__":
+     logging.basicConfig(level=logging.INFO)
+     logging.getLogger("blib2to3.pgen2.driver").setLevel(logging.INFO)
+
+     gen = PyiGenerator()
+     gen.scan_all(
+         ["reflex/components", "reflex/experimental", "reflex/__init__.py"],
+         None,
+         use_json=True,
+     )
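Taken together, these changes make _write_pyi_file return an md5 digest of each generated stub and let scan_all(use_json=True) persist a path-to-hash map in pyi_hashes.json, so stale stubs can be detected cheaply. A rough standalone sketch of that bookkeeping (hypothetical helper and paths, not the PyiGenerator code):

import json
from hashlib import md5
from pathlib import Path

def record_stub_hashes(stub_paths: list[Path], hashes_file: Path) -> None:
    """Map each stub path (relative to the hash file) to the md5 of its content."""
    hashes = {
        str(p.relative_to(hashes_file.parent)): md5(p.read_bytes()).hexdigest()
        for p in stub_paths
    }
    hashes_file.write_text(json.dumps(hashes, indent=2, sort_keys=True) + "\n")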
reflex/utils/telemetry.py CHANGED
@@ -9,6 +9,7 @@ import platform
  import warnings
  from contextlib import suppress
  from datetime import datetime, timezone
+ from typing import TypedDict

  import httpx
  import psutil
@@ -16,6 +17,8 @@ import psutil
  from reflex import constants
  from reflex.config import environment
  from reflex.utils import console
+ from reflex.utils.decorator import once_unless_none
+ from reflex.utils.exceptions import ReflexError
  from reflex.utils.prerequisites import ensure_reflex_installation_id, get_project_hash

  UTC = timezone.utc
@@ -94,15 +97,39 @@ def _raise_on_missing_project_hash() -> bool:
      return not environment.REFLEX_SKIP_COMPILE.get()


- def _prepare_event(event: str, **kwargs) -> dict:
-     """Prepare the event to be sent to the PostHog server.
+ class _Properties(TypedDict):
+     """Properties type for telemetry."""

-     Args:
-         event: The event name.
-         kwargs: Additional data to send with the event.
+     distinct_id: int
+     distinct_app_id: int
+     user_os: str
+     user_os_detail: str
+     reflex_version: str
+     python_version: str
+     cpu_count: int
+     memory: int
+     cpu_info: dict
+
+
+ class _DefaultEvent(TypedDict):
+     """Default event type for telemetry."""
+
+     api_key: str
+     properties: _Properties
+
+
+ class _Event(_DefaultEvent):
+     """Event type for telemetry."""
+
+     event: str
+     timestamp: str
+
+
+ def _get_event_defaults() -> _DefaultEvent | None:
+     """Get the default event data.

      Returns:
-         The event data.
+         The default event data.
      """
      from reflex.utils.prerequisites import get_cpu_info

@@ -113,19 +140,12 @@ def _prepare_event(event: str, **kwargs) -> dict:
          console.debug(
              f"Could not get installation_id or project_hash: {installation_id}, {project_hash}"
          )
-         return {}
-
-     stamp = datetime.now(UTC).isoformat()
+         return None

      cpuinfo = get_cpu_info()

-     additional_keys = ["template", "context", "detail", "user_uuid"]
-     additional_fields = {
-         key: value for key in additional_keys if (value := kwargs.get(key)) is not None
-     }
      return {
          "api_key": "phc_JoMo0fOyi0GQAooY3UyO9k0hebGkMyFJrrCw1Gt5SGb",
-         "event": event,
          "properties": {
              "distinct_id": installation_id,
              "distinct_app_id": project_hash,
@@ -136,13 +156,55 @@ def _prepare_event(event: str, **kwargs) -> dict:
              "cpu_count": get_cpu_count(),
              "memory": get_memory(),
              "cpu_info": dataclasses.asdict(cpuinfo) if cpuinfo else {},
-             **additional_fields,
          },
+     }
+
+
+ @once_unless_none
+ def get_event_defaults() -> _DefaultEvent | None:
+     """Get the default event data.
+
+     Returns:
+         The default event data.
+     """
+     return _get_event_defaults()
+
+
+ def _prepare_event(event: str, **kwargs) -> _Event | None:
+     """Prepare the event to be sent to the PostHog server.
+
+     Args:
+         event: The event name.
+         kwargs: Additional data to send with the event.
+
+     Returns:
+         The event data.
+     """
+     event_data = get_event_defaults()
+     if not event_data:
+         return None
+
+     additional_keys = ["template", "context", "detail", "user_uuid"]
+
+     properties = event_data["properties"]
+
+     for key in additional_keys:
+         if key in properties or key not in kwargs:
+             continue
+
+         properties[key] = kwargs[key]
+
+     stamp = datetime.now(UTC).isoformat()
+
+     return {
+         "api_key": event_data["api_key"],
+         "event": event,
+         "properties": properties,
          "timestamp": stamp,
      }


- def _send_event(event_data: dict) -> bool:
+ def _send_event(event_data: _Event) -> bool:
      try:
          httpx.post(POSTHOG_API_URL, json=event_data)
      except Exception:
@@ -151,7 +213,7 @@ def _send_event(event_data: dict) -> bool:
          return False
      return True

- def _send(event: str, telemetry_enabled: bool | None, **kwargs):
+ def _send(event: str, telemetry_enabled: bool | None, **kwargs) -> bool:
      from reflex.config import get_config

      # Get the telemetry_enabled from the config if it is not specified.
@@ -167,6 +229,7 @@ def _send(event: str, telemetry_enabled: bool | None, **kwargs):
          if not event_data:
              return False
          return _send_event(event_data)
+     return False


  def send(event: str, telemetry_enabled: bool | None = None, **kwargs):
@@ -196,8 +259,6 @@ def send_error(error: Exception, context: str):
      Args:
          error: The error to send.
          context: The context of the error (e.g. "frontend" or "backend")
-
-     Returns:
-         Whether the telemetry was sent successfully.
      """
-     return send("error", detail=type(error).__name__, context=context)
+     if isinstance(error, ReflexError):
+         send("error", detail=type(error).__name__, context=context)
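get_event_defaults is wrapped in once_unless_none from reflex.utils.decorator, whose implementation is not shown in this diff; from its use here, the intent appears to be caching the first non-None result so the installation-id lookup and CPU probing run once per process. A guess at the shape of such a decorator, offered as an assumption rather than the reflex source:

import functools
from typing import Callable, TypeVar

T = TypeVar("T")

def once_unless_none(fn: Callable[[], T | None]) -> Callable[[], T | None]:
    """Call fn until it returns a non-None value, then reuse that value forever."""
    cached: T | None = None

    @functools.wraps(fn)
    def wrapper() -> T | None:
        nonlocal cached
        if cached is None:
            cached = fn()  # retried on later calls while it keeps returning None
        return cached

    return wrapper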
reflex/utils/types.py CHANGED
@@ -607,7 +607,7 @@ def _isinstance(
      if cls is None or cls is type(None):
          return obj is None

-     if cls and is_union(cls):
+     if cls is not None and is_union(cls):
          return any(
              _isinstance(obj, arg, nested=nested, treat_var_as_type=treat_var_as_type)
              for arg in get_args(cls)
@@ -643,6 +643,16 @@ def _isinstance(
          # cls is a simple generic class
          return isinstance(obj, origin)

+     if origin is Var and args:
+         # cls is a Var
+         return _isinstance(
+             obj,
+             args[0],
+             nested=nested,
+             treat_var_as_type=treat_var_as_type,
+             treat_mutable_obj_as_immutable=treat_mutable_obj_as_immutable,
+         )
+
      if nested > 0 and args:
          if origin is list:
              expected_class = Sequence if treat_mutable_obj_as_immutable else list
@@ -986,6 +996,18 @@ def typehint_issubclass(
          for arg in args
      )

+     if is_literal(possible_subclass):
+         args = get_args(possible_subclass)
+         return all(
+             _isinstance(
+                 arg,
+                 possible_superclass,
+                 treat_mutable_obj_as_immutable=treat_mutable_superclasss_as_immutable,
+                 nested=2,
+             )
+             for arg in args
+         )
+
      # Remove this check when Python 3.10 is the minimum supported version
      if hasattr(types, "UnionType"):
          provided_type_origin = (
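The new Literal branch in typehint_issubclass treats a type like Literal["a", "b"] as a subtype of str because every literal value is itself an instance of the candidate superclass. A self-contained illustration of that rule built only on the standard typing helpers (not the reflex implementation):

from typing import Literal, get_args

def literal_issubclass(literal_type, superclass: type) -> bool:
    """A Literal[...] 'is a subclass' of superclass iff all its values are instances of it."""
    return all(isinstance(value, superclass) for value in get_args(literal_type))

assert literal_issubclass(Literal["a", "b"], str) is True
assert literal_issubclass(Literal[1, "b"], str) is False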
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: reflex
- Version: 0.7.5
+ Version: 0.7.6
  Summary: Web apps in pure Python.
  Project-URL: homepage, https://reflex.dev
  Project-URL: repository, https://github.com/reflex-dev/reflex
@@ -19,34 +19,25 @@ Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Requires-Python: <4.0,>=3.10
  Requires-Dist: alembic<2.0,>=1.11.1
- Requires-Dist: build<2.0,>=1.0.3
- Requires-Dist: charset-normalizer<4.0,>=3.3.2
  Requires-Dist: distro<2.0,>=1.8.0; platform_system == 'Linux'
  Requires-Dist: fastapi!=0.111.0,!=0.111.1,>=0.96.0
  Requires-Dist: granian[reload]>=2.2.0
  Requires-Dist: gunicorn<24.0.0,>=23.0.0
  Requires-Dist: httpx<1.0,>=0.25.1
  Requires-Dist: jinja2<4.0,>=3.1.2
- Requires-Dist: lazy-loader>=0.4
  Requires-Dist: packaging<25.0,>=23.1
  Requires-Dist: platformdirs<5.0,>=3.10.0
  Requires-Dist: psutil<8.0,>=5.9.4
  Requires-Dist: pydantic<3.0,>=1.10.21
- Requires-Dist: python-engineio!=4.6.0
- Requires-Dist: python-multipart<0.1,>=0.0.5
+ Requires-Dist: python-multipart<1.0,>=0.0.20
  Requires-Dist: python-socketio<6.0,>=5.7.0
  Requires-Dist: redis<6.0,>=4.3.5
  Requires-Dist: reflex-hosting-cli>=0.1.29
  Requires-Dist: rich<14.0,>=13.0.0
- Requires-Dist: setuptools>=75.0
  Requires-Dist: sqlmodel<0.1,>=0.0.14
- Requires-Dist: starlette-admin<1.0,>=0.11.0
- Requires-Dist: tomlkit<1.0,>=0.12.4
- Requires-Dist: twine<7.0,>=4.0.0
  Requires-Dist: typer<1.0,>=0.15.1
  Requires-Dist: typing-extensions>=4.6.0
  Requires-Dist: uvicorn>=0.20.0
- Requires-Dist: wheel<1.0,>=0.42.0
  Requires-Dist: wrapt<2.0,>=1.17.0
  Description-Content-Type: text/markdown