reflex-0.7.2.dev1-py3-none-any.whl → reflex-0.7.2.post1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of reflex might be problematic.
- reflex/reflex.py +2 -6
- reflex/state.py +27 -6
- reflex/utils/prerequisites.py +45 -3
- reflex/utils/pyi_generator.py +2 -2
- reflex/utils/redir.py +12 -3
- reflex/vars/base.py +2 -3
- {reflex-0.7.2.dev1.dist-info → reflex-0.7.2.post1.dist-info}/METADATA +46 -39
- {reflex-0.7.2.dev1.dist-info → reflex-0.7.2.post1.dist-info}/RECORD +11 -20
- {reflex-0.7.2.dev1.dist-info → reflex-0.7.2.post1.dist-info}/WHEEL +1 -1
- reflex-0.7.2.post1.dist-info/entry_points.txt +3 -0
- benchmarks/__init__.py +0 -3
- benchmarks/benchmark_compile_times.py +0 -147
- benchmarks/benchmark_imports.py +0 -128
- benchmarks/benchmark_lighthouse.py +0 -75
- benchmarks/benchmark_package_size.py +0 -135
- benchmarks/benchmark_web_size.py +0 -106
- benchmarks/conftest.py +0 -20
- benchmarks/lighthouse.sh +0 -77
- benchmarks/utils.py +0 -74
- reflex-0.7.2.dev1.dist-info/entry_points.txt +0 -5
- {reflex-0.7.2.dev1.dist-info/licenses → reflex-0.7.2.post1.dist-info}/LICENSE +0 -0
reflex/reflex.py
CHANGED

@@ -13,7 +13,7 @@ from reflex import constants
 from reflex.config import environment, get_config
 from reflex.custom_components.custom_components import custom_components_cli
 from reflex.state import reset_disk_state_manager
-from reflex.utils import console,
+from reflex.utils import console, telemetry

 # Disable typer+rich integration for help panels
 typer.core.rich = None # pyright: ignore [reportPrivateImportUsage]

@@ -70,10 +70,6 @@ def _init(
     # Show system info
     exec.output_system_info()

-    if ai:
-        redir.reflex_build_redirect()
-        return
-
     # Validate the app name.
     app_name = prerequisites.validate_app_name(name)
     console.rule(f"[bold]Initializing {app_name}")

@@ -87,7 +83,7 @@ def _init(
     prerequisites.initialize_frontend_dependencies()

     # Initialize the app.
-    template = prerequisites.initialize_app(app_name, template)
+    template = prerequisites.initialize_app(app_name, template, ai)

     # Initialize the .gitignore.
     prerequisites.initialize_gitignore()

reflex/state.py
CHANGED

@@ -1396,6 +1396,29 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
         for substate in self.substates.values():
             substate.reset()

+    @classmethod
+    @functools.lru_cache
+    def _is_client_storage(cls, prop_name_or_field: str | ModelField) -> bool:
+        """Check if the var is a client storage var.
+
+        Args:
+            prop_name_or_field: The name of the var or the field itself.
+
+        Returns:
+            Whether the var is a client storage var.
+        """
+        if isinstance(prop_name_or_field, str):
+            field = cls.get_fields().get(prop_name_or_field)
+        else:
+            field = prop_name_or_field
+        return field is not None and (
+            isinstance(field.default, ClientStorageBase)
+            or (
+                isinstance(field.type_, type)
+                and issubclass(field.type_, ClientStorageBase)
+            )
+        )
+
     def _reset_client_storage(self):
         """Reset client storage base vars to their default values."""
         # Client-side storage is reset during hydrate so that clearing cookies

@@ -1403,10 +1426,7 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
         fields = self.get_fields()
         for prop_name in self.base_vars:
            field = fields[prop_name]
-            if (
-                isinstance(field.type_, type)
-                and issubclass(field.type_, ClientStorageBase)
-            ):
+            if self._is_client_storage(field):
                setattr(self, prop_name, copy.deepcopy(field.default))

         # Recursively reset the substate client storage.

@@ -2342,8 +2362,9 @@ class UpdateVarsInternalState(State):
         for var, value in vars.items():
             state_name, _, var_name = var.rpartition(".")
             var_state_cls = State.get_class_substate(state_name)
-            var_state = await self.get_state(var_state_cls)
-            setattr(var_state, var_name, value)
+            if var_state_cls._is_client_storage(var_name):
+                var_state = await self.get_state(var_state_cls)
+                setattr(var_state, var_name, value)


 class OnLoadInternalState(State):

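The new `_is_client_storage` helper centralizes a check that previously lived inline in `_reset_client_storage`: a field counts as client storage when its default value is a `ClientStorageBase` instance or its declared type subclasses `ClientStorageBase`. Because it is an `lru_cache`-wrapped classmethod, repeated lookups are cheap, and the same helper now gates `UpdateVarsInternalState` so only client-storage vars are written back from the client. A self-contained sketch of the pattern; the `Cookie`, `Field`, and `FakeState` classes below are simplified stand-ins, not Reflex's real implementations:

```python
import functools
from dataclasses import dataclass
from typing import Any


class ClientStorageBase:
    """Marker base for values persisted in the browser (simplified stand-in)."""


class Cookie(str, ClientStorageBase):
    """A cookie-backed string value (simplified stand-in)."""


@dataclass(frozen=True)
class Field:
    """Minimal, hashable stand-in for a pydantic ModelField."""
    name: str
    type_: type
    default: Any


class FakeState:
    _fields = {
        "session": Field("session", Cookie, Cookie("anon")),
        "count": Field("count", int, 0),
    }

    @classmethod
    def get_fields(cls) -> dict[str, Field]:
        return cls._fields

    @classmethod
    @functools.lru_cache
    def _is_client_storage(cls, prop_name_or_field: str | Field) -> bool:
        # Accept either a field name or the field object itself.
        if isinstance(prop_name_or_field, str):
            field = cls.get_fields().get(prop_name_or_field)
        else:
            field = prop_name_or_field
        # Client storage if the default is a ClientStorageBase instance,
        # or the declared type subclasses ClientStorageBase.
        return field is not None and (
            isinstance(field.default, ClientStorageBase)
            or (isinstance(field.type_, type) and issubclass(field.type_, ClientStorageBase))
        )


assert FakeState._is_client_storage("session") is True
assert FakeState._is_client_storage("count") is False
```

Stacking `@classmethod` over `@functools.lru_cache` works here because the cache wraps the plain function and keys on `(cls, argument)`; both arguments in the example are hashable.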
reflex/utils/prerequisites.py
CHANGED

@@ -37,7 +37,7 @@ from redis.exceptions import RedisError
 from reflex import constants, model
 from reflex.compiler import templates
 from reflex.config import Config, environment, get_config
-from reflex.utils import console, net, path_ops, processes
+from reflex.utils import console, net, path_ops, processes, redir
 from reflex.utils.exceptions import (
     GeneratedCodeHasNoFunctionDefsError,
     SystemPackageMissingError,

@@ -1695,6 +1695,31 @@ def validate_and_create_app_using_remote_template(
     )


+def generate_template_using_ai(template: str | None = None) -> str:
+    """Generate a template using AI(Flexgen).
+
+    Args:
+        template: The name of the template.
+
+    Returns:
+        The generation hash.
+
+    Raises:
+        Exit: If the template and ai flags are used.
+    """
+    if template is None:
+        # If AI is requested and no template specified, redirect the user to reflex.build.
+        return redir.reflex_build_redirect()
+    elif is_generation_hash(template):
+        # Otherwise treat the template as a generation hash.
+        return template
+    else:
+        console.error(
+            "Cannot use `--template` option with `--ai` option. Please remove `--template` option."
+        )
+        raise typer.Exit(2)
+
+
 def fetch_remote_templates(
     template: str,
 ) -> tuple[str, dict[str, Template]]:

@@ -1719,12 +1744,15 @@ def fetch_remote_templates(
     return template, available_templates


-def initialize_app(app_name: str, template: str | None = None) -> str | None:
+def initialize_app(
+    app_name: str, template: str | None = None, ai: bool = False
+) -> str | None:
     """Initialize the app either from a remote template or a blank app. If the config file exists, it is considered as reinit.

     Args:
         app_name: The name of the app.
         template: The name of the template to use.
+        ai: Whether to use AI to generate the template.

     Returns:
         The name of the template.

@@ -1740,6 +1768,11 @@ def initialize_app(app_name: str, template: str | None = None) -> str | None:
         telemetry.send("reinit")
         return

+    generation_hash = None
+    if ai:
+        generation_hash = generate_template_using_ai(template)
+        template = constants.Templates.DEFAULT
+
     templates: dict[str, Template] = {}

     # Don't fetch app templates if the user directly asked for DEFAULT.

@@ -1748,7 +1781,11 @@ def initialize_app(app_name: str, template: str | None = None) -> str | None:

     if template is None:
         template = prompt_for_template_options(get_init_cli_prompt_options())
-        if template == constants.Templates.CHOOSE_TEMPLATES:
+        if template == constants.Templates.AI:
+            generation_hash = generate_template_using_ai()
+            # change to the default to allow creation of default app
+            template = constants.Templates.DEFAULT
+        elif template == constants.Templates.CHOOSE_TEMPLATES:
            console.print(
                f"Go to the templates page ({constants.Templates.REFLEX_TEMPLATES_URL}) and copy the command to init with a template."
            )

@@ -1763,6 +1800,11 @@ def initialize_app(app_name: str, template: str | None = None) -> str | None:
         app_name=app_name, template=template, templates=templates
     )

+    # If a reflex.build generation hash is available, download the code and apply it to the main module.
+    if generation_hash:
+        initialize_main_module_index_from_generation(
+            app_name, generation_hash=generation_hash
+        )
     telemetry.send("init", template=template)

     return template

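The effect of these hunks is that `reflex init --ai` no longer short-circuits in the CLI layer; instead `initialize_app` resolves the `--ai` flag against `--template`: no template means "open reflex.build and wait for a generation hash", a value that already looks like a generation hash is used directly, and anything else is rejected because `--template` and `--ai` are mutually exclusive. The hash is held until the scaffolded app exists, then `initialize_main_module_index_from_generation` downloads the generated code into the main module. A hedged sketch of just the decision step; the `is_generation_hash` regex and the exit plumbing are simplified assumptions, not the exact Reflex internals:

```python
import re
import sys


def is_generation_hash(template: str) -> bool:
    # Assumption: reflex.build generation hashes look like hex/UUID-ish tokens.
    # The real check in reflex.utils.prerequisites may differ.
    return re.fullmatch(r"[0-9a-fA-F-]{32,36}", template) is not None


def resolve_ai_template(template: str | None, redirect_to_builder) -> str:
    """Return a generation hash for `reflex init --ai` (simplified sketch)."""
    if template is None:
        # No template given: send the user to reflex.build and wait for a hash.
        return redirect_to_builder()
    if is_generation_hash(template):
        # The "template" is already a generation hash; use it as-is.
        return template
    # A named template and --ai are mutually exclusive.
    print("Cannot use `--template` with `--ai`.", file=sys.stderr)
    raise SystemExit(2)


# Example: a pre-selected generation hash passes straight through.
print(resolve_ai_template("0f3a9c1e-7b42-4a65-9d2f-8c1b2e3d4f5a", lambda: "unused"))
```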
reflex/utils/pyi_generator.py
CHANGED

@@ -348,7 +348,7 @@ def _extract_class_props_as_ast_nodes(
     all_props = []
     kwargs = []
     for target_class in clzs:
-        event_triggers = target_class
+        event_triggers = target_class().get_event_triggers()
         # Import from the target class to ensure type hints are resolvable.
         exec(f"from {target_class.__module__} import *", type_hint_globals)
         for name, value in target_class.__annotations__.items():

@@ -575,7 +575,7 @@ def _generate_component_create_functiondef(
            return ast.Name(id=f"{' | '.join(map(ast.unparse, all_count_args_type))}")
        return ast.Name(id="EventType[Any]")

-    event_triggers = clz
+    event_triggers = clz().get_event_triggers()

    # event handler kwargs
    kwargs.extend(

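Both hunks switch the `.pyi` generator to instantiating the component and calling `get_event_triggers()` on the instance, so the stubs are built from the same trigger map the runtime exposes. A minimal sketch of turning an instance-level trigger map into keyword-argument names for a generated stub; the `Component` and `Button` classes here are hypothetical, not Reflex's:

```python
from typing import Any, Callable


class Component:
    def get_event_triggers(self) -> dict[str, Any]:
        # Base triggers shared by every component.
        return {"on_click": Callable, "on_mount": Callable}


class Button(Component):
    def get_event_triggers(self) -> dict[str, Any]:
        # Instance-level map can extend or override the base triggers.
        return {**super().get_event_triggers(), "on_double_click": Callable}


def stub_kwargs(component_cls: type[Component]) -> list[str]:
    """Collect event-handler keyword names for a generated create() stub."""
    event_triggers = component_cls().get_event_triggers()
    return sorted(event_triggers)


print(stub_kwargs(Button))  # ['on_click', 'on_double_click', 'on_mount']
```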
reflex/utils/redir.py
CHANGED

@@ -1,6 +1,7 @@
 """Utilities to handle redirection to browser UI."""

 import time
+import uuid
 import webbrowser

 import httpx

@@ -47,6 +48,14 @@ def open_browser_and_wait(
     return response


-def reflex_build_redirect() ->
-    """Open the browser window to reflex.build.
-
+def reflex_build_redirect() -> str:
+    """Open the browser window to reflex.build and wait for the user to select a generation.
+
+    Returns:
+        The selected generation hash.
+    """
+    token = str(uuid.uuid4())
+    target_url = constants.Templates.REFLEX_BUILD_URL.format(reflex_init_token=token)
+    poll_url = constants.Templates.REFLEX_BUILD_POLL_URL.format(reflex_init_token=token)
+    response = open_browser_and_wait(target_url, poll_url)
+    return response.json()["generation_hash"]

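`reflex_build_redirect` now mints a one-time token, opens reflex.build with that token in the URL, and reuses the existing `open_browser_and_wait` helper to poll until the site reports which generation the user picked, returning the `generation_hash` from the JSON response. A self-contained sketch of the same open-and-poll shape; the URLs and the polling helper here are illustrative placeholders, not the real `constants.Templates` values or the real helper:

```python
import time
import uuid
import webbrowser

import httpx

# Placeholder endpoints -- the real values live in reflex.constants.Templates.
BUILD_URL = "https://example.invalid/gen?token={token}"
POLL_URL = "https://example.invalid/poll?token={token}"


def open_browser_and_wait(target_url: str, poll_url: str, interval: float = 2.0) -> httpx.Response:
    """Open the browser, then poll until the backend reports a result."""
    webbrowser.open(target_url)
    while True:
        response = httpx.get(poll_url, follow_redirects=True)
        if response.status_code == 200:
            return response
        time.sleep(interval)


def reflex_build_redirect() -> str:
    """Return the generation hash the user selected in the browser."""
    token = str(uuid.uuid4())
    response = open_browser_and_wait(
        BUILD_URL.format(token=token), POLL_URL.format(token=token)
    )
    return response.json()["generation_hash"]
```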
reflex/vars/base.py
CHANGED

@@ -3,7 +3,6 @@
 from __future__ import annotations

 import contextlib
-import copy
 import dataclasses
 import datetime
 import functools

@@ -2147,7 +2146,7 @@ class ComputedVar(Var[RETURN_TYPE]):
            "fget": kwargs.pop("fget", self._fget),
            "initial_value": kwargs.pop("initial_value", self._initial_value),
            "cache": kwargs.pop("cache", self._cache),
-            "deps": kwargs.pop("deps",
+            "deps": kwargs.pop("deps", self._static_deps),
            "auto_deps": kwargs.pop("auto_deps", self._auto_deps),
            "interval": kwargs.pop("interval", self._update_interval),
            "backend": kwargs.pop("backend", self._backend),

@@ -2319,7 +2318,7 @@ class ComputedVar(Var[RETURN_TYPE]):
        if not _isinstance(value, self._var_type, nested=1, treat_var_as_type=False):
            console.error(
                f"Computed var '{type(instance).__name__}.{self._js_expr}' must return"
-                f"
+                f" type '{self._var_type}', got '{type(value)}'."
            )

    def _deps(

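The vars/base.py hunks are housekeeping: the unused `copy` import is dropped, the `"deps"` entry in `_replace` now falls back to `self._static_deps`, and the type-mismatch error message gets a fuller second line. The `kwargs.pop(name, current_value)` idiom used in `ComputedVar._replace` is worth spelling out on its own; a minimal stand-alone sketch, where `CachedVar` is illustrative and not Reflex's class:

```python
from dataclasses import dataclass
from typing import Any


@dataclass(frozen=True)
class CachedVar:
    name: str
    cache: bool = True
    deps: tuple[str, ...] = ()
    interval: float | None = None

    def _replace(self, **kwargs: Any) -> "CachedVar":
        """Return a copy, overriding only the fields passed in kwargs."""
        field_values = {
            "name": kwargs.pop("name", self.name),
            # Each field falls back to the current value when not overridden,
            # mirroring how ComputedVar._replace pops "deps", "cache", etc.
            "cache": kwargs.pop("cache", self.cache),
            "deps": kwargs.pop("deps", self.deps),
            "interval": kwargs.pop("interval", self.interval),
        }
        if kwargs:
            raise TypeError(f"Unexpected keyword arguments: {list(kwargs)}")
        return type(self)(**field_values)


v = CachedVar(name="total")
print(v._replace(deps=("items",)))  # CachedVar(name='total', cache=True, deps=('items',), interval=None)
```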
{reflex-0.7.2.dev1.dist-info → reflex-0.7.2.post1.dist-info}/METADATA
CHANGED

@@ -1,45 +1,51 @@
-Metadata-Version: 2.
+Metadata-Version: 2.3
 Name: reflex
-Version: 0.7.2.
+Version: 0.7.2.post1
 Summary: Web apps in pure Python.
+License: Apache-2.0
 Keywords: web,framework
-Author:
-Author-
-
-
-
-
-
-
-
-
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: python-
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: reflex-hosting-cli>=0.1.29
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: twine
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
+Author: Nikhil Rao
+Author-email: nikhil@reflex.dev
+Requires-Python: >=3.10,<4.0
+Classifier: Development Status :: 4 - Beta
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Requires-Dist: alembic (>=1.11.1,<2.0)
+Requires-Dist: build (>=1.0.3,<2.0)
+Requires-Dist: charset-normalizer (>=3.3.2,<4.0)
+Requires-Dist: distro (>=1.8.0,<2.0) ; sys_platform == "linux"
+Requires-Dist: fastapi (>=0.96.0,!=0.111.0,!=0.111.1)
+Requires-Dist: gunicorn (>=20.1.0,<24.0)
+Requires-Dist: httpx (>=0.25.1,<1.0)
+Requires-Dist: jinja2 (>=3.1.2,<4.0)
+Requires-Dist: lazy_loader (>=0.4)
+Requires-Dist: packaging (>=23.1,<25.0)
+Requires-Dist: platformdirs (>=3.10.0,<5.0)
+Requires-Dist: psutil (>=5.9.4,<7.0)
+Requires-Dist: pydantic (>=1.10.21,<3.0)
+Requires-Dist: python-engineio (!=4.6.0)
+Requires-Dist: python-multipart (>=0.0.5,<0.1)
+Requires-Dist: python-socketio (>=5.7.0,<6.0)
+Requires-Dist: redis (>=4.3.5,<6.0)
+Requires-Dist: reflex-hosting-cli (>=0.1.29)
+Requires-Dist: rich (>=13.0.0,<14.0)
+Requires-Dist: setuptools (>=75.0)
+Requires-Dist: sqlmodel (>=0.0.14,<0.1)
+Requires-Dist: starlette-admin (>=0.11.0,<1.0)
+Requires-Dist: tomlkit (>=0.12.4,<1.0)
+Requires-Dist: twine (>=4.0.0,<7.0)
+Requires-Dist: typer (>=0.15.1,<1.0)
+Requires-Dist: typing_extensions (>=4.6.0)
+Requires-Dist: uvicorn (>=0.20.0)
+Requires-Dist: wheel (>=0.42.0,<1.0)
+Requires-Dist: wrapt (>=1.17.0,<2.0)
+Project-URL: Documentation, https://reflex.dev/docs/getting-started/introduction
+Project-URL: Homepage, https://reflex.dev
+Project-URL: Repository, https://github.com/reflex-dev/reflex
 Description-Content-Type: text/markdown


@@ -301,3 +307,4 @@ We are actively looking for contributors, no matter your skill level or experience
 ## License

 Reflex is open-source and licensed under the [Apache License 2.0](LICENSE).
+

{reflex-0.7.2.dev1.dist-info → reflex-0.7.2.post1.dist-info}/RECORD
CHANGED

@@ -1,16 +1,3 @@
-benchmarks/__init__.py,sha256=EPwQDZ_qYgf5GFMdYQGHWDbpkLvR1OdQiEvPkVByYpM,89
-benchmarks/benchmark_compile_times.py,sha256=DA0MuUVF2SGXun1cIO6So_B7FE78YZepJkq2JUvHHK4,4500
-benchmarks/benchmark_imports.py,sha256=rC9Ke0n4h9lty3GEfLF0nODZpbMpiiAPqWVkDLATdHk,3733
-benchmarks/benchmark_lighthouse.py,sha256=EdoTJ9oOyWTalj3OZn5C_-J76kR3Tedw_WjDxzM52F8,2347
-benchmarks/benchmark_package_size.py,sha256=118Np7CIX-T2lG5OGFISm_KPfrni-pMRz3aFfrFUdkw,3824
-benchmarks/benchmark_web_size.py,sha256=KG3rWk8ARg6K7eqtwg5qTIjgBDev0zG3rPz_MlMAqLo,2972
-benchmarks/conftest.py,sha256=ekR_xO0FL2c9W_zLCTMRn35uPjdqPma0IbIcSn2WKPU,487
-benchmarks/lighthouse.sh,sha256=fbOaaTOvE69Z23nEhA4od-v_WehyLvtI1FJfPjYdPPk,2139
-benchmarks/utils.py,sha256=NTI9WzkTvr4lE20GKh-DZ30Wc0Xqs-KN2Nb5og2dPzQ,1968
-reflex-0.7.2.dev1.dist-info/METADATA,sha256=31Tj75jzbJX_H6-g04JcBF9sbkiHtjYKM5OQCLzzrlE,11764
-reflex-0.7.2.dev1.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
-reflex-0.7.2.dev1.dist-info/entry_points.txt,sha256=XfumVjOeM8bxbPMTjy5CvSe65xnMKHCBQ4MxWWHCidM,61
-reflex-0.7.2.dev1.dist-info/licenses/LICENSE,sha256=dw3zLrp9f5ObD7kqS32vWfhcImfO52PMmRqvtxq_YEE,11358
 reflex/.templates/apps/blank/assets/favicon.ico,sha256=baxxgDAQ2V4-G5Q4S2yK5uUJTUGkv-AOWBQ0xd6myUo,4286
 reflex/.templates/apps/blank/code/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reflex/.templates/apps/blank/code/blank.py,sha256=oKnsBBZM1-_RFAuwGKgfiCzgsrHlN_m_XP0-Fpnld7k,926

@@ -376,9 +363,9 @@ reflex/middleware/middleware.py,sha256=p5VVoIgQ_NwOg_GOY6g0S4fmrV76_VE1zt-HiwbMw
 reflex/model.py,sha256=k6qCweATPW1YRB_qcHwa5X35btJmtIlB4zEQ63FaW3w,17527
 reflex/page.py,sha256=qEt8n5EtawSywCzdsiaNQJWhC8ie-vg8ig0JGuVavPI,2386
 reflex/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-reflex/reflex.py,sha256=
+reflex/reflex.py,sha256=idDTvinthj0VsTP_TS225Y4p9wBSTTeGllRbyqiEmYU,19325
 reflex/route.py,sha256=nn_hJwtQdjiqH_dHXfqMGWKllnyPQZTSR-KWdHDhoOs,4210
-reflex/state.py,sha256=
+reflex/state.py,sha256=HEvMOnqF6TVE7CGhVb6qW_wDNRMFxJ4j9Lhc1iSIp-c,142185
 reflex/style.py,sha256=dilXPn8de80NzsXT53GPJrmjELC5nPYIlCgongyq1zM,13145
 reflex/testing.py,sha256=wzqppu_-4e1QeFJ-vLVpW19egTGm-JpU_c7wUPiURlE,35693
 reflex/utils/__init__.py,sha256=y-AHKiRQAhk2oAkvn7W8cRVTZVK625ff8tTwvZtO7S4,24

@@ -395,20 +382,24 @@ reflex/utils/imports.py,sha256=-EkUt9y5U3qmImjfpsXwYh7JI9qJHd_L6X9y12EPJew,3921
 reflex/utils/lazy_loader.py,sha256=-3DcwIqHNft2fb1ikgDYAMiEwNfbiWfrTBAf1gEVX2o,1367
 reflex/utils/net.py,sha256=0Yd9OLK8R_px2sqnqrDkTky6hYHtG2pEDvvilOjDfjc,1219
 reflex/utils/path_ops.py,sha256=Sio_pZ9-dqu6pAPUkO_JA9ONXDsyLGKWOVRoA-dCrec,7903
-reflex/utils/prerequisites.py,sha256=
+reflex/utils/prerequisites.py,sha256=Y6M53kvLVNnLzws9HAUexmNUp8zAyexhhR6ivsPz3bo,65033
 reflex/utils/processes.py,sha256=1iZe-3Yrg-ja8jZxxAfggljqqcJgsFu8fi4bu4XQGx0,13489
-reflex/utils/pyi_generator.py,sha256=
-reflex/utils/redir.py,sha256=
+reflex/utils/pyi_generator.py,sha256=jVgUULay9ROrIaIuMRbfeXMyJCvsl9tNMVRfK8ruqBg,41582
+reflex/utils/redir.py,sha256=bmQGAgoNWwySeLRQTpoMpmKInwIOCW77wkXT61fwcj8,1868
 reflex/utils/registry.py,sha256=bseD0bIO8b3pctHKpD5J2MRdDzcf7eWKtHEZVutVNJ0,1401
 reflex/utils/serializers.py,sha256=K8-erpNIjJNIKif0cDFExa9f5DEVuQUq0j5v5VH6aBI,13408
 reflex/utils/telemetry.py,sha256=qwJBwjdtAV-OGKgO4h-NWhgTvfC3gbduBdn1UB8Ikes,5608
 reflex/utils/types.py,sha256=nGX44Q_Jp33wIaxf2vxANwBWe1743V2B8RRS8H9yV4c,33449
 reflex/vars/__init__.py,sha256=2Kv6Oh9g3ISZFESjL1al8KiO7QBZUXmLKGMCBsP-DoY,1243
-reflex/vars/base.py,sha256=
+reflex/vars/base.py,sha256=dehVty3r8P88V98ONX-Zeeyz6hQW6cuZEjNDVhcH0PQ,101295
 reflex/vars/datetime.py,sha256=WOEzQF6qjMjYvCat80XxgB_4hmVNHwIIZNMBSmfu0PM,5790
 reflex/vars/dep_tracking.py,sha256=kluvF4Pfbpdqf0GcpmYHjT1yP-D1erAzaSQP6qIxjB0,13846
 reflex/vars/function.py,sha256=2sVnhgetPSwtor8VFtAiYJdzZ9IRNzAKdsUJG6dXQcE,14461
 reflex/vars/number.py,sha256=RHY_KsUxliIgn7sptYPPyDubIfLkGYr0TZjX4PB_dgI,29334
 reflex/vars/object.py,sha256=cHVXN7I1MNw32KfpYKcmgStNSD4BnF3Y2CjkPABmjeo,16233
 reflex/vars/sequence.py,sha256=X4Gducv2u6fSEZm9uBlMr030bhDO0jUxnKkUXNg4Mwg,54878
-reflex-0.7.2.
+reflex-0.7.2.post1.dist-info/LICENSE,sha256=dw3zLrp9f5ObD7kqS32vWfhcImfO52PMmRqvtxq_YEE,11358
+reflex-0.7.2.post1.dist-info/METADATA,sha256=euNHR_k4oOMm6azQaPXKeU5RlVS-2hb-AyW4CzsqJOs,11879
+reflex-0.7.2.post1.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+reflex-0.7.2.post1.dist-info/entry_points.txt,sha256=H1Z5Yat_xJfy0dRT1Frk2PkO_p41Xy7fCKlj4FcdL9o,44
+reflex-0.7.2.post1.dist-info/RECORD,,

benchmarks/__init__.py
DELETED

benchmarks/benchmark_compile_times.py
DELETED

@@ -1,147 +0,0 @@
-"""Extracts the compile times from the JSON files in the specified directory and inserts them into the database."""
-
-from __future__ import annotations
-
-import argparse
-import json
-import os
-from pathlib import Path
-
-from utils import send_data_to_posthog
-
-
-def extract_stats_from_json(json_file: str) -> list[dict]:
-    """Extracts the stats from the JSON data and returns them as a list of dictionaries.
-
-    Args:
-        json_file: The JSON file to extract the stats data from.
-
-    Returns:
-        list[dict]: The stats for each test.
-    """
-    with Path(json_file).open() as file:
-        json_data = json.load(file)
-
-    # Load the JSON data if it is a string, otherwise assume it's already a dictionary
-    data = json.loads(json_data) if isinstance(json_data, str) else json_data
-
-    # Initialize an empty list to store the stats for each test
-    test_stats = []
-
-    # Iterate over each test in the 'benchmarks' list
-    for test in data.get("benchmarks", []):
-        group = test.get("group", None)
-        stats = test.get("stats", {})
-        full_name = test.get("fullname")
-        file_name = (
-            full_name.split("/")[-1].split("::")[0].strip(".py") if full_name else None
-        )
-        test_name = test.get("name", "Unknown Test")
-
-        test_stats.append(
-            {
-                "test_name": test_name,
-                "group": group,
-                "stats": stats,
-                "full_name": full_name,
-                "file_name": file_name,
-            }
-        )
-    return test_stats
-
-
-def insert_benchmarking_data(
-    os_type_version: str,
-    python_version: str,
-    performance_data: list[dict],
-    commit_sha: str,
-    pr_title: str,
-    branch_name: str,
-    event_type: str,
-    pr_id: str,
-):
-    """Insert the benchmarking data into the database.
-
-    Args:
-        os_type_version: The OS type and version to insert.
-        python_version: The Python version to insert.
-        performance_data: The performance data of reflex web to insert.
-        commit_sha: The commit SHA to insert.
-        pr_title: The PR title to insert.
-        branch_name: The name of the branch.
-        event_type: Type of github event(push, pull request, etc).
-        pr_id: Id of the PR.
-    """
-    # Prepare the event data
-    properties = {
-        "os": os_type_version,
-        "python_version": python_version,
-        "distinct_id": commit_sha,
-        "pr_title": pr_title,
-        "branch_name": branch_name,
-        "event_type": event_type,
-        "performance": performance_data,
-        "pr_id": pr_id,
-    }
-
-    send_data_to_posthog("simple_app_benchmark", properties)
-
-
-def main():
-    """Runs the benchmarks and inserts the results."""
-    # Get the commit SHA and JSON directory from the command line arguments
-    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
-    parser.add_argument(
-        "--os", help="The OS type and version to insert into the database."
-    )
-    parser.add_argument(
-        "--python-version", help="The Python version to insert into the database."
-    )
-    parser.add_argument(
-        "--commit-sha", help="The commit SHA to insert into the database."
-    )
-    parser.add_argument(
-        "--benchmark-json",
-        help="The JSON file containing the benchmark results.",
-    )
-    parser.add_argument(
-        "--pr-title",
-        help="The PR title to insert into the database.",
-    )
-    parser.add_argument(
-        "--branch-name",
-        help="The current branch",
-        required=True,
-    )
-    parser.add_argument(
-        "--event-type",
-        help="The github event type",
-        required=True,
-    )
-    parser.add_argument(
-        "--pr-id",
-        help="ID of the PR.",
-        required=True,
-    )
-    args = parser.parse_args()
-
-    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
-    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
-
-    # Get the results of pytest benchmarks
-    cleaned_benchmark_results = extract_stats_from_json(args.benchmark_json)
-    # Insert the data into the database
-    insert_benchmarking_data(
-        os_type_version=args.os,
-        python_version=args.python_version,
-        performance_data=cleaned_benchmark_results,
-        commit_sha=args.commit_sha,
-        pr_title=pr_title,
-        branch_name=args.branch_name,
-        event_type=args.event_type,
-        pr_id=args.pr_id,
-    )
-
-
-if __name__ == "__main__":
-    main()

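The deleted `benchmark_compile_times.py` flattened a pytest-benchmark JSON report into one record per test before uploading. A hedged sketch of just that extraction step; the `benchmarks`, `fullname`, and `stats` keys follow pytest-benchmark's report format, the sample shape is assumed, and `removesuffix` stands in for the original's `.strip(".py")`, which strips characters rather than a suffix:

```python
import json
from pathlib import Path


def extract_stats(report_path: str) -> list[dict]:
    """Flatten a pytest-benchmark JSON report into one dict per test."""
    data = json.loads(Path(report_path).read_text())
    rows = []
    for test in data.get("benchmarks", []):
        full_name = test.get("fullname")
        rows.append(
            {
                "test_name": test.get("name", "Unknown Test"),
                "group": test.get("group"),
                "stats": test.get("stats", {}),
                "full_name": full_name,
                # e.g. "benchmarks/test_compile.py::test_app" -> "test_compile"
                "file_name": full_name.split("/")[-1].split("::")[0].removesuffix(".py")
                if full_name
                else None,
            }
        )
    return rows
```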
benchmarks/benchmark_imports.py
DELETED

@@ -1,128 +0,0 @@
-"""Extract and upload benchmarking data to PostHog."""
-
-from __future__ import annotations
-
-import argparse
-import json
-import os
-from pathlib import Path
-
-from utils import send_data_to_posthog
-
-
-def extract_stats_from_json(json_file: str) -> dict:
-    """Extracts the stats from the JSON data and returns them as dictionaries.
-
-    Args:
-        json_file: The JSON file to extract the stats data from.
-
-    Returns:
-        dict: The stats for each test.
-    """
-    with Path(json_file).open() as file:
-        json_data = json.load(file)
-
-    # Load the JSON data if it is a string, otherwise assume it's already a dictionary
-    data = json.loads(json_data) if isinstance(json_data, str) else json_data
-
-    result = data.get("results", [{}])[0]
-    return {
-        k: v
-        for k, v in result.items()
-        if k in ("mean", "stddev", "median", "min", "max")
-    }
-
-
-def insert_benchmarking_data(
-    os_type_version: str,
-    python_version: str,
-    performance_data: dict,
-    commit_sha: str,
-    pr_title: str,
-    branch_name: str,
-    pr_id: str,
-    app_name: str,
-):
-    """Insert the benchmarking data into the database.
-
-    Args:
-        os_type_version: The OS type and version to insert.
-        python_version: The Python version to insert.
-        performance_data: The imports performance data to insert.
-        commit_sha: The commit SHA to insert.
-        pr_title: The PR title to insert.
-        branch_name: The name of the branch.
-        pr_id: Id of the PR.
-        app_name: The name of the app being measured.
-    """
-    properties = {
-        "os": os_type_version,
-        "python_version": python_version,
-        "distinct_id": commit_sha,
-        "pr_title": pr_title,
-        "branch_name": branch_name,
-        "pr_id": pr_id,
-        "performance": performance_data,
-        "app_name": app_name,
-    }
-
-    send_data_to_posthog("import_benchmark", properties)
-
-
-def main():
-    """Runs the benchmarks and inserts the results."""
-    # Get the commit SHA and JSON directory from the command line arguments
-    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
-    parser.add_argument(
-        "--os", help="The OS type and version to insert into the database."
-    )
-    parser.add_argument(
-        "--python-version", help="The Python version to insert into the database."
-    )
-    parser.add_argument(
-        "--commit-sha", help="The commit SHA to insert into the database."
-    )
-    parser.add_argument(
-        "--benchmark-json",
-        help="The JSON file containing the benchmark results.",
-    )
-    parser.add_argument(
-        "--pr-title",
-        help="The PR title to insert into the database.",
-    )
-    parser.add_argument(
-        "--branch-name",
-        help="The current branch",
-        required=True,
-    )
-    parser.add_argument(
-        "--app-name",
-        help="The name of the app measured.",
-        required=True,
-    )
-    parser.add_argument(
-        "--pr-id",
-        help="ID of the PR.",
-        required=True,
-    )
-    args = parser.parse_args()
-
-    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
-    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
-
-    cleaned_benchmark_results = extract_stats_from_json(args.benchmark_json)
-    # Insert the data into the database
-    insert_benchmarking_data(
-        os_type_version=args.os,
-        python_version=args.python_version,
-        performance_data=cleaned_benchmark_results,
-        commit_sha=args.commit_sha,
-        pr_title=pr_title,
-        branch_name=args.branch_name,
-        app_name=args.app_name,
-        pr_id=args.pr_id,
-    )
-
-
-if __name__ == "__main__":
-    main()

benchmarks/benchmark_lighthouse.py
DELETED

@@ -1,75 +0,0 @@
-"""Extracts the Lighthouse scores from the JSON files in the specified directory and inserts them into the database."""
-
-from __future__ import annotations
-
-import json
-import sys
-from pathlib import Path
-
-from utils import send_data_to_posthog
-
-
-def insert_benchmarking_data(
-    lighthouse_data: dict,
-    commit_sha: str,
-):
-    """Insert the benchmarking data into the database.
-
-    Args:
-        lighthouse_data: The Lighthouse data to insert.
-        commit_sha: The commit SHA to insert.
-    """
-    properties = {
-        "distinct_id": commit_sha,
-        "lighthouse_data": lighthouse_data,
-    }
-
-    # Send the data to PostHog
-    send_data_to_posthog("lighthouse_benchmark", properties)
-
-
-def get_lighthouse_scores(directory_path: str | Path) -> dict:
-    """Extracts the Lighthouse scores from the JSON files in the specified directory.
-
-    Args:
-        directory_path (str): The path to the directory containing the JSON files.
-
-    Returns:
-        dict: The Lighthouse scores.
-    """
-    scores = {}
-    directory_path = Path(directory_path)
-    try:
-        for filename in directory_path.iterdir():
-            if filename.suffix == ".json" and filename.stem != "manifest":
-                data = json.loads(filename.read_text())
-                # Extract scores and add them to the dictionary with the filename as key
-                scores[data["finalUrl"].replace("http://localhost:3000/", "/")] = {
-                    "performance_score": data["categories"]["performance"]["score"],
-                    "accessibility_score": data["categories"]["accessibility"]["score"],
-                    "best_practices_score": data["categories"]["best-practices"][
-                        "score"
-                    ],
-                    "seo_score": data["categories"]["seo"]["score"],
-                }
-    except Exception as e:
-        return {"error": e}
-
-    return scores
-
-
-def main():
-    """Runs the benchmarks and inserts the results into the database."""
-    # Get the commit SHA and JSON directory from the command line arguments
-    commit_sha = sys.argv[1]
-    json_dir = sys.argv[2]
-
-    # Get the Lighthouse scores
-    lighthouse_scores = get_lighthouse_scores(json_dir)
-
-    # Insert the data into the database
-    insert_benchmarking_data(lighthouse_scores, commit_sha)
-
-
-if __name__ == "__main__":
-    main()

benchmarks/benchmark_package_size.py
DELETED

@@ -1,135 +0,0 @@
-"""Checks the size of a specific directory and uploads result to Posthog."""
-
-import argparse
-import os
-from pathlib import Path
-
-from utils import get_directory_size, get_python_version, send_data_to_posthog
-
-
-def get_package_size(venv_path: Path, os_name):
-    """Get the size of a specified package.
-
-    Args:
-        venv_path: The path to the venv.
-        os_name: Name of os.
-
-    Returns:
-        The total size of the package in bytes.
-
-    Raises:
-        ValueError: when venv does not exist or python version is None.
-    """
-    python_version = get_python_version(venv_path, os_name)
-    print("Python version:", python_version)
-    if python_version is None:
-        raise ValueError("Error: Failed to determine Python version.")
-
-    is_windows = "windows" in os_name
-
-    package_dir: Path = (
-        venv_path / "lib" / f"python{python_version}" / "site-packages"
-        if not is_windows
-        else venv_path / "Lib" / "site-packages"
-    )
-    if not package_dir.exists():
-        raise ValueError(
-            "Error: Virtual environment does not exist or is not activated."
-        )
-
-    total_size = get_directory_size(package_dir)
-    return total_size
-
-
-def insert_benchmarking_data(
-    os_type_version: str,
-    python_version: str,
-    commit_sha: str,
-    pr_title: str,
-    branch_name: str,
-    pr_id: str,
-    path: str,
-):
-    """Insert the benchmarking data into PostHog.
-
-    Args:
-        os_type_version: The OS type and version to insert.
-        python_version: The Python version to insert.
-        commit_sha: The commit SHA to insert.
-        pr_title: The PR title to insert.
-        branch_name: The name of the branch.
-        pr_id: The id of the PR.
-        path: The path to the dir or file to check size.
-    """
-    if "./dist" in path:
-        size = get_directory_size(Path(path))
-    else:
-        size = get_package_size(Path(path), os_type_version)
-
-    # Prepare the event data
-    properties = {
-        "path": path,
-        "os": os_type_version,
-        "python_version": python_version,
-        "distinct_id": commit_sha,
-        "pr_title": pr_title,
-        "branch_name": branch_name,
-        "pr_id": pr_id,
-        "size_mb": round(
-            size / (1024 * 1024), 3
-        ),  # save size in MB and round to 3 places
-    }
-
-    send_data_to_posthog("package_size", properties)
-
-
-def main():
-    """Runs the benchmarks and inserts the results."""
-    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
-    parser.add_argument(
-        "--os", help="The OS type and version to insert into the database."
-    )
-    parser.add_argument(
-        "--python-version", help="The Python version to insert into the database."
-    )
-    parser.add_argument(
-        "--commit-sha", help="The commit SHA to insert into the database."
-    )
-    parser.add_argument(
-        "--pr-title",
-        help="The PR title to insert into the database.",
-    )
-    parser.add_argument(
-        "--branch-name",
-        help="The current branch",
-        required=True,
-    )
-    parser.add_argument(
-        "--pr-id",
-        help="The pr id",
-        required=True,
-    )
-    parser.add_argument(
-        "--path",
-        help="The path to the vnenv.",
-        required=True,
-    )
-    args = parser.parse_args()
-
-    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
-    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
-
-    # Insert the data into the database
-    insert_benchmarking_data(
-        os_type_version=args.os,
-        python_version=args.python_version,
-        commit_sha=args.commit_sha,
-        pr_title=pr_title,
-        branch_name=args.branch_name,
-        pr_id=args.pr_id,
-        path=args.path,
-    )
-
-
-if __name__ == "__main__":
-    main()

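The deleted `benchmark_package_size.py` measured an installed package set by locating the virtual environment's `site-packages` directory (under `lib/pythonX.Y/` on POSIX and `Lib/` on Windows) and summing file sizes. A compact sketch of that lookup, simplified to use the running interpreter's version instead of shelling out to the venv's Python:

```python
import sys
from pathlib import Path


def site_packages_dir(venv_path: Path) -> Path:
    """Locate site-packages inside a venv, assuming it matches this interpreter's version."""
    if sys.platform == "win32":
        return venv_path / "Lib" / "site-packages"
    version = f"python{sys.version_info.major}.{sys.version_info.minor}"
    return venv_path / "lib" / version / "site-packages"


def directory_size_mb(directory: Path) -> float:
    """Total size of all files under a directory, in megabytes."""
    total = sum(f.stat().st_size for f in directory.rglob("*") if f.is_file())
    return round(total / (1024 * 1024), 3)


# Example usage: print(directory_size_mb(site_packages_dir(Path(".venv"))))
```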
benchmarks/benchmark_web_size.py
DELETED

@@ -1,106 +0,0 @@
-"""Checks the size of a specific directory and uploads result to Posthog."""
-
-import argparse
-import os
-from pathlib import Path
-
-from utils import get_directory_size, send_data_to_posthog
-
-
-def insert_benchmarking_data(
-    os_type_version: str,
-    python_version: str,
-    app_name: str,
-    commit_sha: str,
-    pr_title: str,
-    branch_name: str,
-    pr_id: str,
-    path: str,
-):
-    """Insert the benchmarking data into PostHog.
-
-    Args:
-        app_name: The name of the app being measured.
-        os_type_version: The OS type and version to insert.
-        python_version: The Python version to insert.
-        commit_sha: The commit SHA to insert.
-        pr_title: The PR title to insert.
-        branch_name: The name of the branch.
-        pr_id: The id of the PR.
-        path: The path to the dir or file to check size.
-    """
-    size = get_directory_size(Path(path))
-
-    # Prepare the event data
-    properties = {
-        "app_name": app_name,
-        "os": os_type_version,
-        "python_version": python_version,
-        "distinct_id": commit_sha,
-        "pr_title": pr_title,
-        "branch_name": branch_name,
-        "pr_id": pr_id,
-        "size_mb": round(
-            size / (1024 * 1024), 3
-        ),  # save size in MB and round to 3 places
-    }
-
-    send_data_to_posthog("web-size", properties)
-
-
-def main():
-    """Runs the benchmarks and inserts the results."""
-    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
-    parser.add_argument(
-        "--os", help="The OS type and version to insert into the database."
-    )
-    parser.add_argument(
-        "--python-version", help="The Python version to insert into the database."
-    )
-    parser.add_argument(
-        "--commit-sha", help="The commit SHA to insert into the database."
-    )
-    parser.add_argument(
-        "--pr-title",
-        help="The PR title to insert into the database.",
-    )
-    parser.add_argument(
-        "--branch-name",
-        help="The current branch",
-        required=True,
-    )
-    parser.add_argument(
-        "--app-name",
-        help="The name of the app measured.",
-        required=True,
-    )
-    parser.add_argument(
-        "--pr-id",
-        help="The pr id",
-        required=True,
-    )
-    parser.add_argument(
-        "--path",
-        help="The current path to app to check.",
-        required=True,
-    )
-    args = parser.parse_args()
-
-    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
-    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
-
-    # Insert the data into the database
-    insert_benchmarking_data(
-        app_name=args.app_name,
-        os_type_version=args.os,
-        python_version=args.python_version,
-        commit_sha=args.commit_sha,
-        pr_title=pr_title,
-        branch_name=args.branch_name,
-        pr_id=args.pr_id,
-        path=args.path,
-    )
-
-
-if __name__ == "__main__":
-    main()

benchmarks/conftest.py
DELETED

@@ -1,20 +0,0 @@
-"""Shared conftest for all benchmark tests."""
-
-import pytest
-
-from reflex.testing import AppHarness, AppHarnessProd
-
-
-@pytest.fixture(
-    scope="session", params=[AppHarness, AppHarnessProd], ids=["dev", "prod"]
-)
-def app_harness_env(request):
-    """Parametrize the AppHarness class to use for the test, either dev or prod.
-
-    Args:
-        request: The pytest fixture request object.
-
-    Returns:
-        The AppHarness class to use for the test.
-    """
-    return request.param

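The deleted `conftest.py` parametrized every benchmark over a dev harness (`AppHarness`) and a prod harness (`AppHarnessProd`) with a single session-scoped fixture. A self-contained sketch of how a test consumes such a fixture; the harness classes below are stand-ins for the `reflex.testing` ones:

```python
import pytest


class DevHarness:
    """Stand-in for reflex.testing.AppHarness (dev-mode app runner)."""


class ProdHarness(DevHarness):
    """Stand-in for reflex.testing.AppHarnessProd (prod build runner)."""


@pytest.fixture(scope="session", params=[DevHarness, ProdHarness], ids=["dev", "prod"])
def app_harness_env(request):
    # Each benchmark runs twice: once per harness class.
    return request.param


def test_harness_selected(app_harness_env):
    assert issubclass(app_harness_env, DevHarness)
```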
benchmarks/lighthouse.sh
DELETED

@@ -1,77 +0,0 @@
-#!/bin/bash
-
-# Change directory to the first argument passed to the script
-project_dir=$1
-shift
-pushd "$project_dir" || exit 1
-echo "Changed directory to $project_dir"
-
-
-# So we get stdout / stderr from Python ASAP. Without this, delays can be very long (e.g. on Windows, Github Actions)
-export PYTHONUNBUFFERED=1
-
-env_mode=$1
-shift
-check_ports=${1:-3000 8000}
-shift
-
-# Start the server in the background
-export TELEMETRY_ENABLED=false
-reflex run --env "$env_mode" "$@" & pid=$!
-
-# Within the context of this bash, $pid_in_bash is what we need to pass to "kill" on exit
-# This is true on all platforms.
-pid_in_bash=$pid
-trap "kill -INT $pid_in_bash ||:" EXIT
-
-echo "Started server with PID $pid"
-
-# Assume we run from the root of the repo
-popd
-
-# In Windows, our Python script below needs to work with the WINPID
-if [ -f /proc/$pid/winpid ]; then
-  pid=$(cat /proc/$pid/winpid)
-  echo "Windows detected, passing winpid $pid to port waiter"
-fi
-
-python scripts/wait_for_listening_port.py $check_ports --timeout=600 --server-pid "$pid"
-
-
-# Check if something is running on port 3000
-if curl --output /dev/null --silent --head --fail "http://localhost:3000"; then
-  echo "URL exists: http://localhost:3000"
-else
-  echo "URL does not exist: https://localhost:3000"
-fi
-
-mkdir -p ./tests/benchmarks/.lighthouseci
-
-# Create a lighthouserc.js file
-cat << EOF > lighthouserc.js
-module.exports = {
-  ci: {
-    collect: {
-      isSinglePageApplication: true,
-      numberOfRuns: 1,
-      url: ['http://localhost:3000', "http://localhost:3000/docs/getting-started/introduction/", "http://localhost:3000/blog/2023-08-02-seed-annoucement/"]
-    },
-    upload: {
-      target: 'filesystem',
-      "outputDir": "./integration/benchmarks/.lighthouseci"
-    },
-  },
-};
-EOF
-
-# Install and Run LHCI
-npm install -g @lhci/cli
-lhci autorun
-
-# Check to see if the LHCI report is generated
-if [ -d "./integration/benchmarks/.lighthouseci" ] && [ "$(ls -A ./integration/benchmarks/.lighthouseci)" ]; then
-  echo "LHCI report generated"
-else
-  echo "LHCI report not generated"
-  exit 1 # Exits the script with a status of 1, which will cause the GitHub Action to stop
-fi

benchmarks/utils.py
DELETED

@@ -1,74 +0,0 @@
-"""Utility functions for the benchmarks."""
-
-import os
-import subprocess
-from pathlib import Path
-
-import httpx
-from httpx import HTTPError
-
-
-def get_python_version(venv_path: Path, os_name):
-    """Get the python version of python in a virtual env.
-
-    Args:
-        venv_path: Path to virtual environment.
-        os_name: Name of os.
-
-    Returns:
-        The python version.
-    """
-    python_executable = (
-        venv_path / "bin" / "python"
-        if "windows" not in os_name
-        else venv_path / "Scripts" / "python.exe"
-    )
-    try:
-        output = subprocess.check_output(
-            [str(python_executable), "--version"], stderr=subprocess.STDOUT
-        )
-        python_version = output.decode("utf-8").strip().split()[1]
-        return ".".join(python_version.split(".")[:-1])
-    except subprocess.CalledProcessError:
-        return None
-
-
-def get_directory_size(directory: Path):
-    """Get the size of a directory in bytes.
-
-    Args:
-        directory: The directory to check.
-
-    Returns:
-        The size of the dir in bytes.
-    """
-    total_size = 0
-    for dirpath, _, filenames in os.walk(directory):
-        for f in filenames:
-            fp = Path(dirpath) / f
-            total_size += fp.stat().st_size
-    return total_size
-
-
-def send_data_to_posthog(event, properties):
-    """Send data to PostHog.
-
-    Args:
-        event: The event to send.
-        properties: The properties to send.
-
-    Raises:
-        HTTPError: When there is an error sending data to PostHog.
-    """
-    event_data = {
-        "api_key": "phc_JoMo0fOyi0GQAooY3UyO9k0hebGkMyFJrrCw1Gt5SGb",
-        "event": event,
-        "properties": properties,
-    }
-
-    with httpx.Client() as client:
-        response = client.post("https://app.posthog.com/capture/", json=event_data)
-        if response.status_code != 200:
-            raise HTTPError(
-                f"Error sending data to PostHog: {response.status_code} - {response.text}"
-            )

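All of the deleted benchmark scripts reported through `utils.send_data_to_posthog`, which posts one capture event to PostHog's public `/capture/` endpoint with the project API key, an event name, and a properties dict whose `distinct_id` is the commit SHA. A minimal usage-oriented sketch; the API key is a placeholder:

```python
import httpx


def send_data_to_posthog(event: str, properties: dict) -> None:
    """Post one capture event to PostHog; raise on a non-200 response."""
    payload = {
        "api_key": "<project-api-key>",  # placeholder; the deleted script embedded reflex's key
        "event": event,
        "properties": properties,  # must include "distinct_id"
    }
    response = httpx.post("https://app.posthog.com/capture/", json=payload)
    if response.status_code != 200:
        raise httpx.HTTPError(
            f"Error sending data to PostHog: {response.status_code} - {response.text}"
        )


# Example: send_data_to_posthog("package_size", {"distinct_id": "abc123", "size_mb": 42.0})
```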
{reflex-0.7.2.dev1.dist-info/licenses → reflex-0.7.2.post1.dist-info}/LICENSE
File without changes
|