reflex 0.7.2a2__py3-none-any.whl → 0.7.3a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of reflex might be problematic. Click here for more details.
- benchmarks/__init__.py +3 -0
- benchmarks/benchmark_compile_times.py +147 -0
- benchmarks/benchmark_imports.py +128 -0
- benchmarks/benchmark_lighthouse.py +75 -0
- benchmarks/benchmark_package_size.py +135 -0
- benchmarks/benchmark_web_size.py +106 -0
- benchmarks/conftest.py +20 -0
- benchmarks/lighthouse.sh +77 -0
- benchmarks/utils.py +74 -0
- reflex/.templates/jinja/web/pages/custom_component.js.jinja2 +6 -3
- reflex/.templates/web/components/reflex/radix_themes_color_mode_provider.js +1 -1
- reflex/.templates/web/components/shiki/code.js +26 -21
- reflex/.templates/web/postcss.config.js +1 -1
- reflex/.templates/web/utils/client_side_routing.js +18 -16
- reflex/.templates/web/utils/helpers/dataeditor.js +1 -1
- reflex/.templates/web/utils/helpers/range.js +30 -30
- reflex/.templates/web/utils/state.js +44 -22
- reflex/app_mixins/middleware.py +7 -7
- reflex/compiler/compiler.py +7 -0
- reflex/components/core/banner.py +1 -1
- reflex/components/core/foreach.py +3 -2
- reflex/components/datadisplay/logo.py +1 -1
- reflex/components/el/__init__.pyi +22 -0
- reflex/components/el/elements/__init__.py +20 -1
- reflex/components/el/elements/__init__.pyi +42 -1
- reflex/components/el/elements/media.py +11 -0
- reflex/components/el/elements/media.pyi +11 -0
- reflex/components/lucide/icon.py +8 -6
- reflex/components/lucide/icon.pyi +0 -1
- reflex/components/radix/themes/components/slider.py +2 -1
- reflex/config.py +13 -7
- reflex/custom_components/custom_components.py +23 -25
- reflex/event.py +7 -2
- reflex/istate/data.py +59 -7
- reflex/reflex.py +75 -32
- reflex/state.py +3 -3
- reflex/style.py +3 -3
- reflex/testing.py +4 -10
- reflex/utils/exceptions.py +31 -1
- reflex/utils/exec.py +4 -8
- reflex/utils/export.py +2 -2
- reflex/utils/prerequisites.py +3 -45
- reflex/utils/pyi_generator.py +2 -2
- reflex/utils/redir.py +3 -12
- reflex/vars/base.py +26 -3
- reflex/vars/number.py +22 -21
- reflex/vars/object.py +29 -0
- reflex/vars/sequence.py +37 -0
- {reflex-0.7.2a2.dist-info → reflex-0.7.3a1.dist-info}/METADATA +52 -66
- {reflex-0.7.2a2.dist-info → reflex-0.7.3a1.dist-info}/RECORD +53 -44
- {reflex-0.7.2a2.dist-info → reflex-0.7.3a1.dist-info}/WHEEL +1 -1
- reflex-0.7.3a1.dist-info/entry_points.txt +5 -0
- reflex-0.7.2a2.dist-info/entry_points.txt +0 -3
- {reflex-0.7.2a2.dist-info → reflex-0.7.3a1.dist-info/licenses}/LICENSE +0 -0
benchmarks/__init__.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
"""Extracts the compile times from the JSON files in the specified directory and inserts them into the database."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import argparse
|
|
6
|
+
import json
|
|
7
|
+
import os
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
from utils import send_data_to_posthog
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def extract_stats_from_json(json_file: str) -> list[dict]:
    """Extracts the stats from the JSON data and returns them as a list of dictionaries.

    Args:
        json_file: The JSON file to extract the stats data from.

    Returns:
        list[dict]: The stats for each test.
    """
    with Path(json_file).open() as file:
        json_data = json.load(file)

    # Load the JSON data if it is a string, otherwise assume it's already a dictionary
    data = json.loads(json_data) if isinstance(json_data, str) else json_data

    # Initialize an empty list to store the stats for each test
    test_stats = []

    # Iterate over each test in the 'benchmarks' list
    for test in data.get("benchmarks", []):
        group = test.get("group", None)
        stats = test.get("stats", {})
        full_name = test.get("fullname")
        # Derive the bare module name from e.g. "path/test_foo.py::test_bar".
        # NOTE: the original used .strip(".py"), which strips the *characters*
        # '.', 'p', 'y' from both ends and mangles names such as
        # "test_happy" -> "test_hap"; removesuffix drops only a literal
        # trailing ".py".
        file_name = (
            full_name.split("/")[-1].split("::")[0].removesuffix(".py")
            if full_name
            else None
        )
        test_name = test.get("name", "Unknown Test")

        test_stats.append(
            {
                "test_name": test_name,
                "group": group,
                "stats": stats,
                "full_name": full_name,
                "file_name": file_name,
            }
        )
    return test_stats
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def insert_benchmarking_data(
    os_type_version: str,
    python_version: str,
    performance_data: list[dict],
    commit_sha: str,
    pr_title: str,
    branch_name: str,
    event_type: str,
    pr_id: str,
):
    """Send the collected benchmark stats to PostHog.

    Args:
        os_type_version: The OS type and version to insert.
        python_version: The Python version to insert.
        performance_data: The performance data of reflex web to insert.
        commit_sha: The commit SHA to insert.
        pr_title: The PR title to insert.
        branch_name: The name of the branch.
        event_type: Type of github event(push, pull request, etc).
        pr_id: Id of the PR.
    """
    # Assemble the PostHog event payload; the commit SHA doubles as the
    # distinct id so runs are grouped per commit.
    send_data_to_posthog(
        "simple_app_benchmark",
        {
            "os": os_type_version,
            "python_version": python_version,
            "distinct_id": commit_sha,
            "pr_title": pr_title,
            "branch_name": branch_name,
            "event_type": event_type,
            "performance": performance_data,
            "pr_id": pr_id,
        },
    )
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def main():
    """Parse CLI arguments and upload the compile-time benchmark results."""
    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
    # Optional metadata flags.
    for flag, description in (
        ("--os", "The OS type and version to insert into the database."),
        ("--python-version", "The Python version to insert into the database."),
        ("--commit-sha", "The commit SHA to insert into the database."),
        ("--benchmark-json", "The JSON file containing the benchmark results."),
        ("--pr-title", "The PR title to insert into the database."),
    ):
        parser.add_argument(flag, help=description)
    # Required CI context flags.
    for flag, description in (
        ("--branch-name", "The current branch"),
        ("--event-type", "The github event type"),
        ("--pr-id", "ID of the PR."),
    ):
        parser.add_argument(flag, help=description, required=True)
    args = parser.parse_args()

    # Push/merge events carry no PR title on the CLI; fall back to the env var.
    pr_title = args.pr_title or os.getenv("PR_TITLE", "")

    # Extract the pytest benchmark stats and ship them to PostHog.
    insert_benchmarking_data(
        os_type_version=args.os,
        python_version=args.python_version,
        performance_data=extract_stats_from_json(args.benchmark_json),
        commit_sha=args.commit_sha,
        pr_title=pr_title,
        branch_name=args.branch_name,
        event_type=args.event_type,
        pr_id=args.pr_id,
    )


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
"""Extract and upload benchmarking data to PostHog."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import argparse
|
|
6
|
+
import json
|
|
7
|
+
import os
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
from utils import send_data_to_posthog
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def extract_stats_from_json(json_file: str) -> dict:
    """Extracts the stats from the JSON data and returns them as dictionaries.

    Args:
        json_file: The JSON file to extract the stats data from.

    Returns:
        dict: The stats for each test.
    """
    with Path(json_file).open() as file:
        json_data = json.load(file)

    # Load the JSON data if it is a string, otherwise assume it's already a dictionary
    data = json.loads(json_data) if isinstance(json_data, str) else json_data

    # Guard against a missing *or empty* "results" list: the original
    # `data.get("results", [{}])[0]` raised IndexError when the benchmark
    # output contained `"results": []`.
    result = (data.get("results") or [{}])[0]
    return {
        k: v
        for k, v in result.items()
        if k in ("mean", "stddev", "median", "min", "max")
    }
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def insert_benchmarking_data(
    os_type_version: str,
    python_version: str,
    performance_data: dict,
    commit_sha: str,
    pr_title: str,
    branch_name: str,
    pr_id: str,
    app_name: str,
):
    """Send the import benchmark stats to PostHog.

    Args:
        os_type_version: The OS type and version to insert.
        python_version: The Python version to insert.
        performance_data: The imports performance data to insert.
        commit_sha: The commit SHA to insert.
        pr_title: The PR title to insert.
        branch_name: The name of the branch.
        pr_id: Id of the PR.
        app_name: The name of the app being measured.
    """
    # The commit SHA serves as the PostHog distinct id, grouping runs per commit.
    send_data_to_posthog(
        "import_benchmark",
        {
            "os": os_type_version,
            "python_version": python_version,
            "distinct_id": commit_sha,
            "pr_title": pr_title,
            "branch_name": branch_name,
            "pr_id": pr_id,
            "performance": performance_data,
            "app_name": app_name,
        },
    )
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def main():
    """Parse CLI arguments and upload the import benchmark to PostHog."""
    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
    # Optional metadata flags.
    for flag, description in (
        ("--os", "The OS type and version to insert into the database."),
        ("--python-version", "The Python version to insert into the database."),
        ("--commit-sha", "The commit SHA to insert into the database."),
        ("--benchmark-json", "The JSON file containing the benchmark results."),
        ("--pr-title", "The PR title to insert into the database."),
    ):
        parser.add_argument(flag, help=description)
    # Required CI context flags.
    for flag, description in (
        ("--branch-name", "The current branch"),
        ("--app-name", "The name of the app measured."),
        ("--pr-id", "ID of the PR."),
    ):
        parser.add_argument(flag, help=description, required=True)
    args = parser.parse_args()

    # Push/merge events carry no PR title on the CLI; fall back to the env var.
    pr_title = args.pr_title or os.getenv("PR_TITLE", "")

    insert_benchmarking_data(
        os_type_version=args.os,
        python_version=args.python_version,
        performance_data=extract_stats_from_json(args.benchmark_json),
        commit_sha=args.commit_sha,
        pr_title=pr_title,
        branch_name=args.branch_name,
        app_name=args.app_name,
        pr_id=args.pr_id,
    )


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
"""Extracts the Lighthouse scores from the JSON files in the specified directory and inserts them into the database."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import sys
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
from utils import send_data_to_posthog
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def insert_benchmarking_data(
    lighthouse_data: dict,
    commit_sha: str,
):
    """Upload the Lighthouse scores for one commit to PostHog.

    Args:
        lighthouse_data: The Lighthouse data to insert.
        commit_sha: The commit SHA to insert.
    """
    # The commit SHA serves as the PostHog distinct id for this run.
    send_data_to_posthog(
        "lighthouse_benchmark",
        {
            "distinct_id": commit_sha,
            "lighthouse_data": lighthouse_data,
        },
    )
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def get_lighthouse_scores(directory_path: str | Path) -> dict:
    """Extracts the Lighthouse scores from the JSON files in the specified directory.

    Args:
        directory_path: The path to the directory containing the JSON files.

    Returns:
        dict: The Lighthouse scores keyed by page path, or {"error": message}
        if reading any report fails.
    """
    scores = {}
    directory_path = Path(directory_path)
    try:
        for filename in directory_path.iterdir():
            # Skip the run manifest; only per-page reports carry scores.
            if filename.suffix == ".json" and filename.stem != "manifest":
                data = json.loads(filename.read_text())
                # Extract scores and add them to the dictionary with the page path as key
                scores[data["finalUrl"].replace("http://localhost:3000/", "/")] = {
                    "performance_score": data["categories"]["performance"]["score"],
                    "accessibility_score": data["categories"]["accessibility"]["score"],
                    "best_practices_score": data["categories"]["best-practices"][
                        "score"
                    ],
                    "seo_score": data["categories"]["seo"]["score"],
                }
    except Exception as e:
        # Return the message, not the exception object: the original returned
        # `e` itself, which is not JSON-serializable when the result is
        # shipped to PostHog.
        return {"error": str(e)}

    return scores
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def main():
    """Runs the benchmarks and inserts the results into the database."""
    # Positional CLI args: <commit_sha> <json_dir>.
    commit_sha, json_dir = sys.argv[1], sys.argv[2]

    # Collect the per-page Lighthouse scores and ship them to PostHog.
    insert_benchmarking_data(get_lighthouse_scores(json_dir), commit_sha)


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
"""Checks the size of a specific directory and uploads result to Posthog."""
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
import os
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from utils import get_directory_size, get_python_version, send_data_to_posthog
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def get_package_size(venv_path: Path, os_name):
    """Return the total size of the site-packages directory inside a venv.

    Args:
        venv_path: The path to the venv.
        os_name: Name of os.

    Returns:
        The total size of the package in bytes.

    Raises:
        ValueError: when venv does not exist or python version is None.
    """
    python_version = get_python_version(venv_path, os_name)
    print("Python version:", python_version)
    if python_version is None:
        raise ValueError("Error: Failed to determine Python version.")

    # Windows venvs place site-packages under Lib/, POSIX under
    # lib/pythonX.Y/.
    if "windows" in os_name:
        package_dir: Path = venv_path / "Lib" / "site-packages"
    else:
        package_dir: Path = (
            venv_path / "lib" / f"python{python_version}" / "site-packages"
        )
    if not package_dir.exists():
        raise ValueError(
            "Error: Virtual environment does not exist or is not activated."
        )

    return get_directory_size(package_dir)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def insert_benchmarking_data(
    os_type_version: str,
    python_version: str,
    commit_sha: str,
    pr_title: str,
    branch_name: str,
    pr_id: str,
    path: str,
):
    """Measure the size at *path* and upload the result to PostHog.

    Args:
        os_type_version: The OS type and version to insert.
        python_version: The Python version to insert.
        commit_sha: The commit SHA to insert.
        pr_title: The PR title to insert.
        branch_name: The name of the branch.
        pr_id: The id of the PR.
        path: The path to the dir or file to check size.
    """
    # "./dist" paths are measured directly; anything else is treated as a
    # venv whose site-packages size is computed.
    target = Path(path)
    size = (
        get_directory_size(target)
        if "./dist" in path
        else get_package_size(target, os_type_version)
    )

    # Report in MB, rounded to 3 decimal places.
    send_data_to_posthog(
        "package_size",
        {
            "path": path,
            "os": os_type_version,
            "python_version": python_version,
            "distinct_id": commit_sha,
            "pr_title": pr_title,
            "branch_name": branch_name,
            "pr_id": pr_id,
            "size_mb": round(size / (1024 * 1024), 3),
        },
    )
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def main():
    """Runs the benchmarks and inserts the results."""
    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
    parser.add_argument(
        "--os", help="The OS type and version to insert into the database."
    )
    parser.add_argument(
        "--python-version", help="The Python version to insert into the database."
    )
    parser.add_argument(
        "--commit-sha", help="The commit SHA to insert into the database."
    )
    parser.add_argument(
        "--pr-title",
        help="The PR title to insert into the database.",
    )
    parser.add_argument(
        "--branch-name",
        help="The current branch",
        required=True,
    )
    parser.add_argument(
        "--pr-id",
        help="The pr id",
        required=True,
    )
    parser.add_argument(
        "--path",
        # Fixed typo in user-facing help text: "vnenv" -> "venv".
        help="The path to the venv.",
        required=True,
    )
    args = parser.parse_args()

    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
    pr_title = args.pr_title or os.getenv("PR_TITLE", "")

    # Insert the data into the database
    insert_benchmarking_data(
        os_type_version=args.os,
        python_version=args.python_version,
        commit_sha=args.commit_sha,
        pr_title=pr_title,
        branch_name=args.branch_name,
        pr_id=args.pr_id,
        path=args.path,
    )


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
"""Checks the size of a specific directory and uploads result to Posthog."""
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
import os
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from utils import get_directory_size, send_data_to_posthog
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def insert_benchmarking_data(
    os_type_version: str,
    python_version: str,
    app_name: str,
    commit_sha: str,
    pr_title: str,
    branch_name: str,
    pr_id: str,
    path: str,
):
    """Measure the on-disk size of the built web app and upload it to PostHog.

    Args:
        os_type_version: The OS type and version to insert.
        python_version: The Python version to insert.
        app_name: The name of the app being measured.
        commit_sha: The commit SHA to insert.
        pr_title: The PR title to insert.
        branch_name: The name of the branch.
        pr_id: The id of the PR.
        path: The path to the dir or file to check size.
    """
    total_bytes = get_directory_size(Path(path))
    # Report in MB, rounded to 3 decimal places.
    size_mb = round(total_bytes / (1024 * 1024), 3)

    send_data_to_posthog(
        "web-size",
        {
            "app_name": app_name,
            "os": os_type_version,
            "python_version": python_version,
            "distinct_id": commit_sha,
            "pr_title": pr_title,
            "branch_name": branch_name,
            "pr_id": pr_id,
            "size_mb": size_mb,
        },
    )
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def main():
    """Parse CLI arguments and upload the web app size to PostHog."""
    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
    # Optional metadata flags.
    for flag, description in (
        ("--os", "The OS type and version to insert into the database."),
        ("--python-version", "The Python version to insert into the database."),
        ("--commit-sha", "The commit SHA to insert into the database."),
        ("--pr-title", "The PR title to insert into the database."),
    ):
        parser.add_argument(flag, help=description)
    # Required CI context flags.
    for flag, description in (
        ("--branch-name", "The current branch"),
        ("--app-name", "The name of the app measured."),
        ("--pr-id", "The pr id"),
        ("--path", "The current path to app to check."),
    ):
        parser.add_argument(flag, help=description, required=True)
    args = parser.parse_args()

    # Push/merge events carry no PR title on the CLI; fall back to the env var.
    pr_title = args.pr_title or os.getenv("PR_TITLE", "")

    insert_benchmarking_data(
        app_name=args.app_name,
        os_type_version=args.os,
        python_version=args.python_version,
        commit_sha=args.commit_sha,
        pr_title=pr_title,
        branch_name=args.branch_name,
        pr_id=args.pr_id,
        path=args.path,
    )


if __name__ == "__main__":
    main()
|
benchmarks/conftest.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
"""Shared conftest for all benchmark tests."""
|
|
2
|
+
|
|
3
|
+
import pytest
|
|
4
|
+
|
|
5
|
+
from reflex.testing import AppHarness, AppHarnessProd
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
# Session-scoped so each harness flavor (dev/prod) is set up once per run.
@pytest.fixture(
    scope="session", params=[AppHarness, AppHarnessProd], ids=["dev", "prod"]
)
def app_harness_env(request):
    """Parametrize the AppHarness class to use for the test, either dev or prod.

    Args:
        request: The pytest fixture request object.

    Returns:
        The AppHarness class to use for the test.
    """
    return request.param
|
benchmarks/lighthouse.sh
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
#!/bin/bash
# Run a reflex app, wait for it to listen, then collect Lighthouse CI reports.
# Usage: lighthouse.sh <project_dir> <env_mode> [check_ports] [extra reflex args...]

# Change directory to the first argument passed to the script
project_dir=$1
shift
pushd "$project_dir" || exit 1
echo "Changed directory to $project_dir"


# So we get stdout / stderr from Python ASAP. Without this, delays can be very long (e.g. on Windows, Github Actions)
export PYTHONUNBUFFERED=1

env_mode=$1
shift
check_ports=${1:-3000 8000}
shift

# Start the server in the background
export TELEMETRY_ENABLED=false
reflex run --env "$env_mode" "$@" & pid=$!

# Within the context of this bash, $pid_in_bash is what we need to pass to "kill" on exit
# This is true on all platforms.
pid_in_bash=$pid
trap "kill -INT $pid_in_bash ||:" EXIT

echo "Started server with PID $pid"

# Assume we run from the root of the repo
popd

# In Windows, our Python script below needs to work with the WINPID
if [ -f /proc/$pid/winpid ]; then
  pid=$(cat /proc/$pid/winpid)
  echo "Windows detected, passing winpid $pid to port waiter"
fi

python scripts/wait_for_listening_port.py $check_ports --timeout=600 --server-pid "$pid"


# Check if something is running on port 3000
if curl --output /dev/null --silent --head --fail "http://localhost:3000"; then
  echo "URL exists: http://localhost:3000"
else
  # Fixed: message previously said "https://..." although the probe above is http.
  echo "URL does not exist: http://localhost:3000"
fi

mkdir -p ./tests/benchmarks/.lighthouseci

# Create a lighthouserc.js file
cat << EOF > lighthouserc.js
module.exports = {
  ci: {
    collect: {
      isSinglePageApplication: true,
      numberOfRuns: 1,
      url: ['http://localhost:3000', "http://localhost:3000/docs/getting-started/introduction/", "http://localhost:3000/blog/2023-08-02-seed-annoucement/"]
    },
    upload: {
      target: 'filesystem',
      "outputDir": "./integration/benchmarks/.lighthouseci"
    },
  },
};
EOF

# Install and Run LHCI
npm install -g @lhci/cli
lhci autorun

# Check to see if the LHCI report is generated
if [ -d "./integration/benchmarks/.lighthouseci" ] && [ "$(ls -A ./integration/benchmarks/.lighthouseci)" ]; then
  echo "LHCI report generated"
else
  echo "LHCI report not generated"
  exit 1 # Exits the script with a status of 1, which will cause the GitHub Action to stop
fi
|