reflex 0.7.2a2__py3-none-any.whl → 0.7.2.dev1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- benchmarks/__init__.py +3 -0
- benchmarks/benchmark_compile_times.py +147 -0
- benchmarks/benchmark_imports.py +128 -0
- benchmarks/benchmark_lighthouse.py +75 -0
- benchmarks/benchmark_package_size.py +135 -0
- benchmarks/benchmark_web_size.py +106 -0
- benchmarks/conftest.py +20 -0
- benchmarks/lighthouse.sh +77 -0
- benchmarks/utils.py +74 -0
- reflex/reflex.py +6 -2
- reflex/style.py +3 -3
- reflex/utils/prerequisites.py +3 -45
- reflex/utils/pyi_generator.py +2 -2
- reflex/utils/redir.py +3 -12
- reflex/vars/base.py +3 -2
- {reflex-0.7.2a2.dist-info → reflex-0.7.2.dev1.dist-info}/METADATA +39 -46
- {reflex-0.7.2a2.dist-info → reflex-0.7.2.dev1.dist-info}/RECORD +20 -11
- {reflex-0.7.2a2.dist-info → reflex-0.7.2.dev1.dist-info}/WHEEL +1 -1
- reflex-0.7.2.dev1.dist-info/entry_points.txt +5 -0
- reflex-0.7.2a2.dist-info/entry_points.txt +0 -3
- {reflex-0.7.2a2.dist-info → reflex-0.7.2.dev1.dist-info/licenses}/LICENSE +0 -0
benchmarks/__init__.py
ADDED

benchmarks/benchmark_compile_times.py
ADDED
@@ -0,0 +1,147 @@
+"""Extracts the compile times from the JSON files in the specified directory and inserts them into the database."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+from pathlib import Path
+
+from utils import send_data_to_posthog
+
+
+def extract_stats_from_json(json_file: str) -> list[dict]:
+    """Extracts the stats from the JSON data and returns them as a list of dictionaries.
+
+    Args:
+        json_file: The JSON file to extract the stats data from.
+
+    Returns:
+        list[dict]: The stats for each test.
+    """
+    with Path(json_file).open() as file:
+        json_data = json.load(file)
+
+    # Load the JSON data if it is a string, otherwise assume it's already a dictionary
+    data = json.loads(json_data) if isinstance(json_data, str) else json_data
+
+    # Initialize an empty list to store the stats for each test
+    test_stats = []
+
+    # Iterate over each test in the 'benchmarks' list
+    for test in data.get("benchmarks", []):
+        group = test.get("group", None)
+        stats = test.get("stats", {})
+        full_name = test.get("fullname")
+        file_name = (
+            full_name.split("/")[-1].split("::")[0].strip(".py") if full_name else None
+        )
+        test_name = test.get("name", "Unknown Test")
+
+        test_stats.append(
+            {
+                "test_name": test_name,
+                "group": group,
+                "stats": stats,
+                "full_name": full_name,
+                "file_name": file_name,
+            }
+        )
+    return test_stats
+
+
+def insert_benchmarking_data(
+    os_type_version: str,
+    python_version: str,
+    performance_data: list[dict],
+    commit_sha: str,
+    pr_title: str,
+    branch_name: str,
+    event_type: str,
+    pr_id: str,
+):
+    """Insert the benchmarking data into the database.
+
+    Args:
+        os_type_version: The OS type and version to insert.
+        python_version: The Python version to insert.
+        performance_data: The performance data of reflex web to insert.
+        commit_sha: The commit SHA to insert.
+        pr_title: The PR title to insert.
+        branch_name: The name of the branch.
+        event_type: Type of github event(push, pull request, etc).
+        pr_id: Id of the PR.
+    """
+    # Prepare the event data
+    properties = {
+        "os": os_type_version,
+        "python_version": python_version,
+        "distinct_id": commit_sha,
+        "pr_title": pr_title,
+        "branch_name": branch_name,
+        "event_type": event_type,
+        "performance": performance_data,
+        "pr_id": pr_id,
+    }
+
+    send_data_to_posthog("simple_app_benchmark", properties)
+
+
+def main():
+    """Runs the benchmarks and inserts the results."""
+    # Get the commit SHA and JSON directory from the command line arguments
+    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
+    parser.add_argument(
+        "--os", help="The OS type and version to insert into the database."
+    )
+    parser.add_argument(
+        "--python-version", help="The Python version to insert into the database."
+    )
+    parser.add_argument(
+        "--commit-sha", help="The commit SHA to insert into the database."
+    )
+    parser.add_argument(
+        "--benchmark-json",
+        help="The JSON file containing the benchmark results.",
+    )
+    parser.add_argument(
+        "--pr-title",
+        help="The PR title to insert into the database.",
+    )
+    parser.add_argument(
+        "--branch-name",
+        help="The current branch",
+        required=True,
+    )
+    parser.add_argument(
+        "--event-type",
+        help="The github event type",
+        required=True,
+    )
+    parser.add_argument(
+        "--pr-id",
+        help="ID of the PR.",
+        required=True,
+    )
+    args = parser.parse_args()
+
+    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
+    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
+
+    # Get the results of pytest benchmarks
+    cleaned_benchmark_results = extract_stats_from_json(args.benchmark_json)
+    # Insert the data into the database
+    insert_benchmarking_data(
+        os_type_version=args.os,
+        python_version=args.python_version,
+        performance_data=cleaned_benchmark_results,
+        commit_sha=args.commit_sha,
+        pr_title=pr_title,
+        branch_name=args.branch_name,
+        event_type=args.event_type,
+        pr_id=args.pr_id,
+    )
+
+
+if __name__ == "__main__":
+    main()
benchmarks/benchmark_imports.py
ADDED
@@ -0,0 +1,128 @@
+"""Extract and upload benchmarking data to PostHog."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+from pathlib import Path
+
+from utils import send_data_to_posthog
+
+
+def extract_stats_from_json(json_file: str) -> dict:
+    """Extracts the stats from the JSON data and returns them as dictionaries.
+
+    Args:
+        json_file: The JSON file to extract the stats data from.
+
+    Returns:
+        dict: The stats for each test.
+    """
+    with Path(json_file).open() as file:
+        json_data = json.load(file)
+
+    # Load the JSON data if it is a string, otherwise assume it's already a dictionary
+    data = json.loads(json_data) if isinstance(json_data, str) else json_data
+
+    result = data.get("results", [{}])[0]
+    return {
+        k: v
+        for k, v in result.items()
+        if k in ("mean", "stddev", "median", "min", "max")
+    }
+
+
+def insert_benchmarking_data(
+    os_type_version: str,
+    python_version: str,
+    performance_data: dict,
+    commit_sha: str,
+    pr_title: str,
+    branch_name: str,
+    pr_id: str,
+    app_name: str,
+):
+    """Insert the benchmarking data into the database.
+
+    Args:
+        os_type_version: The OS type and version to insert.
+        python_version: The Python version to insert.
+        performance_data: The imports performance data to insert.
+        commit_sha: The commit SHA to insert.
+        pr_title: The PR title to insert.
+        branch_name: The name of the branch.
+        pr_id: Id of the PR.
+        app_name: The name of the app being measured.
+    """
+    properties = {
+        "os": os_type_version,
+        "python_version": python_version,
+        "distinct_id": commit_sha,
+        "pr_title": pr_title,
+        "branch_name": branch_name,
+        "pr_id": pr_id,
+        "performance": performance_data,
+        "app_name": app_name,
+    }
+
+    send_data_to_posthog("import_benchmark", properties)
+
+
+def main():
+    """Runs the benchmarks and inserts the results."""
+    # Get the commit SHA and JSON directory from the command line arguments
+    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
+    parser.add_argument(
+        "--os", help="The OS type and version to insert into the database."
+    )
+    parser.add_argument(
+        "--python-version", help="The Python version to insert into the database."
+    )
+    parser.add_argument(
+        "--commit-sha", help="The commit SHA to insert into the database."
+    )
+    parser.add_argument(
+        "--benchmark-json",
+        help="The JSON file containing the benchmark results.",
+    )
+    parser.add_argument(
+        "--pr-title",
+        help="The PR title to insert into the database.",
+    )
+    parser.add_argument(
+        "--branch-name",
+        help="The current branch",
+        required=True,
+    )
+    parser.add_argument(
+        "--app-name",
+        help="The name of the app measured.",
+        required=True,
+    )
+    parser.add_argument(
+        "--pr-id",
+        help="ID of the PR.",
+        required=True,
+    )
+    args = parser.parse_args()
+
+    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
+    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
+
+    cleaned_benchmark_results = extract_stats_from_json(args.benchmark_json)
+    # Insert the data into the database
+    insert_benchmarking_data(
+        os_type_version=args.os,
+        python_version=args.python_version,
+        performance_data=cleaned_benchmark_results,
+        commit_sha=args.commit_sha,
+        pr_title=pr_title,
+        branch_name=args.branch_name,
+        app_name=args.app_name,
+        pr_id=args.pr_id,
+    )
+
+
+if __name__ == "__main__":
+    main()
benchmarks/benchmark_lighthouse.py
ADDED
@@ -0,0 +1,75 @@
+"""Extracts the Lighthouse scores from the JSON files in the specified directory and inserts them into the database."""
+
+from __future__ import annotations
+
+import json
+import sys
+from pathlib import Path
+
+from utils import send_data_to_posthog
+
+
+def insert_benchmarking_data(
+    lighthouse_data: dict,
+    commit_sha: str,
+):
+    """Insert the benchmarking data into the database.
+
+    Args:
+        lighthouse_data: The Lighthouse data to insert.
+        commit_sha: The commit SHA to insert.
+    """
+    properties = {
+        "distinct_id": commit_sha,
+        "lighthouse_data": lighthouse_data,
+    }
+
+    # Send the data to PostHog
+    send_data_to_posthog("lighthouse_benchmark", properties)
+
+
+def get_lighthouse_scores(directory_path: str | Path) -> dict:
+    """Extracts the Lighthouse scores from the JSON files in the specified directory.
+
+    Args:
+        directory_path (str): The path to the directory containing the JSON files.
+
+    Returns:
+        dict: The Lighthouse scores.
+    """
+    scores = {}
+    directory_path = Path(directory_path)
+    try:
+        for filename in directory_path.iterdir():
+            if filename.suffix == ".json" and filename.stem != "manifest":
+                data = json.loads(filename.read_text())
+                # Extract scores and add them to the dictionary with the filename as key
+                scores[data["finalUrl"].replace("http://localhost:3000/", "/")] = {
+                    "performance_score": data["categories"]["performance"]["score"],
+                    "accessibility_score": data["categories"]["accessibility"]["score"],
+                    "best_practices_score": data["categories"]["best-practices"][
+                        "score"
+                    ],
+                    "seo_score": data["categories"]["seo"]["score"],
+                }
+    except Exception as e:
+        return {"error": e}
+
+    return scores
+
+
+def main():
+    """Runs the benchmarks and inserts the results into the database."""
+    # Get the commit SHA and JSON directory from the command line arguments
+    commit_sha = sys.argv[1]
+    json_dir = sys.argv[2]
+
+    # Get the Lighthouse scores
+    lighthouse_scores = get_lighthouse_scores(json_dir)
+
+    # Insert the data into the database
+    insert_benchmarking_data(lighthouse_scores, commit_sha)
+
+
+if __name__ == "__main__":
+    main()
benchmarks/benchmark_package_size.py
ADDED
@@ -0,0 +1,135 @@
+"""Checks the size of a specific directory and uploads result to Posthog."""
+
+import argparse
+import os
+from pathlib import Path
+
+from utils import get_directory_size, get_python_version, send_data_to_posthog
+
+
+def get_package_size(venv_path: Path, os_name):
+    """Get the size of a specified package.
+
+    Args:
+        venv_path: The path to the venv.
+        os_name: Name of os.
+
+    Returns:
+        The total size of the package in bytes.
+
+    Raises:
+        ValueError: when venv does not exist or python version is None.
+    """
+    python_version = get_python_version(venv_path, os_name)
+    print("Python version:", python_version)
+    if python_version is None:
+        raise ValueError("Error: Failed to determine Python version.")
+
+    is_windows = "windows" in os_name
+
+    package_dir: Path = (
+        venv_path / "lib" / f"python{python_version}" / "site-packages"
+        if not is_windows
+        else venv_path / "Lib" / "site-packages"
+    )
+    if not package_dir.exists():
+        raise ValueError(
+            "Error: Virtual environment does not exist or is not activated."
+        )
+
+    total_size = get_directory_size(package_dir)
+    return total_size
+
+
+def insert_benchmarking_data(
+    os_type_version: str,
+    python_version: str,
+    commit_sha: str,
+    pr_title: str,
+    branch_name: str,
+    pr_id: str,
+    path: str,
+):
+    """Insert the benchmarking data into PostHog.
+
+    Args:
+        os_type_version: The OS type and version to insert.
+        python_version: The Python version to insert.
+        commit_sha: The commit SHA to insert.
+        pr_title: The PR title to insert.
+        branch_name: The name of the branch.
+        pr_id: The id of the PR.
+        path: The path to the dir or file to check size.
+    """
+    if "./dist" in path:
+        size = get_directory_size(Path(path))
+    else:
+        size = get_package_size(Path(path), os_type_version)
+
+    # Prepare the event data
+    properties = {
+        "path": path,
+        "os": os_type_version,
+        "python_version": python_version,
+        "distinct_id": commit_sha,
+        "pr_title": pr_title,
+        "branch_name": branch_name,
+        "pr_id": pr_id,
+        "size_mb": round(
+            size / (1024 * 1024), 3
+        ),  # save size in MB and round to 3 places
+    }
+
+    send_data_to_posthog("package_size", properties)
+
+
+def main():
+    """Runs the benchmarks and inserts the results."""
+    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
+    parser.add_argument(
+        "--os", help="The OS type and version to insert into the database."
+    )
+    parser.add_argument(
+        "--python-version", help="The Python version to insert into the database."
+    )
+    parser.add_argument(
+        "--commit-sha", help="The commit SHA to insert into the database."
+    )
+    parser.add_argument(
+        "--pr-title",
+        help="The PR title to insert into the database.",
+    )
+    parser.add_argument(
+        "--branch-name",
+        help="The current branch",
+        required=True,
+    )
+    parser.add_argument(
+        "--pr-id",
+        help="The pr id",
+        required=True,
+    )
+    parser.add_argument(
+        "--path",
+        help="The path to the vnenv.",
+        required=True,
+    )
+    args = parser.parse_args()
+
+    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
+    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
+
+    # Insert the data into the database
+    insert_benchmarking_data(
+        os_type_version=args.os,
+        python_version=args.python_version,
+        commit_sha=args.commit_sha,
+        pr_title=pr_title,
+        branch_name=args.branch_name,
+        pr_id=args.pr_id,
+        path=args.path,
+    )
+
+
+if __name__ == "__main__":
+    main()
benchmarks/benchmark_web_size.py
ADDED
@@ -0,0 +1,106 @@
+"""Checks the size of a specific directory and uploads result to Posthog."""
+
+import argparse
+import os
+from pathlib import Path
+
+from utils import get_directory_size, send_data_to_posthog
+
+
+def insert_benchmarking_data(
+    os_type_version: str,
+    python_version: str,
+    app_name: str,
+    commit_sha: str,
+    pr_title: str,
+    branch_name: str,
+    pr_id: str,
+    path: str,
+):
+    """Insert the benchmarking data into PostHog.
+
+    Args:
+        app_name: The name of the app being measured.
+        os_type_version: The OS type and version to insert.
+        python_version: The Python version to insert.
+        commit_sha: The commit SHA to insert.
+        pr_title: The PR title to insert.
+        branch_name: The name of the branch.
+        pr_id: The id of the PR.
+        path: The path to the dir or file to check size.
+    """
+    size = get_directory_size(Path(path))
+
+    # Prepare the event data
+    properties = {
+        "app_name": app_name,
+        "os": os_type_version,
+        "python_version": python_version,
+        "distinct_id": commit_sha,
+        "pr_title": pr_title,
+        "branch_name": branch_name,
+        "pr_id": pr_id,
+        "size_mb": round(
+            size / (1024 * 1024), 3
+        ),  # save size in MB and round to 3 places
+    }
+
+    send_data_to_posthog("web-size", properties)
+
+
+def main():
+    """Runs the benchmarks and inserts the results."""
+    parser = argparse.ArgumentParser(description="Run benchmarks and process results.")
+    parser.add_argument(
+        "--os", help="The OS type and version to insert into the database."
+    )
+    parser.add_argument(
+        "--python-version", help="The Python version to insert into the database."
+    )
+    parser.add_argument(
+        "--commit-sha", help="The commit SHA to insert into the database."
+    )
+    parser.add_argument(
+        "--pr-title",
+        help="The PR title to insert into the database.",
+    )
+    parser.add_argument(
+        "--branch-name",
+        help="The current branch",
+        required=True,
+    )
+    parser.add_argument(
+        "--app-name",
+        help="The name of the app measured.",
+        required=True,
+    )
+    parser.add_argument(
+        "--pr-id",
+        help="The pr id",
+        required=True,
+    )
+    parser.add_argument(
+        "--path",
+        help="The current path to app to check.",
+        required=True,
+    )
+    args = parser.parse_args()
+
+    # Get the PR title from env or the args. For the PR merge or push event, there is no PR title, leaving it empty.
+    pr_title = args.pr_title or os.getenv("PR_TITLE", "")
+
+    # Insert the data into the database
+    insert_benchmarking_data(
+        app_name=args.app_name,
+        os_type_version=args.os,
+        python_version=args.python_version,
+        commit_sha=args.commit_sha,
+        pr_title=pr_title,
+        branch_name=args.branch_name,
+        pr_id=args.pr_id,
+        path=args.path,
+    )
+
+
+if __name__ == "__main__":
+    main()
benchmarks/conftest.py
ADDED
@@ -0,0 +1,20 @@
+"""Shared conftest for all benchmark tests."""
+
+import pytest
+
+from reflex.testing import AppHarness, AppHarnessProd
+
+
+@pytest.fixture(
+    scope="session", params=[AppHarness, AppHarnessProd], ids=["dev", "prod"]
+)
+def app_harness_env(request):
+    """Parametrize the AppHarness class to use for the test, either dev or prod.
+
+    Args:
+        request: The pytest fixture request object.
+
+    Returns:
+        The AppHarness class to use for the test.
+    """
+    return request.param
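
Note: a benchmark module requesting this fixture runs once against AppHarness (dev) and once against AppHarnessProd (prod). A minimal sketch of a consuming test, with the body kept to an illustrative placeholder:

def test_harness_class_selected(app_harness_env):
    # The fixture yields a class, not an instance; a real benchmark would call
    # app_harness_env.create(...) to spin up the app under test and time it.
    assert app_harness_env.__name__ in ("AppHarness", "AppHarnessProd")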
benchmarks/lighthouse.sh
ADDED
@@ -0,0 +1,77 @@
+#!/bin/bash
+
+# Change directory to the first argument passed to the script
+project_dir=$1
+shift
+pushd "$project_dir" || exit 1
+echo "Changed directory to $project_dir"
+
+
+# So we get stdout / stderr from Python ASAP. Without this, delays can be very long (e.g. on Windows, Github Actions)
+export PYTHONUNBUFFERED=1
+
+env_mode=$1
+shift
+check_ports=${1:-3000 8000}
+shift
+
+# Start the server in the background
+export TELEMETRY_ENABLED=false
+reflex run --env "$env_mode" "$@" & pid=$!
+
+# Within the context of this bash, $pid_in_bash is what we need to pass to "kill" on exit
+# This is true on all platforms.
+pid_in_bash=$pid
+trap "kill -INT $pid_in_bash ||:" EXIT
+
+echo "Started server with PID $pid"
+
+# Assume we run from the root of the repo
+popd
+
+# In Windows, our Python script below needs to work with the WINPID
+if [ -f /proc/$pid/winpid ]; then
+    pid=$(cat /proc/$pid/winpid)
+    echo "Windows detected, passing winpid $pid to port waiter"
+fi
+
+python scripts/wait_for_listening_port.py $check_ports --timeout=600 --server-pid "$pid"
+
+
+# Check if something is running on port 3000
+if curl --output /dev/null --silent --head --fail "http://localhost:3000"; then
+    echo "URL exists: http://localhost:3000"
+else
+    echo "URL does not exist: https://localhost:3000"
+fi
+
+mkdir -p ./tests/benchmarks/.lighthouseci
+
+# Create a lighthouserc.js file
+cat << EOF > lighthouserc.js
+module.exports = {
+  ci: {
+    collect: {
+      isSinglePageApplication: true,
+      numberOfRuns: 1,
+      url: ['http://localhost:3000', "http://localhost:3000/docs/getting-started/introduction/", "http://localhost:3000/blog/2023-08-02-seed-annoucement/"]
+    },
+    upload: {
+      target: 'filesystem',
+      "outputDir": "./integration/benchmarks/.lighthouseci"
+    },
+  },
+};
+EOF
+
+# Install and Run LHCI
+npm install -g @lhci/cli
+lhci autorun
+
+# Check to see if the LHCI report is generated
+if [ -d "./integration/benchmarks/.lighthouseci" ] && [ "$(ls -A ./integration/benchmarks/.lighthouseci)" ]; then
+    echo "LHCI report generated"
+else
+    echo "LHCI report not generated"
+    exit 1 # Exits the script with a status of 1, which will cause the GitHub Action to stop
+fi
benchmarks/utils.py
ADDED
@@ -0,0 +1,74 @@
+"""Utility functions for the benchmarks."""
+
+import os
+import subprocess
+from pathlib import Path
+
+import httpx
+from httpx import HTTPError
+
+
+def get_python_version(venv_path: Path, os_name):
+    """Get the python version of python in a virtual env.
+
+    Args:
+        venv_path: Path to virtual environment.
+        os_name: Name of os.
+
+    Returns:
+        The python version.
+    """
+    python_executable = (
+        venv_path / "bin" / "python"
+        if "windows" not in os_name
+        else venv_path / "Scripts" / "python.exe"
+    )
+    try:
+        output = subprocess.check_output(
+            [str(python_executable), "--version"], stderr=subprocess.STDOUT
+        )
+        python_version = output.decode("utf-8").strip().split()[1]
+        return ".".join(python_version.split(".")[:-1])
+    except subprocess.CalledProcessError:
+        return None
+
+
+def get_directory_size(directory: Path):
+    """Get the size of a directory in bytes.
+
+    Args:
+        directory: The directory to check.
+
+    Returns:
+        The size of the dir in bytes.
+    """
+    total_size = 0
+    for dirpath, _, filenames in os.walk(directory):
+        for f in filenames:
+            fp = Path(dirpath) / f
+            total_size += fp.stat().st_size
+    return total_size
+
+
+def send_data_to_posthog(event, properties):
+    """Send data to PostHog.
+
+    Args:
+        event: The event to send.
+        properties: The properties to send.
+
+    Raises:
+        HTTPError: When there is an error sending data to PostHog.
+    """
+    event_data = {
+        "api_key": "phc_JoMo0fOyi0GQAooY3UyO9k0hebGkMyFJrrCw1Gt5SGb",
+        "event": event,
+        "properties": properties,
+    }
+
+    with httpx.Client() as client:
+        response = client.post("https://app.posthog.com/capture/", json=event_data)
+        if response.status_code != 200:
+            raise HTTPError(
+                f"Error sending data to PostHog: {response.status_code} - {response.text}"
+            )
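
Note: these three helpers are the shared plumbing for every benchmark script in this wheel. A minimal sketch of how they compose, with placeholder values standing in for the real CI inputs:

from pathlib import Path

from utils import get_directory_size, send_data_to_posthog

dist_size = get_directory_size(Path("./dist"))  # bytes on disk
send_data_to_posthog(
    "package_size",
    {
        "distinct_id": "0123abcd",  # commit SHA (placeholder)
        "os": "ubuntu-latest",
        "size_mb": round(dist_size / (1024 * 1024), 3),  # same rounding as the scripts
    },
)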
reflex/reflex.py
CHANGED
@@ -13,7 +13,7 @@ from reflex import constants
 from reflex.config import environment, get_config
 from reflex.custom_components.custom_components import custom_components_cli
 from reflex.state import reset_disk_state_manager
-from reflex.utils import console, telemetry
+from reflex.utils import console, redir, telemetry
 
 # Disable typer+rich integration for help panels
 typer.core.rich = None  # pyright: ignore [reportPrivateImportUsage]
@@ -70,6 +70,10 @@ def _init(
     # Show system info
     exec.output_system_info()
 
+    if ai:
+        redir.reflex_build_redirect()
+        return
+
     # Validate the app name.
     app_name = prerequisites.validate_app_name(name)
     console.rule(f"[bold]Initializing {app_name}")
@@ -83,7 +87,7 @@ def _init(
     prerequisites.initialize_frontend_dependencies()
 
     # Initialize the app.
-    template = prerequisites.initialize_app(app_name, template
+    template = prerequisites.initialize_app(app_name, template)
 
     # Initialize the .gitignore.
     prerequisites.initialize_gitignore()
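
Note: the net effect is that `--ai` now short-circuits `_init` before any template resolution happens. A condensed sketch of the new control flow, assuming a pared-down signature (the real command takes more options):

from reflex.utils import prerequisites, redir


def _init(name: str, template: str | None = None, ai: bool = False):
    if ai:
        # `--ai` just opens reflex.build in the browser and returns.
        redir.reflex_build_redirect()
        return

    app_name = prerequisites.validate_app_name(name)
    prerequisites.initialize_app(app_name, template)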
reflex/style.py
CHANGED
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import Any, Literal, Type
+from typing import Any, Literal, Mapping, Type
 
 from reflex import constants
 from reflex.components.core.breakpoints import Breakpoints, breakpoints_values
@@ -10,7 +10,7 @@ from reflex.event import EventChain, EventHandler, EventSpec, run_script
 from reflex.utils import format
 from reflex.utils.exceptions import ReflexError
 from reflex.utils.imports import ImportVar
-from reflex.utils.types import
+from reflex.utils.types import typehint_issubclass
 from reflex.vars import VarData
 from reflex.vars.base import LiteralVar, Var
 from reflex.vars.function import FunctionVar
@@ -189,7 +189,7 @@ def convert(
             or (isinstance(value, list) and all(not isinstance(v, dict) for v in value))
             or (
                 isinstance(value, ObjectVar)
-                and not
+                and not typehint_issubclass(value._var_type, Mapping)
             )
             else (key,)
         )
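
Note: the guard in `convert` now leans on `typehint_issubclass`, which, unlike a bare `issubclass`, is expected to handle typing generics. An illustrative check of that assumption (expected behavior, not verified against every reflex version):

from typing import Mapping

from reflex.utils.types import typehint_issubclass

# An ObjectVar whose _var_type is a dict-like annotation should count as a
# Mapping; a scalar annotation should not.
assert typehint_issubclass(dict[str, str], Mapping)
assert not typehint_issubclass(int, Mapping)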
reflex/utils/prerequisites.py
CHANGED
@@ -37,7 +37,7 @@ from redis.exceptions import RedisError
 from reflex import constants, model
 from reflex.compiler import templates
 from reflex.config import Config, environment, get_config
-from reflex.utils import console, net, path_ops, processes
+from reflex.utils import console, net, path_ops, processes
 from reflex.utils.exceptions import (
     GeneratedCodeHasNoFunctionDefsError,
     SystemPackageMissingError,
@@ -1695,31 +1695,6 @@ def validate_and_create_app_using_remote_template(
     )
 
 
-def generate_template_using_ai(template: str | None = None) -> str:
-    """Generate a template using AI(Flexgen).
-
-    Args:
-        template: The name of the template.
-
-    Returns:
-        The generation hash.
-
-    Raises:
-        Exit: If the template and ai flags are used.
-    """
-    if template is None:
-        # If AI is requested and no template specified, redirect the user to reflex.build.
-        return redir.reflex_build_redirect()
-    elif is_generation_hash(template):
-        # Otherwise treat the template as a generation hash.
-        return template
-    else:
-        console.error(
-            "Cannot use `--template` option with `--ai` option. Please remove `--template` option."
-        )
-        raise typer.Exit(2)
-
-
 def fetch_remote_templates(
     template: str,
 ) -> tuple[str, dict[str, Template]]:
@@ -1744,15 +1719,12 @@ def fetch_remote_templates(
     return template, available_templates
 
 
-def initialize_app(
-    app_name: str, template: str | None = None, ai: bool = False
-) -> str | None:
+def initialize_app(app_name: str, template: str | None = None) -> str | None:
     """Initialize the app either from a remote template or a blank app. If the config file exists, it is considered as reinit.
 
     Args:
         app_name: The name of the app.
         template: The name of the template to use.
-        ai: Whether to use AI to generate the template.
 
     Returns:
         The name of the template.
@@ -1768,11 +1740,6 @@ def initialize_app(
         telemetry.send("reinit")
         return
 
-    generation_hash = None
-    if ai:
-        generation_hash = generate_template_using_ai(template)
-        template = constants.Templates.DEFAULT
-
    templates: dict[str, Template] = {}

    # Don't fetch app templates if the user directly asked for DEFAULT.
@@ -1781,11 +1748,7 @@ def initialize_app(
 
     if template is None:
         template = prompt_for_template_options(get_init_cli_prompt_options())
-        if template == constants.Templates.
-            generation_hash = generate_template_using_ai()
-            # change to the default to allow creation of default app
-            template = constants.Templates.DEFAULT
-        elif template == constants.Templates.CHOOSE_TEMPLATES:
+        if template == constants.Templates.CHOOSE_TEMPLATES:
             console.print(
                 f"Go to the templates page ({constants.Templates.REFLEX_TEMPLATES_URL}) and copy the command to init with a template."
             )
@@ -1800,11 +1763,6 @@ def initialize_app(
         app_name=app_name, template=template, templates=templates
     )
 
-    # If a reflex.build generation hash is available, download the code and apply it to the main module.
-    if generation_hash:
-        initialize_main_module_index_from_generation(
-            app_name, generation_hash=generation_hash
-        )
     telemetry.send("init", template=template)
 
     return template
reflex/utils/pyi_generator.py
CHANGED
@@ -348,7 +348,7 @@ def _extract_class_props_as_ast_nodes(
     all_props = []
     kwargs = []
     for target_class in clzs:
-        event_triggers = target_class().get_event_triggers()
+        event_triggers = target_class._create([]).get_event_triggers()
         # Import from the target class to ensure type hints are resolvable.
         exec(f"from {target_class.__module__} import *", type_hint_globals)
         for name, value in target_class.__annotations__.items():
@@ -575,7 +575,7 @@ def _generate_component_create_functiondef(
            return ast.Name(id=f"{' | '.join(map(ast.unparse, all_count_args_type))}")
        return ast.Name(id="EventType[Any]")
 
-    event_triggers = clz().get_event_triggers()
+    event_triggers = clz._create([]).get_event_triggers()
 
    # event handler kwargs
    kwargs.extend(
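
Note: both call sites switch from instantiating the component class directly to going through the internal `_create` constructor with an empty child list. A rough illustration of the pattern, using a made-up wrapper component:

import reflex as rx


class MyComponent(rx.Component):
    """Hypothetical wrapper component, used only for illustration."""

    tag = "MyComponent"


# Same shape as the generator's new call: build a throwaway instance via
# _create([]) and read its event triggers.
triggers = MyComponent._create([]).get_event_triggers()
print(sorted(triggers))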
reflex/utils/redir.py
CHANGED
@@ -1,7 +1,6 @@
 """Utilities to handle redirection to browser UI."""
 
 import time
-import uuid
 import webbrowser
 
 import httpx
@@ -48,14 +47,6 @@ def open_browser_and_wait(
     return response
 
 
-def reflex_build_redirect() ->
-    """Open the browser window to reflex.build
-
-    Returns:
-        The selected generation hash.
-    """
-    token = str(uuid.uuid4())
-    target_url = constants.Templates.REFLEX_BUILD_URL.format(reflex_init_token=token)
-    poll_url = constants.Templates.REFLEX_BUILD_POLL_URL.format(reflex_init_token=token)
-    response = open_browser_and_wait(target_url, poll_url)
-    return response.json()["generation_hash"]
+def reflex_build_redirect() -> None:
+    """Open the browser window to reflex.build."""
+    open_browser(constants.Templates.REFLEX_BUILD_FRONTEND)
reflex/vars/base.py
CHANGED
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 import contextlib
+import copy
 import dataclasses
 import datetime
 import functools
@@ -2146,7 +2147,7 @@ class ComputedVar(Var[RETURN_TYPE]):
             "fget": kwargs.pop("fget", self._fget),
             "initial_value": kwargs.pop("initial_value", self._initial_value),
             "cache": kwargs.pop("cache", self._cache),
-            "deps": kwargs.pop("deps", self._static_deps),
+            "deps": kwargs.pop("deps", copy.copy(self._static_deps)),
             "auto_deps": kwargs.pop("auto_deps", self._auto_deps),
             "interval": kwargs.pop("interval", self._update_interval),
             "backend": kwargs.pop("backend", self._backend),
@@ -2318,7 +2319,7 @@ class ComputedVar(Var[RETURN_TYPE]):
         if not _isinstance(value, self._var_type, nested=1, treat_var_as_type=False):
             console.error(
                 f"Computed var '{type(instance).__name__}.{self._js_expr}' must return"
-                f" type '{self._var_type}', got '{type(value)}
+                f" a value of type '{self._var_type}', got '{value}' of type {type(value)}."
             )
 
     def _deps(
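
Note: the `copy.copy` matters because `_static_deps` is mutable; without it, a ComputedVar and the object produced by replacing it would share one deps container. A self-contained miniature of the failure mode and the fix (not reflex's actual class):

import copy
import dataclasses


@dataclasses.dataclass
class FakeComputedVar:
    deps: list[str]

    def replace(self) -> "FakeComputedVar":
        # Mirrors the fix: the clone gets its own shallow copy of deps.
        return dataclasses.replace(self, deps=copy.copy(self.deps))


original = FakeComputedVar(deps=["state.a"])
clone = original.replace()
clone.deps.append("state.b")
assert original.deps == ["state.a"]  # original is unaffected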
{reflex-0.7.2a2.dist-info → reflex-0.7.2.dev1.dist-info}/METADATA
CHANGED
@@ -1,51 +1,45 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: reflex
-Version: 0.7.
+Version: 0.7.2.dev1
 Summary: Web apps in pure Python.
-License: Apache-2.0
 Keywords: web,framework
-Author:
-Author-
-
-
-
-
-
-
-
-
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: python-
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: reflex-hosting-cli
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: twine
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: wheel (>=0.42.0,<1.0)
-Requires-Dist: wrapt (>=1.17.0,<2.0)
-Project-URL: Documentation, https://reflex.dev/docs/getting-started/introduction
-Project-URL: Homepage, https://reflex.dev
-Project-URL: Repository, https://github.com/reflex-dev/reflex
+Author: Elijah Ahianyo
+Author-Email: Nikhil Rao <nikhil@reflex.dev>, Alek Petuskey <alek@reflex.dev>, Masen Furer <masen@reflex.dev>, =?utf-8?q?Thomas_Brand=C3=A9ho?= <thomas@reflex.dev>, Khaleel Al-Adhami <khaleel@reflex.dev>
+Maintainer-Email: Masen Furer <masen@reflex.dev>, =?utf-8?q?Thomas_Brand=C3=A9ho?= <thomas@reflex.dev>, Khaleel Al-Adhami <khaleel@reflex.dev>
+License-Expression: Apache-2.0
+Project-URL: homepage, https://reflex.dev
+Project-URL: repository, https://github.com/reflex-dev/reflex
+Project-URL: documentation, https://reflex.dev/docs/getting-started/introduction
+Requires-Python: <4.0,>=3.10
+Requires-Dist: fastapi!=0.111.0,!=0.111.1,>=0.96.0
+Requires-Dist: gunicorn<24.0,>=20.1.0
+Requires-Dist: jinja2<4.0,>=3.1.2
+Requires-Dist: psutil<8.0,>=5.9.4
+Requires-Dist: pydantic<3.0,>=1.10.21
+Requires-Dist: python-multipart<0.1,>=0.0.5
+Requires-Dist: python-socketio<6.0,>=5.7.0
+Requires-Dist: redis<6.0,>=4.3.5
+Requires-Dist: rich<14.0,>=13.0.0
+Requires-Dist: sqlmodel<0.1,>=0.0.14
+Requires-Dist: typer<1.0,>=0.15.1
+Requires-Dist: uvicorn>=0.20.0
+Requires-Dist: starlette-admin<1.0,>=0.11.0
+Requires-Dist: alembic<2.0,>=1.11.1
+Requires-Dist: platformdirs<5.0,>=3.10.0
+Requires-Dist: distro<2.0,>=1.8.0; platform_system == "Linux"
+Requires-Dist: python-engineio!=4.6.0
+Requires-Dist: wrapt<2.0,>=1.17.0
+Requires-Dist: packaging<25.0,>=23.1
+Requires-Dist: reflex-hosting-cli>=0.1.29
+Requires-Dist: charset-normalizer<4.0,>=3.3.2
+Requires-Dist: wheel<1.0,>=0.42.0
+Requires-Dist: build<2.0,>=1.0.3
+Requires-Dist: setuptools>=75.0
+Requires-Dist: httpx<1.0,>=0.25.1
+Requires-Dist: twine<7.0,>=4.0.0
+Requires-Dist: tomlkit<1.0,>=0.12.4
+Requires-Dist: lazy_loader>=0.4
+Requires-Dist: typing_extensions>=4.6.0
 Description-Content-Type: text/markdown
 
 
@@ -307,4 +301,3 @@ We are actively looking for contributors, no matter your skill level or experience
 ## License
 
 Reflex is open-source and licensed under the [Apache License 2.0](LICENSE).
-
{reflex-0.7.2a2.dist-info → reflex-0.7.2.dev1.dist-info}/RECORD
CHANGED
@@ -1,3 +1,16 @@
+benchmarks/__init__.py,sha256=EPwQDZ_qYgf5GFMdYQGHWDbpkLvR1OdQiEvPkVByYpM,89
+benchmarks/benchmark_compile_times.py,sha256=DA0MuUVF2SGXun1cIO6So_B7FE78YZepJkq2JUvHHK4,4500
+benchmarks/benchmark_imports.py,sha256=rC9Ke0n4h9lty3GEfLF0nODZpbMpiiAPqWVkDLATdHk,3733
+benchmarks/benchmark_lighthouse.py,sha256=EdoTJ9oOyWTalj3OZn5C_-J76kR3Tedw_WjDxzM52F8,2347
+benchmarks/benchmark_package_size.py,sha256=118Np7CIX-T2lG5OGFISm_KPfrni-pMRz3aFfrFUdkw,3824
+benchmarks/benchmark_web_size.py,sha256=KG3rWk8ARg6K7eqtwg5qTIjgBDev0zG3rPz_MlMAqLo,2972
+benchmarks/conftest.py,sha256=ekR_xO0FL2c9W_zLCTMRn35uPjdqPma0IbIcSn2WKPU,487
+benchmarks/lighthouse.sh,sha256=fbOaaTOvE69Z23nEhA4od-v_WehyLvtI1FJfPjYdPPk,2139
+benchmarks/utils.py,sha256=NTI9WzkTvr4lE20GKh-DZ30Wc0Xqs-KN2Nb5og2dPzQ,1968
+reflex-0.7.2.dev1.dist-info/METADATA,sha256=31Tj75jzbJX_H6-g04JcBF9sbkiHtjYKM5OQCLzzrlE,11764
+reflex-0.7.2.dev1.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+reflex-0.7.2.dev1.dist-info/entry_points.txt,sha256=XfumVjOeM8bxbPMTjy5CvSe65xnMKHCBQ4MxWWHCidM,61
+reflex-0.7.2.dev1.dist-info/licenses/LICENSE,sha256=dw3zLrp9f5ObD7kqS32vWfhcImfO52PMmRqvtxq_YEE,11358
 reflex/.templates/apps/blank/assets/favicon.ico,sha256=baxxgDAQ2V4-G5Q4S2yK5uUJTUGkv-AOWBQ0xd6myUo,4286
 reflex/.templates/apps/blank/code/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 reflex/.templates/apps/blank/code/blank.py,sha256=oKnsBBZM1-_RFAuwGKgfiCzgsrHlN_m_XP0-Fpnld7k,926
@@ -363,10 +376,10 @@ reflex/middleware/middleware.py,sha256=p5VVoIgQ_NwOg_GOY6g0S4fmrV76_VE1zt-HiwbMw
 reflex/model.py,sha256=k6qCweATPW1YRB_qcHwa5X35btJmtIlB4zEQ63FaW3w,17527
 reflex/page.py,sha256=qEt8n5EtawSywCzdsiaNQJWhC8ie-vg8ig0JGuVavPI,2386
 reflex/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-reflex/reflex.py,sha256=
+reflex/reflex.py,sha256=de26qs1UOfnvYjDrmBti5SX7gxAgIMPhuXgsS2LF4AY,19393
 reflex/route.py,sha256=nn_hJwtQdjiqH_dHXfqMGWKllnyPQZTSR-KWdHDhoOs,4210
 reflex/state.py,sha256=nnNIr-uQ2qfmzQVF7D5jghzdKZ8vL7-nBiFBlDnSww4,141506
-reflex/style.py,sha256=
+reflex/style.py,sha256=dilXPn8de80NzsXT53GPJrmjELC5nPYIlCgongyq1zM,13145
 reflex/testing.py,sha256=wzqppu_-4e1QeFJ-vLVpW19egTGm-JpU_c7wUPiURlE,35693
 reflex/utils/__init__.py,sha256=y-AHKiRQAhk2oAkvn7W8cRVTZVK625ff8tTwvZtO7S4,24
 reflex/utils/build.py,sha256=gm_lRsOqKeRD5mBiRToJcT4Vt_ZQPzDuazGfvJlSeeQ,9024
@@ -382,24 +395,20 @@ reflex/utils/imports.py,sha256=-EkUt9y5U3qmImjfpsXwYh7JI9qJHd_L6X9y12EPJew,3921
 reflex/utils/lazy_loader.py,sha256=-3DcwIqHNft2fb1ikgDYAMiEwNfbiWfrTBAf1gEVX2o,1367
 reflex/utils/net.py,sha256=0Yd9OLK8R_px2sqnqrDkTky6hYHtG2pEDvvilOjDfjc,1219
 reflex/utils/path_ops.py,sha256=Sio_pZ9-dqu6pAPUkO_JA9ONXDsyLGKWOVRoA-dCrec,7903
-reflex/utils/prerequisites.py,sha256=
+reflex/utils/prerequisites.py,sha256=fhpp1yaRkBr0_zAowNSFeaRDZ1qjsmhR6_Tz34nebK0,63578
 reflex/utils/processes.py,sha256=1iZe-3Yrg-ja8jZxxAfggljqqcJgsFu8fi4bu4XQGx0,13489
-reflex/utils/pyi_generator.py,sha256=
-reflex/utils/redir.py,sha256=
+reflex/utils/pyi_generator.py,sha256=cKdssbtAtGj2deOSDos9OF96w10qte8JM-TlfbzSdtw,41602
+reflex/utils/redir.py,sha256=kTqY2WSouF5_ftOe5bnvPEyU3SLpg3pcysTcxFH1UxI,1505
 reflex/utils/registry.py,sha256=bseD0bIO8b3pctHKpD5J2MRdDzcf7eWKtHEZVutVNJ0,1401
 reflex/utils/serializers.py,sha256=K8-erpNIjJNIKif0cDFExa9f5DEVuQUq0j5v5VH6aBI,13408
 reflex/utils/telemetry.py,sha256=qwJBwjdtAV-OGKgO4h-NWhgTvfC3gbduBdn1UB8Ikes,5608
 reflex/utils/types.py,sha256=nGX44Q_Jp33wIaxf2vxANwBWe1743V2B8RRS8H9yV4c,33449
 reflex/vars/__init__.py,sha256=2Kv6Oh9g3ISZFESjL1al8KiO7QBZUXmLKGMCBsP-DoY,1243
-reflex/vars/base.py,sha256=
+reflex/vars/base.py,sha256=uJqKCT6maryYQ_5YON4hjFbyfoMwYva9Upwxyyn6zGk,101345
 reflex/vars/datetime.py,sha256=WOEzQF6qjMjYvCat80XxgB_4hmVNHwIIZNMBSmfu0PM,5790
 reflex/vars/dep_tracking.py,sha256=kluvF4Pfbpdqf0GcpmYHjT1yP-D1erAzaSQP6qIxjB0,13846
 reflex/vars/function.py,sha256=2sVnhgetPSwtor8VFtAiYJdzZ9IRNzAKdsUJG6dXQcE,14461
 reflex/vars/number.py,sha256=RHY_KsUxliIgn7sptYPPyDubIfLkGYr0TZjX4PB_dgI,29334
 reflex/vars/object.py,sha256=cHVXN7I1MNw32KfpYKcmgStNSD4BnF3Y2CjkPABmjeo,16233
 reflex/vars/sequence.py,sha256=X4Gducv2u6fSEZm9uBlMr030bhDO0jUxnKkUXNg4Mwg,54878
-reflex-0.7.
-reflex-0.7.2a2.dist-info/METADATA,sha256=c91IdRnfmecqq_4ZaI1uU5J7cM87O012JddW-HDHSQI,11875
-reflex-0.7.2a2.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
-reflex-0.7.2a2.dist-info/entry_points.txt,sha256=H1Z5Yat_xJfy0dRT1Frk2PkO_p41Xy7fCKlj4FcdL9o,44
-reflex-0.7.2a2.dist-info/RECORD,,
+reflex-0.7.2.dev1.dist-info/RECORD,,

{reflex-0.7.2a2.dist-info → reflex-0.7.2.dev1.dist-info/licenses}/LICENSE
RENAMED
File without changes