bakefile 0.0.4__py3-none-any.whl → 0.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bake/__init__.py +9 -0
- bake/bakebook/bakebook.py +85 -0
- bake/bakebook/decorator.py +50 -0
- bake/bakebook/get.py +175 -0
- bake/cli/bake/__init__.py +3 -0
- bake/cli/bake/__main__.py +5 -0
- bake/cli/bake/main.py +74 -0
- bake/cli/bake/reinvocation.py +63 -0
- bake/cli/bakefile/__init__.py +3 -0
- bake/cli/bakefile/__main__.py +5 -0
- bake/cli/bakefile/add_inline.py +29 -0
- bake/cli/bakefile/export.py +212 -0
- bake/cli/bakefile/find_python.py +18 -0
- bake/cli/bakefile/init.py +56 -0
- bake/cli/bakefile/lint.py +77 -0
- bake/cli/bakefile/main.py +43 -0
- bake/cli/bakefile/uv.py +146 -0
- bake/cli/common/app.py +54 -0
- bake/cli/common/callback.py +13 -0
- bake/cli/common/context.py +145 -0
- bake/cli/common/exception_handler.py +57 -0
- bake/cli/common/obj.py +216 -0
- bake/cli/common/params.py +72 -0
- bake/cli/utils/__init__.py +0 -0
- bake/cli/utils/version.py +18 -0
- bake/manage/__init__.py +0 -0
- bake/manage/add_inline.py +71 -0
- bake/manage/find_python.py +210 -0
- bake/manage/lint.py +101 -0
- bake/manage/run_uv.py +88 -0
- bake/manage/write_bakefile.py +20 -0
- bake/py.typed +0 -0
- bake/samples/__init__.py +0 -0
- bake/samples/simple.py +8 -0
- bake/ui/__init__.py +11 -0
- bake/ui/console.py +58 -0
- bake/ui/logger/__init__.py +33 -0
- bake/ui/logger/capsys.py +158 -0
- bake/ui/logger/setup.py +53 -0
- bake/ui/logger/utils.py +215 -0
- bake/ui/params.py +5 -0
- bake/ui/run/__init__.py +5 -0
- bake/ui/run/run.py +546 -0
- bake/ui/run/script.py +74 -0
- bake/ui/run/splitter.py +249 -0
- bake/ui/run/uv.py +83 -0
- bake/ui/style.py +2 -0
- bake/utils/__init__.py +11 -0
- bake/utils/constants.py +21 -0
- {bakefile → bake/utils}/env.py +3 -1
- bake/utils/exceptions.py +17 -0
- {bakefile-0.0.4.dist-info → bakefile-0.0.6.dist-info}/METADATA +15 -2
- bakefile-0.0.6.dist-info/RECORD +63 -0
- {bakefile-0.0.4.dist-info → bakefile-0.0.6.dist-info}/WHEEL +2 -2
- bakefile-0.0.6.dist-info/entry_points.txt +5 -0
- bakelib/__init__.py +4 -0
- bakelib/space/__init__.py +0 -0
- bakelib/space/base.py +193 -0
- bakelib/space/python.py +80 -0
- bakelib/space/utils.py +118 -0
- bakefile/__init__.py +0 -13
- bakefile/cli/bake/__init__.py +0 -3
- bakefile/cli/bake/main.py +0 -127
- bakefile/cli/bake/resolve_bakebook.py +0 -103
- bakefile/cli/bake/utils.py +0 -25
- bakefile/cli/bakefile.py +0 -19
- bakefile/cli/utils/version.py +0 -9
- bakefile/exceptions.py +0 -9
- bakefile-0.0.4.dist-info/RECORD +0 -16
- bakefile-0.0.4.dist-info/entry_points.txt +0 -4
- {bakefile/cli/utils → bake/bakebook}/__init__.py +0 -0
- {bakefile → bake}/cli/__init__.py +0 -0
- /bakefile/py.typed → /bake/cli/common/__init__.py +0 -0
bake/manage/find_python.py
ADDED
@@ -0,0 +1,210 @@
+import logging
+import re
+from pathlib import Path
+
+from bake.manage.add_inline import read_inline
+from bake.ui import run_uv
+from bake.utils import BakebookError
+from bake.utils.exceptions import PythonNotFoundError
+
+logger = logging.getLogger(__name__)
+
+
+def is_standalone_bakefile(bakefile_path: Path) -> bool:
+    inline_metadata = read_inline(bakefile_path)
+    if inline_metadata is None:
+        return False
+
+    dependencies = inline_metadata.get("dependencies", [])
+    has_bakefile = any(dep.startswith("bakefile") for dep in dependencies)
+
+    if not has_bakefile:
+        logger.error(
+            f"Invalid inline metadata in {bakefile_path}: "
+            f"PEP 723 metadata exists but 'bakefile' dependency is missing"
+        )
+        raise BakebookError(
+            f"Invalid inline metadata in {bakefile_path}: "
+            f"PEP 723 metadata exists but 'bakefile' dependency is missing. "
+            f"Run 'bakefile add-inline' to fix."
+        )
+
+    return True
+
+
+def _find_bakefile_lock(bakefile_path: Path) -> Path | None:
+    """Find bakefile-level lock (<bakefile.py.lock>)."""
+    lock_path = bakefile_path.with_suffix(bakefile_path.suffix + ".lock")
+    if lock_path.exists():
+        logger.debug(f"Found bakefile lock at {lock_path}")
+        return lock_path
+    logger.debug("No bakefile lock found")
+    return None
+
+
+def _find_project_lock(bakefile_path: Path) -> Path | None:
+    """Find project-level uv.lock by searching up directory tree."""
+    current_dir = bakefile_path.parent
+    for _ in range(10):  # Limit search depth
+        uv_lock = current_dir / "uv.lock"
+        if uv_lock.exists():
+            logger.debug(f"Found project lock at {uv_lock}")
+            return uv_lock
+        parent = current_dir.parent
+        if parent == current_dir:  # Reached root
+            break
+        current_dir = parent
+    logger.debug("No project lock found")
+    return None
+
+
+def _find_bakefile_python(bakefile_path: Path) -> Path | None:
+    # References:
+    # https://github.com/astral-sh/uv/blob/543f1f3f5924d1d2734fd718381e6f0d0f6f70b5/crates/uv/src/commands/project/mod.rs#L843
+
+    kind = "script"
+    found_bakefile_level_venv_message = (
+        f"The {kind} environment's Python version satisfies the request"
+    )
+    result = run_uv(
+        ["python", "find", "--script", str(bakefile_path.name), "-v"],
+        check=False,
+        cwd=bakefile_path.parent,
+        echo=False,
+    )
+
+    is_bakefile_python_found = (
+        result.returncode == 0 and found_bakefile_level_venv_message in result.stderr.strip()
+    )
+
+    if is_bakefile_python_found:
+        python_path = Path(result.stdout.strip())
+        logger.debug(f"Found bakefile Python at {python_path.as_posix()}")
+        return python_path
+
+    logger.debug("No bakefile Python found")
+    return None
+
+
+def _find_project_python(bakefile_path: Path) -> Path | None:
+    """Find Python from project-level venv using uv python find -v."""
+    # References:
+    # https://github.com/astral-sh/uv/blob/543f1f3f5924d1d2734fd718381e6f0d0f6f70b5/crates/uv-python/src/discovery.rs#L795
+    # https://github.com/astral-sh/uv/blob/543f1f3f5924d1d2734fd718381e6f0d0f6f70b5/crates/uv-python/src/discovery.rs#L3169-L3184
+    result = run_uv(
+        ["python", "find", "-v"],
+        check=False,
+        cwd=bakefile_path.parent,
+        echo=False,
+    )
+
+    # Check if stderr contains "Found `...` at `...` (...)"
+    # where source is "active virtual environment" or "virtual environment"
+    stderr = result.stderr.strip()
+    pattern = r"Found `[^`]+` at `[^`]+` \(([^)]+)\)"
+    match = re.search(pattern, stderr)
+
+    if result.returncode == 0 and match:
+        source = match.group(1)
+        if source in {"active virtual environment", "virtual environment"}:
+            python_path = Path(result.stdout.strip())
+            logger.debug(f"Found project Python at {python_path} (source: {source})")
+            return python_path
+
+    logger.debug("No project Python found")
+    return None
+
+
+def _create_bakefile_venv(bakefile_path: Path) -> Path | None:
+    """Create bakefile-level venv and return Python path."""
+    lock_path = _find_bakefile_lock(bakefile_path)
+
+    if lock_path:
+        # Use frozen sync if lock exists
+        logger.debug("Syncing bakefile with frozen lock")
+        run_uv(
+            ["sync", "--script", str(bakefile_path.name), "--frozen"],
+            check=True,
+            cwd=bakefile_path.parent,
+            echo=False,
+        )
+    else:
+        # Create new lock and sync
+        logger.debug("Creating bakefile lock and syncing")
+        run_uv(
+            ["sync", "--script", str(bakefile_path.name)],
+            check=True,
+            cwd=bakefile_path.parent,
+            echo=False,
+        )
+        run_uv(
+            ["lock", "--script", str(bakefile_path.name)],
+            check=True,
+            cwd=bakefile_path.parent,
+            echo=False,
+        )
+
+    return _find_bakefile_python(bakefile_path)
+
+
+def _create_project_venv(bakefile_path: Path) -> Path | None:
+    """Create project-level venv and return Python path."""
+    work_dir = bakefile_path.parent
+
+    # Check if pyproject.toml exists
+    pyproject = work_dir / "pyproject.toml"
+    if not pyproject.exists():
+        logger.debug("No pyproject.toml found, cannot create project venv")
+        return None
+
+    lock_path = _find_project_lock(bakefile_path)
+
+    if lock_path:
+        # Use frozen sync if lock exists
+        logger.debug("Syncing project with frozen lock")
+        run_uv(["sync", "--frozen"], check=True, cwd=work_dir, echo=False)
+    else:
+        # Create new lock and sync
+        logger.debug("Creating project lock and syncing")
+        run_uv(["lock"], check=True, cwd=work_dir, echo=False)
+        run_uv(["sync"], check=True, cwd=work_dir, echo=False)
+
+    return _find_project_python(bakefile_path)
+
+
+def find_python_path(bakefile_path: Path | None) -> Path:
+    if bakefile_path is None or not bakefile_path.exists():
+        raise PythonNotFoundError(f"Bakefile not found at {bakefile_path}")
+
+    is_standalone = is_standalone_bakefile(bakefile_path)
+
+    if is_standalone:
+        logger.debug("Bakefile has inline metadata -> bakefile-level Python")
+
+        # Step 1: Try to find existing bakefile-level Python
+        python_path = _find_bakefile_python(bakefile_path)
+        if python_path:
+            return python_path
+
+        # Step 2: Create bakefile-level venv
+        python_path = _create_bakefile_venv(bakefile_path)
+        if python_path:
+            return python_path
+
+    else:
+        logger.debug("No inline metadata -> project-level Python")
+
+        # Step 1: Try to find existing project-level Python
+        python_path = _find_project_python(bakefile_path)
+        if python_path:
+            return python_path
+
+        # Step 2: Create project-level venv
+        python_path = _create_project_venv(bakefile_path)
+        if python_path:
+            return python_path
+
+    raise PythonNotFoundError(
+        f"Could not find Python for {bakefile_path}. "
+        f"Run 'bakefile add-inline' to add PEP 723 metadata for bakefile-level Python."
+    )
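Taken together, find_python_path prefers an existing interpreter (script-level for PEP 723 bakefiles, project-level otherwise) and only creates a venv as a fallback. A minimal caller sketch, assuming a Bakefile.py in the working directory (the path is illustrative, not part of the package):

    from pathlib import Path

    from bake.manage.find_python import find_python_path
    from bake.utils.exceptions import PythonNotFoundError

    try:
        python = find_python_path(Path("Bakefile.py"))  # hypothetical bakefile path
        print(f"Resolved interpreter: {python}")
    except PythonNotFoundError as exc:
        print(f"No interpreter available: {exc}")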
bake/manage/lint.py
ADDED
@@ -0,0 +1,101 @@
+import logging
+import subprocess
+from pathlib import Path
+
+from ruff.__main__ import find_ruff_bin
+from ty.__main__ import find_ty_bin
+
+from bake.ui import console
+from bake.ui.run import run
+
+logger = logging.getLogger(__name__)
+
+
+def run_ruff(
+    bakefile_path: Path,
+    subcommand: str,
+    args: list[str],
+    *,
+    only_bakefile: bool = False,
+    check: bool = True,
+    dry_run: bool = False,
+) -> subprocess.CompletedProcess[str]:
+    ruff_bin = find_ruff_bin()
+    target = bakefile_path.name if only_bakefile else "."
+    cmd = [subcommand, *args, target]
+    display_cmd = "ruff " + " ".join(cmd)
+    console.cmd(display_cmd)
+    return run(
+        [str(ruff_bin), *cmd],
+        cwd=bakefile_path.parent,
+        capture_output=True,
+        stream=True,
+        check=check,
+        echo=False,
+        dry_run=dry_run,
+    )
+
+
+def run_ruff_format(
+    bakefile_path: Path,
+    *,
+    only_bakefile: bool = False,
+    check: bool = True,
+    dry_run: bool = False,
+) -> subprocess.CompletedProcess[str]:
+    return run_ruff(
+        bakefile_path=bakefile_path,
+        subcommand="format",
+        args=["--exit-non-zero-on-format"],
+        only_bakefile=only_bakefile,
+        check=check,
+        dry_run=dry_run,
+    )
+
+
+def run_ruff_check(
+    bakefile_path: Path,
+    *,
+    only_bakefile: bool = False,
+    check: bool = True,
+    dry_run: bool = False,
+) -> subprocess.CompletedProcess[str]:
+    return run_ruff(
+        bakefile_path=bakefile_path,
+        subcommand="check",
+        args=[
+            "--fix",
+            "--exit-non-zero-on-fix",
+            "--extend-select",
+            "ARG,B,C4,E,F,I,N,PGH,PIE,PYI,RUF,SIM,UP",
+        ],
+        only_bakefile=only_bakefile,
+        check=check,
+        dry_run=dry_run,
+    )
+
+
+def run_ty_check(
+    bakefile_path: Path,
+    python_path: Path,
+    *,
+    only_bakefile: bool = False,
+    check: bool = True,
+    dry_run: bool = False,
+) -> subprocess.CompletedProcess[str]:
+    ty_bin = find_ty_bin()
+    cmd = ["check", "--error-on-warning", "--python", str(python_path)]
+    if only_bakefile:
+        cmd.append(bakefile_path.name)
+
+    display_cmd = "ty " + " ".join(cmd)
+    console.cmd(display_cmd)
+    return run(
+        [str(ty_bin), *cmd],
+        cwd=bakefile_path.parent,
+        capture_output=True,
+        stream=True,
+        check=check,
+        echo=False,
+        dry_run=dry_run,
+    )
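These helpers wrap the bundled ruff and ty binaries. A rough sketch of how a lint run might chain them, assuming a bakefile and an already-resolved interpreter (both paths below are placeholders):

    from pathlib import Path

    from bake.manage.lint import run_ruff_check, run_ruff_format, run_ty_check

    bakefile = Path("Bakefile.py")       # placeholder bakefile
    python = Path(".venv/bin/python")    # placeholder interpreter for ty

    run_ruff_format(bakefile, only_bakefile=True, check=False)
    run_ruff_check(bakefile, only_bakefile=True, check=False)
    run_ty_check(bakefile, python, only_bakefile=True, check=False)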
bake/manage/run_uv.py
ADDED
@@ -0,0 +1,88 @@
+import logging
+import subprocess
+from pathlib import Path
+
+from bake.manage.find_python import find_python_path, is_standalone_bakefile
+from bake.ui import console, style
+from bake.ui.run import run, run_uv
+from bake.utils import BakebookError
+from bake.utils.exceptions import PythonNotFoundError
+
+logger = logging.getLogger(__name__)
+
+
+def _run_uv(
+    bakefile_path: Path | None, command_name: str, cmd: list[str], dry_run: bool = False
+) -> subprocess.CompletedProcess[str] | subprocess.CompletedProcess[None]:
+    if bakefile_path is None or not bakefile_path.exists():
+        raise PythonNotFoundError(f"Bakefile not found at {bakefile_path}")
+
+    if not is_standalone_bakefile(bakefile_path):
+        error_msg = (
+            f"`{command_name}` command requires PEP 723 inline metadata in the bakefile. "
+            f"Run {style.code('bakefile add-inline')} to add metadata, "
+            f"or use {style.code(f'uv {command_name}')} directly for project-level dependencies."
+        )
+        raise BakebookError(error_msg)
+
+    logger.debug(f"Running `uv {command_name}` for {bakefile_path}")
+    result = run_uv(
+        (command_name, "--script", bakefile_path.name, *cmd),
+        capture_output=True,
+        stream=True,
+        check=True,
+        echo=True,
+        cwd=bakefile_path.parent,
+        dry_run=dry_run,
+    )
+    return result
+
+
+def run_uv_add(
+    bakefile_path: Path | None, cmd: list[str], dry_run: bool
+) -> subprocess.CompletedProcess[str] | subprocess.CompletedProcess[None]:
+    return _run_uv(bakefile_path=bakefile_path, command_name="add", cmd=cmd, dry_run=dry_run)
+
+
+def run_uv_lock(
+    bakefile_path: Path | None, cmd: list[str], dry_run: bool
+) -> subprocess.CompletedProcess[str] | subprocess.CompletedProcess[None]:
+    return _run_uv(bakefile_path=bakefile_path, command_name="lock", cmd=cmd, dry_run=dry_run)
+
+
+def run_uv_sync(
+    bakefile_path: Path | None, cmd: list[str], dry_run: bool
+) -> subprocess.CompletedProcess[str] | subprocess.CompletedProcess[None]:
+    return _run_uv(bakefile_path=bakefile_path, command_name="sync", cmd=cmd, dry_run=dry_run)
+
+
+def run_uv_pip(
+    bakefile_path: Path | None, cmd: list[str], dry_run: bool
+) -> subprocess.CompletedProcess[str] | subprocess.CompletedProcess[None]:
+    if bakefile_path is None or not bakefile_path.exists():
+        raise PythonNotFoundError(f"Bakefile not found at {bakefile_path}")
+
+    is_standalone = is_standalone_bakefile(bakefile_path)
+    if not is_standalone:
+        console.warning(
+            "No PEP 723 inline metadata found. Using project-level Python.\n"
+            f"For project-level dependencies, consider using {style.code('uv pip')} directly.\n"
+        )
+
+    python_path = find_python_path(bakefile_path)
+
+    version_result = run(
+        [str(python_path), "--version"], capture_output=True, stream=False, echo=False
+    )
+    version = version_result.stdout.strip() or version_result.stderr.strip()
+    console.err.print(f"Using {version}\n")
+
+    logger.debug(f"Running uv pip with cmd: {cmd}")
+    return run_uv(
+        ("pip", *cmd, "--python", str(python_path)),
+        capture_output=True,
+        stream=True,
+        check=True,
+        echo=True,
+        dry_run=dry_run,
+    )
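_run_uv refuses to touch a bakefile without PEP 723 metadata, while run_uv_pip only warns and falls back to the project interpreter. A hedged sketch of adding a dependency to a standalone bakefile (the path and package name are illustrative):

    from pathlib import Path

    from bake.manage.run_uv import run_uv_add

    # Roughly equivalent in spirit to `uv add --script Bakefile.py requests`.
    run_uv_add(Path("Bakefile.py"), cmd=["requests"], dry_run=True)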
bake/manage/write_bakefile.py
ADDED
@@ -0,0 +1,20 @@
+import types
+from pathlib import Path
+
+from bake.utils.constants import BAKEBOOK_NAME_IN_SAMPLES
+
+
+def write_bakefile(
+    bakefile_path: Path, bakebook_name: str, sample_module: types.ModuleType
+) -> None:
+    if not hasattr(sample_module, BAKEBOOK_NAME_IN_SAMPLES):
+        raise ValueError(
+            f"Module `{sample_module.__name__}` must have `{BAKEBOOK_NAME_IN_SAMPLES}` attribute"
+        )
+
+    if sample_module.__file__ is None:
+        raise ValueError(f"Could not find `{sample_module.__name__}`")
+
+    original_bakefile_content = Path(sample_module.__file__).read_text()
+    customized_content = original_bakefile_content.replace(BAKEBOOK_NAME_IN_SAMPLES, bakebook_name)
+    bakefile_path.write_text(customized_content)
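write_bakefile copies a sample module's source and renames its bakebook attribute. A speculative sketch of how an init flow might use it, assuming bake/samples/simple.py defines the attribute named by BAKEBOOK_NAME_IN_SAMPLES (target path and name are placeholders):

    from pathlib import Path

    from bake.manage.write_bakefile import write_bakefile
    from bake.samples import simple  # sample template shipped with the package

    write_bakefile(Path("Bakefile.py"), bakebook_name="bakebook", sample_module=simple)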
bake/py.typed
ADDED
File without changes

bake/samples/__init__.py
ADDED
File without changes

bake/samples/simple.py
ADDED

bake/ui/__init__.py
ADDED
bake/ui/console.py
ADDED
@@ -0,0 +1,58 @@
+import textwrap
+from typing import Any
+
+from beautysh import BashFormatter
+from rich.console import Console
+
+out = Console(stderr=False)
+err = Console(stderr=True)
+
+BOLD_GREEN = "bold green"
+
+
+def _print(
+    console_obj: Console, emoji: str | None, label: str, style: str, message: str, **kwargs
+) -> None:
+    formatted_label = f"[{label}]" if console_obj.no_color or out.color_system is None else label
+
+    emoji = emoji + " " if emoji else ""
+    console_obj.print(f"[{style}]{emoji}{formatted_label}[/{style}] {message}", **kwargs)
+
+
+def success(message: str, **kwargs) -> None:
+    _print(out, ":white_check_mark:", "SUCCESS", BOLD_GREEN, message, **kwargs)
+
+
+def echo(message: Any, **kwargs) -> None:
+    out.print(message, **kwargs)
+
+
+def cmd(cmd_str: str, **kwargs) -> None:
+    err.print(f"[bold green]❯[/bold green] [default]{cmd_str}[/default]", **kwargs)  # noqa: RUF001
+
+
+def script_block(title: str, script: str, **kwargs) -> None:
+    formatter = BashFormatter()
+    formatted, error = formatter.beautify_string(script)
+
+    if error:
+        formatted = textwrap.dedent(script)
+
+    terminal_width: int = err.size.width
+    width = min(70, terminal_width)
+    bold_line = "━" * width
+    thin_line = "─" * width
+
+    err.print(bold_line, style=BOLD_GREEN)
+    err.print(title, style="bold")
+    err.print(thin_line, style=BOLD_GREEN)
+    err.print(formatted, highlight=False, **kwargs)
+    err.print(bold_line, style=BOLD_GREEN)
+
+
+def warning(message: str, **kwargs) -> None:
+    _print(err, ":warning-emoji: ", "WARNING", "bold yellow", message, **kwargs)
+
+
+def error(message: str, **kwargs) -> None:
+    _print(err, ":x:", "ERROR", "bold red", message, **kwargs)
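The console module routes status output to stderr through rich. A small usage sketch of the helpers above (the command string and script body are arbitrary examples):

    from bake.ui import console

    console.cmd("uv sync --script Bakefile.py")
    console.warning("No lock file found; resolving from scratch.")
    console.script_block("setup", "echo building\necho done")
    console.success("Bakefile is ready.")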
bake/ui/logger/__init__.py
ADDED
@@ -0,0 +1,33 @@
+from loguru import logger
+
+from bake.ui.logger.capsys import (
+    capsys_to_logs,
+    capsys_to_logs_pretty,
+    capture_to_logs,
+    capture_to_logs_pretty,
+    count_message_in_logs,
+    find_log,
+    has_message_in_logs,
+    has_messages_in_logs,
+    parse_pretty_log,
+    strip_ansi,
+)
+from bake.ui.logger.setup import setup_logging
+from bake.ui.logger.utils import UNPARSABLE_LINE, LogKey
+
+__all__ = [
+    "UNPARSABLE_LINE",
+    "LogKey",
+    "capsys_to_logs",
+    "capsys_to_logs_pretty",
+    "capture_to_logs",
+    "capture_to_logs_pretty",
+    "count_message_in_logs",
+    "find_log",
+    "has_message_in_logs",
+    "has_messages_in_logs",
+    "logger",
+    "parse_pretty_log",
+    "setup_logging",
+    "strip_ansi",
+]
bake/ui/logger/capsys.py
ADDED
@@ -0,0 +1,158 @@
+import json
+import re
+from typing import TYPE_CHECKING, Any
+
+import orjson
+
+from bake.ui.logger.utils import UNPARSABLE_LINE, LogKey, LogType
+
+if TYPE_CHECKING:
+    import _pytest.capture
+    import pytest
+
+
+def has_required_keys(log: LogType) -> bool:
+    return LogKey.required_keys().issubset(log.keys())
+
+
+def strip_ansi(text: str) -> str:
+    """Remove ANSI color codes from text."""
+    ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
+    return ansi_escape.sub("", text)
+
+
+def _safe_parse_dict_str(dict_str: str, unparsed_key: str = "_unparsed") -> dict[str, Any]:
+    try:
+        return orjson.loads(dict_str)
+    except (TypeError, ValueError):
+        # For malformed JSON, preserve original string for debugging
+        return {unparsed_key: dict_str}
+
+
+def parse_pretty_log(pretty_output: str) -> list[LogType]:
+    """Parse pretty log format back into structured log entries."""
+    # Strip ANSI codes first
+    clean_output = strip_ansi(pretty_output)
+
+    log_pattern = re.compile(
+        r"(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} [+-]\d{2}:\d{2}) \| "
+        r"(?P<level>\w+)\s+\| "
+        r"(?P<name>[\w.]+):(?P<function>[\w_<>]+):(?P<line>\d+) - "
+        r"(?P<message>.+?) - "
+        r"(?P<extra>\{.*?\}) - "
+        r"(?P<default_extra>\{.*?\})"
+        r"(?:\n\n(?P<exception>Traceback[\s\S]*?))?(?=\n\n\d{4}-\d{2}-\d{2}|\n\d{4}-\d{2}-\d{2}|$)",
+        re.DOTALL,
+    )
+
+    matches = log_pattern.findall(clean_output)
+    parsed_logs = []
+    for match in matches:
+        # Unpack the match tuple (findall returns tuples, not match objects)
+        timestamp, level, name, function, line, message, extra, default_extra, exception = match
+
+        # Parse extra fields safely (handles non-literal values like PosixPath)
+        extra_dict = _safe_parse_dict_str(extra, "_unparsed_extra")
+        default_extra_dict = _safe_parse_dict_str(default_extra, "_unparsed_default_extra")
+
+        log_data = {
+            LogKey.TIMESTAMP.value: timestamp,
+            LogKey.LEVEL.value: level,
+            LogKey.NAME.value: name,
+            LogKey.MODULE.value: name.split(".")[-1],
+            LogKey.FUNCTION_NAME.value: function,
+            LogKey.LINE_NO.value: int(line),
+            LogKey.MESSAGE.value: message,
+            **extra_dict,
+            **default_extra_dict,
+        }
+
+        # Add exception if present
+        if exception:
+            log_data[LogKey.EXCEPTION.value] = exception
+
+        # Map default_extra to LogKey fields
+        if "process_name" in default_extra_dict:
+            log_data[LogKey.PROCESS_NAME.value] = default_extra_dict["process_name"]
+        if "file_name" in default_extra_dict:
+            log_data[LogKey.FILE_NAME.value] = default_extra_dict["file_name"]
+        if "thread_name" in default_extra_dict:
+            log_data[LogKey.THREAD_NAME.value] = default_extra_dict["thread_name"]
+
+        parsed_logs.append(log_data)
+
+    return parsed_logs
+
+
+def capture_to_logs(
+    capture: "_pytest.capture.CaptureResult[str]", preserve_unparsable: bool = False
+) -> list[LogType]:
+    log_lines = capture.err.strip().split("\n")
+    parsed_logs = []
+
+    for line in log_lines:
+        if not line:
+            continue
+        try:
+            parsed_log = json.loads(line)
+            if not has_required_keys(parsed_log):
+                if preserve_unparsable:
+                    parsed_logs.append({UNPARSABLE_LINE: line})
+                continue
+            parsed_logs.append(parsed_log)
+        except json.JSONDecodeError:
+            if preserve_unparsable:
+                parsed_logs.append({UNPARSABLE_LINE: line})
+            continue
+    return parsed_logs
+
+
+def capture_to_logs_pretty(capture: "_pytest.capture.CaptureResult[str]") -> list[LogType]:
+    pretty_output = capture.err
+    if not pretty_output.strip():
+        return []
+    return parse_pretty_log(pretty_output)
+
+
+def capsys_to_logs(
+    capsys: "pytest.CaptureFixture[str]", preserve_unparsable: bool = False
+) -> list[LogType]:
+    capture = capsys.readouterr()
+    return capture_to_logs(capture=capture, preserve_unparsable=preserve_unparsable)
+
+
+def capsys_to_logs_pretty(capsys: "pytest.CaptureFixture[str]") -> list[LogType]:
+    capture = capsys.readouterr()
+    return capture_to_logs_pretty(capture=capture)
+
+
+def has_message_in_logs(logs: list[LogType], message: str) -> bool:
+    return any(log for log in logs if re.search(message, log[LogKey.MESSAGE.value]))
+
+
+def has_messages_in_logs(logs: list[LogType], messages: list[str]) -> bool:
+    if not messages:
+        return True
+
+    log_messages = [log[LogKey.MESSAGE.value] for log in logs]
+    msg_idx = 0
+
+    for log_msg in log_messages:
+        if not re.search(messages[msg_idx], log_msg):
+            continue
+        msg_idx += 1
+        if msg_idx == len(messages):
+            return True
+
+    return False
+
+
+def count_message_in_logs(logs: list[LogType], message: str) -> int:
+    return sum(1 for log in logs if re.search(message, log[LogKey.MESSAGE.value]))
+
+
+def find_log(logs: list[LogType], pattern: str, index: int = 0) -> LogType:
+    matches = (log for log in logs if re.search(pattern, log[LogKey.MESSAGE.value]))
+    for _ in range(index):
+        next(matches)
+    return next(matches)
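These helpers turn pytest's captured stderr back into structured log dicts for assertions. A hedged test sketch, assuming the code under test emits one loguru JSON record per line to stderr (the function under test is hypothetical):

    from bake.ui.logger import capsys_to_logs, has_message_in_logs


    def test_sync_is_logged(capsys):
        run_sync()  # hypothetical function under test that emits JSON logs
        logs = capsys_to_logs(capsys, preserve_unparsable=True)
        assert has_message_in_logs(logs, r"Syncing bakefile")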