bakefile 0.0.4__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. bake/__init__.py +9 -0
  2. bake/bakebook/bakebook.py +85 -0
  3. bake/bakebook/decorator.py +50 -0
  4. bake/bakebook/get.py +175 -0
  5. bake/cli/bake/__init__.py +3 -0
  6. bake/cli/bake/__main__.py +5 -0
  7. bake/cli/bake/main.py +74 -0
  8. bake/cli/bake/reinvocation.py +63 -0
  9. bake/cli/bakefile/__init__.py +3 -0
  10. bake/cli/bakefile/__main__.py +5 -0
  11. bake/cli/bakefile/add_inline.py +29 -0
  12. bake/cli/bakefile/find_python.py +18 -0
  13. bake/cli/bakefile/init.py +56 -0
  14. bake/cli/bakefile/lint.py +77 -0
  15. bake/cli/bakefile/main.py +41 -0
  16. bake/cli/bakefile/uv.py +146 -0
  17. bake/cli/common/app.py +54 -0
  18. bake/cli/common/callback.py +13 -0
  19. bake/cli/common/context.py +145 -0
  20. bake/cli/common/exception_handler.py +57 -0
  21. bake/cli/common/obj.py +214 -0
  22. bake/cli/common/params.py +72 -0
  23. bake/cli/utils/__init__.py +0 -0
  24. bake/cli/utils/version.py +18 -0
  25. bake/manage/__init__.py +0 -0
  26. bake/manage/add_inline.py +71 -0
  27. bake/manage/find_python.py +210 -0
  28. bake/manage/lint.py +101 -0
  29. bake/manage/run_uv.py +88 -0
  30. bake/manage/write_bakefile.py +20 -0
  31. bake/py.typed +0 -0
  32. bake/samples/__init__.py +0 -0
  33. bake/samples/simple.py +9 -0
  34. bake/ui/__init__.py +10 -0
  35. bake/ui/console.py +58 -0
  36. bake/ui/logger/__init__.py +33 -0
  37. bake/ui/logger/capsys.py +158 -0
  38. bake/ui/logger/setup.py +53 -0
  39. bake/ui/logger/utils.py +215 -0
  40. bake/ui/run/__init__.py +11 -0
  41. bake/ui/run/run.py +541 -0
  42. bake/ui/run/script.py +74 -0
  43. bake/ui/run/splitter.py +237 -0
  44. bake/ui/run/uv.py +83 -0
  45. bake/ui/style.py +2 -0
  46. bake/utils/__init__.py +11 -0
  47. bake/utils/constants.py +21 -0
  48. {bakefile → bake/utils}/env.py +3 -1
  49. bake/utils/exceptions.py +17 -0
  50. {bakefile-0.0.4.dist-info → bakefile-0.0.5.dist-info}/METADATA +14 -2
  51. bakefile-0.0.5.dist-info/RECORD +61 -0
  52. {bakefile-0.0.4.dist-info → bakefile-0.0.5.dist-info}/WHEEL +1 -1
  53. bakefile-0.0.5.dist-info/entry_points.txt +5 -0
  54. bakelib/__init__.py +4 -0
  55. bakelib/space/__init__.py +0 -0
  56. bakelib/space/base.py +73 -0
  57. bakelib/space/python.py +42 -0
  58. bakelib/space/utils.py +55 -0
  59. bakefile/__init__.py +0 -13
  60. bakefile/cli/bake/__init__.py +0 -3
  61. bakefile/cli/bake/main.py +0 -127
  62. bakefile/cli/bake/resolve_bakebook.py +0 -103
  63. bakefile/cli/bake/utils.py +0 -25
  64. bakefile/cli/bakefile.py +0 -19
  65. bakefile/cli/utils/version.py +0 -9
  66. bakefile/exceptions.py +0 -9
  67. bakefile-0.0.4.dist-info/RECORD +0 -16
  68. bakefile-0.0.4.dist-info/entry_points.txt +0 -4
  69. {bakefile/cli/utils → bake/bakebook}/__init__.py +0 -0
  70. {bakefile → bake}/cli/__init__.py +0 -0
  71. /bakefile/py.typed → /bake/cli/common/__init__.py +0 -0
bake/ui/logger/__init__.py
@@ -0,0 +1,33 @@
+from loguru import logger
+
+from bake.ui.logger.capsys import (
+    capsys_to_logs,
+    capsys_to_logs_pretty,
+    capture_to_logs,
+    capture_to_logs_pretty,
+    count_message_in_logs,
+    find_log,
+    has_message_in_logs,
+    has_messages_in_logs,
+    parse_pretty_log,
+    strip_ansi,
+)
+from bake.ui.logger.setup import setup_logging
+from bake.ui.logger.utils import UNPARSABLE_LINE, LogKey
+
+__all__ = [
+    "UNPARSABLE_LINE",
+    "LogKey",
+    "capsys_to_logs",
+    "capsys_to_logs_pretty",
+    "capture_to_logs",
+    "capture_to_logs_pretty",
+    "count_message_in_logs",
+    "find_log",
+    "has_message_in_logs",
+    "has_messages_in_logs",
+    "logger",
+    "parse_pretty_log",
+    "setup_logging",
+    "strip_ansi",
+]
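This __init__ module only re-exports the logging helpers defined in the files below. A minimal usage sketch (names come from __all__ above; setup_logging's parameters are shown in setup.py further down):

    import logging

    from bake.ui.logger import logger, setup_logging

    # Route stdlib logging through loguru and emit pretty lines on stderr.
    setup_logging(level_per_module={"": logging.INFO}, is_pretty_log=True)
    logger.info("hello from bake")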
bake/ui/logger/capsys.py
@@ -0,0 +1,158 @@
+import json
+import re
+from typing import TYPE_CHECKING, Any
+
+import orjson
+
+from bake.ui.logger.utils import UNPARSABLE_LINE, LogKey, LogType
+
+if TYPE_CHECKING:
+    import _pytest.capture
+    import pytest
+
+
+def has_required_keys(log: LogType) -> bool:
+    return LogKey.required_keys().issubset(log.keys())
+
+
+def strip_ansi(text: str) -> str:
+    """Remove ANSI color codes from text."""
+    ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
+    return ansi_escape.sub("", text)
+
+
+def _safe_parse_dict_str(dict_str: str, unparsed_key: str = "_unparsed") -> dict[str, Any]:
+    try:
+        return orjson.loads(dict_str)
+    except (TypeError, ValueError):
+        # For malformed JSON, preserve original string for debugging
+        return {unparsed_key: dict_str}
+
+
+def parse_pretty_log(pretty_output: str) -> list[LogType]:
+    """Parse pretty log format back into structured log entries."""
+    # Strip ANSI codes first
+    clean_output = strip_ansi(pretty_output)
+
+    log_pattern = re.compile(
+        r"(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} [+-]\d{2}:\d{2}) \| "
+        r"(?P<level>\w+)\s+\| "
+        r"(?P<name>[\w.]+):(?P<function>[\w_<>]+):(?P<line>\d+) - "
+        r"(?P<message>.+?) - "
+        r"(?P<extra>\{.*?\}) - "
+        r"(?P<default_extra>\{.*?\})"
+        r"(?:\n\n(?P<exception>Traceback[\s\S]*?))?(?=\n\n\d{4}-\d{2}-\d{2}|\n\d{4}-\d{2}-\d{2}|$)",
+        re.DOTALL,
+    )
+
+    matches = log_pattern.findall(clean_output)
+    parsed_logs = []
+    for match in matches:
+        # Unpack the match tuple (findall returns tuples, not match objects)
+        timestamp, level, name, function, line, message, extra, default_extra, exception = match
+
+        # Parse extra fields safely (handles non-literal values like PosixPath)
+        extra_dict = _safe_parse_dict_str(extra, "_unparsed_extra")
+        default_extra_dict = _safe_parse_dict_str(default_extra, "_unparsed_default_extra")
+
+        log_data = {
+            LogKey.TIMESTAMP.value: timestamp,
+            LogKey.LEVEL.value: level,
+            LogKey.NAME.value: name,
+            LogKey.MODULE.value: name.split(".")[-1],
+            LogKey.FUNCTION_NAME.value: function,
+            LogKey.LINE_NO.value: int(line),
+            LogKey.MESSAGE.value: message,
+            **extra_dict,
+            **default_extra_dict,
+        }
+
+        # Add exception if present
+        if exception:
+            log_data[LogKey.EXCEPTION.value] = exception
+
+        # Map default_extra to LogKey fields
+        if "process_name" in default_extra_dict:
+            log_data[LogKey.PROCESS_NAME.value] = default_extra_dict["process_name"]
+        if "file_name" in default_extra_dict:
+            log_data[LogKey.FILE_NAME.value] = default_extra_dict["file_name"]
+        if "thread_name" in default_extra_dict:
+            log_data[LogKey.THREAD_NAME.value] = default_extra_dict["thread_name"]
+
+        parsed_logs.append(log_data)
+
+    return parsed_logs
+
+
+def capture_to_logs(
+    capture: "_pytest.capture.CaptureResult[str]", preserve_unparsable: bool = False
+) -> list[LogType]:
+    log_lines = capture.err.strip().split("\n")
+    parsed_logs = []
+
+    for line in log_lines:
+        if not line:
+            continue
+        try:
+            parsed_log = json.loads(line)
+            if not has_required_keys(parsed_log):
+                if preserve_unparsable:
+                    parsed_logs.append({UNPARSABLE_LINE: line})
+                continue
+            parsed_logs.append(parsed_log)
+        except json.JSONDecodeError:
+            if preserve_unparsable:
+                parsed_logs.append({UNPARSABLE_LINE: line})
+            continue
+    return parsed_logs
+
+
+def capture_to_logs_pretty(capture: "_pytest.capture.CaptureResult[str]") -> list[LogType]:
+    pretty_output = capture.err
+    if not pretty_output.strip():
+        return []
+    return parse_pretty_log(pretty_output)
+
+
+def capsys_to_logs(
+    capsys: "pytest.CaptureFixture[str]", preserve_unparsable: bool = False
+) -> list[LogType]:
+    capture = capsys.readouterr()
+    return capture_to_logs(capture=capture, preserve_unparsable=preserve_unparsable)
+
+
+def capsys_to_logs_pretty(capsys: "pytest.CaptureFixture[str]") -> list[LogType]:
+    capture = capsys.readouterr()
+    return capture_to_logs_pretty(capture=capture)
+
+
+def has_message_in_logs(logs: list[LogType], message: str) -> bool:
+    return any(log for log in logs if re.search(message, log[LogKey.MESSAGE.value]))
+
+
+def has_messages_in_logs(logs: list[LogType], messages: list[str]) -> bool:
+    if not messages:
+        return True
+
+    log_messages = [log[LogKey.MESSAGE.value] for log in logs]
+    msg_idx = 0
+
+    for log_msg in log_messages:
+        if not re.search(messages[msg_idx], log_msg):
+            continue
+        msg_idx += 1
+        if msg_idx == len(messages):
+            return True
+
+    return False
+
+
+def count_message_in_logs(logs: list[LogType], message: str) -> int:
+    return sum(1 for log in logs if re.search(message, log[LogKey.MESSAGE.value]))
+
+
+def find_log(logs: list[LogType], pattern: str, index: int = 0) -> LogType:
+    matches = (log for log in logs if re.search(pattern, log[LogKey.MESSAGE.value]))
+    for _ in range(index):
+        next(matches)
+    return next(matches)
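capsys.py is a pytest-facing bridge: the JSON sink installed by setup_logging writes one JSON object per line to stderr, and these helpers parse the captured stream back into dicts keyed by LogKey values (with a regex fallback for the pretty format). A hedged sketch of a test using them; the test name and messages are illustrative, not from the package:

    import logging

    import pytest

    from bake.ui.logger import capsys_to_logs, find_log, has_message_in_logs, setup_logging


    def test_emits_start_log(capsys: pytest.CaptureFixture[str]) -> None:
        setup_logging(level_per_module={"": logging.INFO})  # JSON lines -> stderr
        logging.getLogger(__name__).info("started run 42")

        logs = capsys_to_logs(capsys)  # parses stderr; drops unparsable lines by default
        assert has_message_in_logs(logs, r"started run \d+")
        assert find_log(logs, "started")["level"] == "INFO"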
bake/ui/logger/setup.py
@@ -0,0 +1,53 @@
+import logging
+import sys
+from contextvars import ContextVar
+from typing import TYPE_CHECKING, Any, cast
+
+from loguru import logger
+
+from .utils import (
+    InterceptHandler,
+    JsonSink,
+    PrettyLogFormatter,
+    get_global_min_log_level,
+    reset_all_logging_states,
+)
+
+if TYPE_CHECKING:
+    from loguru import FilterDict, FormatFunction
+
+
+def setup_logging(
+    level_per_module: "FilterDict | None" = None,
+    thread_local_context: dict[str, ContextVar[Any]] | None = None,
+    is_pretty_log: bool = False,
+) -> None:
+    if level_per_module is None:
+        level_per_module = {"": logging.WARNING}
+
+    if thread_local_context is None:
+        thread_local_context = {}
+
+    global_min_log_level = get_global_min_log_level(level_per_module)
+
+    reset_all_logging_states()
+    logger.remove()
+    logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)
+
+    if is_pretty_log:
+        sink = sys.stderr
+        formatter: FormatFunction | str = cast(
+            "FormatFunction",
+            PrettyLogFormatter(thread_local_context=thread_local_context),
+        )
+    else:
+        sink = JsonSink(thread_local_context=thread_local_context)
+        formatter: FormatFunction | str = ""
+
+    logger.add(
+        sink=sink,
+        format=formatter,
+        level=global_min_log_level,
+        filter=level_per_module,
+        backtrace=False,
+    )
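setup_logging resets all stdlib logger state, routes the logging module through loguru via InterceptHandler, and installs exactly one sink: stderr with PrettyLogFormatter when is_pretty_log is set, otherwise a JsonSink. Per-module verbosity uses loguru's FilterDict, and get_global_min_log_level (in utils.py below) insists on a "" key as the default level. A hedged configuration sketch; the module name is illustrative:

    import logging

    from bake.ui.logger import setup_logging

    setup_logging(
        level_per_module={"": logging.WARNING, "bake.manage": logging.DEBUG},
        is_pretty_log=True,  # human-readable lines instead of JSON
    )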
bake/ui/logger/utils.py
@@ -0,0 +1,215 @@
+import inspect
+import logging
+import sys
+from contextvars import ContextVar
+from enum import Enum
+from typing import TYPE_CHECKING, Any, ClassVar, TextIO
+
+import orjson
+from loguru import logger
+from loguru._better_exceptions import ExceptionFormatter
+from loguru._simple_sinks import StreamSink
+
+if TYPE_CHECKING:
+    from loguru import FilterDict, Message, Record
+
+    class ExtendedRecord(Record):
+        default_extra: dict[str, Any]
+        extra_json: str
+        default_extra_json: str
+
+
+UNPARSABLE_LINE = "unparsable_line"
+LogType = dict[str, Any]
+
+
+class LogKey(str, Enum):
+    TIMESTAMP = "timestamp"
+    LEVEL = "level"
+    MESSAGE = "message"
+    NAME = "name"
+    PROCESS_NAME = "process_name"
+    FILE_NAME = "file_name"
+    FUNCTION_NAME = "function_name"
+    LINE_NO = "line_no"
+    MODULE = "module"
+    THREAD_NAME = "thread_name"
+    EXCEPTION = "exc_info"
+
+    @classmethod
+    def required_keys(cls) -> frozenset[str]:
+        return frozenset(key.value for key in LogKey if key is not LogKey.EXCEPTION)
+
+
+def get_global_min_log_level(level_per_module: "FilterDict") -> int:
+    if "" not in level_per_module:
+        raise ValueError("Missing empty string key for default logging level")
+
+    if not all(isinstance(v, int) for v in level_per_module.values()):
+        raise ValueError("All values in the dictionary must be of type 'int'")
+
+    global_min_log_level = min(v for v in level_per_module.values() if isinstance(v, int))
+
+    return global_min_log_level
+
+
+def reset_all_logging_states():
+    logging.root.handlers.clear()
+    logging.root.setLevel(logging.NOTSET)
+    for _logger in logging.Logger.manager.loggerDict.values():
+        if isinstance(_logger, logging.Logger):
+            _logger.handlers.clear()
+            _logger.setLevel(logging.NOTSET)
+
+
+class InterceptHandler(logging.Handler):
+    default_log_record_attr: ClassVar[set[str]] = {
+        "args",
+        "asctime",
+        "created",
+        "exc_info",
+        "exc_text",
+        "filename",
+        "funcName",
+        "levelname",
+        "levelno",
+        "lineno",
+        "module",
+        "msecs",
+        "message",
+        "msg",
+        "name",
+        "pathname",
+        "process",
+        "processName",
+        "relativeCreated",
+        "stack_info",
+        "taskName",
+        "thread",
+        "threadName",
+    }
+
+    def emit(self, record: logging.LogRecord) -> None:
+        # Get corresponding Loguru level if it exists.
+        try:
+            level: str | int = logger.level(record.levelname).name
+        except ValueError:
+            level = record.levelno
+
+        # Find caller from where originated the logged message.
+        frame, depth = inspect.currentframe(), 0
+        while frame:
+            filename = frame.f_code.co_filename
+            is_logging = filename == logging.__file__
+            is_frozen = "importlib" in filename and "_bootstrap" in filename
+            if depth > 0 and not (is_logging or is_frozen):
+                break
+            frame = frame.f_back
+            depth += 1
+
+        extra = {k: v for k, v in record.__dict__.items() if k not in self.default_log_record_attr}
+        logger.opt(depth=depth, exception=record.exc_info).bind(**extra).log(
+            level, record.getMessage()
+        )
+
+
+def to_json_serializable(data: Any) -> Any:
+    return orjson.dumps(data, default=str).decode()
+
+
+def flatten_extra(record_extra: dict[str, Any]) -> dict[str, Any]:
+    # Maintain consistent extra= API between standard logging and Loguru
+    # Flatten Loguru's nested structure to match logging module behavior
+    if "extra" in record_extra:
+        nested_extra = record_extra.pop("extra")
+        record_extra.update(
+            {f"extra_{k}" if k in record_extra else k: v for k, v in nested_extra.items()}
+        )
+
+    return record_extra
+
+
+class PrettyLogFormatter:
+    def __init__(self, thread_local_context: dict[str, ContextVar[Any]]):
+        self.thread_local_context = thread_local_context
+
+    def __call__(self, record: "ExtendedRecord"):
+        thread_local_extra = {}
+        for context_var_name, context_var in self.thread_local_context.items():
+            thread_local_extra[context_var_name] = str(context_var.get())
+
+        record["extra"] = flatten_extra(record["extra"])
+        record["extra"] = {**thread_local_extra, **record["extra"]}
+        # Ensure all values are JSON-serializable (e.g., PosixPath -> str)
+        record["extra_json"] = orjson.dumps(record["extra"], default=str).decode()
+
+        record["default_extra"] = {
+            "process_name": record["process"].name,
+            "file_name": record["file"].name,
+            "thread_name": record["thread"].name,
+        }
+        record["default_extra_json"] = orjson.dumps(record["default_extra"], default=str).decode()
+
+        return (
+            "<green>{time:YYYY-MM-DD HH:mm:ss.SSS Z}</green> | "
+            "<level>{level: <8}</level> | "
+            "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
+            "<level>{message}</level> - "
+            "<cyan>{extra_json}</cyan> - "
+            "<light-black>{default_extra_json}</light-black>\n"
+        ) + ("\n{exception}\n\n" if record["exception"] else "")
+
+
+class JsonSink(StreamSink):
+    def __init__(
+        self,
+        thread_local_context: dict[str, ContextVar[Any]] | None = None,
+        stream: TextIO | Any = None,
+    ):
+        # sys.stderr is mutable object
+        if stream is None:
+            stream = sys.stderr
+        super().__init__(stream)
+
+        if thread_local_context is None:
+            thread_local_context = {}
+
+        self.thread_local_context = thread_local_context
+
+    def write(self, message: "Message"):
+        record = message.record
+        log_entry = self.json_formatter(record=record)
+        log_message = orjson.dumps(
+            log_entry, default=str, option=orjson.OPT_APPEND_NEWLINE
+        ).decode()
+        return super().write(log_message)
+
+    def json_formatter(self, record: "Record") -> LogType:
+        log_entry: LogType = {
+            LogKey.TIMESTAMP.value: record["time"],
+            LogKey.LEVEL.value: record["level"].name,
+            LogKey.MESSAGE.value: record["message"],
+            LogKey.NAME.value: record["name"],
+            LogKey.PROCESS_NAME.value: record["process"].name,
+            LogKey.FILE_NAME.value: record["file"].name,
+            LogKey.FUNCTION_NAME.value: record["function"],
+            LogKey.LINE_NO.value: record["line"],
+            LogKey.MODULE.value: record["module"],
+            LogKey.THREAD_NAME.value: record["thread"].name,
+        }
+
+        for context_var_name, context_vars in self.thread_local_context.items():
+            log_entry[context_var_name] = str(context_vars.get())
+
+        if record["exception"] is not None:
+            log_entry[LogKey.EXCEPTION.value] = "".join(
+                ExceptionFormatter().format_exception(
+                    type_=record["exception"][0],
+                    value=record["exception"][1],
+                    tb=record["exception"][2],
+                )
+            )
+
+        record["extra"] = flatten_extra(record["extra"])
+        log_entry.update(record["extra"])
+        return log_entry
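Both sinks fold the thread_local_context mapping into every record, so request- or task-scoped values ride along on each log line without being passed to every logging call. A hedged sketch, with the context-variable name chosen for illustration:

    import logging
    from contextvars import ContextVar

    from bake.ui.logger import setup_logging

    request_id: ContextVar[str] = ContextVar("request_id", default="-")

    setup_logging(
        level_per_module={"": logging.INFO},
        thread_local_context={"request_id": request_id},
    )

    request_id.set("abc123")
    logging.getLogger("bake").info("handled request")
    # Each JSON line on stderr now includes "request_id": "abc123".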
bake/ui/run/__init__.py
@@ -0,0 +1,11 @@
+"""Subprocess execution utilities for bake UI.
+
+Provides functions for running commands with real-time output streaming
+and capture capabilities.
+"""
+
+from bake.ui.run.run import OutputSplitter, run
+from bake.ui.run.script import run_script
+from bake.ui.run.uv import run_uv
+
+__all__ = ["OutputSplitter", "run", "run_script", "run_uv"]