bakefile 0.0.4__py3-none-any.whl → 0.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bake/__init__.py +9 -0
- bake/bakebook/bakebook.py +85 -0
- bake/bakebook/decorator.py +50 -0
- bake/bakebook/get.py +175 -0
- bake/cli/bake/__init__.py +3 -0
- bake/cli/bake/__main__.py +5 -0
- bake/cli/bake/main.py +74 -0
- bake/cli/bake/reinvocation.py +63 -0
- bake/cli/bakefile/__init__.py +3 -0
- bake/cli/bakefile/__main__.py +5 -0
- bake/cli/bakefile/add_inline.py +29 -0
- bake/cli/bakefile/export.py +212 -0
- bake/cli/bakefile/find_python.py +18 -0
- bake/cli/bakefile/init.py +56 -0
- bake/cli/bakefile/lint.py +77 -0
- bake/cli/bakefile/main.py +43 -0
- bake/cli/bakefile/uv.py +146 -0
- bake/cli/common/app.py +54 -0
- bake/cli/common/callback.py +13 -0
- bake/cli/common/context.py +145 -0
- bake/cli/common/exception_handler.py +57 -0
- bake/cli/common/obj.py +216 -0
- bake/cli/common/params.py +72 -0
- bake/cli/utils/__init__.py +0 -0
- bake/cli/utils/version.py +18 -0
- bake/manage/__init__.py +0 -0
- bake/manage/add_inline.py +71 -0
- bake/manage/find_python.py +210 -0
- bake/manage/lint.py +101 -0
- bake/manage/run_uv.py +88 -0
- bake/manage/write_bakefile.py +20 -0
- bake/py.typed +0 -0
- bake/samples/__init__.py +0 -0
- bake/samples/simple.py +8 -0
- bake/ui/__init__.py +11 -0
- bake/ui/console.py +58 -0
- bake/ui/logger/__init__.py +33 -0
- bake/ui/logger/capsys.py +158 -0
- bake/ui/logger/setup.py +53 -0
- bake/ui/logger/utils.py +215 -0
- bake/ui/params.py +5 -0
- bake/ui/run/__init__.py +5 -0
- bake/ui/run/run.py +546 -0
- bake/ui/run/script.py +74 -0
- bake/ui/run/splitter.py +249 -0
- bake/ui/run/uv.py +83 -0
- bake/ui/style.py +2 -0
- bake/utils/__init__.py +11 -0
- bake/utils/constants.py +21 -0
- {bakefile → bake/utils}/env.py +3 -1
- bake/utils/exceptions.py +17 -0
- {bakefile-0.0.4.dist-info → bakefile-0.0.6.dist-info}/METADATA +15 -2
- bakefile-0.0.6.dist-info/RECORD +63 -0
- {bakefile-0.0.4.dist-info → bakefile-0.0.6.dist-info}/WHEEL +2 -2
- bakefile-0.0.6.dist-info/entry_points.txt +5 -0
- bakelib/__init__.py +4 -0
- bakelib/space/__init__.py +0 -0
- bakelib/space/base.py +193 -0
- bakelib/space/python.py +80 -0
- bakelib/space/utils.py +118 -0
- bakefile/__init__.py +0 -13
- bakefile/cli/bake/__init__.py +0 -3
- bakefile/cli/bake/main.py +0 -127
- bakefile/cli/bake/resolve_bakebook.py +0 -103
- bakefile/cli/bake/utils.py +0 -25
- bakefile/cli/bakefile.py +0 -19
- bakefile/cli/utils/version.py +0 -9
- bakefile/exceptions.py +0 -9
- bakefile-0.0.4.dist-info/RECORD +0 -16
- bakefile-0.0.4.dist-info/entry_points.txt +0 -4
- {bakefile/cli/utils → bake/bakebook}/__init__.py +0 -0
- {bakefile → bake}/cli/__init__.py +0 -0
- /bakefile/py.typed → /bake/cli/common/__init__.py +0 -0
bake/ui/logger/setup.py
ADDED
@@ -0,0 +1,53 @@
import logging
import sys
from contextvars import ContextVar
from typing import TYPE_CHECKING, Any, cast

from loguru import logger

from .utils import (
    InterceptHandler,
    JsonSink,
    PrettyLogFormatter,
    get_global_min_log_level,
    reset_all_logging_states,
)

if TYPE_CHECKING:
    from loguru import FilterDict, FormatFunction


def setup_logging(
    level_per_module: "FilterDict | None" = None,
    thread_local_context: dict[str, ContextVar[Any]] | None = None,
    is_pretty_log: bool = False,
) -> None:
    if level_per_module is None:
        level_per_module = {"": logging.WARNING}

    if thread_local_context is None:
        thread_local_context = {}

    global_min_log_level = get_global_min_log_level(level_per_module)

    reset_all_logging_states()
    logger.remove()
    logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)

    if is_pretty_log:
        sink = sys.stderr
        formatter: FormatFunction | str = cast(
            "FormatFunction",
            PrettyLogFormatter(thread_local_context=thread_local_context),
        )
    else:
        sink = JsonSink(thread_local_context=thread_local_context)
        formatter: FormatFunction | str = ""

    logger.add(
        sink=sink,
        format=formatter,
        level=global_min_log_level,
        filter=level_per_module,
        backtrace=False,
    )
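For orientation, the signature above implies a call pattern roughly like the following. This is a minimal sketch, not code from the package: the import path bake.ui.logger.setup is assumed from the file layout in this diff, and the re-exports in bake/ui/logger/__init__.py are not shown here.

import logging
from contextvars import ContextVar

from bake.ui.logger.setup import setup_logging  # path assumed from the layout above

request_id: ContextVar[str] = ContextVar("request_id", default="-")

setup_logging(
    # "" is required and sets the default level; other keys adjust it per module prefix.
    level_per_module={"": logging.WARNING, "bake": logging.DEBUG},
    thread_local_context={"request_id": request_id},
    is_pretty_log=True,  # False selects JsonSink: one JSON object per line on stderr
)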
bake/ui/logger/utils.py
ADDED
@@ -0,0 +1,215 @@
import inspect
import logging
import sys
from contextvars import ContextVar
from enum import Enum
from typing import TYPE_CHECKING, Any, ClassVar, TextIO

import orjson
from loguru import logger
from loguru._better_exceptions import ExceptionFormatter
from loguru._simple_sinks import StreamSink

if TYPE_CHECKING:
    from loguru import FilterDict, Message, Record

    class ExtendedRecord(Record):
        default_extra: dict[str, Any]
        extra_json: str
        default_extra_json: str


UNPARSABLE_LINE = "unparsable_line"
LogType = dict[str, Any]


class LogKey(str, Enum):
    TIMESTAMP = "timestamp"
    LEVEL = "level"
    MESSAGE = "message"
    NAME = "name"
    PROCESS_NAME = "process_name"
    FILE_NAME = "file_name"
    FUNCTION_NAME = "function_name"
    LINE_NO = "line_no"
    MODULE = "module"
    THREAD_NAME = "thread_name"
    EXCEPTION = "exc_info"

    @classmethod
    def required_keys(cls) -> frozenset[str]:
        return frozenset(key.value for key in LogKey if key is not LogKey.EXCEPTION)


def get_global_min_log_level(level_per_module: "FilterDict") -> int:
    if "" not in level_per_module:
        raise ValueError("Missing empty string key for default logging level")

    if not all(isinstance(v, int) for v in level_per_module.values()):
        raise ValueError("All values in the dictionary must be of type 'int'")

    global_min_log_level = min(v for v in level_per_module.values() if isinstance(v, int))

    return global_min_log_level


def reset_all_logging_states():
    logging.root.handlers.clear()
    logging.root.setLevel(logging.NOTSET)
    for _logger in logging.Logger.manager.loggerDict.values():
        if isinstance(_logger, logging.Logger):
            _logger.handlers.clear()
            _logger.setLevel(logging.NOTSET)


class InterceptHandler(logging.Handler):
    default_log_record_attr: ClassVar[set[str]] = {
        "args",
        "asctime",
        "created",
        "exc_info",
        "exc_text",
        "filename",
        "funcName",
        "levelname",
        "levelno",
        "lineno",
        "module",
        "msecs",
        "message",
        "msg",
        "name",
        "pathname",
        "process",
        "processName",
        "relativeCreated",
        "stack_info",
        "taskName",
        "thread",
        "threadName",
    }

    def emit(self, record: logging.LogRecord) -> None:
        # Get corresponding Loguru level if it exists.
        try:
            level: str | int = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno

        # Find caller from where originated the logged message.
        frame, depth = inspect.currentframe(), 0
        while frame:
            filename = frame.f_code.co_filename
            is_logging = filename == logging.__file__
            is_frozen = "importlib" in filename and "_bootstrap" in filename
            if depth > 0 and not (is_logging or is_frozen):
                break
            frame = frame.f_back
            depth += 1

        extra = {k: v for k, v in record.__dict__.items() if k not in self.default_log_record_attr}
        logger.opt(depth=depth, exception=record.exc_info).bind(**extra).log(
            level, record.getMessage()
        )


def to_json_serializable(data: Any) -> Any:
    return orjson.dumps(data, default=str).decode()


def flatten_extra(record_extra: dict[str, Any]) -> dict[str, Any]:
    # Maintain consistent extra= API between standard logging and Loguru
    # Flatten Loguru's nested structure to match logging module behavior
    if "extra" in record_extra:
        nested_extra = record_extra.pop("extra")
        record_extra.update(
            {f"extra_{k}" if k in record_extra else k: v for k, v in nested_extra.items()}
        )

    return record_extra


class PrettyLogFormatter:
    def __init__(self, thread_local_context: dict[str, ContextVar[Any]]):
        self.thread_local_context = thread_local_context

    def __call__(self, record: "ExtendedRecord"):
        thread_local_extra = {}
        for context_var_name, context_var in self.thread_local_context.items():
            thread_local_extra[context_var_name] = str(context_var.get())

        record["extra"] = flatten_extra(record["extra"])
        record["extra"] = {**thread_local_extra, **record["extra"]}
        # Ensure all values are JSON-serializable (e.g., PosixPath -> str)
        record["extra_json"] = orjson.dumps(record["extra"], default=str).decode()

        record["default_extra"] = {
            "process_name": record["process"].name,
            "file_name": record["file"].name,
            "thread_name": record["thread"].name,
        }
        record["default_extra_json"] = orjson.dumps(record["default_extra"], default=str).decode()

        return (
            "<green>{time:YYYY-MM-DD HH:mm:ss.SSS Z}</green> | "
            "<level>{level: <8}</level> | "
            "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
            "<level>{message}</level> - "
            "<cyan>{extra_json}</cyan> - "
            "<light-black>{default_extra_json}</light-black>\n"
        ) + ("\n{exception}\n\n" if record["exception"] else "")


class JsonSink(StreamSink):
    def __init__(
        self,
        thread_local_context: dict[str, ContextVar[Any]] | None = None,
        stream: TextIO | Any = None,
    ):
        # sys.stderr is mutable object
        if stream is None:
            stream = sys.stderr
        super().__init__(stream)

        if thread_local_context is None:
            thread_local_context = {}

        self.thread_local_context = thread_local_context

    def write(self, message: "Message"):
        record = message.record
        log_entry = self.json_formatter(record=record)
        log_message = orjson.dumps(
            log_entry, default=str, option=orjson.OPT_APPEND_NEWLINE
        ).decode()
        return super().write(log_message)

    def json_formatter(self, record: "Record") -> LogType:
        log_entry: LogType = {
            LogKey.TIMESTAMP.value: record["time"],
            LogKey.LEVEL.value: record["level"].name,
            LogKey.MESSAGE.value: record["message"],
            LogKey.NAME.value: record["name"],
            LogKey.PROCESS_NAME.value: record["process"].name,
            LogKey.FILE_NAME.value: record["file"].name,
            LogKey.FUNCTION_NAME.value: record["function"],
            LogKey.LINE_NO.value: record["line"],
            LogKey.MODULE.value: record["module"],
            LogKey.THREAD_NAME.value: record["thread"].name,
        }

        for context_var_name, context_vars in self.thread_local_context.items():
            log_entry[context_var_name] = str(context_vars.get())

        if record["exception"] is not None:
            log_entry[LogKey.EXCEPTION.value] = "".join(
                ExceptionFormatter().format_exception(
                    type_=record["exception"][0],
                    value=record["exception"][1],
                    tb=record["exception"][2],
                )
            )

        record["extra"] = flatten_extra(record["extra"])
        log_entry.update(record["extra"])
        return log_entry
bake/ui/params.py
ADDED