pybasemkit 0.2.0__tar.gz → 0.2.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39) hide show
  1. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/.github/workflows/upload-to-pypi.yml +2 -1
  2. pybasemkit-0.2.2/AGENTS.md +284 -0
  3. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/PKG-INFO +1 -1
  4. pybasemkit-0.2.2/basemkit/__init__.py +1 -0
  5. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/basemkit/remotedebug.py +14 -9
  6. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/basemkit/shell.py +72 -9
  7. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/basemkit/yamlable.py +60 -8
  8. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/tests/test_remotedebug.py +5 -10
  9. pybasemkit-0.2.2/tests/test_shell.py +78 -0
  10. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/tests/test_yamlable.py +66 -0
  11. pybasemkit-0.2.2/yamable.md +324 -0
  12. pybasemkit-0.2.0/basemkit/__init__.py +0 -1
  13. pybasemkit-0.2.0/tests/test_shell.py +0 -36
  14. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/.github/workflows/build.yml +0 -0
  15. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/.gitignore +0 -0
  16. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/.project +0 -0
  17. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/.pydevproject +0 -0
  18. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/LICENSE +0 -0
  19. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/README.md +0 -0
  20. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/basemkit/argparse_action.py +0 -0
  21. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/basemkit/base_cmd.py +0 -0
  22. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/basemkit/basetest.py +0 -0
  23. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/basemkit/docker_util.py +0 -0
  24. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/basemkit/persistent_log.py +0 -0
  25. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/basemkit/profiler.py +0 -0
  26. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/mkdocs.yml +0 -0
  27. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/pyproject.toml +0 -0
  28. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/scripts/blackisort +0 -0
  29. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/scripts/doc +0 -0
  30. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/scripts/install +0 -0
  31. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/scripts/release +0 -0
  32. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/scripts/test +0 -0
  33. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/tests/__init__.py +0 -0
  34. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/tests/test_argparse_action.py +0 -0
  35. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/tests/test_avro.py +0 -0
  36. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/tests/test_base_cmd.py +0 -0
  37. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/tests/test_docker_util.py +0 -0
  38. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/tests/test_persistent_log.py +0 -0
  39. {pybasemkit-0.2.0 → pybasemkit-0.2.2}/tests/test_timeout_decorator.py +0 -0
@@ -19,7 +19,8 @@ jobs:
19
19
  - name: Install dependencies
20
20
  run: |
21
21
  python -m pip install --upgrade pip
22
- pip install hatch
22
+ # work around https://github.com/pypa/hatch/issues/2198
23
+ pip install 'virtualenv<20.26.0' hatch
23
24
  - name: Build and publish
24
25
  run: |
25
26
  hatch build
@@ -0,0 +1,284 @@
1
+ # AGENTS.md — pybasemkit
2
+
3
+ Coding agent instructions for the `pybasemkit` repository.
4
+ Package installs as `basemkit`; PyPI name is `pybasemkit`.
5
+
6
+ ---
7
+
8
+ ## Project Overview
9
+
10
+ - **Python:** `>=3.10` (CI runs 3.12; classifiers cover 3.10–3.13)
11
+ - **Build backend:** `hatchling` (configured entirely in `pyproject.toml`)
12
+ - **Runtime deps:** `dacite`, `dataclasses-json`, `PyYAML`, `shutup`
13
+ - **Test deps (optional):** `pytest`, `green`, `tox`
14
+ - **Dev deps (optional):** `black`, `isort`
15
+
16
+ ---
17
+
18
+ ## Build & Install
19
+
20
+ ```bash
21
+ # Install from source (editable not required; plain install is the norm)
22
+ pip install .
23
+
24
+ # Install with test extras
25
+ pip install ".[test]"
26
+
27
+ # Install with dev extras
28
+ pip install ".[dev]"
29
+
30
+ # Build wheel + sdist
31
+ hatch build
32
+ ```
33
+
34
+ ---
35
+
36
+ ## Formatting
37
+
38
+ Two tools are used together. Always run both before committing:
39
+
40
+ ```bash
41
+ # Format all source and test files (isort then black)
42
+ scripts/blackisort
43
+ ```
44
+
45
+ Under the hood this runs:
46
+ ```bash
47
+ isort basemkit/*.py
48
+ black basemkit/*.py
49
+ isort tests/*.py
50
+ black tests/*.py
51
+ ```
52
+
53
+ **Line length is 120 characters** (configured in `[tool.black]` in `pyproject.toml`).
54
+
55
+ No other linters (flake8, ruff, pylint, mypy) are configured.
56
+
57
+ ---
58
+
59
+ ## Running Tests
60
+
61
+ The default runner is `unittest discover`. All test classes inherit from
62
+ `basemkit.basetest.Basetest` (which subclasses `unittest.TestCase`).
63
+ `pytest` also works because it collects unittest-style classes.
64
+
65
+ ```bash
66
+ # Run the full test suite (default)
67
+ python3 -m unittest discover
68
+
69
+ # Using the project script (same as above)
70
+ scripts/test
71
+
72
+ # Using green (colorful output, serial)
73
+ scripts/test -g
74
+ # or:
75
+ green tests/ -s 1
76
+
77
+ # Using tox
78
+ scripts/test -t
79
+ # or:
80
+ tox -e py
81
+
82
+ # Using pytest
83
+ python -m pytest tests/
84
+ ```
85
+
86
+ ### Running a Single Test
87
+
88
+ ```bash
89
+ # Single test module
90
+ python -m unittest tests.test_yamlable
91
+
92
+ # Single test class
93
+ python -m unittest tests.test_yamlable.TestYamlAble
94
+
95
+ # Single test method ← most common for targeted debugging
96
+ python -m unittest tests.test_yamlable.TestYamlAble.test_to_yaml
97
+
98
+ # With pytest
99
+ python -m pytest tests/test_yamlable.py
100
+ python -m pytest tests/test_yamlable.py::TestYamlAble::test_to_yaml
101
+
102
+ # With green
103
+ green tests/test_yamlable.py -s 1
104
+ ```
105
+
106
+ ---
107
+
108
+ ## Code Style
109
+
110
+ ### Imports
111
+
112
+ Follow PEP 8 import order, sorted by `isort` (default profile):
113
+
114
+ 1. Standard library
115
+ 2. Third-party packages
116
+ 3. Local `basemkit.*` imports
117
+
118
+ ```python
119
+ import sys
120
+ import traceback
121
+ from argparse import ArgumentParser, Namespace
122
+ from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar
123
+
124
+ import yaml
125
+ from dacite import from_dict
126
+
127
+ from basemkit.yamlable import YamlAble
128
+ ```
129
+
130
+ Do **not** use `from __future__ import annotations`. Use `from typing import ...` imports for all type hints.
131
+
132
+ ### Type Annotations
133
+
134
+ Every function parameter and return type must be annotated.
135
+
136
+ ```python
137
+ def load_from_yaml_file(cls: Type[T], filename: str) -> T: ...
138
+ def run(self, cmd: str, text: bool = True, debug: bool = False) -> subprocess.CompletedProcess: ...
139
+ def get_level_summary(self, level: str, limit: int = 7) -> Tuple[int, str]: ...
140
+ ```
141
+
142
+ - Use `Optional[X]` (not `X | None`) — keeps 3.10 compatibility explicit
143
+ - Use `Union[str, Path]` (not `str | Path`)
144
+ - Use `TypeVar` for generic patterns: `T = TypeVar("T")`
145
+
146
+ ### Naming Conventions
147
+
148
+ | Category | Convention | Examples |
149
+ |---|---|---|
150
+ | Classes | `PascalCase` | `YamlAble`, `BaseCmd`, `Basetest`, `ShellResult` |
151
+ | Functions / methods | `snake_case` | `to_yaml`, `load_from_yaml_file`, `handle_exception` |
152
+ | Instance / local variables | `snake_case` | `self.shell_path`, `self.do_log`, `result` |
153
+ | Module-level constants | `UPPER_SNAKE_CASE` | `BLUE`, `RED`, `GREEN`, `END_COLOR` |
154
+ | Private / internal | `_leading_underscore` | `_yaml_setup`, `_yaml_dumper`, `_run` |
155
+ | TypeVars | Single capital | `T = TypeVar("T")` |
156
+
157
+ Avoid introducing new `camelCase` method names; those that exist are legacy compatibility shims.
158
+
159
+ ### Docstrings
160
+
161
+ Use **Google-style** docstrings (configured in `mkdocs.yml` as `docstring_style: google`).
162
+
163
+ ```python
164
+ def to_yaml(
165
+ self,
166
+ ignore_none: bool = True,
167
+ sort_keys: bool = False,
168
+ ) -> str:
169
+ """
170
+ Convert this dataclass object to a YAML string.
171
+
172
+ Args:
173
+ ignore_none: Omit attributes whose value is None.
174
+ sort_keys: Sort dictionary keys in the output.
175
+
176
+ Returns:
177
+ YAML string representation of the dataclass.
178
+
179
+ Raises:
180
+ ValueError: If the object is not a dataclass instance.
181
+ """
182
+ ```
183
+
184
+ - One-liner docstrings are acceptable for trivial methods.
185
+ - Module-level docstrings use the format:
186
+
187
+ ```python
188
+ """
189
+ Created on YYYY-MM-DD
190
+
191
+ @author: wf
192
+ """
193
+ ```
194
+
195
+ ### Error Handling
196
+
197
+ ```python
198
+ # Preferred: catch specific exceptions; use debug flag for traceback
199
+ try:
200
+ result = self.shell.run(command, debug=self.debug)
201
+ except Exception as ex:
202
+ self.handle_exception(f"command '{command}'", ex)
203
+
204
+ # Optional imports that may not be installed: guard with try/except ImportError
205
+ try:
206
+ import pydevd
207
+ except ImportError:
208
+ print("Error: 'pydevd' is required for remote debugging.", file=sys.stderr)
209
+ return
210
+
211
+ # Silently ignore cleanup errors
212
+ try:
213
+ os.unlink(tmp_path)
214
+ except Exception:
215
+ pass
216
+
217
+ # Top-level entry points: catch BaseException and map to exit codes
218
+ try:
219
+ args = self.parse_args(argv)
220
+ self.handle_args(args)
221
+ except BaseException as e:
222
+ exit_code = self.handle_exception(e)
223
+ ```
224
+
225
+ ### Module Organization
226
+
227
+ 1. Module docstring (creation date + author)
228
+ 2. Imports (stdlib → third-party → local)
229
+ 3. Module-level constants
230
+ 4. Class definitions (one primary class per module)
231
+ 5. No `if __name__ == "__main__"` guard except in `basetest.py`
232
+
233
+ ---
234
+
235
+ ## Writing Tests
236
+
237
+ All test classes must subclass `Basetest`, not `unittest.TestCase` directly.
238
+
239
+ ```python
240
+ from basemkit.basetest import Basetest
241
+
242
+ class TestMyFeature(Basetest):
243
+ """Tests for my_feature."""
244
+
245
+ def setUp(self, debug=False, profile=True):
246
+ Basetest.setUp(self, debug=debug, profile=profile)
247
+ # additional setup here
248
+
249
+ def test_something(self):
250
+ # use self.debug for conditional diagnostic prints
251
+ result = my_function()
252
+ self.assertEqual(result, expected)
253
+ ```
254
+
255
+ Key `Basetest` utilities:
256
+
257
+ ```python
258
+ Basetest.inPublicCI() # True when running under GitHub Actions / Travis / Jenkins
259
+ Basetest.isUser("wf") # True if the current user matches
260
+ ```
261
+
262
+ `Basetest.setUp` creates a `Profiler` that automatically times each test and prints
263
+ elapsed time in `tearDown`.
264
+
265
+ ---
266
+
267
+ ## CI / CD
268
+
269
+ - **CI:** `.github/workflows/build.yml` — runs `scripts/install && scripts/test` on every push/PR to `main` (Python 3.12, ubuntu-latest). Env var `GHACTIONS=ACTIVE` is set.
270
+ - **CD:** `.github/workflows/upload-to-pypi.yml` — runs `hatch build` and publishes to PyPI via OIDC trusted publishing on GitHub release creation.
271
+ - No coverage reporting, no linting step, and no multi-version matrix in CI currently.
272
+
273
+ ---
274
+
275
+ ## Repository Layout
276
+
277
+ ```
278
+ basemkit/ # Main package (import as "basemkit")
279
+ tests/ # unittest-style tests; filenames match test_<module>.py
280
+ scripts/ # Shell scripts: blackisort, test, install, doc, release
281
+ docs/ # MkDocs source (material theme + mkdocstrings)
282
+ pyproject.toml # All project metadata, build config, and tool settings
283
+ mkdocs.yml # Documentation site config
284
+ ```
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pybasemkit
3
- Version: 0.2.0
3
+ Version: 0.2.2
4
4
  Summary: Python base module kit: YAML/JSON I/O, structured logging, CLI tooling, shell execution, and pydevd remote debug support.
5
5
  Project-URL: Home, https://github.com/WolfgangFahl/pybasemkit
6
6
  Project-URL: Documentation, https://wiki.bitplan.com/index.php/pybasemkit
@@ -0,0 +1 @@
1
+ __version__ = "0.2.2"
@@ -19,6 +19,7 @@ https://github.com/fabioz/PyDev.Debugger/blob/main/pydevd_file_utils.py
19
19
 
20
20
  @author: wf
21
21
  """
22
+
22
23
  import os
23
24
  import socket
24
25
  import sys
@@ -26,6 +27,7 @@ from argparse import Namespace
26
27
  from dataclasses import dataclass, field
27
28
  from typing import List, Tuple
28
29
 
30
+
29
31
  @dataclass
30
32
  class PathMapping:
31
33
  """
@@ -58,18 +60,21 @@ class PathMapping:
58
60
  the source code sits on your physical laptop.
59
61
 
60
62
  """
61
- remote: str # eclipse / vscode/ pycharm
63
+
64
+ remote: str # eclipse / vscode/ pycharm
62
65
  local: str # python execution environment path to be remotely debugged
63
66
 
67
+
64
68
  @dataclass
65
69
  class PathMappings:
66
70
  """
67
71
  Represents a collection of path mappings.
68
72
  """
73
+
69
74
  mappings: List[PathMapping] = field(default_factory=list)
70
75
 
71
76
  @classmethod
72
- def from_args(cls, remote_str: str, local_str: str) -> 'PathMappings':
77
+ def from_args(cls, remote_str: str, local_str: str) -> "PathMappings":
73
78
  """
74
79
  Parses comma-separated remote
75
80
  and local path strings into a PathMappings object.
@@ -81,16 +86,17 @@ class PathMappings:
81
86
  raise ValueError("debugRemotePath and debugLocalPath must have the same number of entries")
82
87
 
83
88
  mapping_list = [PathMapping(remote=r, local=l) for r, l in zip(remote_paths, local_paths)]
84
- path_mappings= cls(mappings=mapping_list)
89
+ path_mappings = cls(mappings=mapping_list)
85
90
  return path_mappings
86
91
 
87
92
  def as_tuple_list(self) -> List[Tuple[str, str]]:
88
93
  """
89
94
  Returns the mappings in the format required by pydevd (list of tuples).
90
95
  """
91
- tuple_list= [(m.remote, m.local) for m in self.mappings]
96
+ tuple_list = [(m.remote, m.local) for m in self.mappings]
92
97
  return tuple_list
93
98
 
99
+
94
100
  class RemoteDebugSetup:
95
101
  """
96
102
  Handles initialization and connection for pydevd remote debugging.
@@ -104,7 +110,7 @@ class RemoteDebugSetup:
104
110
  args (Namespace): Parsed CLI arguments containing debug flags.
105
111
  """
106
112
  self.args = args
107
- self.path_mappings=None
113
+ self.path_mappings = None
108
114
 
109
115
  def get_path_mappings(self):
110
116
  """
@@ -120,7 +126,6 @@ class RemoteDebugSetup:
120
126
  if remote_path and local_path:
121
127
  self.path_mappings = PathMappings.from_args(remote_path, local_path)
122
128
 
123
-
124
129
  def log(self, msg: str):
125
130
  """
126
131
  Print debug message to stderr if debug mode is enabled.
@@ -180,7 +185,7 @@ class RemoteDebugSetup:
180
185
  # Check if this is the bad call (single tuple with comma-separated strings)
181
186
  if len(paths) == 1 and isinstance(paths[0], tuple):
182
187
  remote, local = paths[0]
183
- if ',' in remote and ',' in local:
188
+ if "," in remote and "," in local:
184
189
  self.log("IGNORING bad setup_client_server_paths call with comma-separated strings")
185
190
  self.log(f"remote='{remote}'")
186
191
  self.log(f"local='{local}'")
@@ -197,7 +202,7 @@ class RemoteDebugSetup:
197
202
  pydevd_file_utils.setup_client_server_paths = fixed_setup
198
203
 
199
204
  # https://github.com/fabioz/PyDev.Debugger/blob/main/pydevd_file_utils.py
200
- tuple_list=self.path_mappings.as_tuple_list()
205
+ tuple_list = self.path_mappings.as_tuple_list()
201
206
  pydevd_file_utils.setup_client_server_paths(tuple_list)
202
207
 
203
208
  def print_debug_info(self):
@@ -214,4 +219,4 @@ class RemoteDebugSetup:
214
219
  if self.path_mappings:
215
220
  for m in self.path_mappings.mappings:
216
221
  marker = "✅" if os.path.exists(m.local) else "❌"
217
- self.log(f"PATH MAP: Remote (IDE)='{m.remote}' <-> Local='{m.local}' {marker}")
222
+ self.log(f"PATH MAP: Remote (IDE)='{m.remote}' <-> Local='{m.local}' {marker}")
@@ -11,7 +11,7 @@ import sys
11
11
  import threading
12
12
  from argparse import Namespace
13
13
  from pathlib import Path
14
- from typing import Dict, List
14
+ from typing import Callable, Dict, List, Optional
15
15
 
16
16
 
17
17
  class ShellResult:
@@ -38,13 +38,22 @@ class ShellResult:
38
38
  class StreamTee:
39
39
  """
40
40
  Tees a single input stream to both a mirror and a capture buffer.
41
+ An optional per-line callback is invoked for each line after buffering.
41
42
  """
42
43
 
43
- def __init__(self, source, mirror, buffer, tee=True):
44
+ def __init__(
45
+ self,
46
+ source,
47
+ mirror,
48
+ buffer,
49
+ tee=True,
50
+ callback: Optional[Callable[[str], None]] = None,
51
+ ):
44
52
  self.source = source
45
53
  self.mirror = mirror
46
54
  self.buffer = buffer
47
55
  self.tee = tee
56
+ self.callback = callback
48
57
  self.thread = threading.Thread(target=self._run, daemon=True)
49
58
 
50
59
  def _run(self):
@@ -53,6 +62,8 @@ class StreamTee:
53
62
  self.mirror.write(line)
54
63
  self.mirror.flush()
55
64
  self.buffer.write(line)
65
+ if self.callback:
66
+ self.callback(line)
56
67
  self.source.close()
57
68
 
58
69
  def start(self):
@@ -92,13 +103,32 @@ class StdTee:
92
103
  """
93
104
  Manages teeing for both stdout and stderr using StreamTee instances.
94
105
  Captures output in instance variables.
106
+ Optional per-line callbacks are invoked for each stdout/stderr line.
95
107
  """
96
108
 
97
- def __init__(self, process, tee=True):
109
+ def __init__(
110
+ self,
111
+ process,
112
+ tee=True,
113
+ stdout_callback: Optional[Callable[[str], None]] = None,
114
+ stderr_callback: Optional[Callable[[str], None]] = None,
115
+ ):
98
116
  self.stdout_buffer = io.StringIO()
99
117
  self.stderr_buffer = io.StringIO()
100
- self.out_tee = StreamTee(process.stdout, sys.stdout, self.stdout_buffer, tee)
101
- self.err_tee = StreamTee(process.stderr, sys.stderr, self.stderr_buffer, tee)
118
+ self.out_tee = StreamTee(
119
+ process.stdout,
120
+ sys.stdout,
121
+ self.stdout_buffer,
122
+ tee,
123
+ callback=stdout_callback,
124
+ )
125
+ self.err_tee = StreamTee(
126
+ process.stderr,
127
+ sys.stderr,
128
+ self.stderr_buffer,
129
+ tee,
130
+ callback=stderr_callback,
131
+ )
102
132
 
103
133
  def start(self):
104
134
  self.out_tee.start()
@@ -109,12 +139,23 @@ class StdTee:
109
139
  self.err_tee.join()
110
140
 
111
141
  @classmethod
112
- def run(cls, process, tee=True):
142
+ def run(
143
+ cls,
144
+ process,
145
+ tee=True,
146
+ stdout_callback: Optional[Callable[[str], None]] = None,
147
+ stderr_callback: Optional[Callable[[str], None]] = None,
148
+ ):
113
149
  """
114
150
  Run teeing and capture for the given process.
115
151
  Returns a StdTee instance with stdout/stderr captured.
116
152
  """
117
- std_tee = cls(process, tee=tee)
153
+ std_tee = cls(
154
+ process,
155
+ tee=tee,
156
+ stdout_callback=stdout_callback,
157
+ stderr_callback=stderr_callback,
158
+ )
118
159
  std_tee.start()
119
160
  std_tee.join()
120
161
  return std_tee
@@ -181,15 +222,30 @@ class Shell:
181
222
  shell = cls(profile=profile)
182
223
  return shell
183
224
 
184
- def run(self, cmd: str, text: bool = True, debug: bool = False, tee: bool = False) -> subprocess.CompletedProcess:
225
+ def run(
226
+ self,
227
+ cmd: str,
228
+ text: bool = True,
229
+ encoding: str = "utf-8",
230
+ errors: str = "replace",
231
+ debug: bool = False,
232
+ tee: bool = False,
233
+ stdout_callback: Optional[Callable[[str], None]] = None,
234
+ stderr_callback: Optional[Callable[[str], None]] = None,
235
+ ) -> subprocess.CompletedProcess:
185
236
  """
186
237
  Run command with profile, always capturing output and optionally teeing it.
187
238
 
188
239
  Args:
189
240
  cmd: Command to run
190
241
  text: Text mode for subprocess I/O
242
+ encoding: Character encoding for subprocess I/O (default: utf-8)
243
+ errors: Error handling for decoding (default: replace — bad bytes become U+FFFD
244
+ instead of raising UnicodeDecodeError; pass 'strict' to restore old behaviour)
191
245
  debug: Print the command to be run
192
246
  tee: If True, also print output live while capturing
247
+ stdout_callback: Optional callable invoked with each stdout line as it is produced
248
+ stderr_callback: Optional callable invoked with each stderr line as it is produced
193
249
 
194
250
  Returns:
195
251
  subprocess.CompletedProcess
@@ -204,9 +260,16 @@ class Shell:
204
260
  stdout=subprocess.PIPE,
205
261
  stderr=subprocess.PIPE,
206
262
  text=text,
263
+ encoding=encoding,
264
+ errors=errors,
207
265
  )
208
266
 
209
- std_tee = StdTee.run(popen_process, tee=tee)
267
+ std_tee = StdTee.run(
268
+ popen_process,
269
+ tee=tee,
270
+ stdout_callback=stdout_callback,
271
+ stderr_callback=stderr_callback,
272
+ )
210
273
  returncode = popen_process.wait()
211
274
 
212
275
  process = subprocess.CompletedProcess(
@@ -40,7 +40,7 @@ from collections.abc import Iterable, Mapping
40
40
  from dataclasses import asdict, dataclass, is_dataclass
41
41
  from datetime import date, datetime
42
42
  from pathlib import Path
43
- from typing import Any, Generic, TextIO, Type, TypeVar, Union
43
+ from typing import Any, Generic, Optional, TextIO, Tuple, Type, TypeVar, Union
44
44
 
45
45
  import yaml
46
46
  from dacite import from_dict
@@ -107,6 +107,40 @@ class YamlAble(Generic[T]):
107
107
  self._yaml_dumper.add_representer(type(None), self.represent_none)
108
108
  self._yaml_dumper.add_representer(str, self.represent_literal)
109
109
 
110
+ @staticmethod
111
+ def _split_yaml_header(text: str) -> Tuple[str, str]:
112
+ """
113
+ Split raw YAML text into a leading comment block and the data body.
114
+
115
+ Scans lines from the top; a line belongs to the header if it starts
116
+ with '#' or is blank (blank lines between comment lines are kept).
117
+ Scanning stops at the first line that is neither a comment nor blank.
118
+ Trailing blank lines are trimmed from the header and prepended to the
119
+ body so that the body remains valid standalone YAML.
120
+
121
+ Args:
122
+ text: Raw YAML file content.
123
+
124
+ Returns:
125
+ A tuple (header, body) where header is the extracted comment block
126
+ (including a trailing newline) and body is the remainder.
127
+ If no leading comments are found, header is '' and body is text.
128
+ """
129
+ lines = text.splitlines(keepends=True)
130
+ split_idx = 0
131
+ for i, line in enumerate(lines):
132
+ stripped = line.strip()
133
+ if stripped.startswith("#") or stripped == "":
134
+ split_idx = i + 1
135
+ else:
136
+ break
137
+ # Trim trailing blank lines from the header, move them to the body
138
+ while split_idx > 0 and lines[split_idx - 1].strip() == "":
139
+ split_idx -= 1
140
+ header = "".join(lines[:split_idx])
141
+ body = "".join(lines[split_idx:])
142
+ return header, body
143
+
110
144
  def represent_none(self, _, __) -> yaml.Node:
111
145
  """
112
146
  Custom representer for ignoring None values in the YAML output.
@@ -169,33 +203,42 @@ class YamlAble(Generic[T]):
169
203
  return instance
170
204
 
171
205
  @classmethod
172
- def load_from_yaml_stream(cls: Type[T], stream: TextIO) -> T:
206
+ def load_from_yaml_stream(cls: Type[T], stream: TextIO, with_header_comment: bool = False) -> T:
173
207
  """
174
208
  Loads a dataclass instance from a YAML stream.
175
209
 
176
210
  Args:
177
211
  stream (TextIO): The input stream containing YAML data.
212
+ with_header_comment: If True, extract any leading comment block from the
213
+ raw text and store it on the instance as ``_yaml_header`` so it can
214
+ be re-emitted by :meth:`save_to_yaml_stream`.
178
215
 
179
216
  Returns:
180
217
  T: An instance of the dataclass.
181
218
  """
182
219
  yaml_str: str = stream.read()
220
+ if with_header_comment:
221
+ header, yaml_str = cls._split_yaml_header(yaml_str)
183
222
  instance: T = cls.from_yaml(yaml_str)
223
+ if with_header_comment:
224
+ instance._yaml_header = header if header else None
184
225
  return instance
185
226
 
186
227
  @classmethod
187
- def load_from_yaml_file(cls: Type[T], filename: str) -> T:
228
+ def load_from_yaml_file(cls: Type[T], filename: str, with_header_comment: bool = False) -> T:
188
229
  """
189
230
  Loads a dataclass instance from a YAML file.
190
231
 
191
232
  Args:
192
233
  filename (str): The path to the YAML file.
234
+ with_header_comment: If True, preserve any leading comment block found in
235
+ the file; see :meth:`load_from_yaml_stream` for details.
193
236
 
194
237
  Returns:
195
238
  T: An instance of the dataclass.
196
239
  """
197
240
  with open(filename, "r") as file:
198
- return cls.load_from_yaml_stream(file)
241
+ return cls.load_from_yaml_stream(file, with_header_comment=with_header_comment)
199
242
 
200
243
  @classmethod
201
244
  def load_from_yaml_url(cls: Type[T], url: str) -> T:
@@ -212,26 +255,35 @@ class YamlAble(Generic[T]):
212
255
  instance: T = cls.from_yaml(yaml_str)
213
256
  return instance
214
257
 
215
- def save_to_yaml_stream(self, file: TextIO):
258
+ def save_to_yaml_stream(self, file: TextIO, with_header_comment: bool = False):
216
259
  """
217
260
  Saves the current dataclass instance to the given YAML stream.
218
261
 
219
262
  Args:
220
263
  file (TextIO): The stream to which YAML content will be saved.
264
+ with_header_comment: If True and ``self._yaml_header`` is set (populated
265
+ by a previous :meth:`load_from_yaml_stream` call with the same flag),
266
+ the comment block is written before the YAML body. If no header is
267
+ stored the flag is silently a no-op.
221
268
  """
222
269
  yaml_content: str = self.to_yaml()
270
+ header: Optional[str] = getattr(self, "_yaml_header", None)
271
+ if with_header_comment and header:
272
+ file.write(header)
273
+ file.write("\n")
223
274
  file.write(yaml_content)
224
275
 
225
- def save_to_yaml_file(self, filename: str):
276
+ def save_to_yaml_file(self, filename: str, with_header_comment: bool = False):
226
277
  """
227
278
  Saves the current dataclass instance to a YAML file.
228
279
 
229
280
  Args:
230
281
  filename (str): The path where the YAML file will be saved.
282
+ with_header_comment: If True, re-emit any leading comment block that was
283
+ captured during loading; see :meth:`save_to_yaml_stream` for details.
231
284
  """
232
-
233
285
  with open(filename, "w", encoding="utf-8") as file:
234
- self.save_to_yaml_stream(file)
286
+ self.save_to_yaml_stream(file, with_header_comment=with_header_comment)
235
287
 
236
288
  @classmethod
237
289
  def load_from_json_file(cls: Type[T], filename: Union[str, Path]) -> T:
@@ -7,8 +7,7 @@ Created on 2025-112-29
7
7
  from argparse import Namespace
8
8
 
9
9
  from basemkit.basetest import Basetest
10
-
11
- from basemkit.remotedebug import RemoteDebugSetup, PathMappings, PathMapping
10
+ from basemkit.remotedebug import PathMapping, PathMappings, RemoteDebugSetup
12
11
 
13
12
 
14
13
  class TestRemoteDebugSetup(Basetest):
@@ -28,10 +27,10 @@ class TestRemoteDebugSetup(Basetest):
28
27
  debugPort=5678,
29
28
  debugRemotePath="/remote/app",
30
29
  debugLocalPath="/local/app",
31
- debug=True # Enable internal logging
30
+ debug=True, # Enable internal logging
32
31
  )
33
32
 
34
- debug_setup=RemoteDebugSetup(args=args)
33
+ debug_setup = RemoteDebugSetup(args=args)
35
34
  debug_setup.get_path_mappings()
36
35
  debug_setup.print_debug_info()
37
36
 
@@ -54,24 +53,20 @@ class TestRemoteDebugSetup(Basetest):
54
53
  self.assertEqual("/app/lib", pm.mappings[1].remote)
55
54
  self.assertEqual("C:\\Users\\Lib", pm.mappings[1].local)
56
55
 
57
-
58
56
  def test_path_mappings_mismatch(self):
59
57
  """
60
58
  Test that unequal list lengths raise an error.
61
59
  """
62
60
  remote_str = "/app/src"
63
- local_str = "C:\\src,C:\\lib" # Two locals, one remote
61
+ local_str = "C:\\src,C:\\lib" # Two locals, one remote
64
62
 
65
63
  with self.assertRaises(ValueError):
66
64
  PathMappings.from_args(remote_str, local_str)
67
65
 
68
-
69
66
  def test_tuple_conversion(self):
70
67
  """
71
68
  Test conversion to list of tuples for pydevd.
72
69
  """
73
- pm = PathMappings(mappings=[
74
- PathMapping(remote="/r", local="/l")
75
- ])
70
+ pm = PathMappings(mappings=[PathMapping(remote="/r", local="/l")])
76
71
  expected = [("/r", "/l")]
77
72
  self.assertEqual(expected, pm.as_tuple_list())
@@ -0,0 +1,78 @@
1
+ """
2
+ Created on 2025-05-14
3
+
4
+ @author: wf
5
+ """
6
+
7
+ from typing import List
8
+
9
+ from basemkit.basetest import Basetest
10
+ from basemkit.shell import Shell
11
+
12
+
13
+ class TestShell(Basetest):
14
+ """
15
+ test shell commands
16
+ """
17
+
18
+ def setUp(self, debug=False, profile=True):
19
+ Basetest.setUp(self, debug=debug, profile=profile)
20
+
21
+ def testShell(self):
22
+ """
23
+ test the shell handling
24
+ """
25
+ shell = Shell()
26
+ for cmd, expected in [
27
+ # ("pwd", "test"),
28
+ # ("which git", "git"),
29
+ ("echo $PATH", "bin"),
30
+ # ("docker ps", "CONTAINER ID"),
31
+ # ("which soffice", "soffice"),
32
+ ]:
33
+ p = shell.run(cmd, tee=self.debug)
34
+ if self.debug:
35
+ print(p)
36
+ print(p.stdout)
37
+ self.assertEqual(0, p.returncode)
38
+ self.assertIn(expected, p.stdout)
39
+
40
+ def test_encoding_errors(self):
41
+ """
42
+ Test that non-UTF-8 bytes in subprocess output do not raise UnicodeDecodeError.
43
+ With errors='replace' (the new default) bad bytes are replaced by U+FFFD.
44
+ """
45
+ shell = Shell()
46
+ # printf emits a raw 0xdf byte which is invalid UTF-8 (it is valid Latin-1: ß)
47
+ p = shell.run("printf '\\xdf'")
48
+ if self.debug:
49
+ print(repr(p.stdout))
50
+ # Must not raise; returncode must be 0
51
+ self.assertEqual(0, p.returncode)
52
+ # The replacement character U+FFFD must appear in the decoded output
53
+ self.assertIn("\ufffd", p.stdout)
54
+
55
+ def test_callbacks(self):
56
+ """
57
+ Test that stdout_callback and stderr_callback are invoked for each line.
58
+ """
59
+ shell = Shell()
60
+ stdout_lines: List[str] = []
61
+ stderr_lines: List[str] = []
62
+
63
+ p = shell.run(
64
+ "printf 'line1\\nline2\\nline3\\n' && printf 'err1\\nerr2\\n' >&2",
65
+ stdout_callback=lambda line: stdout_lines.append(line),
66
+ stderr_callback=lambda line: stderr_lines.append(line),
67
+ )
68
+ if self.debug:
69
+ print("stdout_lines:", stdout_lines)
70
+ print("stderr_lines:", stderr_lines)
71
+
72
+ self.assertEqual(0, p.returncode)
73
+ # Callbacks receive lines including the trailing newline from readline()
74
+ self.assertEqual(["line1\n", "line2\n", "line3\n"], stdout_lines)
75
+ self.assertEqual(["err1\n", "err2\n"], stderr_lines)
76
+ # Captured buffers must also be consistent
77
+ self.assertEqual("line1\nline2\nline3\n", p.stdout)
78
+ self.assertEqual("err1\nerr2\n", p.stderr)
@@ -131,6 +131,72 @@ class TestYamlAble(Basetest):
131
131
  # Clean up the temp file
132
132
  os.remove(temp_file.name)
133
133
 
134
+ def test_header_comment_preserved(self) -> None:
135
+ """
136
+ Test that a leading YAML comment block survives a full file round-trip
137
+ when with_header_comment=True is used on both load and save.
138
+ """
139
+ yaml_with_header = (
140
+ "# This is a project config file\n"
141
+ "# Generated by the system — do not edit manually\n"
142
+ "\n"
143
+ "name: Example\n"
144
+ "id: 123\n"
145
+ "flag: true\n"
146
+ )
147
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False, encoding="utf-8") as f:
148
+ src_path = f.name
149
+ f.write(yaml_with_header)
150
+
151
+ try:
152
+ # Load with header preservation
153
+ loaded = MockDataClass.load_from_yaml_file(src_path, with_header_comment=True)
154
+ if self.debug:
155
+ print(f"_yaml_header: {repr(loaded._yaml_header)}")
156
+ self.assertIsNotNone(loaded._yaml_header, "_yaml_header should be set")
157
+ self.assertIn("project config file", loaded._yaml_header)
158
+ self.assertIn("do not edit manually", loaded._yaml_header)
159
+ self.assertEqual(loaded.name, "Example")
160
+ self.assertEqual(loaded.id, 123)
161
+
162
+ # Modify a field and save back with header
163
+ loaded.id = 999
164
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False, encoding="utf-8") as f:
165
+ dst_path = f.name
166
+ loaded.save_to_yaml_file(dst_path, with_header_comment=True)
167
+
168
+ with open(dst_path, "r", encoding="utf-8") as f:
169
+ result = f.read()
170
+ if self.debug:
171
+ print(result)
172
+
173
+ # Header must be at the top
174
+ self.assertTrue(
175
+ result.startswith("# This is a project config file"),
176
+ "Header must be first",
177
+ )
178
+ self.assertIn("do not edit manually", result)
179
+ # Modified data must be present
180
+ self.assertIn("id: 999", result)
181
+ self.assertIn("name: Example", result)
182
+
183
+ # Saving without flag must NOT include the header
184
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False, encoding="utf-8") as f:
185
+ no_header_path = f.name
186
+ loaded.save_to_yaml_file(no_header_path, with_header_comment=False)
187
+ with open(no_header_path, "r", encoding="utf-8") as f:
188
+ no_header_result = f.read()
189
+ self.assertFalse(
190
+ no_header_result.startswith("#"),
191
+ "Header must not appear when with_header_comment=False",
192
+ )
193
+ finally:
194
+ for path in [src_path, dst_path, no_header_path]:
195
+ try:
196
+ os.remove(path)
197
+ except Exception:
198
+ pass
199
+
134
200
  def test_load_with_none_optional_field(self) -> None:
135
201
  """
136
202
  Test that loading YAML with a None value for an Optional[str] field works without error.
@@ -0,0 +1,324 @@
1
+ # yamlable — YamlAble & lod_storable
2
+
3
+ Module: `basemkit/yamlable.py`
4
+
5
+ Provides YAML and JSON serialization/deserialization for Python dataclasses via
6
+ the `YamlAble` base class and the `@lod_storable` decorator.
7
+
8
+ ---
9
+
10
+ ## Overview
11
+
12
+ `YamlAble` is a generic mixin that adds YAML and JSON I/O to any `@dataclass`.
13
+ It handles:
14
+
15
+ - Serializing to YAML with block-scalar strings and optional omission of `None`
16
+ values and underscore-prefixed attributes
17
+ - Deserializing from YAML strings, streams, files, and URLs
18
+ - Serializing to / deserializing from JSON (delegating to `dataclasses-json`)
19
+ - Recursive filtering of `None`, empty collections, and private attributes
20
+ before serialization
21
+
22
+ The `@lod_storable` decorator is a one-shot convenience that applies
23
+ `@dataclass`, `@dataclass_json`, **and** `YamlAble` inheritance to a plain
24
+ class with a single annotation.
25
+
26
+ ---
27
+
28
+ ## Quick Start
29
+
30
+ ### Using `@lod_storable` (recommended)
31
+
32
+ ```python
33
+ from typing import Optional
34
+ from basemkit.yamlable import lod_storable
35
+
36
+ @lod_storable
37
+ class Person:
38
+ name: str
39
+ age: int
40
+ email: Optional[str] = None
41
+
42
+ # Serialize
43
+ p = Person(name="Alice", age=30)
44
+ print(p.to_yaml())
45
+ # name: Alice
46
+ # age: 30
47
+
48
+ # Deserialize
49
+ p2 = Person.from_yaml("name: Bob\nage: 25\n")
50
+ print(p2.name) # Bob
51
+ ```
52
+
53
+ ### Using `YamlAble` directly
54
+
55
+ When you need explicit control over the MRO or already have `@dataclass` and
56
+ `@dataclass_json` applied:
57
+
58
+ ```python
59
+ from dataclasses import dataclass
60
+ from dataclasses_json import dataclass_json
61
+ from basemkit.yamlable import YamlAble
62
+
63
+ @dataclass_json
64
+ @dataclass
65
+ class Config(YamlAble):
66
+ host: str = "localhost"
67
+ port: int = 8080
68
+ ```
69
+
70
+ ---
71
+
72
+ ## `@lod_storable` Decorator
73
+
74
+ ```python
75
+ def lod_storable(cls):
76
+ ```
77
+
78
+ Transforms a plain class into a fully capable storable dataclass by:
79
+
80
+ 1. Applying `@dataclass` — adds `__init__`, `__repr__`, `__eq__`, etc.
81
+ 2. Applying `@dataclass_json` — adds `from_json` / `to_json` / `from_dict` /
82
+ `to_dict`
83
+ 3. Creating an inner `LoDStorable` class that inherits from both `YamlAble`
84
+ and the decorated class, then restoring `__name__`, `__doc__`, and
85
+ `__module__` so the class identity is transparent to serializers and
86
+ module lookups.
87
+
88
+ The name *LoDStorable* stands for **List-of-Dicts Storable** — the pattern
89
+ used throughout pyLoDStorage for tabular in-memory data.
90
+
91
+ ---
92
+
93
+ ## `YamlAble` Class
94
+
95
+ ```python
96
+ class YamlAble(Generic[T]):
97
+ ```
98
+
99
+ ### YAML Serialization
100
+
101
+ #### `to_yaml`
102
+
103
+ ```python
104
+ def to_yaml(
105
+ self,
106
+ ignore_none: bool = True,
107
+ ignore_underscore: bool = True,
108
+ allow_unicode: bool = True,
109
+ sort_keys: bool = False,
110
+ ) -> str:
111
+ ```
112
+
113
+ Converts the dataclass instance to a YAML string.
114
+
115
+ | Parameter | Default | Effect |
116
+ |---|---|---|
117
+ | `ignore_none` | `True` | Omit keys whose value is `None` |
118
+ | `ignore_underscore` | `True` | Omit keys whose name starts with `_` |
119
+ | `allow_unicode` | `True` | Emit unicode characters unescaped |
120
+ | `sort_keys` | `False` | Alphabetically sort mapping keys |
121
+
122
+ Multi-line strings are automatically rendered in **block scalar style** (`|`),
123
+ preserving newlines readably.
124
+
125
+ ```python
126
+ obj.to_yaml()
127
+ # description: |-
128
+ # First line
129
+ # Second line
130
+ # name: Example
131
+ ```
132
+
133
+ #### `save_to_yaml_stream` / `save_to_yaml_file`
134
+
135
+ ```python
136
+ def save_to_yaml_stream(self, file: TextIO) -> None:
137
+ def save_to_yaml_file(self, filename: str) -> None:
138
+ ```
139
+
140
+ Write YAML output to an open stream or a file path (UTF-8).
141
+
142
+ ```python
143
+ obj.save_to_yaml_file("/tmp/config.yaml")
144
+ ```
145
+
146
+ ---
147
+
148
+ ### YAML Deserialization
149
+
150
+ All four class methods return a fully constructed instance of the calling
151
+ class (`cls`).
152
+
153
+ #### `from_yaml`
154
+
155
+ ```python
156
+ @classmethod
157
+ def from_yaml(cls: Type[T], yaml_str: str) -> T:
158
+ ```
159
+
160
+ Parses a YAML string and reconstructs the dataclass via `dacite.from_dict`.
161
+ Handles `null` values in YAML correctly for `Optional` fields.
162
+
163
+ ```python
164
+ instance = MyClass.from_yaml(yaml_string)
165
+ ```
166
+
167
+ #### `load_from_yaml_stream`
168
+
169
+ ```python
170
+ @classmethod
171
+ def load_from_yaml_stream(cls: Type[T], stream: TextIO) -> T:
172
+ ```
173
+
174
+ Reads the entire stream and delegates to `from_yaml`.
175
+
176
+ #### `load_from_yaml_file`
177
+
178
+ ```python
179
+ @classmethod
180
+ def load_from_yaml_file(cls: Type[T], filename: str) -> T:
181
+ ```
182
+
183
+ Opens `filename` in text mode and delegates to `load_from_yaml_stream`.
184
+
185
+ ```python
186
+ config = Config.load_from_yaml_file("config.yaml")
187
+ ```
188
+
189
+ #### `load_from_yaml_url`
190
+
191
+ ```python
192
+ @classmethod
193
+ def load_from_yaml_url(cls: Type[T], url: str) -> T:
194
+ ```
195
+
196
+ Fetches the URL with `urllib.request` and delegates to `from_yaml`.
197
+ Raises `Exception` if the HTTP status is not 200.
198
+
199
+ ---
200
+
201
+ ### JSON Serialization / Deserialization
202
+
203
+ JSON support is provided by `dataclasses-json` and is available on any class
204
+ decorated with `@lod_storable` (or `@dataclass_json` directly). `YamlAble`
205
+ adds file and URL convenience wrappers.
206
+
207
+ #### `save_to_json_file`
208
+
209
+ ```python
210
+ def save_to_json_file(self, filename: str, **kwargs: Any) -> None:
211
+ ```
212
+
213
+ Serializes to JSON and writes to `filename` (UTF-8). Extra `**kwargs` are
214
+ forwarded to `to_json()`.
215
+
216
+ #### `load_from_json_file`
217
+
218
+ ```python
219
+ @classmethod
220
+ def load_from_json_file(cls: Type[T], filename: Union[str, Path]) -> T:
221
+ ```
222
+
223
+ Reads a JSON file and reconstructs the instance via `from_json`.
224
+
225
+ #### `load_from_json_url`
226
+
227
+ ```python
228
+ @classmethod
229
+ def load_from_json_url(cls: Type[T], url: str) -> T:
230
+ ```
231
+
232
+ Fetches JSON from a URL and reconstructs the instance.
233
+
234
+ ---
235
+
236
+ ### Filtering Helper
237
+
238
+ #### `remove_ignored_values`
239
+
240
+ ```python
241
+ @classmethod
242
+ def remove_ignored_values(
243
+ cls,
244
+ value: Any,
245
+ ignore_none: bool = True,
246
+ ignore_underscore: bool = False,
247
+ ignore_empty: bool = True,
248
+ ) -> Any:
249
+ ```
250
+
251
+ Recursively walks a dict / list structure and removes entries that match the
252
+ active ignore flags. Called internally by `to_yaml` but also usable standalone.
253
+
254
+ | Flag | Default | Removes |
255
+ |---|---|---|
256
+ | `ignore_none` | `True` | Keys with `None` values |
257
+ | `ignore_underscore` | `False` | Keys whose name starts with `_` |
258
+ | `ignore_empty` | `True` | Empty dicts, lists, sets, tuples |
259
+
260
+ Strings and bytes are treated as scalars, not iterables, and are never
261
+ removed by `ignore_empty`.
262
+
263
+ ---
264
+
265
+ ### `from_dict2`
266
+
267
+ ```python
268
+ @classmethod
269
+ def from_dict2(cls: Type[T], data: dict) -> T:
270
+ ```
271
+
272
+ Alternative deserializer using `dacite.from_dict` instead of
273
+ `dataclasses-json`. Returns `None` if `data` is falsy. Useful when
274
+ `dataclasses-json` is not available or when `dacite`'s strict type coercion
275
+ is preferred.
276
+
277
+ ---
278
+
279
+ ## `DateConvert` Helper
280
+
281
+ ```python
282
+ class DateConvert:
283
+ @classmethod
284
+ def iso_date_to_datetime(cls, iso_date: str) -> date:
285
+ ```
286
+
287
+ Converts an ISO 8601 date string (`"YYYY-MM-DD"`) to a `datetime.date`
288
+ object. Returns `None` if `iso_date` is falsy. Intended as a `dacite`
289
+ type-hook for fields typed as `date`.
290
+
291
+ ---
292
+
293
+ ## Internal Representers
294
+
295
+ These are set up automatically by `_yaml_setup()` on the first call to
296
+ `to_yaml()` and should not need to be called directly.
297
+
298
+ | Method | Purpose |
299
+ |---|---|
300
+ | `represent_none` | Renders `None` as an empty YAML scalar (`""`) rather than `null` |
301
+ | `represent_literal` | Renders strings containing `\n` in block scalar style (`\|`) |
302
+
303
+ ---
304
+
305
+ ## Dependencies
306
+
307
+ | Package | Role |
308
+ |---|---|
309
+ | `PyYAML` | YAML parsing and emission |
310
+ | `dacite` | Strict dict-to-dataclass construction (`from_dict2`) |
311
+ | `dataclasses-json` | JSON serialization (`from_json`, `to_json`, `from_dict`) |
312
+
313
+ ---
314
+
315
+ ## Notes
316
+
317
+ - `YamlAble` requires the instance to be a dataclass (`is_dataclass(self)` is
318
+ asserted in `_yaml_setup`). A `ValueError` is raised otherwise.
319
+ - The YAML dumper is cached on `self._yaml_dumper` after the first setup call;
320
+ custom representers are registered only once per instance.
321
+ - URL loading uses the stdlib `urllib.request` — no `requests` dependency.
322
+ - `lod_storable` preserves `__name__`, `__qualname__`, `__doc__`, and
323
+ `__module__` so `pickle`, `dacite`, and `dataclasses-json` can resolve the
324
+ class correctly.
@@ -1 +0,0 @@
1
- __version__ = "0.2.0"
@@ -1,36 +0,0 @@
1
- """
2
- Created on 2025-05-14
3
-
4
- @author: wf
5
- """
6
-
7
- from basemkit.basetest import Basetest
8
- from basemkit.shell import Shell
9
-
10
-
11
- class TestShell(Basetest):
12
- """
13
- test shell commands
14
- """
15
-
16
- def setUp(self, debug=True, profile=True):
17
- Basetest.setUp(self, debug=debug, profile=profile)
18
-
19
- def testShell(self):
20
- """
21
- test the shell handling
22
- """
23
- shell = Shell()
24
- for cmd, expected in [
25
- # ("pwd", "test"),
26
- # ("which git", "git"),
27
- ("echo $PATH", "bin"),
28
- # ("docker ps", "CONTAINER ID"),
29
- # ("which soffice", "soffice"),
30
- ]:
31
- p = shell.run(cmd, tee=self.debug)
32
- if self.debug:
33
- print(p)
34
- print(p.stdout)
35
- self.assertEqual(0, p.returncode)
36
- self.assertIn(expected, p.stdout)
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes