mm-std 0.6.0__tar.gz → 0.7.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. {mm_std-0.6.0 → mm_std-0.7.0}/.claude/settings.local.json +10 -1
  2. mm_std-0.7.0/CLAUDE.md +18 -0
  3. mm_std-0.7.0/PKG-INFO +4 -0
  4. {mm_std-0.6.0 → mm_std-0.7.0}/README.md +27 -23
  5. {mm_std-0.6.0 → mm_std-0.7.0}/justfile +1 -0
  6. mm_std-0.7.0/pyproject.toml +70 -0
  7. mm_std-0.7.0/src/mm_std/__init__.py +21 -0
  8. {mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/date_utils.py +19 -16
  9. {mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/dict_utils.py +17 -15
  10. {mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/json_utils.py +13 -10
  11. {mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/random_utils.py +33 -14
  12. {mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/str_utils.py +4 -6
  13. {mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/subprocess_utils.py +26 -9
  14. mm_std-0.7.0/tests/__init__.py +1 -0
  15. mm_std-0.7.0/tests/mm_std/__init__.py +1 -0
  16. mm_std-0.7.0/tests/mm_std/test_date_utils.py +165 -0
  17. mm_std-0.7.0/tests/mm_std/test_dict_utils.py +198 -0
  18. mm_std-0.7.0/tests/mm_std/test_json_utils.py +233 -0
  19. mm_std-0.7.0/tests/mm_std/test_random_utils.py +126 -0
  20. mm_std-0.7.0/tests/mm_std/test_str_utils.py +265 -0
  21. mm_std-0.7.0/tests/mm_std/test_subprocess_utils.py +110 -0
  22. mm_std-0.7.0/uv.lock +488 -0
  23. mm_std-0.6.0/CLAUDE.md +0 -13
  24. mm_std-0.6.0/PKG-INFO +0 -4
  25. mm_std-0.6.0/pyproject.toml +0 -68
  26. mm_std-0.6.0/src/mm_std/__init__.py +0 -24
  27. mm_std-0.6.0/tests/__init__.py +0 -0
  28. mm_std-0.6.0/tests/test_date_utils.py +0 -174
  29. mm_std-0.6.0/tests/test_dict_utils.py +0 -179
  30. mm_std-0.6.0/tests/test_json_utils.py +0 -205
  31. mm_std-0.6.0/tests/test_random_utils.py +0 -122
  32. mm_std-0.6.0/tests/test_str_utils.py +0 -207
  33. mm_std-0.6.0/tests/test_subprocess_utils.py +0 -132
  34. mm_std-0.6.0/uv.lock +0 -393
  35. {mm_std-0.6.0 → mm_std-0.7.0}/.gitignore +0 -0
  36. {mm_std-0.6.0 → mm_std-0.7.0}/.pre-commit-config.yaml +0 -0
  37. {mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/py.typed +0 -0
{mm_std-0.6.0 → mm_std-0.7.0}/.claude/settings.local.json
@@ -8,7 +8,16 @@
  "mcp__ide__getDiagnostics",
  "Read(//Users/m/.vscode/extensions/ms-python.vscode-pylance-2025.7.1/dist/typeshed-fallback/stdlib/json/**)",
  "Bash(just lint)",
- "Bash(python:*)"
+ "Bash(python:*)",
+ "Bash(wc:*)",
+ "Bash(pytest:*)",
+ "Bash(just test)",
+ "Bash(do echo \"=== $file ===\")",
+ "Bash(just audit:*)",
+ "Bash(test:*)",
+ "Bash(just test:*)",
+ "Bash(uv sync:*)",
+ "Bash(just:*)"
  ],
  "deny": []
  }
mm_std-0.7.0/CLAUDE.md ADDED
@@ -0,0 +1,18 @@
+ # AI Agent Start Guide
+
+ ## Mandatory Rules (external)
+ These files are REQUIRED. Read them fully and follow all rules.
+ - `~/.claude/shared-rules/general.md`
+ - `~/.claude/shared-rules/python.md`
+
+ ## Project Reading (context)
+ These files are REQUIRED for project understanding.
+ - `README.md`
+
+ ## Preflight (mandatory)
+ Before your first response:
+ 1. Read all files listed above.
+ 2. Do not answer until all are read.
+ 3. In your first reply, list every file you have read from this document.
+
+ Failure to follow this protocol is considered an error.
mm_std-0.7.0/PKG-INFO ADDED
@@ -0,0 +1,4 @@
+ Metadata-Version: 2.4
+ Name: mm-std
+ Version: 0.7.0
+ Requires-Python: >=3.14
{mm_std-0.6.0 → mm_std-0.7.0}/README.md
@@ -10,7 +10,6 @@ A collection of Python utilities for common data manipulation tasks with strict
  - **Random Utilities**: Type-safe random generation for decimals and datetimes
  - **String Utilities**: Efficient string matching utilities for prefixes, suffixes, and substrings, plus multiline text parsing
  - **Subprocess Utilities**: Safe shell command execution with comprehensive result handling
- - **Full Type Safety**: Strict mypy compliance with comprehensive type annotations

  ## Quick Start

@@ -78,34 +77,34 @@ parsed = parse_lines(
  Execute shell commands safely with comprehensive result handling:

  ```python
- from mm_std import shell, ssh_shell, ShellResult
+ from mm_std import run_cmd, run_ssh_cmd, CmdResult

  # Execute local commands
- result = shell("ls -la /tmp")
+ result = run_cmd("ls -la /tmp")
  print(f"Exit code: {result.code}")
  print(f"Output: {result.stdout}")
  print(f"Errors: {result.stderr}")
  print(f"Combined: {result.combined_output}")

  # Handle command errors gracefully
- result = shell("grep 'pattern' nonexistent.txt")
+ result = run_cmd("grep 'pattern' nonexistent.txt")
  if result.code != 0:
  print(f"Command failed: {result.stderr}")

  # Execute with timeout
- result = shell("long-running-command", timeout=30)
+ result = run_cmd("long-running-command", timeout=30)
  if result.code == 255: # TIMEOUT_EXIT_CODE
  print("Command timed out")

  # Echo commands for debugging
- result = shell("echo 'Hello World'", echo_command=True)
+ result = run_cmd("echo 'Hello World'", echo_command=True)

- # Complex shell operations with pipes
- result = shell("ps aux | grep python | wc -l")
+ # Complex shell operations with pipes (requires shell=True)
+ result = run_cmd("ps aux | grep python | wc -l", shell=True)
  python_processes = int(result.stdout.strip())

  # Execute commands on remote hosts via SSH
- ssh_result = ssh_shell(
+ ssh_result = run_ssh_cmd(
  host="server.example.com",
  cmd="systemctl status nginx",
  ssh_key_path="~/.ssh/id_rsa",
@@ -113,7 +112,7 @@ ssh_result = ssh_shell(
  )

  # SSH commands are automatically quoted for security
- ssh_result = ssh_shell(
+ ssh_result = run_ssh_cmd(
  "server.example.com",
  "echo 'hello world; ls -la'", # Properly escaped
  echo_command=True
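For orientation, a minimal migration sketch for the renamed subprocess API, using only the names and signatures that appear in this diff; the commands are illustrative, and the note about the default shell behavior is inferred from the `shell=True` comment above rather than confirmed by the source.

```python
from mm_std import run_cmd  # 0.6.0 exposed this as `shell`

# Simple commands work as before, just under the new name.
result = run_cmd("ls -la /tmp")
if result.code != 0:
    print(f"Command failed: {result.stderr}")

# Pipes and other shell syntax now appear to need an explicit shell=True.
piped = run_cmd("ps aux | grep python | wc -l", shell=True)
print(piped.stdout.strip())
```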
@@ -151,7 +150,7 @@ json_str = json_dumps(data, type_handlers={
  Clean up dictionaries by replacing or removing empty values:

  ```python
- from mm_std import replace_empty_dict_entries
+ from mm_std import compact_dict

  data = {
  "name": "John",
@@ -162,16 +161,16 @@ data = {
  }

  # Remove empty entries entirely
- cleaned = replace_empty_dict_entries(data)
+ cleaned = compact_dict(data)
  # Result: {"name": "John"}

  # Replace with defaults
  defaults = {"age": 25, "email": "unknown@example.com"}
- cleaned = replace_empty_dict_entries(data, defaults=defaults)
+ cleaned = compact_dict(data, defaults=defaults)
  # Result: {"name": "John", "age": 25, "email": "unknown@example.com"}

  # Treat zero and false as empty too
- cleaned = replace_empty_dict_entries(
+ cleaned = compact_dict(
  data,
  defaults=defaults,
  treat_zero_as_empty=True,
@@ -184,14 +183,14 @@ cleaned = replace_empty_dict_entries(
  UTC-focused datetime operations:

  ```python
- from mm_std import utc_now, utc_delta, parse_date
+ from mm_std import utc_now, utc_now_offset, parse_datetime

  # Current UTC time
  now = utc_now()

  # Time calculations
- past = utc_delta(hours=-2, minutes=-30)
- future = utc_delta(days=7)
+ past = utc_now_offset(hours=-2, minutes=-30)
+ future = utc_now_offset(days=7)

  # Flexible date parsing
  dates = [
@@ -201,10 +200,10 @@
  "2023/12/25"
  ]

- parsed_dates = [parse_date(d) for d in dates]
+ parsed_dates = [parse_datetime(d) for d in dates]

  # Parse and ignore timezone info
- local_time = parse_date("2023-12-25T10:30:00+02:00", ignore_tz=True)
+ local_time = parse_datetime("2023-12-25T10:30:00+02:00", ignore_tz=True)
  ```

  ### Random Utilities
@@ -212,16 +211,21 @@ local_time = parse_date("2023-12-25T10:30:00+02:00", ignore_tz=True)
  Generate random values with precision:

  ```python
- from mm_std import random_decimal, random_datetime
+ from mm_std import random_decimal, random_datetime, random_datetime_offset
  from decimal import Decimal
- from datetime import datetime
+ from datetime import datetime, timedelta

  # Random decimal with preserved precision
  price = random_decimal(Decimal("10.00"), Decimal("99.99"))

- # Random datetime within a range
+ # Random datetime within a range (from_time to to_time)
+ start = datetime.now()
+ end = start + timedelta(days=7)
+ random_time = random_datetime(start, end)
+
+ # Random datetime with offset from base time
  base_time = datetime.now()
- random_time = random_datetime(
+ random_time = random_datetime_offset(
  base_time,
  hours=24, # Up to 24 hours later
  minutes=30, # Plus up to 30 minutes
{mm_std-0.6.0 → mm_std-0.7.0}/justfile
@@ -17,6 +17,7 @@ test:
  lint: format pre-commit
  uv run ruff check src tests
  uv run mypy src
+ uv run ty check

  audit:
  # uv export --no-dev --all-extras --format requirements-txt --no-emit-project > requirements.txt
mm_std-0.7.0/pyproject.toml ADDED
@@ -0,0 +1,70 @@
+ [project]
+ name = "mm-std"
+ version = "0.7.0"
+ description = ""
+ requires-python = ">=3.14"
+ dependencies = []
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [dependency-groups]
+ dev = [
+ "bandit~=1.9.3",
+ "mypy~=1.19.1",
+ "pre-commit~=4.5.1",
+ "pydantic~=2.10",
+ "pytest~=9.0.2",
+ "pytest-xdist~=3.8.0",
+ "ruff~=0.14.14",
+ "ty~=0.0.13",
+ ]
+
+ [tool.mypy]
+ python_version = "3.14"
+ warn_no_return = false
+ strict = true
+ exclude = ["^tests/", "^tmp/"]
+
+ [tool.ruff]
+ line-length = 130
+ target-version = "py314"
+ [tool.ruff.lint]
+ select = ["ALL"]
+ ignore = [
+ "TC", # flake8-type-checking, TYPE_CHECKING is dangerous, for example it doesn't work with pydantic
+ "A005", # flake8-builtins: stdlib-module-shadowing
+ "ERA001", # eradicate: commented-out-code
+ "PT", # flake8-pytest-style
+ "FIX", # flake8-fixme
+ "PLR0911", # pylint: too-many-return-statements
+ "PLR0912", # pylint: too-many-branches
+ "PLR0913", # pylint: too-many-arguments
+ "PLR2004", # pylint: magic-value-comparison
+ "PLC0414", # pylint: useless-import-alias
+ "FBT", # flake8-boolean-trap
+ "EM", # flake8-errmsg
+ "TRY003", # tryceratops: raise-vanilla-args
+ "C901", # mccabe: complex-structure,
+ "BLE001", # flake8-blind-except
+ "S311", # bandit: suspicious-non-cryptographic-random-usage
+ "TD002", # flake8-todos: missing-todo-author
+ "TD003", # flake8-todos: missing-todo-link
+ "RET503", # flake8-return: implicit-return
+ "COM812", # it's used in ruff formatter
+ "ASYNC109",
+ "G004",
+ "DTZ001",
+ "D203", # pydocstyle: one-blank-line-before-class (conflicts with D211)
+ "D213", # pydocstyle: multi-line-summary-second-line (conflicts with D212)
+ ]
+ [tool.ruff.lint.per-file-ignores]
+ "tests/*.py" = ["ANN", "S"]
+ [tool.ruff.format]
+ quote-style = "double"
+ indent-style = "space"
+
+ [tool.bandit]
+ exclude_dirs = ["tests"]
+ skips = ["B311"]
mm_std-0.7.0/src/mm_std/__init__.py ADDED
@@ -0,0 +1,21 @@
+ """mm-std: Python utilities for common data manipulation tasks."""
+
+ from .date_utils import parse_datetime as parse_datetime
+ from .date_utils import utc_from_timestamp as utc_from_timestamp
+ from .date_utils import utc_now as utc_now
+ from .date_utils import utc_now_offset as utc_now_offset
+ from .dict_utils import compact_dict as compact_dict
+ from .json_utils import ExtendedJSONEncoder as ExtendedJSONEncoder
+ from .json_utils import json_dumps as json_dumps
+ from .random_utils import random_datetime as random_datetime
+ from .random_utils import random_datetime_offset as random_datetime_offset
+ from .random_utils import random_decimal as random_decimal
+ from .str_utils import parse_lines as parse_lines
+ from .str_utils import str_contains_any as str_contains_any
+ from .str_utils import str_ends_with_any as str_ends_with_any
+ from .str_utils import str_starts_with_any as str_starts_with_any
+
+ # B404: re-exporting subprocess utilities with documented security considerations
+ from .subprocess_utils import CmdResult as CmdResult # nosec
+ from .subprocess_utils import run_cmd as run_cmd # nosec
+ from .subprocess_utils import run_ssh_cmd as run_ssh_cmd # nosec
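Because every symbol is re-exported with an explicit `as` alias, callers can import everything from the package root; a small sketch based only on the names above (the sample data is illustrative):

```python
from mm_std import compact_dict, json_dumps, run_cmd, utc_now

payload = compact_dict({"host": "example.com", "note": None})  # drops the None entry
print(json_dumps({"checked_at": utc_now(), "payload": payload}))
print(run_cmd("uname -a").stdout)
```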
{mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/date_utils.py
@@ -1,3 +1,5 @@
+ """UTC-focused datetime operations and flexible date parsing."""
+
  from datetime import UTC, datetime, timedelta


@@ -6,37 +8,38 @@ def utc_now() -> datetime:
  return datetime.now(UTC)


- def utc_delta(
- *,
- days: int | None = None,
- hours: int | None = None,
- minutes: int | None = None,
- seconds: int | None = None,
+ def utc_now_offset(
+ *, days: int | None = None, hours: int | None = None, minutes: int | None = None, seconds: int | None = None
  ) -> datetime:
  """Get UTC time shifted by the specified delta.

  Use negative values to get time in the past.
  """
  params = {}
- if days:
+ if days is not None:
  params["days"] = days
- if hours:
+ if hours is not None:
  params["hours"] = hours
- if minutes:
+ if minutes is not None:
  params["minutes"] = minutes
- if seconds:
+ if seconds is not None:
  params["seconds"] = seconds
  return datetime.now(UTC) + timedelta(**params)


- def parse_date(value: str, ignore_tz: bool = False) -> datetime:
+ def utc_from_timestamp(timestamp: float) -> datetime:
+ """Create UTC datetime from Unix timestamp."""
+ return datetime.fromtimestamp(timestamp, UTC)
+
+
+ def parse_datetime(date_str: str, ignore_tz: bool = False) -> datetime:
  """Parse date string in various formats, with timezone handling.

  Converts 'Z' suffix to '+00:00' for ISO format compatibility.
  Use ignore_tz=True to strip timezone info from the result.
  """
- if value.lower().endswith("z"):
- value = value[:-1] + "+00:00"
+ if date_str.lower().endswith("z"):
+ date_str = date_str[:-1] + "+00:00"
  date_formats = [
  "%Y-%m-%d %H:%M:%S.%f%z",
  "%Y-%m-%dT%H:%M:%S.%f%z",
@@ -53,10 +56,10 @@ def parse_date(value: str, ignore_tz: bool = False) -> datetime:

  for fmt in date_formats:
  try:
- dt = datetime.strptime(value, fmt) # noqa: DTZ007
+ dt = datetime.strptime(date_str, fmt) # noqa: DTZ007 - timezone deliberately ignored when ignore_tz=True
  if ignore_tz and dt.tzinfo is not None:
  dt = dt.replace(tzinfo=None)
- return dt # noqa: TRY300
+ return dt # noqa: TRY300 - return in try block is intentional for parse flow
  except ValueError:
  continue
- raise ValueError(f"Time data '{value}' does not match any known format.")
+ raise ValueError(f"Time data '{date_str}' does not match any known format.")
{mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/dict_utils.py
@@ -1,3 +1,5 @@
+ """Dictionary manipulation utilities with type preservation."""
+
  from collections import OrderedDict, defaultdict
  from collections.abc import Mapping, MutableMapping
  from decimal import Decimal
@@ -8,8 +10,8 @@ V = TypeVar("V")


  @overload
- def replace_empty_dict_entries(
- data: defaultdict[K, V],
+ def compact_dict(
+ mapping: defaultdict[K, V],
  defaults: Mapping[K, V] | None = None,
  treat_zero_as_empty: bool = False,
  treat_false_as_empty: bool = False,
@@ -18,8 +20,8 @@


  @overload
- def replace_empty_dict_entries(
- data: OrderedDict[K, V],
+ def compact_dict(
+ mapping: OrderedDict[K, V],
  defaults: Mapping[K, V] | None = None,
  treat_zero_as_empty: bool = False,
  treat_false_as_empty: bool = False,
@@ -28,8 +30,8 @@


  @overload
- def replace_empty_dict_entries(
- data: dict[K, V],
+ def compact_dict(
+ mapping: dict[K, V],
  defaults: Mapping[K, V] | None = None,
  treat_zero_as_empty: bool = False,
  treat_false_as_empty: bool = False,
@@ -37,15 +39,14 @@
  ) -> dict[K, V]: ...


- def replace_empty_dict_entries(
- data: MutableMapping[K, V],
+ def compact_dict(
+ mapping: MutableMapping[K, V],
  defaults: Mapping[K, V] | None = None,
  treat_zero_as_empty: bool = False,
  treat_false_as_empty: bool = False,
  treat_empty_string_as_empty: bool = True,
  ) -> MutableMapping[K, V]:
- """
- Replace empty entries in a dictionary with defaults or remove them entirely.
+ """Replace empty entries in a dictionary with defaults or remove them entirely.

  Preserves the exact type of the input mapping:
  - dict[str, int] → dict[str, int]
@@ -53,7 +54,7 @@ def replace_empty_dict_entries(
  - OrderedDict[str, str] → OrderedDict[str, str]

  Args:
- data: The dictionary to process
+ mapping: The dictionary to process
  defaults: Default values to use for empty entries. If None or key not found, empty entries are removed
  treat_zero_as_empty: Treat 0 as empty value
  treat_false_as_empty: Treat False as empty value
@@ -61,16 +62,17 @@

  Returns:
  New dictionary of the same concrete type with empty entries replaced or removed
+
  """
  if defaults is None:
  defaults = {}

- if isinstance(data, defaultdict):
- result: MutableMapping[K, V] = defaultdict(data.default_factory)
+ if isinstance(mapping, defaultdict):
+ result: MutableMapping[K, V] = defaultdict(mapping.default_factory)
  else:
- result = data.__class__()
+ result = mapping.__class__()

- for key, value in data.items():
+ for key, value in mapping.items():
  should_replace = (
  value is None
  or (treat_false_as_empty and value is False)
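A small sketch of the type-preservation behavior described in the docstring, assuming nothing beyond what these hunks show; the sample data is illustrative.

```python
from collections import defaultdict

from mm_std import compact_dict

scores: defaultdict[str, int | None] = defaultdict(lambda: None, {"a": 3, "b": 0, "c": None})

# With no defaults given, empty entries are removed; 0 only counts as empty
# because treat_zero_as_empty=True is passed here.
cleaned = compact_dict(scores, treat_zero_as_empty=True)
assert isinstance(cleaned, defaultdict)  # concrete mapping type (and default_factory) preserved
assert dict(cleaned) == {"a": 3}
```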
{mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/json_utils.py
@@ -1,4 +1,4 @@
- from __future__ import annotations
+ """Extended JSON encoder with support for Python types."""

  import json
  from collections.abc import Callable
@@ -35,21 +35,23 @@ class ExtendedJSONEncoder(json.JSONEncoder):
  }

  @classmethod
- def register(cls, type_: type[Any], serializer: Callable[[Any], Any]) -> None:
+ def register(cls, type_: type[Any], handler: Callable[[Any], Any]) -> None:
  """Register a custom type with its serialization function.

  Args:
  type_: The type to register
- serializer: Function that converts objects of this type to JSON-serializable data
+ handler: Function that converts objects of this type to JSON-serializable data

  Raises:
  ValueError: If type_ is a built-in JSON type
+
  """
  if type_ in (str, int, float, bool, list, dict, type(None)):
  raise ValueError(f"Cannot override built-in JSON type: {type_.__name__}")
- cls._type_handlers[type_] = serializer
+ cls._type_handlers[type_] = handler

- def default(self, o: Any) -> Any: # noqa: ANN401
+ def default(self, o: Any) -> Any: # noqa: ANN401 - Any required for generic JSON encoding
+ """Encode object to JSON-serializable format."""
  # Check registered type handlers first
  for type_, handler in self._type_handlers.items():
  if isinstance(o, type_):
@@ -62,20 +64,21 @@ class ExtendedJSONEncoder(json.JSONEncoder):
  return super().default(o)


- def json_dumps(data: Any, type_handlers: dict[type[Any], Callable[[Any], Any]] | None = None, **kwargs: Any) -> str: # noqa: ANN401
+ def json_dumps(obj: Any, type_handlers: dict[type[Any], Callable[[Any], Any]] | None = None, **kwargs: Any) -> str: # noqa: ANN401 - Any required for generic type handler
  """Serialize object to JSON with extended type support.

  Unlike standard json.dumps, uses ExtendedJSONEncoder which automatically handles
  UUID, Decimal, Path, datetime, dataclasses, enums, pydantic models, and other Python types.

  Args:
- data: Object to serialize to JSON
+ obj: Object to serialize to JSON
  type_handlers: Optional additional type handlers for this call only.
  These handlers take precedence over default ones.
  **kwargs: Additional arguments passed to json.dumps

  Returns:
  JSON string representation
+
  """
  if type_handlers:
  # Type narrowing for mypy
@@ -83,7 +86,7 @@ def json_dumps(data: Any, type_handlers: dict[type[Any], Callable[[Any], Any]] |

  class TemporaryEncoder(ExtendedJSONEncoder):
  _type_handlers: ClassVar[dict[type[Any], Callable[[Any], Any]]] = {
- **ExtendedJSONEncoder._type_handlers, # noqa: SLF001
+ **ExtendedJSONEncoder._type_handlers, # noqa: SLF001 - accessing class internals for type handler inheritance
  **handlers,
  }

@@ -91,14 +94,14 @@ def json_dumps(data: Any, type_handlers: dict[type[Any], Callable[[Any], Any]] |
  else:
  encoder_cls = ExtendedJSONEncoder

- return json.dumps(data, cls=encoder_cls, **kwargs)
+ return json.dumps(obj, cls=encoder_cls, **kwargs)


  def _auto_register_optional_types() -> None:
  """Register handlers for optional dependencies if available."""
  # Pydantic models
  try:
- from pydantic import BaseModel # type: ignore[import-not-found] # noqa: PLC0415
+ from pydantic import BaseModel # noqa: PLC0415 - optional pydantic import at runtime
 
  ExtendedJSONEncoder.register(BaseModel, lambda obj: obj.model_dump())
  except ImportError:
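Both extension points shown above in one short sketch; the `Point` class is a made-up example type, not part of the library.

```python
from mm_std import ExtendedJSONEncoder, json_dumps


class Point:
    def __init__(self, x: float, y: float) -> None:
        self.x, self.y = x, y


# Per-call handler: applies only to this json_dumps invocation.
print(json_dumps({"p": Point(1, 2)}, type_handlers={Point: lambda p: [p.x, p.y]}))

# Global handler: registered on ExtendedJSONEncoder for all later calls.
ExtendedJSONEncoder.register(Point, lambda p: {"x": p.x, "y": p.y})
print(json_dumps({"p": Point(1, 2)}))
```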
{mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/random_utils.py
@@ -1,3 +1,5 @@
+ """Type-safe random generation for decimals and datetimes."""
+
  import random
  from datetime import datetime, timedelta
  from decimal import Decimal
@@ -18,6 +20,7 @@ def random_decimal(from_value: Decimal, to_value: Decimal) -> Decimal:

  Raises:
  ValueError: If from_value > to_value
+
  """
  if from_value > to_value:
  raise ValueError("from_value must be <= to_value")
@@ -37,14 +40,33 @@
  return Decimal(random_int) / Decimal(multiplier)


- def random_datetime(
- from_time: datetime,
- *,
- hours: int = 0,
- minutes: int = 0,
- seconds: int = 0,
- ) -> datetime:
- """Generate a random datetime within a specified time range.
+ def random_datetime(from_time: datetime, to_time: datetime) -> datetime:
+ """Generate a random datetime between from_time and to_time.
+
+ Args:
+ from_time: Minimum datetime (inclusive)
+ to_time: Maximum datetime (inclusive)
+
+ Returns:
+ Random datetime in the specified range
+
+ Raises:
+ ValueError: If from_time > to_time
+
+ """
+ if from_time > to_time:
+ raise ValueError("from_time must be <= to_time")
+
+ delta = (to_time - from_time).total_seconds()
+ if delta == 0:
+ return from_time
+
+ random_seconds = random.uniform(0, delta) # nosec B311
+ return from_time + timedelta(seconds=random_seconds)
+
+
+ def random_datetime_offset(from_time: datetime, *, hours: int = 0, minutes: int = 0, seconds: int = 0) -> datetime:
+ """Generate a random datetime within a specified offset from base time.

  Returns a random datetime between from_time and from_time + offset,
  where offset is calculated from the provided hours, minutes, and seconds.
@@ -60,13 +82,10 @@ def random_datetime(

  Raises:
  ValueError: If any offset value is negative
+
  """
  if hours < 0 or minutes < 0 or seconds < 0:
- raise ValueError("Range values must be non-negative")
+ raise ValueError("Offset values must be non-negative")

  total_seconds = hours * 3600 + minutes * 60 + seconds
- if total_seconds == 0:
- return from_time
-
- random_seconds = random.uniform(0, total_seconds) # nosec B311
- return from_time + timedelta(seconds=random_seconds)
+ return random_datetime(from_time, from_time + timedelta(seconds=total_seconds))
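A sketch contrasting the two functions after the split, using only the signatures shown in this file; the dates are illustrative.

```python
from datetime import datetime, timedelta

from mm_std import random_datetime, random_datetime_offset

start = datetime(2024, 1, 1)

# Explicit range: both endpoints are given.
a = random_datetime(start, start + timedelta(days=7))

# Offset form: range is [from_time, from_time + offset]; it delegates to random_datetime.
b = random_datetime_offset(start, hours=24, minutes=30)
```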
{mm_std-0.6.0 → mm_std-0.7.0}/src/mm_std/str_utils.py
@@ -1,3 +1,5 @@
+ """String matching utilities and multiline text parsing."""
+
  from collections.abc import Iterable


@@ -16,12 +18,7 @@ def str_contains_any(value: str, substrings: Iterable[str]) -> bool:
  return any(substring in value for substring in substrings)


- def parse_lines(
- text: str,
- lowercase: bool = False,
- remove_comments: bool = False,
- deduplicate: bool = False,
- ) -> list[str]:
+ def parse_lines(text: str, lowercase: bool = False, remove_comments: bool = False, deduplicate: bool = False) -> list[str]:
  """Parse multiline text into a list of cleaned lines.

  Args:
@@ -32,6 +29,7 @@
  Returns:
  List of non-empty, stripped lines after applying specified transformations

+
  """
  if lowercase:
  text = text.lower()
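A brief sketch of the parsing behavior documented above, assuming only the signature and docstring shown in this hunk; the handling of `remove_comments` is not visible here, so it is omitted, and the sample text is illustrative.

```python
from mm_std import parse_lines, str_contains_any

text = """
Alpha
beta

ALPHA
"""

# Blank lines are dropped; lowercasing is applied before deduplication,
# so the repeated entry collapses to one.
lines = parse_lines(text, lowercase=True, deduplicate=True)
assert sorted(lines) == ["alpha", "beta"]

assert str_contains_any("error: disk full", ["disk", "memory"])
```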