mm-std 0.0.1.tar.gz → 0.5.3.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. mm_std-0.5.3/.gitignore +16 -0
  2. mm_std-0.5.3/.pre-commit-config.yaml +10 -0
  3. mm_std-0.5.3/PKG-INFO +4 -0
  4. mm_std-0.5.3/README.md +230 -0
  5. mm_std-0.5.3/justfile +40 -0
  6. mm_std-0.5.3/pyproject.toml +68 -0
  7. mm_std-0.5.3/requirements.txt +2 -0
  8. mm_std-0.5.3/src/mm_std/__init__.py +24 -0
  9. mm_std-0.5.3/src/mm_std/date_utils.py +62 -0
  10. mm_std-0.5.3/src/mm_std/dict_utils.py +63 -0
  11. mm_std-0.5.3/src/mm_std/json_utils.py +112 -0
  12. mm_std-0.5.3/src/mm_std/py.typed +0 -0
  13. mm_std-0.5.3/src/mm_std/random_utils.py +72 -0
  14. mm_std-0.5.3/src/mm_std/str_utils.py +45 -0
  15. mm_std-0.5.3/src/mm_std/subprocess_utils.py +75 -0
  16. mm_std-0.5.3/tests/__init__.py +0 -0
  17. mm_std-0.5.3/tests/test_date_utils.py +174 -0
  18. mm_std-0.5.3/tests/test_dict_utils.py +169 -0
  19. mm_std-0.5.3/tests/test_json_utils.py +263 -0
  20. mm_std-0.5.3/tests/test_random_utils.py +122 -0
  21. mm_std-0.5.3/tests/test_str_utils.py +180 -0
  22. mm_std-0.5.3/tests/test_subprocess_utils.py +128 -0
  23. mm_std-0.5.3/uv.lock +368 -0
  24. mm_std-0.0.1/PKG-INFO +0 -27
  25. mm_std-0.0.1/pyproject.toml +0 -78
  26. mm_std-0.0.1/setup.cfg +0 -4
  27. mm_std-0.0.1/src/mm_std/__init__.py +0 -46
  28. mm_std-0.0.1/src/mm_std/command.py +0 -35
  29. mm_std-0.0.1/src/mm_std/concurrency.py +0 -157
  30. mm_std-0.0.1/src/mm_std/config.py +0 -78
  31. mm_std-0.0.1/src/mm_std/crypto.py +0 -13
  32. mm_std-0.0.1/src/mm_std/date.py +0 -48
  33. mm_std-0.0.1/src/mm_std/dict.py +0 -4
  34. mm_std-0.0.1/src/mm_std/env.py +0 -9
  35. mm_std-0.0.1/src/mm_std/fs.py +0 -13
  36. mm_std-0.0.1/src/mm_std/json_.py +0 -36
  37. mm_std-0.0.1/src/mm_std/log.py +0 -22
  38. mm_std-0.0.1/src/mm_std/net.py +0 -154
  39. mm_std-0.0.1/src/mm_std/print_.py +0 -54
  40. mm_std-0.0.1/src/mm_std/random_.py +0 -38
  41. mm_std-0.0.1/src/mm_std/result.py +0 -266
  42. mm_std-0.0.1/src/mm_std/str.py +0 -106
  43. mm_std-0.0.1/src/mm_std/telegram.py +0 -35
  44. mm_std-0.0.1/src/mm_std/types.py +0 -4
  45. mm_std-0.0.1/src/mm_std/zip.py +0 -8
  46. mm_std-0.0.1/src/mm_std.egg-info/PKG-INFO +0 -27
  47. mm_std-0.0.1/src/mm_std.egg-info/SOURCES.txt +0 -41
  48. mm_std-0.0.1/src/mm_std.egg-info/dependency_links.txt +0 -1
  49. mm_std-0.0.1/src/mm_std.egg-info/requires.txt +0 -24
  50. mm_std-0.0.1/src/mm_std.egg-info/top_level.txt +0 -1
  51. mm_std-0.0.1/tests/test_command.py +0 -20
  52. mm_std-0.0.1/tests/test_concurrency.py +0 -142
  53. mm_std-0.0.1/tests/test_crypto.py +0 -18
  54. mm_std-0.0.1/tests/test_date.py +0 -25
  55. mm_std-0.0.1/tests/test_dict.py +0 -8
  56. mm_std-0.0.1/tests/test_env.py +0 -5
  57. mm_std-0.0.1/tests/test_fs.py +0 -12
  58. mm_std-0.0.1/tests/test_json.py +0 -13
  59. mm_std-0.0.1/tests/test_log.py +0 -6
  60. mm_std-0.0.1/tests/test_net.py +0 -124
  61. mm_std-0.0.1/tests/test_print.py +0 -7
  62. mm_std-0.0.1/tests/test_random.py +0 -30
  63. mm_std-0.0.1/tests/test_result.py +0 -52
  64. mm_std-0.0.1/tests/test_str.py +0 -60
  65. mm_std-0.0.1/tests/test_telegram.py +0 -17
  66. /mm_std-0.0.1/src/mm_std/py.typed → /mm_std-0.5.3/dict.dic +0 -0
mm_std-0.5.3/.gitignore ADDED
@@ -0,0 +1,16 @@
+ .idea
+ .vscode
+ .venv
+ .env
+ .coverage
+ /htmlcov
+ __pycache__
+ *.egg-info
+ pip-wheel-metadata
+ .pytest_cache
+ .mypy_cache
+ .ruff_cache
+ /dist
+ /build
+ /tmp
+ .DS_Store
mm_std-0.5.3/.pre-commit-config.yaml ADDED
@@ -0,0 +1,10 @@
+ repos:
+   - repo: https://github.com/pre-commit/pre-commit-hooks
+     rev: v5.0.0
+     hooks:
+       - id: trailing-whitespace
+       - id: end-of-file-fixer
+       - id: check-yaml
+       - id: check-toml
+       - id: check-json
+       - id: check-added-large-files
mm_std-0.5.3/PKG-INFO ADDED
@@ -0,0 +1,4 @@
+ Metadata-Version: 2.4
+ Name: mm-std
+ Version: 0.5.3
+ Requires-Python: >=3.13
mm_std-0.5.3/README.md ADDED
@@ -0,0 +1,230 @@
+ # mm-std
+
+ A collection of Python utilities for common data manipulation tasks with strict type safety and modern Python support.
+
+ ## Features
+
+ - **JSON Utilities**: Extended JSON encoder with support for datetime, UUID, Decimal, dataclasses, enums, and Pydantic models
+ - **Dictionary Utilities**: Advanced dictionary manipulation with type preservation
+ - **Date Utilities**: UTC-focused datetime operations and flexible date parsing
+ - **Random Utilities**: Type-safe random generation for decimals and datetimes
+ - **String Utilities**: Efficient string matching utilities for prefixes, suffixes, and substrings, plus multiline text parsing
+ - **Subprocess Utilities**: Safe shell command execution with comprehensive result handling
+ - **Full Type Safety**: Strict mypy compliance with comprehensive type annotations
+
+ ## Quick Start
+
+ ### String Utilities
+
+ Efficient string matching for common patterns:
+
+ ```python
+ from mm_std import str_starts_with_any, str_ends_with_any, str_contains_any
+
+ # Check URL protocols
+ url = "https://example.com"
+ is_web_url = str_starts_with_any(url, ["http://", "https://"]) # True
+
+ # Check file extensions
+ filename = "document.pdf"
+ is_document = str_ends_with_any(filename, [".pdf", ".doc", ".docx"]) # True
+
+ # Check log levels in messages
+ log_message = "ERROR: Database connection failed"
+ has_error = str_contains_any(log_message, ["ERROR", "CRITICAL", "FATAL"]) # True
+
+ # All functions accept any iterable
+ prefixes = ("admin_", "super_", "root_")
+ username = "admin_john"
+ is_privileged = str_starts_with_any(username, prefixes) # True
+ ```
+
+ Parse multiline text into cleaned lines:
+
+ ```python
+ from mm_std import parse_lines
+
+ # Basic line parsing
+ text = """
+ line1
+ line2
+ line3
+
+ line4
+ """
+ lines = parse_lines(text) # ["line1", "line2", "line3", "line4"]
+
+ # Advanced parsing with options
+ config_text = """
+ DEBUG=true # Enable debug mode
+ HOST=localhost
+ PORT=8080 # Application port
+ # This is a comment
+ DEBUG=true # Duplicate line
+ """
+
+ # Parse with all options
+ parsed = parse_lines(
+     config_text,
+     lowercase=True, # Convert to lowercase
+     remove_comments=True, # Remove everything after '#'
+     deduplicate=True # Remove duplicates, preserve order
+ )
+ # Result: ["debug=true", "host=localhost", "port=8080"]
+ ```
+
+ ### Subprocess Utilities
+
+ Execute shell commands safely with comprehensive result handling:
+
+ ```python
+ from mm_std import shell, ssh_shell, ShellResult
+
+ # Execute local commands
+ result = shell("ls -la /tmp")
+ print(f"Exit code: {result.code}")
+ print(f"Output: {result.stdout}")
+ print(f"Errors: {result.stderr}")
+ print(f"Combined: {result.combined_output}")
+
+ # Handle command errors gracefully
+ result = shell("grep 'pattern' nonexistent.txt")
+ if result.code != 0:
+     print(f"Command failed: {result.stderr}")
+
+ # Execute with timeout
+ result = shell("long-running-command", timeout=30)
+ if result.code == 255: # TIMEOUT_EXIT_CODE
+     print("Command timed out")
+
+ # Echo commands for debugging
+ result = shell("echo 'Hello World'", echo_command=True)
+
+ # Complex shell operations with pipes
+ result = shell("ps aux | grep python | wc -l")
+ python_processes = int(result.stdout.strip())
+
+ # Execute commands on remote hosts via SSH
+ ssh_result = ssh_shell(
+     host="server.example.com",
+     cmd="systemctl status nginx",
+     ssh_key_path="~/.ssh/id_rsa",
+     timeout=10
+ )
+
+ # SSH commands are automatically quoted for security
+ ssh_result = ssh_shell(
+     "server.example.com",
+     "echo 'hello world; ls -la'", # Properly escaped
+     echo_command=True
+ )
+ ```
+
+ ### JSON Utilities
+
+ Extended JSON serialization with automatic handling of Python types:
+
+ ```python
+ from mm_std import json_dumps, ExtendedJSONEncoder
+ from datetime import datetime
+ from decimal import Decimal
+ from uuid import UUID
+
+ data = {
+     "timestamp": datetime.now(),
+     "price": Decimal("19.99"),
+     "user_id": UUID("12345678-1234-5678-1234-567812345678"),
+     "tags": {"python", "json"} # set will be converted to list
+ }
+
+ # Simple serialization
+ json_str = json_dumps(data)
+
+ # Custom type handlers for specific use cases
+ json_str = json_dumps(data, type_handlers={
+     Decimal: lambda d: float(d) # Convert Decimal to float instead of string
+ })
+ ```
+
+ ### Dictionary Utilities
+
+ Clean up dictionaries by replacing or removing empty values:
+
+ ```python
+ from mm_std import replace_empty_dict_entries
+
+ data = {
+     "name": "John",
+     "age": None,
+     "email": "",
+     "score": 0,
+     "active": False
+ }
+
+ # Remove empty entries entirely
+ cleaned = replace_empty_dict_entries(data)
+ # Result: {"name": "John"}
+
+ # Replace with defaults
+ defaults = {"age": 25, "email": "unknown@example.com"}
+ cleaned = replace_empty_dict_entries(data, defaults=defaults)
+ # Result: {"name": "John", "age": 25, "email": "unknown@example.com"}
+
+ # Treat zero and false as empty too
+ cleaned = replace_empty_dict_entries(
+     data,
+     defaults=defaults,
+     treat_zero_as_empty=True,
+     treat_false_as_empty=True
+ )
+ ```
+
+ ### Date Utilities
+
+ UTC-focused datetime operations:
+
+ ```python
+ from mm_std import utc_now, utc_delta, parse_date
+
+ # Current UTC time
+ now = utc_now()
+
+ # Time calculations
+ past = utc_delta(hours=-2, minutes=-30)
+ future = utc_delta(days=7)
+
+ # Flexible date parsing
+ dates = [
+     "2023-12-25",
+     "2023-12-25T10:30:00Z",
+     "2023-12-25 10:30:00.123456+00:00",
+     "2023/12/25"
+ ]
+
+ parsed_dates = [parse_date(d) for d in dates]
+
+ # Parse and ignore timezone info
+ local_time = parse_date("2023-12-25T10:30:00+02:00", ignore_tz=True)
+ ```
+
+ ### Random Utilities
+
+ Generate random values with precision:
+
+ ```python
+ from mm_std import random_decimal, random_datetime
+ from decimal import Decimal
+ from datetime import datetime
+
+ # Random decimal with preserved precision
+ price = random_decimal(Decimal("10.00"), Decimal("99.99"))
+
+ # Random datetime within a range
+ base_time = datetime.now()
+ random_time = random_datetime(
+     base_time,
+     hours=24, # Up to 24 hours later
+     minutes=30, # Plus up to 30 minutes
+     seconds=45 # Plus up to 45 seconds
+ )
+ ```
mm_std-0.5.3/justfile ADDED
@@ -0,0 +1,40 @@
+ version := `uv run python -c 'import tomllib; print(tomllib.load(open("pyproject.toml", "rb"))["project"]["version"])'`
+
+
+ clean:
+     rm -rf .pytest_cache .mypy_cache .ruff_cache .coverage dist build src/*.egg-info
+
+ build: clean
+     uv build
+
+ format:
+     uv run ruff check --select I --fix src tests
+     uv run ruff format src tests
+
+ test:
+     uv run pytest -n auto tests
+
+ lint: format pre-commit
+     uv run ruff check src tests
+     uv run mypy src
+
+ audit:
+     # uv export --no-dev --all-extras --format requirements-txt --no-emit-project > requirements.txt
+     # uv run pip-audit -r requirements.txt --disable-pip
+     # rm requirements.txt
+     uv run bandit -q -r -c "pyproject.toml" src
+
+ publish: build lint audit test
+     git diff-index --quiet HEAD
+     printf "Enter PyPI token: " && IFS= read -rs TOKEN && echo && uv publish --token "$TOKEN"
+     git tag -a 'v{{version}}' -m 'v{{version}}'
+     git push origin v{{version}}
+
+ sync:
+     uv sync
+
+ pre-commit:
+     uv run pre-commit run --all-files
+
+ pre-commit-autoupdate:
+     uv run pre-commit autoupdate
mm_std-0.5.3/pyproject.toml ADDED
@@ -0,0 +1,68 @@
+ [project]
+ name = "mm-std"
+ version = "0.5.3"
+ description = ""
+ requires-python = ">=3.13"
+ dependencies = [
+ ]
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [tool.uv]
+ dev-dependencies = [
+     "pytest~=8.4.0",
+     "pytest-xdist~=3.7.0",
+     "ruff~=0.11.13",
+     "mypy~=1.16.0",
+     "bandit~=1.8.3",
+     "pre-commit~=4.2.0",
+ ]
+
+ [tool.mypy]
+ python_version = "3.13"
+ warn_no_return = false
+ strict = true
+ exclude = ["^tests/", "^tmp/"]
+
+ [tool.ruff]
+ line-length = 130
+ target-version = "py313"
+ [tool.ruff.lint]
+ select = ["ALL"]
+ ignore = [
+     "TC", # flake8-type-checking, TYPE_CHECKING is dangerous, for example it doesn't work with pydantic
+     "A005", # flake8-builtins: stdlib-module-shadowing
+     "ERA001", # eradicate: commented-out-code
+     "PT", # flake8-pytest-style
+     "D", # pydocstyle
+     "FIX", # flake8-fixme
+     "PLR0911", # pylint: too-many-return-statements
+     "PLR0912", # pylint: too-many-branches
+     "PLR0913", # pylint: too-many-arguments
+     "PLR2004", # pylint: magic-value-comparison
+     "PLC0414", # pylint: useless-import-alias
+     "FBT", # flake8-boolean-trap
+     "EM", # flake8-errmsg
+     "TRY003", # tryceratops: raise-vanilla-args
+     "C901", # mccabe: complex-structure,
+     "BLE001", # flake8-blind-except
+     "S311", # bandit: suspicious-non-cryptographic-random-usage
+     "TD002", # flake8-todos: missing-todo-author
+     "TD003", # flake8-todos: missing-todo-link
+     "RET503", # flake8-return: implicit-return
+     "COM812", # it's used in ruff formatter
+     "ASYNC109",
+     "G004",
+     "DTZ001"
+ ]
+ [tool.ruff.lint.per-file-ignores]
+ "tests/*.py" = ["ANN", "S"]
+ [tool.ruff.format]
+ quote-style = "double"
+ indent-style = "space"
+
+ [tool.bandit]
+ exclude_dirs = ["tests"]
+ skips = ["B311"]
mm_std-0.5.3/requirements.txt ADDED
@@ -0,0 +1,2 @@
+ # This file was autogenerated by uv via the following command:
+ # uv export --no-dev --all-extras --format requirements-txt --no-emit-project
mm_std-0.5.3/src/mm_std/__init__.py ADDED
@@ -0,0 +1,24 @@
+ from .date_utils import parse_date, utc_delta, utc_now
+ from .dict_utils import replace_empty_dict_entries
+ from .json_utils import ExtendedJSONEncoder, json_dumps
+ from .random_utils import random_datetime, random_decimal
+ from .str_utils import parse_lines, str_contains_any, str_ends_with_any, str_starts_with_any
+ from .subprocess_utils import ShellResult, shell, ssh_shell # nosec
+
+ __all__ = [
+     "ExtendedJSONEncoder",
+     "ShellResult",
+     "json_dumps",
+     "parse_date",
+     "parse_lines",
+     "random_datetime",
+     "random_decimal",
+     "replace_empty_dict_entries",
+     "shell",
+     "ssh_shell",
+     "str_contains_any",
+     "str_ends_with_any",
+     "str_starts_with_any",
+     "utc_delta",
+     "utc_now",
+ ]
mm_std-0.5.3/src/mm_std/date_utils.py ADDED
@@ -0,0 +1,62 @@
+ from datetime import UTC, datetime, timedelta
+
+
+ def utc_now() -> datetime:
+     """Get current UTC time."""
+     return datetime.now(UTC)
+
+
+ def utc_delta(
+     *,
+     days: int | None = None,
+     hours: int | None = None,
+     minutes: int | None = None,
+     seconds: int | None = None,
+ ) -> datetime:
+     """Get UTC time shifted by the specified delta.
+
+     Use negative values to get time in the past.
+     """
+     params = {}
+     if days:
+         params["days"] = days
+     if hours:
+         params["hours"] = hours
+     if minutes:
+         params["minutes"] = minutes
+     if seconds:
+         params["seconds"] = seconds
+     return datetime.now(UTC) + timedelta(**params)
+
+
+ def parse_date(value: str, ignore_tz: bool = False) -> datetime:
+     """Parse date string in various formats, with timezone handling.
+
+     Converts 'Z' suffix to '+00:00' for ISO format compatibility.
+     Use ignore_tz=True to strip timezone info from the result.
+     """
+     if value.lower().endswith("z"):
+         value = value[:-1] + "+00:00"
+     date_formats = [
+         "%Y-%m-%d %H:%M:%S.%f%z",
+         "%Y-%m-%dT%H:%M:%S.%f%z",
+         "%Y-%m-%d %H:%M:%S.%f",
+         "%Y-%m-%dT%H:%M:%S%z",
+         "%Y-%m-%d %H:%M:%S%z",
+         "%Y-%m-%d %H:%M:%S",
+         "%Y-%m-%d %H:%M%z",
+         "%Y-%m-%d %H:%M",
+         "%Y-%m-%d",
+         "%Y/%m/%d",
+         # Add more formats as needed
+     ]
+
+     for fmt in date_formats:
+         try:
+             dt = datetime.strptime(value, fmt) # noqa: DTZ007
+             if ignore_tz and dt.tzinfo is not None:
+                 dt = dt.replace(tzinfo=None)
+             return dt # noqa: TRY300
+         except ValueError:
+             continue
+     raise ValueError(f"Time data '{value}' does not match any known format.")
mm_std-0.5.3/src/mm_std/dict_utils.py ADDED
@@ -0,0 +1,63 @@
+ from collections import defaultdict
+ from collections.abc import Mapping, MutableMapping
+ from decimal import Decimal
+ from typing import TypeVar, cast
+
+ K = TypeVar("K")
+ V = TypeVar("V")
+ # TypeVar bound to MutableMapping with same K, V as defaults parameter
+ # 'type: ignore' needed because mypy can't handle TypeVar bounds with other TypeVars
+ DictType = TypeVar("DictType", bound=MutableMapping[K, V]) # type: ignore[valid-type]
+
+
+ def replace_empty_dict_entries(
+     data: DictType,
+     defaults: Mapping[K, V] | None = None,
+     treat_zero_as_empty: bool = False,
+     treat_false_as_empty: bool = False,
+     treat_empty_string_as_empty: bool = True,
+ ) -> DictType:
+     """
+     Replace empty entries in a dictionary with defaults or remove them entirely.
+
+     Preserves the exact type of the input mapping:
+     - dict[str, int] → dict[str, int]
+     - defaultdict[str, float] → defaultdict[str, float]
+     - OrderedDict[str, str] → OrderedDict[str, str]
+
+     Args:
+         data: The dictionary to process
+         defaults: Default values to use for empty entries. If None or key not found, empty entries are removed
+         treat_zero_as_empty: Treat 0 as empty value
+         treat_false_as_empty: Treat False as empty value
+         treat_empty_string_as_empty: Treat "" as empty value
+
+     Returns:
+         New dictionary of the same concrete type with empty entries replaced or removed
+     """
+     if defaults is None:
+         defaults = {}
+
+     if isinstance(data, defaultdict):
+         result: MutableMapping[K, V] = defaultdict(data.default_factory)
+     else:
+         result = data.__class__()
+
+     for key, value in data.items():
+         should_replace = (
+             value is None
+             or (treat_false_as_empty and value is False)
+             or (treat_empty_string_as_empty and isinstance(value, str) and value == "")
+             or (treat_zero_as_empty and isinstance(value, (int, float, Decimal)) and not isinstance(value, bool) and value == 0)
+         )
+
+         if should_replace:
+             if key in defaults:
+                 new_value = defaults[key]
+             else:
+                 continue # Skip the key if no default is available
+         else:
+             new_value = value
+
+         result[key] = new_value
+     return cast(DictType, result)
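A minimal sketch of the type-preservation behavior documented in the docstring above (assuming `mm_std` is importable): a `defaultdict` input yields a `defaultdict` result, `None` entries are replaced from `defaults`, and `False`/`0` survive unless the corresponding `treat_*_as_empty` flags are set:

```python
from collections import defaultdict

from mm_std import replace_empty_dict_entries

counts: defaultdict[str, int | bool | None] = defaultdict(int)
counts.update({"ok": 3, "failed": None, "active": False})

cleaned = replace_empty_dict_entries(counts, defaults={"failed": 0})
assert isinstance(cleaned, defaultdict)  # same concrete type as the input
assert cleaned == {"ok": 3, "failed": 0, "active": False}  # False kept by default

dropped = replace_empty_dict_entries(counts, treat_false_as_empty=True)
assert dropped == {"ok": 3}  # no defaults given: empty entries are removed
```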
mm_std-0.5.3/src/mm_std/json_utils.py ADDED
@@ -0,0 +1,112 @@
+ from __future__ import annotations
+
+ import json
+ from collections.abc import Callable
+ from dataclasses import asdict, is_dataclass
+ from datetime import date, datetime
+ from decimal import Decimal
+ from enum import Enum
+ from pathlib import Path
+ from typing import Any, ClassVar
+ from uuid import UUID
+
+
+ class ExtendedJSONEncoder(json.JSONEncoder):
+     """JSON encoder with extended type support for common Python objects.
+
+     Supports built-in Python types, dataclasses, enums, exceptions, and custom registered types.
+     Automatically registers pydantic BaseModel if available.
+     All type handlers are unified in a single registration system for consistency and performance.
+     """
+
+     _type_handlers: ClassVar[dict[type[Any], Callable[[Any], Any]]] = {
+         # Order matters: more specific types first
+         datetime: lambda obj: obj.isoformat(), # Must be before date (inheritance)
+         date: lambda obj: obj.isoformat(),
+         UUID: str,
+         Decimal: str,
+         Path: str,
+         set: list,
+         frozenset: list,
+         bytes: lambda obj: obj.decode("latin-1"),
+         complex: lambda obj: {"real": obj.real, "imag": obj.imag},
+         Enum: lambda obj: obj.value,
+         Exception: str,
+     }
+
+     @classmethod
+     def register(cls, type_: type[Any], serializer: Callable[[Any], Any]) -> None:
+         """Register a custom type with its serialization function.
+
+         Args:
+             type_: The type to register
+             serializer: Function that converts objects of this type to JSON-serializable data
+
+         Raises:
+             TypeError: If serializer is not callable
+             ValueError: If type_ is a built-in JSON type
+         """
+         if not callable(serializer):
+             raise TypeError("Serializer must be callable")
+         if type_ in (str, int, float, bool, list, dict, type(None)):
+             raise ValueError(f"Cannot override built-in JSON type: {type_.__name__}")
+         cls._type_handlers[type_] = serializer
+
+     def default(self, obj: Any) -> Any: # noqa: ANN401
+         # Check registered type handlers first
+         for type_, handler in self._type_handlers.items():
+             if isinstance(obj, type_):
+                 return handler(obj)
+
+         # Special case: dataclasses (requires is_dataclass check, not isinstance)
+         if is_dataclass(obj) and not isinstance(obj, type):
+             return asdict(obj) # Don't need recursive serialization
+
+         return super().default(obj)
+
+
+ def json_dumps(data: Any, type_handlers: dict[type[Any], Callable[[Any], Any]] | None = None, **kwargs: Any) -> str: # noqa: ANN401
+     """Serialize object to JSON with extended type support.
+
+     Unlike standard json.dumps, uses ExtendedJSONEncoder which automatically handles
+     UUID, Decimal, Path, datetime, dataclasses, enums, pydantic models, and other Python types.
+
+     Args:
+         data: Object to serialize to JSON
+         type_handlers: Optional additional type handlers for this call only.
+             These handlers take precedence over default ones.
+         **kwargs: Additional arguments passed to json.dumps
+
+     Returns:
+         JSON string representation
+     """
+     if type_handlers:
+         # Type narrowing for mypy
+         handlers: dict[type[Any], Callable[[Any], Any]] = type_handlers
+
+         class TemporaryEncoder(ExtendedJSONEncoder):
+             _type_handlers: ClassVar[dict[type[Any], Callable[[Any], Any]]] = {
+                 **ExtendedJSONEncoder._type_handlers, # noqa: SLF001
+                 **handlers,
+             }
+
+         encoder_cls: type[json.JSONEncoder] = TemporaryEncoder
+     else:
+         encoder_cls = ExtendedJSONEncoder
+
+     return json.dumps(data, cls=encoder_cls, **kwargs)
+
+
+ def _auto_register_optional_types() -> None:
+     """Register handlers for optional dependencies if available."""
+     # Pydantic models
+     try:
+         from pydantic import BaseModel # type: ignore[import-not-found]
+
+         ExtendedJSONEncoder.register(BaseModel, lambda obj: obj.model_dump())
+     except ImportError:
+         pass
+
+
+ # Auto-register optional types when module is imported
+ _auto_register_optional_types()
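A minimal sketch of the two extension points shown above: per-call `type_handlers` in `json_dumps`, and class-level `ExtendedJSONEncoder.register`. The `IPv4Address` type is only an illustration and is not handled by the package out of the box:

```python
from ipaddress import IPv4Address

from mm_std import ExtendedJSONEncoder, json_dumps

payload = {"ip": IPv4Address("10.0.0.1")}

# Per-call handler: affects only this json_dumps invocation
print(json_dumps(payload, type_handlers={IPv4Address: str}))  # {"ip": "10.0.0.1"}

# Class-level registration: affects every later ExtendedJSONEncoder use
ExtendedJSONEncoder.register(IPv4Address, str)
print(json_dumps(payload))  # {"ip": "10.0.0.1"}

# Built-in JSON types cannot be overridden
try:
    ExtendedJSONEncoder.register(str, repr)
except ValueError as e:
    print(e)  # Cannot override built-in JSON type: str
```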
mm_std-0.5.3/src/mm_std/py.typed ADDED
File without changes