adiumentum 0.1.1__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,410 @@
1
+ """
2
+ Idea: make BaseSequence[C, T] type,
3
+ where C is list|set|tuple and T is the element type
4
+ """
5
+
6
+ import json
7
+ from abc import ABC, abstractmethod
8
+ from collections.abc import Callable, Hashable, Mapping
9
+ from pathlib import Path
10
+ from types import GenericAlias
11
+ from typing import (
12
+ Any,
13
+ Literal,
14
+ Self,
15
+ TypeVar,
16
+ cast,
17
+ get_args,
18
+ )
19
+
20
+ from pydantic import ConfigDict, GetCoreSchemaHandler, GetJsonSchemaHandler, TypeAdapter
21
+ from pydantic.config import ExtraValues
22
+ from pydantic.fields import FieldInfo
23
+ from pydantic.json_schema import (
24
+ DEFAULT_REF_TEMPLATE,
25
+ GenerateJsonSchema,
26
+ JsonSchemaMode,
27
+ JsonSchemaValue,
28
+ )
29
+ from pydantic.main import IncEx
30
+ from pydantic_core import CoreSchema, core_schema
31
+
32
+ from .io_utils import JSONDict, write_json
33
+
34
T = TypeVar("T")
K_ = TypeVar("K_", bound=Hashable)
V_ = TypeVar("V_")
# NOTE(review): a union with plain `str` absorbs the Literal, so this alias is
# equivalent to `str` for type checkers; runtime narrowing relies on VALID_MODES.
Mode = Literal["json", "python"] | str


# Dump modes actually accepted at runtime by model_dump / pre_dump_hook.
VALID_MODES: set[Literal["json", "python"]] = {"json", "python"}


# TODO: next step: create BaseList (BaseModelList?)
# FoodPlanRaw, Entries, WeightSessionPlan, WorkoutPlanRaw, Partition
47
class AbstractCustom(ABC):
    """
    Mixin that equips builtin-container subclasses (dict/list/set) with a
    pydantic ``BaseModel``-like interface.

    Subclasses supply a ``TypeAdapter`` via :meth:`get_adapter`; validation and
    serialization are delegated to that adapter, with overridable pre-/post-
    hooks on both the validation and dump paths.
    """

    model_config: ConfigDict = ConfigDict()
    __pydantic_core_schema__: CoreSchema

    @classmethod
    def __get_pydantic_core_schema__(
        cls, source_type: Any, handler: GetCoreSchemaHandler
    ) -> CoreSchema:
        # Route dict input through `model_validate` so the class's hooks run
        # even when this type is nested inside another pydantic model.
        return core_schema.no_info_after_validator_function(
            cls.model_validate,
            core_schema.dict_schema(),
        )

    @classmethod
    def __get_pydantic_json_schema__(
        cls,
        core_schema: CoreSchema,
        handler: GetJsonSchemaHandler,
    ) -> JsonSchemaValue:
        json_schema = handler(core_schema)
        json_schema = handler.resolve_ref_schema(json_schema)
        return json_schema

    @abstractmethod
    def __init__(self, *args, **kwargs) -> None:
        # BUGFIX: the TypeError was previously constructed but never raised,
        # so calling this method was a silent no-op. Container subclasses
        # resolve `__init__` to dict/list/set first, so they are unaffected.
        raise TypeError(f"Use `model_validate` to instantiate {self.__class__.__name__}")

    @classmethod
    @abstractmethod
    def get_adapter(cls) -> TypeAdapter: ...

    @abstractmethod
    def __repr__(self) -> str: ...

    @abstractmethod
    def __str__(self) -> str: ...

    @classmethod
    def model_fields(cls) -> dict[str, FieldInfo]:
        # Container subclasses declare no named fields.
        return {}

    @property
    def model_extra(self) -> dict[str, Any] | None:
        return None

    @property
    def model_fields_set(self) -> set[str]:
        return set()

    @classmethod
    @abstractmethod
    def model_construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self: ...

    @abstractmethod
    def model_copy(
        self, *, update: Mapping[str, Any] | None = None, deep: bool = False
    ) -> Self: ...

    def model_dump(
        self,
        *,
        mode: Literal["json", "python"] | str = "python",
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        exclude_computed_fields: bool = False,
        round_trip: bool = False,
        warnings: (bool | Literal["none", "warn", "error"]) = True,
        fallback: Callable[[Any], Any] | None = None,
        serialize_as_any: bool = False,
    ) -> dict[str, Any]:
        """Serialize via the subclass adapter; mirrors ``BaseModel.model_dump``.

        ``exclude_computed_fields`` is accepted for signature parity only and
        raises ``ValueError`` when set.
        """
        if mode not in VALID_MODES:
            raise ValueError(f"Unsupported dump mode: {mode!r}")
        if exclude_computed_fields:
            raise ValueError("'exclude_computed_fields' is not supported")

        return self.get_adapter().dump_python(
            self.pre_dump_hook(mode=mode),
            mode=mode,
            include=include,
            exclude=exclude,
            context=context,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
            fallback=fallback,
            serialize_as_any=serialize_as_any,
        )

    def model_dump_json(
        self,
        *,
        indent: int | None = None,
        ensure_ascii: bool = False,
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        exclude_computed_fields: bool = False,
        round_trip: bool = False,
        warnings: (bool | Literal["none", "warn", "error"]) = True,
        fallback: Callable[[Any], Any] | None = None,
        serialize_as_any: bool = False,
    ) -> str:
        """Serialize to a JSON string via the adapter.

        ``ensure_ascii`` and ``exclude_computed_fields`` are accepted for
        signature parity only and raise ``ValueError`` when set.
        """
        if ensure_ascii or exclude_computed_fields:
            raise ValueError(
                "'ensure_ascii' and 'exclude_computed_fields' are not supported"
            )
        from_hook = self.pre_dump_hook(mode="json")
        return (
            self.get_adapter()
            .dump_json(
                from_hook,
                indent=indent,
                include=include,
                exclude=exclude,
                context=context,
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
                round_trip=round_trip,
                warnings=warnings,
                fallback=fallback,
                serialize_as_any=serialize_as_any,
            )
            .decode("utf-8")
        )

    @classmethod
    def model_json_schema(
        cls,
        by_alias: bool = True,
        ref_template: str = DEFAULT_REF_TEMPLATE,
        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
        mode: JsonSchemaMode = "validation",
        *,
        union_format: Literal["any_of", "primitive_type_array"] = "any_of",
    ) -> dict[str, Any]:
        # NOTE(review): `union_format` is accepted for API parity but is not
        # forwarded to the adapter — confirm whether it should be.
        return cls.get_adapter().json_schema(
            by_alias=by_alias,
            ref_template=ref_template,
            schema_generator=schema_generator,
            mode=mode,
        )

    @classmethod
    def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
        raise NotImplementedError

    @classmethod
    def model_validate(
        cls,
        obj: Any,
        *,
        strict: bool | None = None,
        extra: ExtraValues | None = None,
        from_attributes: bool | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        by_name: bool | None = None,
    ) -> Self:
        """Validate a Python object via the adapter and wrap it in this class.

        The ``extra`` option is accepted for signature parity only and raises
        ``ValueError`` when set.
        """
        if extra:
            raise ValueError("'extra' is not supported")
        adapter = cls.get_adapter()
        obj = cls.pre_validation_hook_python(obj)
        obj = adapter.validate_python(
            obj,
            strict=strict,
            from_attributes=from_attributes,
            context=context,
            by_alias=by_alias,
            by_name=by_name,
        )
        validated = cls(obj)
        return cls.post_validation_hook(validated)

    @classmethod
    def model_validate_json(
        cls,
        json_data: str | bytes | bytearray,
        *,
        strict: bool | None = None,
        extra: ExtraValues | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        by_name: bool | None = None,
    ) -> Self:
        """Validate JSON text via the adapter and wrap the result in this class."""
        if extra:
            raise ValueError("'extra' is not supported")
        adapter = cls.get_adapter()
        json_string = cls.pre_validation_hook_json(json_data)
        raw_validated = adapter.validate_json(
            json_string,
            strict=strict,
            context=context,
            by_alias=by_alias,
            by_name=by_name,
        )
        validated = cls(raw_validated)
        return cls.post_validation_hook(validated)

    @classmethod
    @abstractmethod
    def model_validate_strings(
        cls,
        obj: Any,
        *,
        strict: bool | None = None,
        extra: ExtraValues | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        by_name: bool | None = None,
    ) -> Self: ...

    @property
    def __annotations__(self) -> dict[str, Any]:  # type: ignore
        # NOTE(review): shadowing `__annotations__` with a property is unusual;
        # confirm downstream tooling tolerates this.
        return self.get_adapter().__annotations__

    @classmethod
    def read(cls, read_path: Path) -> Self:
        """Load an instance from a JSON file."""
        return cls.model_validate_json(read_path.read_text())

    def write(self, write_path: Path) -> None:
        """Write this instance to ``write_path`` as JSON."""
        write_json(cast(JSONDict, self.model_dump(mode="json")), write_path)

    @staticmethod
    def pre_validation_hook_python(python_dict: dict) -> dict:
        """Override to preprocess Python input before validation."""
        return python_dict

    @staticmethod
    def pre_validation_hook_json(json_string: str | bytes | bytearray) -> str | bytes | bytearray:
        """Override to preprocess JSON text before validation."""
        return json_string

    @staticmethod
    def post_validation_hook(validated: T) -> T:
        """Override to post-process a freshly validated instance."""
        return validated

    def pre_dump_hook(self, *, mode: Mode) -> Self:
        """Override to preprocess ``self`` before dumping."""
        if mode not in VALID_MODES:
            raise ValueError(f"Unsupported dump mode: {mode!r}")
        return self

    def post_dump_hook(self, dumped: T, *, mode: Mode) -> T:
        """Override to post-process dumped output."""
        return dumped

    def schema(self) -> CoreSchema:
        return self.get_adapter().core_schema

    def schema_json(self) -> str:
        return json.dumps(self.model_json_schema())
309
+
310
+
311
+ class BaseDict[K, V](dict[K, V], AbstractCustom):
312
+ """
313
+ Dictionary type leveraging pydantic for validation and JSON serialization.
314
+ """
315
+
316
+ @classmethod
317
+ def get_adapter(cls) -> TypeAdapter[dict[K, V]]:
318
+ """Return a TypeAdapter for this subclass, preserving its key/value types."""
319
+ for base in getattr(cls, "__orig_bases__", []):
320
+ if isinstance(base, GenericAlias):
321
+ key_type, val_type = get_args(base)
322
+ return TypeAdapter(dict[key_type, val_type]) # type: ignore[valid-type]
323
+ raise TypeError("Key and value types not found.")
324
+
325
+ def __repr__(self) -> str:
326
+ return f"{self.__class__.__name__}{dict.__repr__(self)}"
327
+
328
+ def __str__(self) -> str:
329
+ raise NotImplementedError
330
+
331
+
332
+ class BaseList[T](list[T], AbstractCustom):
333
+ """
334
+ Dictionary type leveraging pydantic for validation and JSON serialization.
335
+ """
336
+
337
+ @classmethod
338
+ def get_adapter(cls) -> TypeAdapter[list[T]]:
339
+ """Return a TypeAdapter for this subclass, preserving its key/value types."""
340
+ for base in getattr(cls, "__orig_bases__", []):
341
+ if isinstance(base, GenericAlias):
342
+ element_type = get_args(base)[0]
343
+ return TypeAdapter(list[element_type]) # type: ignore[valid-type]
344
+ raise TypeError("Key and value types not found.")
345
+
346
+ def __repr__(self) -> str:
347
+ return f"{self.__class__.__name__}{list.__repr__(self)}"
348
+
349
+ def __str__(self) -> str:
350
+ raise NotImplementedError
351
+
352
+
353
+ class BaseSet[T](set[T], AbstractCustom):
354
+ """
355
+ Dictionary type leveraging pydantic for validation and JSON serialization.
356
+
357
+ TODO: make dump as list if not all members hashable.
358
+ """
359
+
360
+ @classmethod
361
+ def get_adapter(cls) -> TypeAdapter[set[T]]:
362
+ """Return a TypeAdapter for this subclass, preserving its key/value types."""
363
+ for base in getattr(cls, "__orig_bases__", []):
364
+ if isinstance(base, GenericAlias):
365
+ element_type = get_args(base)[0]
366
+ return TypeAdapter(set[element_type]) # type: ignore[valid-type]
367
+ raise TypeError("Key and value types not found.")
368
+
369
+ def __repr__(self) -> str:
370
+ return f"{self.__class__.__name__}{set.__repr__(self)}"
371
+
372
+ def __str__(self) -> str:
373
+ raise NotImplementedError
374
+
375
+ def model_dump(
376
+ self,
377
+ *,
378
+ mode: Literal["json", "python"] | str = "python",
379
+ include: IncEx | None = None,
380
+ exclude: IncEx | None = None,
381
+ context: Any | None = None,
382
+ by_alias: bool | None = None,
383
+ exclude_unset: bool = False,
384
+ exclude_defaults: bool = False,
385
+ exclude_none: bool = False,
386
+ exclude_computed_fields: bool = False,
387
+ round_trip: bool = False,
388
+ warnings: (bool | Literal["none", "warn", "error"]) = True,
389
+ fallback: Callable[[Any], Any] | None = None,
390
+ serialize_as_any: bool = False,
391
+ ) -> dict[str, Any]:
392
+ if mode not in VALID_MODES:
393
+ raise ValueError
394
+ if exclude_computed_fields:
395
+ raise ValueError
396
+
397
+ return BaseList.model_validate(self).model_dump(
398
+ mode=mode,
399
+ include=include,
400
+ exclude=exclude,
401
+ context=context,
402
+ by_alias=by_alias,
403
+ exclude_unset=exclude_unset,
404
+ exclude_defaults=exclude_defaults,
405
+ exclude_none=exclude_none,
406
+ round_trip=round_trip,
407
+ warnings=warnings,
408
+ fallback=fallback,
409
+ serialize_as_any=serialize_as_any,
410
+ )
@@ -6,6 +6,7 @@ from typing import Literal, TypeAlias, cast
6
6
  from datethyme import Date, Time
7
7
 
8
8
  from .functional import lmap
9
+ from .typing_utils import JSONDict, areinstances
9
10
 
10
11
  MixedValidated: TypeAlias = (
11
12
  str
@@ -75,7 +76,7 @@ def parse_sequence(s: str) -> list[str]:
75
76
  start, end = start[1:], end[1:]
76
77
  return [f"{letter}{i}" for i in range(int(start), int(end) + 1, step)]
77
78
 
78
- segments = []
79
+ segments: list[str] = []
79
80
  for subseq in s.strip().split(","):
80
81
  segments.extend(interpolate(subseq))
81
82
  return segments
@@ -175,5 +176,12 @@ def cast_as(
175
176
  return type_specific_caster
176
177
 
177
178
 
178
def as_json(d: JSONDict) -> str:
    """Serialize *d* as pretty-printed JSON without ASCII-escaping."""
    return json.dumps(d, indent=4, ensure_ascii=False)
181
+
182
+
183
def re_split(expr: str | re.Pattern[str], s: str) -> list[str]:
    """Split *s* on *expr*, guaranteeing the result is a list of strings.

    Raises TypeError when any segment is not a string (e.g. None entries
    produced by capturing groups in *expr*).
    """
    parts = re.split(expr, s)
    if areinstances(parts, str):
        return parts
    raise TypeError("Function 're_split' may only return a list of strings.")
@@ -1,13 +1,22 @@
1
+ import re
2
+ from abc import ABC, abstractmethod
1
3
  from collections.abc import Callable, Iterable
4
+ from dataclasses import dataclass
5
+ from pathlib import Path
2
6
  from types import UnionType
3
- from typing import TypeVar
7
+ from typing import Annotated, Any, Protocol, Self, TypeVar, Union
4
8
 
5
9
  T = TypeVar("T")
6
10
 
11
+
12
# Scalar value kinds usable as atomic configuration values.
type Atomic = int | float | str | bool | None
# NOTE(review): `tuple["ClassInfo"]` is a fixed-length 1-tuple; isinstance()
# accepts arbitrary-length tuples, so `tuple["ClassInfo", ...]` may be intended.
ClassInfo = type | UnionType | tuple["ClassInfo"]
# NOTE(review): unlike Atomic, this omits bool — confirm that is deliberate.
JSONPrimitive = int | float | str | None
# NOTE(review): `Union["JSONDict | JSONPrimitive"]` is a single stringified
# forward reference containing a union expression — works, but unconventional.
JSONDict = dict[str, Union[JSONPrimitive, "JSONDict", list[Union["JSONDict | JSONPrimitive"]]]]
JSONList = list[JSONPrimitive | JSONDict]
8
17
 
9
18
 
10
def areinstances(iterable_instance: Iterable[Any], class_or_tuple: "ClassInfo") -> bool:
    """Return True when every element of *iterable_instance* is an instance of
    *class_or_tuple* (vacuously True for an empty iterable).

    Idiom fix: generator expression instead of ``all(map(lambda ...))``; the
    ``ClassInfo`` annotation is a forward-reference string so the function does
    not depend on definition order at import time.
    """
    return all(isinstance(inst, class_or_tuple) for inst in iterable_instance)
12
21
 
13
22
 
@@ -17,3 +26,107 @@ def fallback_if_none(orig: T | None, alt: T) -> T:
17
26
 
18
27
def call_fallback_if_none(orig: T | None, alt: Callable[[], T]) -> T:
    """Return *orig*, or the result of calling *alt* when *orig* is None."""
    if orig is None:
        return alt()
    return orig
29
+
30
+
31
class SupportsGe(Protocol):
    """Structural type for objects supporting the ``>=`` operator."""

    def __ge__(self: T, __other: T) -> bool: ...
33
+
34
+
35
class SupportsGt(Protocol):
    """Structural type for objects supporting the ``>`` operator."""

    def __gt__(self: T, __other: T) -> bool: ...
37
+
38
+
39
class SupportsLe(Protocol):
    """Structural type for objects supporting the ``<=`` operator."""

    def __le__(self: T, __other: T) -> bool: ...
41
+
42
+
43
class SupportsLt(Protocol):
    """Structural type for objects supporting the ``<`` operator."""

    def __lt__(self: T, __other: T) -> bool: ...
45
+
46
+
47
@dataclass(frozen=True)
class _BaseMetadata:
    """Base class for all metadata.

    This exists mainly so that implementers
    can do `isinstance(..., _BaseMetadata)` while traversing field annotations.
    """

    # No per-instance state; subclasses add a single bound attribute each.
    __slots__ = ()
56
+
57
+
58
@dataclass(frozen=True)
class Ge(_BaseMetadata):
    """Ge(ge=x) implies that the value must be greater than or equal to x.

    It can be used with any type that supports the ``>=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    # Lower bound (inclusive).
    ge: SupportsGe
67
+
68
+
69
@dataclass(frozen=True)
class Gt(_BaseMetadata):
    """Gt(gt=x) implies that the value must be greater than x.

    It can be used with any type that supports the ``>`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    # Lower bound (exclusive).
    gt: SupportsGt
78
+
79
+
80
@dataclass(frozen=True)
class Lt(_BaseMetadata):
    """Lt(lt=x) implies that the value must be less than x.

    It can be used with any type that supports the ``<`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    # Upper bound (exclusive).
    lt: SupportsLt
89
+
90
+
91
@dataclass(frozen=True)
class Le(_BaseMetadata):
    """Le(le=x) implies that the value must be less than or equal to x.

    It can be used with any type that supports the ``<=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    # Upper bound (inclusive).
    le: SupportsLe
100
+
101
+
102
class SupportsIO(ABC):
    """Interface for objects that can be round-tripped through a file path."""

    @classmethod
    @abstractmethod
    def read(cls, read_path: Path) -> Self: ...

    @abstractmethod
    def write(self, write_path: Path) -> None: ...
109
+
110
+
111
class Pattern:
    """Precompiled regular expressions for lightweight string validation."""

    # Date "YYYY-M-D" with optional zero-padding.
    # BUGFIX: removed the stray empty alternative (`|)`) in the month group,
    # which previously made an empty month match (e.g. "2024--15").
    DATE: re.Pattern[str] = re.compile(r"^[12]\d\d\d-(0?\d|1[012])-(0?\d|[12]\d|3[01])$")
    # As DATE, but the zero-padding is mandatory (same stray-`|` fix applied).
    DATE_STRICT: re.Pattern[str] = re.compile(r"^[12]\d\d\d-(0\d|1[012])-(0\d|[12]\d|3[01])$")
    # Unanchored, permissive date-like match: 2-4 digit year, any non-digit separators.
    DATE_LOOSE: re.Pattern[str] = re.compile(r"(\d{2,4})[^\d](\d\d?)[^\d](\d\d?)")
    # Identifier of at least two characters, starting with a letter.
    ID: re.Pattern[str] = re.compile(r"^[A-Za-z][A-Za-z0-9_]+$")
    # BUGFIX: previously only matched the comma-separated pair, never a single
    # ID, contradicting the name (compare PROPORTION_OR_2 below, which allows both).
    ID_OR_2IDS: re.Pattern[str] = re.compile(
        r"^[A-Za-z][A-Za-z0-9_]+(,[A-Za-z][A-Za-z0-9_]+)?$"
    )
    # Positive integers without leading zeros (excludes 0).
    NATURAL: re.Pattern[str] = re.compile(r"^[1-9][0-9]*$")
    # Decimal fraction such as ".5" or "0.25".
    PROPORTION: re.Pattern[str] = re.compile(r"^0?\.[0-9]+$")
    # One proportion, or a comma-separated pair of proportions.
    PROPORTION_OR_2: re.Pattern[str] = re.compile(r"^0?\.[0-9]+$|^0?\.[0-9]+,0?\.[0-9]+$")
120
+
121
+
122
# Raw user input for a time amount, prior to parsing/validation.
TimeAmountRaw = str | int

# NOTE(review): Natural permits 0 (ge=0) while Pattern.NATURAL matches only >= 1,
# and NonnegativeInt below is an identical alias — confirm intent.
Natural = Annotated[int, Ge(ge=0)]
Nonnegative = Annotated[float, Ge(ge=0)]
Positive = Annotated[float, Gt(gt=0)]
# Open intervals (0, 5) and (-5, 0).
PositiveScore = Annotated[float, Gt(gt=0.0), Lt(lt=5.0)]
NegativeScore = Annotated[float, Gt(gt=-5.0), Lt(lt=0.0)]
# Closed interval [0, 1].
Proportion = Annotated[float, Ge(ge=0.0), Le(le=1.0)]
NonnegativeInt = Annotated[int, Ge(ge=0)]
NonnegativeFloat = Annotated[float, Ge(ge=0.0)]
# Closed interval [-1, 1].
PolarityScore = Annotated[float, Ge(ge=-1.0), Le(le=1.0)]
@@ -0,0 +1,61 @@
1
+ Metadata-Version: 2.3
2
+ Name: adiumentum
3
+ Version: 0.3.0
4
+ Summary:
5
+ Author: Isaac Riley
6
+ Author-email: Isaac Riley <yelircaasi@proton.me>
7
+ Requires-Dist: pydantic>=2.11
8
+ Requires-Dist: multipledispatch>=1
9
+ Requires-Dist: loguru>=0.7.3
10
+ Requires-Dist: datethyme>=0.4.0
11
+ Requires-Python: >=3.11, <3.15
12
+ Description-Content-Type: text/markdown
13
+
14
+ # adiumentum
15
+
16
+
17
+ With Nix installed, you can enter a development environment with all dependencies installed:
18
+
19
+ ```sh
20
+ nix develop
21
+ ```
22
+
23
+ Once in this dev shell, you have a number of development utils you can try out (via just):
24
+
25
+ ```sh
26
+ ✔just
27
+ ✔just format
28
+ ✔just check
29
+ ✔just fix
30
+ ✔just typecheck
31
+ ✔just lint
32
+ ✔just deal
33
+ ✔just vulture
34
+ ✔just pydeps-full
35
+ ✔just pydeps
36
+ ✔just pydeps-simple
37
+ ✔just view-deps
38
+ ✔just snakefood
39
+ ✔just deply
40
+ ✔just bandit
41
+ ✔just bandit-html
42
+ ✔just bandit-view
43
+ ✔just pyflame
44
+ ✔just flamegraph
45
+ ✔just perf-flamegraph
46
+ ✔just check-structure
47
+ ✔just check-imports
48
+ ✔just smoke
49
+ ✔just unit
50
+ ✔just test
51
+ ✔just test-cov
52
+ ✔just docs
53
+ ✔just scalene
54
+ ✔just view-cov
55
+ ✔just view-docs
56
+ ✔just view-flamegraphs
57
+ ✔just sbom
58
+
59
+ lefthook validate
60
+ lefthook run all
61
+ ```
@@ -0,0 +1,26 @@
1
+ adiumentum/__init__.py,sha256=318b2af46bfbddaf6cd4b90b1a9d65e1023052a43a44323abbf704826e70426d,2611
2
+ adiumentum/color.py,sha256=dfdebfecf0cdcf32794103fff75bb2e20dd0ae3514d1d051b1af95e6560f6790,1673
3
+ adiumentum/comparison.py,sha256=5d3045dbe9364c52c88ae52be29a6739d280f2f1789fa7813ccda0c05f1e1bad,204
4
+ adiumentum/converters.py,sha256=ae64583d622aa3f9e242fcba479ffb2fa5ec0bcbdf237e6bfc7f980f69914221,48
5
+ adiumentum/dependency_sorting.py,sha256=0310f297b9b559ab02334d9d5becd1909433246d70f04c03b26b7b7afe7fa903,3291
6
+ adiumentum/display.py,sha256=87528f2eacde427138c6ccfe41e5f1141ba34236553352e7159f7e9389d15ec6,1461
7
+ adiumentum/elementary_types.py,sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855,0
8
+ adiumentum/exceptions.py,sha256=b6851f704e236f8970998060317b68ead953f20e496b990ccd11eb740e5a01e3,49
9
+ adiumentum/file_modification_time.py,sha256=898b8e8c794954ac9133b2c6959f4d6b142a11e98f01a3aab89efdf097e4d2e3,1236
10
+ adiumentum/frozendict.py,sha256=b575059bd5fdaecb7b5f33f86e87342b31656b5526e0948a060adf1cf93dcb0c,942
11
+ adiumentum/functional.py,sha256=82ac44991c02f946a0eb1b34506121c1f8978461085ad3e1958a23d0cc1643dc,2972
12
+ adiumentum/io_utils.py,sha256=033110e6b9335ad4270cf62c86a0df53c5c43954521a814729f2f2b7edc5e7c2,2005
13
+ adiumentum/markers.py,sha256=988f0b7ef80d61871165f30b601f05c9981cfd353c235eb314de5271e8040900,2959
14
+ adiumentum/merge.py,sha256=b1894b08e906ead6f423dfb6d637df553eeed4f894c197f771f3569854044c32,3877
15
+ adiumentum/numerical.py,sha256=adad3335e7220c8bf9666ce8c79f01ca4f1a0933316bb8e6fbfc0440293fbbed,704
16
+ adiumentum/paths_manager.py,sha256=7098bf8bd71cbe93eb60eb8c145a3f02d3e3470c2f236e8bb579c08cf0db5579,403
17
+ adiumentum/performance_logging.py,sha256=bd0c42337fb5c77921700e5487d366ea103e8cd25825138962bfb44c1b54773b,1471
18
+ adiumentum/pydantic_extensions.md,sha256=6f5fe7fc9f10e0f6a87e2861dd16d066892099542dbaca3764944c6a27635706,24936
19
+ adiumentum/pydantic_extensions.py,sha256=8e49c80f4be42b71986ec93195172af2d83986598de3f1bffe4c64505dc03729,12599
20
+ adiumentum/string_utils.py,sha256=88b6389b2cb8ed9a3d65c5ea506ffe852cff480ac287ecdc399dec1e45095a58,4862
21
+ adiumentum/timestamping.py,sha256=87729ac9dd7dac614fbb7bb1995e321bd860202a4c2f435e044f4af23dd545a0,556
22
+ adiumentum/typing_utils.py,sha256=81e01e90a60fe397c5211493fb4446af5f59fbd4b364c98f377962f4bd67ff8b,3894
23
+ adiumentum-0.3.0.dist-info/WHEEL,sha256=ab6157bc637547491fb4567cd7ddf26b04d63382916ca16c29a5c8e94c9c9ef7,79
24
+ adiumentum-0.3.0.dist-info/entry_points.txt,sha256=8d158243687a1102f915d2091f19bdc1443ec58709954317c4edff9edb9e5be8,57
25
+ adiumentum-0.3.0.dist-info/METADATA,sha256=9e6fb8f1c191d51f594457019bfc06043d6754b93ef6c7c1f82925d22eaeae2e,1136
26
+ adiumentum-0.3.0.dist-info/RECORD,,
@@ -0,0 +1,3 @@
1
+ [console_scripts]
2
+ adiumentum = adiumentum.__main__:main
3
+
adiumentum/io.py DELETED
@@ -1,33 +0,0 @@
1
- import json
2
- import os
3
- from pathlib import Path
4
-
5
-
6
- def list_full(directory: str | Path, ending: str = "") -> list[Path]:
7
- directory = Path(directory)
8
- return sorted([directory / file for file in os.listdir(directory) if file.endswith(ending)])
9
-
10
-
11
- def read_raw(json_path: Path) -> str:
12
- with open(json_path, encoding="utf-8") as f:
13
- return f.read()
14
-
15
-
16
- def read_json(json_path: Path) -> dict | list:
17
- with open(json_path, encoding="utf-8") as f:
18
- return json.load(f)
19
-
20
-
21
- def write_json(python_obj: dict | list | bytes, json_path: Path) -> None:
22
- with open(json_path, "w", encoding="utf-8") as f:
23
- json.dump(python_obj, f, indent=4, ensure_ascii=False)
24
-
25
-
26
- def write_raw(text: str, file_path: Path) -> None:
27
- with open(file_path, "w", encoding="utf-8") as f:
28
- f.write(text)
29
-
30
-
31
- def write_raw_bytes(text: bytes, json_path: Path) -> None:
32
- with open(json_path, "wb") as f:
33
- f.write(text)