adiumentum 0.1.1__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,410 @@
1
+ """
2
+ Idea: make BaseSequence[C, T] type,
3
+ where C is list|set|tuple and T is the element type
4
+ """
5
+
6
+ import json
7
+ from abc import ABC, abstractmethod
8
+ from collections.abc import Callable, Hashable, Mapping
9
+ from pathlib import Path
10
+ from types import GenericAlias
11
+ from typing import (
12
+ Any,
13
+ Literal,
14
+ Self,
15
+ TypeVar,
16
+ cast,
17
+ get_args,
18
+ )
19
+
20
+ from pydantic import ConfigDict, GetCoreSchemaHandler, GetJsonSchemaHandler, TypeAdapter
21
+ from pydantic.config import ExtraValues
22
+ from pydantic.fields import FieldInfo
23
+ from pydantic.json_schema import (
24
+ DEFAULT_REF_TEMPLATE,
25
+ GenerateJsonSchema,
26
+ JsonSchemaMode,
27
+ JsonSchemaValue,
28
+ )
29
+ from pydantic.main import IncEx
30
+ from pydantic_core import CoreSchema, core_schema
31
+
32
+ from .io_utils import JSONDict, write_json
33
+
34
# Generic helpers shared by the container base classes below.
T = TypeVar("T")
K_ = TypeVar("K_", bound=Hashable)  # dict keys must be hashable
V_ = TypeVar("V_")
# NOTE(review): `Literal[...] | str` collapses to plain `str` for type checkers;
# the runtime check against VALID_MODES is what actually constrains the value.
Mode = Literal["json", "python"] | str


# The only dump modes accepted by model_dump / pre_dump_hook.
VALID_MODES: set[Literal["json", "python"]] = {"json", "python"}


# TODO: next step: create BaseList (BaseModelList?)
# FoodPlanRaw, Entries, WeightSessionPlan, WorkoutPlanRaw, Partition
45
+
46
+
47
class AbstractCustom(ABC):
    """Shared pydantic-style API for builtin-container subclasses.

    Subclasses (dict/list/set wrappers) mirror the ``pydantic.BaseModel``
    surface — ``model_validate``, ``model_dump``, ``model_json_schema`` and so
    on — but delegate the actual work to the ``TypeAdapter`` returned by
    :meth:`get_adapter`. Pre/post hooks allow subclasses to massage data
    around validation and serialization.
    """

    model_config: ConfigDict = ConfigDict()
    __pydantic_core_schema__: CoreSchema

    @classmethod
    def __get_pydantic_core_schema__(
        cls, source_type: Any, handler: GetCoreSchemaHandler
    ) -> CoreSchema:
        # Route incoming dicts through `model_validate` so the validation hooks
        # run even when this type is nested inside another pydantic model.
        return core_schema.no_info_after_validator_function(
            cls.model_validate,
            core_schema.dict_schema(),
        )

    @classmethod
    def __get_pydantic_json_schema__(
        cls,
        core_schema: CoreSchema,  # shadows the module import; pydantic's conventional name
        handler: GetJsonSchemaHandler,
    ) -> JsonSchemaValue:
        json_schema = handler(core_schema)
        json_schema = handler.resolve_ref_schema(json_schema)
        return json_schema

    @abstractmethod
    def __init__(self, *args, **kwargs) -> None:
        # Fixed: the TypeError was previously constructed but never raised, so
        # this guard was a silent no-op. Concrete subclasses inherit a real
        # __init__ from dict/list/set, so this only fires on a direct call.
        raise TypeError(f"Use `model_validate` to instantiate {self.__class__.__name__}")

    @classmethod
    @abstractmethod
    def get_adapter(cls) -> TypeAdapter:
        """Return the TypeAdapter used for all validation and serialization."""
        ...

    @abstractmethod
    def __repr__(self) -> str: ...

    @abstractmethod
    def __str__(self) -> str: ...

    @classmethod
    def model_fields(cls) -> dict[str, FieldInfo]:
        # Container types have no named fields; provided for BaseModel parity.
        return {}

    @property
    def model_extra(self) -> dict[str, Any] | None:
        # No extra-field tracking for container types.
        return None

    @property
    def model_fields_set(self) -> set[str]:
        # No named fields, hence nothing is ever "set".
        return set()

    @classmethod
    @abstractmethod
    def model_construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self: ...

    @abstractmethod
    def model_copy(
        self, *, update: Mapping[str, Any] | None = None, deep: bool = False
    ) -> Self: ...

    def model_dump(
        self,
        *,
        mode: Literal["json", "python"] | str = "python",
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        exclude_computed_fields: bool = False,
        round_trip: bool = False,
        warnings: (bool | Literal["none", "warn", "error"]) = True,
        fallback: Callable[[Any], Any] | None = None,
        serialize_as_any: bool = False,
    ) -> dict[str, Any]:
        """Dump to a Python object via the adapter (mirrors ``BaseModel.model_dump``).

        Raises:
            ValueError: if ``mode`` is not ``"json"``/``"python"``, or if
                ``exclude_computed_fields`` is requested (unsupported here).
        """
        if mode not in VALID_MODES:
            raise ValueError(f"Invalid dump mode {mode!r}; expected one of {sorted(VALID_MODES)}.")
        if exclude_computed_fields:
            raise ValueError("`exclude_computed_fields` is not supported.")

        # NOTE(review): post_dump_hook is defined below but never invoked here —
        # presumably it should wrap this return value; confirm intent.
        return self.get_adapter().dump_python(
            self.pre_dump_hook(mode=mode),
            mode=mode,
            include=include,
            exclude=exclude,
            context=context,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
            fallback=fallback,
            serialize_as_any=serialize_as_any,
        )

    def model_dump_json(
        self,
        *,
        indent: int | None = None,
        ensure_ascii: bool = False,
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        exclude_computed_fields: bool = False,
        round_trip: bool = False,
        warnings: (bool | Literal["none", "warn", "error"]) = True,
        fallback: Callable[[Any], Any] | None = None,
        serialize_as_any: bool = False,
    ) -> str:
        """Dump to a JSON string via the adapter (mirrors ``BaseModel.model_dump_json``).

        Raises:
            ValueError: if ``ensure_ascii`` or ``exclude_computed_fields`` is
                requested; neither is supported by this implementation.
        """
        if ensure_ascii or exclude_computed_fields:
            raise ValueError(
                "`ensure_ascii` and `exclude_computed_fields` are not supported."
            )
        from_hook = self.pre_dump_hook(mode="json")
        return (
            self.get_adapter()
            .dump_json(
                from_hook,
                indent=indent,
                include=include,
                exclude=exclude,
                context=context,
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
                round_trip=round_trip,
                warnings=warnings,
                fallback=fallback,
                serialize_as_any=serialize_as_any,
            )
            .decode("utf-8")
        )

    @classmethod
    def model_json_schema(
        cls,
        by_alias: bool = True,
        ref_template: str = DEFAULT_REF_TEMPLATE,
        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
        mode: JsonSchemaMode = "validation",
        *,
        union_format: Literal["any_of", "primitive_type_array"] = "any_of",
    ) -> dict[str, Any]:
        """Return the JSON schema for this type via the adapter.

        NOTE(review): ``union_format`` is accepted for signature parity but is
        currently ignored — confirm whether it should be forwarded.
        """
        return cls.get_adapter().json_schema(
            by_alias=by_alias,
            ref_template=ref_template,
            schema_generator=schema_generator,
            mode=mode,
        )

    @classmethod
    def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
        # Deliberately unsupported for container wrappers.
        raise NotImplementedError

    @classmethod
    def model_validate(
        cls,
        obj: Any,
        *,
        strict: bool | None = None,
        extra: ExtraValues | None = None,
        from_attributes: bool | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        by_name: bool | None = None,
    ) -> Self:
        """Validate a Python object into an instance of this class.

        Pipeline: pre_validation_hook_python -> adapter.validate_python ->
        wrap in ``cls`` -> post_validation_hook.

        Raises:
            ValueError: if ``extra`` is supplied (unsupported here).
        """
        if extra:
            raise ValueError("`extra` is not supported by AbstractCustom.model_validate.")
        adapter = cls.get_adapter()
        obj = cls.pre_validation_hook_python(obj)
        obj = adapter.validate_python(
            obj,
            strict=strict,
            from_attributes=from_attributes,
            context=context,
            by_alias=by_alias,
            by_name=by_name,
        )
        validated = cls(obj)
        return cls.post_validation_hook(validated)

    @classmethod
    def model_validate_json(
        cls,
        json_data: str | bytes | bytearray,
        *,
        strict: bool | None = None,
        extra: ExtraValues | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        by_name: bool | None = None,
    ) -> Self:
        """Validate JSON text into an instance of this class.

        Raises:
            ValueError: if ``extra`` is supplied (unsupported here).
        """
        if extra:
            raise ValueError("`extra` is not supported by AbstractCustom.model_validate_json.")
        adapter = cls.get_adapter()
        json_string = cls.pre_validation_hook_json(json_data)
        raw_validated = adapter.validate_json(
            json_string,
            strict=strict,
            context=context,
            by_alias=by_alias,
            by_name=by_name,
        )
        validated = cls(raw_validated)
        return cls.post_validation_hook(validated)

    @classmethod
    @abstractmethod
    def model_validate_strings(
        cls,
        obj: Any,
        *,
        strict: bool | None = None,
        extra: ExtraValues | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        by_name: bool | None = None,
    ) -> Self: ...

    @property
    def __annotations__(self) -> dict[str, Any]:  # type: ignore
        # NOTE(review): overriding __annotations__ as a property is unusual and
        # may confuse introspection tools — confirm this is intentional.
        return self.get_adapter().__annotations__

    @classmethod
    def read(cls, read_path: Path) -> Self:
        """Load and validate an instance from a JSON file."""
        return cls.model_validate_json(read_path.read_text())

    def write(self, write_path: Path) -> None:
        """Serialize this instance (JSON mode) and write it to `write_path`."""
        write_json(cast(JSONDict, self.model_dump(mode="json")), write_path)

    @staticmethod
    def pre_validation_hook_python(python_dict: dict) -> dict:
        """Override to transform raw Python input before validation. Default: identity."""
        return python_dict

    @staticmethod
    def pre_validation_hook_json(json_string: str | bytes | bytearray) -> str | bytes | bytearray:
        """Override to transform raw JSON text before validation. Default: identity."""
        return json_string

    @staticmethod
    def post_validation_hook(validated: T) -> T:
        """Override to transform the validated instance. Default: identity."""
        return validated

    def pre_dump_hook(self, *, mode: Mode) -> Self:
        """Override to transform the instance before dumping. Default: identity.

        Raises:
            ValueError: if ``mode`` is not ``"json"``/``"python"``.
        """
        if mode not in VALID_MODES:
            raise ValueError(f"Invalid dump mode {mode!r}; expected one of {sorted(VALID_MODES)}.")
        return self

    def post_dump_hook(self, dumped: T, *, mode: Mode) -> T:
        """Override to transform dumped output. Default: identity (currently unused)."""
        return dumped

    def schema(self) -> CoreSchema:
        """Return the adapter's core schema."""
        return self.get_adapter().core_schema

    def schema_json(self) -> str:
        """Return the JSON schema serialized as a JSON string."""
        return json.dumps(self.model_json_schema())
309
+
310
+
311
class BaseDict[K, V](dict[K, V], AbstractCustom):
    """
    Dictionary type leveraging pydantic for validation and JSON serialization.
    """

    @classmethod
    def get_adapter(cls) -> TypeAdapter[dict[K, V]]:
        """Return a TypeAdapter for this subclass, preserving its key/value types."""
        # NOTE(review): this only matches bases that are `types.GenericAlias`
        # (e.g. the `dict[K, V]` base of BaseDict itself); whether a
        # parametrized subclass base like `BaseDict[str, int]` is a
        # types.GenericAlias (vs typing._GenericAlias) should be confirmed —
        # if not, subclasses would fall through to the TypeError below.
        for base in getattr(cls, "__orig_bases__", []):
            if isinstance(base, GenericAlias):
                key_type, val_type = get_args(base)
                return TypeAdapter(dict[key_type, val_type])  # type: ignore[valid-type]
        raise TypeError("Key and value types not found.")

    def __repr__(self) -> str:
        # Debug form: class name followed by the plain-dict repr.
        return f"{self.__class__.__name__}{dict.__repr__(self)}"

    def __str__(self) -> str:
        # Subclasses must define their own human-readable form.
        raise NotImplementedError
330
+
331
+
332
+ class BaseList[T](list[T], AbstractCustom):
333
+ """
334
+ Dictionary type leveraging pydantic for validation and JSON serialization.
335
+ """
336
+
337
+ @classmethod
338
+ def get_adapter(cls) -> TypeAdapter[list[T]]:
339
+ """Return a TypeAdapter for this subclass, preserving its key/value types."""
340
+ for base in getattr(cls, "__orig_bases__", []):
341
+ if isinstance(base, GenericAlias):
342
+ element_type = get_args(base)[0]
343
+ return TypeAdapter(list[element_type]) # type: ignore[valid-type]
344
+ raise TypeError("Key and value types not found.")
345
+
346
+ def __repr__(self) -> str:
347
+ return f"{self.__class__.__name__}{list.__repr__(self)}"
348
+
349
+ def __str__(self) -> str:
350
+ raise NotImplementedError
351
+
352
+
353
+ class BaseSet[T](set[T], AbstractCustom):
354
+ """
355
+ Dictionary type leveraging pydantic for validation and JSON serialization.
356
+
357
+ TODO: make dump as list if not all members hashable.
358
+ """
359
+
360
+ @classmethod
361
+ def get_adapter(cls) -> TypeAdapter[set[T]]:
362
+ """Return a TypeAdapter for this subclass, preserving its key/value types."""
363
+ for base in getattr(cls, "__orig_bases__", []):
364
+ if isinstance(base, GenericAlias):
365
+ element_type = get_args(base)[0]
366
+ return TypeAdapter(set[element_type]) # type: ignore[valid-type]
367
+ raise TypeError("Key and value types not found.")
368
+
369
+ def __repr__(self) -> str:
370
+ return f"{self.__class__.__name__}{set.__repr__(self)}"
371
+
372
+ def __str__(self) -> str:
373
+ raise NotImplementedError
374
+
375
+ def model_dump(
376
+ self,
377
+ *,
378
+ mode: Literal["json", "python"] | str = "python",
379
+ include: IncEx | None = None,
380
+ exclude: IncEx | None = None,
381
+ context: Any | None = None,
382
+ by_alias: bool | None = None,
383
+ exclude_unset: bool = False,
384
+ exclude_defaults: bool = False,
385
+ exclude_none: bool = False,
386
+ exclude_computed_fields: bool = False,
387
+ round_trip: bool = False,
388
+ warnings: (bool | Literal["none", "warn", "error"]) = True,
389
+ fallback: Callable[[Any], Any] | None = None,
390
+ serialize_as_any: bool = False,
391
+ ) -> dict[str, Any]:
392
+ if mode not in VALID_MODES:
393
+ raise ValueError
394
+ if exclude_computed_fields:
395
+ raise ValueError
396
+
397
+ return BaseList.model_validate(self).model_dump(
398
+ mode=mode,
399
+ include=include,
400
+ exclude=exclude,
401
+ context=context,
402
+ by_alias=by_alias,
403
+ exclude_unset=exclude_unset,
404
+ exclude_defaults=exclude_defaults,
405
+ exclude_none=exclude_none,
406
+ round_trip=round_trip,
407
+ warnings=warnings,
408
+ fallback=fallback,
409
+ serialize_as_any=serialize_as_any,
410
+ )
@@ -1,19 +1,40 @@
1
1
  import json
2
2
  import re
3
3
  from collections.abc import Callable, Iterable
4
- from typing import Literal, TypeAlias, cast
5
-
6
- from datethyme import Date, Time
4
+ from typing import Literal, Protocol, Self, TypeAlias, cast
7
5
 
8
6
  from .functional import lmap
7
+ from .typing_utils import JSONDict, areinstances
8
+
9
class DateProtocol(Protocol):
    """Structural stand-in for a calendar-date type.

    Decouples this module from any concrete date library (the previous
    version imported `datethyme.Date` directly); any class exposing
    year/month/day plus `parse` and `__str__` satisfies it.
    """

    year: int
    month: int
    day: int

    @classmethod
    def parse(cls, date_string: str) -> Self: ...

    def __str__(self) -> str: ...
18
+
19
+
20
class TimeProtocol(Protocol):
    """Structural stand-in for a time-of-day type.

    Mirrors DateProtocol: any class exposing hour/minute/second plus
    `parse` and `__str__` satisfies it. Note `second` is a float, so
    fractional seconds are representable.
    """

    hour: int
    minute: int
    second: float

    @classmethod
    def parse(cls, time_string: str) -> Self: ...

    def __str__(self) -> str: ...
29
+
9
30
 
10
31
  MixedValidated: TypeAlias = (
11
32
  str
12
33
  | bool
13
34
  | int
14
35
  | float
15
- | Time
16
- | Date
36
+ | TimeProtocol
37
+ | DateProtocol
17
38
  | tuple[str, ...]
18
39
  | tuple[str, str]
19
40
  | tuple[float, float]
@@ -75,7 +96,7 @@ def parse_sequence(s: str) -> list[str]:
75
96
  start, end = start[1:], end[1:]
76
97
  return [f"{letter}{i}" for i in range(int(start), int(end) + 1, step)]
77
98
 
78
- segments = []
99
+ segments: list[str] = []
79
100
  for subseq in s.strip().split(","):
80
101
  segments.extend(interpolate(subseq))
81
102
  return segments
@@ -134,10 +155,10 @@ def cast_to_negative_score(s: str | bool) -> float:
134
155
  return score
135
156
 
136
157
 
137
- def cast_to_Date(s: str | bool) -> Date:
158
def cast_to_date(s: str | bool, date_class: type[DateProtocol]) -> DateProtocol:
    """Parse *s* into a date using *date_class*'s ``parse`` constructor.

    The ``str | bool`` input type comes from the upstream mixed-value
    dispatch; booleans are never valid date inputs and are rejected.

    Raises:
        TypeError: if *s* is a bool.
    """
    if isinstance(s, bool):
        # Fixed: previously a bare `raise TypeError` with no diagnostic.
        raise TypeError(f"Cannot cast bool {s!r} to a date; expected a date string.")
    return date_class.parse(s)
141
162
 
142
163
 
143
164
  def cast_to_stringtuple(s: str | bool) -> tuple[str, ...]:
@@ -159,10 +180,10 @@ def cast_as(
159
180
  "integer": cast_to_int,
160
181
  "float": cast_to_float,
161
182
  "minutes": cast_to_minutes,
162
- "time": Time.model_validate,
183
+ # "time": Time.model_validate, TODO: add later via injection
163
184
  "positiveScore": cast_to_positive_score,
164
185
  "negativeScore": cast_to_negative_score,
165
- "date": cast_to_Date,
186
+ # "date": cast_to_date, TODO: add later via injection
166
187
  "stringtuple": cast_to_stringtuple,
167
188
  }
168
189
  caster = dispatch[input_type]
@@ -175,5 +196,12 @@ def cast_as(
175
196
  return type_specific_caster
176
197
 
177
198
 
178
- def as_json(d: dict) -> str:
199
def as_json(d: JSONDict) -> str:
    """Serialize *d* to pretty-printed JSON (4-space indent), keeping
    non-ASCII characters verbatim rather than escaping them."""
    return json.dumps(d, ensure_ascii=False, indent=4)
201
+
202
+
203
def re_split(expr: str | re.Pattern[str], s: str) -> list[str]:
    """Split *s* on *expr*, guaranteeing that every segment is a plain string.

    `re.split` can yield None entries when the pattern contains optional
    capture groups; this wrapper rejects any such result.

    Raises:
        TypeError: if any resulting segment is not a str.
    """
    parts = re.split(expr, s)
    if areinstances(parts, str):
        return parts
    raise TypeError("Function 're_split' may only return a list of strings.")
@@ -1,13 +1,22 @@
1
+ import re
2
+ from abc import ABC, abstractmethod
1
3
  from collections.abc import Callable, Iterable
4
+ from dataclasses import dataclass
5
+ from pathlib import Path
2
6
  from types import UnionType
3
- from typing import TypeVar
7
+ from typing import Annotated, Any, Protocol, Self, TypeVar, Union
4
8
 
5
9
T = TypeVar("T")


# Scalar types that need no recursive structure.
type Atomic = int | float | str | bool | None
# NOTE(review): tuple["ClassInfo"] types a 1-tuple, but isinstance() accepts
# arbitrary-length tuples — presumably tuple["ClassInfo", ...] was intended; confirm.
ClassInfo = type | UnionType | tuple["ClassInfo"]
# NOTE(review): bool is absent here; at runtime isinstance(x, int) admits bools,
# but type checkers will reject bool values — confirm whether that is intended.
JSONPrimitive = int | float | str | None
# Recursive JSON object type: values are primitives, nested objects, or lists.
JSONDict = dict[str, Union[JSONPrimitive, "JSONDict", list[Union["JSONDict | JSONPrimitive"]]]]
JSONList = list[JSONPrimitive | JSONDict]
8
17
 
9
18
 
10
- def areinstances(iterable_instance: Iterable, class_or_tuple: ClassInfo) -> bool:
19
def areinstances(iterable_instance: Iterable[Any], class_or_tuple: ClassInfo) -> bool:
    """Return True when every element of *iterable_instance* is an instance
    of *class_or_tuple* (vacuously True for an empty iterable)."""
    return all(isinstance(element, class_or_tuple) for element in iterable_instance)
12
21
 
13
22
 
@@ -17,3 +26,107 @@ def fallback_if_none(orig: T | None, alt: T) -> T:
17
26
 
18
27
def call_fallback_if_none(orig: T | None, alt: Callable[[], T]) -> T:
    """Return *orig* unless it is None; otherwise call *alt* and return its result.

    The fallback is lazy: *alt* is only invoked when needed.
    """
    if orig is None:
        return alt()
    return orig
29
+
30
+
31
class SupportsGe(Protocol):
    """Structural type for values supporting the ``>=`` comparison."""

    def __ge__(self: T, __other: T) -> bool: ...
33
+
34
+
35
class SupportsGt(Protocol):
    """Structural type for values supporting the ``>`` comparison."""

    def __gt__(self: T, __other: T) -> bool: ...
37
+
38
+
39
class SupportsLe(Protocol):
    """Structural type for values supporting the ``<=`` comparison."""

    def __le__(self: T, __other: T) -> bool: ...
41
+
42
+
43
class SupportsLt(Protocol):
    """Structural type for values supporting the ``<`` comparison."""

    def __lt__(self: T, __other: T) -> bool: ...
45
+
46
+
47
+ @dataclass(frozen=True)
48
+ class _BaseMetadata:
49
+ """Base class for all metadata.
50
+
51
+ This exists mainly so that implementers
52
+ can do `isinstance(..., BaseMetadata)` while traversing field annotations.
53
+ """
54
+
55
+ __slots__ = ()
56
+
57
+
58
+ @dataclass(frozen=True)
59
+ class Ge(_BaseMetadata):
60
+ """Ge(ge=x) implies that the value must be greater than or equal to x.
61
+
62
+ It can be used with any type that supports the ``>=`` operator,
63
+ including numbers, dates and times, strings, sets, and so on.
64
+ """
65
+
66
+ ge: SupportsGe
67
+
68
+
69
+ @dataclass(frozen=True)
70
+ class Gt(_BaseMetadata):
71
+ """Gt(gt=x) implies that the value must be greater than x.
72
+
73
+ It can be used with any type that supports the ``>`` operator,
74
+ including numbers, dates and times, strings, sets, and so on.
75
+ """
76
+
77
+ gt: SupportsGt
78
+
79
+
80
+ @dataclass(frozen=True)
81
+ class Lt(_BaseMetadata):
82
+ """Lt(lt=x) implies that the value must be less than x.
83
+
84
+ It can be used with any type that supports the ``<`` operator,
85
+ including numbers, dates and times, strings, sets, and so on.
86
+ """
87
+
88
+ lt: SupportsLt
89
+
90
+
91
+ @dataclass(frozen=True)
92
+ class Le(_BaseMetadata):
93
+ """Le(le=x) implies that the value must be less than x.
94
+
95
+ It can be used with any type that supports the ``<=`` operator,
96
+ including numbers, dates and times, strings, sets, and so on.
97
+ """
98
+
99
+ le: SupportsLe
100
+
101
+
102
class SupportsIO(ABC):
    """Interface for objects that can round-trip themselves through a file path."""

    @classmethod
    @abstractmethod
    def read(cls, read_path: Path) -> Self: ...

    @abstractmethod
    def write(self, write_path: Path) -> None: ...
109
+
110
+
111
class Pattern:
    """Namespace of pre-compiled regexes for lightweight string validation."""

    # Fixed: both DATE patterns previously contained a stray empty alternative
    # (`(0?\d|1[012]|)`), which accepted an empty month, e.g. "2024--15".
    # NOTE(review): `0?\d` still allows month/day 0 (e.g. "2024-0-0") —
    # confirm whether zero values should also be rejected.
    DATE: re.Pattern[str] = re.compile(r"^[12]\d\d\d-(0?\d|1[012])-(0?\d|[12]\d|3[01])$")
    DATE_STRICT: re.Pattern[str] = re.compile(r"^[12]\d\d\d-(0\d|1[012])-(0\d|[12]\d|3[01])$")
    # Loose form: 2-4 digit year, 1-2 digit month/day, any single non-digit separators.
    DATE_LOOSE: re.Pattern[str] = re.compile(r"(\d{2,4})[^\d](\d\d?)[^\d](\d\d?)")
    # Identifier: a letter followed by at least one letter/digit/underscore.
    ID: re.Pattern[str] = re.compile(r"^[A-Za-z][A-Za-z0-9_]+$")
    # NOTE(review): despite the name, this only matches *two* comma-separated
    # IDs, never a single one — confirm intent.
    ID_OR_2IDS: re.Pattern[str] = re.compile(r"^[A-Za-z][A-Za-z0-9_]+,[A-Za-z][A-Za-z0-9_]+$")
    # Positive integers with no leading zeros (excludes 0).
    NATURAL: re.Pattern[str] = re.compile(r"^[1-9][0-9]*$")
    # A decimal fraction in (0, 1), optionally with the leading 0 omitted.
    PROPORTION: re.Pattern[str] = re.compile(r"^0?\.[0-9]+$")
    PROPORTION_OR_2: re.Pattern[str] = re.compile(r"^0?\.[0-9]+$|^0?\.[0-9]+,0?\.[0-9]+$")
120
+
121
+
122
# Raw time-amount input before normalization (e.g. "90min" or plain minutes).
TimeAmountRaw = str | int

# Constrained numeric aliases; the Ge/Gt/Le/Lt metadata objects are read by
# validators that traverse Annotated metadata.
# NOTE(review): Natural is Annotated[int, Ge(ge=0)], i.e. it admits 0, which
# differs from Pattern.NATURAL (positive integers only) — confirm intent.
Natural = Annotated[int, Ge(ge=0)]
Nonnegative = Annotated[float, Ge(ge=0)]
Positive = Annotated[float, Gt(gt=0)]
PositiveScore = Annotated[float, Gt(gt=0.0), Lt(lt=5.0)]
NegativeScore = Annotated[float, Gt(gt=-5.0), Lt(lt=0.0)]
Proportion = Annotated[float, Ge(ge=0.0), Le(le=1.0)]
NonnegativeInt = Annotated[int, Ge(ge=0)]
NonnegativeFloat = Annotated[float, Ge(ge=0.0)]
PolarityScore = Annotated[float, Ge(ge=-1.0), Le(le=1.0)]
@@ -0,0 +1,60 @@
1
+ Metadata-Version: 2.3
2
+ Name: adiumentum
3
+ Version: 0.3.1
4
+ Summary:
5
+ Author: Isaac Riley
6
+ Author-email: Isaac Riley <yelircaasi@proton.me>
7
+ Requires-Dist: pydantic>=2.11
8
+ Requires-Dist: multipledispatch>=1
9
+ Requires-Dist: loguru>=0.7.3
10
+ Requires-Python: >=3.11, <3.15
11
+ Description-Content-Type: text/markdown
12
+
13
+ # adiumentum
14
+
15
+
16
+ With Nix installed, you can enter a development environment with all dependencies installed:
17
+
18
+ ```sh
19
+ nix develop
20
+ ```
21
+
22
+ Once in this dev shell, you have a number of development utils you can try out (via just):
23
+
24
+ ```sh
25
+ ✔just
26
+ ✔just format
27
+ ✔just check
28
+ ✔just fix
29
+ ✔just typecheck
30
+ ✔just lint
31
+ ✔just deal
32
+ ✔just vulture
33
+ ✔just pydeps-full
34
+ ✔just pydeps
35
+ ✔just pydeps-simple
36
+ ✔just view-deps
37
+ ✔just snakefood
38
+ ✔just deply
39
+ ✔just bandit
40
+ ✔just bandit-html
41
+ ✔just bandit-view
42
+ ✔just pyflame
43
+ ✔just flamegraph
44
+ ✔just perf-flamegraph
45
+ ✔just check-structure
46
+ ✔just check-imports
47
+ ✔just smoke
48
+ ✔just unit
49
+ ✔just test
50
+ ✔just test-cov
51
+ ✔just docs
52
+ ✔just scalene
53
+ ✔just view-cov
54
+ ✔just view-docs
55
+ ✔just view-flamegraphs
56
+ ✔just sbom
57
+
58
+ lefthook validate
59
+ lefthook run all
60
+ ```