haiway 0.17.0__py3-none-any.whl → 0.18.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- haiway/__init__.py +18 -16
- haiway/context/__init__.py +19 -13
- haiway/context/access.py +92 -88
- haiway/context/disposables.py +2 -2
- haiway/context/identifier.py +4 -5
- haiway/context/observability.py +452 -0
- haiway/context/state.py +2 -2
- haiway/context/tasks.py +1 -3
- haiway/context/types.py +2 -2
- haiway/helpers/__init__.py +7 -6
- haiway/helpers/asynchrony.py +2 -2
- haiway/helpers/caching.py +2 -2
- haiway/helpers/observability.py +219 -0
- haiway/helpers/retries.py +1 -3
- haiway/helpers/throttling.py +1 -3
- haiway/helpers/timeouted.py +1 -3
- haiway/helpers/tracing.py +25 -17
- haiway/opentelemetry/__init__.py +3 -0
- haiway/opentelemetry/observability.py +420 -0
- haiway/state/__init__.py +2 -2
- haiway/state/attributes.py +2 -2
- haiway/state/path.py +1 -3
- haiway/state/requirement.py +1 -3
- haiway/state/structure.py +161 -30
- haiway/state/validation.py +2 -2
- haiway/types/__init__.py +2 -2
- haiway/types/default.py +2 -2
- haiway/types/frozen.py +1 -3
- haiway/types/missing.py +2 -2
- haiway/utils/__init__.py +2 -2
- haiway/utils/always.py +2 -2
- haiway/utils/collections.py +2 -2
- haiway/utils/env.py +2 -2
- haiway/utils/freezing.py +1 -3
- haiway/utils/logs.py +1 -3
- haiway/utils/mimic.py +1 -3
- haiway/utils/noop.py +2 -2
- haiway/utils/queue.py +1 -3
- haiway/utils/stream.py +1 -3
- {haiway-0.17.0.dist-info → haiway-0.18.0.dist-info}/METADATA +9 -5
- haiway-0.18.0.dist-info/RECORD +44 -0
- haiway/context/logging.py +0 -242
- haiway/context/metrics.py +0 -176
- haiway/helpers/metrics.py +0 -465
- haiway-0.17.0.dist-info/RECORD +0 -43
- {haiway-0.17.0.dist-info → haiway-0.18.0.dist-info}/WHEEL +0 -0
- {haiway-0.17.0.dist-info → haiway-0.18.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,420 @@
+import os
+from collections.abc import Mapping
+from typing import Any, Self, final
+
+###
+from opentelemetry import metrics, trace
+from opentelemetry._logs import get_logger, set_logger_provider
+from opentelemetry._logs._internal import Logger
+from opentelemetry._logs.severity import SeverityNumber
+from opentelemetry.context import Context
+from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter
+from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+from opentelemetry.metrics._internal import Meter
+from opentelemetry.metrics._internal.instrument import Counter, Histogram
+from opentelemetry.sdk._logs import LoggerProvider
+from opentelemetry.sdk._logs._internal import LogRecord
+from opentelemetry.sdk._logs._internal.export import (
+    BatchLogRecordProcessor,
+    ConsoleLogExporter,
+    LogExporter,
+)
+from opentelemetry.sdk.metrics import MeterProvider as SdkMeterProvider
+from opentelemetry.sdk.metrics._internal.export import MetricExporter
+from opentelemetry.sdk.metrics.export import (
+    ConsoleMetricExporter,
+    PeriodicExportingMetricReader,
+)
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter, SpanExporter
+from opentelemetry.trace import Span, StatusCode, Tracer
+
+from haiway.context import Observability, ObservabilityLevel, ScopeIdentifier
+from haiway.state import State
+
+__all__ = ("OpenTelemetry",)
+
+
+class ScopeStore:
+    __slots__ = (
+        "_completed",
+        "_counters",
+        "_exited",
+        "_histograms",
+        "identifier",
+        "logger",
+        "logger_2",
+        "meter",
+        "nested",
+        "span",
+    )
+
+    def __init__(
+        self,
+        identifier: ScopeIdentifier,
+        /,
+        span: Span,
+        meter: Meter,
+        logger: Logger,
+    ) -> None:
+        self.identifier: ScopeIdentifier = identifier
+        self.nested: list[ScopeStore] = []
+        self._counters: dict[str, Counter] = {}
+        self._histograms: dict[str, Histogram] = {}
+        self._exited: bool = False
+        self._completed: bool = False
+        self.span: Span = span
+        self.meter: Meter = meter
+        self.logger: Logger = logger
+
+    @property
+    def exited(self) -> bool:
+        return self._exited
+
+    def exit(self) -> None:
+        assert not self._exited  # nosec: B101
+        self._exited = True
+
+    @property
+    def completed(self) -> bool:
+        return self._completed and all(nested.completed for nested in self.nested)
+
+    def try_complete(self) -> bool:
+        if not self._exited:
+            return False  # not elegible for completion yet
+
+        if self._completed:
+            return False  # already completed
+
+        if not all(nested.completed for nested in self.nested):
+            return False  # nested not completed
+
+        self._completed = True
+        self.span.end()
+        return True  # successfully completed
+
+    def record_log(
+        self,
+        message: str,
+        /,
+        level: ObservabilityLevel,
+    ) -> None:
+        self.logger.emit(
+            LogRecord(
+                span_id=self.span.get_span_context().span_id,
+                trace_id=self.span.get_span_context().trace_id,
+                trace_flags=self.span.get_span_context().trace_flags,
+                body=message,
+                severity_text=level.name,
+                severity_number=SEVERITY_MAPPING[level],
+                attributes={
+                    "trace_id": self.identifier.trace_id,
+                    "scope_id": self.identifier.scope_id,
+                    "parent_id": self.identifier.parent_id,
+                },
+            )
+        )
+
+    def record_exception(
+        self,
+        exception: BaseException,
+        /,
+    ) -> None:
+        self.span.record_exception(exception)
+
+    def record_event(
+        self,
+        event: State,
+        /,
+    ) -> None:
+        self.span.add_event(
+            str(type(event).__name__),
+            attributes=event.to_mapping(recursive=True),
+        )
+
+    def record_metric(
+        self,
+        name: str,
+        /,
+        *,
+        value: float | int,
+        unit: str | None,
+    ) -> None:
+        attributes: Mapping[str, Any] = {
+            "trace_id": self.identifier.trace_id,
+            "scope_id": self.identifier.scope_id,
+            "parent_id": self.identifier.parent_id,
+        }
+
+        if name not in self._counters:
+            self._counters[name] = self.meter.create_counter(
+                name=name,
+                unit=unit or "",
+            )
+
+        self._counters[name].add(
+            value,
+            attributes=attributes,
+        )
+
+
+@final
+class OpenTelemetry:
+    @classmethod
+    def configure(
+        cls,
+        *,
+        service: str,
+        version: str,
+        environment: str,
+        otlp_endpoint: str | None = None,
+        insecure: bool = True,
+        export_interval_millis: int = 10000,
+        attributes: Mapping[str, Any] | None = None,
+    ) -> type[Self]:
+        # Create shared resource for both metrics and traces
+        resource: Resource = Resource.create(
+            {
+                "service.name": service,
+                "service.version": version,
+                "deployment.environment": environment,
+                "service.pid": os.getpid(),
+                **(attributes if attributes is not None else {}),
+            },
+        )
+
+        logs_exporter: LogExporter
+        span_exporter: SpanExporter
+        metric_exporter: MetricExporter
+
+        if otlp_endpoint:
+            logs_exporter = OTLPLogExporter(
+                endpoint=otlp_endpoint,
+                insecure=insecure,
+            )
+            span_exporter = OTLPSpanExporter(
+                endpoint=otlp_endpoint,
+                insecure=insecure,
+            )
+            metric_exporter = OTLPMetricExporter(
+                endpoint=otlp_endpoint,
+                insecure=insecure,
+            )
+
+        else:
+            logs_exporter = ConsoleLogExporter()
+            span_exporter = ConsoleSpanExporter()
+            metric_exporter = ConsoleMetricExporter()
+
+        # Set up logger provider
+        logger_provider: LoggerProvider = LoggerProvider(
+            resource=resource,
+            shutdown_on_exit=True,
+        )
+        log_processor: BatchLogRecordProcessor = BatchLogRecordProcessor(logs_exporter)
+        logger_provider.add_log_record_processor(log_processor)
+        set_logger_provider(logger_provider)
+
+        # Set up metrics provider
+        meter_provider: SdkMeterProvider = SdkMeterProvider(
+            resource=resource,
+            metric_readers=[
+                PeriodicExportingMetricReader(
+                    metric_exporter,
+                    export_interval_millis=export_interval_millis,
+                )
+            ],
+            shutdown_on_exit=True,
+        )
+        metrics.set_meter_provider(meter_provider)
+
+        # Set up trace provider
+        tracer_provider: TracerProvider = TracerProvider(
+            resource=resource,
+            shutdown_on_exit=True,
+        )
+        span_processor: BatchSpanProcessor = BatchSpanProcessor(span_exporter)
+        tracer_provider.add_span_processor(span_processor)
+        trace.set_tracer_provider(tracer_provider)
+
+        return cls
+
+    @classmethod
+    def observability(  # noqa: C901, PLR0915
+        cls,
+        level: ObservabilityLevel = ObservabilityLevel.INFO,
+    ) -> Observability:
+        tracer: Tracer | None = None
+        meter: Meter | None = None
+        root_scope: ScopeIdentifier | None = None
+        scopes: dict[str, ScopeStore] = {}
+        observed_level: ObservabilityLevel = level
+
+        def log_recording(
+            scope: ScopeIdentifier,
+            /,
+            level: ObservabilityLevel,
+            message: str,
+            *args: Any,
+            exception: BaseException | None,
+        ) -> None:
+            assert root_scope is not None  # nosec: B101
+            assert scope.scope_id in scopes  # nosec: B101
+
+            if level < observed_level:
+                return
+
+            scopes[scope.scope_id].record_log(
+                message % args,
+                level=level,
+            )
+            if exception is not None:
+                scopes[scope.scope_id].record_exception(exception)
+
+        def event_recording(
+            scope: ScopeIdentifier,
+            /,
+            *,
+            level: ObservabilityLevel,
+            event: State,
+        ) -> None:
+            assert root_scope is not None  # nosec: B101
+            assert scope.scope_id in scopes  # nosec: B101
+
+            if level < observed_level:
+                return
+
+            scopes[scope.scope_id].record_event(event)
+
+        def metric_recording(
+            scope: ScopeIdentifier,
+            /,
+            *,
+            metric: str,
+            value: float | int,
+            unit: str | None,
+        ) -> None:
+            assert root_scope is not None  # nosec: B101
+            assert scope.scope_id in scopes  # nosec: B101
+
+            if level < observed_level:
+                return
+
+            scopes[scope.scope_id].record_metric(
+                metric,
+                value=value,
+                unit=unit,
+            )
+
+        def scope_entering[Metric: State](
+            scope: ScopeIdentifier,
+            /,
+        ) -> None:
+            nonlocal tracer
+            assert scope.scope_id not in scopes  # nosec: B101
+
+            nonlocal root_scope
+            nonlocal meter
+
+            scope_store: ScopeStore
+            if root_scope is None:
+                tracer = trace.get_tracer(scope.trace_id)
+                meter = metrics.get_meter(scope.trace_id)
+                scope_store = ScopeStore(
+                    scope,
+                    span=tracer.start_span(
+                        name=scope.label,
+                        context=Context(
+                            trace_id=scope.trace_id,
+                            scope_id=scope.scope_id,
+                            parent_id=scope.parent_id,
+                        ),
+                        attributes={
+                            "trace_id": scope.trace_id,
+                            "scope_id": scope.scope_id,
+                            "parent_id": scope.parent_id,
+                        },
+                    ),
+                    meter=meter,
+                    logger=get_logger(scope.label),
+                )
+                root_scope = scope
+
+            else:
+                assert tracer is not None  # nosec: B101
+                assert meter is not None  # nosec: B101
+
+                scope_store = ScopeStore(
+                    scope,
+                    span=tracer.start_span(
+                        name=scope.label,
+                        context=trace.set_span_in_context(
+                            scopes[scope.parent_id].span,
+                            Context(
+                                trace_id=scope.trace_id,
+                                scope_id=scope.scope_id,
+                                parent_id=scope.parent_id,
+                            ),
+                        ),
+                        attributes={
+                            "trace_id": scope.trace_id,
+                            "scope_id": scope.scope_id,
+                            "parent_id": scope.parent_id,
+                        },
+                    ),
+                    meter=meter,
+                    logger=get_logger(scope.label),
+                )
+                scopes[scope.parent_id].nested.append(scope_store)
+
+            scopes[scope.scope_id] = scope_store
+
+        def scope_exiting[Metric: State](
+            scope: ScopeIdentifier,
+            /,
+            *,
+            exception: BaseException | None,
+        ) -> None:
+            nonlocal root_scope
+            nonlocal scopes
+            assert root_scope is not None  # nosec: B101
+            assert scope.scope_id in scopes  # nosec: B101
+
+            scopes[scope.scope_id].exit()
+            if exception is not None:
+                scopes[scope.scope_id].span.set_status(status=StatusCode.ERROR)
+
+            else:
+                scopes[scope.scope_id].span.set_status(status=StatusCode.OK)
+
+            if not scopes[scope.scope_id].try_complete():
+                return  # not completed yet or already completed
+
+            # try complete parent scopes
+            parent_id: str = scope.parent_id
+            while scopes[parent_id].try_complete():
+                parent_id = scopes[parent_id].identifier.parent_id
+
+            # check for root completion
+            if scopes[root_scope.scope_id].completed:
+                # finished root - cleanup state
+                root_scope = None
+                scopes = {}
+
+        return Observability(
+            log_recording=log_recording,
+            event_recording=event_recording,
+            metric_recording=metric_recording,
+            scope_entering=scope_entering,
+            scope_exiting=scope_exiting,
+        )
+
+
+SEVERITY_MAPPING = {
+    ObservabilityLevel.DEBUG: SeverityNumber.DEBUG,
+    ObservabilityLevel.INFO: SeverityNumber.INFO,
+    ObservabilityLevel.WARNING: SeverityNumber.WARN,
+    ObservabilityLevel.ERROR: SeverityNumber.ERROR,
+}
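The OpenTelemetry class above is used in two steps: configure(...) wires OTLP (or console) exporters into the global tracer, meter, and logger providers once per process, and observability(...) then produces an Observability handler whose recorders drop anything below the chosen level. A minimal usage sketch based only on the signatures in this file; the service values and endpoint are illustrative, and the import of OpenTelemetry assumes the new haiway.opentelemetry package __init__ (added in this release but not shown here) re-exports it:

from haiway.context import Observability, ObservabilityLevel
from haiway.opentelemetry import OpenTelemetry

# One-time process setup; without an OTLP endpoint the console exporters are used instead.
OpenTelemetry.configure(
    service="example-service",
    version="0.1.0",
    environment="dev",
    otlp_endpoint="http://localhost:4317",  # hypothetical collector address
    insecure=True,
)

# Build an Observability handler; log/event/metric records below INFO are skipped by the recorders.
observability: Observability = OpenTelemetry.observability(level=ObservabilityLevel.INFO)

How the returned handler is attached to a haiway scope is part of the reworked haiway.context module, which is not included in this excerpt.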
haiway/state/__init__.py
CHANGED
@@ -3,10 +3,10 @@ from haiway.state.path import AttributePath
 from haiway.state.requirement import AttributeRequirement
 from haiway.state.structure import State
 
-__all__ = [
+__all__ = (
     "AttributeAnnotation",
     "AttributePath",
     "AttributeRequirement",
     "State",
     "attribute_annotations",
-]
+)
haiway/state/attributes.py
CHANGED
@@ -25,11 +25,11 @@ from typing import (
 from haiway import types as haiway_types
 from haiway.types import MISSING, Missing
 
-__all__ = [
+__all__ = (
     "AttributeAnnotation",
    "attribute_annotations",
     "resolve_attribute_annotation",
-]
+)
 
 
 @final
haiway/state/path.py
CHANGED
@@ -10,9 +10,7 @@ from typing import Any, TypeAliasType, final, get_args, get_origin, overload
 
 from haiway.types import MISSING, Missing, not_missing
 
-__all__ = [
-    "AttributePath",
-]
+__all__ = ("AttributePath",)
 
 
 class AttributePathComponent(ABC):
haiway/state/requirement.py
CHANGED
haiway/state/structure.py
CHANGED
@@ -1,5 +1,5 @@
 import typing
-from collections.abc import
+from collections.abc import ItemsView, Mapping, Sequence
 from types import EllipsisType, GenericAlias
 from typing import (
     Any,
@@ -10,7 +10,6 @@ from typing import (
     cast,
     dataclass_transform,
     final,
-    overload,
 )
 from weakref import WeakValueDictionary
 
@@ -19,32 +18,7 @@ from haiway.state.path import AttributePath
 from haiway.state.validation import AttributeValidation, AttributeValidator
 from haiway.types import MISSING, DefaultValue, Missing, not_missing
 
-__all__ = [
-    "State",
-]
-
-
-@overload
-def Default[Value](
-    value: Value,
-    /,
-) -> Value: ...
-
-
-@overload
-def Default[Value](
-    *,
-    factory: Callable[[], Value],
-) -> Value: ...
-
-
-def Default[Value](
-    value: Value | Missing = MISSING,
-    /,
-    *,
-    factory: Callable[[], Value] | Missing = MISSING,
-) -> Value:  # it is actually a DefaultValue, but type checker has to be fooled
-    return cast(Value, DefaultValue(value, factory=factory))
+__all__ = ("State",)
 
 
 @final
@@ -392,11 +366,27 @@ class State(metaclass=StateMeta):
     ) -> Self:
         return self.__replace__(**kwargs)
 
-    def
+    def to_str(
+        self,
+        pretty: bool = False,
+    ) -> str:
+        if pretty:
+            return _state_str(self)
+
+        else:
+            return self.__str__()
+
+    def to_mapping(
+        self,
+        recursive: bool = False,
+    ) -> Mapping[str, Any]:
         dict_result: dict[str, Any] = {}
         for key in self.__ATTRIBUTES__.keys():
             value: Any | Missing = getattr(self, key, MISSING)
-            if
+            if recursive and isinstance(value, State):
+                dict_result[key] = value.to_mapping(recursive=recursive)
+
+            elif not_missing(value):
                 dict_result[key] = value
 
         return dict_result
@@ -458,3 +448,144 @@
                 **kwargs,
             }
         )
+
+
+def _attribute_str(
+    *,
+    key: str,
+    value: str,
+) -> str:
+    return f"┝ {key}: {value}"
+
+
+def _element_str(
+    *,
+    key: Any,
+    value: Any,
+) -> str:
+    return f"[{key}]: {value}"
+
+
+def _state_str(
+    state: State,
+    /,
+) -> str:
+    variables: ItemsView[str, Any] = vars(state).items()
+
+    parts: list[str] = [f"┍━ {type(state).__name__}:"]
+    for key, value in variables:
+        value_string: str | None = _value_str(value)
+
+        if value_string:
+            parts.append(
+                _attribute_str(
+                    key=key,
+                    value=value_string,
+                )
+            )
+
+        else:
+            continue  # skip empty elements
+
+    if parts:
+        return "\n".join(parts) + "\n┕━"
+
+    else:
+        return "╍"
+
+
+def _mapping_str(
+    dictionary: Mapping[Any, Any],
+    /,
+) -> str | None:
+    elements: ItemsView[Any, Any] = dictionary.items()
+
+    parts: list[str] = []
+    for key, value in elements:
+        value_string: str | None = _value_str(value)
+
+        if value_string:
+            parts.append(
+                _element_str(
+                    key=key,
+                    value=value_string,
+                )
+            )
+
+        else:
+            continue  # skip empty elements
+
+    if parts:
+        return "\n| " + "\n".join(parts).replace("\n", "\n| ")
+
+    else:
+        return None
+
+
+def _sequence_str(
+    sequence: Sequence[Any],
+    /,
+) -> str | None:
+    parts: list[str] = []
+    for idx, element in enumerate(sequence):
+        element_string: str | None = _value_str(element)
+
+        if element_string:
+            parts.append(
+                _element_str(
+                    key=idx,
+                    value=element_string,
+                )
+            )
+
+        else:
+            continue  # skip empty elements
+
+    if parts:
+        return "\n| " + "\n".join(parts).replace("\n", "\n| ")
+
+    else:
+        return None
+
+
+def _raw_value_str(
+    value: Any,
+    /,
+) -> str | None:
+    if value is MISSING:
+        return None  # skip missing
+
+    else:
+        return str(value).strip().replace("\n", "\n| ")
+
+
+def _value_str(  # noqa: PLR0911
+    value: Any,
+    /,
+) -> str | None:
+    # check for string
+    if isinstance(value, str):
+        if "\n" in value:
+            return f'"""\n{value}\n"""'.replace("\n", "\n| ")
+
+        else:
+            return f'"{value}"'
+
+    # check for bytes
+    elif isinstance(value, bytes):
+        return f'b"{value}"'
+
+    # try unpack state
+    elif isinstance(value, State):
+        return _state_str(value)
+
+    # try unpack mapping
+    elif isinstance(value, Mapping):
+        return _mapping_str(value)
+
+    # try unpack sequence
+    elif isinstance(value, Sequence):
+        return _sequence_str(value)
+
+    else:  # fallback to other
+        return _raw_value_str(value)
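The State additions above give every State subclass a to_mapping(recursive=...) view and a to_str(pretty=...) rendering built on the new _state_str helpers. A small illustrative sketch; the User and Address types are hypothetical, assuming State subclasses are declared and constructed with keyword fields as elsewhere in haiway:

from haiway.state import State

class Address(State):
    city: str

class User(State):
    name: str
    address: Address

user = User(name="Ada", address=Address(city="London"))

# Nested State values are unpacked only when recursive=True.
print(user.to_mapping(recursive=True))  # expected: {'name': 'Ada', 'address': {'city': 'London'}}

# pretty=True routes through _state_str and its box-drawing layout; pretty=False falls back to __str__.
print(user.to_str(pretty=True))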
haiway/state/validation.py
CHANGED
@@ -10,11 +10,11 @@ from uuid import UUID
 from haiway.state.attributes import AttributeAnnotation
 from haiway.types import MISSING, Missing
 
-__all__ = [
+__all__ = (
     "AttributeValidation",
     "AttributeValidationError",
     "AttributeValidator",
-]
+)
 
 
 class AttributeValidation[Type](Protocol):