haiway 0.23.2__py3-none-any.whl → 0.24.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- haiway/__init__.py +6 -2
- haiway/context/access.py +9 -48
- haiway/context/observability.py +2 -3
- haiway/context/state.py +8 -2
- haiway/context/types.py +0 -4
- haiway/helpers/__init__.py +2 -1
- haiway/helpers/concurrent.py +324 -15
- haiway/helpers/retries.py +2 -2
- haiway/state/requirement.py +1 -1
- haiway/types/__init__.py +2 -2
- haiway/types/missing.py +66 -12
- haiway/utils/__init__.py +2 -0
- haiway/utils/collections.py +4 -4
- haiway/utils/env.py +86 -7
- haiway/utils/formatting.py +1 -1
- haiway/utils/logs.py +9 -0
- {haiway-0.23.2.dist-info → haiway-0.24.1.dist-info}/METADATA +1 -1
- {haiway-0.23.2.dist-info → haiway-0.24.1.dist-info}/RECORD +20 -20
- {haiway-0.23.2.dist-info → haiway-0.24.1.dist-info}/WHEEL +0 -0
- {haiway-0.23.2.dist-info → haiway-0.24.1.dist-info}/licenses/LICENSE +0 -0
haiway/__init__.py
CHANGED
@@ -18,6 +18,7 @@ from haiway.helpers import (
     LoggerObservability,
     asynchronous,
     cache,
+    execute_concurrently,
     process_concurrently,
     retry,
     throttle,
@@ -32,7 +33,7 @@ from haiway.types import (
     Missing,
     is_missing,
     not_missing,
-
+    unwrap_missing,
 )
 from haiway.utils import (
     AsyncQueue,
@@ -44,6 +45,7 @@ from haiway.utils import (
     as_tuple,
     async_always,
     async_noop,
+    getenv,
     getenv_base64,
     getenv_bool,
     getenv_float,
@@ -89,6 +91,8 @@ __all__ = (
     "asynchronous",
     "cache",
     "ctx",
+    "execute_concurrently",
+    "getenv",
     "getenv_base64",
     "getenv_bool",
     "getenv_float",
@@ -104,6 +108,6 @@ __all__ = (
     "throttle",
     "timeout",
     "traced",
-    "
+    "unwrap_missing",
     "without_missing",
 )
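Net effect of the export changes above: the new helpers become importable straight from the package root. A minimal sketch (only the import itself is grounded in this diff):

```python
# New names re-exported from the haiway package root in 0.24.x.
from haiway import execute_concurrently, getenv, unwrap_missing
```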
haiway/context/access.py
CHANGED
@@ -3,7 +3,6 @@ from asyncio import (
     Task,
     TaskGroup,
     current_task,
-    iscoroutinefunction,
 )
 from collections.abc import (
     AsyncGenerator,
@@ -28,7 +27,6 @@ from haiway.context.observability import (
 from haiway.context.state import ScopeState, StateContext
 from haiway.context.tasks import TaskGroupContext
 from haiway.state import State
-from haiway.utils import mimic_function
 from haiway.utils.stream import AsyncStream

 __all__ = ("ctx",)
@@ -218,44 +216,6 @@ class ScopeContext:
             exc_tb=exc_tb,
         )

-    @overload
-    def __call__[Result, **Arguments](
-        self,
-        function: Callable[Arguments, Coroutine[Any, Any, Result]],
-    ) -> Callable[Arguments, Coroutine[Any, Any, Result]]: ...
-
-    @overload
-    def __call__[Result, **Arguments](
-        self,
-        function: Callable[Arguments, Result],
-    ) -> Callable[Arguments, Result]: ...
-
-    def __call__[Result, **Arguments](
-        self,
-        function: Callable[Arguments, Coroutine[Any, Any, Result]] | Callable[Arguments, Result],
-    ) -> Callable[Arguments, Coroutine[Any, Any, Result]] | Callable[Arguments, Result]:
-        if iscoroutinefunction(function):
-
-            async def async_context(
-                *args: Arguments.args,
-                **kwargs: Arguments.kwargs,
-            ) -> Result:
-                async with self:
-                    return await function(*args, **kwargs)
-
-            return mimic_function(function, within=async_context)
-
-        else:
-
-            def sync_context(
-                *args: Arguments.args,
-                **kwargs: Arguments.kwargs,
-            ) -> Result:
-                with self:
-                    return function(*args, **kwargs)  # pyright: ignore[reportReturnType]
-
-            return mimic_function(function, within=sync_context)  # pyright: ignore[reportReturnType]
-

 @final
 class ctx:
@@ -482,18 +442,19 @@ class ctx:
         """

         output_stream = AsyncStream[Element]()
+        stream_scope: ScopeContext = ctx.scope("stream")

-        @ctx.scope("stream")
         async def stream() -> None:
-
-
-
+            async with stream_scope:
+                try:
+                    async for result in source(*args, **kwargs):
+                        await output_stream.send(result)

-
-
+                except BaseException as exc:
+                    output_stream.finish(exception=exc)

-
-
+                else:
+                    output_stream.finish()

         TaskGroupContext.run(stream)
         return output_stream
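The removed `ScopeContext.__call__` overloads had allowed a scope to wrap a function as a decorator; `ctx.stream` itself now creates the scope explicitly and enters it with `async with`, as the hunk above shows. A hedged migration sketch for callers that used the decorator form (the scope name and body are illustrative):

```python
from haiway import ctx


async def run_job() -> None:
    # Instead of decorating the coroutine with a ScopeContext (removed here),
    # create the scope and enter it explicitly, mirroring the new ctx.stream body.
    async with ctx.scope("job"):  # "job" is an illustrative scope name
        ...  # work executed inside the scope
```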
haiway/context/observability.py
CHANGED
@@ -11,7 +11,6 @@ from typing import Any, Protocol, Self, final, runtime_checkable
 from uuid import UUID, uuid4

 from haiway.context.identifier import ScopeIdentifier
-from haiway.state import State
 from haiway.types import Missing
 from haiway.utils.formatting import format_str

@@ -166,7 +165,7 @@ class ObservabilityScopeEntering(Protocol):
     Implementations should record when execution enters a new scope.
     """

-    def __call__
+    def __call__(
         self,
         scope: ScopeIdentifier,
         /,
@@ -182,7 +181,7 @@ class ObservabilityScopeExiting(Protocol):
     including any exceptions that caused the exit.
     """

-    def __call__
+    def __call__(
         self,
         scope: ScopeIdentifier,
         /,
haiway/context/state.py
CHANGED
@@ -25,7 +25,10 @@ class ScopeState:
     This class is immutable after initialization.
     """

-    __slots__ = (
+    __slots__ = (
+        "_lock",
+        "_state",
+    )

     def __init__(
         self,
@@ -271,7 +274,10 @@ class StateContext:
             If state not found and default not provided or instantiation fails
         """
         try:
-            return cls._context.get().state(
+            return cls._context.get().state(
+                state,
+                default=default,
+            )

         except LookupError as exc:
             raise MissingContext("StateContext requested but not defined!") from exc
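The hunk above threads a `default=` argument through to the scope-state lookup. A heavily hedged sketch of what that enables; whether the public `ctx.state(...)` accessor forwards `default=` is an assumption here, and the state class is illustrative:

```python
from haiway import ctx
from haiway.state import State


class ServiceConfig(State):  # illustrative state type
    endpoint: str = "https://example.invalid"


async def configured_endpoint() -> str:
    async with ctx.scope("configuration"):
        # Assumption: ctx.state(...) forwards default= to StateContext, so a
        # missing ServiceConfig falls back to the provided instance instead of raising.
        config = ctx.state(ServiceConfig, default=ServiceConfig())
        return config.endpoint
```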
haiway/context/types.py
CHANGED
@@ -13,8 +13,6 @@ class MissingContext(Exception):
     identifiers when no context has been established.
     """

-    pass
-

 class MissingState(Exception):
     """
@@ -24,5 +22,3 @@ class MissingState(Exception):
     that is not present in the current context and cannot be automatically
     created (either because no default was provided or instantiation failed).
     """
-
-    pass
haiway/helpers/__init__.py
CHANGED
@@ -1,6 +1,6 @@
 from haiway.helpers.asynchrony import asynchronous
 from haiway.helpers.caching import CacheMakeKey, CacheRead, CacheWrite, cache
-from haiway.helpers.concurrent import process_concurrently
+from haiway.helpers.concurrent import execute_concurrently, process_concurrently
 from haiway.helpers.files import File, FileAccess
 from haiway.helpers.observability import LoggerObservability
 from haiway.helpers.retries import retry
@@ -17,6 +17,7 @@ __all__ = (
     "LoggerObservability",
     "asynchronous",
     "cache",
+    "execute_concurrently",
     "process_concurrently",
     "retry",
     "throttle",
haiway/helpers/concurrent.py
CHANGED
@@ -1,11 +1,22 @@
-from asyncio import FIRST_COMPLETED, CancelledError, Task, wait
-from collections.abc import
-
-
+from asyncio import ALL_COMPLETED, FIRST_COMPLETED, CancelledError, Task, wait
+from collections.abc import (
+    AsyncIterable,
+    AsyncIterator,
+    Callable,
+    Collection,
+    Coroutine,
+    MutableSequence,
+    Sequence,
+)
+from typing import Any, Literal, overload

 from haiway.context import ctx

-__all__ = (
+__all__ = (
+    "execute_concurrently",
+    "process_concurrently",
+    "stream_concurrently",
+)


 async def process_concurrently[Element](  # noqa: C901, PLR0912
@@ -18,20 +29,52 @@ async def process_concurrently[Element](  # noqa: C901, PLR0912
 ) -> None:
     """Process elements from an async iterator concurrently.

+    Consumes elements from an async iterator and processes them using the provided
+    handler function. Processing happens concurrently with a configurable maximum
+    number of concurrent tasks. Elements are processed as they become available,
+    maintaining the specified concurrency limit.
+
+    The function continues until the source iterator is exhausted. If the function
+    is cancelled, all running tasks are also cancelled. When ignore_exceptions is
+    False, the first exception encountered will stop processing and propagate.
+
     Parameters
     ----------
-    source: AsyncIterator[Element]
-        An async iterator providing elements to process.
+    source : AsyncIterator[Element]
+        An async iterator providing elements to process. Elements are consumed
+        one at a time as processing slots become available.
+    handler : Callable[[Element], Coroutine[Any, Any, None]]
+        A coroutine function that processes each element. The handler should
+        not return a value (returns None).
+    concurrent_tasks : int, default=2
+        Maximum number of concurrent tasks. Must be greater than 0. Higher
+        values allow more parallelism but consume more resources.
+    ignore_exceptions : bool, default=False
+        If True, exceptions from handler tasks will be logged but not propagated,
+        allowing processing to continue. If False, the first exception stops
+        all processing.

-
-
+    Raises
+    ------
+    CancelledError
+        If the function is cancelled, propagated after cancelling all running tasks.
+    Exception
+        Any exception raised by handler tasks when ignore_exceptions is False.

-
-
-
-
-
-
+    Examples
+    --------
+    >>> async def process_item(item: str) -> None:
+    ...     await some_async_operation(item)
+    ...
+    >>> async def items() -> AsyncIterator[str]:
+    ...     for i in range(10):
+    ...         yield f"item_{i}"
+    ...
+    >>> await process_concurrently(
+    ...     items(),
+    ...     process_item,
+    ...     concurrent_tasks=5
+    ... )

     """
     assert concurrent_tasks > 0  # nosec: B101
@@ -83,3 +126,269 @@ async def process_concurrently[Element](  # noqa: C901, PLR0912
                 f"Concurrent processing error - {type(exc)}: {exc}",
                 exception=exc,
             )
+
+
+@overload
+async def execute_concurrently[Element, Result](
+    source: Collection[Element],
+    /,
+    handler: Callable[[Element], Coroutine[Any, Any, Result]],
+    *,
+    concurrent_tasks: int = 2,
+) -> Sequence[Result]: ...
+
+
+@overload
+async def execute_concurrently[Element, Result](
+    source: Collection[Element],
+    /,
+    handler: Callable[[Element], Coroutine[Any, Any, Result]],
+    *,
+    concurrent_tasks: int = 2,
+    return_exceptions: Literal[True],
+) -> Sequence[Result | BaseException]: ...
+
+
+async def execute_concurrently[Element, Result](  # noqa: C901
+    source: Collection[Element],
+    /,
+    handler: Callable[[Element], Coroutine[Any, Any, Result]],
+    *,
+    concurrent_tasks: int = 2,
+    return_exceptions: bool = False,
+) -> Sequence[Result | BaseException] | Sequence[Result]:
+    """Execute handler for each element from a collection concurrently.
+
+    Processes all elements from a collection using the provided handler function,
+    executing multiple handlers concurrently up to the specified limit. Results
+    are collected and returned in the same order as the input elements.
+
+    Unlike `process_concurrently`, this function:
+    - Works with collections (known size) rather than async iterators
+    - Returns results from each handler invocation
+    - Preserves the order of results to match input order
+
+    The function ensures all tasks complete before returning. If cancelled,
+    all running tasks are cancelled before propagating the cancellation.
+
+    Parameters
+    ----------
+    source : Collection[Element]
+        A collection of elements to process. The collection size determines
+        the result sequence length.
+    handler : Callable[[Element], Coroutine[Any, Any, Result]]
+        A coroutine function that processes each element and returns a result.
+    concurrent_tasks : int, default=2
+        Maximum number of concurrent tasks. Must be greater than 0. Higher
+        values allow more parallelism but consume more resources.
+    return_exceptions : bool, default=False
+        If True, exceptions from handler tasks are included in the results
+        as BaseException instances. If False, the first exception stops
+        processing and is raised.
+
+    Returns
+    -------
+    Sequence[Result] or Sequence[Result | BaseException]
+        Results from each handler invocation, in the same order as input elements.
+        If return_exceptions is True, failed tasks return BaseException instances.
+
+    Raises
+    ------
+    CancelledError
+        If the function is cancelled, propagated after cancelling all running tasks.
+    Exception
+        Any exception raised by handler tasks when return_exceptions is False.
+
+    Examples
+    --------
+    >>> async def fetch_data(url: str) -> dict:
+    ...     return await http_client.get(url)
+    ...
+    >>> urls = ["http://api.example.com/1", "http://api.example.com/2"]
+    >>> results = await execute_concurrently(
+    ...     urls,
+    ...     fetch_data,
+    ...     concurrent_tasks=10
+    ... )
+    >>> # results[0] corresponds to urls[0], results[1] to urls[1], etc.
+
+    >>> # With exception handling
+    >>> results = await execute_concurrently(
+    ...     urls,
+    ...     fetch_data,
+    ...     concurrent_tasks=10,
+    ...     return_exceptions=True
+    ... )
+    >>> for url, result in zip(urls, results):
+    ...     if isinstance(result, BaseException):
+    ...         print(f"Failed to fetch {url}: {result}")
+    ...     else:
+    ...         print(f"Got data from {url}")
+
+    """
+    assert concurrent_tasks > 0  # nosec: B101
+    running: set[Task[Result]] = set()
+    results: MutableSequence[Task[Result]] = []
+    try:
+        for element in source:
+            task: Task[Result] = ctx.spawn(handler, element)
+            results.append(task)
+            running.add(task)
+            if len(running) < concurrent_tasks:
+                continue  # keep spawning tasks
+
+            completed, running = await wait(
+                running,
+                return_when=FIRST_COMPLETED,
+            )
+
+            for task in completed:
+                if exc := task.exception():
+                    if not return_exceptions:
+                        raise exc
+
+                    ctx.log_error(
+                        f"Concurrent execution error - {type(exc)}: {exc}",
+                        exception=exc,
+                    )
+
+    except CancelledError as exc:
+        # Cancel all running tasks
+        for task in running:
+            task.cancel()
+
+        raise exc
+
+    finally:
+        if running:
+            completed, _ = await wait(
+                running,
+                return_when=ALL_COMPLETED,
+            )
+            for task in completed:
+                if exc := task.exception():
+                    if not return_exceptions:
+                        raise exc
+
+                    ctx.log_error(
+                        f"Concurrent execution error - {type(exc)}: {exc}",
+                        exception=exc,
+                    )
+
+    return [result.exception() or result.result() for result in results]
+
+
+async def stream_concurrently[ElementA, ElementB](  # noqa: C901
+    source_a: AsyncIterator[ElementA],
+    source_b: AsyncIterator[ElementB],
+    /,
+) -> AsyncIterable[ElementA | ElementB]:
+    """Merge streams from two async iterators processed concurrently.
+
+    Concurrently consumes elements from two async iterators and yields them
+    as they become available. Elements from both sources are interleaved based
+    on which iterator produces them first. The function continues until both
+    iterators are exhausted.
+
+    This is useful for combining multiple async data sources into a single
+    stream while maintaining concurrency. Each iterator is polled independently,
+    and whichever has data available first will have its element yielded.
+
+    Parameters
+    ----------
+    source_a : AsyncIterator[ElementA]
+        First async iterator to consume from.
+    source_b : AsyncIterator[ElementB]
+        Second async iterator to consume from.
+
+    Yields
+    ------
+    ElementA | ElementB
+        Elements from either source as they become available. The order
+        depends on which iterator produces elements first.
+
+    Raises
+    ------
+    CancelledError
+        If the async generator is cancelled, both source tasks are cancelled
+        before propagating the cancellation.
+    Exception
+        Any exception raised by either source iterator.
+
+    Examples
+    --------
+    >>> async def numbers() -> AsyncIterator[int]:
+    ...     for i in range(5):
+    ...         await asyncio.sleep(0.1)
+    ...         yield i
+    ...
+    >>> async def letters() -> AsyncIterator[str]:
+    ...     for c in "abcde":
+    ...         await asyncio.sleep(0.15)
+    ...         yield c
+    ...
+    >>> async for item in stream_concurrently(numbers(), letters()):
+    ...     print(item)  # Prints interleaved numbers and letters
+
+    Notes
+    -----
+    The function maintains exactly one pending task per iterator at all times,
+    ensuring efficient resource usage while maximizing throughput from both
+    sources.
+
+    """
+
+    async def next_a() -> ElementA:
+        return await anext(source_a)
+
+    async def next_b() -> ElementB:
+        return await anext(source_b)
+
+    task_a: Task[ElementA] = ctx.spawn(next_a)
+    task_b: Task[ElementB] = ctx.spawn(next_b)
+
+    try:
+        while not (  # Continue until both iterators are exhausted
+            task_a.done()
+            and task_b.done()
+            and isinstance(task_a.exception(), StopAsyncIteration)
+            and isinstance(task_b.exception(), StopAsyncIteration)
+        ):
+            # Wait for at least one task to complete
+            done, _ = await wait(
+                {task_a, task_b},
+                return_when=FIRST_COMPLETED,
+            )
+
+            # Process completed tasks
+            for task in done:
+                if task is task_a:
+                    exc: BaseException | None = task.exception()
+                    if exc is None:
+                        yield task.result()
+                        task_a = ctx.spawn(next_a)
+
+                    elif not isinstance(exc, StopAsyncIteration):
+                        raise exc
+                    # If StopAsyncIteration, don't respawn task_a
+
+                elif task is task_b:
+                    exc: BaseException | None = task.exception()
+                    if exc is None:
+                        yield task.result()
+                        task_b = ctx.spawn(next_b)
+
+                    elif not isinstance(exc, StopAsyncIteration):
+                        raise exc
+                    # If StopAsyncIteration, don't respawn task_b
+
+    except CancelledError as exc:
+        # Cancel all running tasks
+        task_a.cancel()
+        task_b.cancel()
+        raise exc
+
+    finally:
+        # Ensure cleanup of any remaining tasks
+        for task in (task_a, task_b):
+            task.cancel()
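A hedged usage sketch of the helpers added in this file, run inside a context scope; the scope name, data, and handlers are illustrative, while the call signatures and import paths follow the hunks above (`stream_concurrently` is not re-exported from `haiway.helpers` in this release, so it is imported from the module directly):

```python
import asyncio
from collections.abc import AsyncIterator

from haiway import ctx
from haiway.helpers import execute_concurrently
from haiway.helpers.concurrent import stream_concurrently


async def double(value: int) -> int:
    await asyncio.sleep(0.01)  # stand-in for real async work
    return value * 2


async def numbers() -> AsyncIterator[int]:
    for i in range(3):
        yield i


async def letters() -> AsyncIterator[str]:
    for ch in "abc":
        yield ch


async def main() -> None:
    async with ctx.scope("demo"):
        # Ordered results with bounded concurrency.
        doubled = await execute_concurrently(range(5), double, concurrent_tasks=3)
        print(list(doubled))  # [0, 2, 4, 6, 8]

        # Interleave two async sources as their items become available.
        async for item in stream_concurrently(numbers(), letters()):
            print(item)


asyncio.run(main())
```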
haiway/helpers/retries.py
CHANGED
@@ -207,7 +207,7 @@ def _wrap_sync[**Args, Result](
                 case float(strict):
                     sleep_sync(strict)

-                case make_delay:
+                case make_delay:
                     sleep_sync(make_delay(attempt, exc))  # pyright: ignore[reportCallIssue, reportUnknownArgumentType]

             else:
@@ -253,7 +253,7 @@ def _wrap_async[**Args, Result](
                 case float(strict):
                     await sleep(strict)

-                case make_delay:
+                case make_delay:
                     await sleep(make_delay(attempt, exc))  # pyright: ignore[reportCallIssue, reportUnknownArgumentType]

             else:
haiway/state/requirement.py
CHANGED
@@ -100,7 +100,7 @@ class AttributeRequirement[Root]:
         ), "Prepare attribute path by using Self._.path.to.property or explicitly"

         def check_text_match(root: Root) -> None:
-            checked: Any =
+            checked: Any = path(root)
             if not isinstance(checked, str):
                 raise ValueError(
                     f"Attribute value must be a string for like operation, got {type(checked)}"
haiway/types/__init__.py
CHANGED
@@ -1,5 +1,5 @@
 from haiway.types.default import Default, DefaultValue
-from haiway.types.missing import MISSING, Missing, is_missing, not_missing,
+from haiway.types.missing import MISSING, Missing, is_missing, not_missing, unwrap_missing

 __all__ = (
     "MISSING",
@@ -8,5 +8,5 @@ __all__ = (
     "Missing",
     "is_missing",
     "not_missing",
-    "
+    "unwrap_missing",
 )
|
haiway/types/missing.py
CHANGED
@@ -1,11 +1,12 @@
|
|
1
|
-
from
|
1
|
+
from collections.abc import Callable
|
2
|
+
from typing import Any, Final, TypeGuard, cast, final, overload
|
2
3
|
|
3
4
|
__all__ = (
|
4
5
|
"MISSING",
|
5
6
|
"Missing",
|
6
7
|
"is_missing",
|
7
8
|
"not_missing",
|
8
|
-
"
|
9
|
+
"unwrap_missing",
|
9
10
|
)
|
10
11
|
|
11
12
|
|
@@ -147,11 +148,12 @@ def not_missing[Value](
|
|
147
148
|
return check is not MISSING
|
148
149
|
|
149
150
|
|
150
|
-
|
151
|
+
@overload
|
152
|
+
def unwrap_missing[Value](
|
151
153
|
check: Value | Missing,
|
152
154
|
/,
|
153
155
|
*,
|
154
|
-
|
156
|
+
default: Value,
|
155
157
|
) -> Value:
|
156
158
|
"""
|
157
159
|
Substitute a default value when the input is MISSING.
|
@@ -162,10 +164,10 @@ def when_missing[Value](
|
|
162
164
|
|
163
165
|
Parameters
|
164
166
|
----------
|
165
|
-
|
166
|
-
The value to check
|
167
|
-
|
168
|
-
The default value to use if check is MISSING
|
167
|
+
value : Value | Missing
|
168
|
+
The value to check.
|
169
|
+
default : Value
|
170
|
+
The default value to use if check is MISSING.
|
169
171
|
|
170
172
|
Returns
|
171
173
|
-------
|
@@ -175,13 +177,65 @@ def when_missing[Value](
|
|
175
177
|
Examples
|
176
178
|
--------
|
177
179
|
```python
|
178
|
-
result =
|
180
|
+
result = unwrap_missing(optional_value, default=default_value)
|
179
181
|
# result will be default_value if optional_value is MISSING
|
180
182
|
# otherwise it will be optional_value
|
181
183
|
```
|
182
184
|
"""
|
183
|
-
|
184
|
-
|
185
|
+
|
186
|
+
|
187
|
+
@overload
|
188
|
+
def unwrap_missing[Value, Mapped](
|
189
|
+
value: Value | Missing,
|
190
|
+
/,
|
191
|
+
*,
|
192
|
+
default: Mapped,
|
193
|
+
mapping: Callable[[Value], Mapped],
|
194
|
+
) -> Value | Mapped:
|
195
|
+
"""
|
196
|
+
Substitute a default value when the input is MISSING or map the original.
|
197
|
+
|
198
|
+
This function provides a convenient way to replace the MISSING
|
199
|
+
sentinel with a default value, similar to how the or operator
|
200
|
+
works with None but specifically for the MISSING sentinel.
|
201
|
+
Original value is mapped using provided function when not missing.
|
202
|
+
|
203
|
+
Parameters
|
204
|
+
----------
|
205
|
+
value : Value | Missing
|
206
|
+
The value to check.
|
207
|
+
default : Mapped
|
208
|
+
The default value to use if check is MISSING.
|
209
|
+
mapping: Callable[[Value], Result] | None = None
|
210
|
+
Mapping to apply to the value.
|
211
|
+
|
212
|
+
Returns
|
213
|
+
-------
|
214
|
+
Mapped
|
215
|
+
The original value with mapping applied if not MISSING, otherwise the provided default.
|
216
|
+
|
217
|
+
Examples
|
218
|
+
--------
|
219
|
+
```python
|
220
|
+
result = unwrap_missing(optional_value, default=default_value, mapping=value_map)
|
221
|
+
# result will be default_value if optional_value is MISSING
|
222
|
+
# otherwise it will be optional_value after mapping
|
223
|
+
```
|
224
|
+
"""
|
225
|
+
|
226
|
+
|
227
|
+
def unwrap_missing[Value, Mapped](
|
228
|
+
value: Value | Missing,
|
229
|
+
/,
|
230
|
+
*,
|
231
|
+
default: Value | Mapped,
|
232
|
+
mapping: Callable[[Value], Mapped] | None = None,
|
233
|
+
) -> Value | Mapped:
|
234
|
+
if value is MISSING:
|
235
|
+
return default
|
236
|
+
|
237
|
+
elif mapping is not None:
|
238
|
+
return mapping(cast(Value, value))
|
185
239
|
|
186
240
|
else:
|
187
|
-
return cast(Value,
|
241
|
+
return cast(Value, value)
|
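A hedged sketch of the renamed helper; the timeout scenario is illustrative, while the keyword-only `default=`/`mapping=` signature follows the overloads above:

```python
from haiway.types import MISSING, Missing, unwrap_missing


def describe_timeout(timeout: float | Missing) -> str:
    # Map a present value, or fall back to a fixed message when MISSING.
    return unwrap_missing(
        timeout,
        default="no timeout configured",
        mapping=lambda seconds: f"{seconds:.1f}s",
    )


print(describe_timeout(MISSING))  # no timeout configured
print(describe_timeout(2.5))      # 2.5s
```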
haiway/utils/__init__.py
CHANGED
@@ -1,6 +1,7 @@
 from haiway.utils.always import always, async_always
 from haiway.utils.collections import as_dict, as_list, as_set, as_tuple, without_missing
 from haiway.utils.env import (
+    getenv,
     getenv_base64,
     getenv_bool,
     getenv_float,
@@ -26,6 +27,7 @@ __all__ = (
     "async_always",
     "async_noop",
     "format_str",
+    "getenv",
     "getenv_base64",
     "getenv_bool",
     "getenv_float",
haiway/utils/collections.py
CHANGED
@@ -50,7 +50,7 @@ def as_list[T](
     if collection is None:
         return None

-
+    elif isinstance(collection, list):
         return collection

     else:
@@ -95,7 +95,7 @@ def as_tuple[T](
     if collection is None:
         return None

-
+    elif isinstance(collection, tuple):
         return collection

     else:
@@ -140,7 +140,7 @@ def as_set[T](
     if collection is None:
         return None

-
+    elif isinstance(collection, set):
         return collection

     else:
@@ -185,7 +185,7 @@ def as_dict[K, V](
     if collection is None:
         return None

-
+    elif isinstance(collection, dict):
         return collection

     else:
haiway/utils/env.py
CHANGED
@@ -1,9 +1,11 @@
 from base64 import b64decode
 from collections.abc import Callable
-from os import environ
+from os import environ
+from os import getenv as os_getenv
 from typing import Literal, overload

 __all__ = (
+    "getenv",
     "getenv_base64",
     "getenv_bool",
     "getenv_float",
@@ -13,6 +15,83 @@ __all__ = (
 )


+@overload
+def getenv[Value](
+    key: str,
+    /,
+    mapping: Callable[[str], Value],
+) -> Value | None: ...
+
+
+@overload
+def getenv[Value](
+    key: str,
+    /,
+    mapping: Callable[[str], Value],
+    *,
+    default: Value,
+) -> Value: ...
+
+
+@overload
+def getenv[Value](
+    key: str,
+    /,
+    mapping: Callable[[str], Value],
+    *,
+    required: Literal[True],
+) -> Value: ...
+
+
+def getenv[Value](
+    key: str,
+    /,
+    mapping: Callable[[str], Value],
+    *,
+    default: Value | None = None,
+    required: bool = False,
+) -> Value | None:
+    """
+    Get a value from an environment variable and transforms.
+
+    Uses provided transformation method to deliver custom data type from env variable.
+
+    Parameters
+    ----------
+    key : str
+        The environment variable name to retrieve
+    mapping : Callable[[str], Value]
+        Custom transformation of env value to desired value type.
+    default : Value | None, optional
+        Value to return if the environment variable is not set
+    required : bool, default=False
+        If True and the environment variable is not set and no default is provided,
+        raises a ValueError
+
+    Returns
+    -------
+    Value | None
+        The value from the environment variable, or the default value
+
+    Raises
+    ------
+    ValueError
+        If required=True, the environment variable is not set, and no default is provided
+    """
+    if value := os_getenv(key=key):
+        try:
+            return mapping(value)
+
+        except Exception as exc:
+            raise ValueError(f"Failed to transform environment value `{key}`: {value}") from exc
+
+    elif required and default is None:
+        raise ValueError(f"Required environment value `{key}` is missing!")
+
+    else:
+        return default
+
+
 @overload
 def getenv_bool(
     key: str,
@@ -70,7 +149,7 @@ def getenv_bool(
     ValueError
         If required=True, the environment variable is not set, and no default is provided
     """
-    if value :=
+    if value := os_getenv(key=key):
         return value.lower() in ("true", "1", "t")

     elif required and default is None:
@@ -135,7 +214,7 @@ def getenv_int(
        If the environment variable is set but cannot be converted to an integer,
        or if required=True, the environment variable is not set, and no default is provided
    """
-    if value :=
+    if value := os_getenv(key=key):
        try:
            return int(value)

@@ -204,7 +283,7 @@ def getenv_float(
        If the environment variable is set but cannot be converted to a float,
        or if required=True, the environment variable is not set, and no default is provided
    """
-    if value :=
+    if value := os_getenv(key=key):
        try:
            return float(value)

@@ -272,7 +351,7 @@ def getenv_str(
     ValueError
         If required=True, the environment variable is not set, and no default is provided
     """
-    if value :=
+    if value := os_getenv(key=key):
         return value

     elif required and default is None:
@@ -344,7 +423,7 @@ def getenv_base64[Value](
     ValueError
         If required=True, the environment variable is not set, and no default is provided
     """
-    if value :=
+    if value := os_getenv(key=key):
         return decoder(b64decode(value))

     elif required and default is None:
@@ -395,7 +474,7 @@ def load_env(
             idx: int  # find where key ends
             for element in enumerate(line):
                 if element[1] == "=":
-                    idx
+                    idx = element[0]
                     break
             else:  # ignore keys without assignment
                 continue
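A hedged sketch of the new generic `getenv`; the variable names are illustrative, while the positional `mapping` argument and the `default=`/`required=` keywords follow the overloads above:

```python
from haiway.utils import getenv

# Parse a comma-separated variable into a tuple of hosts, with a fallback.
hosts: tuple[str, ...] = getenv(
    "UPSTREAM_HOSTS",                   # illustrative variable name
    lambda raw: tuple(raw.split(",")),  # mapping applied to the raw string value
    default=("localhost",),
)

# required=True turns a missing variable (with no default) into a ValueError.
token: str = getenv("API_TOKEN", str, required=True)
```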
haiway/utils/formatting.py
CHANGED
haiway/utils/logs.py
CHANGED
@@ -7,6 +7,7 @@ __all__ = ("setup_logging",)


 def setup_logging(
     *loggers: str,
+    time: bool = True,
     debug: bool = getenv_bool("DEBUG_LOGGING", __debug__),
 ) -> None:
     """\
@@ -16,6 +17,10 @@ def setup_logging(
     ----------
     *loggers: str
         names of additional loggers to configure.
+    time: bool = True
+        include timestamps in logs.
+    debug: bool = __debug__
+        include debug logs.

     NOTE: this function should be run only once on application start
     """
@@ -28,6 +33,10 @@ def setup_logging(
             "standard": {
                 "format": "%(asctime)s [%(levelname)-4s] [%(name)s] %(message)s",
                 "datefmt": "%d/%b/%Y:%H:%M:%S +0000",
+            }
+            if time
+            else {
+                "format": "[%(levelname)-4s] [%(name)s] %(message)s",
             },
         },
         "handlers": {
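A hedged sketch of the new `time` switch; the logger names are illustrative, and the import path follows the module shown above:

```python
from haiway.utils.logs import setup_logging

# Drop timestamps from the log format, e.g. when an external collector already
# stamps each record; leaving time=True keeps the previous behaviour.
setup_logging("app", "worker", time=False)
```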
{haiway-0.23.2.dist-info → haiway-0.24.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: haiway
-Version: 0.23.2
+Version: 0.24.1
 Summary: Framework for dependency injection and state management within structured concurrency model.
 Project-URL: Homepage, https://miquido.com
 Project-URL: Repository, https://github.com/miquido/haiway.git
{haiway-0.23.2.dist-info → haiway-0.24.1.dist-info}/RECORD
CHANGED
@@ -1,20 +1,20 @@
-haiway/__init__.py,sha256=
+haiway/__init__.py,sha256=E-IUVgLuTkaj-6hUzuqOwef_98rDXmGeE79waiYrFfQ,2001
 haiway/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 haiway/context/__init__.py,sha256=1N_SvdPkTfIZDZybm3y0rY2dGrDLWTm0ryzUz2XD4f8,1174
-haiway/context/access.py,sha256=
+haiway/context/access.py,sha256=lsK6RnSjaGRgdOprtLCz4vyw5KqMtbGs0uFXnJWc_j4,24446
 haiway/context/disposables.py,sha256=AP9eZ0BPHJZfjrrfrjSzr4jONMKkR6YmhjOfnBp37so,11504
 haiway/context/identifier.py,sha256=dCCwLneXJzH__ZWFlGRUHvoCmbT4lM0QVbyokYIbUHg,5255
-haiway/context/observability.py,sha256=
-haiway/context/state.py,sha256=
+haiway/context/observability.py,sha256=JGiBScElJMgYDxi1PoIx7K98PpCXTVV3WY-x8abLx9A,23631
+haiway/context/state.py,sha256=lzkVVdTMYn-Bfon2aCPMx6vRScrbOqY2f_DuS5aMx0s,11982
 haiway/context/tasks.py,sha256=pScFgeiyrXSJRDFZiYbBLi3k_DHkSlhB8rgAnYtgyrU,4925
-haiway/context/types.py,sha256=
-haiway/helpers/__init__.py,sha256=
+haiway/context/types.py,sha256=LoW8238TTdbUgmxyHDi0LVc8M8ZwTHLWKkAPttTsTeg,746
+haiway/helpers/__init__.py,sha256=K_OABDzXcCURHNDDBLmZleKP1pDCOxsG30c4zkfRnIE,771
 haiway/helpers/asynchrony.py,sha256=Ddj8UdXhVczAbAC-rLpyhWa4RJ_W2Eolo45Veorq7_4,5362
 haiway/helpers/caching.py,sha256=BqgcUGQSAmXsuLi5V8EwlZzuGyutHOn1V4k7BHsGKeg,14347
-haiway/helpers/concurrent.py,sha256=
+haiway/helpers/concurrent.py,sha256=P8YXukabb29iQhSKTECVaThPhzTX17JDdKrWAjHy4d4,13105
 haiway/helpers/files.py,sha256=L6vXd8gdgWx5jPL8azloU8IGoFq2xnxjMc4ufz-gdl4,11650
 haiway/helpers/observability.py,sha256=jCJzOPJ5E3RKJsbbGRR1O-mZydaHNIGkIpppOH7nFBA,11012
-haiway/helpers/retries.py,sha256=
+haiway/helpers/retries.py,sha256=OH__I9e-PUFxcSwuQLIzJ9F1MwXgbz1Ur4jEjJiOmjQ,8974
 haiway/helpers/throttling.py,sha256=KBWUSHdKVMC5_nRMmmoPNwfp-3AcerQ6OczJa9gNLM0,5796
 haiway/helpers/timeouting.py,sha256=GQ8-btb36f0Jq7TnorAPYXyKScNmf0nxHXCYxqGl-o8,3949
 haiway/helpers/tracing.py,sha256=NHipA5UlngwFcAaKhXg1jTuJ-ti6AqSNxE7u7-92vWo,5409
@@ -23,23 +23,23 @@ haiway/opentelemetry/observability.py,sha256=akGAPX6_958BxCfzlLnoDJHPtEvztdMnfMA
 haiway/state/__init__.py,sha256=AaMqlMhO4zKS_XNevy3A7BHh5PxmguA-Sk_FnaNDY1Q,355
 haiway/state/attributes.py,sha256=sububiFP23aBB8RGk6OvTUp7BEY6S0kER_uHC09yins,26733
 haiway/state/path.py,sha256=bv5MI3HmUyku78k0Sz5lc7Q_Bay53iom1l3AL5KZs-4,32143
-haiway/state/requirement.py,sha256=
+haiway/state/requirement.py,sha256=HjoABPQ2r-uo6xr5s4q9nDPGUMde5X_JCIZlYqTts5s,15609
 haiway/state/structure.py,sha256=CTf1l0TyKA7vkVDqA9RMdxaOVNSHwQduN2jb6H015hg,23798
 haiway/state/validation.py,sha256=eDOZKRrfd-dmdbqoHcLacdCVKmVCEpwt239EG6ljNF8,23557
-haiway/types/__init__.py,sha256=
+haiway/types/__init__.py,sha256=BQKjbPZQej4DQsD_y4linn4rQMWdfaahKW-t-gapSjs,285
 haiway/types/default.py,sha256=59chcOaoGqI2to08RamCCLluimfYbJp5xbYl3fWaLrM,4153
-haiway/types/missing.py,sha256=
-haiway/utils/__init__.py,sha256=
+haiway/types/missing.py,sha256=V9FWUgAWUsmFuSXc57MORQOVh2wO2vlF1qYopmcEA2A,5760
+haiway/utils/__init__.py,sha256=FkY6EUwkZmb2Z8Z5UpMW3i9J0l9JoowgrULy-s_6X5M,943
 haiway/utils/always.py,sha256=dd6jDQ1j4DpJjTKO1J2Tv5xS8X1LnMC4kQ0D7DtKUvw,1230
-haiway/utils/collections.py,sha256=
-haiway/utils/env.py,sha256=
-haiway/utils/formatting.py,sha256=
-haiway/utils/logs.py,sha256
+haiway/utils/collections.py,sha256=W2K5haxogHdngEw2JF_qEUr0O28dhirdy2kzSbeW4wE,4745
+haiway/utils/env.py,sha256=mCMveOWwOphgp8Ir5NEpZQFENyG7MBOoLlUeHzzIYEQ,11262
+haiway/utils/formatting.py,sha256=SQ-gjBa2nxg_UhIP0AhNXIRwcDRei2ZZUiCLMiYLYUo,4041
+haiway/utils/logs.py,sha256=-MVyxVGU892yJKFh0bkshW_NEg1aiJt9wv2cUY2w98o,1847
 haiway/utils/mimic.py,sha256=xaZiUKp096QFfdSw7cNIKEWt2UIS7vf880KF54gny38,1831
 haiway/utils/noop.py,sha256=U8ocfoCgt-pY0owJDPtrRrj53cabeIXH9qCKWMQnoRk,1336
 haiway/utils/queue.py,sha256=6v2u3pA6A44IuCCTOjmCt3yLyOcm7PCRnrIGo25j-1o,6402
 haiway/utils/stream.py,sha256=lXaeveTY0-AYG5xVzcQYaiC6SUD5fUtHoMXiQcrQAAM,5723
-haiway-0.
-haiway-0.
-haiway-0.
-haiway-0.
+haiway-0.24.1.dist-info/METADATA,sha256=Wkbm7ERtjanGClRRM6m8TJi2m5bA8hujWwekCvdjfiQ,4919
+haiway-0.24.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+haiway-0.24.1.dist-info/licenses/LICENSE,sha256=3phcpHVNBP8jsi77gOO0E7rgKeDeu99Pi7DSnK9YHoQ,1069
+haiway-0.24.1.dist-info/RECORD,,
{haiway-0.23.2.dist-info → haiway-0.24.1.dist-info}/WHEEL
File without changes
{haiway-0.23.2.dist-info → haiway-0.24.1.dist-info}/licenses/LICENSE
File without changes