relib 1.3.0.tar.gz → 1.3.1.tar.gz
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- {relib-1.3.0 → relib-1.3.1}/PKG-INFO +1 -1
- {relib-1.3.0 → relib-1.3.1}/pyproject.toml +1 -1
- {relib-1.3.0 → relib-1.3.1}/relib/dict_utils.py +12 -13
- {relib-1.3.0 → relib-1.3.1}/relib/iter_utils.py +21 -18
- {relib-1.3.0 → relib-1.3.1}/relib/runtime_tools.py +6 -4
- {relib-1.3.0 → relib-1.3.1}/uv.lock +1 -1
- {relib-1.3.0 → relib-1.3.1}/.gitignore +0 -0
- {relib-1.3.0 → relib-1.3.1}/.python-version +0 -0
- {relib-1.3.0 → relib-1.3.1}/LICENSE +0 -0
- {relib-1.3.0 → relib-1.3.1}/README.md +0 -0
- {relib-1.3.0 → relib-1.3.1}/relib/__init__.py +0 -0
- {relib-1.3.0 → relib-1.3.1}/relib/io_utils.py +0 -0
- {relib-1.3.0 → relib-1.3.1}/relib/processing_utils.py +0 -0
- {relib-1.3.0 → relib-1.3.1}/relib/type_utils.py +0 -0
relib/dict_utils.py
@@ -52,9 +52,8 @@ def get_at[T](d: dict, keys: Iterable[Any], default: T) -> T:
 
 def dict_firsts[T, K](pairs: Iterable[tuple[K, T]]) -> dict[K, T]:
   result: dict[K, T] = {}
-  for key, item in pairs:
-    if key not in result:
-      result[key] = item
+  for key, value in pairs:
+    result.setdefault(key, value)
   return result
 
 def group[T, K](pairs: Iterable[tuple[K, T]]) -> dict[K, list[T]]:
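The rewrite keeps dict_firsts' behavior (the first value seen for each key wins) while letting dict.setdefault do the membership check. A minimal usage sketch, assuming the module path shown in the file list above:

from relib.dict_utils import dict_firsts

pairs = [("a", 1), ("b", 2), ("a", 3)]
# setdefault only stores a value the first time a key appears,
# so the later ("a", 3) is ignored
assert dict_firsts(pairs) == {"a": 1, "b": 2}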
@@ -63,6 +62,16 @@ def group[T, K](pairs: Iterable[tuple[K, T]]) -> dict[K, list[T]]:
     values_by_key.setdefault(key, []).append(value)
   return values_by_key
 
+def flatten_dict_inner(d, prefix=()):
+  for key, value in d.items():
+    if not isinstance(value, dict) or value == {}:
+      yield prefix + (key,), value
+    else:
+      yield from flatten_dict_inner(value, prefix + (key,))
+
+def flatten_dict(deep_dict: dict, prefix=()) -> dict:
+  return dict(flatten_dict_inner(deep_dict, prefix))
+
 @overload
 def deepen_dict[K1, U](d: dict[tuple[K1], U]) -> dict[K1, U]: ...
 @overload
@@ -85,13 +94,3 @@ def deepen_dict(d: dict[tuple[Any, ...], Any]) -> dict:
       curr = curr.setdefault(key, {})
     curr[head] = value
   return output
-
-def flatten_dict_inner(d, prefix=()):
-  for key, value in d.items():
-    if not isinstance(value, dict) or value == {}:
-      yield prefix + (key,), value
-    else:
-      yield from flatten_dict_inner(value, prefix + (key,))
-
-def flatten_dict(deep_dict: dict, prefix=()) -> dict:
-  return dict(flatten_dict_inner(deep_dict, prefix))
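flatten_dict and its helper flatten_dict_inner are only moved above deepen_dict in 1.3.1; the added and removed bodies are identical. A rough usage sketch of the pair, with behavior inferred from the code in this diff (module path assumed from the file list):

from relib.dict_utils import deepen_dict, flatten_dict

deep = {"a": {"b": 1, "c": {}}, "d": 2}
# leaves (non-dict values and empty dicts) are keyed by their path tuple
assert flatten_dict(deep) == {("a", "b"): 1, ("a", "c"): {}, ("d",): 2}
# deepen_dict rebuilds a nested dict from tuple keys
assert deepen_dict({("a", "b"): 1, ("d",): 2}) == {"a": {"b": 1}, "d": 2}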
relib/iter_utils.py
@@ -5,7 +5,7 @@ from .dict_utils import dict_firsts
 __all__ = [
   "distinct_by", "distinct", "drop_none",
   "first", "flatten",
-  "intersect",
+  "interleave", "intersect",
   "list_split",
   "move_value",
   "num_partitions",
@@ -36,27 +36,13 @@ def move_value[T](iterable: Iterable[T], from_i: int, to_i: int) -> list[T]:
   return values
 
 def reversed_enumerate[T](values: list[T] | tuple[T, ...]) -> Iterable[tuple[int, T]]:
-  return zip(
+  return zip(range(len(values))[::-1], reversed(values))
 
 def intersect[T](*iterables: Iterable[T]) -> list[T]:
   return list(set.intersection(*map(set, iterables)))
 
-
-
-@overload
-def flatten[T](iterable: Iterable[Iterable[T]], depth: Literal[1] = 1) -> list[T]: ...
-@overload
-def flatten[T](iterable: Iterable[Iterable[Iterable[T]]], depth: Literal[2]) -> list[T]: ...
-@overload
-def flatten[T](iterable: Iterable[Iterable[Iterable[Iterable[T]]]], depth: Literal[3]) -> list[T]: ...
-@overload
-def flatten[T](iterable: Iterable[Iterable[Iterable[Iterable[Iterable[T]]]]], depth: Literal[4]) -> list[T]: ...
-@overload
-def flatten(iterable: Iterable, depth: int) -> list: ...
-def flatten(iterable: Iterable, depth: int = 1) -> list:
-  for _ in range(depth):
-    iterable = chain.from_iterable(iterable)
-  return list(iterable)
+def interleave[T](*iterables: Iterable[T]) -> list[T]:
+  return flatten(zip(*iterables))
 
 def list_split[T](iterable: Iterable[T], sep: T) -> list[list[T]]:
   values = [sep, *iterable, sep]
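The new interleave helper zips its inputs positionally and flattens the resulting pairs, so the output alternates between the iterables and stops at the shortest one. flatten is now defined later in the module, which is fine because the call only happens at runtime. A short sketch (module path assumed from the file list):

from relib.iter_utils import interleave

# zip(*iterables) pairs items positionally; flatten(..., depth=1) joins the pairs
assert interleave([1, 2, 3], ["a", "b"]) == [1, "a", 2, "b"]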
@@ -77,6 +63,23 @@ def num_partitions[T](values: Iterable[T], num_parts: int) -> list[list[T]]:
   part_size = (len(values) / num_parts).__ceil__()
   return [values[i * part_size:(i + 1) * part_size] for i in range(num_parts)]
 
+@overload
+def flatten[T](iterable: Iterable[T], depth: Literal[0]) -> list[T]: ...
+@overload
+def flatten[T](iterable: Iterable[Iterable[T]], depth: Literal[1] = 1) -> list[T]: ...
+@overload
+def flatten[T](iterable: Iterable[Iterable[Iterable[T]]], depth: Literal[2]) -> list[T]: ...
+@overload
+def flatten[T](iterable: Iterable[Iterable[Iterable[Iterable[T]]]], depth: Literal[3]) -> list[T]: ...
+@overload
+def flatten[T](iterable: Iterable[Iterable[Iterable[Iterable[Iterable[T]]]]], depth: Literal[4]) -> list[T]: ...
+@overload
+def flatten(iterable: Iterable, depth: int) -> list: ...
+def flatten(iterable: Iterable, depth: int = 1) -> list:
+  for _ in range(depth):
+    iterable = chain.from_iterable(iterable)
+  return list(iterable)
+
 @overload
 def transpose[T1, T2](tuples: Iterable[tuple[T1, T2]], default_num_returns=0) -> tuple[list[T1], list[T2]]: ...
 @overload
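Besides being moved below num_partitions, flatten gains a Literal[0] overload in 1.3.1; with depth=0 the loop body never runs and the input is simply copied into a list. A small sketch of the depth parameter (import path assumed from the file list):

from relib.iter_utils import flatten

nested = [[1, 2], [3]]
assert flatten(nested, depth=0) == [[1, 2], [3]]  # no unnesting, just listified
assert flatten(nested) == [1, 2, 3]               # default depth=1
assert flatten([[[1], [2]], [[3]]], depth=2) == [1, 2, 3]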
relib/runtime_tools.py
@@ -10,6 +10,7 @@ from .processing_utils import noop
 __all__ = [
   "as_async", "async_limit",
   "clear_console", "console_link",
+  "default_executor", "default_workers",
   "roll_tasks",
   "measure_duration",
 ]
@@ -18,6 +19,7 @@ P = ParamSpec("P")
 R = TypeVar("R")
 
 default_workers = min(32, (os.cpu_count() or 1) + 4)
+default_executor = ThreadPoolExecutor(max_workers=default_workers)
 
 def clear_console() -> None:
   os.system("cls" if os.name == "nt" else "clear")
@@ -42,8 +44,8 @@ async def roll_tasks[T](tasks: Iterable[Awaitable[T]], workers=default_workers,
     update = partial(pbar.update, 1)
     return await asyncio.gather(*[worker(task, semaphore, update) for task in tasks])
 
-def as_async(workers=
-  executor = ThreadPoolExecutor(max_workers=workers)
+def as_async(workers: int | ThreadPoolExecutor = default_executor) -> Callable[[Callable[P, R]], Callable[P, Awaitable[R]]]:
+  executor = ThreadPoolExecutor(max_workers=workers) if isinstance(workers, int) else workers
 
   def on_fn(func: Callable[P, R]) -> Callable[P, Awaitable[R]]:
     @wraps(func)
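Previously as_async built a fresh ThreadPoolExecutor for every decorated function; it now defaults to the shared module-level default_executor and also accepts either a worker count or an existing executor. A hedged usage sketch based on the new signature (the executor-backed wrapping inside on_fn is unchanged in this hunk):

import asyncio
from concurrent.futures import ThreadPoolExecutor
from relib.runtime_tools import as_async

@as_async()  # runs on the shared default_executor
def blocking_double(x: int) -> int:
  return x * 2

pool = ThreadPoolExecutor(max_workers=2)

@as_async(pool)  # reuse one explicit executor across several functions
def blocking_square(x: int) -> int:
  return x * x

async def main() -> None:
  print(await asyncio.gather(blocking_double(21), blocking_square(5)))

asyncio.run(main())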
@@ -79,7 +81,7 @@ class measure_duration:
   def __exit__(self, *_):
     duration = round(time() - self.start, 4)
     depth = len(active_mds) - 1
-    indent =
-    text =
+    indent = "──" * depth + " " * (depth > 0)
+    text = f"{self.name}: {duration} seconds"
     print(indent + text)
     active_mds.remove(self)
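In 1.3.1, __exit__ builds the printed line from a "──" prefix per nesting level plus an f-string message. A rough sketch of what nested blocks print (the constructor argument is an assumption; only the printing logic is taken from this hunk):

from time import sleep
from relib.runtime_tools import measure_duration

with measure_duration("outer"):
  with measure_duration("inner"):
    sleep(0.1)
# The inner block exits first and prints with one "──" prefix, e.g.:
# ── inner: 0.1003 seconds
# outer: 0.1005 seconds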