opengris-scaler 1.12.28 (cp313-cp313-musllinux_1_2_x86_64.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of opengris-scaler might be problematic.

Files changed (187)
  1. opengris_scaler-1.12.28.dist-info/METADATA +728 -0
  2. opengris_scaler-1.12.28.dist-info/RECORD +187 -0
  3. opengris_scaler-1.12.28.dist-info/WHEEL +5 -0
  4. opengris_scaler-1.12.28.dist-info/entry_points.txt +10 -0
  5. opengris_scaler-1.12.28.dist-info/licenses/LICENSE +201 -0
  6. opengris_scaler-1.12.28.dist-info/licenses/LICENSE.spdx +7 -0
  7. opengris_scaler-1.12.28.dist-info/licenses/NOTICE +8 -0
  8. opengris_scaler.libs/libcapnp-1-e88d5415.0.1.so +0 -0
  9. opengris_scaler.libs/libgcc_s-2298274a.so.1 +0 -0
  10. opengris_scaler.libs/libkj-1-9bebd8ac.0.1.so +0 -0
  11. opengris_scaler.libs/libstdc++-08d5c7eb.so.6.0.33 +0 -0
  12. scaler/__init__.py +14 -0
  13. scaler/about.py +5 -0
  14. scaler/client/__init__.py +0 -0
  15. scaler/client/agent/__init__.py +0 -0
  16. scaler/client/agent/client_agent.py +210 -0
  17. scaler/client/agent/disconnect_manager.py +27 -0
  18. scaler/client/agent/future_manager.py +112 -0
  19. scaler/client/agent/heartbeat_manager.py +74 -0
  20. scaler/client/agent/mixins.py +89 -0
  21. scaler/client/agent/object_manager.py +98 -0
  22. scaler/client/agent/task_manager.py +64 -0
  23. scaler/client/client.py +658 -0
  24. scaler/client/future.py +252 -0
  25. scaler/client/object_buffer.py +129 -0
  26. scaler/client/object_reference.py +25 -0
  27. scaler/client/serializer/__init__.py +0 -0
  28. scaler/client/serializer/default.py +16 -0
  29. scaler/client/serializer/mixins.py +38 -0
  30. scaler/cluster/__init__.py +0 -0
  31. scaler/cluster/cluster.py +115 -0
  32. scaler/cluster/combo.py +150 -0
  33. scaler/cluster/object_storage_server.py +45 -0
  34. scaler/cluster/scheduler.py +86 -0
  35. scaler/config/__init__.py +0 -0
  36. scaler/config/defaults.py +94 -0
  37. scaler/config/loader.py +96 -0
  38. scaler/config/mixins.py +20 -0
  39. scaler/config/section/__init__.py +0 -0
  40. scaler/config/section/cluster.py +55 -0
  41. scaler/config/section/ecs_worker_adapter.py +85 -0
  42. scaler/config/section/native_worker_adapter.py +43 -0
  43. scaler/config/section/object_storage_server.py +8 -0
  44. scaler/config/section/scheduler.py +54 -0
  45. scaler/config/section/symphony_worker_adapter.py +47 -0
  46. scaler/config/section/top.py +13 -0
  47. scaler/config/section/webui.py +21 -0
  48. scaler/config/types/__init__.py +0 -0
  49. scaler/config/types/network_backend.py +12 -0
  50. scaler/config/types/object_storage_server.py +45 -0
  51. scaler/config/types/worker.py +62 -0
  52. scaler/config/types/zmq.py +83 -0
  53. scaler/entry_points/__init__.py +0 -0
  54. scaler/entry_points/cluster.py +133 -0
  55. scaler/entry_points/object_storage_server.py +45 -0
  56. scaler/entry_points/scheduler.py +144 -0
  57. scaler/entry_points/top.py +286 -0
  58. scaler/entry_points/webui.py +48 -0
  59. scaler/entry_points/worker_adapter_ecs.py +191 -0
  60. scaler/entry_points/worker_adapter_native.py +137 -0
  61. scaler/entry_points/worker_adapter_symphony.py +98 -0
  62. scaler/io/__init__.py +0 -0
  63. scaler/io/async_binder.py +89 -0
  64. scaler/io/async_connector.py +95 -0
  65. scaler/io/async_object_storage_connector.py +225 -0
  66. scaler/io/mixins.py +154 -0
  67. scaler/io/sync_connector.py +68 -0
  68. scaler/io/sync_object_storage_connector.py +247 -0
  69. scaler/io/sync_subscriber.py +83 -0
  70. scaler/io/utility.py +80 -0
  71. scaler/io/ymq/__init__.py +0 -0
  72. scaler/io/ymq/_ymq.pyi +95 -0
  73. scaler/io/ymq/ymq.py +138 -0
  74. scaler/io/ymq_async_object_storage_connector.py +184 -0
  75. scaler/io/ymq_sync_object_storage_connector.py +184 -0
  76. scaler/object_storage/__init__.py +0 -0
  77. scaler/protocol/__init__.py +0 -0
  78. scaler/protocol/capnp/__init__.py +0 -0
  79. scaler/protocol/capnp/_python.py +6 -0
  80. scaler/protocol/capnp/common.capnp +68 -0
  81. scaler/protocol/capnp/message.capnp +218 -0
  82. scaler/protocol/capnp/object_storage.capnp +57 -0
  83. scaler/protocol/capnp/status.capnp +73 -0
  84. scaler/protocol/introduction.md +105 -0
  85. scaler/protocol/python/__init__.py +0 -0
  86. scaler/protocol/python/common.py +140 -0
  87. scaler/protocol/python/message.py +751 -0
  88. scaler/protocol/python/mixins.py +13 -0
  89. scaler/protocol/python/object_storage.py +118 -0
  90. scaler/protocol/python/status.py +279 -0
  91. scaler/protocol/worker.md +228 -0
  92. scaler/scheduler/__init__.py +0 -0
  93. scaler/scheduler/allocate_policy/__init__.py +0 -0
  94. scaler/scheduler/allocate_policy/allocate_policy.py +9 -0
  95. scaler/scheduler/allocate_policy/capability_allocate_policy.py +280 -0
  96. scaler/scheduler/allocate_policy/even_load_allocate_policy.py +159 -0
  97. scaler/scheduler/allocate_policy/mixins.py +55 -0
  98. scaler/scheduler/controllers/__init__.py +0 -0
  99. scaler/scheduler/controllers/balance_controller.py +65 -0
  100. scaler/scheduler/controllers/client_controller.py +131 -0
  101. scaler/scheduler/controllers/config_controller.py +31 -0
  102. scaler/scheduler/controllers/graph_controller.py +424 -0
  103. scaler/scheduler/controllers/information_controller.py +81 -0
  104. scaler/scheduler/controllers/mixins.py +194 -0
  105. scaler/scheduler/controllers/object_controller.py +147 -0
  106. scaler/scheduler/controllers/scaling_policies/__init__.py +0 -0
  107. scaler/scheduler/controllers/scaling_policies/fixed_elastic.py +145 -0
  108. scaler/scheduler/controllers/scaling_policies/mixins.py +10 -0
  109. scaler/scheduler/controllers/scaling_policies/null.py +14 -0
  110. scaler/scheduler/controllers/scaling_policies/types.py +9 -0
  111. scaler/scheduler/controllers/scaling_policies/utility.py +20 -0
  112. scaler/scheduler/controllers/scaling_policies/vanilla.py +95 -0
  113. scaler/scheduler/controllers/task_controller.py +376 -0
  114. scaler/scheduler/controllers/worker_controller.py +169 -0
  115. scaler/scheduler/object_usage/__init__.py +0 -0
  116. scaler/scheduler/object_usage/object_tracker.py +131 -0
  117. scaler/scheduler/scheduler.py +251 -0
  118. scaler/scheduler/task/__init__.py +0 -0
  119. scaler/scheduler/task/task_state_machine.py +92 -0
  120. scaler/scheduler/task/task_state_manager.py +61 -0
  121. scaler/ui/__init__.py +0 -0
  122. scaler/ui/constants.py +9 -0
  123. scaler/ui/live_display.py +147 -0
  124. scaler/ui/memory_window.py +146 -0
  125. scaler/ui/setting_page.py +40 -0
  126. scaler/ui/task_graph.py +832 -0
  127. scaler/ui/task_log.py +107 -0
  128. scaler/ui/utility.py +66 -0
  129. scaler/ui/webui.py +147 -0
  130. scaler/ui/worker_processors.py +104 -0
  131. scaler/utility/__init__.py +0 -0
  132. scaler/utility/debug.py +19 -0
  133. scaler/utility/event_list.py +63 -0
  134. scaler/utility/event_loop.py +58 -0
  135. scaler/utility/exceptions.py +42 -0
  136. scaler/utility/formatter.py +44 -0
  137. scaler/utility/graph/__init__.py +0 -0
  138. scaler/utility/graph/optimization.py +27 -0
  139. scaler/utility/graph/topological_sorter.py +11 -0
  140. scaler/utility/graph/topological_sorter_graphblas.py +174 -0
  141. scaler/utility/identifiers.py +107 -0
  142. scaler/utility/logging/__init__.py +0 -0
  143. scaler/utility/logging/decorators.py +25 -0
  144. scaler/utility/logging/scoped_logger.py +33 -0
  145. scaler/utility/logging/utility.py +183 -0
  146. scaler/utility/many_to_many_dict.py +123 -0
  147. scaler/utility/metadata/__init__.py +0 -0
  148. scaler/utility/metadata/profile_result.py +31 -0
  149. scaler/utility/metadata/task_flags.py +30 -0
  150. scaler/utility/mixins.py +13 -0
  151. scaler/utility/network_util.py +7 -0
  152. scaler/utility/one_to_many_dict.py +72 -0
  153. scaler/utility/queues/__init__.py +0 -0
  154. scaler/utility/queues/async_indexed_queue.py +37 -0
  155. scaler/utility/queues/async_priority_queue.py +70 -0
  156. scaler/utility/queues/async_sorted_priority_queue.py +45 -0
  157. scaler/utility/queues/indexed_queue.py +114 -0
  158. scaler/utility/serialization.py +9 -0
  159. scaler/version.txt +1 -0
  160. scaler/worker/__init__.py +0 -0
  161. scaler/worker/agent/__init__.py +0 -0
  162. scaler/worker/agent/heartbeat_manager.py +107 -0
  163. scaler/worker/agent/mixins.py +137 -0
  164. scaler/worker/agent/processor/__init__.py +0 -0
  165. scaler/worker/agent/processor/object_cache.py +107 -0
  166. scaler/worker/agent/processor/processor.py +285 -0
  167. scaler/worker/agent/processor/streaming_buffer.py +28 -0
  168. scaler/worker/agent/processor_holder.py +147 -0
  169. scaler/worker/agent/processor_manager.py +369 -0
  170. scaler/worker/agent/profiling_manager.py +109 -0
  171. scaler/worker/agent/task_manager.py +150 -0
  172. scaler/worker/agent/timeout_manager.py +19 -0
  173. scaler/worker/preload.py +84 -0
  174. scaler/worker/worker.py +265 -0
  175. scaler/worker_adapter/__init__.py +0 -0
  176. scaler/worker_adapter/common.py +26 -0
  177. scaler/worker_adapter/ecs.py +269 -0
  178. scaler/worker_adapter/native.py +155 -0
  179. scaler/worker_adapter/symphony/__init__.py +0 -0
  180. scaler/worker_adapter/symphony/callback.py +45 -0
  181. scaler/worker_adapter/symphony/heartbeat_manager.py +79 -0
  182. scaler/worker_adapter/symphony/message.py +24 -0
  183. scaler/worker_adapter/symphony/task_manager.py +289 -0
  184. scaler/worker_adapter/symphony/worker.py +204 -0
  185. scaler/worker_adapter/symphony/worker_adapter.py +139 -0
  186. src/scaler/io/ymq/_ymq.so +0 -0
  187. src/scaler/object_storage/object_storage_server.so +0 -0
scaler/utility/graph/topological_sorter.py
@@ -0,0 +1,11 @@
+ import logging
+
+ try:
+     from scaler.utility.graph.topological_sorter_graphblas import TopologicalSorter
+
+     logging.info("using GraphBLAS for graph calculation")
+ except ImportError as e:
+     assert isinstance(e, Exception)
+     from graphlib import TopologicalSorter  # type: ignore[assignment, no-redef]
+
+ assert isinstance(TopologicalSorter, object)
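For reference, both import paths expose graphlib's interface, so calling code stays backend-agnostic. A minimal usage sketch (the three-node graph is illustrative):

```python
# Resolves to the GraphBLAS-backed sorter when python-graphblas is
# installed, otherwise to the stdlib graphlib implementation.
from scaler.utility.graph.topological_sorter import TopologicalSorter

# {node: predecessors} mapping, as in graphlib
sorter = TopologicalSorter({"b": {"a"}, "c": {"a", "b"}})
print(list(sorter.static_order()))  # e.g. ['a', 'b', 'c']
```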
scaler/utility/graph/topological_sorter_graphblas.py
@@ -0,0 +1,174 @@
+ import collections
+ import graphlib
+ import itertools
+ from typing import Generic, Hashable, Iterable, List, Mapping, Optional, Tuple, TypeVar
+
+ from bidict import bidict
+
+ try:
+     import graphblas as gb
+     import numpy as np  # noqa
+ except ImportError:
+     raise ImportError("Please run 'pip install python-graphblas' to enable GraphBLAS support")
+
+ GraphKeyType = TypeVar("GraphKeyType", bound=Hashable)
+
+
+ class TopologicalSorter(Generic[GraphKeyType]):
+     """
+     Implements graphlib's TopologicalSorter, but the graph handling is backed by GraphBLAS
+     Reference: https://github.com/python/cpython/blob/4a3ea1fdd890e5e2ec26540dc3c958a52fba6556/Lib/graphlib.py
+     """
+
+     def __init__(self, graph: Optional[Mapping[GraphKeyType, Iterable[GraphKeyType]]] = None):
+         # the layout of the matrix is (in-vertex, out-vertex)
+         self._matrix = gb.Matrix(gb.dtypes.BOOL)
+         self._key_to_id: bidict[GraphKeyType, int] = bidict()
+
+         self._graph_matrix_mask: Optional[np.ndarray] = None
+         self._visited_vertices_mask: Optional[np.ndarray] = None
+         self._ready_nodes: Optional[List[GraphKeyType]] = None
+
+         self._n_done = 0
+         self._n_visited = 0
+
+         if graph is not None:
+             self.merge_graph(graph)
+
+     def add(self, node: GraphKeyType, *predecessors: GraphKeyType) -> None:
+         self.merge_graph({node: predecessors})
+
+     def merge_graph(self, graph: Mapping[GraphKeyType, Iterable[GraphKeyType]]) -> None:
+         if self._ready_nodes is not None:
+             raise ValueError("nodes cannot be added after a call to prepare()")
+
+         # cache old dim to compare later when resizing matrix
+         old_dim = len(self._key_to_id)
+
+         # maintain iterable copies for iterable predecessors
+         graph_iterable_copy = {}
+
+         # update key to id mappings
+         for node, predecessors in graph.items():
+             if node not in self._key_to_id:
+                 self._key_to_id[node] = len(self._key_to_id)
+
+             # copy iterator if predecessors is an iterable
+             if isinstance(predecessors, collections.abc.Iterable):
+                 predecessors, graph_iterable_copy[node] = itertools.tee(predecessors)
+
+             for pred in predecessors:
+                 if pred not in self._key_to_id:
+                     self._key_to_id[pred] = len(self._key_to_id)
+
+         # resize at once as it is faster
+         if old_dim != len(self._key_to_id):
+             self._matrix.resize(len(self._key_to_id), len(self._key_to_id))
+
+         # update matrix
+         for node, predecessors in graph.items():
+             if node in graph_iterable_copy:
+                 predecessors = graph_iterable_copy[node]
+
+             for pred in predecessors:
+                 self._matrix[self._key_to_id[node], self._key_to_id[pred]] = True
+
+     def prepare(self) -> None:
+         if self._ready_nodes is not None:
+             raise ValueError("cannot prepare() more than once")
+
+         self._graph_matrix_mask = np.ones(len(self._key_to_id), bool)
+         self._visited_vertices_mask = np.zeros(len(self._key_to_id), bool)
+
+         self._ready_nodes = self._get_zero_degree_keys()
+         for node in self._ready_nodes:
+             self._visited_vertices_mask[self._key_to_id[node]] = True
+         self._n_visited += len(self._ready_nodes)
+
+         if self._has_cycle():
+             raise graphlib.CycleError("cycle detected")
+
+     def get_ready(self) -> Tuple[GraphKeyType, ...]:
+         if self._ready_nodes is None:
+             raise ValueError("prepare() must be called first")
+
+         result = tuple(self._ready_nodes)
+         self._ready_nodes.clear()
+         return result
+
+     def is_active(self) -> bool:
+         if self._ready_nodes is None:
+             raise ValueError("prepare() must be called first")
+         return self._n_done < self._n_visited or bool(self._ready_nodes)
+
+     def __bool__(self) -> bool:
+         return self.is_active()
+
+     def done(self, *nodes: GraphKeyType) -> None:
+         if self._ready_nodes is None:
+             raise ValueError("prepare() must be called first")
+
+         for node in nodes:
+             if node not in self._key_to_id:
+                 raise ValueError(f"node {node!r} was not added using add()")
+
+             _id = self._key_to_id[node]
+
+             if not self._visited_vertices_mask[_id]:
+                 raise ValueError(f"node {node!r} is not ready")
+
+             if not self._graph_matrix_mask[_id]:
+                 raise ValueError(f"node {node!r} is already done")
+
+             self._graph_matrix_mask[_id] = False
+         self._n_done += len(nodes)
+
+         new_ready_nodes = self._get_zero_degree_keys()
+         for node in new_ready_nodes:
+             self._visited_vertices_mask[self._key_to_id[node]] = True
+         self._ready_nodes.extend(new_ready_nodes)
+         self._n_visited += len(new_ready_nodes)
+
+     def static_order(self) -> Iterable[GraphKeyType]:
+         self.prepare()
+         while self.is_active():
+             node_group = self.get_ready()
+             yield from node_group
+             self.done(*node_group)
+
+     def _has_cycle(self) -> bool:
+         """
+         Detect cycle using trace(A^n) != 0.
+         https://arxiv.org/pdf/1610.01200.pdf
+
+         :return: True if cycle is found, otherwise False
+         """
+         matrix_n = gb.Vector.from_dense(np.ones(len(self._key_to_id), bool), missing_value=False).diag()
+         for _ in range(len(self._key_to_id)):
+             # use LOR_PAIR to compute matrix multiplication over boolean matrices
+             matrix_n << gb.semiring.lor_pair(matrix_n @ self._matrix)
+             # check diagonal for any truthy values
+             if matrix_n.diag().reduce(gb.monoid.lor):
+                 return True
+         return False
+
+     def _get_zero_degree_keys(self) -> List[GraphKeyType]:
+         ids = self._get_mask_diff(self._visited_vertices_mask, self._get_zero_degree_mask(self._get_masked_matrix()))
+         return [self._key_to_id.inverse[_id] for _id in ids]
+
+     def _get_masked_matrix(self) -> gb.Matrix:
+         # convert vector mask to matrix diagonal and then perform matrix multiplication to mask matrix
+         # https://github.com/DrTimothyAldenDavis/GraphBLAS/issues/48#issuecomment-858596341
+         return gb.semiring.lor_pair(
+             self._matrix @ gb.Vector.from_dense(self._graph_matrix_mask, missing_value=False).diag()
+         )
+
+     @classmethod
+     def _get_zero_degree_mask(cls, masked_matrix: gb.Matrix) -> np.ndarray:
+         degrees = masked_matrix.reduce_rowwise(gb.monoid.lor)
+         indices, _ = degrees.to_coo(indices=True, values=False, sort=False)
+         return np.logical_not(np.in1d(np.arange(masked_matrix.nrows), indices))  # type: ignore[attr-defined]
+
+     @staticmethod
+     def _get_mask_diff(old_mask: np.ndarray, new_mask: np.ndarray) -> List[int]:
+         return np.argwhere(old_mask != new_mask).ravel().tolist()
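The class mirrors graphlib's prepare()/get_ready()/done() protocol, so it can drive incremental scheduling rather than only producing a full ordering. A sketch of the driving loop, with illustrative node names:

```python
from scaler.utility.graph.topological_sorter_graphblas import TopologicalSorter

sorter = TopologicalSorter()
sorter.add("compile", "fetch")   # "compile" depends on "fetch"
sorter.add("test", "compile")
sorter.prepare()                 # freezes the graph; raises graphlib.CycleError on cycles

while sorter.is_active():
    ready = sorter.get_ready()   # nodes whose predecessors are all done
    for node in ready:
        print("running", node)
    sorter.done(*ready)          # marking nodes done unlocks their successors
```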
scaler/utility/identifiers.py
@@ -0,0 +1,107 @@
+ import abc
+ import hashlib
+ import uuid
+ from typing import Optional
+
+
+ class Identifier(bytes, metaclass=abc.ABCMeta):
+     @abc.abstractmethod
+     def __repr__(self) -> str:
+         raise NotImplementedError()
+
+
+ class ClientID(Identifier):
+     def __repr__(self) -> str:
+         return f"ClientID({self.decode()})"
+
+     @staticmethod
+     def generate_client_id(name: Optional[str] = None) -> "ClientID":
+         unique_client_tag = uuid.uuid4().bytes.hex()
+
+         if name is None:
+             return ClientID(f"Client|{unique_client_tag}".encode())
+         else:
+             return ClientID(f"Client|{name}|{unique_client_tag}".encode())
+
+
+ class WorkerID(Identifier):
+     def __repr__(self) -> str:
+         return f"WorkerID({self.decode()})"
+
+     def is_valid(self) -> bool:
+         return self != _INVALID_WORKER_ID
+
+     @staticmethod
+     def invalid_worker_id() -> "WorkerID":
+         return _INVALID_WORKER_ID
+
+     @staticmethod
+     def generate_worker_id(name: str) -> "WorkerID":
+         unique_worker_tag = uuid.uuid4().bytes.hex()
+         return WorkerID(f"Worker|{name}|{unique_worker_tag}".encode())
+
+
+ _INVALID_WORKER_ID = WorkerID(b"")
+
+
+ class ProcessorID(Identifier):
+     def __repr__(self) -> str:
+         return f"ProcessorID({self.hex()})"
+
+     @staticmethod
+     def generate_processor_id() -> "ProcessorID":
+         return ProcessorID(uuid.uuid4().bytes)
+
+
+ class TaskID(Identifier):
+     def __repr__(self) -> str:
+         return f"TaskID({self.hex()})"
+
+     @staticmethod
+     def generate_task_id() -> "TaskID":
+         return TaskID(uuid.uuid4().bytes)
+
+
+ class ObjectID(bytes):
+     """
+     Scaler 32-byte object IDs.
+
+     Object IDs are built from two 16-byte parts:
+
+     - the first 16 bytes uniquely identify the owner of the object (i.e. the Scaler client's hash);
+     - the second 16 bytes uniquely identify the object's content.
+     """
+
+     SERIALIZER_TAG = hashlib.md5(b"serializer").digest()
+
+     def __new__(cls, value: bytes):
+         if len(value) != 32:
+             raise ValueError("Scaler object ID must be 32 bytes.")
+
+         return super().__new__(cls, value)
+
+     @staticmethod
+     def generate_object_id(owner: ClientID) -> "ObjectID":
+         owner_hash = hashlib.md5(owner).digest()
+         unique_object_tag = uuid.uuid4().bytes
+         return ObjectID(owner_hash + unique_object_tag)
+
+     @staticmethod
+     def generate_serializer_object_id(owner: ClientID) -> "ObjectID":
+         owner_hash = hashlib.md5(owner).digest()
+         return ObjectID(owner_hash + ObjectID.SERIALIZER_TAG)
+
+     def owner_hash(self) -> bytes:
+         return self[:16]
+
+     def object_tag(self) -> bytes:
+         return self[16:]
+
+     def is_serializer(self) -> bool:
+         return self.object_tag() == ObjectID.SERIALIZER_TAG
+
+     def is_owner(self, owner: ClientID) -> bool:
+         return hashlib.md5(owner).digest() == self.owner_hash()
+
+     def __repr__(self) -> str:
+         return f"ObjectID(owner_hash={self.owner_hash().hex()}, object_tag={self.object_tag().hex()})"
scaler/utility/logging/__init__.py (file without changes)
scaler/utility/logging/decorators.py
@@ -0,0 +1,25 @@
+ import functools
+ import inspect
+ import logging
+ import typing
+
+ from scaler.utility.logging.scoped_logger import ScopedLogger
+
+
+ def log_function(level_number: int = 2, logging_level: int = logging.INFO) -> typing.Callable:
+     def decorator(func: typing.Callable) -> typing.Callable:
+         @functools.wraps(func)
+         def wrapper(*args, **kwargs):
+             with ScopedLogger(
+                 f"execute {func.__name__} at {get_caller_location(level_number)}", logging_level=logging_level
+             ):
+                 return func(*args, **kwargs)
+
+         return wrapper
+
+     return decorator
+
+
+ def get_caller_location(stack_level: int):
+     caller = inspect.getframeinfo(inspect.stack()[stack_level][0])
+     return f"{caller.filename}:{caller.lineno}"
scaler/utility/logging/scoped_logger.py
@@ -0,0 +1,33 @@
+ import datetime
+ import logging
+ import time
+ from typing import Optional
+
+
+ class ScopedLogger:
+     def __init__(self, message: str, logging_level=logging.INFO):
+         self.timer = TimedLogger(message=message, logging_level=logging_level)
+
+     def __enter__(self):
+         self.timer.begin()
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         self.timer.end()
+
+
+ class TimedLogger:
+     def __init__(self, message: str, logging_level=logging.INFO):
+         self.message = message
+         self.logging_level = logging_level
+         self.timer: Optional[int] = None
+
+     def begin(self):
+         self.timer = time.perf_counter_ns()
+         logging.log(self.logging_level, f"beginning {self.message}")
+
+     def end(self):
+         elapsed = time.perf_counter_ns() - self.timer
+         offset = datetime.timedelta(
+             seconds=int(elapsed / 1e9), milliseconds=int(elapsed % 1e9 / 1e6), microseconds=int(elapsed % 1e6 / 1e3)
+         )
+         logging.log(self.logging_level, f"completed {self.message} in {offset}")
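ScopedLogger pairs a "beginning ..." line on entry with a "completed ... in <elapsed>" line on exit. A minimal sketch (the message and workload are illustrative):

```python
import logging
from scaler.utility.logging.scoped_logger import ScopedLogger

logging.basicConfig(level=logging.INFO)

with ScopedLogger("summing a million integers"):
    total = sum(range(1_000_000))
# INFO beginning summing a million integers
# INFO completed summing a million integers in 0:00:00.0...
```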
scaler/utility/logging/utility.py
@@ -0,0 +1,183 @@
+ import dataclasses
+ import enum
+ import logging
+ import logging.config
+ import logging.handlers
+ import os
+ import typing
+
+ from scaler.config.defaults import DEFAULT_LOGGING_PATHS
+
+
+ class LogType(enum.Enum):
+     Screen = enum.auto()
+     File = enum.auto()
+
+
+ @dataclasses.dataclass
+ class LogPath:
+     log_type: LogType
+     path: str
+
+
+ class LoggingLevel(enum.Enum):
+     CRITICAL = logging.CRITICAL
+     ERROR = logging.ERROR
+     WARNING = logging.WARNING
+     INFO = logging.INFO
+     DEBUG = logging.DEBUG
+     NOTSET = logging.NOTSET
+
+
+ def setup_logger(
+     log_paths: typing.Tuple[str, ...] = DEFAULT_LOGGING_PATHS,
+     logging_config_file: typing.Optional[str] = None,
+     logging_level: str = LoggingLevel.INFO.name,
+ ):
+     if not log_paths and not logging_config_file:
+         return
+
+     if isinstance(log_paths, str):
+         log_paths = (log_paths,)
+
+     if logging_config_file is not None:
+         print(f"use logging config file: {logging_config_file}")
+         logging.config.fileConfig(logging_config_file, disable_existing_loggers=True)
+         return
+
+     resolved_log_paths = [LogPath(log_type=__detect_log_types(file_name), path=file_name) for file_name in log_paths]
+     __logging_config(log_paths=resolved_log_paths, logging_level=logging_level)
+     logging.info(f"logging to {log_paths}")
+
+
+ def __detect_log_types(file_name: str) -> LogType:
+     if file_name in {"-", "/dev/stdout"}:
+         return LogType.Screen
+
+     return LogType.File
+
+
+ def __format(name) -> str:
+     if not name:
+         return ""
+
+     return "%({name})s".format(name=name)
+
+
+ def __generate_log_config() -> typing.Dict:
+     return {
+         "version": 1,
+         "disable_existing_loggers": False,  # keep loggers created before this config is applied
+         "formatters": {
+             "standard": {
+                 "format": "[{levelname}]{asctime}: {message}".format(
+                     levelname=__format("levelname"), asctime=__format("asctime"), message=__format("message")
+                 ),
+                 "datefmt": "%Y-%m-%d %H:%M:%S%z",
+             },
+             "verbose": {
+                 "format": "[{levelname}]{asctime}:{module}:{funcName}:{lineno}: {message}".format(
+                     levelname=__format("levelname"),
+                     asctime=__format("asctime"),
+                     module=__format("module"),
+                     funcName=__format("funcName"),
+                     lineno=__format("lineno"),
+                     message=__format("message"),
+                 ),
+                 "datefmt": "%Y-%m-%d %H:%M:%S%z",
+             },
+         },
+         "handlers": {},
+         "loggers": {"": {"handlers": [], "level": "DEBUG", "propagate": True}},
+     }
+
+
+ def __logging_config(log_paths: typing.List[LogPath], logging_level: str = LoggingLevel.INFO.name):
+     logging.addLevelName(logging.INFO, "INFO")
+     logging.addLevelName(logging.WARNING, "WARN")
+     logging.addLevelName(logging.ERROR, "EROR")
+     logging.addLevelName(logging.DEBUG, "DEBG")
+     logging.addLevelName(logging.CRITICAL, "CTIC")
+
+     config = __generate_log_config()
+     handlers = config["handlers"]
+     root_loggers = config["loggers"][""]["handlers"]
+
+     for log_path in log_paths:
+         if log_path.log_type == LogType.Screen:
+             handlers["console"] = __create_stdout_handler(logging_level)
+             root_loggers.append("console")
+             continue
+
+         elif log_path.log_type == LogType.File:
+             handlers[log_path.path] = __create_time_rotating_file_handler(logging_level, log_path.path)
+             root_loggers.append(log_path.path)
+             continue
+
+         raise TypeError(f"Unsupported LogPath: {log_path}")
+
+     logging.config.dictConfig(config)
+
+
+ def __create_stdout_handler(logging_level: str):
+     return {
+         "class": "logging.StreamHandler",
+         "level": logging_level,
+         "formatter": "standard",
+         "stream": "ext://sys.stdout",
+     }
+
+
+ def __create_time_rotating_file_handler(logging_level: str, file_path: str):
+     return {
+         "class": "logging.handlers.TimedRotatingFileHandler",
+         "level": logging_level,
+         "formatter": "verbose",
+         "filename": os.path.expandvars(os.path.expanduser(file_path)),
+         "when": "midnight",
+     }
+
+
+ def __create_size_rotating_file_handler(log_path) -> typing.Dict:
+     return {
+         "class": "logging.handlers.RotatingFileHandler",
+         "level": "INFO",
+         "formatter": "verbose",
+         "filename": os.path.expandvars(os.path.expanduser(log_path)),
+         "maxBytes": 10485760,
+         "backupCount": 20,
+         "encoding": "utf8",
+     }
+
+
+ def __parse_logging_level(value):
+     return LoggingLevel(value).value
+
+
+ def get_logger_info(logger: logging.Logger) -> typing.Tuple[str, str, typing.Tuple[str, ...]]:
+     """
+     Retrieves the format string, level string, and all active log paths from a logger's handlers.
+     """
+     log_level_str = logging.getLevelName(logger.getEffectiveLevel())
+     log_format_str = ""
+     log_paths: typing.List[str] = []
+
+     if logger.hasHandlers():
+         first_handler = logger.handlers[0]
+         if first_handler.formatter:
+             log_format_str = getattr(first_handler.formatter, "_fmt", "")
+
+         for handler in logger.handlers:
+             if isinstance(handler, logging.handlers.BaseRotatingHandler):
+                 log_paths.append(handler.baseFilename)
+             elif isinstance(handler, logging.StreamHandler) and hasattr(handler.stream, "name"):
+                 if "stdout" in handler.stream.name:
+                     log_paths.append("/dev/stdout")
+                 elif "stderr" in handler.stream.name:
+                     log_paths.append("/dev/stderr")
+
+     # If no specific path was found, default to stdout
+     if not log_paths:
+         log_paths.append("/dev/stdout")
+
+     return log_format_str, log_level_str, tuple(log_paths)
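A sketch of wiring this up: "-" resolves to LogType.Screen (a stdout handler), while any other entry becomes a midnight-rotating file handler (the file path below is illustrative):

```python
import logging
from scaler.utility.logging.utility import setup_logger

# "-" selects the stdout handler; other entries become TimedRotatingFileHandlers
setup_logger(log_paths=("-", "/tmp/scaler.log"), logging_level="INFO")
logging.info("scheduler started")  # emitted to both destinations
```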
scaler/utility/many_to_many_dict.py
@@ -0,0 +1,123 @@
+ from typing import Dict, Generic, Iterable, Set, Tuple, TypeVar
+
+ KeyT = TypeVar("KeyT")
+ ValueT = TypeVar("ValueT")
+
+ LeftKeyT = TypeVar("LeftKeyT")
+ RightKeyT = TypeVar("RightKeyT")
+
+
+ class ManyToManyDict(Generic[LeftKeyT, RightKeyT]):
+     def __init__(self):
+         self._left_key_to_right_key_set: _KeyValueDictSet[LeftKeyT, RightKeyT] = _KeyValueDictSet()
+         self._right_key_to_left_key_set: _KeyValueDictSet[RightKeyT, LeftKeyT] = _KeyValueDictSet()
+
+     def left_keys(self):
+         return self._left_key_to_right_key_set.keys()
+
+     def right_keys(self):
+         return self._right_key_to_left_key_set.keys()
+
+     def add(self, left_key: LeftKeyT, right_key: RightKeyT):
+         self._left_key_to_right_key_set.add(left_key, right_key)
+         self._right_key_to_left_key_set.add(right_key, left_key)
+
+     def remove(self, left_key: LeftKeyT, right_key: RightKeyT):
+         self._left_key_to_right_key_set.remove_value(left_key, right_key)
+         self._right_key_to_left_key_set.remove_value(right_key, left_key)
+
+     def has_left_key(self, left_key: LeftKeyT) -> bool:
+         return left_key in self._left_key_to_right_key_set
+
+     def has_right_key(self, right_key: RightKeyT) -> bool:
+         return right_key in self._right_key_to_left_key_set
+
+     def has_key_pair(self, left_key: LeftKeyT, right_key: RightKeyT) -> bool:
+         return (
+             self.has_left_key(left_key)
+             and self.has_right_key(right_key)
+             and right_key in self._left_key_to_right_key_set.get_values(left_key)
+             and left_key in self._right_key_to_left_key_set.get_values(right_key)
+         )
+
+     def left_key_items(self) -> Iterable[Tuple[LeftKeyT, Set[RightKeyT]]]:
+         return self._left_key_to_right_key_set.items()
+
+     def right_key_items(self) -> Iterable[Tuple[RightKeyT, Set[LeftKeyT]]]:
+         return self._right_key_to_left_key_set.items()
+
+     def get_left_items(self, right_key: RightKeyT) -> Set[LeftKeyT]:
+         if right_key not in self._right_key_to_left_key_set:
+             raise ValueError(f"cannot find {right_key=} in ManyToManyDict")
+
+         return self._right_key_to_left_key_set.get_values(right_key)
+
+     def get_right_items(self, left_key: LeftKeyT) -> Set[RightKeyT]:
+         if left_key not in self._left_key_to_right_key_set:
+             raise ValueError(f"cannot find {left_key=} in ManyToManyDict")
+
+         return self._left_key_to_right_key_set.get_values(left_key)
+
+     def remove_left_key(self, left_key: LeftKeyT) -> Set[RightKeyT]:
+         if left_key not in self._left_key_to_right_key_set:
+             raise KeyError(f"cannot find {left_key=} in ManyToManyDict")
+
+         right_keys = self._left_key_to_right_key_set.remove_key(left_key)
+         for right_key in right_keys:
+             self._right_key_to_left_key_set.remove_value(right_key, left_key)
+
+         return right_keys
+
+     def remove_right_key(self, right_key: RightKeyT) -> Set[LeftKeyT]:
+         if right_key not in self._right_key_to_left_key_set:
+             raise ValueError(f"cannot find {right_key=} in ManyToManyDict")
+
+         left_keys = self._right_key_to_left_key_set.remove_key(right_key)
+         for left_key in left_keys:
+             self._left_key_to_right_key_set.remove_value(left_key, right_key)
+
+         return left_keys
+
+
+ class _KeyValueDictSet(Generic[KeyT, ValueT]):
+     def __init__(self):
+         self._key_to_value_set: Dict[KeyT, Set[ValueT]] = dict()
+
+     def __contains__(self, key) -> bool:
+         return key in self._key_to_value_set
+
+     def keys(self):
+         return self._key_to_value_set.keys()
+
+     def values(self):
+         return self._key_to_value_set.values()
+
+     def items(self):
+         return self._key_to_value_set.items()
+
+     def add(self, key: KeyT, value: ValueT):
+         if key not in self._key_to_value_set:
+             self._key_to_value_set[key] = set()
+
+         self._key_to_value_set[key].add(value)
+
+     def get_values(self, key: KeyT) -> Set[ValueT]:
+         if key not in self._key_to_value_set:
+             raise ValueError(f"cannot find {key=} in KeyValueSet")
+
+         return self._key_to_value_set[key]
+
+     def remove_key(self, key: KeyT) -> Set[ValueT]:
+         if key not in self._key_to_value_set:
+             raise KeyError(f"cannot find {key=} in KeyValueSet")
+
+         values = self._key_to_value_set.pop(key)
+         return values
+
+     def remove_value(self, key: KeyT, value: ValueT):
+         if key not in self._key_to_value_set:
+             raise KeyError(f"cannot find {key=} in KeyValueSet")
+
+         self._key_to_value_set[key].remove(value)
+         if not self._key_to_value_set[key]:
+             self._key_to_value_set.pop(key)
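Keeping the two mirrored _KeyValueDictSet instances in sync is what lets lookups and bulk removals work from either side. A sketch with illustrative task/worker keys:

```python
from scaler.utility.many_to_many_dict import ManyToManyDict

task_to_worker: ManyToManyDict[str, str] = ManyToManyDict()
task_to_worker.add("task-1", "worker-a")
task_to_worker.add("task-1", "worker-b")
task_to_worker.add("task-2", "worker-a")

assert task_to_worker.get_right_items("task-1") == {"worker-a", "worker-b"}
assert task_to_worker.get_left_items("worker-a") == {"task-1", "task-2"}

# removing a left key also scrubs it from every right-key set
task_to_worker.remove_left_key("task-1")
assert not task_to_worker.has_right_key("worker-b")  # "worker-b" had no other tasks
```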
scaler/utility/metadata/__init__.py (file without changes)
scaler/utility/metadata/profile_result.py
@@ -0,0 +1,31 @@
+ import dataclasses
+ import struct
+ from typing import Optional
+
+ from scaler.protocol.python.message import TaskResult
+
+
+ @dataclasses.dataclass
+ class ProfileResult:
+     duration_s: float = dataclasses.field(default=0.0)
+     memory_peak: int = dataclasses.field(default=0)
+     cpu_time_s: float = dataclasses.field(default=0.0)
+
+     FORMAT = "!fQf"  # duration, memory peak, CPU time
+
+     def serialize(self) -> bytes:
+         return struct.pack(self.FORMAT, self.duration_s, self.memory_peak, self.cpu_time_s)
+
+     @staticmethod
+     def deserialize(data: bytes) -> "ProfileResult":
+         return ProfileResult(*struct.unpack(ProfileResult.FORMAT, data))
+
+
+ def retrieve_profiling_result_from_task_result(task_result: TaskResult) -> Optional[ProfileResult]:
+     if task_result.metadata == b"":
+         return None
+
+     try:
+         return ProfileResult.deserialize(task_result.metadata)
+     except struct.error:
+         raise ValueError(f"unexpected metadata value (expected {ProfileResult.__name__}).")