halib 0.2.27__py3-none-any.whl → 0.2.29__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- halib/__init__.py +4 -0
- halib/common/common.py +122 -3
- halib/system/path.py +2 -2
- halib/utils/dict.py +95 -69
- halib/utils/list.py +14 -9
- halib/utils/slack.py +86 -0
- {halib-0.2.27.dist-info → halib-0.2.29.dist-info}/METADATA +46 -3
- {halib-0.2.27.dist-info → halib-0.2.29.dist-info}/RECORD +11 -10
- {halib-0.2.27.dist-info → halib-0.2.29.dist-info}/WHEEL +0 -0
- {halib-0.2.27.dist-info → halib-0.2.29.dist-info}/licenses/LICENSE.txt +0 -0
- {halib-0.2.27.dist-info → halib-0.2.29.dist-info}/top_level.txt +0 -0
halib/__init__.py
CHANGED
@@ -10,6 +10,7 @@ __all__ = [
     "fs",
     "inspect",
     "load_yaml",
+    "log_func",
     "logger",
     "norm_str",
     "now_str",
@@ -17,6 +18,7 @@ __all__ = [
     "omegaconf",
     "OmegaConf",
     "os",
+    "pad_string",
     "pd",
     "plt",
     "pprint_box",
@@ -66,6 +68,8 @@ from .common.common import (
     pprint_local_path,
     pprint_stack_trace,
     tcuda,
+    log_func,
+    pad_string,
 )
 
 # for log
halib/common/common.py
CHANGED
@@ -14,6 +14,10 @@ from pathlib import Path, PureWindowsPath
 from typing import Optional
 from loguru import logger
 
+import functools
+from typing import Callable, List, Literal, Union
+import time
+import math
 
 console = Console()
 
@@ -48,6 +52,78 @@ def now_str(sep_date_time="."):
     return now_string
 
 
+def pad_string(
+    text: str,
+    target_width: Union[int, float] = -1,
+    pad_char: str = ".",
+    pad_sides: List[Literal["left", "right"]] = ["left", "right"],  # type: ignore
+) -> str:
+    """
+    Pads a string to a specific width or a relative multiplier width.
+
+    Args:
+        text: The input string.
+        target_width:
+            - If int (e.g., 20): The exact total length of the resulting string.
+            - If float (e.g., 1.5): Multiplies original length (must be >= 1.0).
+              (e.g., length 10 * 1.5 = target width 15).
+        pad_char: The character to use for padding.
+        pad_sides: A list containing "left", "right", or both.
+    """
+    current_len = len(text)
+
+    # 1. Calculate the final integer target width
+    if isinstance(target_width, float):
+        if target_width < 1.0:
+            raise ValueError(f"Float target_width must be >= 1.0, got {target_width}")
+        # Use math.ceil to ensure we don't under-pad (e.g. 1.5 * 5 = 7.5 -> 8)
+        final_width = math.ceil(current_len * target_width)
+    else:
+        final_width = target_width
+
+    # 2. Return early if no padding needed
+    if current_len >= final_width:
+        return text
+
+    # 3. Calculate total padding needed
+    padding_needed = final_width - current_len
+
+    # CASE 1: Pad Both Sides (Center)
+    if "left" in pad_sides and "right" in pad_sides:
+        left_pad_count = padding_needed // 2
+        right_pad_count = padding_needed - left_pad_count
+        return (pad_char * left_pad_count) + text + (pad_char * right_pad_count)
+
+    # CASE 2: Pad Left Only (Right Align)
+    elif "left" in pad_sides:
+        return (pad_char * padding_needed) + text
+
+    # CASE 3: Pad Right Only (Left Align)
+    elif "right" in pad_sides:
+        return text + (pad_char * padding_needed)
+
+    return text
+
+
+# ==========================================
+# Usage Examples
+# ==========================================
+if __name__ == "__main__":
+    s = "Hello"
+
+    # 1. Default (Both sides / Center)
+    print(f"'{pad_string(s, 11)}'")
+    # Output: "'***Hello***'"
+
+    # 2. Left Only
+    print(f"'{pad_string(s, 10, '-', ['left'])}'")
+    # Output: "'-----Hello'"
+
+    # 3. Right Only
+    print(f"'{pad_string(s, 10, '.', ['right'])}'")
+    # Output: "'Hello.....'"
+
+
 def norm_str(in_str):
     # Replace one or more whitespace characters with a single underscore
     norm_string = re.sub(r"\s+", "_", in_str)
@@ -140,9 +216,12 @@ def pprint_stack_trace(
     msg = DEFAULT_STACK_TRACE_MSG
     logger.opt(exception=e).warning(msg)
     if force_stop:
-        console.rule(
+        console.rule(
+            "[red]Force Stop Triggered in <halib.common.pprint_stack_trace>[/red]"
+        )
         sys.exit(1)
 
+
 def pprint_local_path(
     local_path: str, get_wins_path: bool = False, tag: str = ""
 ) -> str:
@@ -181,20 +260,60 @@ def pprint_local_path(
     return file_uri
 
 
+def log_func(
+    func: Optional[Callable] = None, *, log_time: bool = False, log_args: bool = False
+):
+    """
+    A decorator that logs the start/end of a function.
+    Supports both @log_func and @log_func(log_time=True) usage.
+    """
+    # 1. HANDLE ARGUMENTS: If called as @log_func(log_time=True), func is None.
+    # We return a 'partial' function that remembers the args and waits for the func.
+    if func is None:
+        return functools.partial(log_func, log_time=log_time, log_args=log_args)
+
+    # 2. HANDLE DECORATION: If called as @log_func, func is the actual function.
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        # Safe way to get name (handles partials/lambdas)
+        func_name = getattr(func, "__name__", "Unknown_Func")
+
+        # Note: Ensure 'ConsoleLog' context manager is available in your scope
+        with ConsoleLog(func_name):
+            start = time.perf_counter()
+            try:
+                result = func(*args, **kwargs)
+            finally:
+                # We use finally to ensure logging happens even if func crashes
+                end = time.perf_counter()
+
+                if log_time or log_args:
+
+                    console.print(pad_string(f"Func <{func_name}> summary", 80))
+                    if log_time:
+                        console.print(f"{func_name} took {end - start:.6f} seconds")
+                    if log_args:
+                        console.print(f"Args: {args}, Kwargs: {kwargs}")
+
+            return result
+
+    return wrapper
+
+
 def tcuda():
     NOT_INSTALLED = "Not Installed"
     GPU_AVAILABLE = "GPU(s) Available"
     ls_lib = ["torch", "tensorflow"]
     lib_stats = {lib: NOT_INSTALLED for lib in ls_lib}
     for lib in ls_lib:
-        spec = importlib.util.find_spec(lib)
+        spec = importlib.util.find_spec(lib)  # ty:ignore[possibly-missing-attribute]
         if spec:
             if lib == "torch":
                 import torch
 
                 lib_stats[lib] = str(torch.cuda.device_count()) + " " + GPU_AVAILABLE
             elif lib == "tensorflow":
-                import tensorflow as tf
+                import tensorflow as tf  # type: ignore
 
                 lib_stats[lib] = (
                     str(len(tf.config.list_physical_devices("GPU")))
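
A minimal usage sketch of the new `log_func` decorator and `pad_string` helper (both re-exported from the package root per the `__init__.py` diff above). The `train_one_epoch` function and its arguments are hypothetical placeholders, not part of halib:

```python
from halib import log_func, pad_string  # re-exported in 0.2.29 per the __init__.py diff

@log_func(log_time=True, log_args=True)  # parenthesized form; bare @log_func also works
def train_one_epoch(epoch: int, lr: float = 1e-3) -> float:
    # placeholder for real training work
    return 0.123

train_one_epoch(1, lr=5e-4)
# prints a padded "Func <train_one_epoch> summary" banner, the elapsed time, and the call args

# pad_string on its own: exact width (int) or relative width (float >= 1.0)
print(pad_string("epoch 1", 21, "*"))          # centered to 21 chars with '*'
print(pad_string("loss", 12, ".", ["right"]))  # left-aligned: 'loss........'
print(pad_string("loss", 1.5))                 # padded to ceil(len(text) * 1.5) chars
```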
halib/system/path.py
CHANGED
@@ -21,7 +21,7 @@ def load_pc_meta_info():
     # 2. Locate the file
     csv_path = resources.files(package_name).joinpath(file_name)
     global PC_TO_ABBR, ABBR_DISK_MAP, pc_df
-    pc_df = pd.read_csv(csv_path, sep=';', encoding='utf-8')
+    pc_df = pd.read_csv(csv_path, sep=';', encoding='utf-8')  # ty:ignore[no-matching-overload]
     PC_TO_ABBR = dict(zip(pc_df['pc_name'], pc_df['abbr']))
     ABBR_DISK_MAP = dict(zip(pc_df['abbr'], pc_df['working_disk']))
     # pprint("Loaded PC meta info:")
@@ -57,7 +57,7 @@ def get_working_disk(abbr_disk_map=ABBR_DISK_MAP):
 
 cDisk = get_working_disk()
 
-# ! This
+# ! This function search for full paths in the obj and normalize them according to the current platform and working disk
 # ! E.g: "E:/zdataset/DFire", but working_disk: "D:", current_platform: "windows" => "D:/zdataset/DFire"
 # ! E.g: "E:/zdataset/DFire", but working_disk: "D:", current_platform: "linux" => "/mnt/d/zdataset/DFire"
 def normalize_paths(obj, working_disk=cDisk, current_platform=cPlatform):
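
A short, hypothetical sketch of the behaviour described by the comments above for `normalize_paths` (the function body is not shown in this diff; the config dict, disk, and platform values below are illustrative assumptions):

```python
from halib.system.path import normalize_paths

cfg = {"dataset_dir": "E:/zdataset/DFire", "epochs": 100}

# Per the comments above, a full path on another disk is rewritten for the
# chosen working disk and platform, e.g. "E:/zdataset/DFire" ->
#   "D:/zdataset/DFire"      with working_disk "D:" on windows
#   "/mnt/d/zdataset/DFire"  with working_disk "D:" on linux
normalized_cfg = normalize_paths(cfg, working_disk="D:", current_platform="windows")
```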
halib/utils/dict.py
CHANGED
@@ -1,8 +1,9 @@
-from
-
-from rich.pretty import pprint
+from future.utils.surrogateescape import fn
+import copy
 import json
 import hashlib
+from rich.pretty import pprint
+from typing import Dict, Any, Callable, Optional, List, Tuple
 
 
 class DictUtils:
@@ -134,70 +135,95 @@ class DictUtils:
     # 3. Truncate to desired length
     return full_hash[:length]
 
+    @staticmethod
+    def deep_remove(
+        d: Dict[str, Any],
+        keys_to_remove: List[str],
+        in_place: bool = False,
+        sep: str = ".",
+    ) -> Dict[str, Any]:
+        """
+        Removes keys from a nested dictionary based on a list of dot-notation paths.
+
+        Args:
+            d: The dictionary to filter.
+            keys_to_remove: A list of flattened keys to remove (e.g., ['model.layers.dropout']).
+            in_place: If True, modifies the dictionary directly.
+                If False, creates and modifies a deep copy, leaving the original untouched.
+            sep: Separator used in the dot-notation keys (default: ".").
+
+        Returns:
+            The modified dictionary (either the original object or the new copy).
 
-(old lines 138-203 removed; their content is not rendered in the source diff view)
+        Example:
+            >>> data = {'a': {'b': 1, 'c': 2}}
+            >>> DictUtils.deep_remove(data, ['a.b'], in_place=False)
+            {'a': {'c': 2}}
+        """
+        # 1. Handle the copy logic based on the in_place flag
+        if in_place:
+            target_dict = d
+        else:
+            target_dict = copy.deepcopy(d)
+
+        # 2. Iterate over each dot-notation key we want to delete
+        for flat_key in keys_to_remove:
+            parts = flat_key.split(sep)
+
+            # 3. Traverse to the parent container of the key we want to delete
+            current_level = target_dict
+            parent_found = True
+
+            # Loop through path parts up to the second-to-last item (the parent)
+            for part in parts[:-1]:
+                if isinstance(current_level, dict) and part in current_level:
+                    current_level = current_level[part]
+                else:
+                    # The path doesn't exist in this dict, safely skip deletion
+                    parent_found = False
+                    break
+
+            # 4. Delete the final key (leaf) if the parent was found
+            if parent_found and isinstance(current_level, dict):
+                leaf_key = parts[-1]
+                if leaf_key in current_level:
+                    del current_level[leaf_key]
+
+        return target_dict
+
+    @staticmethod
+    def prune(d: Any, prune_values: Tuple[Any, ...] = (None, {}, [], "")) -> Any:
+        """
+        Recursively removes keys where values match any item in 'prune_values'.
+
+        Args:
+            d: The dictionary or list to clean.
+            prune_values: A tuple of values to be removed.
+                Default is (None, {}, [], "") which removes all empty types.
+                Pass specific values (e.g., ({}, "")) to keep None or [].
+
+        Returns:
+            The cleaned structure.
+        """
+        if isinstance(d, dict):
+            new_dict = {}
+            for k, v in d.items():
+                # 1. Recursively clean children first
+                cleaned_v = DictUtils.prune(v, prune_values)
+
+                # 2. Check if the CLEANED value is in the delete list
+                # We use strict check to ensure we don't delete 0 or False unless requested
+                if cleaned_v not in prune_values:
+                    new_dict[k] = cleaned_v
+            return new_dict
+
+        elif isinstance(d, list):
+            new_list = []
+            for v in d:
+                cleaned_v = DictUtils.prune(v, prune_values)
+                if cleaned_v not in prune_values:
+                    new_list.append(cleaned_v)
+            return new_list
+
+        else:
+            return d
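
A small usage sketch for the new `DictUtils.deep_remove` and `DictUtils.prune` methods; the example config is made up, and the behaviour follows the docstrings above:

```python
from halib.utils.dict import DictUtils

cfg = {
    "model": {"name": "resnet", "dropout": None, "layers": {"dropout": 0.1}},
    "notes": "",
}

# Remove a nested key by its dot-notation path; in_place=False leaves cfg untouched.
trimmed = DictUtils.deep_remove(cfg, ["model.layers.dropout"], in_place=False)
# trimmed == {'model': {'name': 'resnet', 'dropout': None, 'layers': {}}, 'notes': ''}

# Recursively drop empty values (None, {}, [], "") from the result.
clean = DictUtils.prune(trimmed)
# clean == {'model': {'name': 'resnet'}}
```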
halib/utils/list.py
CHANGED
@@ -1,12 +1,17 @@
-(old lines 1-2 removed; content not rendered in the source diff view)
+class ListUtils:
+    """Utility functions for list operations."""
 
+    @staticmethod
+    def subtract(list_a, list_b):
+        return [item for item in list_a if item not in list_b]
 
-(old lines 5-9 removed; content not rendered in the source diff view)
+    @staticmethod
+    def union(list_a, list_b, no_duplicate=False):
+        if no_duplicate:
+            return list(set(list_a) | set(list_b))
+        else:
+            return list_a + list_b
 
-(old lines 11-12 removed; content not rendered in the source diff view)
+    @staticmethod
+    def intersection(list_a, list_b):
+        return list(set(list_a) & set(list_b))
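
The new `ListUtils` helpers in one short sketch (the input lists are made up; note that `union(no_duplicate=True)` and `intersection` go through `set`, so element order is not preserved):

```python
from halib.utils.list import ListUtils

a, b = [1, 2, 3, 3], [3, 4]

ListUtils.subtract(a, b)                  # [1, 2]             - items of a not in b
ListUtils.union(a, b)                     # [1, 2, 3, 3, 3, 4] - simple concatenation
ListUtils.union(a, b, no_duplicate=True)  # e.g. [1, 2, 3, 4]  - set union, order not guaranteed
ListUtils.intersection(a, b)              # [3]
```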
halib/utils/slack.py
ADDED
@@ -0,0 +1,86 @@
+import time
+from slack_sdk import WebClient
+from slack_sdk.errors import SlackApiError
+from rich.pretty import pprint
+
+"""
+Utilities for interacting with Slack for experiment notification via Wandb Logger.
+"""
+class SlackUtils:
+    _instance = None
+
+    def __new__(cls, token=None):
+        """
+        Singleton __new__ method.
+        Ensures only one instance of SlackUtils exists.
+        """
+        if cls._instance is None:
+            if token is None:
+                raise ValueError(
+                    "A Slack Token is required for the first initialization."
+                )
+
+            # Create the instance
+            cls._instance = super(SlackUtils, cls).__new__(cls)
+
+            # Initialize the WebClient only once
+            cls._instance.client = WebClient(token=token)
+            cls._instance.token = token
+
+        return cls._instance
+
+    def clear_channel(self, channel_id, sleep_interval=1.0):
+        """
+        Fetches and deletes all messages in a specified channel.
+        """
+        cursor = None
+        deleted_count = 0
+
+        pprint(f"--- Starting cleanup for Channel ID: {channel_id} ---")
+
+        while True:
+            try:
+                # Fetch history in batches of 100
+                response = self.client.conversations_history(  # ty:ignore[unresolved-attribute]
+                    channel=channel_id, cursor=cursor, limit=100
+                )
+
+                messages = response.get("messages", [])
+
+                if not messages:
+                    pprint("No more messages found to delete.")
+                    break
+
+                for msg in messages:
+                    ts = msg.get("ts")
+
+                    try:
+                        # Attempt delete
+                        self.client.chat_delete(  # ty:ignore[unresolved-attribute]
+                            channel=channel_id, ts=ts
+                        )
+                        pprint(f"Deleted: {ts}")
+                        deleted_count += 1
+
+                        # Rate limit protection (Tier 3 limit)
+                        time.sleep(sleep_interval)
+
+                    except SlackApiError as e:
+                        error_code = e.response["error"]
+                        if error_code == "cant_delete_message":
+                            pprint(f"Skipped (Permission denied): {ts}")
+                        elif error_code == "message_not_found":
+                            pprint(f"Skipped (Already deleted): {ts}")
+                        else:
+                            pprint(f"Error deleting {ts}: {error_code}")
+                # Check for pagination
+                if response["has_more"]:
+                    cursor = response["response_metadata"]["next_cursor"]
+                else:
+                    break
+
+            except SlackApiError as e:
+                print(f"Critical API Error fetching history: {e.response['error']}")
+                break
+
+        print(f"--- Completed. Total messages deleted: {deleted_count} ---")
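
A minimal sketch of how the singleton above is meant to be used; the token and channel ID are placeholders, and `clear_channel` can only delete messages the authenticated bot or user is allowed to delete:

```python
from halib.utils.slack import SlackUtils

# First call must provide a token; later calls return the same instance.
slack = SlackUtils(token="xoxb-your-bot-token")
assert SlackUtils() is slack

# Wipe a channel's history, sleeping 1s between deletions to respect rate limits.
slack.clear_channel("C0123456789", sleep_interval=1.0)
```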
{halib-0.2.27.dist-info → halib-0.2.29.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: halib
-Version: 0.2.27
+Version: 0.2.29
 Summary: Small library for common tasks
 Author: Hoang Van Ha
 Author-email: hoangvanhauit@gmail.com
@@ -42,6 +42,7 @@ Requires-Dist: tube_dl
 Requires-Dist: wandb
 Requires-Dist: ipynbname
 Requires-Dist: typed-argument-parser
+Requires-Dist: slack_sdk
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
@@ -56,25 +57,38 @@ Dynamic: summary
 
 ## v0.2.x (Experiment & Core Updates)
 
-### **v0.2.
+### **v0.2.29**
+
+- ✨ **New Feature:** add `common.common.log_func` as a decorator to log function entry and exit, with optional execution time and arguments.
+
+- 🚀 **Improvement:** enhance `utils.dict.DictUtils` with `deep_remove` and `prune` functions
+
+### **v0.2.28**
+
+- ✨ **New Feature:** Implement `utils.slack.SlackUtils` class for managing Slack channel message deletion
+
 - ✨ **New Feature:** Added `utils.dict.DictUtils` for advanced dictionary manipulations (merging, filtering, transforming).
 
 - ✨ **New Feature:** Added `common.common.pprint_stack_trace` to print stack traces with optional custom messages and force stop capability.
 
-- 🚀 **Improvement:**
+- 🚀 **Improvement:** `exp.perf.profiler` - allow exporting the _report dict_ as csv files for further analysis
 
 ### **v0.2.19**
+
 - ✨ **New Feature:** Added `exp.core.param_gen` to facilitate fast generation of parameter combination sweeps (grid search) using YAML configurations.
 
 ### **v0.2.17**
+
 - 🚀 **Improvement:** Updated `exp.perf.profiler` with an `enabled` flag for dynamic toggling.
 - 🚀 **Improvement:** Added a `measure` context manager to simplify performance measuring of code blocks.
 
 ### **v0.2.13**
+
 - ♻️ **Refactor:** Major reorganization of packages. Renamed `research` package to `exp` (Experiment Management).
 - 🚀 **Improvement:** Updated `exp/perfcalc.py` to allow saving computed performance metrics to CSV without explicitly calling `calc_perfs`.
 
 ### **v0.2.1**
+
 - ✨ **New Feature:** Added `eval_exp` method to `exp/base_exp` for running evaluations (e.g., model testing) after experiments conclude.
 
 ---
@@ -82,15 +96,19 @@ Dynamic: summary
 ## v0.1.9x (Visualization & Generators)
 
 ### **v0.1.99**
+
 - ✨ **New Feature:** Added `gen_ipynb_name` to `filetype/ipynb`. Generates filenames based on the current notebook name with optional timestamps.
 
 ### **v0.1.96**
+
 - ✨ **New Feature:** Added `PlotHelper` class in `research/plot` for plotting training history and image grids (dataset samples or model outputs).
 
 ### **v0.1.91**
+
 - ✨ **New Feature:** Added `ParamGen` class to `research/param_gen` for parsing YAML files into parameter lists for hyperparameter searches.
 
 ### **v0.1.90**
+
 - ✨ **New Feature:** Added `zProfiler` class to `research/profiler` for measuring context/step execution time, supporting dynamic color scales in plots.
 
 ---
@@ -98,26 +116,33 @@ Dynamic: summary
 ## v0.1.5x - v0.1.7x (Infrastructure & Utilities)
 
 ### **v0.1.77**
+
 - ✨ **New Feature:** Added `BaseExp` class in `research/base_exp` to handle common experiment tasks (performance calculation, result saving).
 
 ### **v0.1.67**
+
 - 🔧 **Maintenance:** Switched to **uv** for virtual environment management.
 - 🚀 **Improvement:** Updated `research/perfcalc` to support both `torchmetrics` and custom metrics.
 
 ### **v0.1.61**
+
 - ✨ **New Feature:** Added `VideoUtils` (`util/video`) for common video handling tasks.
 - ✨ **New Feature:** Added `GPUMonitor` (`util/gpu_mon`) for tracking GPU usage and performance.
 
 ### **v0.1.59**
+
 - 🔨 **Architecture:** Added `util/perfcalc` abstract base class. This requires implementation of specific performance calculation logic.
 
 ### **v0.1.55**
+
 - ✨ **New Feature:** Added `util/dataclass_util` for dynamic creation of `dataclass` objects from dictionaries or YAML (supports nested structures).
 
 ### **v0.1.52**
+
 - ✨ **New Feature:** Added `research/perftb` module for managing experiment performance tables (filtering by dataset, metric, etc.).
 
 ### **v0.1.50**
+
 - ✨ **New Feature:** Added `pprint_local_path` to print clickable file URIs for local paths.
 - ✨ **New Feature:** Added `research` package containing `benchquery` for dataframe benchmarking.
 - ✨ **New Feature:** Added `wandb` module for offline syncing and batch clearing of Weights & Biases runs.
@@ -127,19 +152,24 @@ Dynamic: summary
 ## v0.1.4x (Display & formatting)
 
 ### **v0.1.47**
+
 - ✨ **New Feature:** Added `pprint_box` to print objects or strings inside a decorative box frame.
 
 ### **v0.1.46**
+
 - 🐛 **Fix:** Filtered `UserWarning: Unable to import Axes3D`.
 - 🚀 **Improvement:** Added `auto_wrap_text` to `fn_display_df` to prevent long text overflow in tables.
 
 ### **v0.1.42**
+
 - ✨ **New Feature:** Added `rich_color.py` wrapper for basic color lists.
 
 ### **v0.1.41**
+
 - ✨ **New Feature:** Added `rich_color.py` to support rich color information (palettes, strings) using the `rich` library.
 
 ### **v0.1.40**
+
 - 🚀 **Improvement:** Updated `csvfile.py` to use `itables` and `pygwalker` for interactive dataframe display in Jupyter notebooks.
 
 ---
@@ -147,12 +177,15 @@ Dynamic: summary
 ## v0.1.3x (Data & Loading)
 
 ### **v0.1.38**
+
 - ✨ **New Feature:** Added `torchloader.py` to search for optimal `DataLoader` configurations (num_workers, batch_size, pin_memory).
 
 ### **v0.1.37**
+
 - ✨ **New Feature:** Added `dataset.py` for splitting classification datasets into train/val/test sets.
 
 ### **v0.1.33**
+
 - ✨ **New Feature:** Added `plot.py` for plotting Deep Learning training history (accuracy/loss) using `seaborn` and `matplotlib`.
 
 ---
@@ -160,35 +193,45 @@ Dynamic: summary
 ## v0.1.0 - v0.1.2x (Early Utilities)
 
 ### **v0.1.29**
+
 - 🐛 **Fix:** Pinned `kaleido==0.1.*` for `tele_noti` as version `0.2.*` caused image generation hangs.
 
 ### **v0.1.24**
+
 - ♻️ **Refactor:** Renamed `sys` module to `system` to avoid conflicts with Python's built-in `sys`.
 - ✨ **New Feature:** Added `tele_noti` module for Telegram notifications regarding training progress.
 
 ### **v0.1.22**
+
 - ✨ **New Feature:** Added `cuda.py` to check CUDA availability for both PyTorch and TensorFlow.
 
 ### **v0.1.21**
+
 - ✨ **New Feature:** Added YAML inheritance and overriding support using `networkx` and `omegaconf`.
 
 ### **v0.1.15**
+
 - ✨ **New Feature:** Added common logging library and `@console_log` decorator for function tracing.
 
 ### **v0.1.10**
+
 - 🐛 **Fix:** Fixed typo `is_exit` -> `is_exist` in `filesys`.
 - 🚀 **Improvement:** Updated `gdrive` to support uploading to specific folders and returning direct shareable links.
 
 ### **v0.1.9**
+
 - 🔧 **Maintenance:** Added `requirements.txt`.
 
 ### **v0.1.6 - v0.1.8**
+
 - 🚀 **Performance:** Optimized table insertion by using an in-memory `row_pool_dict` before committing to the DataFrame.
 - ✨ **New Feature:** Added `DFCreator` for manipulating DataFrames (create, insert, display, save).
 
 ### **v0.1.4 - v0.1.5**
+
 - ✨ **New Feature:** Added `cmd` module.
 - ✨ **New Feature:** Support for creating Bitbucket Projects from templates.
 
 ### **v0.1.2**
+
 - ✨ **New Feature:** Added support for uploading local files to Google Drive.
{halib-0.2.27.dist-info → halib-0.2.29.dist-info}/RECORD
CHANGED

@@ -1,4 +1,4 @@
-halib/__init__.py,sha256=
+halib/__init__.py,sha256=WIdY-2inwVQ73ZbBnv4XPaxwOJicoBPTkGisHso5mnE,1778
 halib/common.py,sha256=9hn-IXOlGZODoBHy8U2A0aLgmPEnTeQjbzAVGwXAjwo,4242
 halib/csvfile.py,sha256=Eoeni0NIbNG3mB5ESWAvNwhJxOjmCaPd1qqYRHImbvk,1567
 halib/cuda.py,sha256=1bvtBY8QvTWdLaxalzK9wqXPl0Ft3AfhcrebupxGzEA,1010
@@ -17,7 +17,7 @@ halib/textfile.py,sha256=EhVFrit-nRBJx18e6rtIqcE1cSbgsLnMXe_kdhi1EPI,399
 halib/torchloader.py,sha256=-q9YE-AoHZE1xQX2dgNxdqtucEXYs4sQ22WXdl6EGfI,6500
 halib/videofile.py,sha256=NTLTZ-j6YD47duw2LN2p-lDQDglYFP1LpEU_0gzHLdI,4737
 halib/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-halib/common/common.py,sha256=
+halib/common/common.py,sha256=Ta_4w1k1RUnIXMxvKhsrE5TadRpB9rwqf5qEZcU1oPM,10046
 halib/common/rich_color.py,sha256=tyK5fl3Dtv1tKsfFzt_5Rco4Fj72QliA-w5aGXaVuqQ,6392
 halib/exp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 halib/exp/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -92,18 +92,19 @@ halib/system/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 halib/system/_list_pc.csv,sha256=r8RwxDWYEeNkPCQBs8dOC8cWgBpa3OULZobwb1feuWg,172
 halib/system/cmd.py,sha256=b2x7JPcNnFjLGheIESVYvqAb-w2UwBM1PAwYxMZ5YjA,228
 halib/system/filesys.py,sha256=102J2fkQhmH1_-HQVy2FQ4NOU8LTjMWV3hToT_APtq8,4401
-halib/system/path.py,sha256=
+halib/system/path.py,sha256=ewiHI76SLFBG5NnlihLpxBbOEDfHibRwKTcLMjEz6Hw,3728
 halib/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 halib/utils/dataclass_util.py,sha256=rj2IMLlUzbm2OlF5_B2dRTk9njZOaF7tTjYkOsq8uLY,1416
-halib/utils/dict.py,sha256=
+halib/utils/dict.py,sha256=Mag0G3k9KG75ZKnX7bL2Ga4sBxs-icaoX81JZGe-Emw,8630
 halib/utils/dict_op.py,sha256=wYE6Iw-_CnCWdMg9tpJ2Y2-e2ESkW9FxmdBkZkbUh80,299
 halib/utils/gpu_mon.py,sha256=vD41_ZnmPLKguuq9X44SB_vwd9JrblO4BDzHLXZhhFY,2233
-halib/utils/list.py,sha256=
+halib/utils/list.py,sha256=bbey9_0IaMXnHx1pudv3C3_WU9uFQEQ5qHPklSN-7o0,498
 halib/utils/listop.py,sha256=Vpa8_2fI0wySpB2-8sfTBkyi_A4FhoFVVvFiuvW8N64,339
+halib/utils/slack.py,sha256=2ugWE_eJ0s479ObACJbx7iEu3kjMPD4Rt2hEwuMpuNQ,3099
 halib/utils/tele_noti.py,sha256=-4WXZelCA4W9BroapkRyIdUu9cUVrcJJhegnMs_WpGU,5928
 halib/utils/video.py,sha256=zLoj5EHk4SmP9OnoHjO8mLbzPdtq6gQPzTQisOEDdO8,3261
-halib-0.2.
-halib-0.2.
-halib-0.2.
-halib-0.2.
-halib-0.2.
+halib-0.2.29.dist-info/licenses/LICENSE.txt,sha256=qZssdna4aETiR8znYsShUjidu-U4jUT9Q-EWNlZ9yBQ,1100
+halib-0.2.29.dist-info/METADATA,sha256=EtgQqtzHJW2aM8matCUzWUZLpc74XKWj85KeOaIarlw,8174
+halib-0.2.29.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+halib-0.2.29.dist-info/top_level.txt,sha256=7AD6PLaQTreE0Fn44mdZsoHBe_Zdd7GUmjsWPyQ7I-k,6
+halib-0.2.29.dist-info/RECORD,,
{halib-0.2.27.dist-info → halib-0.2.29.dist-info}/WHEEL
File without changes

{halib-0.2.27.dist-info → halib-0.2.29.dist-info}/licenses/LICENSE.txt
File without changes

{halib-0.2.27.dist-info → halib-0.2.29.dist-info}/top_level.txt
File without changes