kevin-toolbox-dev 1.4.3__py3-none-any.whl → 1.4.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
kevin_toolbox/__init__.py CHANGED
@@ -1,4 +1,4 @@
- __version__ = "1.4.3"
+ __version__ = "1.4.5"
 
 
  import os
@@ -12,5 +12,5 @@ os.system(
  os.system(
  f'python {os.path.split(__file__)[0]}/env_info/check_validity_and_uninstall.py '
  f'--package_name kevin-toolbox-dev '
- f'--expiration_timestamp 1749719201 --verbose 0'
+ f'--expiration_timestamp 1750434037 --verbose 0'
  )
@@ -38,9 +38,9 @@ def write_json(content, file_path, sort_keys=False, converters=None, b_use_sugge
  content = json.dumps(content, indent=4, ensure_ascii=False, sort_keys=sort_keys)
 
  if file_path is not None:
- file_path = os.path.abspath(file_path)
+ file_path = os.path.abspath(os.path.expanduser(file_path))
  os.makedirs(os.path.dirname(file_path), exist_ok=True)
- with open(file_path, 'w') as f:
+ with open(file_path, 'w', encoding="utf-8") as f:
  f.write(content)
  else:
  return content
kevin_toolbox/env_info/test/test_variable_.py ADDED
@@ -0,0 +1,50 @@
+ import os
+ import pytest
+ from kevin_toolbox.env_info.variable_ import Env_Vars_Parser
+ from kevin_toolbox.patches.for_test import check_consistency
+
+
+ @pytest.mark.parametrize(
+     "input_text, expected",
+     [
+         # the ${<cfg_name>:<var_name>} case
+         (
+             "666/123${SYS:HOME}/afasf/${/xxx.../xxx.json:111:222}336",
+             ["666/123", ("SYS", [':'], ['HOME']), "/afasf/", ("/xxx.../xxx.json", [':', ':'], ['111', '222']),
+              "336"]
+         ),
+         # mixed ${<cfg_name>} and ${:<var_name>} case
+         (
+             "start${CFG}middle${:VAR}end",
+             ["start", ("CFG", [], []), "middle", ('', [':'], ['VAR']), "end"]
+         ),
+         (
+             "${:VAR}",
+             [('', [':'], ['VAR'])]
+         ),
+         (
+             "${CFG}",
+             [("CFG", [], [])]
+         ),
+         (
+             "{:VAR}",
+             ["{:VAR}"]
+         ),
+     ]
+ )
+ def test_split_string_in_env_vars_parser(input_text, expected):
+     result = Env_Vars_Parser.split_string(input_text)
+     check_consistency(result, expected)
+
+
+ def test_env_vars_parser_0():
+     env_cfg_file = os.path.expanduser("~/.kvt_cfg/.temp.json")
+     from kevin_toolbox.data_flow.file import json_
+     json_.write(content={"dataset_dir": ["~/data", "~/dataset"], "version": "001"}, file_path=env_cfg_file)
+     input_text = "/root/${KVT_TEMP:dataset_dir@1}/${KVT_TEMP:version}/${HOME}/${SYS:HOME}"
+     expected = "/".join(["/root/~/dataset/001", ] + [os.path.expanduser("~")] * 2)
+
+     #
+     parser = Env_Vars_Parser()
+     result = parser(input_text)
+     check_consistency(expected, result)
kevin_toolbox/env_info/variable_/__init__.py ADDED
@@ -0,0 +1,2 @@
+ from .env_vars_parser import Env_Vars_Parser
+ env_vars_parser = Env_Vars_Parser()
kevin_toolbox/env_info/variable_/env_vars_parser.py ADDED
@@ -0,0 +1,88 @@
+ import os
+ import re
+ from kevin_toolbox.data_flow.file import json_
+
+
+ class Env_Vars_Parser:
+     """
+         Parse and substitute environment variables given in ${} form inside a string
+             The following forms are supported:
+                 - "${HOME}"                     the home directory
+                 - "${SYS:<var_name>}"           any other system environment variable
+                         (on linux the current environment variables can be listed with the env command;
+                         for example, the home directory can also be written as ${SYS:HOME})
+                 - "${KVT_XXX<ndl_name>}"        read the variable from the config file ~/.kvt_cfg/.xxx.json
+                         (xxx is automatically converted to lower case)
+                         The config file is expected to be an ndl structure; for example, when the config file
+                         ~/.kvt_cfg/.ndl.json holds:
+                             {"dataset_dir":["~/data", "~/dataset"], ...}
+                         the second path under dataset_dir can be referred to as ${KVT_NDL:dataset_dir@1}
+                 - "${/xxx.../xxx.json<ndl_name>}"   read the variable from the config file at the given path /xxx.../xxx.json
+     """
+
+     def __init__(self, home_dir=None):
+         self.cfg_s = dict(
+             SYS=dict(os.environ),
+             HOME=home_dir if home_dir is not None else os.path.expanduser("~")
+         )
+
+     def __call__(self, *args, **kwargs):
+         return self.parse(*args, **kwargs)
+
+     def parse(self, text):
+         """
+             Parse and substitute
+         """
+         temp_ls = []
+         for it in self.split_string(text=text):
+             if isinstance(it, str):
+                 temp_ls.append(it)
+                 continue
+             root_node, method_ls, node_ls = it
+             if root_node not in self.cfg_s:
+                 try:
+                     if root_node.startswith("KVT_"):
+                         t0, t1 = root_node.lower().split("_", 1)
+                         root_node = os.path.expanduser(f'~/.{t0}_cfg/.{t1}.json')
+                     assert os.path.isfile(root_node), f'file not exist: {root_node}'
+                     cfg = json_.read(file_path=root_node, b_use_suggested_converter=True)
+                     self.cfg_s[root_node] = cfg
+                 except Exception as e:
+                     raise ValueError(f"invalid cfg_name: {root_node}, because: {e}")
+             cfg = self.cfg_s.get(root_node, None)
+             if cfg is None:
+                 raise ValueError(f"invalid cfg_name: {root_node}")
+             #
+             from kevin_toolbox.nested_dict_list import get_value
+             temp_ls.append(get_value(var=cfg, name=it))
+
+         return "".join([f'{i}' for i in temp_ls])
+
+     @staticmethod
+     def split_string(text):
+         """
+             Split out the ${<cfg_name>} parts of a string
+                 For example, "666/123${SYS:HOME}/afasf/${/xxx.../xxx.json:111:222}336"
+                 should be split into ["666/123", ("SYS:HOME", ), "/afasf/", ("/xxx.../xxx.json:111:222", ), "336"],
+                 and the tuple parts are then interpreted with ndl.name_handler.parse_name
+         """
+         from kevin_toolbox.nested_dict_list.name_handler import parse_name
+         pattern = r'\$\{([^}]+)\}'
+         matches = re.finditer(pattern, text)
+
+         result = []
+         last_end = 0
+
+         for match in matches:
+             start = match.start()
+             if start > last_end:
+                 result.append(text[last_end:start])
+             result.append(parse_name(name=match.group(1)))
+             last_end = match.end()
+
+         if last_end < len(text):
+             result.append(text[last_end:])
+
+         return result
+
+
+ if __name__ == '__main__':
+     env_vars_parser = Env_Vars_Parser()
+     print(env_vars_parser.split_string("666/123${:VAR}/afasf/${/xxx.../xxx.json:111:222}336"))
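A minimal usage sketch of the Env_Vars_Parser module added above (not part of the diff); it assumes a POSIX-like system where the HOME environment variable is set, and the paths are purely illustrative:

```python
from kevin_toolbox.env_info.variable_ import Env_Vars_Parser, env_vars_parser

parser = Env_Vars_Parser()
# "${HOME}" resolves against the home_dir passed to __init__ (default: os.path.expanduser("~")),
# while "${SYS:<var_name>}" is looked up in a snapshot of os.environ taken at construction time.
print(parser("${HOME}/data"))               # e.g. "/home/kevin/data"
print(env_vars_parser("${SYS:HOME}/data"))  # same result via the module-level default instance
```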
@@ -7,7 +7,7 @@ def get_value(var, name, b_pop=False, **kwargs):
 
  Parameters:
  var: any variable that supports reading values by index
- name: <str> the name
+ name: <str/parsed_name> the name
  see the function name_handler.parse_name() for a detailed description of name
  for example, given var=dict(acc=[0.66,0.78,0.99]), to read var["acc"][1] => 0.78 the name can be written as:
  ":acc@1" or "|acc|1", etc.
@@ -19,7 +19,11 @@ def get_value(var, name, b_pop=False, **kwargs):
  - not set (default). An error is raised when the value cannot be fetched.
  - set to any value. That value is returned when fetching fails.
  """
- _, method_ls, node_ls = parse_name(name=name, b_de_escape_node=True)
+ if isinstance(name, (tuple, list,)):
+     assert len(name) == 3, f'invalid parsed name {name}'
+     _, method_ls, node_ls = name
+ else:
+     _, method_ls, node_ls = parse_name(name=name, b_de_escape_node=True)
 
  try:
  pre, cur = None, var
@@ -46,3 +50,9 @@ def get_value(var, name, b_pop=False, **kwargs):
  raise IndexError(f'invalid name {name}')
 
  return cur
+
+
+ if __name__ == "__main__":
+     var_ = dict(acc=[0.66, 0.78, 0.99])
+     print(get_value(var_, ''))
+     print(get_value(var_, ['', [], []]))
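For orientation, a small sketch (not part of the diff) of the two equivalent call forms that the change above enables; the sample data is taken from the docstring example:

```python
from kevin_toolbox.nested_dict_list import get_value

var = dict(acc=[0.66, 0.78, 0.99])
print(get_value(var, ":acc@1"))                        # string name -> 0.78
print(get_value(var, ("", [":", "@"], ["acc", "1"])))  # pre-parsed (root_node, method_ls, node_ls) -> 0.78
# the pre-parsed form skips the name_handler.parse_name() step, which is why the
# release notes describe it as the more efficient, recommended form
```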
@@ -1,9 +1,9 @@
  import os
- import time
+ import tempfile
  from kevin_toolbox.patches import for_os
  from kevin_toolbox.data_flow.file import json_
  import kevin_toolbox.nested_dict_list as ndl
- import tempfile
+ from kevin_toolbox.env_info.variable_ import env_vars_parser
 
 
  def read(input_path, **kwargs):
@@ -13,8 +13,6 @@ def read(input_path, **kwargs):
  Parameters:
  input_path: <path> a folder or a .tar file; see write() for the exact structure
  """
- from kevin_toolbox.nested_dict_list.serializer.variable import SERIALIZER_BACKEND
-
  assert os.path.exists(input_path)
 
  with tempfile.TemporaryDirectory(dir=os.path.dirname(input_path)) as temp_dir:
@@ -63,7 +61,10 @@ def _read_unpacked_ndl(input_path, **kwargs):
  for name in processed_nodes:
  value = ndl.get_value(var=var, name=name)
  if isinstance(value, (dict,)) and "backend" in value and "name" in value:
- bk = SERIALIZER_BACKEND.get(name=value.pop("backend"))(folder=os.path.join(input_path, "nodes"))
+ nodes_dir = env_vars_parser(value.pop("nodes_dir")) if "nodes_dir" in value else os.path.join(input_path,
+                                                                                               "nodes")
+ assert os.path.exists(nodes_dir), f"nodes_dir {nodes_dir} does not exist"
+ bk = SERIALIZER_BACKEND.get(name=value.pop("backend"))(folder=nodes_dir)
  ndl.set_value(var=var, name=name, value=bk.read(**value))
 
  #
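For orientation, a sketch (not part of the diff; field values are illustrative) of the kind of leaf record the branch above now consumes: a serialized node may carry its own "nodes_dir", and that path is passed through env_vars_parser before use, so it may contain ${...} references:

```python
# hypothetical serialized leaf inside the ndl structure; only the keys
# "backend", "name" and the optional "nodes_dir" come from the diff above
leaf_record = {
    "backend": ":numpy:npy",             # which serializer backend wrote the node
    "name": "acc",                       # remaining keys are forwarded to backend.read(**value)
    "nodes_dir": "${HOME}/shared_nodes", # resolved via env_vars_parser at read time
}
```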
@@ -7,6 +7,7 @@ from kevin_toolbox.data_flow.file import json_
  from kevin_toolbox.patches import for_os
  import kevin_toolbox.nested_dict_list as ndl
  from kevin_toolbox.nested_dict_list.traverse import Traversal_Mode
+ from kevin_toolbox.env_info.variable_ import env_vars_parser
  from .enum_variable import Strictness_Level
  from .saved_node_name_builder import Saved_Node_Name_Builder
 
@@ -32,7 +33,17 @@ def write(var, output_dir, settings=None, traversal_mode=Traversal_Mode.BFS, b_p
  var: <nested dict list>
  settings: <list of dict> specifies how different nodes or parts are to be handled
  its structure is:
- [{"match_cond": <match condition>, "backend": <serialization backend>, "traversal_mode": <traversal mode>}, ...]
+ [
+     {
+         "match_cond": <match condition>,
+         "backend": <serialization backend>,
+         "traversal_mode": <traversal mode>,
+         ("nodes_dir": <directory the nodes are saved to>,
+          "saved_node_name_format": <naming scheme of the node files/folders under the nodes directory>)
+     },
+     ...
+ ]
+ A nodes_dir and saved_node_name_format may be given per setting; if omitted, the defaults described below are used.
  <match condition> supports the following 4 forms:
  - "<level>..." matches nodes of the given level, e.g. "<level>0" is the root node and "<level>-1" is all leaf nodes
  - "<node>name" matches the node with the given name
@@ -85,7 +96,9 @@ def write(var, output_dir, settings=None, traversal_mode=Traversal_Mode.BFS, b_p
  - "low" / Strictness_Level.IGNORE_FAILURE   the match may be incomplete, or some nodes may still fail to be
  written after trying all of the matched backends
  the default is "normal"
- saved_node_name_format: <str> naming scheme of the node files/folders under the nodes/ directory.
+ nodes_dir: <path> directory the node contents are saved to
+     defaults to <output_dir>/nodes
+ saved_node_name_format: <str> naming scheme of the node files/folders under the nodes directory.
  its basic structure is: '{<part_0>}...{<part_1>}...'
  where each {} is filled in automatically according to the part type. The following options are currently supported:
  - "raw_name" the name of the node at that position.
@@ -132,7 +145,8 @@ def write(var, output_dir, settings=None, traversal_mode=Traversal_Mode.BFS, b_p
  var = value_parser.replace_identical_with_reference(var=var, flag="same", b_reverse=False)
  if settings is None:
  settings = [{"match_cond": "<level>-1", "backend": (":skip:simple", ":numpy:npy", ":torch:tensor", ":pickle")}]
- snn_builder = Saved_Node_Name_Builder(format_=saved_node_name_format)
+ default_snn_builder = Saved_Node_Name_Builder(format_=saved_node_name_format)
+ default_nodes_dir = os.path.join(temp_output_dir, "nodes")
 
  # build processed_s
  # to avoid handling nodes/structures repeatedly, first build processed_s with a structure similar to var to record the processing progress.
@@ -154,13 +168,20 @@ def write(var, output_dir, settings=None, traversal_mode=Traversal_Mode.BFS, b_p
  if isinstance(setting["match_cond"], str) and setting["match_cond"].startswith("<eval>"):
  setting["match_cond"] = eval(setting["match_cond"][6:])
  assert callable(setting["match_cond"]) or isinstance(setting["match_cond"], str)
+ # nodes_dir = env_vars_parser(value.pop("nodes_dir")) if "nodes_dir" in value else os.path.join(input_path,
+ #                                                                                               "nodes")
+ # assert os.path.exists(nodes_dir), f"nodes_dir {nodes_dir} does not exist"
  # backend
  backend_name_ls = setting["backend"] if isinstance(setting["backend"], (list, tuple)) else [setting["backend"]]
+ nodes_dir = env_vars_parser(setting["nodes_dir"]) if "nodes_dir" in setting else default_nodes_dir
  for i in backend_name_ls:
  if i not in backend_s:
- backend_s[i] = SERIALIZER_BACKEND.get(name=i)(folder=os.path.join(temp_output_dir, "nodes"))
+ backend_s[i] = SERIALIZER_BACKEND.get(name=i)(folder=nodes_dir)
  #
  t_mode = Traversal_Mode(setting.get("traversal_mode", traversal_mode))
+ # snn_builder
+ snn_builder = Saved_Node_Name_Builder(
+     format_=setting["saved_node_name_format"]) if "saved_node_name_format" in setting else default_snn_builder
  # _process and paras
  if callable(setting["match_cond"]):
  if t_mode in (Traversal_Mode.DFS_PRE_ORDER, Traversal_Mode.BFS):
@@ -181,7 +202,7 @@ def write(var, output_dir, settings=None, traversal_mode=Traversal_Mode.BFS, b_p
  # print(processed_s)
  # print(f'backend: {i}')
  _process(backend=backend_s[i], strictness_level=strictness_level, processed_s=processed_s,
- snn_builder=snn_builder, **paras)
+ snn_builder=snn_builder, b_record_nodes_dir=nodes_dir != default_nodes_dir, **paras)
  if "_hook_for_debug" in kwargs:
  kwargs["_hook_for_debug"]["processed"].append([i, ndl.copy_(var=processed_s, b_deepcopy=True)])
 
@@ -226,7 +247,7 @@ def write(var, output_dir, settings=None, traversal_mode=Traversal_Mode.BFS, b_p
  except:
  count += 1
  time.sleep(0.5)
- if not os.path.isfile(tgt_path):
+ if not os.path.exists(tgt_path):
  for_os.copy(src=src_path, dst=tgt_path, remove_dst_if_exists=True)
 
 
@@ -246,13 +267,29 @@ def _judge_processed_or_not(processed_s, name):
  return b_processed
 
 
- def _process_for_level(var, processed_s, processed_s_bak, level, backend, strictness_level, snn_builder):
+ def _process_for_level(var, processed_s, processed_s_bak, level, backend, strictness_level, snn_builder,
+                        b_record_nodes_dir):
  for name, _ in ndl.get_nodes(var=processed_s_bak, level=level, b_strict=True):
  _process_for_name(var=var, processed_s=processed_s, name=name, backend=backend,
- strictness_level=strictness_level, snn_builder=snn_builder)
+ strictness_level=strictness_level, snn_builder=snn_builder,
+ b_record_nodes_dir=b_record_nodes_dir)
 
 
- def _process_for_name(var, processed_s, name, backend, strictness_level, snn_builder):
+ def _write_by_backend(backend, snn_builder, raw_name, value, strictness_level, b_record_nodes_dir):
+     snn_name = snn_builder(name=raw_name, value=value)
+     try:
+         res = backend.write(name=snn_name, var=value)
+     except:
+         assert strictness_level in (Strictness_Level.IGNORE_FAILURE, Strictness_Level.COMPATIBLE), \
+             f'An error occurred when node {snn_name} was saved using the first matched backend {backend}'
+         return False, None  # b_success, res
+     if b_record_nodes_dir and isinstance(res, (dict,)) and res is not value:
+         # record where the node was saved
+         res["nodes_dir"] = backend.paras["folder"]
+     return True, res
+
+
+ def _process_for_name(var, processed_s, name, backend, strictness_level, snn_builder, b_record_nodes_dir):
  if _judge_processed_or_not(processed_s=processed_s, name=name) is True:
  # has been processed
  return
@@ -262,18 +299,15 @@ def _process_for_name(var, processed_s, name, backend, strictness_level, snn_bui
  return
 
  # write by backend
- snn_name = snn_builder(name=name, value=value)
- try:
- res = backend.write(name=snn_name, var=value)
- except:
- assert strictness_level in (Strictness_Level.IGNORE_FAILURE, Strictness_Level.COMPATIBLE), \
- f'An error occurred when node {name} was saved using the first matched backend {backend}'
+ b_success, res = _write_by_backend(backend, snn_builder, name, value, strictness_level, b_record_nodes_dir)
+ if not b_success:
  return
  ndl.set_value(var=processed_s, name=name, value=True, b_force=False)
  ndl.set_value(var=var, name=name, value=res, b_force=False)
 
 
- def _process_from_top_to_down(var, processed_s, match_cond, backend, traversal_mode, strictness_level, snn_builder):
+ def _process_from_top_to_down(var, processed_s, match_cond, backend, traversal_mode, strictness_level, snn_builder,
+                               b_record_nodes_dir):
  def match_cond_(parent_type, idx, value):
  nonlocal match_cond, processed_s
 
@@ -288,12 +322,8 @@ def _process_from_top_to_down(var, processed_s, match_cond, backend, traversal_m
  nonlocal processed_s, backend, strictness_level
 
  # write by backend
- snn_name = snn_builder(name=idx, value=value)
- try:
- res = backend.write(name=snn_name, var=value)
- except:
- assert strictness_level in (Strictness_Level.IGNORE_FAILURE, Strictness_Level.COMPATIBLE), \
- f'An error occurred when node {name} was saved using the first matched backend {backend}'
+ b_success, res = _write_by_backend(backend, snn_builder, idx, value, strictness_level, b_record_nodes_dir)
+ if not b_success:
  return value
  ndl.set_value(var=processed_s, name=idx, value=True, b_force=True)
  return res
@@ -302,7 +332,8 @@ def _process_from_top_to_down(var, processed_s, match_cond, backend, traversal_m
  b_use_name_as_idx=True, traversal_mode=traversal_mode, b_traverse_matched_element=False)
 
 
- def _process_from_down_to_top(var, processed_s, match_cond, backend, traversal_mode, strictness_level, snn_builder):
+ def _process_from_down_to_top(var, processed_s, match_cond, backend, traversal_mode, strictness_level, snn_builder,
+                               b_record_nodes_dir):
  processed_s_raw, processed_s = processed_s, ndl.copy_(var=processed_s, b_deepcopy=True)
 
  def match_cond_(parent_type, idx, value):
@@ -320,12 +351,8 @@ def _process_from_down_to_top(var, processed_s, match_cond, backend, traversal_m
  nonlocal processed_s, backend, processed_s_raw, strictness_level
 
  # write by backend
- snn_name = snn_builder(name=idx, value=value)
- try:
- res = backend.write(name=snn_name, var=value)
- except:
- assert strictness_level in (Strictness_Level.IGNORE_FAILURE, Strictness_Level.COMPATIBLE), \
- f'An error occurred when node {name} was saved using the first matched backend {backend}'
+ b_success, res = _write_by_backend(backend, snn_builder, idx, value, strictness_level, b_record_nodes_dir)
+ if not b_success:
  return value
  ndl.set_value(var=processed_s, name=idx, value=True, b_force=True)
  ndl.set_value(var=processed_s_raw, name=idx, value=True, b_force=True)
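A hedged end-to-end sketch (not part of the diff) of calling write() with the per-setting options introduced above. Only match_cond, backend, nodes_dir and saved_node_name_format come from the diff; the sample data, the output path, and the "{raw_name}" format string are illustrative:

```python
from kevin_toolbox.nested_dict_list import serializer

var = {"acc": [0.66, 0.78, 0.99], "note": "hello"}
serializer.write(
    var=var, output_dir="./demo_ndl",
    settings=[
        {
            "match_cond": "<level>-1",
            "backend": (":skip:simple", ":pickle"),
            # per-setting overrides added in this version; the path may use the
            # ${...} syntax resolved by env_vars_parser
            "nodes_dir": "${HOME}/shared_nodes",
            "saved_node_name_format": "{raw_name}",
        },
    ],
)
# nodes written to a non-default nodes_dir get a "nodes_dir" field recorded in the
# serialized structure, which read() resolves again through env_vars_parser
```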
@@ -1,5 +1,5 @@
  from kevin_toolbox.nested_dict_list import get_value
- from kevin_toolbox.nested_dict_list.name_handler import parse_name, escape_node
+ from kevin_toolbox.nested_dict_list.name_handler import parse_name
 
 
  def set_value(var, name, value, b_force=False):
@@ -8,7 +8,7 @@ def set_value(var, name, value, b_force=False):
 
  Parameters:
  var: any variable that supports assigning values by index
- name: <string> the name
+ name: <string/parsed_name> the name
  see the function name_handler.parse_name() for a detailed description of name
  for example, given var=dict(acc=[0.66,0.78,0.99]), to set var["acc"][1] to 100 the name can be written as:
  ":acc@1" or "|acc|1", etc.
@@ -25,14 +25,18 @@ def set_value(var, name, value, b_force=False):
  if b_force is True the change may not be made on top of var itself; a new ndl structure may be returned instead,
  so it is recommended to use the assignment form var = ndl.set_value(var) to avoid potential mistakes.
  """
- _, method_ls, node_ls = parse_name(name=name, b_de_escape_node=False)
+ if isinstance(name, (tuple, list,)):
+     assert len(name) == 3, f'invalid parsed name {name}'
+     _, method_ls, node_ls = name
+ else:
+     _, method_ls, node_ls = parse_name(name=name, b_de_escape_node=True)
  if len(node_ls) == 0:
  return value
 
- raw_key = escape_node(node=node_ls[-1], b_reversed=True, times=1)
+ raw_key = node_ls[-1]
 
  try:
- item = get_value(var=var, name=name[:-1 - len(node_ls[-1])])
+ item = get_value(var=var, name=('', method_ls[:-1], node_ls[:-1]))
  if method_ls[-1] == "@":
  key = eval(raw_key)
  elif method_ls[-1] == "|":
@@ -63,6 +67,12 @@ def set_value(var, name, value, b_force=False):
  else:
  # otherwise, e.g. when key is a tuple, a float, etc., build it with a dict
  value = {key: value}
- var = set_value(var=var, name=name[:-1 - len(node_ls[-1])], value=value, b_force=b_force)
+ var = set_value(var=var, name=('', method_ls[:-1], node_ls[:-1]), value=value, b_force=b_force)
 
  return var
+
+
+ if __name__ == "__main__":
+     var_ = []
+     set_value(var=var_, name="@2:data", value=1, b_force=True)
+     print(var_)
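For symmetry with get_value(), a sketch (not part of the diff) of the parsed_name form now accepted by set_value(); the data mirrors the docstring example above:

```python
import kevin_toolbox.nested_dict_list as ndl

var = dict(acc=[0.66, 0.78, 0.99])
# the string name and the pre-parsed ('', method_ls, node_ls) name are equivalent
var = ndl.set_value(var=var, name=":acc@1", value=100, b_force=False)
var = ndl.set_value(var=var, name=("", [":", "@"], ["acc", "1"]), value=100, b_force=False)
print(var)  # {'acc': [0.66, 100, 0.99]}
```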
File without changes
kevin_toolbox/utils/variable.py ADDED
@@ -0,0 +1,6 @@
+ import os
+
+ root_dir = os.path.split(os.path.split(os.path.split(__file__)[0])[0])[0]
+
+ if __name__ == '__main__':
+     print(root_dir)
@@ -0,0 +1,73 @@
+ Metadata-Version: 2.1
+ Name: kevin-toolbox-dev
+ Version: 1.4.5
+ Summary: A collection of commonly used utility code
+ Home-page: https://github.com/cantbeblank96/kevin_toolbox
+ Download-URL: https://github.com/username/your-package/archive/refs/tags/v1.0.0.tar.gz
+ Author: kevin hsu
+ Author-email: xukaiming1996@163.com
+ License: MIT
+ Keywords: mathematics,pytorch,numpy,machine-learning,algorithm
+ Platform: UNKNOWN
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3
+ Requires-Python: >=3.6
+ Description-Content-Type: text/markdown
+ Requires-Dist: torch (>=1.2.0)
+ Requires-Dist: numpy (>=1.19.0)
+ Provides-Extra: plot
+ Requires-Dist: matplotlib (>=3.0) ; extra == 'plot'
+ Provides-Extra: rest
+ Requires-Dist: pytest (>=6.2.5) ; extra == 'rest'
+ Requires-Dist: line-profiler (>=3.5) ; extra == 'rest'
+
+ # kevin_toolbox
+
+ A general-purpose collection of utility code
+
+
+
+ Requirements
+
+ ```shell
+ numpy>=1.19
+ pytorch>=1.2
+ ```
+
+ Installation:
+
+ ```shell
+ pip install kevin-toolbox --no-dependencies
+ ```
+
+
+
+ [Repo](https://github.com/cantbeblank96/kevin_toolbox)
+
+ [User_Guide](./notes/User_Guide.md)
+
+ [Disclaimer](./notes/Disclaimer.md)
+
+ [Release Record](./notes/Release_Record.md):
+
+ - v 1.4.5 (2024-12-22)【bug fix】【new feature】
+   - data_flow.file.json_
+     - modify write(): the input path may now use ~ to refer to the home directory.
+   - env_info
+     - 【new feature】add variable_, a module with functions and classes for handling environment variables.
+       - Env_Vars_Parser: parses and substitutes environment variables given in ${} form inside a string; the following forms are supported:
+         - "${HOME}" the home directory
+         - "${SYS:<var_name>}" other system environment variables
+         - "${KVT_XXX<ndl_name>}" reads the variable from the config file ~/.kvt_cfg/.xxx.json (xxx is automatically converted to lower case)
+         - "${/xxx.../xxx.json<ndl_name>}" reads the variable from the config file at the given path /xxx.../xxx.json
+       - env_vars_parser: a default instance of the Env_Vars_Parser class
+       - Added corresponding test cases.
+   - nested_dict_list
+     - 【new feature】modify get_value() and set_value() for parsed_name input: besides the string form, name can now also be given in the structured (root_node, method_ls, node_ls) form.
+       - Compared with the string form, a structured name needs no parsing and is therefore more efficient; it is the recommended form.
+     - 【new feature】modify serializer.read() and write(): nodes_dir can now specify the directory in which node contents are saved, and each entry in settings can specify its own nodes_dir and saved_node_name_format.
+       - With this feature, several ndl files can share node contents, allowing more flexible layouts.
+       - Added corresponding test cases.
+
+
@@ -1,4 +1,4 @@
- kevin_toolbox/__init__.py,sha256=XyLJGqBJkZgDgnrzFo4y-4YiXNVBR7KaaJSMMAkiqtc,410
+ kevin_toolbox/__init__.py,sha256=FXO6FMIEpptwoM95rn6R9rr5B72w0_y5nCnNKpjKTeI,410
  kevin_toolbox/computer_science/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  kevin_toolbox/computer_science/algorithm/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
  kevin_toolbox/computer_science/algorithm/cache_manager/__init__.py,sha256=p2hddkZ1HfYF9-m2Hx-o9IotwQHd4QwDCePy2ADpTDA,41
@@ -74,7 +74,7 @@ kevin_toolbox/data_flow/core/reader/unified_reader_base.py,sha256=4gIADdV8UKpt2y
  kevin_toolbox/data_flow/file/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
  kevin_toolbox/data_flow/file/json_/__init__.py,sha256=VAt8COS2tO3PJRuhSc43i35fEOlArFM_YahdTmEBaHE,85
  kevin_toolbox/data_flow/file/json_/read_json.py,sha256=BhAUCcagwPsSMaMeCJyyxDW3h9SGf1Dfvb0nXi6B_T8,2084
- kevin_toolbox/data_flow/file/json_/write_json.py,sha256=mWaxePr_QzfyeCb0hAy4xTKOGX7q0eFjep0jDqOqIgw,2379
+ kevin_toolbox/data_flow/file/json_/write_json.py,sha256=uG6UnQ9KVhL_UWndGjvLLHF_UoGtOwVn4ADi1Gb1nRU,2417
  kevin_toolbox/data_flow/file/json_/converter/__init__.py,sha256=oQMgAgzELLq_f4LIIfz5E6l_E7g4lFsXqfmnJ3tPZTY,401
  kevin_toolbox/data_flow/file/json_/converter/convert_dict_key_to_number.py,sha256=SuSZj_HCqKZutHAJ5AttABnGBRZplPGQhMxJBt2Wlgc,559
  kevin_toolbox/data_flow/file/json_/converter/convert_ndarray_to_list.py,sha256=GALpC1MFJ4aMzs0FZIfJScYznfCP-gmhPeM8sWXGSWg,391
@@ -140,7 +140,10 @@ kevin_toolbox/env_info/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBd
  kevin_toolbox/env_info/check_validity_and_uninstall.py,sha256=GskNfWwj2ak2AszV_0MMs2hBZb2tmRD0POO11UiouLM,879
  kevin_toolbox/env_info/check_version_and_update.py,sha256=og9ngoO6VhnlmUIkL0IZCyGXrI8rpZWRoN4t9FYGIkw,1953
  kevin_toolbox/env_info/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kevin_toolbox/env_info/test/test_variable_.py,sha256=n9To8UNfBSNey8Xy7relXcbrf0yX8ZoZzfJctd2fHBs,1657
  kevin_toolbox/env_info/test/test_version.py,sha256=xnF7RAcLSN3gpjIbVxFUV2-lmv0w7gOhdRa4XN0z0Q0,1728
+ kevin_toolbox/env_info/variable_/__init__.py,sha256=qFs8ZZVBjAfj6IuUujYxaEnmXk6HEbtN6GXaIkuQhoM,81
+ kevin_toolbox/env_info/variable_/env_vars_parser.py,sha256=8k6nBCD9oFsFu_Bq6usz3BF5NHr03c7SxFgbDj9EpP8,3781
  kevin_toolbox/env_info/version/__init__.py,sha256=PSrrvrYccfcE36IkOWG5kLQlKopfenQJ-4xilCdRULY,187
  kevin_toolbox/env_info/version/compare_version.py,sha256=rAksAR1OuOE5TrfJx3h5w7w5vftpcv_oJPHWGwuX7TI,2489
  kevin_toolbox/env_info/version/parse_version.py,sha256=QhYVO9hLZ8o4wdXWg5PBr0WIu5VayR-QFKQ_KyLDLgI,860
@@ -235,9 +238,9 @@ kevin_toolbox/nested_dict_list/copy_.py,sha256=MvzNRKm8htYpMe7Td1ao2-ZoaYVC_iNTG
  kevin_toolbox/nested_dict_list/count_leaf_node_nums.py,sha256=l67u47EvO1inoGinUqH6RZ7cHXwN0VcBQPUvSheqAvA,614
  kevin_toolbox/nested_dict_list/get_hash.py,sha256=Ygadnn5dnvIeE-9t39p2EwNKNRLzomL37ZsRD5daXxo,1286
  kevin_toolbox/nested_dict_list/get_nodes.py,sha256=doEcLPYOig4gGloGXEPlroXFcRWe5ovuH0RozsxYZ0U,3748
- kevin_toolbox/nested_dict_list/get_value.py,sha256=isvUhqSQyUNHBXgNuZX6_o2c84UV_SpjNjAYm2M3gd4,2083
+ kevin_toolbox/nested_dict_list/get_value.py,sha256=IiAqQCphyv-pAZWuQRWm0anEwxYQOkC9CttY5ZlUbSs,2389
  kevin_toolbox/nested_dict_list/set_default.py,sha256=laSgGP1CbApNgFB9HZGCtxCG9fe7u1C-YOx9ZCoHJms,3460
- kevin_toolbox/nested_dict_list/set_value.py,sha256=pmSWzC0y0jBxk7yritsjKU2Q-PPMar0X3A9bF6uWvoQ,3470
+ kevin_toolbox/nested_dict_list/set_value.py,sha256=AQ4foDtKo4JxyR---of-VSxjhRWfqkv6TrnQ4EoRo3M,3711
  kevin_toolbox/nested_dict_list/traverse.py,sha256=5_EirnYVy34JLfXxuTvb-mMjDeO1veyfLOcaVYcuGF8,6846
  kevin_toolbox/nested_dict_list/name_handler/__init__.py,sha256=P_pWq78oN6NdvWg2h6AduW_sUqbeaaVyoWWbW9kbgmU,107
  kevin_toolbox/nested_dict_list/name_handler/build_name.py,sha256=VPWyjE8i8l-4Zm4tkD06Ie4J2NCsmI32ecOxZQqqmok,989
@@ -245,10 +248,10 @@ kevin_toolbox/nested_dict_list/name_handler/escape_node.py,sha256=niT9MxmsyrSZYh
  kevin_toolbox/nested_dict_list/name_handler/parse_name.py,sha256=vUlAXPocpVSxtb3EnRi7U5K40Tz9plFG-_sbwLfYiy4,2280
  kevin_toolbox/nested_dict_list/serializer/__init__.py,sha256=79dd9l-mNz0bycFKjNm7YsfWPR-JsVx9NoG_Ofqy-HQ,153
  kevin_toolbox/nested_dict_list/serializer/enum_variable.py,sha256=RWPydtXI4adOJYGo_k5CWHSL0Odzj_bsahb24p1ranY,847
- kevin_toolbox/nested_dict_list/serializer/read.py,sha256=BjsEWYoyvEHgRKKVKw0suf1ukug2tAFLMCAmEnndqgg,2945
+ kevin_toolbox/nested_dict_list/serializer/read.py,sha256=HaEJJw7hBVNmsIs348kaIyatHP77Kr-JHEwYqRwLrso,3202
  kevin_toolbox/nested_dict_list/serializer/saved_node_name_builder.py,sha256=qsD-rmDmVaKZP4owN3Wm3QY2Ksi71XlYETqw4VmIsSU,1011
  kevin_toolbox/nested_dict_list/serializer/variable.py,sha256=ZywG6obipRBCGY1cY42gdvsuWk8GLZXr6eCYcW7ZJ9c,392
- kevin_toolbox/nested_dict_list/serializer/write.py,sha256=bGWP24kikE-L0ODFFMw8Z-uS2Almh-zVitoSHmIt7dc,22169
+ kevin_toolbox/nested_dict_list/serializer/write.py,sha256=ZUYJlBXQbCkMW2UN3d29obskGGbTA-gm3dmuLLltxLI,24101
  kevin_toolbox/nested_dict_list/serializer/backends/__init__.py,sha256=8g7y-L3cmctxao616dVkGiot00FJzKNmNl_69V2bSmE,39
  kevin_toolbox/nested_dict_list/serializer/backends/_json_.py,sha256=oJXIc28yjxsD9ZJuw120pVHTVsTzCdaXEhVUSQeydq4,2145
  kevin_toolbox/nested_dict_list/serializer/backends/_ndl.py,sha256=3YkAq_Bqzehnw0kGxqxwtF6uUz0EV37tLI-1ROHjixY,1794
@@ -344,7 +347,9 @@ kevin_toolbox/patches/for_torch/math/get_y_at_x.py,sha256=bfoVcasZ_tMdhR_1Me0Jli
  kevin_toolbox/patches/for_torch/math/my_around.py,sha256=ptpU3ids50gwf663EpHbw7raj9tNrDGBFZ5t_uMNH14,1378
  kevin_toolbox/patches/for_torch/nn/__init__.py,sha256=aJs3RMqRzQmd8KKDmQW9FxwCqS5yfPqEdg-m0PwlQro,39
  kevin_toolbox/patches/for_torch/nn/lambda_layer.py,sha256=KUuLiX_Dr4bvRmpAaCW5QTDWDcnMPRnw0jg4NNXTFhM,223
- kevin_toolbox_dev-1.4.3.dist-info/METADATA,sha256=OiEFU4vF5eltP539ZVxcEU4UEqbRK9LJy86SuegJCAs,1726
- kevin_toolbox_dev-1.4.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
- kevin_toolbox_dev-1.4.3.dist-info/top_level.txt,sha256=S5TeRGF-PwlhsaUEPTI-f2vWrpLmh3axpyI6v-Fi75o,14
- kevin_toolbox_dev-1.4.3.dist-info/RECORD,,
+ kevin_toolbox/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kevin_toolbox/utils/variable.py,sha256=PxUmp9w4CKKcKHjgdVNF_Iaw5gwPPOd4aY_Oe5F9U1M,133
+ kevin_toolbox_dev-1.4.5.dist-info/METADATA,sha256=-CIxxD1zrM4_RrzEyB7KVLxUsjQfRGqSdUiBI1z_tzM,2818
+ kevin_toolbox_dev-1.4.5.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+ kevin_toolbox_dev-1.4.5.dist-info/top_level.txt,sha256=S5TeRGF-PwlhsaUEPTI-f2vWrpLmh3axpyI6v-Fi75o,14
+ kevin_toolbox_dev-1.4.5.dist-info/RECORD,,
@@ -1,61 +0,0 @@
- Metadata-Version: 2.1
- Name: kevin-toolbox-dev
- Version: 1.4.3
- Summary: A collection of commonly used utility code
- Home-page: https://github.com/cantbeblank96/kevin_toolbox
- Download-URL: https://github.com/username/your-package/archive/refs/tags/v1.0.0.tar.gz
- Author: kevin hsu
- Author-email: xukaiming1996@163.com
- License: MIT
- Keywords: mathematics,pytorch,numpy,machine-learning,algorithm
- Platform: UNKNOWN
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Programming Language :: Python
- Classifier: Programming Language :: Python :: 3
- Requires-Python: >=3.6
- Description-Content-Type: text/markdown
- Requires-Dist: torch (>=1.2.0)
- Requires-Dist: numpy (>=1.19.0)
- Provides-Extra: plot
- Requires-Dist: matplotlib (>=3.0) ; extra == 'plot'
- Provides-Extra: rest
- Requires-Dist: pytest (>=6.2.5) ; extra == 'rest'
- Requires-Dist: line-profiler (>=3.5) ; extra == 'rest'
-
- # kevin_toolbox
-
- A general-purpose collection of utility code
-
-
-
- Requirements
-
- ```shell
- numpy>=1.19
- pytorch>=1.2
- ```
-
- Installation:
-
- ```shell
- pip install kevin-toolbox --no-dependencies
- ```
-
-
-
- [Repo](https://github.com/cantbeblank96/kevin_toolbox)
-
- [User_Guide](./notes/User_Guide.md)
-
- [Disclaimer](./notes/Disclaimer.md)
-
- [Release Record](./notes/Release_Record.md):
-
- - v 1.4.3 (2024-12-14)【new feature】
-   - nested_dict_list.serializer
-     - modify write(): wrap the original os.rename operation in a while(detect) try ... except wait structure and retry several times, so that a transient file-system stall (OSError: [Errno 5] Input/output error) no longer makes the save fail.
-   - patches.for_numpy.random
-     - add get/set_rng_state() for getting/loading the state of the random number generator.
-     - Added corresponding test cases
-
-