hjxdl 0.1.12__py3-none-any.whl → 0.1.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. hdl/_version.py +2 -2
  2. hdl/datasets/city_code.json +2576 -0
  3. hdl/datasets/defined_BaseFeatures.fdef +236 -0
  4. hdl/datasets/las.tsv +0 -0
  5. hdl/datasets/route_template.json +113 -0
  6. hdl/datasets/vocab.txt +591 -0
  7. hdl/ju/__init__.py +0 -0
  8. hdl/ju/setup.py +55 -0
  9. hdl/jupyfuncs/__init__.py +0 -0
  10. hdl/jupyfuncs/chem/__init__.py +0 -0
  11. hdl/jupyfuncs/chem/mol.py +548 -0
  12. hdl/jupyfuncs/chem/norm.py +268 -0
  13. hdl/jupyfuncs/chem/pdb_ext.py +94 -0
  14. hdl/jupyfuncs/chem/scaffold.py +25 -0
  15. hdl/jupyfuncs/chem/shape.py +241 -0
  16. hdl/jupyfuncs/chem/tokenizers.py +2 -0
  17. hdl/jupyfuncs/dbtools/__init__.py +0 -0
  18. hdl/jupyfuncs/dbtools/pg.py +42 -0
  19. hdl/jupyfuncs/dbtools/query_info.py +150 -0
  20. hdl/jupyfuncs/dl/__init__.py +0 -0
  21. hdl/jupyfuncs/dl/cp.py +54 -0
  22. hdl/jupyfuncs/dl/dataframe.py +38 -0
  23. hdl/jupyfuncs/dl/fp.py +49 -0
  24. hdl/jupyfuncs/dl/list.py +20 -0
  25. hdl/jupyfuncs/dl/model_utils.py +97 -0
  26. hdl/jupyfuncs/dl/tensor.py +159 -0
  27. hdl/jupyfuncs/dl/uncs.py +112 -0
  28. hdl/jupyfuncs/llm/__init__.py +0 -0
  29. hdl/jupyfuncs/llm/extract.py +123 -0
  30. hdl/jupyfuncs/llm/openapi.py +94 -0
  31. hdl/jupyfuncs/network/__init__.py +0 -0
  32. hdl/jupyfuncs/network/proxy.py +20 -0
  33. hdl/jupyfuncs/path/__init__.py +0 -0
  34. hdl/jupyfuncs/path/glob.py +285 -0
  35. hdl/jupyfuncs/path/strings.py +65 -0
  36. hdl/jupyfuncs/show/__init__.py +0 -0
  37. hdl/jupyfuncs/show/pbar.py +50 -0
  38. hdl/jupyfuncs/show/plot.py +259 -0
  39. hdl/jupyfuncs/utils/__init__.py +0 -0
  40. hdl/jupyfuncs/utils/wrappers.py +8 -0
  41. hdl/utils/llm/chat.py +4 -0
  42. {hjxdl-0.1.12.dist-info → hjxdl-0.1.14.dist-info}/METADATA +1 -1
  43. {hjxdl-0.1.12.dist-info → hjxdl-0.1.14.dist-info}/RECORD +45 -6
  44. {hjxdl-0.1.12.dist-info → hjxdl-0.1.14.dist-info}/WHEEL +1 -1
  45. {hjxdl-0.1.12.dist-info → hjxdl-0.1.14.dist-info}/top_level.txt +0 -0
hdl/jupyfuncs/show/plot.py ADDED
@@ -0,0 +1,259 @@
+ from os import path as osp
+ import typing as t
+ from typing_extensions import Literal
+
+ import seaborn as sn
+ import sklearn
+ import matplotlib.pyplot as plt
+ import matplotlib
+ import numpy as np
+ import pandas as pd
+
+ from ..path.glob import get_num_lines
+ from ..path.strings import splitted_strs_from_line
+
+ cm = matplotlib.cm.get_cmap('tab20')
+ colors = cm.colors
+ LABEL = Literal[
+     'training_size',
+     'episode_id',
+ ]
+
+
+ def accuracies_heat(y_true, y_pred, num_tasks):
+     assert len(y_true) == len(y_pred)
+     cm = sklearn.metrics.confusion_matrix(
+         y_true, y_pred, normalize='true'
+     )
+     df_cm = pd.DataFrame(
+         cm, range(num_tasks), range(num_tasks)
+     )
+     plt.figure(figsize=(10, 10))
+     sn.set(font_scale=1.4)
+     sn.heatmap(df_cm, annot=True, annot_kws={"size": 16}, fmt='.2f')
+
+
+ def get_metrics_curves(
+     base_dir,
+     ckpts,
+     num_points,
+     title="Metric Curve",
+     metric='accuracy',
+     log_file='metrics.log',
+     label: LABEL = 'training_size',
+     save_dir: str = None,
+     figsize=(10, 6)
+ ):
+     if not save_dir:
+         save_dir = osp.join(base_dir, 'metrics_curves.png')
+     data_dict = {}
+     for ckpt in ckpts:
+         log = osp.join(
+             base_dir,
+             ckpt,
+             log_file
+         )
+         if not osp.exists(log):
+             print(f"WARNING: no log file for {ckpt}")
+             continue
+         data_dict[ckpt] = []
+         data_idx = 0
+         for line_id in range(get_num_lines(log)):
+             line = splitted_strs_from_line(log, line_id)
+             if len(line) == 3 and line[1].strip() == metric:
+                 if label == 'episode_id':
+                     x = data_idx
+                 elif label == 'training_size':
+                     x = int(line[0].strip())
+                 data_dict[ckpt].append(
+                     [
+                         x,
+                         float(line[2].strip())
+                     ]
+                 )
+                 data_idx += 1
+             if line_id >= num_points - 1:
+                 break
+     plt.figure(figsize=figsize, dpi=100)
+     # plt.style.use('ggplot')
+     plt.title(title)
+     for i, (ckpt, points) in enumerate(data_dict.items()):
+         points_array = np.array(points).T
+         plt.plot(points_array[0], points_array[1], label=ckpt, color=colors[i])
+     lg = plt.legend(bbox_to_anchor=(1.2, 1.0), loc='upper right')
+     # plt.legend(loc='lower right')
+     plt.xlabel(
+         label
+     )
+     plt.ylabel(metric)
+     plt.grid(True)
+     plt.savefig(
+         save_dir,
+         format='png',
+         bbox_extra_artists=(lg,),
+         bbox_inches='tight'
+     )
+     plt.show()
+
+
+ def get_means_vars(
+     log_file: str,
+     indices: t.List,
+     mode: str,
+     nears_each: int,
+ ) -> t.List[t.List[int]]:
+
+     mean_s, var_s = [], []
+     num_points = len(indices)
+
+     nears_lists = []
+     if mode == 'id':
+
+         for index in indices:
+             nears = []
+             nears.extend(list(range(
+                 index - nears_each, index + 1 + nears_each
+             )))
+             nears_lists.append(nears)
+
+         for nears in nears_lists:
+             mean_s.append(np.mean([
+                 float(splitted_strs_from_line(log_file, line_id)[2])
+                 for line_id in nears
+             ]))
+             var_s.append((np.std([
+                 float(splitted_strs_from_line(log_file, line_id)[2])
+                 for line_id in nears
+             ])))
+
+     elif mode == 'value':
+         datas = [
+             splitted_strs_from_line(log_file, line_id)
+             for line_id in range(get_num_lines(log_file))
+         ]
+         training_sizes = [[int(data[0]) for data in datas]]
+         values = np.array([float(data[2]) for data in datas])
+
+         training_sizes = np.repeat(training_sizes, num_points, 0).T
+         diffs = training_sizes - indices
+
+         true_indices = np.argmin(np.abs(diffs), 0)
+
+         true_indices_list = [
+             list(range(
+                 index - nears_each, index + 1 + nears_each
+             ))
+             for index in true_indices
+         ]
+         mean_s = [
+             np.mean(values[indices])
+             for indices in true_indices_list
+         ]
+         var_s = [
+             np.std(values[indices])
+             for indices in true_indices_list
+         ]
+         var_s = np.array(var_s) / np.sqrt(num_points)
+
+     return mean_s, var_s
+
+
+ def get_metrics_bars(
+     base_dir,
+     ckpts,
+     title="Metric Bars",
+     training_sizes: t.List = [],
+     episide_ids: t.List = [],
+     nears_each: int = 5,
+     pretrained_num: int = 0,
+     x_diff: bool = False,
+     metric='accuracy',
+     log_file='metrics.log',
+     label: LABEL = 'training_size',
+     save_dir: str = None,
+     figsize=(10, 6),
+     bar_ratio=0.8,
+     minimum=0.0,
+     maximum=1.0
+ ):
+
+     if not save_dir:
+         save_dir = osp.join(base_dir, 'metrics_bars.png')
+
+     x_labels, num_points = [], 0
+     if label == 'training_size':
+         num_points = len(training_sizes)
+         x_labels = training_sizes
+         mode = 'value'
+     elif label == 'episode_id':
+         num_points = len(episide_ids)
+         x_labels = episide_ids
+         mode = 'id'
+
+     x = np.arange(num_points)
+     num_strategies = len(ckpts)
+     total_width = bar_ratio
+     width = total_width / num_strategies
+     x = x - (total_width - width) / 2
+
+     if not save_dir:
+         save_dir = osp.join(base_dir, 'metrics.png')
+
+     data_dict = {}
+     for ckpt in ckpts:
+
+         log = osp.join(
+             base_dir,
+             ckpt,
+             log_file
+         )
+         if not osp.exists(log):
+             print(f"WARNING: no log file for {ckpt}")
+             continue
+
+         # print(x_labels)
+         mean_s, var_s = get_means_vars(
+             log_file=log,
+             indices=x_labels,
+             mode=mode,
+             nears_each=nears_each
+         )
+         data_dict[ckpt] = (mean_s, var_s)
+
+     if x_diff:
+         x_labels = np.array(x_labels, dtype=np.int) - pretrained_num
+
+     plt.figure(figsize=figsize, dpi=100)
+     plt.title(title)
+     ax = plt.gca()
+     ax.set_ylim([minimum, maximum])
+
+     for point_idx, (ckpt, datas) in enumerate(data_dict.items()):
+         mean_s, var_s = datas
+         plt.bar(
+             x + width * point_idx,
+             mean_s,
+             width=width,
+             yerr=var_s,
+             tick_label=x_labels,
+             label=ckpt,
+             color=colors[point_idx]
+         )
+
+     lg = plt.legend(bbox_to_anchor=(1.2, 1.0), loc='upper right')
+     # plt.legend(loc='lower right')
+     plt.xlabel(
+         label
+     )
+     plt.ylabel(metric)
+     plt.grid(True)
+
+     plt.savefig(
+         save_dir,
+         format='png',
+         bbox_extra_artists=(lg,),
+         bbox_inches='tight'
+     )
+     plt.show()
+
+
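The new hdl/jupyfuncs/show/plot.py module plots per-checkpoint metric curves and grouped bars from plain-text logs. A minimal usage sketch, assuming a layout of <base_dir>/<ckpt>/metrics.log whose lines split into three fields (training size, metric name, value), which is what splitted_strs_from_line appears to expect; the directory and checkpoint names below are placeholders:

# Hypothetical usage of the new plotting helpers; "runs", "random" and
# "entropy" are placeholder directory names, and each log line is assumed
# to read "<training_size> <metric> <value>".
from hdl.jupyfuncs.show.plot import get_metrics_curves, get_metrics_bars

get_metrics_curves(
    base_dir="runs",
    ckpts=["random", "entropy"],   # one sub-directory per strategy
    num_points=100,                # read at most this many log lines
    metric="accuracy",
    label="training_size",
)

get_metrics_bars(
    base_dir="runs",
    ckpts=["random", "entropy"],
    training_sizes=[1000, 2000, 4000],  # x positions to average around
    nears_each=5,                       # log lines averaged on each side
    metric="accuracy",
)

Both helpers also save a PNG into base_dir (metrics_curves.png / metrics_bars.png) unless save_dir is given.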
hdl/jupyfuncs/utils/wrappers.py ADDED
@@ -0,0 +1,8 @@
+ class GeneratorWrapper:
+     def __init__(self, generator_func, *args, **kwargs):
+         self.generator_func = generator_func
+         self.args = args
+         self.kwargs = kwargs
+
+     def __iter__(self):
+         return self.generator_func(*self.args, **self.kwargs)
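GeneratorWrapper makes a generator function re-iterable: every __iter__ call re-invokes the wrapped function instead of reusing an exhausted generator. A small illustration (count_up is a made-up example, not part of the package):

from hdl.jupyfuncs.utils.wrappers import GeneratorWrapper

def count_up(n):
    # toy generator used only for this illustration
    for i in range(n):
        yield i

counts = GeneratorWrapper(count_up, 3)
print(list(counts))  # [0, 1, 2]
print(list(counts))  # [0, 1, 2] again; a bare generator would already be exhausted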
hdl/utils/llm/chat.py CHANGED
@@ -364,6 +364,7 @@ class OpenAI_M():
  prompt : str,
  stop: list[str] | None = ["USER:", "ASSISTANT:"],
  # history: list = [],
+ model="default_model",
  **kwargs: t.Any,
  ) -> str:
  """Invoke the chatbot with the given prompt and return the response.
@@ -383,6 +384,7 @@ class OpenAI_M():
  "content": prompt
  }],
  stream=False,
+ model=model,
  # model=kwargs.get("model", "default_model")
  **kwargs
  )
@@ -393,6 +395,7 @@ class OpenAI_M():
  prompt : str,
  stop: list[str] | None = ["USER:", "ASSISTANT:"],
  # history: list = [],
+ model="default_model",
  **kwargs: t.Any,
  ):
  """Generate text completion in a streaming fashion.
@@ -412,6 +415,7 @@ class OpenAI_M():
  "content": prompt
  }],
  stream=True,
+ model=model,
  # model=kwargs.get("model", "default_model")
  **kwargs
  )
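These hunks add an explicit model keyword (defaulting to "default_model") that is forwarded to the chat-completion call, replacing the previously commented-out kwargs lookup. A hedged sketch of how the new parameter might be used; the method names invoke and stream are only inferred from the docstrings above, and the constructor arguments are placeholders:

# Hedged sketch only: `invoke`/`stream` are assumed method names based on the
# docstrings shown in the hunks; constructor arguments are not part of this diff.
from hdl.utils.llm.chat import OpenAI_M

llm = OpenAI_M()  # placeholder construction

# Non-streaming call with an explicit model name.
print(llm.invoke("Hello!", model="default_model"))

# Streaming call with the same keyword.
for chunk in llm.stream("Tell me a joke", model="default_model"):
    print(chunk, end="")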
{hjxdl-0.1.12.dist-info → hjxdl-0.1.14.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: hjxdl
- Version: 0.1.12
+ Version: 0.1.14
  Summary: A collection of functions for Jupyter notebooks
  Home-page: https://github.com/huluxiaohuowa/hdl
  Author: Jianxing Hu
{hjxdl-0.1.12.dist-info → hjxdl-0.1.14.dist-info}/RECORD CHANGED
@@ -1,5 +1,5 @@
  hdl/__init__.py,sha256=5sZZNySv08wwfzJcSDssGTqUn9wlmDsR6R4XB8J8mFM,70
- hdl/_version.py,sha256=cuLjhzc4k2qL9vS7TDaPLs82wisSY2yFKQtDhC08Kis,413
+ hdl/_version.py,sha256=1w8OANeC0K0uOMBB6XWDhN49spC7ipRwXZnxjNMUZWU,413
  hdl/args/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/args/loss_args.py,sha256=s7YzSdd7IjD24rZvvOrxLLFqMZQb9YylxKeyelSdrTk,70
  hdl/controllers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -38,6 +38,11 @@ hdl/data/dataset/samplers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJW
  hdl/data/dataset/samplers/chiral.py,sha256=ZS83kg5e2gdHVGgIuCjCepDwk2SKqWDgJawH37oXy78,463
  hdl/data/dataset/seq/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/data/dataset/seq/rxn_dataset.py,sha256=jfXFlR3ITAf0KwUfIevzUZHnLBnFYrL69Cc81EMv0x0,1668
+ hdl/datasets/city_code.json,sha256=qnTL6ldpGnQanDXN3oitx12E6oiayaCHTh2Zi9RyQjM,60816
+ hdl/datasets/defined_BaseFeatures.fdef,sha256=5QhCEcu6fjSTXaTcZZ8-LSgf72_aJj_ykoDk82ZwVBI,7383
+ hdl/datasets/las.tsv,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/datasets/route_template.json,sha256=2qhkbtEZUrUod6PXCWXxAgQmU-jAC0yLcWGBBk2IwgE,3757
+ hdl/datasets/vocab.txt,sha256=cXdB1JDs2rY6C9BupRRY7w21xK4KN9SrxPxkN9CUXXQ,3524
  hdl/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/features/fp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/features/fp/features_generators.py,sha256=HbyS97i2I2mOcANdJMohs2okA1LlZmkG4ZIIX6Y9fr4,9017
@@ -45,6 +50,40 @@ hdl/features/graph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
  hdl/features/graph/featurization.py,sha256=QLbj33JsgO-OWarIC2HXQP7eMu8pd-GWmppZQj_tQ_k,10902
  hdl/features/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/features/utils/utils.py,sha256=aL4UAALblaw1w0AjK7MX8nSj9zwTmrp9CTLwJUX8ZtE,4225
+ hdl/ju/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/ju/setup.py,sha256=MB3rndQYt9QC-bdyGt81HYR0Rdr0l8DbAHktIuFMYU0,1725
+ hdl/jupyfuncs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/chem/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/chem/mol.py,sha256=FDb2L61JL6xkNK7bxXWVjAT-r-st7iMyQNoFezBvTUE,15294
+ hdl/jupyfuncs/chem/norm.py,sha256=NRjSP8P7f3Yhy1LmSpaV93qYgYis_qVG7HT0vOWeW1U,10186
+ hdl/jupyfuncs/chem/pdb_ext.py,sha256=VgkU34Y3na5Ri0QwW2Hh-QiE4j9DgVwMB49DL3JgAcQ,2710
+ hdl/jupyfuncs/chem/scaffold.py,sha256=hWhmsrtdjM_ihStFvTz7XIs27coOMDOvdGh5vAk-UsA,1025
+ hdl/jupyfuncs/chem/shape.py,sha256=FQmLxEkUOaWqlYi1CPm-8Ze5w_oOrmoeAzOx4UdYRtI,5807
+ hdl/jupyfuncs/chem/tokenizers.py,sha256=UtpgQdDCL3FF1gSYcMLWZZcwcJQSp2RXdG0fRYZDGmU,133
+ hdl/jupyfuncs/dbtools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/dbtools/pg.py,sha256=B4t1WAfGCmJsaQIm59M5xYdRPfGP-vrNS2SA8rMzlbM,943
+ hdl/jupyfuncs/dbtools/query_info.py,sha256=CorUpyopY-FO3mRXlCIjxD09_6nXaAMJR0NHvfqjcIw,4327
+ hdl/jupyfuncs/dl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/dl/cp.py,sha256=NHBkWBQ0eP6qEUXMwsVv5bI0p24CEsqFhwSJtKmmzGo,1816
+ hdl/jupyfuncs/dl/dataframe.py,sha256=Cqlkcv20QplZ4tGQdW9k-3CaoTaaqoyaWQ0mr96n3V0,1023
+ hdl/jupyfuncs/dl/fp.py,sha256=VYiW6-Q8lY2fDK9DU5_r6EmAyY5-VVv680-57h_MKPA,1025
+ hdl/jupyfuncs/dl/list.py,sha256=WcVoDoJU7LYAG0bp_3zcZF2pW9NfQ4LIIXvGGu32vzs,756
+ hdl/jupyfuncs/dl/model_utils.py,sha256=Xnt-a5MjE5NWghIu9Yss8zQ9JYhrv_Rbo5OPNC0MX80,2934
+ hdl/jupyfuncs/dl/tensor.py,sha256=vOUyi8Ymc_JKiqywLJUph5jiKPHoq2cUo73YJnf0voQ,4495
+ hdl/jupyfuncs/dl/uncs.py,sha256=j4YQE4BrBmZqOk9J_C9UKxbzrvt5pLPUZCCSIdi2FBk,2780
+ hdl/jupyfuncs/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/llm/extract.py,sha256=UjPhrgbuU7_2u3620lcjXQPHF2l22o_AEmAaH8UXIZA,4531
+ hdl/jupyfuncs/llm/openapi.py,sha256=pNBW0Jzt0JAZP8ZexgoQZVF118jrhKnBYheClcMu9bU,2687
+ hdl/jupyfuncs/network/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/network/proxy.py,sha256=foZm3gGFTPLeMNRfWs4QKNUEmlhtNTr_1GQvn0IgDVw,545
+ hdl/jupyfuncs/path/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/path/glob.py,sha256=fl0YDLDS9QI2WgDlBzDGlALbvkmPGcXp4UnbbQ17BOM,8300
+ hdl/jupyfuncs/path/strings.py,sha256=eZCXElh7pT0xwy6ZBqSu3frq3Xx8CN5TMuQzsxb0Sbw,2009
+ hdl/jupyfuncs/show/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/show/pbar.py,sha256=QzyHV9XEyk9U5oyonxLSwYb5pD09iVUw_atVlzxYBNQ,1005
+ hdl/jupyfuncs/show/plot.py,sha256=kH4UwTiRtjhTJJlR4gj6-mDgQxIH9PQCHVg1_54nIR8,6717
+ hdl/jupyfuncs/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/utils/wrappers.py,sha256=N8rwRFnHX9j2sR_-l-y-B2nmk39NFuu4YTntOvF3pMo,266
  hdl/layers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/layers/general/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/layers/general/gp.py,sha256=no1P6i2nCa539b0I5S6hd2mC8CeW0Ds726GM0swwlzc,525
@@ -84,12 +123,12 @@ hdl/utils/database_tools/connect.py,sha256=KUnVG-8raifEJ_N0b3c8LkTTIfn9NIyw8LX6q
  hdl/utils/general/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/utils/general/glob.py,sha256=8-RCnt6L297wMIfn34ZAMCsGCZUjHG3MGglGZI1cX0g,491
  hdl/utils/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- hdl/utils/llm/chat.py,sha256=FtpQjp87FIBSIOQBPYKjtscA13RpMRwLk7sKlMAnOQ0,14974
+ hdl/utils/llm/chat.py,sha256=Xypu7KlnTeI9oep02tuw04GooAfysZEgY2T_P29xZuU,15086
  hdl/utils/llm/embs.py,sha256=Tf0FOYrOFZp7qQpEPiSCXzlgyHH0X9HVTUtsup74a9E,7174
  hdl/utils/llm/extract.py,sha256=2sK_WJzmYIc8iuWaM9DA6Nw3_6q1O4lJ5pKpcZo-bBA,6512
  hdl/utils/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/utils/schedulers/norm_lr.py,sha256=bDwCmdEK-WkgxQMFBiMuchv8Mm7C0-GZJ6usm-PQk14,4461
- hjxdl-0.1.12.dist-info/METADATA,sha256=oM9y6W0hDN6TupHYR2OECwnI1sI7BLlpWqkbOV9slK0,543
- hjxdl-0.1.12.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
- hjxdl-0.1.12.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
- hjxdl-0.1.12.dist-info/RECORD,,
+ hjxdl-0.1.14.dist-info/METADATA,sha256=zSqAbKEZutZyv5WQguUpas0Jm_nTHdBsdUyBpT6o8lQ,543
+ hjxdl-0.1.14.dist-info/WHEEL,sha256=Mdi9PDNwEZptOjTlUcAth7XJDFtKrHYaQMPulZeBCiQ,91
+ hjxdl-0.1.14.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
+ hjxdl-0.1.14.dist-info/RECORD,,
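Each RECORD row has the form path,sha256=<urlsafe-base64 digest without '=' padding>,size. A sketch for spot-checking one of the rows above against an unpacked 0.1.14 wheel; the relative path is just an example taken from this diff:

import base64
import hashlib

def record_hash(path: str) -> str:
    # RECORD stores "sha256=" + urlsafe base64 of the raw digest, padding stripped
    with open(path, "rb") as fh:
        digest = hashlib.sha256(fh.read()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

print(record_hash("hdl/datasets/vocab.txt"))
# expected to match the vocab.txt row above:
# sha256=cXdB1JDs2rY6C9BupRRY7w21xK4KN9SrxPxkN9CUXXQ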
{hjxdl-0.1.12.dist-info → hjxdl-0.1.14.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (72.1.0)
+ Generator: setuptools (73.0.1)
  Root-Is-Purelib: true
  Tag: py3-none-any