hjxdl 0.1.13__py3-none-any.whl → 0.1.15__py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registry.
Files changed (46)
  1. hdl/_version.py +2 -2
  2. hdl/datasets/city_code.json +2576 -0
  3. hdl/datasets/defined_BaseFeatures.fdef +236 -0
  4. hdl/datasets/las.tsv +0 -0
  5. hdl/datasets/route_template.json +113 -0
  6. hdl/datasets/vocab.txt +591 -0
  7. hdl/ju/__init__.py +0 -0
  8. hdl/ju/setup.py +55 -0
  9. hdl/jupyfuncs/__init__.py +0 -0
  10. hdl/jupyfuncs/chem/__init__.py +0 -0
  11. hdl/jupyfuncs/chem/mol.py +548 -0
  12. hdl/jupyfuncs/chem/norm.py +268 -0
  13. hdl/jupyfuncs/chem/pdb_ext.py +94 -0
  14. hdl/jupyfuncs/chem/scaffold.py +25 -0
  15. hdl/jupyfuncs/chem/shape.py +241 -0
  16. hdl/jupyfuncs/chem/tokenizers.py +2 -0
  17. hdl/jupyfuncs/dbtools/__init__.py +0 -0
  18. hdl/jupyfuncs/dbtools/pg.py +42 -0
  19. hdl/jupyfuncs/dbtools/query_info.py +150 -0
  20. hdl/jupyfuncs/dl/__init__.py +0 -0
  21. hdl/jupyfuncs/dl/cp.py +54 -0
  22. hdl/jupyfuncs/dl/dataframe.py +38 -0
  23. hdl/jupyfuncs/dl/fp.py +49 -0
  24. hdl/jupyfuncs/dl/list.py +20 -0
  25. hdl/jupyfuncs/dl/model_utils.py +97 -0
  26. hdl/jupyfuncs/dl/tensor.py +159 -0
  27. hdl/jupyfuncs/dl/uncs.py +112 -0
  28. hdl/jupyfuncs/llm/__init__.py +0 -0
  29. hdl/jupyfuncs/llm/extract.py +123 -0
  30. hdl/jupyfuncs/llm/openapi.py +94 -0
  31. hdl/jupyfuncs/network/__init__.py +0 -0
  32. hdl/jupyfuncs/network/proxy.py +20 -0
  33. hdl/jupyfuncs/path/__init__.py +0 -0
  34. hdl/jupyfuncs/path/glob.py +285 -0
  35. hdl/jupyfuncs/path/strings.py +65 -0
  36. hdl/jupyfuncs/show/__init__.py +0 -0
  37. hdl/jupyfuncs/show/pbar.py +50 -0
  38. hdl/jupyfuncs/show/plot.py +259 -0
  39. hdl/jupyfuncs/utils/__init__.py +0 -0
  40. hdl/jupyfuncs/utils/wrappers.py +8 -0
  41. hdl/utils/weather/__init__.py +0 -0
  42. hdl/utils/weather/weather.py +68 -0
  43. {hjxdl-0.1.13.dist-info → hjxdl-0.1.15.dist-info}/METADATA +1 -1
  44. {hjxdl-0.1.13.dist-info → hjxdl-0.1.15.dist-info}/RECORD +46 -5
  45. {hjxdl-0.1.13.dist-info → hjxdl-0.1.15.dist-info}/WHEEL +1 -1
  46. {hjxdl-0.1.13.dist-info → hjxdl-0.1.15.dist-info}/top_level.txt +0 -0
hdl/jupyfuncs/show/plot.py
@@ -0,0 +1,259 @@
+ from os import path as osp
+ import typing as t
+ from typing_extensions import Literal
+
+ import seaborn as sn
+ import sklearn.metrics
+ import matplotlib.pyplot as plt
+ import matplotlib
+ import numpy as np
+ import pandas as pd
+
+ from ..path.glob import get_num_lines
+ from ..path.strings import splitted_strs_from_line
+
+ cm = matplotlib.cm.get_cmap('tab20')
+ colors = cm.colors
+ LABEL = Literal[
+     'training_size',
+     'episode_id',
+ ]
+
+
+ def accuracies_heat(y_true, y_pred, num_tasks):
+     assert len(y_true) == len(y_pred)
+     cm = sklearn.metrics.confusion_matrix(
+         y_true, y_pred, normalize='true'
+     )
+     df_cm = pd.DataFrame(
+         cm, range(num_tasks), range(num_tasks)
+     )
+     plt.figure(figsize=(10, 10))
+     sn.set(font_scale=1.4)
+     sn.heatmap(df_cm, annot=True, annot_kws={"size": 16}, fmt='.2f')
+
+
+ def get_metrics_curves(
+     base_dir,
+     ckpts,
+     num_points,
+     title="Metric Curve",
+     metric='accuracy',
+     log_file='metrics.log',
+     label: LABEL = 'training_size',
+     save_dir: str = None,
+     figsize=(10, 6)
+ ):
+     if not save_dir:
+         save_dir = osp.join(base_dir, 'metrics_curves.png')
+     data_dict = {}
+     for ckpt in ckpts:
+         log = osp.join(
+             base_dir,
+             ckpt,
+             log_file
+         )
+         if not osp.exists(log):
+             print(f"WARNING: no log file for {ckpt}")
+             continue
+         data_dict[ckpt] = []
+         data_idx = 0
+         for line_id in range(get_num_lines(log)):
+             line = splitted_strs_from_line(log, line_id)
+             if len(line) == 3 and line[1].strip() == metric:
+                 if label == 'episode_id':
+                     x = data_idx
+                 elif label == 'training_size':
+                     x = int(line[0].strip())
+                 data_dict[ckpt].append(
+                     [
+                         x,
+                         float(line[2].strip())
+                     ]
+                 )
+                 data_idx += 1
+             if line_id >= num_points - 1:
+                 break
+     plt.figure(figsize=figsize, dpi=100)
+     # plt.style.use('ggplot')
+     plt.title(title)
+     for i, (ckpt, points) in enumerate(data_dict.items()):
+         points_array = np.array(points).T
+         plt.plot(points_array[0], points_array[1], label=ckpt, color=colors[i])
+     lg = plt.legend(bbox_to_anchor=(1.2, 1.0), loc='upper right')
+     # plt.legend(loc='lower right')
+     plt.xlabel(label)
+     plt.ylabel(metric)
+     plt.grid(True)
+     plt.savefig(
+         save_dir,
+         format='png',
+         bbox_extra_artists=(lg,),
+         bbox_inches='tight'
+     )
+     plt.show()
+
+
+ def get_means_vars(
+     log_file: str,
+     indices: t.List,
+     mode: str,
+     nears_each: int,
+ ) -> t.Tuple[t.List[float], t.List[float]]:
+
+     mean_s, var_s = [], []
+     num_points = len(indices)
+
+     nears_lists = []
+     if mode == 'id':
+         for index in indices:
+             nears = []
+             nears.extend(list(range(
+                 index - nears_each, index + 1 + nears_each
+             )))
+             nears_lists.append(nears)
+
+         for nears in nears_lists:
+             mean_s.append(np.mean([
+                 float(splitted_strs_from_line(log_file, line_id)[2])
+                 for line_id in nears
+             ]))
+             var_s.append(np.std([
+                 float(splitted_strs_from_line(log_file, line_id)[2])
+                 for line_id in nears
+             ]))
+
+     elif mode == 'value':
+         datas = [
+             splitted_strs_from_line(log_file, line_id)
+             for line_id in range(get_num_lines(log_file))
+         ]
+         training_sizes = [[int(data[0]) for data in datas]]
+         values = np.array([float(data[2]) for data in datas])
+
+         training_sizes = np.repeat(training_sizes, num_points, 0).T
+         diffs = training_sizes - indices
+
+         true_indices = np.argmin(np.abs(diffs), 0)
+
+         true_indices_list = [
+             list(range(
+                 index - nears_each, index + 1 + nears_each
+             ))
+             for index in true_indices
+         ]
+         mean_s = [
+             np.mean(values[indices])
+             for indices in true_indices_list
+         ]
+         var_s = [
+             np.std(values[indices])
+             for indices in true_indices_list
+         ]
+         var_s = np.array(var_s) / np.sqrt(num_points)
+
+     return mean_s, var_s
+
+
+ def get_metrics_bars(
+     base_dir,
+     ckpts,
+     title="Metric Bars",
+     training_sizes: t.List = [],
+     episode_ids: t.List = [],
+     nears_each: int = 5,
+     pretrained_num: int = 0,
+     x_diff: bool = False,
+     metric='accuracy',
+     log_file='metrics.log',
+     label: LABEL = 'training_size',
+     save_dir: str = None,
+     figsize=(10, 6),
+     bar_ratio=0.8,
+     minimum=0.0,
+     maximum=1.0
+ ):
+     if not save_dir:
+         save_dir = osp.join(base_dir, 'metrics_bars.png')
+
+     x_labels, num_points = [], 0
+     if label == 'training_size':
+         num_points = len(training_sizes)
+         x_labels = training_sizes
+         mode = 'value'
+     elif label == 'episode_id':
+         num_points = len(episode_ids)
+         x_labels = episode_ids
+         mode = 'id'
+
+     x = np.arange(num_points)
+     num_strategies = len(ckpts)
+     total_width = bar_ratio
+     width = total_width / num_strategies
+     x = x - (total_width - width) / 2
+
+     data_dict = {}
+     for ckpt in ckpts:
+         log = osp.join(
+             base_dir,
+             ckpt,
+             log_file
+         )
+         if not osp.exists(log):
+             print(f"WARNING: no log file for {ckpt}")
+             continue
+
+         # print(x_labels)
+         mean_s, var_s = get_means_vars(
+             log_file=log,
+             indices=x_labels,
+             mode=mode,
+             nears_each=nears_each
+         )
+         data_dict[ckpt] = (mean_s, var_s)
+
+     if x_diff:
+         x_labels = np.array(x_labels, dtype=int) - pretrained_num
+
+     plt.figure(figsize=figsize, dpi=100)
+     plt.title(title)
+     ax = plt.gca()
+     ax.set_ylim([minimum, maximum])
+
+     for point_idx, (ckpt, datas) in enumerate(data_dict.items()):
+         mean_s, var_s = datas
+         plt.bar(
+             x + width * point_idx,
+             mean_s,
+             width=width,
+             yerr=var_s,
+             tick_label=x_labels,
+             label=ckpt,
+             color=colors[point_idx]
+         )
+
+     lg = plt.legend(bbox_to_anchor=(1.2, 1.0), loc='upper right')
+     # plt.legend(loc='lower right')
+     plt.xlabel(label)
+     plt.ylabel(metric)
+     plt.grid(True)
+
+     plt.savefig(
+         save_dir,
+         format='png',
+         bbox_extra_artists=(lg,),
+         bbox_inches='tight'
+     )
+     plt.show()
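The new plotting helpers read per-checkpoint logs: `get_metrics_curves` expects each `base_dir/<ckpt>/metrics.log` to hold whitespace-separated rows of the form `training_size metric value`, and `get_metrics_bars` averages a window of `nears_each` neighboring points around each requested x position via `get_means_vars`. A minimal usage sketch (the `runs` directory and strategy names here are hypothetical, not part of the package):

from hdl.jupyfuncs.show.plot import get_metrics_curves, get_metrics_bars

# One curve per checkpoint; x values come from the first log column
get_metrics_curves(
    base_dir='runs',              # hypothetical layout: runs/<ckpt>/metrics.log
    ckpts=['random', 'entropy'],  # hypothetical strategy/checkpoint names
    num_points=50,
    metric='accuracy',
    label='training_size',
)

# Grouped bars with error bars at the requested training sizes
get_metrics_bars(
    base_dir='runs',
    ckpts=['random', 'entropy'],
    training_sizes=[1000, 2000, 4000],
    nears_each=5,
)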
hdl/jupyfuncs/utils/__init__.py: File without changes
hdl/jupyfuncs/utils/wrappers.py
@@ -0,0 +1,8 @@
+ class GeneratorWrapper:
+     def __init__(self, generator_func, *args, **kwargs):
+         self.generator_func = generator_func
+         self.args = args
+         self.kwargs = kwargs
+
+     def __iter__(self):
+         return self.generator_func(*self.args, **self.kwargs)
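`GeneratorWrapper` makes a generator function re-iterable: each `iter()` call invokes the stored function again with the saved arguments, whereas a raw generator is exhausted after one pass. A quick sketch of the behavior (the `countdown` function is illustrative only):

def countdown(n):
    while n > 0:
        yield n
        n -= 1

wrapped = GeneratorWrapper(countdown, 3)
print(list(wrapped))  # [3, 2, 1]
print(list(wrapped))  # [3, 2, 1] again; a fresh generator is created per pass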
hdl/utils/weather/__init__.py: File without changes
hdl/utils/weather/weather.py
@@ -0,0 +1,68 @@
+ import requests
+ from pathlib import Path
+ import os
+ import json
+ from bs4 import BeautifulSoup
+
+
+ def get_city_codes():
+     # with open('../../city.json', 'r', encoding='utf-8') as f:
+     #     code_dic = eval(f.read())
+     #     return code_dic
+     code_file = Path(__file__).resolve().parent.parent.parent \
+         / "datasets" \
+         / "city_code.json"
+     with code_file.open() as f:
+         code = json.load(f)
+     return code
+
+
+ def get_html(code):
+     weather_url = f'http://www.weather.com.cn/weather/{code}.shtml'
+     header = {
+         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36"}
+     print(weather_url)
+     resp = requests.get(url=weather_url, headers=header)
+     resp.encoding = 'utf-8'
+     return resp.text
+
+
+ def get_page_data(html):
+     soup = BeautifulSoup(html, 'html.parser')
+     weather_info = soup.find('div', id='7d')
+     seven_weather = weather_info.find('ul')
+     weather_list = seven_weather.find_all('li')
+     for weather in weather_list:
+         print('=' * 60)
+         print(weather.find('h1').get_text())
+         print('Weather:', weather.find('p', class_='wea').get_text())
+         # A 'span' inside the 'p.tem' tag means a daily high is listed
+         if weather.find('p', class_='tem').find('span'):
+             temp_high = weather.find('p', class_='tem').find('span').get_text()
+         else:
+             temp_high = ''  # no daily high
+         temp_low = weather.find('p', class_='tem').find('i').get_text()  # daily low
+         print(f'Temperature: {temp_low}/{temp_high}')
+         win_list_tag = weather.find('p', class_='win').find('em').find_all('span')
+         win_list = []
+         for win in win_list_tag:
+             win_list.append(win.get('title'))
+         print('Wind direction:', '-'.join(win_list))
+         print('Wind force:', weather.find('p', class_='win').find('i').get_text())
+
+
+ def main():
+     code_dic = get_city_codes()
+     print('=' * 60)
+     print('\t' * 5, 'Weather Forecast Query System')
+     print('=' * 60)
+     city = input("Enter the city to query: ")
+     if city in code_dic:
+         html = get_html(code_dic[city]['AREAID'])
+         get_page_data(html)
+     else:
+         print('The city you asked for was not found')
+
+
+ if __name__ == '__main__':
+     main()
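Besides the interactive `main()`, the module's pieces can be called directly. A minimal non-interactive sketch (the city key is an assumption; valid keys are the names in the bundled hdl/datasets/city_code.json):

from hdl.utils.weather.weather import get_city_codes, get_html, get_page_data

codes = get_city_codes()  # maps city name -> {'AREAID': ...}
city = '北京'  # assumed key; check city_code.json for the actual names
if city in codes:
    get_page_data(get_html(codes[city]['AREAID']))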
{hjxdl-0.1.13.dist-info → hjxdl-0.1.15.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: hjxdl
- Version: 0.1.13
+ Version: 0.1.15
  Summary: A collection of functions for Jupyter notebooks
  Home-page: https://github.com/huluxiaohuowa/hdl
  Author: Jianxing Hu
{hjxdl-0.1.13.dist-info → hjxdl-0.1.15.dist-info}/RECORD
@@ -1,5 +1,5 @@
  hdl/__init__.py,sha256=5sZZNySv08wwfzJcSDssGTqUn9wlmDsR6R4XB8J8mFM,70
- hdl/_version.py,sha256=S22EPqqZRb53L2H7sobVA3TUXv9skvkYd-YtLuHuV6M,413
+ hdl/_version.py,sha256=ZKlmJ822TJ49YEqc2wCAMbrp81vFvzcFa9OTia84voM,413
  hdl/args/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/args/loss_args.py,sha256=s7YzSdd7IjD24rZvvOrxLLFqMZQb9YylxKeyelSdrTk,70
  hdl/controllers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -38,6 +38,11 @@ hdl/data/dataset/samplers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJW
  hdl/data/dataset/samplers/chiral.py,sha256=ZS83kg5e2gdHVGgIuCjCepDwk2SKqWDgJawH37oXy78,463
  hdl/data/dataset/seq/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/data/dataset/seq/rxn_dataset.py,sha256=jfXFlR3ITAf0KwUfIevzUZHnLBnFYrL69Cc81EMv0x0,1668
+ hdl/datasets/city_code.json,sha256=qnTL6ldpGnQanDXN3oitx12E6oiayaCHTh2Zi9RyQjM,60816
+ hdl/datasets/defined_BaseFeatures.fdef,sha256=5QhCEcu6fjSTXaTcZZ8-LSgf72_aJj_ykoDk82ZwVBI,7383
+ hdl/datasets/las.tsv,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/datasets/route_template.json,sha256=2qhkbtEZUrUod6PXCWXxAgQmU-jAC0yLcWGBBk2IwgE,3757
+ hdl/datasets/vocab.txt,sha256=cXdB1JDs2rY6C9BupRRY7w21xK4KN9SrxPxkN9CUXXQ,3524
  hdl/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/features/fp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/features/fp/features_generators.py,sha256=HbyS97i2I2mOcANdJMohs2okA1LlZmkG4ZIIX6Y9fr4,9017
@@ -45,6 +50,40 @@ hdl/features/graph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
  hdl/features/graph/featurization.py,sha256=QLbj33JsgO-OWarIC2HXQP7eMu8pd-GWmppZQj_tQ_k,10902
  hdl/features/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/features/utils/utils.py,sha256=aL4UAALblaw1w0AjK7MX8nSj9zwTmrp9CTLwJUX8ZtE,4225
+ hdl/ju/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/ju/setup.py,sha256=MB3rndQYt9QC-bdyGt81HYR0Rdr0l8DbAHktIuFMYU0,1725
+ hdl/jupyfuncs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/chem/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/chem/mol.py,sha256=FDb2L61JL6xkNK7bxXWVjAT-r-st7iMyQNoFezBvTUE,15294
+ hdl/jupyfuncs/chem/norm.py,sha256=NRjSP8P7f3Yhy1LmSpaV93qYgYis_qVG7HT0vOWeW1U,10186
+ hdl/jupyfuncs/chem/pdb_ext.py,sha256=VgkU34Y3na5Ri0QwW2Hh-QiE4j9DgVwMB49DL3JgAcQ,2710
+ hdl/jupyfuncs/chem/scaffold.py,sha256=hWhmsrtdjM_ihStFvTz7XIs27coOMDOvdGh5vAk-UsA,1025
+ hdl/jupyfuncs/chem/shape.py,sha256=FQmLxEkUOaWqlYi1CPm-8Ze5w_oOrmoeAzOx4UdYRtI,5807
+ hdl/jupyfuncs/chem/tokenizers.py,sha256=UtpgQdDCL3FF1gSYcMLWZZcwcJQSp2RXdG0fRYZDGmU,133
+ hdl/jupyfuncs/dbtools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/dbtools/pg.py,sha256=B4t1WAfGCmJsaQIm59M5xYdRPfGP-vrNS2SA8rMzlbM,943
+ hdl/jupyfuncs/dbtools/query_info.py,sha256=CorUpyopY-FO3mRXlCIjxD09_6nXaAMJR0NHvfqjcIw,4327
+ hdl/jupyfuncs/dl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/dl/cp.py,sha256=NHBkWBQ0eP6qEUXMwsVv5bI0p24CEsqFhwSJtKmmzGo,1816
+ hdl/jupyfuncs/dl/dataframe.py,sha256=Cqlkcv20QplZ4tGQdW9k-3CaoTaaqoyaWQ0mr96n3V0,1023
+ hdl/jupyfuncs/dl/fp.py,sha256=VYiW6-Q8lY2fDK9DU5_r6EmAyY5-VVv680-57h_MKPA,1025
+ hdl/jupyfuncs/dl/list.py,sha256=WcVoDoJU7LYAG0bp_3zcZF2pW9NfQ4LIIXvGGu32vzs,756
+ hdl/jupyfuncs/dl/model_utils.py,sha256=Xnt-a5MjE5NWghIu9Yss8zQ9JYhrv_Rbo5OPNC0MX80,2934
+ hdl/jupyfuncs/dl/tensor.py,sha256=vOUyi8Ymc_JKiqywLJUph5jiKPHoq2cUo73YJnf0voQ,4495
+ hdl/jupyfuncs/dl/uncs.py,sha256=j4YQE4BrBmZqOk9J_C9UKxbzrvt5pLPUZCCSIdi2FBk,2780
+ hdl/jupyfuncs/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/llm/extract.py,sha256=UjPhrgbuU7_2u3620lcjXQPHF2l22o_AEmAaH8UXIZA,4531
+ hdl/jupyfuncs/llm/openapi.py,sha256=pNBW0Jzt0JAZP8ZexgoQZVF118jrhKnBYheClcMu9bU,2687
+ hdl/jupyfuncs/network/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/network/proxy.py,sha256=foZm3gGFTPLeMNRfWs4QKNUEmlhtNTr_1GQvn0IgDVw,545
+ hdl/jupyfuncs/path/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/path/glob.py,sha256=fl0YDLDS9QI2WgDlBzDGlALbvkmPGcXp4UnbbQ17BOM,8300
+ hdl/jupyfuncs/path/strings.py,sha256=eZCXElh7pT0xwy6ZBqSu3frq3Xx8CN5TMuQzsxb0Sbw,2009
+ hdl/jupyfuncs/show/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/show/pbar.py,sha256=QzyHV9XEyk9U5oyonxLSwYb5pD09iVUw_atVlzxYBNQ,1005
+ hdl/jupyfuncs/show/plot.py,sha256=kH4UwTiRtjhTJJlR4gj6-mDgQxIH9PQCHVg1_54nIR8,6717
+ hdl/jupyfuncs/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/jupyfuncs/utils/wrappers.py,sha256=N8rwRFnHX9j2sR_-l-y-B2nmk39NFuu4YTntOvF3pMo,266
  hdl/layers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/layers/general/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/layers/general/gp.py,sha256=no1P6i2nCa539b0I5S6hd2mC8CeW0Ds726GM0swwlzc,525
@@ -89,7 +128,9 @@ hdl/utils/llm/embs.py,sha256=Tf0FOYrOFZp7qQpEPiSCXzlgyHH0X9HVTUtsup74a9E,7174
  hdl/utils/llm/extract.py,sha256=2sK_WJzmYIc8iuWaM9DA6Nw3_6q1O4lJ5pKpcZo-bBA,6512
  hdl/utils/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  hdl/utils/schedulers/norm_lr.py,sha256=bDwCmdEK-WkgxQMFBiMuchv8Mm7C0-GZJ6usm-PQk14,4461
- hjxdl-0.1.13.dist-info/METADATA,sha256=UlAuoHbbPfrnFywQuD4azAWgCHUGNpvR8snZAAhdnz4,543
- hjxdl-0.1.13.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
- hjxdl-0.1.13.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
- hjxdl-0.1.13.dist-info/RECORD,,
+ hdl/utils/weather/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ hdl/utils/weather/weather.py,sha256=H4f5wOojY28u_vtGkbPk_Nhe65R1Q9OZUPfVBY5AVAQ,2313
+ hjxdl-0.1.15.dist-info/METADATA,sha256=w7Iq4P0m1_G0ZDIzBCKGxm1dtHllYZklxQge8dM5SvE,543
+ hjxdl-0.1.15.dist-info/WHEEL,sha256=Mdi9PDNwEZptOjTlUcAth7XJDFtKrHYaQMPulZeBCiQ,91
+ hjxdl-0.1.15.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
+ hjxdl-0.1.15.dist-info/RECORD,,
{hjxdl-0.1.13.dist-info → hjxdl-0.1.15.dist-info}/WHEEL
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (72.1.0)
+ Generator: setuptools (73.0.1)
  Root-Is-Purelib: true
  Tag: py3-none-any