hjxdl 0.1.25__py3-none-any.whl → 0.1.27__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hdl/_version.py +2 -2
- hdl/utils/database_tools/datetime.py +34 -0
- hdl/utils/desc/func_desc.py +19 -2
- hdl/utils/desc/template.py +2 -2
- hdl/utils/llm/chat.py +0 -113
- {hjxdl-0.1.25.dist-info → hjxdl-0.1.27.dist-info}/METADATA +4 -1
- {hjxdl-0.1.25.dist-info → hjxdl-0.1.27.dist-info}/RECORD +9 -8
- {hjxdl-0.1.25.dist-info → hjxdl-0.1.27.dist-info}/WHEEL +0 -0
- {hjxdl-0.1.25.dist-info → hjxdl-0.1.27.dist-info}/top_level.txt +0 -0
hdl/utils/database_tools/datetime.py
CHANGED
@@ -0,0 +1,34 @@
|
|
1
|
+
from geopy.geocoders import Nominatim
|
2
|
+
from timezonefinder import TimezoneFinder
|
3
|
+
from datetime import datetime
|
4
|
+
import pytz
|
5
|
+
|
6
|
+
def get_datetime_by_cityname(city):
    """Get the current date and time of a city based on its name.

    Geocodes the city name through the Nominatim API, resolves the IANA
    time zone from the resulting coordinates, and formats the current
    time in that zone.

    Args:
        city (str): The name of the city to get date and time for.

    Returns:
        str: Formatted date and time string in the format
            '%Y年%m月%d日 %H:%M:%S', or a Chinese error message when the
            city or its time zone cannot be resolved.
    """
    # Resolve the city name to geographic coordinates via the Nominatim API.
    # NOTE(review): this performs a network request; failures other than
    # "not found" (timeouts, rate limits) will propagate as exceptions.
    geolocator = Nominatim(user_agent="city_time_locator")
    location = geolocator.geocode(city)

    if not location:
        return f"无法找到城市 '{city}',请检查输入的城市名称。"

    # Look up the IANA time-zone name from the coordinates.
    tf = TimezoneFinder()
    timezone_name = tf.timezone_at(lng=location.longitude, lat=location.latitude)

    if not timezone_name:
        return f"无法找到城市 '{city}' 的时区。"

    # Get the current time in the city's time zone.
    timezone = pytz.timezone(timezone_name)
    city_time = datetime.now(timezone)

    # Return the formatted date-time string.
    return city_time.strftime('%Y年%m月%d日 %H:%M:%S')
|
hdl/utils/desc/func_desc.py
CHANGED
@@ -1,10 +1,10 @@
|
|
1
1
|
FN_DESC = {
|
2
2
|
"get_weather": """
|
3
3
|
## 函数名:get_weather
|
4
|
-
|
4
|
+
描述:在用户询问一个城市的天气时,调用此工具获得此城市的天气信息
|
5
5
|
参数:
|
6
6
|
# city (str): 城市名
|
7
|
-
|
7
|
+
返回值 (str):天气信息
|
8
8
|
需要返回的json
|
9
9
|
{
|
10
10
|
"function_name": "get_weather",
|
@@ -13,6 +13,23 @@ FN_DESC = {
|
|
13
13
|
"city": <city_name>
|
14
14
|
}
|
15
15
|
}
|
16
|
+
|
17
|
+
""",
|
18
|
+
"get_datetime_by_cityname": """
|
19
|
+
## 函数名:get_datetime_by_cityname
|
20
|
+
描述:在用户询问一个城市当前的日期或时间时,调用此工具可以获得此城市当前的日期和时间
|
21
|
+
参数:
|
22
|
+
# city (str): 城市名
|
23
|
+
返回值 (str):这个城市当前所在时区的日期和时间
|
24
|
+
需要返回的json
|
25
|
+
{
|
26
|
+
"function_name": "get_datetime_by_cityname",
|
27
|
+
"params":
|
28
|
+
{
|
29
|
+
"city": <city_name>
|
30
|
+
}
|
31
|
+
}
|
32
|
+
|
16
33
|
""",
|
17
34
|
"default": None
|
18
35
|
}
|
hdl/utils/desc/template.py
CHANGED
@@ -12,7 +12,7 @@ FN_TEMPLATE = """
|
|
12
12
|
}
|
13
13
|
|
14
14
|
### 函数:
|
15
|
-
以下是可用于与系统交互的函数列表,每个函数以 “##”
|
16
|
-
|
15
|
+
以下是可用于与系统交互的函数列表,每个函数以 “##” 作为标记开始,每个参数名会以 “#” 作为标记。
|
16
|
+
每个函数都有特定的参数和要求说明,确保仔细遵循每个功能的说明。根据最后用户的问题判断要执行的任务选择合适的一个函数。以说明中的JSON格式提供函数调用所需要的参数,其中参数的具体值从用户的提问中获取,并且不能带“<>”符号:
|
17
17
|
|
18
18
|
"""
|
hdl/utils/llm/chat.py
CHANGED
@@ -205,119 +205,6 @@ class GGUF_M(Llama):
|
|
205
205
|
# self.resps[-1] = "".join(self.resps[-1])
|
206
206
|
|
207
207
|
|
208
|
-
# class GGUF_M():
|
209
|
-
# def __init__(
|
210
|
-
# self,
|
211
|
-
# model_path :str,
|
212
|
-
# device: str='gpu',
|
213
|
-
# generation_kwargs: dict = {},
|
214
|
-
# server_ip: str = "127.0.0.1",
|
215
|
-
# server_port: int = 8000,
|
216
|
-
# ):
|
217
|
-
# """Initialize the model with the provided model path and optional parameters.
|
218
|
-
|
219
|
-
# Args:
|
220
|
-
# model_path (str): The path to the model.
|
221
|
-
# device (str, optional): The device to use for model initialization. Defaults to 'gpu'.
|
222
|
-
# generation_kwargs (dict, optional): Additional keyword arguments for model generation. Defaults to {}.
|
223
|
-
# server_ip (str, optional): The IP address of the server. Defaults to "127.0.0.1".
|
224
|
-
# server_port (int, optional): The port of the server. Defaults to 8000.
|
225
|
-
# """
|
226
|
-
# # 从本地初始化模型
|
227
|
-
# # super().__init__()
|
228
|
-
# self.generation_kwargs = generation_kwargs
|
229
|
-
# print("正在从本地加载模型...")
|
230
|
-
# if device == 'cpu':
|
231
|
-
# self.model = Llama(
|
232
|
-
# model_path=model_path,
|
233
|
-
# n_threads=self.generation_kwargs['num_threads'],
|
234
|
-
# n_ctx=self.generation_kwargs['max_context_length'],
|
235
|
-
# )
|
236
|
-
# else:
|
237
|
-
# self.model = Llama(
|
238
|
-
# model_path=model_path,
|
239
|
-
# n_threads=self.generation_kwargs['num_threads'],
|
240
|
-
# n_ctx=self.generation_kwargs['max_context_length'],
|
241
|
-
# n_gpu_layers=-1,
|
242
|
-
# flash_attn=True
|
243
|
-
# )
|
244
|
-
|
245
|
-
# print("完成本地模型的加载")
|
246
|
-
|
247
|
-
# def invoke(
|
248
|
-
# self,
|
249
|
-
# prompt : str,
|
250
|
-
# stop: list[str] | None = ["USER:", "ASSISTANT:"],
|
251
|
-
# # history: list = [],
|
252
|
-
# **kwargs: t.Any,
|
253
|
-
# ) -> str:
|
254
|
-
# """Invoke the model to generate a response based on the given prompt.
|
255
|
-
|
256
|
-
# Args:
|
257
|
-
# prompt (str): The prompt to be used for generating the response.
|
258
|
-
# stop (list[str], optional): List of strings that indicate when the model should stop generating the response. Defaults to ["USER:", "ASSISTANT:"].
|
259
|
-
# **kwargs: Additional keyword arguments to be passed to the model.
|
260
|
-
|
261
|
-
# Returns:
|
262
|
-
# str: The generated response based on the prompt.
|
263
|
-
# """
|
264
|
-
# prompt_final = f"USER:\n{prompt}\nASSISTANT:\n"
|
265
|
-
|
266
|
-
# result = self.model.create_completion(
|
267
|
-
# prompt_final,
|
268
|
-
# repeat_penalty=self.generation_kwargs["repetition_penalty"],
|
269
|
-
# max_tokens=self.generation_kwargs["max_new_tokens"],
|
270
|
-
# stop=stop,
|
271
|
-
# echo=False,
|
272
|
-
# temperature=self.generation_kwargs["temperature"],
|
273
|
-
# mirostat_mode = 2,
|
274
|
-
# mirostat_tau=4.0,
|
275
|
-
# mirostat_eta=1.1
|
276
|
-
# )
|
277
|
-
# resp = result['choices'][0]['text']
|
278
|
-
# # history.append(
|
279
|
-
# # [prompt, resp]
|
280
|
-
# # )
|
281
|
-
# return resp
|
282
|
-
|
283
|
-
# def stream(
|
284
|
-
# self,
|
285
|
-
# prompt: str,
|
286
|
-
# stop: list[str] | None = ["USER:", "ASSISTANT:"],
|
287
|
-
# # history: list = [],
|
288
|
-
# **kwargs: t.Any,
|
289
|
-
# ):
|
290
|
-
# """Generate text responses based on the given prompt using the model.
|
291
|
-
|
292
|
-
# Args:
|
293
|
-
# prompt (str): The prompt to generate text responses.
|
294
|
-
# stop (list[str], optional): List of strings to stop the generation. Defaults to ["USER:", "ASSISTANT:"].
|
295
|
-
# **kwargs: Additional keyword arguments for the model.
|
296
|
-
|
297
|
-
# Yields:
|
298
|
-
# str: Text responses generated by the model based on the prompt.
|
299
|
-
# """
|
300
|
-
# prompt = f"USER:\n{prompt}\nASSISTANT:\n"
|
301
|
-
# output = self.model.create_completion(
|
302
|
-
# prompt,
|
303
|
-
# stream=True,
|
304
|
-
# repeat_penalty=self.generation_kwargs["repetition_penalty"],
|
305
|
-
# max_tokens=self.generation_kwargs["max_new_tokens"],
|
306
|
-
# stop=stop,
|
307
|
-
# echo=False,
|
308
|
-
# temperature=self.generation_kwargs["temperature"],
|
309
|
-
# mirostat_mode = 2,
|
310
|
-
# mirostat_tau=4.0,
|
311
|
-
# mirostat_eta=1.1
|
312
|
-
# )
|
313
|
-
# # history.append([])
|
314
|
-
# for chunk in output:
|
315
|
-
# item = chunk['choices'][0]['text']
|
316
|
-
# # self.resps[-1].append(item)
|
317
|
-
# yield chunk['choices'][0]['text']
|
318
|
-
# # self.resps[-1] = "".join(self.resps[-1])
|
319
|
-
|
320
|
-
|
321
208
|
class OpenAI_M():
|
322
209
|
def __init__(
|
323
210
|
self,
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: hjxdl
|
3
|
-
Version: 0.1.25
|
3
|
+
Version: 0.1.27
|
4
4
|
Summary: A collection of functions for Jupyter notebooks
|
5
5
|
Home-page: https://github.com/huluxiaohuowa/hdl
|
6
6
|
Author: Jianxing Hu
|
@@ -14,6 +14,9 @@ Requires-Dist: beautifulsoup4
|
|
14
14
|
Requires-Dist: openai
|
15
15
|
Requires-Dist: tqdm
|
16
16
|
Requires-Dist: sentence-transformers
|
17
|
+
Requires-Dist: geopy
|
18
|
+
Requires-Dist: timezonefinder
|
19
|
+
Requires-Dist: pytz
|
17
20
|
|
18
21
|
# DL framework by Jianxing
|
19
22
|
|
@@ -1,5 +1,5 @@
|
|
1
1
|
hdl/__init__.py,sha256=GffnD0jLJdhkd-vo989v40N90sQbofkayRBwxc6TVhQ,72
|
2
|
-
hdl/_version.py,sha256=
|
2
|
+
hdl/_version.py,sha256=tHeS3HtGOzmDSunNF55dcMNc08kUUPbj6IDY1JKJNiY,413
|
3
3
|
hdl/args/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
4
4
|
hdl/args/loss_args.py,sha256=s7YzSdd7IjD24rZvvOrxLLFqMZQb9YylxKeyelSdrTk,70
|
5
5
|
hdl/controllers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
@@ -119,20 +119,21 @@ hdl/utils/chemical_tools/query_info.py,sha256=wyQXwKSY_gBGVUNvYggHpYBtOLAtpYKq3P
|
|
119
119
|
hdl/utils/chemical_tools/sdf.py,sha256=71PEqU0H885L6IeGHEa6n7ZLZThvMsZOVLuFG2wnoyM,542
|
120
120
|
hdl/utils/database_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
121
121
|
hdl/utils/database_tools/connect.py,sha256=KUnVG-8raifEJ_N0b3c8LkTTIfn9NIyw8LX6qvpA3YU,723
|
122
|
+
hdl/utils/database_tools/datetime.py,sha256=DI72G_FZ_iOs4QLSakIwU_K5IviPvn4DR2abUG1rBEA,1124
|
122
123
|
hdl/utils/desc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
123
|
-
hdl/utils/desc/func_desc.py,sha256=
|
124
|
-
hdl/utils/desc/template.py,sha256=
|
124
|
+
hdl/utils/desc/func_desc.py,sha256=Bzx0faxgaiNBpzJenPSi98KNGfNTELOHYBQlT-45g0c,824
|
125
|
+
hdl/utils/desc/template.py,sha256=9nvsBa8uPsDLrp78ulK_romT30hpGr19cnaoSabBst8,1121
|
125
126
|
hdl/utils/general/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
126
127
|
hdl/utils/general/glob.py,sha256=8-RCnt6L297wMIfn34ZAMCsGCZUjHG3MGglGZI1cX0g,491
|
127
128
|
hdl/utils/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
128
|
-
hdl/utils/llm/chat.py,sha256=
|
129
|
+
hdl/utils/llm/chat.py,sha256=LH595PmR0MfFPD5XKff93FOxdL2aQQSt6VjR9nyksqU,13414
|
129
130
|
hdl/utils/llm/embs.py,sha256=Tf0FOYrOFZp7qQpEPiSCXzlgyHH0X9HVTUtsup74a9E,7174
|
130
131
|
hdl/utils/llm/extract.py,sha256=2sK_WJzmYIc8iuWaM9DA6Nw3_6q1O4lJ5pKpcZo-bBA,6512
|
131
132
|
hdl/utils/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
132
133
|
hdl/utils/schedulers/norm_lr.py,sha256=bDwCmdEK-WkgxQMFBiMuchv8Mm7C0-GZJ6usm-PQk14,4461
|
133
134
|
hdl/utils/weather/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
134
135
|
hdl/utils/weather/weather.py,sha256=k11o6wM15kF8b9NMlEfrg68ak-SfSYLN3nOOflFUv-I,4381
|
135
|
-
hjxdl-0.1.
|
136
|
-
hjxdl-0.1.
|
137
|
-
hjxdl-0.1.
|
138
|
-
hjxdl-0.1.
|
136
|
+
hjxdl-0.1.27.dist-info/METADATA,sha256=QgqnqWz5dj_T1FP9diDT6vC3JDhgjR8-D3X5DBtbuPc,698
|
137
|
+
hjxdl-0.1.27.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91
|
138
|
+
hjxdl-0.1.27.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
|
139
|
+
hjxdl-0.1.27.dist-info/RECORD,,
|
File without changes
|
File without changes
|