hjxdl-0.0.4-py3-none-any.whl → hjxdl-0.0.6-py3-none-any.whl
This diff shows the content changes between publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registry.
- hdl/_version.py +2 -2
- hdl/utils/llm/chat.py +33 -3
- {hjxdl-0.0.4.dist-info → hjxdl-0.0.6.dist-info}/METADATA +2 -1
- {hjxdl-0.0.4.dist-info → hjxdl-0.0.6.dist-info}/RECORD +6 -6
- {hjxdl-0.0.4.dist-info → hjxdl-0.0.6.dist-info}/WHEEL +0 -0
- {hjxdl-0.0.4.dist-info → hjxdl-0.0.6.dist-info}/top_level.txt +0 -0
hdl/_version.py CHANGED
hdl/utils/llm/chat.py CHANGED
@@ -16,14 +16,15 @@ class GGUF_M():
         server_ip: str = "127.0.0.1",
         server_port: int = 8000,
     ):
-        """Initialize the model
-        Here it does not manage chat histories.
+        """Initialize the model with the provided model path and optional parameters.
 
         Args:
             model_path (str): The path to the model.
             device (str, optional): The device to use for model initialization. Defaults to 'gpu'.
             generation_kwargs (dict, optional): Additional keyword arguments for model generation. Defaults to {}.
-
+            server_ip (str, optional): The IP address of the server. Defaults to "127.0.0.1".
+            server_port (int, optional): The port of the server. Defaults to 8000.
+        """
         # 从本地初始化模型
         super().__init__()
         self.generation_kwargs = generation_kwargs
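This hunk only expands the GGUF_M constructor docstring; the constructor body is untouched. For orientation, a minimal instantiation sketch built solely from the documented parameters (all values below are placeholders, not taken from the package):

from hdl.utils.llm.chat import GGUF_M

# Parameter names and defaults come from the docstring above; the values are illustrative.
model = GGUF_M(
    model_path="/path/to/model.gguf",       # placeholder path
    device="gpu",                           # documented default
    generation_kwargs={"max_tokens": 256},  # assumed generation option, not confirmed by the diff
    server_ip="127.0.0.1",
    server_port=8000,
)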
@@ -130,6 +131,15 @@ class OpenAI_M():
         server_ip: str = "172.28.1.2",
         server_port: int = 8000,
     ):
+        """Initialize the class with the specified parameters.
+
+        Args:
+            model_path (str, optional): Path to the model file. Defaults to None.
+            device (str, optional): Device to run the model on. Defaults to 'gpu'.
+            generation_kwargs (dict, optional): Additional keyword arguments for model generation. Defaults to {}.
+            server_ip (str, optional): IP address of the server. Defaults to "172.28.1.2".
+            server_port (int, optional): Port number of the server. Defaults to 8000.
+        """
         self.model_path = model_path
         self.server_ip = server_ip
         self.server_port = server_port
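As with GGUF_M, this hunk adds documentation only. A corresponding sketch for OpenAI_M, again using just the parameters named in the new docstring (values are illustrative):

from hdl.utils.llm.chat import OpenAI_M

# Only the parameter names and defaults are taken from the docstring; the values are made up.
client = OpenAI_M(
    model_path="my-model",   # defaults to None per the docstring
    server_ip="127.0.0.1",   # documented default is "172.28.1.2"
    server_port=8000,
)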
@@ -142,6 +152,16 @@ class OpenAI_M():
         # history: list = [],
         **kwargs: t.Any,
     ) -> str:
+        """Invoke the chatbot with the given prompt and return the response.
+
+        Args:
+            prompt (str): The prompt to provide to the chatbot.
+            stop (list[str], optional): List of strings that indicate the end of the conversation. Defaults to ["USER:", "ASSISTANT:"].
+            **kwargs: Additional keyword arguments to pass to the chatbot.
+
+        Returns:
+            str: The response generated by the chatbot.
+        """
         resp = chat_oai_invoke(
             base_url=self.base_url,
             model=self.model_path,
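The hunk shows only the new docstring, not the enclosing def line, so the method name is not visible here. Assuming the documented call is exposed as OpenAI_M.invoke (an assumption, not confirmed by this diff), usage would look roughly like:

from hdl.utils.llm.chat import OpenAI_M

client = OpenAI_M(server_ip="127.0.0.1", server_port=8000)  # illustrative values

# "invoke" is assumed from the docstring wording; the def line is outside the hunk.
answer = client.invoke(
    "Summarize what changed in hjxdl 0.0.6.",
    stop=["USER:", "ASSISTANT:"],  # documented default
)
print(answer)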
@@ -156,6 +176,16 @@ class OpenAI_M():
         # history: list = [],
         **kwargs: t.Any,
     ) -> str:
+        """Generate a response by streaming conversation with the OpenAI chat model.
+
+        Args:
+            prompt (str): The prompt to start the conversation.
+            stop (list[str], optional): List of strings that indicate when the conversation should stop. Defaults to ["USER:", "ASSISTANT:"].
+            **kwargs: Additional keyword arguments to pass to the chat model.
+
+        Returns:
+            str: The response generated by the chat model.
+        """
         resp = chat_oai_stream(
             base_url=self.base_url,
             model=self.model_path,
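Here too the def line falls outside the hunk, so treating the method as OpenAI_M.stream is an assumption. The docstring declares a plain str return, so this sketch handles the result as a complete string rather than an iterator:

from hdl.utils.llm.chat import OpenAI_M

client = OpenAI_M(server_ip="127.0.0.1", server_port=8000)  # illustrative values

# "stream" is assumed from the docstring; the underlying call is chat_oai_stream per the context lines.
reply = client.stream("Explain the difference between the invoke and stream paths.")
print(reply)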
{hjxdl-0.0.4.dist-info → hjxdl-0.0.6.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hjxdl
-Version: 0.0.4
+Version: 0.0.6
 Summary: A collection of functions for Jupyter notebooks
 Home-page: https://github.com/huluxiaohuowa/hdl
 Author: Jianxing Hu
@@ -10,6 +10,7 @@ Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Requires-Python: >=3.6
 Description-Content-Type: text/markdown
+Requires-Dist: jupyfuncs
 
 # DL framework by Jianxing
 
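The functional metadata changes are the version bump and the new jupyfuncs requirement. A quick standard-library check (Python 3.8+) of what an installed copy declares; nothing hjxdl-specific is assumed here:

from importlib.metadata import requires, version

print(version("hjxdl"))   # expected: "0.0.6" for this release
print(requires("hjxdl"))  # the list should now include "jupyfuncs"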
{hjxdl-0.0.4.dist-info → hjxdl-0.0.6.dist-info}/RECORD CHANGED
@@ -1,5 +1,5 @@
 hdl/__init__.py,sha256=3bp7HUNfj6gqhNMj_EsyToZqjpqggsv9ohT9Sghk2mA,23
-hdl/_version.py,sha256=
+hdl/_version.py,sha256=c6ZQWSJeXXzGZ3WoZWjkA-MiNkBFXMIRV9kZPo4MQ_M,411
 hdl/args/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/args/loss_args.py,sha256=s7YzSdd7IjD24rZvvOrxLLFqMZQb9YylxKeyelSdrTk,70
 hdl/controllers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -84,10 +84,10 @@ hdl/utils/database_tools/connect.py,sha256=KUnVG-8raifEJ_N0b3c8LkTTIfn9NIyw8LX6q
 hdl/utils/general/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/general/glob.py,sha256=8-RCnt6L297wMIfn34ZAMCsGCZUjHG3MGglGZI1cX0g,491
 hdl/utils/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hdl/utils/llm/chat.py,sha256=
+hdl/utils/llm/chat.py,sha256=UFMv_0KQoGLCim0YGQqneA_HUKzQA9R6K69_rTeBmKM,7022
 hdl/utils/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/schedulers/norm_lr.py,sha256=bDwCmdEK-WkgxQMFBiMuchv8Mm7C0-GZJ6usm-PQk14,4461
-hjxdl-0.0.
-hjxdl-0.0.
-hjxdl-0.0.
-hjxdl-0.0.
+hjxdl-0.0.6.dist-info/METADATA,sha256=X3_IpOR1UIG4a07CtXTAZjqJTF-uzO2dQw4Z-HvcrQA,550
+hjxdl-0.0.6.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
+hjxdl-0.0.6.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
+hjxdl-0.0.6.dist-info/RECORD,,
{hjxdl-0.0.4.dist-info → hjxdl-0.0.6.dist-info}/WHEEL
File without changes
{hjxdl-0.0.4.dist-info → hjxdl-0.0.6.dist-info}/top_level.txt
File without changes
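For reference, each RECORD entry above has the form path,sha256=<digest>,<size>, where the digest is the urlsafe base64 encoding of the file's SHA-256 hash with the trailing "=" padding stripped, per the wheel spec. A small helper to recompute such an entry for a local file (the commented example path is illustrative):

import base64
import hashlib

def record_entry(path: str) -> str:
    """Recompute a wheel RECORD line (path,sha256=<digest>,<size>) for a file on disk."""
    with open(path, "rb") as fh:
        data = fh.read()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode("ascii")
    return f"{path},sha256={digest},{len(data)}"

# Example (illustrative path): compare the output against the corresponding RECORD line.
# print(record_entry("hdl/utils/llm/chat.py"))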