hjxdl 0.1.85__py3-none-any.whl → 0.1.86__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hdl/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.1.85'
-__version_tuple__ = version_tuple = (0, 1, 85)
+__version__ = version = '0.1.86'
+__version_tuple__ = version_tuple = (0, 1, 86)
hdl/utils/llm/chat.py CHANGED
@@ -154,22 +154,19 @@ class OpenAI_M():
         stream: bool = True,
         **kwargs: t.Any,
     ):
-        """Get response from chat completion model.
+        """Get response from chatbot based on the provided prompt and optional images.
 
         Args:
-            prompt (str): The prompt text to generate a response for.
-            images (list, optional): List of image URLs to include in the prompt. Defaults to [].
+            prompt (str): The prompt to provide to the chatbot.
+            images (list, optional): List of images to include in the response. Defaults to [].
             image_keys (tuple, optional): Tuple containing keys for image data. Defaults to ("image", "image").
-            stop (list[str] | None, optional): List of strings to stop the conversation. Defaults to ["USER:", "ASSISTANT:"].
+            stop (list[str] | None, optional): List of strings that indicate the end of the conversation. Defaults to ["USER:", "ASSISTANT:"].
             model (str, optional): The model to use for generating the response. Defaults to "default_model".
-            stream (bool, optional): Whether to stream the response or not. Defaults to True.
-            **kwargs: Additional keyword arguments to pass to the chat completion API.
-
-        Yields:
-            str: The generated response content.
+            stream (bool, optional): Whether to stream the response. Defaults to True.
+            **kwargs: Additional keyword arguments to pass to the chatbot API.
 
         Returns:
-            str: The generated response content if stream is False.
+            dict: The response from the chatbot.
         """
         content = [
             {"type": "text", "text": prompt},
@@ -196,15 +193,64 @@ class OpenAI_M():
             model=model,
             **kwargs
         )
-        if not stream:
-            return response.choices[0].message.content
-        else:
-            for chunk in response:
-                content = chunk.choices[0].delta.content
-                if content:
-                    yield content
+        return response
 
     def invoke(
+        self,
+        *args,
+        **kwargs
+    ):
+        """Invoke the function with the given arguments and keyword arguments.
+
+        Args:
+            *args: Variable length argument list.
+            **kwargs: Arbitrary keyword arguments.
+
+        Returns:
+            str: The content of the first choice message in the response.
+        """
+        response = self.get_resp(*args, stream=False, **kwargs)
+        return response.choices[0].message.content
+
+    def stream(
+        self,
+        *args,
+        **kwargs
+    ):
+        """Stream content from the response in chunks.
+
+        Args:
+            *args: Variable length argument list.
+            **kwargs: Arbitrary keyword arguments.
+
+        Yields:
+            str: Content in chunks from the response.
+        """
+        response = self.get_resp(*args, stream=True, **kwargs)
+        for chunk in response:
+            content = chunk.choices[0].delta.content
+            if content:
+                yield content
+
+
+    def chat(self, *args, stream=True, **kwargs):
+        """Call either the stream or invoke method based on the value of the stream parameter.
+
+        Args:
+            *args: Variable length argument list.
+            stream (bool): A flag to determine whether to call the stream method (default is True).
+            **kwargs: Arbitrary keyword arguments.
+
+        Returns:
+            The result of calling either the stream or invoke method based on the value of the stream parameter.
+        """
+        if stream:
+            return self.stream(*args, **kwargs)
+        else:
+            return self.invoke(*args, **kwargs)
+
+
+    def invoke_response(
         self,
         prompt : str,
         images: list = [],
@@ -259,7 +305,7 @@ class OpenAI_M():
         return response.choices[0].message.content
 
 
-    def stream(
+    def stream_response(
        self,
        prompt : str,
        images: list = [],
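
The net effect of the chat.py changes above: get_resp() now returns the raw client response object, the old prompt-based invoke/stream helpers live on as invoke_response/stream_response, and the new invoke(), stream(), and chat() wrappers forward their arguments to get_resp(). A minimal usage sketch follows, assuming an OpenAI_M instance can be constructed for a reachable OpenAI-compatible endpoint (the constructor and its arguments are not part of this diff, so the construction below is a placeholder):

    # Placeholder construction; the real constructor arguments are not shown in this diff.
    llm = OpenAI_M()

    # invoke() calls get_resp(stream=False) and returns the full message text.
    answer = llm.invoke("Summarize the latest release notes.")
    print(answer)

    # stream() calls get_resp(stream=True) and yields content chunks as they arrive.
    for chunk in llm.stream("Tell me a short story."):
        print(chunk, end="", flush=True)

    # chat() dispatches to stream() by default, or to invoke() when stream=False.
    full_text = llm.chat("Summarize the story in one sentence.", stream=False)
    print(full_text)
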
{hjxdl-0.1.85.dist-info → hjxdl-0.1.86.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hjxdl
-Version: 0.1.85
+Version: 0.1.86
 Summary: A collection of functions for Jupyter notebooks
 Home-page: https://github.com/huluxiaohuowa/hdl
 Author: Jianxing Hu
{hjxdl-0.1.85.dist-info → hjxdl-0.1.86.dist-info}/RECORD RENAMED
@@ -1,5 +1,5 @@
 hdl/__init__.py,sha256=GffnD0jLJdhkd-vo989v40N90sQbofkayRBwxc6TVhQ,72
-hdl/_version.py,sha256=u6xmXDjp51Z7_S7NBNHDiPQDrERCBS10JMdkBL0D0x4,413
+hdl/_version.py,sha256=mZDLtoh_fZR4n83Vo2yyLdXtACoTaSE2Hm8mc_lRCCM,413
 hdl/args/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/args/loss_args.py,sha256=s7YzSdd7IjD24rZvvOrxLLFqMZQb9YylxKeyelSdrTk,70
 hdl/controllers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -127,7 +127,7 @@ hdl/utils/desc/template.py,sha256=a0UAkkKctt_EHY9UECsIIAwVkGPcM1Hr01HSkRMeIuw,12
 hdl/utils/general/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/general/glob.py,sha256=8-RCnt6L297wMIfn34ZAMCsGCZUjHG3MGglGZI1cX0g,491
 hdl/utils/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hdl/utils/llm/chat.py,sha256=apKakrwtApXbVp-3iILeJSZHWLlMGGHyvcTZD8NnMqs,14806
+hdl/utils/llm/chat.py,sha256=hkFfPHjTwTJwXS3Dvy7j3nqARHsKoWsqJasfDdCjqgY,16211
 hdl/utils/llm/embs.py,sha256=Tf0FOYrOFZp7qQpEPiSCXzlgyHH0X9HVTUtsup74a9E,7174
 hdl/utils/llm/extract.py,sha256=2sK_WJzmYIc8iuWaM9DA6Nw3_6q1O4lJ5pKpcZo-bBA,6512
 hdl/utils/llm/llama_chat.py,sha256=watcHGOaz-bv3x-yDucYlGk5f8FiqfFhwWogrl334fk,4387
@@ -136,7 +136,7 @@ hdl/utils/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
 hdl/utils/schedulers/norm_lr.py,sha256=bDwCmdEK-WkgxQMFBiMuchv8Mm7C0-GZJ6usm-PQk14,4461
 hdl/utils/weather/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hdl/utils/weather/weather.py,sha256=k11o6wM15kF8b9NMlEfrg68ak-SfSYLN3nOOflFUv-I,4381
-hjxdl-0.1.85.dist-info/METADATA,sha256=sQC_9QeM6FgZN7Pr5W6gUKlXX_qEpf4QyOho9Wi7rSo,903
-hjxdl-0.1.85.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
-hjxdl-0.1.85.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
-hjxdl-0.1.85.dist-info/RECORD,,
+hjxdl-0.1.86.dist-info/METADATA,sha256=PqxFzQa-kw7zcPAfn8_8lKyfbgLIdUonwSH-CXFyyP8,903
+hjxdl-0.1.86.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+hjxdl-0.1.86.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
+hjxdl-0.1.86.dist-info/RECORD,,
File without changes