hjxdl 0.1.92__py3-none-any.whl → 0.1.94__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hdl/_version.py +2 -2
- hdl/utils/llm/chat.py +75 -1
- {hjxdl-0.1.92.dist-info → hjxdl-0.1.94.dist-info}/METADATA +1 -1
- {hjxdl-0.1.92.dist-info → hjxdl-0.1.94.dist-info}/RECORD +6 -6
- {hjxdl-0.1.92.dist-info → hjxdl-0.1.94.dist-info}/WHEEL +0 -0
- {hjxdl-0.1.92.dist-info → hjxdl-0.1.94.dist-info}/top_level.txt +0 -0
hdl/_version.py
CHANGED
hdl/utils/llm/chat.py
CHANGED
@@ -1,6 +1,7 @@
|
|
1
1
|
import typing as t
|
2
2
|
import asyncio
|
3
3
|
from concurrent.futures import ProcessPoolExecutor
|
4
|
+
import subprocess
|
4
5
|
|
5
6
|
|
6
7
|
from openai import OpenAI
|
@@ -382,4 +383,77 @@ class OpenAI_M():
|
|
382
383
|
return result
|
383
384
|
except Exception as e:
|
384
385
|
print(e)
|
385
|
-
return ""
|
386
|
+
return ""
|
387
|
+
|
388
|
+
|
389
|
+
class MMChatter():
    """Thin wrapper around a local multimodal llama.cpp-style CLI binary.

    Builds a command line from the stored binary/model/projector paths and a
    caller-supplied prompt + image, runs it via ``subprocess.run`` (no shell),
    and returns the model's reply parsed from stdout.
    """

    def __init__(
        self,
        cli_dir: str,
        model_dir: str,
        mmproj_dir: str,
    ) -> None:
        """Initializes the class with the provided directories.

        Args:
            cli_dir (str): Path to the CLI executable.
            model_dir (str): Path to the model file.
            mmproj_dir (str): Path to the multimodal projector file.

        Returns:
            None
        """
        self.cli_dir = cli_dir
        self.model_dir = model_dir
        self.mmproj_dir = mmproj_dir

    def get_resp(
        self,
        prompt: str,
        image: str,
        temp: float = 0.1,
        top_p: float = 0.8,
        top_k: int = 100,
        repeat_penalty: float = 1.05
    ) -> str:
        """Get a response from the model for the given prompt and image.

        Args:
            prompt (str): The prompt to provide to the model.
            image (str): Path to the image used as model input.
            temp (float, optional): Sampling temperature. Defaults to 0.1.
            top_p (float, optional): Top-p sampling parameter. Defaults to 0.8.
            top_k (int, optional): Top-k sampling parameter. Defaults to 100.
            repeat_penalty (float, optional): Repeat penalty. Defaults to 1.05.

        Returns:
            str: The model's response (last stdout line), or "" when the CLI
                produced no output (e.g. bad paths or a CLI-level failure).
        """
        # Argument-list form (shell=False) avoids shell-injection issues with
        # user-supplied prompt/image strings.
        command = [
            self.cli_dir,
            "-m", self.model_dir,
            "--mmproj", self.mmproj_dir,
            "--image", image,
            "--temp", f"{temp}",
            "--top-p", f"{top_p}",
            "--top-k", f"{top_k}",
            "--repeat-penalty", f"{repeat_penalty}",
            "-p", prompt
        ]

        # Capture stdout and stderr separately; the CLI's verbose logging is
        # expected on stderr, the answer on stdout.
        result = subprocess.run(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True
        )

        output_lines = result.stdout.splitlines()
        if not output_lines:
            # Original code did output.splitlines()[-1] and raised IndexError
            # here. Surface the diagnostics and fall back to the file's usual
            # error convention (print + empty string).
            print(result.stderr)
            return ""

        # NOTE(review): assumes the last stdout line is the model's reply —
        # confirm against the CLI's actual output format.
        return output_lines[-1]
@@ -1,5 +1,5 @@
|
|
1
1
|
hdl/__init__.py,sha256=GffnD0jLJdhkd-vo989v40N90sQbofkayRBwxc6TVhQ,72
|
2
|
-
hdl/_version.py,sha256=
|
2
|
+
hdl/_version.py,sha256=03clbULp3b9RrCR0DpkVeYj6R4aCnFJBab4aegAAlio,413
|
3
3
|
hdl/args/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
4
4
|
hdl/args/loss_args.py,sha256=s7YzSdd7IjD24rZvvOrxLLFqMZQb9YylxKeyelSdrTk,70
|
5
5
|
hdl/controllers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
@@ -127,7 +127,7 @@ hdl/utils/desc/template.py,sha256=a0UAkkKctt_EHY9UECsIIAwVkGPcM1Hr01HSkRMeIuw,12
|
|
127
127
|
hdl/utils/general/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
128
128
|
hdl/utils/general/glob.py,sha256=8-RCnt6L297wMIfn34ZAMCsGCZUjHG3MGglGZI1cX0g,491
|
129
129
|
hdl/utils/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
130
|
-
hdl/utils/llm/chat.py,sha256=
|
130
|
+
hdl/utils/llm/chat.py,sha256=VFTyXodWse63M4NLFznz6JEz6_5hDFYy47lY8cq4gGA,14870
|
131
131
|
hdl/utils/llm/embs.py,sha256=Tf0FOYrOFZp7qQpEPiSCXzlgyHH0X9HVTUtsup74a9E,7174
|
132
132
|
hdl/utils/llm/extract.py,sha256=2sK_WJzmYIc8iuWaM9DA6Nw3_6q1O4lJ5pKpcZo-bBA,6512
|
133
133
|
hdl/utils/llm/llama_chat.py,sha256=watcHGOaz-bv3x-yDucYlGk5f8FiqfFhwWogrl334fk,4387
|
@@ -136,7 +136,7 @@ hdl/utils/schedulers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
|
|
136
136
|
hdl/utils/schedulers/norm_lr.py,sha256=bDwCmdEK-WkgxQMFBiMuchv8Mm7C0-GZJ6usm-PQk14,4461
|
137
137
|
hdl/utils/weather/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
138
138
|
hdl/utils/weather/weather.py,sha256=k11o6wM15kF8b9NMlEfrg68ak-SfSYLN3nOOflFUv-I,4381
|
139
|
-
hjxdl-0.1.
|
140
|
-
hjxdl-0.1.
|
141
|
-
hjxdl-0.1.
|
142
|
-
hjxdl-0.1.
|
139
|
+
hjxdl-0.1.94.dist-info/METADATA,sha256=tH-0-OFejzzH5MiJoLVpWtnlLlpVtLAY5DtDrkk-_dY,866
|
140
|
+
hjxdl-0.1.94.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
|
141
|
+
hjxdl-0.1.94.dist-info/top_level.txt,sha256=-kxwTM5JPhylp06z3zAVO3w6_h7wtBfBo2zgM6YZoTk,4
|
142
|
+
hjxdl-0.1.94.dist-info/RECORD,,
|
File without changes
|
File without changes
|