lollms-client 0.14.0__py3-none-any.whl → 0.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of lollms-client might be problematic.

@@ -28,7 +28,8 @@ class LollmsClient():
  def __init__(self,
  # LLM Binding Parameters
  binding_name: str = "lollms",
- host_address: Optional[str] = None, # Shared host address default for all bindings if not specified
+ host_address: Optional[str] = None, # Shared host address (for service based bindings) default for all bindings if not specified
+ models_path: Optional[str] = None, # Shared models folder path (for local file based bindings) default for all bindings if not specified
  model_name: str = "",
  llm_bindings_dir: Path = Path(__file__).parent / "llm_bindings",
  llm_binding_config: Optional[Dict[str, any]] = None, # Renamed for clarity
@@ -69,6 +70,7 @@ class LollmsClient():
  Args:
  binding_name (str): Name of the primary LLM binding (e.g., "lollms", "ollama").
  host_address (Optional[str]): Default host address for all services. Overridden by binding defaults if None.
+ models_path (Optional[str]): Default models folder path. Overridden by binding defaults if None.
  model_name (str): Default model name for the LLM binding.
  llm_bindings_dir (Path): Directory for LLM binding implementations.
  llm_binding_config (Optional[Dict]): Additional config for the LLM binding.
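
As a quick illustration of the two shared defaults, here is a minimal, hedged sketch of constructing the client against a service-based binding; the binding name, host, and model below are placeholders rather than values taken from this release (a local, file-based example using models_path follows the RECORD changes further down):

```python
from lollms_client import LollmsClient

# Hedged sketch: "ollama", the host URL, and the model name are illustrative
# placeholders. host_address is the shared default for service-based bindings;
# models_path (not needed here) would be the shared default for local bindings.
client = LollmsClient(
    binding_name="ollama",
    host_address="http://localhost:11434",
    model_name="mistral",
)
```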
@@ -102,6 +104,7 @@ class LollmsClient():
  ValueError: If the primary LLM binding cannot be created.
  """
  self.host_address = host_address # Store initial preference
+ self.models_path = models_path
  self.service_key = service_key
  self.verify_ssl_certificate = verify_ssl_certificate
 
@@ -110,6 +113,7 @@ class LollmsClient():
  self.binding = self.binding_manager.create_binding(
  binding_name=binding_name,
  host_address=host_address, # Pass initial host preference
+ models_path=models_path,
  model_name=model_name,
  service_key=service_key,
  verify_ssl_certificate=verify_ssl_certificate,
@@ -123,8 +127,6 @@ class LollmsClient():
 
  # Determine the effective host address (use LLM binding's if initial was None)
  effective_host_address = self.host_address
- if effective_host_address is None and self.binding:
- effective_host_address = self.binding.host_address
 
  # --- Modality Binding Setup ---
  self.tts_binding_manager = LollmsTTSBindingManager(tts_bindings_dir)
@@ -440,14 +442,12 @@ class LollmsClient():
  Uses the underlying LLM binding via `generate_text`.
  """
  response_full = ""
- full_prompt = f"""{self.system_full_header}Act as a code generation assistant that generates code from user prompt.
- {self.user_full_header}
- {prompt}
- """
+ system_prompt = f"""Act as a code generation assistant that generates code from user prompt."""
+
  if template:
- full_prompt += "Here is a template of the answer:\n"
+ system_prompt += "Here is a template of the answer:\n"
  if code_tag_format=="markdown":
- full_prompt += f"""You must answer with the code placed inside the markdown code tag like this:
+ system_prompt += f"""You must answer with the code placed inside the markdown code tag like this:
  ```{language}
  {template}
  ```
@@ -456,7 +456,7 @@ The code tag is mandatory.
  Don't forget encapsulate the code inside a markdown code tag. This is mandatory.
  """
  elif code_tag_format=="html":
- full_prompt +=f"""You must answer with the code placed inside the html code tag like this:
+ system_prompt +=f"""You must answer with the code placed inside the html code tag like this:
  <code language="{language}">
  {template}
  </code>
@@ -464,13 +464,13 @@ Don't forget encapsulate the code inside a markdown code tag. This is mandatory.
  The code tag is mandatory.
  Don't forget encapsulate the code inside a html code tag. This is mandatory.
  """
- full_prompt += f"""Do not split the code in multiple tags.
- {self.ai_full_header}"""
+ system_prompt += f"""Do not split the code in multiple tags."""
 
  # Use generate_text which handles images internally
  response = self.generate_text(
- full_prompt,
+ prompt,
  images=images,
+ system_prompt=system_prompt,
  n_predict=max_size,
  temperature=temperature,
  top_k=top_k,
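
The practical effect of this refactor is that the assistant instructions now travel through generate_text's system_prompt argument while the user's request is passed through unchanged. A hedged sketch of calling generate_text directly in the same style; the client setup and values are placeholders, while the parameter names are the ones visible in the hunk above:

```python
from lollms_client import LollmsClient

# Placeholder binding/host/model; prompt, system_prompt, n_predict and
# temperature are the generate_text parameters visible in the diff above.
client = LollmsClient(binding_name="ollama",
                      host_address="http://localhost:11434",
                      model_name="mistral")

response = client.generate_text(
    "Write a function that reverses a string.",
    system_prompt="Act as a code generation assistant that generates code from user prompt.",
    n_predict=512,
    temperature=0.1,
)
print(response)
```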
@@ -509,14 +509,12 @@ Don't forget encapsulate the code inside a html code tag. This is mandatory.
  Handles potential continuation if the code block is incomplete.
  """
 
- full_prompt = f"""{self.system_full_header}Act as a code generation assistant that generates code from user prompt.
- {self.user_full_header}
- {prompt}
- """
+ system_prompt = f"""{self.system_full_header}Act as a code generation assistant that generates code from user prompt."""
+
  if template:
- full_prompt += "Here is a template of the answer:\n"
+ system_prompt += "Here is a template of the answer:\n"
  if code_tag_format=="markdown":
- full_prompt += f"""You must answer with the code placed inside the markdown code tag like this:
+ system_prompt += f"""You must answer with the code placed inside the markdown code tag like this:
  ```{language}
  {template}
  ```
@@ -525,7 +523,7 @@ The code tag is mandatory.
  Don't forget encapsulate the code inside a markdown code tag. This is mandatory.
  """
  elif code_tag_format=="html":
- full_prompt +=f"""You must answer with the code placed inside the html code tag like this:
+ system_prompt +=f"""You must answer with the code placed inside the html code tag like this:
  <code language="{language}">
  {template}
  </code>
@@ -533,13 +531,14 @@ Don't forget encapsulate the code inside a markdown code tag. This is mandatory.
  The code tag is mandatory.
  Don't forget encapsulate the code inside a html code tag. This is mandatory.
  """
- full_prompt += f"""You must return a single code tag.
+ system_prompt += f"""You must return a single code tag.
  Do not split the code in multiple tags.
  {self.ai_full_header}"""
 
  response = self.generate_text(
- full_prompt,
+ prompt,
  images=images,
+ system_prompt=system_prompt,
  n_predict=max_size,
  temperature=temperature,
  top_k=top_k,
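
Since the system prompt insists on a single mandatory code tag, callers typically need to pull the code body back out of the reply. This is not the library's own extraction logic, just an illustrative helper showing why the single-tag requirement matters for the markdown format:

```python
import re
from typing import Optional

def extract_code_block(response: str) -> Optional[str]:
    """Illustrative helper (not part of lollms_client): return the body of the
    first markdown code fence in a model reply, or None if no fence is found."""
    match = re.search(r"```[^\n]*\n(.*?)```", response, re.DOTALL)
    return match.group(1).rstrip() if match else None

reply = "Here is the code:\n```python\nprint('hello')\n```"
print(extract_code_block(reply))  # -> print('hello')
```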
@@ -19,11 +19,7 @@ class LollmsLLMBinding(ABC):
  Initialize the LollmsLLMBinding base class.
 
  Args:
- host_address (Optional[str]): The host address for the service. Defaults to None.
- model_name (str): The name of the model to use. Defaults to empty string.
- service_key (Optional[str]): Authentication key for the service. Defaults to None.
- verify_ssl_certificate (bool): Whether to verify SSL certificates. Defaults to True.
- default_completion_format (ELF_COMPLETION_FORMAT): The completion format (Chat or Instruct)
+ binding_name (Optional[str]): The name of the bindingto be used
  """
  self.binding_name=binding_name
  self.model_name = None #Must be set by the instance
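
With the base-class constructor now only recording the binding name, concrete bindings are expected to set model_name (and any service- or file-related state) themselves, per the comment above. A hedged sketch of what a custom binding's __init__ might look like; the abstract methods of LollmsLLMBinding are not shown in this diff and are omitted here, so this class is a skeleton, not a working binding:

```python
from typing import Optional
from lollms_client.lollms_llm_binding import LollmsLLMBinding

class MyCustomBinding(LollmsLLMBinding):
    """Skeleton sketch: a real binding must also implement the abstract
    methods of LollmsLLMBinding, which this diff does not show."""

    def __init__(self,
                 host_address: Optional[str] = None,
                 model_name: str = "",
                 **kwargs):
        # The base class now only stores the binding name.
        super().__init__(binding_name="my_custom")
        self.host_address = host_address
        # "Must be set by the instance", per the base-class comment above.
        self.model_name = model_name
```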
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lollms_client
- Version: 0.14.0
+ Version: 0.15.0
  Summary: A client library for LoLLMs generate endpoint
  Author-email: ParisNeo <parisneoai@gmail.com>
  License: Apache Software License
@@ -1,10 +1,10 @@
  examples/simple_text_gen_test.py,sha256=RoX9ZKJjGMujeep60wh5WT_GoBn0O9YKJY6WOy-ZmOc,8710
- examples/simple_text_gen_with_image_test.py,sha256=Euv53jbKTVJDvs854lgJvA5F-iRnAATLxAklig24ots,8534
+ examples/simple_text_gen_with_image_test.py,sha256=V5dc6iLScpsVGDAd1xxbWMsdWqEZHbupyc6VaxH9S6o,9263
  examples/text_2_audio.py,sha256=MfL4AH_NNwl6m0I0ywl4BXRZJ0b9Y_9fRqDIe6O-Sbw,3523
  examples/text_2_image.py,sha256=Ri7lQ-GW54YWQh2eofcaN6LpwFoorbpJsJffrcXl3cg,6415
  examples/text_and_image_2_audio.py,sha256=QLvSsLff8VZZa7k7K1EFGlPpQWZy07zM4Fnli5btAl0,2074
- examples/text_gen.py,sha256=IejpNmIlsfz3WpJg8IRm5X6F06JKd7h_GuonUxTITx8,758
- examples/text_gen_system_prompt.py,sha256=wumwZ09WZkaK0tQ74KaZmfsYXcmjZIlsdim_P1aJmeA,910
+ examples/text_gen.py,sha256=O3wuvsbEJMRSjIWBV828BXzIYtED-VgR85tXCqBBtZY,930
+ examples/text_gen_system_prompt.py,sha256=jRQeGe1IVu_zRHX09CFiDYi7WrK9Zd5FlMqC_gnVH-g,1018
  examples/article_summary/article_summary.py,sha256=CR8mCBNcZEVCR-q34uOmrJyMlG-xk4HkMbsV-TOZEnk,1978
  examples/deep_analyze/deep_analyse.py,sha256=fZNmDrfEAuxEAfdbjAgJYIh1k6wbiuZ4RvwHRvtyUs8,971
  examples/deep_analyze/deep_analyze_multiple_files.py,sha256=fOryShA33P4IFxcxUDe-nJ2kW0v9w9yW8KsToS3ETl8,1032
@@ -13,13 +13,13 @@ examples/personality_test/chat_test.py,sha256=o2jlpoddFc-T592iqAiA29xk3x27KsdK5D
  examples/personality_test/chat_with_aristotle.py,sha256=4X_fwubMpd0Eq2rCReS2bgVlUoAqJprjkLXk2Jz6pXU,1774
  examples/personality_test/tesks_test.py,sha256=7LIiwrEbva9WWZOLi34fsmCBN__RZbPpxoUOKA_AtYk,1924
  examples/test_local_models/local_chat.py,sha256=slakja2zaHOEAUsn2tn_VmI4kLx6luLBrPqAeaNsix8,456
- lollms_client/__init__.py,sha256=PHFRY4RskAaiectooBrSCrxd6UGpZkdTqMHmXM26VnQ,823
+ lollms_client/__init__.py,sha256=-V2JR_LrDutOtLId2GLedsNJ-9m01NLB7izv9sjXDXM,823
  lollms_client/lollms_config.py,sha256=goEseDwDxYJf3WkYJ4IrLXwg3Tfw73CXV2Avg45M_hE,21876
- lollms_client/lollms_core.py,sha256=KkeKjQZVeUjdsQjxw2bygUxq1gXlPNnYiyxdWnwA4L8,78073
+ lollms_client/lollms_core.py,sha256=YJqvURx8nWQfnvorqGleR5qkNTlk_u-9mLeU07j4FjY,78296
  lollms_client/lollms_discussion.py,sha256=9b83m0D894jwpgssWYTQHbVxp1gJoI-J947Ui_dRXII,2073
  lollms_client/lollms_functions.py,sha256=p8SFtmEPqvVCsIz2fZ5HxyOHaxjrAo5c12uTzJnb6m8,3594
  lollms_client/lollms_js_analyzer.py,sha256=01zUvuO2F_lnUe_0NLxe1MF5aHE1hO8RZi48mNPv-aw,8361
- lollms_client/lollms_llm_binding.py,sha256=7xvtLsFQYqFKS7m0BQQMvVq0XXZWZeGlGuv30mi1dF8,7408
+ lollms_client/lollms_llm_binding.py,sha256=sRtCUvXLdlGpaMzAYpqzDdwbCAHvwK5GFEqk3_-WxCU,7004
  lollms_client/lollms_python_analyzer.py,sha256=7gf1fdYgXCOkPUkBAPNmr6S-66hMH4_KonOMsADASxc,10246
  lollms_client/lollms_stt_binding.py,sha256=ovmpFF0fnmPC9VNi1-rxAJA8xI4JZDUBh_YwdtoTx28,5818
  lollms_client/lollms_tasks.py,sha256=Tgqces03gPTHFJCcPaeN9vBCsil3SSJX7nQAjCQ2-yg,34393
@@ -30,11 +30,14 @@ lollms_client/lollms_ttv_binding.py,sha256=u-gLIe22tbu4YsKA5RTyUT7iBlKxPXDmoQzcc
  lollms_client/lollms_types.py,sha256=cfc1sremM8KR4avkYX99fIVkkdRvXErrCWKGjLrgv50,2723
  lollms_client/lollms_utilities.py,sha256=YAgamfp0pBVApR68AHKjhp1lh6isMNF8iadwWLl63c0,7045
  lollms_client/llm_bindings/__init__.py,sha256=9sWGpmWSSj6KQ8H4lKGCjpLYwhnVdL_2N7gXCphPqh4,14
+ lollms_client/llm_bindings/llamacpp/__init__.py,sha256=60QIeQBTx7iteeUe7bq6OM_xCIM3aVYPj3KJej9COJY,55076
  lollms_client/llm_bindings/lollms/__init__.py,sha256=a36AMPFEf3xK4zx1M_L9PC-3-b0iiDf7eyLkknPjgaY,12356
- lollms_client/llm_bindings/ollama/__init__.py,sha256=MemSA20Zivn-kfP11JPA9FHigC1U2CGsJ1FaDtUFUUM,26574
+ lollms_client/llm_bindings/ollama/__init__.py,sha256=eHRwOcon61r1ISER-47M8zrZhRCKhuRFvHrnrLuY5Lw,26547
  lollms_client/llm_bindings/openai/__init__.py,sha256=NDZIdzW0pnHy9gPXSKfFyS6SPIOOxj9ZEzEE7gZT2NQ,12054
+ lollms_client/llm_bindings/openllm/__init__.py,sha256=LDEwcT8CCsWrTs0ZyUg5OgP_1RV5HdCkDQmF2f5XSLo,29623
+ lollms_client/llm_bindings/pythonllamacpp/__init__.py,sha256=xh2faZa57Nn6jscWhhu0WyRvhCC8kZ9cBJFKaE7Ddos,33332
  lollms_client/llm_bindings/tensor_rt/__init__.py,sha256=IY4CrHVpHY77R1rzsl3iwcoarDjYD24n7bFKk_69PD8,31983
- lollms_client/llm_bindings/transformers/__init__.py,sha256=IWfAmBGqZEelt5Z_jYTqpz7LzzKMVsKWx5nv4zBgKCQ,12544
+ lollms_client/llm_bindings/transformers/__init__.py,sha256=gcpEQo-cs0Gzk-_gIB8fL_UjE2T_KJ1Y3FQLVA2mA94,36992
  lollms_client/llm_bindings/vllm/__init__.py,sha256=ZRCR7g3A2kHQ_07viNrNnVHoIGj5TNA4Q41rQWeTlxw,31967
  lollms_client/stt_bindings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lollms_client/stt_bindings/lollms/__init__.py,sha256=7-IZkrsn15Vaz0oqkqCxMeNQfMkeilbgScLlrrywES4,6098
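
The RECORD above lists three new LLM binding packages (llamacpp, openllm, pythonllamacpp) and a much larger transformers binding. A hedged sketch of selecting one of the local, file-based bindings together with the new models_path default; the binding name is inferred from its directory name, and the folder and model file are placeholders:

```python
from lollms_client import LollmsClient

# Hypothetical values: "pythonllamacpp" is inferred from the new
# llm_bindings/pythonllamacpp/ package; the folder and GGUF file are placeholders.
client = LollmsClient(
    binding_name="pythonllamacpp",
    models_path="/data/models/gguf",   # shared models folder for local bindings
    model_name="llama-3-8b-instruct.Q4_K_M.gguf",
)
print(client.generate_text("Say hello in one short sentence."))
```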
@@ -46,8 +49,8 @@ lollms_client/tts_bindings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJ
  lollms_client/tts_bindings/lollms/__init__.py,sha256=8x2_T9XscvISw2TiaLoFxvrS7TIsVLdqbwSc04cX-wc,7164
  lollms_client/ttv_bindings/__init__.py,sha256=UZ8o2izQOJLQgtZ1D1cXoNST7rzqW22rL2Vufc7ddRc,3141
  lollms_client/ttv_bindings/lollms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lollms_client-0.14.0.dist-info/licenses/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
- lollms_client-0.14.0.dist-info/METADATA,sha256=gkDoZr-SYxtqgyzp339qwmNj1_iBiFurvPVD6TTvc2Q,7276
- lollms_client-0.14.0.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
- lollms_client-0.14.0.dist-info/top_level.txt,sha256=NI_W8S4OYZvJjb0QWMZMSIpOrYzpqwPGYaklhyWKH2w,23
- lollms_client-0.14.0.dist-info/RECORD,,
+ lollms_client-0.15.0.dist-info/licenses/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
+ lollms_client-0.15.0.dist-info/METADATA,sha256=b2b7eOzWqzfTHTeO87lZomNl_r6FOd5raiIhNXIUHMM,7276
+ lollms_client-0.15.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ lollms_client-0.15.0.dist-info/top_level.txt,sha256=NI_W8S4OYZvJjb0QWMZMSIpOrYzpqwPGYaklhyWKH2w,23
+ lollms_client-0.15.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.8.0)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 