byllm 0.4.3__tar.gz → 0.4.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of byllm might be problematic. Click here for more details.

@@ -1,13 +1,13 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: byllm
3
- Version: 0.4.3
3
+ Version: 0.4.5
4
4
  Summary: byLLM Provides Easy to use APIs for different LLM Providers to be used with Jaseci's Jaclang Programming Language.
5
5
  License: MIT
6
6
  Keywords: llm,jaclang,jaseci,byLLM
7
7
  Author: Jason Mars
8
- Author-email: jason@jaseci.org
8
+ Author-email: jason@mars.ninja
9
9
  Maintainer: Jason Mars
10
- Maintainer-email: jason@jaseci.org
10
+ Maintainer-email: jason@mars.ninja
11
11
  Classifier: License :: OSI Approved :: MIT License
12
12
  Classifier: Programming Language :: Python :: 2
13
13
  Classifier: Programming Language :: Python :: 2.7
@@ -25,7 +25,7 @@ Classifier: Programming Language :: Python :: 3.13
25
25
  Classifier: Programming Language :: Python :: 3.14
26
26
  Provides-Extra: tools
27
27
  Provides-Extra: video
28
- Requires-Dist: jaclang (>=0.8.8)
28
+ Requires-Dist: jaclang (>=0.8.10)
29
29
  Requires-Dist: litellm (>=1.75.5.post1)
30
30
  Requires-Dist: loguru (>=0.7.2,<0.8.0)
31
31
  Requires-Dist: pillow (>=10.4.0,<10.5.0)
@@ -59,7 +59,7 @@ pip install byllm
59
59
  Consider building an application that translates English to other languages using an LLM. This can be simply built as follows:
60
60
 
61
61
  ```python
62
- import from byllm { Model }
62
+ import from byllm.lib { Model }
63
63
 
64
64
  glob llm = Model(model_name="gpt-4o");
65
65
 
@@ -78,7 +78,7 @@ This simple piece of code replaces traditional prompt engineering without introd
78
78
  Consider a program that detects the personality type of a historical figure from their name. This can be built in a way that the LLM picks from an enum and the output strictly adheres to this type.
79
79
 
80
80
  ```python
81
- import from byllm { Model }
81
+ import from byllm.lib { Model }
82
82
  glob llm = Model(model_name="gemini/gemini-2.0-flash");
83
83
 
84
84
  enum Personality {
@@ -26,7 +26,7 @@ pip install byllm
26
26
  Consider building an application that translates English to other languages using an LLM. This can be simply built as follows:
27
27
 
28
28
  ```python
29
- import from byllm { Model }
29
+ import from byllm.lib { Model }
30
30
 
31
31
  glob llm = Model(model_name="gpt-4o");
32
32
 
@@ -45,7 +45,7 @@ This simple piece of code replaces traditional prompt engineering without introd
45
45
  Consider a program that detects the personality type of a historical figure from their name. This can be built in a way that the LLM picks from an enum and the output strictly adheres to this type.
46
46
 
47
47
  ```python
48
- import from byllm { Model }
48
+ import from byllm.lib { Model }
49
49
  glob llm = Model(model_name="gemini/gemini-2.0-flash");
50
50
 
51
51
  enum Personality {
@@ -0,0 +1 @@
1
+ """byLLM Package - Lazy Loading."""
@@ -2,7 +2,9 @@
2
2
 
3
3
  from byllm.llm import Model
4
4
  from byllm.mtir import MTIR
5
- from byllm.plugin import by
5
+ from byllm.plugin import JacMachine
6
6
  from byllm.types import Image, MockToolCall, Video
7
7
 
8
+ by = JacMachine.by
9
+
8
10
  __all__ = ["by", "Image", "MockToolCall", "Model", "MTIR", "Video"]
@@ -181,7 +181,7 @@ class LiteLLMConnector(LLMConnector):
181
181
  message: LiteLLMMessage = response.choices[0].message # type: ignore
182
182
  mtir.add_message(message)
183
183
 
184
- output_content: str = message.content # type: ignore
184
+ output_content: str = message.content or "" # type: ignore
185
185
  self.log_info(f"LLM call completed with response:\n{output_content}")
186
186
  output_value = mtir.parse_response(output_content)
187
187
 
@@ -0,0 +1,55 @@
1
+ """Plugin for Jac's with_llm feature."""
2
+
3
+ from __future__ import annotations
4
+
5
+
6
+ from typing import Callable, TYPE_CHECKING
7
+
8
+ from jaclang.runtimelib.machine import hookimpl
9
+
10
+ if TYPE_CHECKING:
11
+ from byllm.llm import Model
12
+ from byllm.mtir import MTIR
13
+
14
+
15
+ class JacMachine:
16
+ """Jac's with_llm feature."""
17
+
18
+ @staticmethod
19
+ @hookimpl
20
+ def get_mtir(caller: Callable, args: dict, call_params: dict) -> object:
21
+ """Call JacLLM and return the result."""
22
+ from byllm.mtir import MTIR
23
+
24
+ return MTIR.factory(caller, args, call_params)
25
+
26
+ @staticmethod
27
+ @hookimpl
28
+ def call_llm(model: Model, mtir: MTIR) -> object:
29
+ """Call JacLLM and return the result."""
30
+ return model.invoke(mtir=mtir)
31
+
32
+ @staticmethod
33
+ @hookimpl
34
+ def by(model: Model) -> Callable:
35
+ """Python library mode decorator for Jac's by llm() syntax."""
36
+
37
+ def _decorator(caller: Callable) -> Callable:
38
+ def _wrapped_caller(*args: object, **kwargs: object) -> object:
39
+ from byllm.mtir import MTIR
40
+
41
+ invoke_args: dict[int | str, object] = {}
42
+ for i, arg in enumerate(args):
43
+ invoke_args[i] = arg
44
+ for key, value in kwargs.items():
45
+ invoke_args[key] = value
46
+ mtir = MTIR.factory(
47
+ caller=caller,
48
+ args=invoke_args,
49
+ call_params=model.llm_connector.call_params,
50
+ )
51
+ return model.invoke(mtir=mtir)
52
+
53
+ return _wrapped_caller
54
+
55
+ return _decorator
@@ -1,15 +1,15 @@
1
1
  [tool.poetry]
2
2
  name = "byllm"
3
- version = "0.4.3"
3
+ version = "0.4.5"
4
4
  description = "byLLM Provides Easy to use APIs for different LLM Providers to be used with Jaseci's Jaclang Programming Language."
5
- authors = ["Jason Mars <jason@jaseci.org>"]
6
- maintainers = ["Jason Mars <jason@jaseci.org>"]
5
+ authors = ["Jason Mars <jason@mars.ninja>"]
6
+ maintainers = ["Jason Mars <jason@mars.ninja>"]
7
7
  license = "MIT"
8
8
  readme = "README.md"
9
9
  keywords = ["llm", "jaclang", "jaseci", "byLLM"]
10
10
 
11
11
  [tool.poetry.dependencies]
12
- jaclang = ">=0.8.8"
12
+ jaclang = ">=0.8.10"
13
13
  litellm = ">=1.75.5.post1"
14
14
  loguru = "~0.7.2"
15
15
  pillow = "~10.4.0"
@@ -1,40 +0,0 @@
1
- """Plugin for Jac's with_llm feature."""
2
-
3
- from typing import Callable
4
-
5
- from byllm.llm import Model
6
- from byllm.mtir import MTIR
7
-
8
- from jaclang.runtimelib.machine import hookimpl
9
-
10
-
11
- class JacMachine:
12
- """Jac's with_llm feature."""
13
-
14
- @staticmethod
15
- @hookimpl
16
- def call_llm(model: Model, mtir: MTIR) -> object:
17
- """Call JacLLM and return the result."""
18
- return model.invoke(mtir=mtir)
19
-
20
-
21
- def by(model: Model) -> Callable:
22
- """Python library mode decorator for Jac's by llm() syntax."""
23
-
24
- def _decorator(caller: Callable) -> Callable:
25
- def _wrapped_caller(*args: object, **kwargs: object) -> object:
26
- invoke_args: dict[int | str, object] = {}
27
- for i, arg in enumerate(args):
28
- invoke_args[i] = arg
29
- for key, value in kwargs.items():
30
- invoke_args[key] = value
31
- mtir = MTIR.factory(
32
- caller=caller,
33
- args=invoke_args,
34
- call_params=model.llm_connector.call_params,
35
- )
36
- return JacMachine.call_llm(model, mtir)
37
-
38
- return _wrapped_caller
39
-
40
- return _decorator
File without changes
File without changes
File without changes
File without changes