gllm-inference-binary 0.5.16__cp312-cp312-manylinux_2_31_x86_64.whl → 0.5.18__cp312-cp312-manylinux_2_31_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.
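The headline change in 0.5.18 is base64-aware input handling in the LangChain embeddings adapter (`em_invoker_embeddings.pyi`, first diff below): a new `use_base64` field, new `Attachment` and `base64_to_bytes` imports, and `Raises` sections documenting that non-string inputs trigger `ValueError`. A minimal usage sketch under those documented semantics; the import paths, the `OpenAIEMInvoker(...)` construction (mirroring the docstring's own example), and the `photo.png` file are illustrative assumptions, not part of this diff:

```python
import base64
from pathlib import Path

# Import paths are assumptions based on the module layout shown in RECORD below.
from gllm_inference.em_invoker.langchain.em_invoker_embeddings import EMInvokerEmbeddings
from gllm_inference.em_invoker.openai_em_invoker import OpenAIEMInvoker

em_invoker = OpenAIEMInvoker(...)  # constructed as in the package's own usage example

# Per the docstring: with use_base64=True, only image/audio/video inputs are sent
# as base64 strings; everything else is treated as a raw string.
embeddings = EMInvokerEmbeddings(em_invoker=em_invoker, use_base64=True)

texts = [
    "a plain text document",
    base64.b64encode(Path("photo.png").read_bytes()).decode(),  # hypothetical image input
]
vectors = embeddings.embed_documents(texts)       # list[Vector], one per input
query_vector = embeddings.embed_query("a query")  # Vector
```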

gllm_inference/em_invoker/langchain/em_invoker_embeddings.pyi CHANGED
@@ -1,14 +1,23 @@
 from gllm_inference.em_invoker.em_invoker import BaseEMInvoker as BaseEMInvoker
-from gllm_inference.schema import Vector as Vector
+from gllm_inference.schema import Attachment as Attachment, Vector as Vector
+from gllm_inference.utils.io_utils import DEFAULT_BASE64_ALLOWED_MIMETYPES as DEFAULT_BASE64_ALLOWED_MIMETYPES, base64_to_bytes as base64_to_bytes
 from langchain_core.embeddings import Embeddings
 from pydantic import BaseModel
 from typing import Any
 
 class EMInvokerEmbeddings(BaseModel, Embeddings, arbitrary_types_allowed=True):
-    """An adapter class that enables an `EMInvoker` to be used as a LangChain `Embeddings`.
+    '''An adapter class that enables an `EMInvoker` to be used as a LangChain `Embeddings`.
 
     Attributes:
         em_invoker (BaseEMInvoker): The `EMInvoker` instance to be interacted with.
+        use_base64 (bool):
+            Whether to apply strict base64 encoding to the input.
+            1. If `True`, only inputs with specific MIME types (e.g. images,
+               audio, and video) will be converted into base64 strings before being sent.
+            2. If `False`, each input is treated as a raw string.
+
+            This ensures "strict" handling: base64 encoding is not applied
+            universally, but only when required for those MIME types.
 
     Usage example:
     ```python
@@ -18,8 +27,9 @@ class EMInvokerEmbeddings(BaseModel, Embeddings, arbitrary_types_allowed=True):
     em_invoker = OpenAIEMInvoker(...)
     embeddings = EMInvokerEmbeddings(em_invoker=em_invoker)
     ```
-    """
+    '''
     em_invoker: BaseEMInvoker
+    use_base64: bool
     async def aembed_documents(self, texts: list[str], **kwargs: Any) -> list[Vector]:
         """Asynchronously embed documents using the `EMInvoker`.
 
@@ -29,6 +39,9 @@ class EMInvokerEmbeddings(BaseModel, Embeddings, arbitrary_types_allowed=True):
 
         Returns:
             list[Vector]: List of embeddings, one for each text.
+
+        Raises:
+            ValueError: If `texts` is not a list of strings.
         """
     async def aembed_query(self, text: str, **kwargs: Any) -> Vector:
         """Asynchronously embed query using the `EMInvoker`.
@@ -39,6 +52,9 @@ class EMInvokerEmbeddings(BaseModel, Embeddings, arbitrary_types_allowed=True):
 
         Returns:
             Vector: Embeddings for the text.
+
+        Raises:
+            ValueError: If `text` is not a string.
         """
     def embed_documents(self, texts: list[str], **kwargs: Any) -> list[Vector]:
         """Embed documents using the `EMInvoker`.
@@ -49,6 +65,9 @@ class EMInvokerEmbeddings(BaseModel, Embeddings, arbitrary_types_allowed=True):
 
         Returns:
             list[Vector]: List of embeddings, one for each text.
+
+        Raises:
+            ValueError: If `texts` is not a list of strings.
         """
     def embed_query(self, text: str, **kwargs: Any) -> Vector:
         """Embed query using the `EMInvoker`.
@@ -59,4 +78,7 @@ class EMInvokerEmbeddings(BaseModel, Embeddings, arbitrary_types_allowed=True):
 
         Returns:
             Vector: Embeddings for the text.
+
+        Raises:
+            ValueError: If `text` is not a string.
         """
gllm_inference/lm_invoker/lm_invoker.pyi CHANGED
@@ -14,16 +14,15 @@ class Key:
     """Defines valid keys in LM invokers JSON schema."""
     ADDITIONAL_PROPERTIES: str
     ANY_OF: str
+    ARGS_SCHEMA: str
     ARUN: str
+    COROUTINE: str
     DATA_TYPE: str
     DATA_VALUE: str
     DEFAULT: str
     DESCRIPTION: str
     FUNC: str
-    FUNCTION: str
-    META: str
     NAME: str
-    PARAMETERS: str
     PROPERTIES: str
     REQUIRED: str
     TITLE: str
gllm_inference/utils/__init__.pyi CHANGED
@@ -1,4 +1,5 @@
+from gllm_inference.utils.io_utils import base64_to_bytes as base64_to_bytes
 from gllm_inference.utils.langchain import load_langchain_model as load_langchain_model, parse_model_data as parse_model_data
 from gllm_inference.utils.validation import validate_string_enum as validate_string_enum
 
-__all__ = ['load_langchain_model', 'parse_model_data', 'validate_string_enum']
+__all__ = ['base64_to_bytes', 'load_langchain_model', 'parse_model_data', 'validate_string_enum']
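Since `base64_to_bytes` now appears in `__all__`, it can be imported straight from `gllm_inference.utils`. A short call sketch following the stub signature; the sample inputs are illustrative and the expected results reflect the documented behavior, not a tested run:

```python
import base64

from gllm_inference.utils import base64_to_bytes

# A base64 string whose decoded payload sniffs as an image should come back as bytes
# (assuming the package's MIME detection recognizes the PNG signature).
png_signature = base64.b64encode(b"\x89PNG\r\n\x1a\n" + b"\x00" * 16).decode()
maybe_bytes = base64_to_bytes(png_signature, allowed_mimetypes=("image/*",))

# Plain text is not valid base64, so per the docstring it is returned unchanged as a str.
unchanged = base64_to_bytes("just a plain sentence")
```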
gllm_inference/utils/io_utils.pyi ADDED
@@ -0,0 +1,26 @@
+from _typeshed import Incomplete
+
+logger: Incomplete
+DEFAULT_BASE64_ALLOWED_MIMETYPES: Incomplete
+
+def base64_to_bytes(value: str, *, allowed_mimetypes: tuple[str, ...] | None = ...) -> str | bytes:
+    '''Decode a base64 string to bytes based on allowed MIME type.
+
+    The conversion steps are as follows:
+    1. The function first attempts to decode the given string from base64.
+    2. If decoding succeeds, it checks the MIME type of the decoded content.
+    3. When the MIME type matches one of the allowed patterns (e.g., ``"image/*"``),
+       the raw bytes are returned. Otherwise, the original string is returned unchanged.
+
+    Args:
+        value (str): Input data to decode.
+        allowed_mimetypes (tuple[str, ...], optional): MIME type prefixes that are allowed
+            to be decoded into bytes. Defaults to ("image/*", "audio/*", "video/*").
+
+    Returns:
+        str | bytes: Base64-encoded string or raw bytes if MIME type is allowed;
+            otherwise returns original string.
+
+    Raises:
+        ValueError: If the input is not a string.
+    '''
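The docstring describes a decode, sniff, then gate flow, and the `binascii` and `fnmatch` imports added to `gllm_inference.pyi` below are consistent with it. A minimal sketch of that documented behavior, assuming the third-party `filetype` package for MIME sniffing; this is not the package's actual implementation:

```python
import base64
import binascii
import fnmatch

import filetype  # one plausible sniffer choice; the diff does not confirm which is used

DEFAULT_BASE64_ALLOWED_MIMETYPES = ("image/*", "audio/*", "video/*")


def base64_to_bytes_sketch(
    value: str, *, allowed_mimetypes: tuple[str, ...] | None = None
) -> str | bytes:
    """Decode `value` from base64 and return bytes only for allowed MIME types."""
    if not isinstance(value, str):
        raise ValueError("Input must be a string.")
    patterns = allowed_mimetypes or DEFAULT_BASE64_ALLOWED_MIMETYPES

    # Step 1: try to decode; anything that is not valid base64 passes through unchanged.
    try:
        decoded = base64.b64decode(value, validate=True)
    except (binascii.Error, ValueError):
        return value

    # Step 2: sniff the MIME type of the decoded payload.
    kind = filetype.guess(decoded)
    if kind is None:
        return value

    # Step 3: return raw bytes only when the MIME type matches an allowed pattern.
    if any(fnmatch.fnmatch(kind.mime, pattern) for pattern in patterns):
        return decoded
    return value
```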
gllm_inference.pyi CHANGED
@@ -99,8 +99,6 @@ import litellm
 import inspect
 import time
 import jsonschema
-import langchain_core.utils
-import langchain_core.utils.function_calling
 import gllm_inference.schema.MessageContent
 import gllm_inference.utils.validate_string_enum
 import gllm_inference.schema.CodeExecResult
@@ -118,4 +116,6 @@ import uuid
 import pathlib
 import filetype
 import magic
+import binascii
+import fnmatch
 import importlib
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: gllm-inference-binary
-Version: 0.5.16
+Version: 0.5.18
 Summary: A library containing components related to model inferences in Gen AI applications.
 Author: Henry Wicaksono
 Author-email: henry.wicaksono@gdplabs.id
@@ -15,7 +15,7 @@ gllm_inference/em_invoker/bedrock_em_invoker.pyi,sha256=KdX1PMWPfrmFIrSbf8Y6jkDu
 gllm_inference/em_invoker/em_invoker.pyi,sha256=dgIeAIetQcBmohwYwgo1vNw7YNO_3DQCobUaabBtf7g,5043
 gllm_inference/em_invoker/google_em_invoker.pyi,sha256=oDS4dXBcLg59ePeiLTwdl09927oJNZ_ykIe0n6Ba8gU,6557
 gllm_inference/em_invoker/langchain/__init__.pyi,sha256=VYGKE5OgU0my1RlhgzkU_A7-GLGnUDDnNFuctuRwILE,148
-gllm_inference/em_invoker/langchain/em_invoker_embeddings.pyi,sha256=6nASLqi0FXCpqyYPl7kM3g7hAW-xS5ZwsS3GFudns98,2347
+gllm_inference/em_invoker/langchain/em_invoker_embeddings.pyi,sha256=VU3-Vhb9BCDhJo8PPdWHe2rBEOCs_HMXT6ZaWwjUzZE,3304
 gllm_inference/em_invoker/langchain_em_invoker.pyi,sha256=HuQD5Do4jwqKoMMgMjgZkic2L21n2ayJewIce09bZ3M,3163
 gllm_inference/em_invoker/openai_compatible_em_invoker.pyi,sha256=GudWfL7QCAKLInMw94jTNogbyELlD9tDbrDErHB4RRI,5360
 gllm_inference/em_invoker/openai_em_invoker.pyi,sha256=vsfEmDNvwrlBhDxqsCKyMpMZbl_FaQUWHEgQc9yeo14,4656
@@ -40,7 +40,7 @@ gllm_inference/lm_invoker/datasaur_lm_invoker.pyi,sha256=QS84w3WpD3Oyl5Hdxrucsad
 gllm_inference/lm_invoker/google_lm_invoker.pyi,sha256=aPOlaw2rexUDhbMFaXnuKqOT7lqeKxjfeToe9LjwEUw,16787
 gllm_inference/lm_invoker/langchain_lm_invoker.pyi,sha256=skcbX34ZosGMFli3SLmGITJTzMj9HxkQBWGrd-zYCbU,13264
 gllm_inference/lm_invoker/litellm_lm_invoker.pyi,sha256=_c56ewpEQ-Ywj5ofFzRYBvQgefR7Q_WkcQt97lnIFgg,13128
-gllm_inference/lm_invoker/lm_invoker.pyi,sha256=5D0cskdMtQcbAmEeZdkdxwDvCuKmXBGY18CzTB0xq2c,7835
+gllm_inference/lm_invoker/lm_invoker.pyi,sha256=uKKNom4kjH7xUnOsqh9DBRumo244U-f6EjR5kLncEys,7823
 gllm_inference/lm_invoker/openai_compatible_lm_invoker.pyi,sha256=_hOAde_Faph3JoGYh7zLch6BRc2Lam8PXZvi5-PkL-E,14938
 gllm_inference/lm_invoker/openai_lm_invoker.pyi,sha256=y9IiSvffDVA7cQh24nFZa2qbl-YOBT8A0rBWUcH8I6c,19531
 gllm_inference/lm_invoker/schema/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -91,12 +91,13 @@ gllm_inference/schema/token_usage.pyi,sha256=1GTQVORV0dBNmD_jix8aVaUqxMKFF04KpLP
 gllm_inference/schema/tool_call.pyi,sha256=zQaVxCnkVxOfOEhBidqohU85gb4PRwnwBiygKaunamk,389
 gllm_inference/schema/tool_result.pyi,sha256=cAG7TVtB4IWJPt8XBBbB92cuY1ZsX9M276bN9aqjcvM,276
 gllm_inference/schema/type_alias.pyi,sha256=cQRlLT5vz9YV50n9x5BmufgqUz0UByInpmAvHSv2WTY,719
-gllm_inference/utils/__init__.pyi,sha256=npmBmmlBv7cPHMg1hdL3S2_RelD6vk_LhCsGELhN_7s,295
+gllm_inference/utils/__init__.pyi,sha256=mDJ2gLSeQzm-79Tov5-dhrMNaDqgcD1VVzDYAWvIRqA,391
+gllm_inference/utils/io_utils.pyi,sha256=7kUTacHAVRYoemFUOjCH7-Qmw-YsQGd6rGYxjf_qmtw,1094
 gllm_inference/utils/langchain.pyi,sha256=VluQiHkGigDdqLUbhB6vnXiISCP5hHqV0qokYY6dC1A,1164
 gllm_inference/utils/validation.pyi,sha256=toxBtRp-VItC_X7sNi-GDd7sjibBdWMrR0q01OI2D7k,385
 gllm_inference.build/.gitignore,sha256=aEiIwOuxfzdCmLZe4oB1JsBmCUxwG8x-u-HBCV9JT8E,1
-gllm_inference.cpython-312-x86_64-linux-gnu.so,sha256=g-1RtGrQtFBRD143QzVGqSetkn9kayawjat7sm5cgro,4334728
-gllm_inference.pyi,sha256=0PMbN8u5rnM8r9fZQFDM9V_UuvlYu3fpX6iLH4NKioA,3658
-gllm_inference_binary-0.5.16.dist-info/METADATA,sha256=Hjc4sS2zCgCUwhPEkaUTql8XLPjSdAArBYoTFKqv-nE,4608
-gllm_inference_binary-0.5.16.dist-info/WHEEL,sha256=mNY4pwQL4AOAoPmLYEQs2SSpMIbATFeiJFktRD5iKkY,110
-gllm_inference_binary-0.5.16.dist-info/RECORD,,
+gllm_inference.cpython-312-x86_64-linux-gnu.so,sha256=wPjqnDDKn1xhzf6LJfvfr0VdD_fpiv4Zah-tCwltF-o,4383944
+gllm_inference.pyi,sha256=lTVixRzlC12Joi4kW_vxnux0rLHAUB_3j7RMFOwLK-M,3616
+gllm_inference_binary-0.5.18.dist-info/METADATA,sha256=Heo4b0XS1gQgOtbWPzIYPTic4-esBmsLMJ2LO68SP3U,4608
+gllm_inference_binary-0.5.18.dist-info/WHEEL,sha256=mNY4pwQL4AOAoPmLYEQs2SSpMIbATFeiJFktRD5iKkY,110
+gllm_inference_binary-0.5.18.dist-info/RECORD,,