gllm-inference-binary 0.4.58__cp313-cp313-macosx_13_0_x86_64.whl → 0.4.60__cp313-cp313-macosx_13_0_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of gllm-inference-binary might be problematic.

@@ -65,7 +65,8 @@ def build_lm_invoker(model_id: str | ModelId, credentials: str | dict[str, Any]
  },
  )
  ```
- Providing credentials through environment variable is not supported for Bedrock.
+ The credentials can also be provided through the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`
+ environment variables.

  # Using Datasaur LLM Projects Deployment API
  ```python
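
For context, a minimal sketch of what the updated Bedrock note implies for callers. Only the environment variable names and the `build_lm_invoker` signature above come from this diff; the import path and the Bedrock model id string are assumptions for illustration.

```python
import os

# Assumed re-export path; the diff only lists gllm_inference/builder/build_lm_invoker.pyi.
from gllm_inference.builder import build_lm_invoker

# Per the updated docstring, Bedrock credentials may now be supplied via the
# environment instead of the explicit `credentials` argument.
os.environ["AWS_ACCESS_KEY_ID"] = "<access-key-id>"
os.environ["AWS_SECRET_ACCESS_KEY"] = "<secret-access-key>"

# Hypothetical Bedrock model id; the exact format the builder expects is not shown in this hunk.
lm_invoker = build_lm_invoker("bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0")
```
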
@@ -0,0 +1,9 @@
+ from gllm_inference.model.em.google_em import GoogleEM as GoogleEM
+ from gllm_inference.model.em.openai_em import OpenAIEM as OpenAIEM
+ from gllm_inference.model.em.twelvelabs_em import TwelveLabsEM as TwelveLabsEM
+ from gllm_inference.model.em.voyage_em import VoyageEM as VoyageEM
+ from gllm_inference.model.lm.anthropic_lm import AnthropicLM as AnthropicLM
+ from gllm_inference.model.lm.google_lm import GoogleLM as GoogleLM
+ from gllm_inference.model.lm.openai_lm import OpenAILM as OpenAILM
+
+ __all__ = ['AnthropicLM', 'GoogleEM', 'GoogleLM', 'OpenAIEM', 'OpenAILM', 'TwelveLabsEM', 'VoyageEM']
File without changes
@@ -0,0 +1,16 @@
+ class GoogleEM:
+     '''Defines Google embedding model names constants.
+
+     Usage example:
+     ```python
+     from gllm_inference.model import GoogleEM
+     from gllm_inference.em_invoker import GoogleEMInvoker
+
+     em_invoker = GoogleEMInvoker(GoogleEM.GEMINI_EMBEDDING_001)
+     result = await em_invoker.invoke("Hello, world!")
+     ```
+     '''
+     GEMINI_EMBEDDING_001: str
+     TEXT_EMBEDDING_004: str
+     TEXT_EMBEDDING_005: str
+     TEXT_MULTILINGUAL_EMBEDDING_002: str
@@ -0,0 +1,15 @@
+ class OpenAIEM:
+     '''Defines OpenAI embedding model names constants.
+
+     Usage example:
+     ```python
+     from gllm_inference.model import OpenAIEM
+     from gllm_inference.em_invoker import OpenAIEMInvoker
+
+     em_invoker = OpenAIEMInvoker(OpenAIEM.TEXT_EMBEDDING_3_SMALL)
+     result = await em_invoker.invoke("Hello, world!")
+     ```
+     '''
+     TEXT_EMBEDDING_3_SMALL: str
+     TEXT_EMBEDDING_3_LARGE: str
+     TEXT_EMBEDDING_ADA_002: str
@@ -0,0 +1,13 @@
+ class TwelveLabsEM:
+     '''Defines TwelveLabs embedding model names constants.
+
+     Usage example:
+     ```python
+     from gllm_inference.model import TwelveLabsEM
+     from gllm_inference.em_invoker import TwelveLabsEMInvoker
+
+     em_invoker = TwelveLabsEMInvoker(TwelveLabsEM.MARENGO_RETRIEVAL_2_7)
+     result = await em_invoker.invoke("Hello, world!")
+     ```
+     '''
+     MARENGO_RETRIEVAL_2_7: str
@@ -0,0 +1,20 @@
+ class VoyageEM:
+     '''Defines Voyage embedding model names constants.
+
+     Usage example:
+     ```python
+     from gllm_inference.model import VoyageEM
+     from gllm_inference.em_invoker import VoyageEMInvoker
+
+     em_invoker = VoyageEMInvoker(VoyageEM.VOYAGE_3_5_LITE)
+     result = await em_invoker.invoke("Hello, world!")
+     ```
+     '''
+     VOYAGE_3_5: str
+     VOYAGE_3_5_LITE: str
+     VOYAGE_3_LARGE: str
+     VOYAGE_CODE_3: str
+     VOYAGE_FINANCE_2: str
+     VOYAGE_LAW_2: str
+     VOYAGE_CODE_2: str
+     VOYAGE_MULTIMODAL_3: str
File without changes
@@ -0,0 +1,20 @@
+ class AnthropicLM:
+     '''Defines Anthropic language model names constants.
+
+     Usage example:
+     ```python
+     from gllm_inference.model import AnthropicLM
+     from gllm_inference.lm_invoker import AnthropicLMInvoker
+
+     lm_invoker = AnthropicLMInvoker(AnthropicLM.CLAUDE_SONNET_4)
+     response = await lm_invoker.invoke("Hello, world!")
+     ```
+     '''
+     CLAUDE_OPUS_4_1: str
+     CLAUDE_OPUS_4: str
+     CLAUDE_SONNET_4: str
+     CLAUDE_SONNET_3_7: str
+     CLAUDE_SONNET_3_5: str
+     CLAUDE_HAIKU_3_5: str
+     CLAUDE_OPUS_3: str
+     CLAUDE_HAIKU_3: str
@@ -0,0 +1,17 @@
+ class GoogleLM:
+     '''Defines Google language model names constants.
+
+     Usage example:
+     ```python
+     from gllm_inference.model import GoogleLM
+     from gllm_inference.lm_invoker import GoogleLMInvoker
+
+     lm_invoker = GoogleLMInvoker(GoogleLM.GEMINI_2_5_FLASH)
+     response = await lm_invoker.invoke("Hello, world!")
+     ```
+     '''
+     GEMINI_2_5_PRO: str
+     GEMINI_2_5_FLASH: str
+     GEMINI_2_5_FLASH_LITE: str
+     GEMINI_2_0_FLASH: str
+     GEMINI_2_0_FLASH_LITE: str
@@ -0,0 +1,27 @@
+ class OpenAILM:
+     '''Defines OpenAI language model names constants.
+
+     Usage example:
+     ```python
+     from gllm_inference.model import OpenAILM
+     from gllm_inference.lm_invoker import OpenAILMInvoker
+
+     lm_invoker = OpenAILMInvoker(OpenAILM.GPT_5_NANO)
+     response = await lm_invoker.invoke("Hello, world!")
+     ```
+     '''
+     GPT_5: str
+     GPT_5_MINI: str
+     GPT_5_NANO: str
+     GPT_4_1: str
+     GPT_4_1_MINI: str
+     GPT_4_1_NANO: str
+     GPT_4O: str
+     GPT_4O_MINI: str
+     O4_MINI: str
+     O4_MINI_DEEP_RESEARCH: str
+     O3: str
+     O3_PRO: str
+     O3_DEEP_RESEARCH: str
+     O1: str
+     O1_PRO: str
@@ -1,7 +1,6 @@
  from _typeshed import Incomplete
  from gllm_core.schema import Chunk as Chunk
  from gllm_inference.constants import DEFAULT_CONTENT_PLACEHOLDER_TYPE as DEFAULT_CONTENT_PLACEHOLDER_TYPE, HEX_REPR_LENGTH as HEX_REPR_LENGTH
- from gllm_inference.utils import get_mime_type as get_mime_type
  from pydantic import BaseModel
  from typing import Any

@@ -14,11 +13,13 @@ class Attachment(BaseModel):
  data (bytes): The content data of the file attachment.
  filename (str): The filename of the file attachment.
  mime_type (str): The mime type of the file attachment.
+ extension (str): The extension of the file attachment.
  url (str | None): The URL of the file attachment. Defaults to None.
  """
  data: bytes
  filename: str
  mime_type: str
+ extension: str
  url: str | None
  @classmethod
  def from_bytes(cls, bytes: bytes, filename: str | None = None) -> Attachment:
@@ -27,7 +28,7 @@ class Attachment(BaseModel):
  Args:
  bytes (bytes): The bytes of the file.
  filename (str | None, optional): The filename of the file. Defaults to None,
- in which case the filename will be derived from the mime type.
+ in which case the filename will be derived from the extension.

  Returns:
  Attachment: The instantiated Attachment.
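
A hedged sketch of how the new field surfaces to callers, based only on the stub shown above (the import path is an assumption; the stub itself lives in gllm_inference/schema/model_io.pyi):

```python
# Assumed import path for the Attachment model.
from gllm_inference.schema import Attachment

raw = b"%PDF-1.7 ..."  # illustrative bytes, not real file content

# With no filename given, the updated docstring says the filename is now
# derived from the detected extension rather than from the mime type.
attachment = Attachment.from_bytes(raw)
print(attachment.filename, attachment.mime_type, attachment.extension)
```
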
Binary file
gllm_inference.pyi CHANGED
@@ -136,6 +136,9 @@ import gllm_inference.prompt_formatter.HuggingFacePromptFormatter
  import gllm_inference.prompt_formatter.OpenAIPromptFormatter
  import transformers
  import gllm_core.utils.logger_manager
+ import mimetypes
+ import uuid
+ import filetype
+ import magic
  import urllib
- import urllib.parse
- import magic
+ import urllib.parse
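
The newly imported `filetype` package (also added as a required dependency in the METADATA hunk below) is a small content-sniffing library. Here is a plausible, hedged sketch of the kind of detection it enables; this is an assumption about how the new `Attachment.extension` field could be populated, not the package's actual implementation:

```python
import filetype  # new runtime dependency in 0.4.60

with open("document.pdf", "rb") as f:  # illustrative file name
    data = f.read()

# filetype.guess returns an object with .extension and .mime, or None if unknown.
kind = filetype.guess(data)
if kind is not None:
    print(kind.extension, kind.mime)  # e.g. "pdf", "application/pdf"
```
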
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: gllm-inference-binary
- Version: 0.4.58
+ Version: 0.4.60
  Summary: A library containing components related to model inferences in Gen AI applications.
  Author: Henry Wicaksono
  Author-email: henry.wicaksono@gdplabs.id
@@ -22,6 +22,7 @@ Provides-Extra: voyage
  Requires-Dist: aioboto3 (>=15.0.0,<16.0.0) ; extra == "bedrock"
  Requires-Dist: aiohttp (>=3.12.14,<4.0.0)
  Requires-Dist: anthropic (>=0.60.0,<0.61.0) ; extra == "anthropic"
+ Requires-Dist: filetype (>=1.2.0,<2.0.0)
  Requires-Dist: gllm-core-binary (>=0.3.0,<0.4.0)
  Requires-Dist: google-genai (==1.20.0) ; extra == "google"
  Requires-Dist: httpx (>=0.28.0,<0.29.0)
@@ -1,7 +1,7 @@
  gllm_inference/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  gllm_inference/builder/__init__.pyi,sha256=usz2lvfwO4Yk-ZGKXbCWG1cEr3nlQXxMNDNC-2yc1NM,500
  gllm_inference/builder/build_em_invoker.pyi,sha256=YL71GriZEXn4uxmXBJHWC200QdWRPwUJY_G0kKi5-dk,5352
- gllm_inference/builder/build_lm_invoker.pyi,sha256=iT8ZR8PpycRbgSlLH2VmrGvEgfJM6Opj4aTFzti8ShI,6421
+ gllm_inference/builder/build_lm_invoker.pyi,sha256=aXdNU1gUBUz-4jZ-P791tlkmjOOInLYyeiveEJFlYZo,6468
  gllm_inference/builder/build_lm_request_processor.pyi,sha256=4DpaXHNtsLY4-Fet-Tvxj7spQ8s5v_IKdMZFEHYe6JI,4396
  gllm_inference/builder/build_output_parser.pyi,sha256=_Lrq-bh1oPsb_Nwkkr_zyEUwIOMysRFZkvEtEM29LZM,936
  gllm_inference/builder/model_id.pyi,sha256=julTH1t5XVhMynbCB_JIXcEAexRrs3TqGvap3ngwDBA,416
@@ -54,6 +54,16 @@ gllm_inference/lm_invoker/schema/langchain.pyi,sha256=-0JIiMFofXoHDoMtpaFUOysvrP
  gllm_inference/lm_invoker/schema/openai.pyi,sha256=YogOvOZqPuWkNyfcvyzaxi-Bu7UMfcoRzk4gWtkPG08,1899
  gllm_inference/lm_invoker/schema/openai_compatible.pyi,sha256=iNaiEjYe_uQnhLdkp0XMhw-D1BCZR2qQZAwgMAM49us,1022
  gllm_inference/lm_invoker/tgi_lm_invoker.pyi,sha256=-Popi8_C7m90yyV1-2IQfEzBVjMVG2TFzdsZ-GTzR10,2173
+ gllm_inference/model/__init__.pyi,sha256=qClHIgljqhPPCKlGTKmHsWgYb4_hADybxtC2q1U8a5Q,593
+ gllm_inference/model/em/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ gllm_inference/model/em/google_em.pyi,sha256=ZPN5LmReO0bcTfnZixFooUTzgD-daNFPzfxzZ-5WzQQ,471
+ gllm_inference/model/em/openai_em.pyi,sha256=KcWpMmxNqS28r4zT4H2TIADHr7e7f3VSI1MPzjJXH9k,442
+ gllm_inference/model/em/twelvelabs_em.pyi,sha256=pf9YfTfTPAceBoe1mA5VgtCroHZi5k42mEz-mGSD5QM,400
+ gllm_inference/model/em/voyage_em.pyi,sha256=CEfXjLNZamfhsLyAxIkDXND2Jk4GzwXK5puK9yKJDyE,531
+ gllm_inference/model/lm/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ gllm_inference/model/lm/anthropic_lm.pyi,sha256=36j7T5FguUr8ZNTCgMJE8NF2JZZGyl9JRahvf0hBMw4,558
+ gllm_inference/model/lm/google_lm.pyi,sha256=Zy0EHiyqPjfQxmzrsfZzOKKjJWfOf3WX-xz0sqqum-U,479
+ gllm_inference/model/lm/openai_lm.pyi,sha256=u11zvvIS7-XaHKZ33cZxGQmT6cZ4DqK9Do8l7gFOUTc,618
  gllm_inference/multimodal_em_invoker/__init__.pyi,sha256=mvLLTF8a4hdNUECvEQO58inzf6MHWhJ9yabuV8N1vwk,385
  gllm_inference/multimodal_em_invoker/google_vertexai_multimodal_em_invoker.pyi,sha256=kTe0ZbMdSzYBELadlJOJffRfWAc4XiJ-jEcuijNXEjw,3073
  gllm_inference/multimodal_em_invoker/multimodal_em_invoker.pyi,sha256=XOug4yrU6bejs8YdO_yKWgbTMR6uDDAa-zGFJkKUet0,1509
@@ -90,7 +100,7 @@ gllm_inference/request_processor/uses_lm_mixin.pyi,sha256=r0gkbtDG1MC1KVF0YxGV-s
  gllm_inference/schema/__init__.pyi,sha256=B31fohfsVVUtI4Zz_fmZJr9bP_rSFwG2-UQMuaLBQds,1188
  gllm_inference/schema/enums.pyi,sha256=KJ2fLoyviuz5ZDPzbpxfmWVUgxX0dF0w7V3V2nj3lac,603
  gllm_inference/schema/model_id.pyi,sha256=cZ8_UNiA-3WhEdJeO4aHaTIW9pm50trQe_9hE6V6IEQ,5236
- gllm_inference/schema/model_io.pyi,sha256=D7VW1CqLT7RWPQR96Mv9ib_dpLUW1f9K7fUW3Zv4H0Y,6836
+ gllm_inference/schema/model_io.pyi,sha256=TZjcX3-_w6w4CCny5yyzhUbmblV7svTHk6P6TlKgoVs,6854
  gllm_inference/schema/type_alias.pyi,sha256=PxQT50eVe370q_T-6WucHc7jOmKX6h4cZfvrG7JO_vg,971
  gllm_inference/utils/__init__.pyi,sha256=2GKVKCMCh1Gc-oC7scNhD2chHdvmSxr1WBU6AkE2ANw,1135
  gllm_inference/utils/langchain.pyi,sha256=VluQiHkGigDdqLUbhB6vnXiISCP5hHqV0qokYY6dC1A,1164
@@ -98,8 +108,8 @@ gllm_inference/utils/openai_multimodal_lm_helper.pyi,sha256=eF3MVWpQOyu_oYdHRWpR
  gllm_inference/utils/retry.pyi,sha256=PVDHBDWfWj9Frvl0yf5X6nqI6oromc35LvOs-jDk3So,76
  gllm_inference/utils/utils.pyi,sha256=px3RqfrgMTR_IvC2byKjSkNSrvE9_80nIe5UUw-d09s,6017
  gllm_inference.build/.gitignore,sha256=aEiIwOuxfzdCmLZe4oB1JsBmCUxwG8x-u-HBCV9JT8E,1
- gllm_inference.cpython-313-darwin.so,sha256=WzhIbj8uefY0MhE2L87k6eLEohn-FiX8LTThUJrpZlY,4830648
- gllm_inference.pyi,sha256=9X7vzatJlBsyNRLVKmsyzwELb-r27h82-anXEQWfkUU,5005
- gllm_inference_binary-0.4.58.dist-info/METADATA,sha256=EFxwUwdHZl9BM3T-8sMQInbMyY0IXwSEjoBJ1E_7PcE,4917
- gllm_inference_binary-0.4.58.dist-info/WHEEL,sha256=PCOZcL_jcbAVhuFR5ylE4Mr-7HPGHAcfJk9OpuMh7RQ,107
- gllm_inference_binary-0.4.58.dist-info/RECORD,,
+ gllm_inference.cpython-313-darwin.so,sha256=dtT1vFgXuFq_oC_sp-2xzPfSK-ueOIAfyvDKOUBpsm8,4885640
+ gllm_inference.pyi,sha256=U8ZHrNyDfYAAEMXVFrCFSuf6o6NSuxcJBnMnnMTf330,5050
+ gllm_inference_binary-0.4.60.dist-info/METADATA,sha256=MuTck_lQY3X1xWM_TrW_I79n2mX-lN1YWkil8z0G9v4,4958
+ gllm_inference_binary-0.4.60.dist-info/WHEEL,sha256=PCOZcL_jcbAVhuFR5ylE4Mr-7HPGHAcfJk9OpuMh7RQ,107
+ gllm_inference_binary-0.4.60.dist-info/RECORD,,