gllm-inference-binary 0.4.57__cp313-cp313-manylinux_2_31_x86_64.whl → 0.4.59__cp313-cp313-manylinux_2_31_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

gllm_inference/builder/build_lm_invoker.pyi
@@ -65,7 +65,8 @@ def build_lm_invoker(model_id: str | ModelId, credentials: str | dict[str, Any]
      },
  )
  ```
- Providing credentials through environment variable is not supported for Bedrock.
+ The credentials can also be provided through the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`
+ environment variables.
 
  # Using Datasaur LLM Projects Deployment API
  ```python
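For context, a minimal sketch of what the relaxed Bedrock credential handling described in the changed docstring could look like from caller code. Only `build_lm_invoker` and the `AWS_ACCESS_KEY_ID` / `AWS_SECRET_ACCESS_KEY` variable names come from this diff; the import path, the `bedrock/<model>` id format, and the fallback to the environment when no credentials argument is passed are assumptions for illustration.

```python
import os

# Import path assumed from the builder stubs listed in the RECORD section below.
from gllm_inference.builder import build_lm_invoker

# Hypothetical: export standard AWS credentials instead of passing them explicitly.
os.environ["AWS_ACCESS_KEY_ID"] = "<access-key-id>"          # placeholder value
os.environ["AWS_SECRET_ACCESS_KEY"] = "<secret-access-key>"  # placeholder value

# The "bedrock/<model>" model id format is an assumption for illustration only.
lm_invoker = build_lm_invoker("bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0")
```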
gllm_inference/model/__init__.pyi (new)
@@ -0,0 +1,9 @@
+ from gllm_inference.model.em.google_em import GoogleEM as GoogleEM
+ from gllm_inference.model.em.openai_em import OpenAIEM as OpenAIEM
+ from gllm_inference.model.em.twelvelabs_em import TwelveLabsEM as TwelveLabsEM
+ from gllm_inference.model.em.voyage_em import VoyageEM as VoyageEM
+ from gllm_inference.model.lm.anthropic_lm import AnthropicLM as AnthropicLM
+ from gllm_inference.model.lm.google_lm import GoogleLM as GoogleLM
+ from gllm_inference.model.lm.openai_lm import OpenAILM as OpenAILM
+
+ __all__ = ['AnthropicLM', 'GoogleEM', 'GoogleLM', 'OpenAIEM', 'OpenAILM', 'TwelveLabsEM', 'VoyageEM']
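The new `gllm_inference/model/__init__.pyi` above re-exports the constants classes so callers can import them from one place. A minimal usage sketch follows; the class, invoker, and method names come from the stub docstrings in this diff, while the `asyncio` scaffolding is added for illustration and credentials are assumed to be available to the invoker (how they are supplied is not specified in this diff).

```python
import asyncio

# Import paths as shown in the stub docstrings added in this release.
from gllm_inference.em_invoker import OpenAIEMInvoker
from gllm_inference.model import OpenAIEM


async def main() -> None:
    # Use a model name constant instead of a hard-coded model string.
    em_invoker = OpenAIEMInvoker(OpenAIEM.TEXT_EMBEDDING_3_SMALL)
    result = await em_invoker.invoke("Hello, world!")
    print(result)  # the shape of the result is not specified in this diff


asyncio.run(main())
```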
File without changes
gllm_inference/model/em/google_em.pyi (new)
@@ -0,0 +1,16 @@
+ class GoogleEM:
+     '''Defines Google embedding model names constants.
+ 
+     Usage example:
+     ```python
+     from gllm_inference.model import GoogleEM
+     from gllm_inference.em_invoker import GoogleEMInvoker
+ 
+     em_invoker = GoogleEMInvoker(GoogleEM.GEMINI_EMBEDDING_001)
+     result = await em_invoker.invoke("Hello, world!")
+     ```
+     '''
+     GEMINI_EMBEDDING_001: str
+     TEXT_EMBEDDING_004: str
+     TEXT_EMBEDDING_005: str
+     TEXT_MULTILINGUAL_EMBEDDING_002: str
gllm_inference/model/em/openai_em.pyi (new)
@@ -0,0 +1,15 @@
+ class OpenAIEM:
+     '''Defines OpenAI embedding model names constants.
+ 
+     Usage example:
+     ```python
+     from gllm_inference.model import OpenAIEM
+     from gllm_inference.em_invoker import OpenAIEMInvoker
+ 
+     em_invoker = OpenAIEMInvoker(OpenAIEM.TEXT_EMBEDDING_3_SMALL)
+     result = await em_invoker.invoke("Hello, world!")
+     ```
+     '''
+     TEXT_EMBEDDING_3_SMALL: str
+     TEXT_EMBEDDING_3_LARGE: str
+     TEXT_EMBEDDING_ADA_002: str
gllm_inference/model/em/twelvelabs_em.pyi (new)
@@ -0,0 +1,13 @@
+ class TwelveLabsEM:
+     '''Defines TwelveLabs embedding model names constants.
+ 
+     Usage example:
+     ```python
+     from gllm_inference.model import TwelveLabsEM
+     from gllm_inference.em_invoker import TwelveLabsEMInvoker
+ 
+     em_invoker = TwelveLabsEMInvoker(TwelveLabsEM.MARENGO_RETRIEVAL_2_7)
+     result = await em_invoker.invoke("Hello, world!")
+     ```
+     '''
+     MARENGO_RETRIEVAL_2_7: str
gllm_inference/model/em/voyage_em.pyi (new)
@@ -0,0 +1,20 @@
+ class VoyageEM:
+     '''Defines Voyage embedding model names constants.
+ 
+     Usage example:
+     ```python
+     from gllm_inference.model import VoyageEM
+     from gllm_inference.em_invoker import VoyageEMInvoker
+ 
+     em_invoker = VoyageEMInvoker(VoyageEM.VOYAGE_3_5_LITE)
+     result = await em_invoker.invoke("Hello, world!")
+     ```
+     '''
+     VOYAGE_3_5: str
+     VOYAGE_3_5_LITE: str
+     VOYAGE_3_LARGE: str
+     VOYAGE_CODE_3: str
+     VOYAGE_FINANCE_2: str
+     VOYAGE_LAW_2: str
+     VOYAGE_CODE_2: str
+     VOYAGE_MULTIMODAL_3: str
File without changes
gllm_inference/model/lm/anthropic_lm.pyi (new)
@@ -0,0 +1,20 @@
+ class AnthropicLM:
+     '''Defines Anthropic language model names constants.
+ 
+     Usage example:
+     ```python
+     from gllm_inference.model import AnthropicLM
+     from gllm_inference.lm_invoker import AnthropicLMInvoker
+ 
+     lm_invoker = AnthropicLMInvoker(AnthropicLM.CLAUDE_SONNET_4)
+     response = await lm_invoker.invoke("Hello, world!")
+     ```
+     '''
+     CLAUDE_OPUS_4_1: str
+     CLAUDE_OPUS_4: str
+     CLAUDE_SONNET_4: str
+     CLAUDE_SONNET_3_7: str
+     CLAUDE_SONNET_3_5: str
+     CLAUDE_HAIKU_3_5: str
+     CLAUDE_OPUS_3: str
+     CLAUDE_HAIKU_3: str
gllm_inference/model/lm/google_lm.pyi (new)
@@ -0,0 +1,17 @@
+ class GoogleLM:
+     '''Defines Google language model names constants.
+ 
+     Usage example:
+     ```python
+     from gllm_inference.model import GoogleLM
+     from gllm_inference.lm_invoker import GoogleLMInvoker
+ 
+     lm_invoker = GoogleLMInvoker(GoogleLM.GEMINI_2_5_FLASH)
+     response = await lm_invoker.invoke("Hello, world!")
+     ```
+     '''
+     GEMINI_2_5_PRO: str
+     GEMINI_2_5_FLASH: str
+     GEMINI_2_5_FLASH_LITE: str
+     GEMINI_2_0_FLASH: str
+     GEMINI_2_0_FLASH_LITE: str
gllm_inference/model/lm/openai_lm.pyi (new)
@@ -0,0 +1,27 @@
+ class OpenAILM:
+     '''Defines OpenAI language model names constants.
+ 
+     Usage example:
+     ```python
+     from gllm_inference.model import OpenAILM
+     from gllm_inference.lm_invoker import OpenAILMInvoker
+ 
+     lm_invoker = OpenAILMInvoker(OpenAILM.GPT_5_NANO)
+     response = await lm_invoker.invoke("Hello, world!")
+     ```
+     '''
+     GPT_5: str
+     GPT_5_MINI: str
+     GPT_5_NANO: str
+     GPT_4_1: str
+     GPT_4_1_MINI: str
+     GPT_4_1_NANO: str
+     GPT_4O: str
+     GPT_4O_MINI: str
+     O4_MINI: str
+     O4_MINI_DEEP_RESEARCH: str
+     O3: str
+     O3_PRO: str
+     O3_DEEP_RESEARCH: str
+     O1: str
+     O1_PRO: str
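To connect the new language model constants to the existing builder, here is a hedged sketch. Only the constant names, `build_lm_invoker`'s signature, and the `OpenAILMInvoker` class appear in this diff; the `openai/<model>` id format and the credential value are assumptions for illustration.

```python
# Builder import path assumed from the RECORD listing below.
from gllm_inference.builder import build_lm_invoker
from gllm_inference.model import OpenAILM

# Hypothetical: combine a model name constant with a provider prefix.
# Whether build_lm_invoker accepts this "openai/<model>" form is an assumption.
lm_invoker = build_lm_invoker(
    model_id=f"openai/{OpenAILM.GPT_4_1_MINI}",
    credentials="<openai-api-key>",  # placeholder credential
)
```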
gllm_inference_binary-0.4.57.dist-info/METADATA → gllm_inference_binary-0.4.59.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: gllm-inference-binary
- Version: 0.4.57
+ Version: 0.4.59
  Summary: A library containing components related to model inferences in Gen AI applications.
  Author: Henry Wicaksono
  Author-email: henry.wicaksono@gdplabs.id
gllm_inference_binary-0.4.57.dist-info/RECORD → gllm_inference_binary-0.4.59.dist-info/RECORD
@@ -1,7 +1,7 @@
  gllm_inference/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  gllm_inference/builder/__init__.pyi,sha256=usz2lvfwO4Yk-ZGKXbCWG1cEr3nlQXxMNDNC-2yc1NM,500
  gllm_inference/builder/build_em_invoker.pyi,sha256=YL71GriZEXn4uxmXBJHWC200QdWRPwUJY_G0kKi5-dk,5352
- gllm_inference/builder/build_lm_invoker.pyi,sha256=iT8ZR8PpycRbgSlLH2VmrGvEgfJM6Opj4aTFzti8ShI,6421
+ gllm_inference/builder/build_lm_invoker.pyi,sha256=aXdNU1gUBUz-4jZ-P791tlkmjOOInLYyeiveEJFlYZo,6468
  gllm_inference/builder/build_lm_request_processor.pyi,sha256=4DpaXHNtsLY4-Fet-Tvxj7spQ8s5v_IKdMZFEHYe6JI,4396
  gllm_inference/builder/build_output_parser.pyi,sha256=_Lrq-bh1oPsb_Nwkkr_zyEUwIOMysRFZkvEtEM29LZM,936
  gllm_inference/builder/model_id.pyi,sha256=julTH1t5XVhMynbCB_JIXcEAexRrs3TqGvap3ngwDBA,416
@@ -54,6 +54,16 @@ gllm_inference/lm_invoker/schema/langchain.pyi,sha256=-0JIiMFofXoHDoMtpaFUOysvrP
  gllm_inference/lm_invoker/schema/openai.pyi,sha256=YogOvOZqPuWkNyfcvyzaxi-Bu7UMfcoRzk4gWtkPG08,1899
  gllm_inference/lm_invoker/schema/openai_compatible.pyi,sha256=iNaiEjYe_uQnhLdkp0XMhw-D1BCZR2qQZAwgMAM49us,1022
  gllm_inference/lm_invoker/tgi_lm_invoker.pyi,sha256=-Popi8_C7m90yyV1-2IQfEzBVjMVG2TFzdsZ-GTzR10,2173
+ gllm_inference/model/__init__.pyi,sha256=qClHIgljqhPPCKlGTKmHsWgYb4_hADybxtC2q1U8a5Q,593
+ gllm_inference/model/em/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ gllm_inference/model/em/google_em.pyi,sha256=ZPN5LmReO0bcTfnZixFooUTzgD-daNFPzfxzZ-5WzQQ,471
+ gllm_inference/model/em/openai_em.pyi,sha256=KcWpMmxNqS28r4zT4H2TIADHr7e7f3VSI1MPzjJXH9k,442
+ gllm_inference/model/em/twelvelabs_em.pyi,sha256=pf9YfTfTPAceBoe1mA5VgtCroHZi5k42mEz-mGSD5QM,400
+ gllm_inference/model/em/voyage_em.pyi,sha256=CEfXjLNZamfhsLyAxIkDXND2Jk4GzwXK5puK9yKJDyE,531
+ gllm_inference/model/lm/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ gllm_inference/model/lm/anthropic_lm.pyi,sha256=36j7T5FguUr8ZNTCgMJE8NF2JZZGyl9JRahvf0hBMw4,558
+ gllm_inference/model/lm/google_lm.pyi,sha256=Zy0EHiyqPjfQxmzrsfZzOKKjJWfOf3WX-xz0sqqum-U,479
+ gllm_inference/model/lm/openai_lm.pyi,sha256=u11zvvIS7-XaHKZ33cZxGQmT6cZ4DqK9Do8l7gFOUTc,618
  gllm_inference/multimodal_em_invoker/__init__.pyi,sha256=mvLLTF8a4hdNUECvEQO58inzf6MHWhJ9yabuV8N1vwk,385
  gllm_inference/multimodal_em_invoker/google_vertexai_multimodal_em_invoker.pyi,sha256=kTe0ZbMdSzYBELadlJOJffRfWAc4XiJ-jEcuijNXEjw,3073
  gllm_inference/multimodal_em_invoker/multimodal_em_invoker.pyi,sha256=XOug4yrU6bejs8YdO_yKWgbTMR6uDDAa-zGFJkKUet0,1509
@@ -98,8 +108,8 @@ gllm_inference/utils/openai_multimodal_lm_helper.pyi,sha256=eF3MVWpQOyu_oYdHRWpR
  gllm_inference/utils/retry.pyi,sha256=PVDHBDWfWj9Frvl0yf5X6nqI6oromc35LvOs-jDk3So,76
  gllm_inference/utils/utils.pyi,sha256=px3RqfrgMTR_IvC2byKjSkNSrvE9_80nIe5UUw-d09s,6017
  gllm_inference.build/.gitignore,sha256=aEiIwOuxfzdCmLZe4oB1JsBmCUxwG8x-u-HBCV9JT8E,1
- gllm_inference.cpython-313-x86_64-linux-gnu.so,sha256=60fowHAuJTrkMPCCTX21wsVv2-f01JZdGjd4zCxMMZ4,4929224
+ gllm_inference.cpython-313-x86_64-linux-gnu.so,sha256=-yLMBzgIcrS80Ixasba3Y4Y_hVusMzSJr3JGehgZ03E,4986888
  gllm_inference.pyi,sha256=9X7vzatJlBsyNRLVKmsyzwELb-r27h82-anXEQWfkUU,5005
- gllm_inference_binary-0.4.57.dist-info/METADATA,sha256=r-djtGWt_-2woH-k6oodKJgk13-TKaVPaYF7fEfkIEs,4917
- gllm_inference_binary-0.4.57.dist-info/WHEEL,sha256=qGYSeeDMRvGsNMRKS15OK05VQRV6Z0DMQkqDjYiypg0,110
- gllm_inference_binary-0.4.57.dist-info/RECORD,,
+ gllm_inference_binary-0.4.59.dist-info/METADATA,sha256=LwMFdou-CWZyPaIbalJ2tWROCXCWHR2v7ItRrZL8LGo,4917
+ gllm_inference_binary-0.4.59.dist-info/WHEEL,sha256=qGYSeeDMRvGsNMRKS15OK05VQRV6Z0DMQkqDjYiypg0,110
+ gllm_inference_binary-0.4.59.dist-info/RECORD,,