gllm-inference-binary 0.5.46__cp313-cp313-win_amd64.whl → 0.5.47__cp313-cp313-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gllm_inference/em_invoker/schema/jina.pyi +1 -0
- gllm_inference/model/__init__.pyi +4 -1
- gllm_inference/model/em/cohere_em.pyi +17 -0
- gllm_inference/model/em/jina_em.pyi +22 -0
- gllm_inference/model/lm/anthropic_lm.pyi +2 -0
- gllm_inference/model/lm/google_lm.pyi +1 -0
- gllm_inference/model/lm/xai_lm.pyi +19 -0
- gllm_inference.cp313-win_amd64.pyd +0 -0
- {gllm_inference_binary-0.5.46.dist-info → gllm_inference_binary-0.5.47.dist-info}/METADATA +1 -1
- {gllm_inference_binary-0.5.46.dist-info → gllm_inference_binary-0.5.47.dist-info}/RECORD +12 -9
- {gllm_inference_binary-0.5.46.dist-info → gllm_inference_binary-0.5.47.dist-info}/WHEEL +0 -0
- {gllm_inference_binary-0.5.46.dist-info → gllm_inference_binary-0.5.47.dist-info}/top_level.txt +0 -0

gllm_inference/model/__init__.pyi
@@ -1,9 +1,12 @@
+from gllm_inference.model.em.cohere_em import CohereEM as CohereEM
 from gllm_inference.model.em.google_em import GoogleEM as GoogleEM
+from gllm_inference.model.em.jina_em import JinaEM as JinaEM
 from gllm_inference.model.em.openai_em import OpenAIEM as OpenAIEM
 from gllm_inference.model.em.twelvelabs_em import TwelveLabsEM as TwelveLabsEM
 from gllm_inference.model.em.voyage_em import VoyageEM as VoyageEM
 from gllm_inference.model.lm.anthropic_lm import AnthropicLM as AnthropicLM
 from gllm_inference.model.lm.google_lm import GoogleLM as GoogleLM
 from gllm_inference.model.lm.openai_lm import OpenAILM as OpenAILM
+from gllm_inference.model.lm.xai_lm import XAILM as XAILM
 
-__all__ = ['AnthropicLM', 'GoogleEM', 'GoogleLM', 'OpenAIEM', 'OpenAILM', 'TwelveLabsEM', 'VoyageEM']
+__all__ = ['AnthropicLM', 'CohereEM', 'GoogleEM', 'GoogleLM', 'JinaEM', 'OpenAIEM', 'OpenAILM', 'TwelveLabsEM', 'VoyageEM', 'XAILM']
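
The sketch below is a quick check of the widened public API: it imports each name newly added to __all__. A minimal sketch, assuming only that the 0.5.47 wheel is installed.

```python
# Minimal sketch: the names newly exported from gllm_inference.model in 0.5.47.
from gllm_inference.model import CohereEM, JinaEM, XAILM

print(CohereEM.EMBED_V4_0)          # new Cohere embedding model constants
print(JinaEM.JINA_EMBEDDINGS_V4)    # new Jina embedding model constants
print(XAILM.GROK_4_FAST_REASONING)  # new xAI language model constants
```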

gllm_inference/model/em/cohere_em.pyi
@@ -0,0 +1,17 @@
+class CohereEM:
+    '''Defines Cohere embedding model names constants.
+
+    Usage example:
+    ```python
+    from gllm_inference.model import CohereEM
+    from gllm_inference.em_invoker import CohereEMInvoker
+
+    em_invoker = CohereEMInvoker(CohereEM.EMBED_V4_0)
+    result = await em_invoker.invoke("Hello, world!")
+    ```
+    '''
+    EMBED_V4_0: str
+    EMBED_ENGLISH_V3_0: str
+    EMBED_ENGLISH_LIGHT_V3_0: str
+    EMBED_MULTILINGUAL_V3_0: str
+    EMBED_MULTILINGUAL_LIGHT_V3_0: str
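
Expanding the docstring above into a runnable form: a minimal sketch that embeds one string using the new CohereEM constants. Assumptions beyond the diff: Cohere credentials are supplied via the environment (for example a COHERE_API_KEY variable), and the 0.5.47 wheel is installed.

```python
# Runnable version of the usage example from the new cohere_em.pyi docstring.
# Assumption: Cohere credentials are picked up from the environment
# (e.g. COHERE_API_KEY); adjust to however CohereEMInvoker is configured
# in your deployment.
import asyncio

from gllm_inference.em_invoker import CohereEMInvoker
from gllm_inference.model import CohereEM


async def main() -> None:
    em_invoker = CohereEMInvoker(CohereEM.EMBED_V4_0)
    result = await em_invoker.invoke("Hello, world!")
    print(result)  # the embedding returned by the invoker


if __name__ == "__main__":
    asyncio.run(main())
```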

gllm_inference/model/em/jina_em.pyi
@@ -0,0 +1,22 @@
+class JinaEM:
+    '''Defines Jina embedding model names constants.
+
+    Usage example:
+    ```python
+    from gllm_inference.model import JinaEM
+    from gllm_inference.em_invoker import JinaEMInvoker
+
+    em_invoker = JinaEMInvoker(JinaEM.JINA_EMBEDDINGS_V4)
+    result = await em_invoker.invoke("Hello, world!")
+    ```
+    '''
+    JINA_EMBEDDINGS_V4: str
+    JINA_EMBEDDINGS_V3: str
+    JINA_EMBEDDINGS_V2_BASE_EN: str
+    JINA_EMBEDDINGS_V2_BASE_CODE: str
+    JINA_CLIP_V2: str
+    JINA_CLIP_V1: str
+    JINA_CODE_EMBEDDINGS_1_5B: str
+    JINA_CODE_EMBEDDINGS_0_5B: str
+    JINA_COLBERT_V2: str
+    JINA_COLBERT_V1_EN: str
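
The same pattern applies to the new JinaEM constants; the sketch below additionally shows picking a model constant by input type. The task-to-model mapping and the JINA_API_KEY environment variable are illustrative assumptions, not part of the package.

```python
# Sketch: choose a Jina embedding model constant by task, then embed.
# Assumption: Jina credentials come from the environment (e.g. JINA_API_KEY).
import asyncio

from gllm_inference.em_invoker import JinaEMInvoker
from gllm_inference.model import JinaEM

# Illustrative mapping over the new constants (text, code, and CLIP families).
MODEL_BY_TASK = {
    "text": JinaEM.JINA_EMBEDDINGS_V4,
    "code": JinaEM.JINA_CODE_EMBEDDINGS_1_5B,
    "image_text": JinaEM.JINA_CLIP_V2,
}


async def main() -> None:
    em_invoker = JinaEMInvoker(MODEL_BY_TASK["code"])
    result = await em_invoker.invoke("def add(a, b): return a + b")
    print(result)


if __name__ == "__main__":
    asyncio.run(main())
```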

gllm_inference/model/lm/anthropic_lm.pyi
@@ -12,9 +12,11 @@ class AnthropicLM:
     '''
     CLAUDE_OPUS_4_1: str
     CLAUDE_OPUS_4: str
+    CLAUDE_SONNET_4_5: str
     CLAUDE_SONNET_4: str
     CLAUDE_SONNET_3_7: str
     CLAUDE_SONNET_3_5: str
+    CLAUDE_HAIKU_4_5: str
     CLAUDE_HAIKU_3_5: str
     CLAUDE_OPUS_3: str
     CLAUDE_HAIKU_3: str
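
The two added constants (CLAUDE_SONNET_4_5 and CLAUDE_HAIKU_4_5) slot into the existing invoker pattern. The sketch below assumes an AnthropicLMInvoker class in gllm_inference.lm_invoker with the same constructor and invoke shape as the XAILMInvoker shown in the xai_lm.pyi hunk below; verify the class name against your installed version, since only the constants themselves appear in this diff.

```python
# Sketch (assumed API): pass the newly added Claude 4.5 constants to an LM invoker.
# AnthropicLMInvoker is an assumption inferred from the package's naming pattern;
# only the AnthropicLM constants are confirmed by this diff.
import asyncio

from gllm_inference.lm_invoker import AnthropicLMInvoker  # assumed name
from gllm_inference.model import AnthropicLM


async def main() -> None:
    lm_invoker = AnthropicLMInvoker(AnthropicLM.CLAUDE_SONNET_4_5)
    response = await lm_invoker.invoke("Hello, world!")
    print(response)


if __name__ == "__main__":
    asyncio.run(main())
```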

gllm_inference/model/lm/xai_lm.pyi
@@ -0,0 +1,19 @@
+class XAILM:
+    '''Defines XAI language model names constants.
+
+    Usage example:
+    ```python
+    from gllm_inference.model import XAILM
+    from gllm_inference.lm_invoker import XAILMInvoker
+
+    lm_invoker = XAILMInvoker(XAILM.GROK_4_FAST_REASONING)
+    response = await lm_invoker.invoke("Hello, world!")
+    ```
+    '''
+    GROK_CODE_FAST_1: str
+    GROK_4_FAST_REASONING: str
+    GROK_4_FAST_NON_REASONING: str
+    GROK_4_0709: str
+    GROK_3_MINI: str
+    GROK_3: str
+    GROK_2_VISION_1212: str
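
And the corresponding runnable form of the xai_lm.pyi docstring example. The invoker and constant names come straight from the stub above; the XAI_API_KEY environment variable is an assumption.

```python
# Runnable version of the usage example from the new xai_lm.pyi docstring.
# Assumption: xAI credentials are read from the environment (e.g. XAI_API_KEY).
import asyncio

from gllm_inference.lm_invoker import XAILMInvoker
from gllm_inference.model import XAILM


async def main() -> None:
    lm_invoker = XAILMInvoker(XAILM.GROK_4_FAST_REASONING)
    response = await lm_invoker.invoke("Hello, world!")
    print(response)


if __name__ == "__main__":
    asyncio.run(main())
```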

gllm_inference.cp313-win_amd64.pyd
Binary file

{gllm_inference_binary-0.5.46.dist-info → gllm_inference_binary-0.5.47.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: gllm-inference-binary
-Version: 0.5.46
+Version: 0.5.47
 Summary: A library containing components related to model inferences in Gen AI applications.
 Author-email: Henry Wicaksono <henry.wicaksono@gdplabs.id>, Resti Febrina <resti.febrina@gdplabs.id>
 Requires-Python: <3.14,>=3.11

{gllm_inference_binary-0.5.46.dist-info → gllm_inference_binary-0.5.47.dist-info}/RECORD
@@ -1,4 +1,4 @@
-gllm_inference.cp313-win_amd64.pyd,sha256=
+gllm_inference.cp313-win_amd64.pyd,sha256=Rw9RPvGn34ccThSX5Uq7YbA_TRjLuDffgzVre52ajKQ,3824128
 gllm_inference.pyi,sha256=1WeCtSLoqo97eCY-WiMP-LF9UUJG_pT5NTESuCoStRg,5211
 gllm_inference/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 gllm_inference/constants.pyi,sha256=PncjVw-mkzcJ3ln1ohvVZGdJ-TD-VZy1Ygn4Va8Z7i0,350
@@ -29,7 +29,7 @@ gllm_inference/em_invoker/schema/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeR
 gllm_inference/em_invoker/schema/bedrock.pyi,sha256=HoNgVi0T21aFd1JrCnSLu4yryv8k8RnYdR3-tIdHFgA,498
 gllm_inference/em_invoker/schema/cohere.pyi,sha256=Wio6h0sbY93GygqETtflRaaucFzYSeLZRg7jyxMDK0s,567
 gllm_inference/em_invoker/schema/google.pyi,sha256=bzdtu4DFH2kATLybIeNl_Lznj99H-6u2Fvx3Zx52oZg,190
-gllm_inference/em_invoker/schema/jina.pyi,sha256=
+gllm_inference/em_invoker/schema/jina.pyi,sha256=B38heufA7nwWt_f93qY_aQVieuOSOH35Xotf3p_3BKc,770
 gllm_inference/em_invoker/schema/langchain.pyi,sha256=SZ13HDcvAOGmDTi2b72H6Y1J5GePR21JdnM6gYrwcGs,117
 gllm_inference/em_invoker/schema/openai.pyi,sha256=rNRqN62y5wHOKlr4T0n0m41ikAnSrD72CTnoHxo6kEM,146
 gllm_inference/em_invoker/schema/openai_compatible.pyi,sha256=A9MOeBhI-IPuvewOk4YYOAGtgyKohERx6-9cEYtbwvs,157
@@ -65,16 +65,19 @@ gllm_inference/lm_invoker/schema/openai.pyi,sha256=TsCr8_SM5kK2JyROeXtmH13n46TgK
 gllm_inference/lm_invoker/schema/openai_chat_completions.pyi,sha256=nNPb7ETC9IrJwkV5wfbGf6Co3-qdq4lhcXz0l_qYCE4,1261
 gllm_inference/lm_invoker/schema/portkey.pyi,sha256=V2q4JIwDAR7BidqfmO01u1_1mLOMtm5OCon6sN2zNt0,662
 gllm_inference/lm_invoker/schema/xai.pyi,sha256=jpC6ZSBDUltzm9GjD6zvSFIPwqizn_ywLnjvwSa7KuU,663
-gllm_inference/model/__init__.pyi,sha256=
+gllm_inference/model/__init__.pyi,sha256=e9Jq5V2iVPpjBh_bOEBoXdsU2LleAxKfJ0r-1rZJ5R0,822
 gllm_inference/model/em/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+gllm_inference/model/em/cohere_em.pyi,sha256=uF1AmDO-skQteYqzxJ3DK10SqgfdW0oW9L8Ym34eU04,505
 gllm_inference/model/em/google_em.pyi,sha256=c53H-KNdNOK9ppPLyOSkmCA890eF5FsMd05upkPIzF0,487
+gllm_inference/model/em/jina_em.pyi,sha256=wo3EcKxOqMUnVMgH7Q1Ak8UzaumzhNGuhrtS1KrlXjw,649
 gllm_inference/model/em/openai_em.pyi,sha256=b6ID1JsLZH9OAo9E37CkbgWNR_eI65eKXK6TYi_0ndA,457
 gllm_inference/model/em/twelvelabs_em.pyi,sha256=5R2zkKDiEatdATFzF8TOoKW9XRkOsOoNGY5lORimueo,413
 gllm_inference/model/em/voyage_em.pyi,sha256=kTInLttWfPqCNfBX-TK5VMMaFfPxwqqudBw1kz4hnxk,551
 gllm_inference/model/lm/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-gllm_inference/model/lm/anthropic_lm.pyi,sha256=
-gllm_inference/model/lm/google_lm.pyi,sha256=
+gllm_inference/model/lm/anthropic_lm.pyi,sha256=ccUpxddakurLFHivl5UzJxgODLhcFgx8XC7CKa-99NE,633
+gllm_inference/model/lm/google_lm.pyi,sha256=OLuoqT0FnJOLsNalulBMEXuCYAXoF8Y7vjfSBgjaJxA,529
 gllm_inference/model/lm/openai_lm.pyi,sha256=yj3AJj1xDYRkNIPHX2enw46AJ9wArPZruKsxg1ME9Rg,645
+gllm_inference/model/lm/xai_lm.pyi,sha256=O3G9Lj1Ii31CyCDrwYVkPPJN6X8V-WBF9xILUPUE-qY,525
 gllm_inference/output_parser/__init__.pyi,sha256=dhAeRTBxc6CfS8bhnHjbtrnyqJ1iyffvUZkGp4UrJNM,132
 gllm_inference/output_parser/json_output_parser.pyi,sha256=YtgQh8Uzy8W_Tgh8DfuR7VFFS7qvLEasiTwRfaGZZEU,2993
 gllm_inference/output_parser/output_parser.pyi,sha256=-Xu5onKCBDqShcO-VrQh5icqAmXdihGc3rkZxL93swg,975
@@ -127,7 +130,7 @@ gllm_inference/utils/io_utils.pyi,sha256=Eg7dvHWdXslTKdjh1j3dG50i7r35XG2zTmJ9XXv
 gllm_inference/utils/langchain.pyi,sha256=4AwFiVAO0ZpdgmqeC4Pb5NJwBt8vVr0MSUqLeCdTscc,1194
 gllm_inference/utils/validation.pyi,sha256=-RdMmb8afH7F7q4Ao7x6FbwaDfxUHn3hA3WiOgzB-3s,397
 gllm_inference.build/.gitignore,sha256=aEiIwOuxfzdCmLZe4oB1JsBmCUxwG8x-u-HBCV9JT8E,1
-gllm_inference_binary-0.5.
-gllm_inference_binary-0.5.
-gllm_inference_binary-0.5.
-gllm_inference_binary-0.5.
+gllm_inference_binary-0.5.47.dist-info/METADATA,sha256=1JStZjTbPf51eWVkFu16JBAqbK1e1VohgrqCC5VcnIc,5945
+gllm_inference_binary-0.5.47.dist-info/WHEEL,sha256=O_u6PJIQ2pIcyIInxVQ9r-yArMuUZbBIaF1kpYVkYxA,96
+gllm_inference_binary-0.5.47.dist-info/top_level.txt,sha256=FpOjtN80F-qVNgbScXSEyqa0w09FYn6301iq6qt69IQ,15
+gllm_inference_binary-0.5.47.dist-info/RECORD,,
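
For reference, each RECORD entry has the form path,sha256=<digest>,<size>, where the digest is the URL-safe base64 encoding of the file's SHA-256 hash with the trailing '=' padding stripped (per the wheel/RECORD specification). Below is a minimal sketch for recomputing an entry from an unpacked wheel; the example path is only an illustration.

```python
# Sketch: recompute a wheel RECORD entry (path,sha256=<digest>,<size>) for a file,
# so the hashes listed above can be checked against an unpacked 0.5.47 wheel.
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    data = Path(path).read_bytes()
    digest = hashlib.sha256(data).digest()
    # RECORD uses URL-safe base64 with the trailing '=' padding removed.
    encoded = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return f"{path},sha256={encoded},{len(data)}"


# Example usage (run from the root of the unpacked wheel):
print(record_entry("gllm_inference/model/em/jina_em.pyi"))
```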

{gllm_inference_binary-0.5.46.dist-info → gllm_inference_binary-0.5.47.dist-info}/WHEEL
RENAMED
File without changes

{gllm_inference_binary-0.5.46.dist-info → gllm_inference_binary-0.5.47.dist-info}/top_level.txt
RENAMED
File without changes