gllm-inference-binary 0.5.38__cp312-cp312-win_amd64.whl → 0.5.41__cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of gllm-inference-binary might be problematic.
- gllm_inference/em_invoker/azure_openai_em_invoker.pyi +2 -2
- gllm_inference/em_invoker/bedrock_em_invoker.pyi +2 -2
- gllm_inference/em_invoker/google_em_invoker.pyi +2 -2
- gllm_inference/em_invoker/openai_em_invoker.pyi +2 -2
- gllm_inference/em_invoker/twelevelabs_em_invoker.pyi +2 -2
- gllm_inference/em_invoker/voyage_em_invoker.pyi +2 -2
- gllm_inference/lm_invoker/anthropic_lm_invoker.pyi +22 -28
- gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi +24 -29
- gllm_inference/lm_invoker/bedrock_lm_invoker.pyi +10 -20
- gllm_inference/lm_invoker/datasaur_lm_invoker.pyi +11 -21
- gllm_inference/lm_invoker/google_lm_invoker.pyi +46 -28
- gllm_inference/lm_invoker/langchain_lm_invoker.pyi +10 -20
- gllm_inference/lm_invoker/litellm_lm_invoker.pyi +25 -30
- gllm_inference/lm_invoker/lm_invoker.pyi +4 -1
- gllm_inference/lm_invoker/openai_chat_completions_lm_invoker.pyi +22 -28
- gllm_inference/lm_invoker/openai_compatible_lm_invoker.pyi +4 -1
- gllm_inference/lm_invoker/openai_lm_invoker.pyi +45 -50
- gllm_inference/lm_invoker/xai_lm_invoker.pyi +26 -42
- gllm_inference/schema/events.pyi +15 -15
- gllm_inference/schema/lm_output.pyi +4 -0
- gllm_inference.cp312-win_amd64.pyd +0 -0
- gllm_inference.pyi +1 -1
- {gllm_inference_binary-0.5.38.dist-info → gllm_inference_binary-0.5.41.dist-info}/METADATA +1 -1
- {gllm_inference_binary-0.5.38.dist-info → gllm_inference_binary-0.5.41.dist-info}/RECORD +26 -26
- {gllm_inference_binary-0.5.38.dist-info → gllm_inference_binary-0.5.41.dist-info}/WHEEL +0 -0
- {gllm_inference_binary-0.5.38.dist-info → gllm_inference_binary-0.5.41.dist-info}/top_level.txt +0 -0
gllm_inference/schema/lm_output.pyi CHANGED

@@ -1,4 +1,5 @@
 from gllm_core.schema import Chunk as Chunk
+from gllm_inference.schema.attachment import Attachment as Attachment
 from gllm_inference.schema.code_exec_result import CodeExecResult as CodeExecResult
 from gllm_inference.schema.mcp import MCPCall as MCPCall
 from gllm_inference.schema.reasoning import Reasoning as Reasoning
@@ -12,6 +13,8 @@ class LMOutput(BaseModel):
 
     Attributes:
         response (str): The text response. Defaults to an empty string.
+        attachments (list[Attachment]): The attachments, if the language model decides to output attachments.
+            Defaults to an empty list.
         tool_calls (list[ToolCall]): The tool calls, if the language model decides to invoke tools.
            Defaults to an empty list.
         structured_output (dict[str, Any] | BaseModel | None): The structured output, if a response schema is defined
@@ -29,6 +32,7 @@ class LMOutput(BaseModel):
            Defaults to an empty list.
    """
    response: str
+   attachments: list[Attachment]
    tool_calls: list[ToolCall]
    structured_output: dict[str, Any] | BaseModel | None
    token_usage: TokenUsage | None
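
The lm_output.pyi change above boils down to one new field on the LMOutput Pydantic model. The sketch below is a hypothetical re-creation of the shape the docstring describes; Attachment and ToolCall are stand-ins for the package's own schema classes (which ship only inside the compiled binary), and fields not relevant to this change are omitted.

# Hypothetical sketch of the updated LMOutput shape; Attachment and ToolCall
# are stand-ins for the package's own schema classes, and fields unrelated to
# this change (structured_output, token_usage, ...) are omitted.
from typing import Any

from pydantic import BaseModel, Field

class Attachment(BaseModel):  # stand-in for gllm_inference.schema.attachment.Attachment
    filename: str = ""
    data: bytes = b""

class ToolCall(BaseModel):  # stand-in for the package's ToolCall schema
    name: str = ""
    args: dict[str, Any] = Field(default_factory=dict)

class LMOutput(BaseModel):
    """Mirrors the fields and defaults described in the docstring above."""
    response: str = ""                                            # defaults to an empty string
    attachments: list[Attachment] = Field(default_factory=list)   # new in 0.5.41, defaults to []
    tool_calls: list[ToolCall] = Field(default_factory=list)      # defaults to []

Consumers can then treat output.attachments the same way they already treat output.tool_calls: iterate over it and handle whatever the model returned, with an empty list meaning no attachments were produced.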

gllm_inference.cp312-win_amd64.pyd CHANGED

Binary file

gllm_inference.pyi CHANGED

@@ -79,6 +79,7 @@ import voyageai
 import voyageai.client_async
 import http
 import http.HTTPStatus
+import uuid
 import gllm_core.constants
 import gllm_core.event
 import gllm_core.schema
@@ -135,7 +136,6 @@ import gllm_inference.realtime_chat.output_streamer.ConsoleOutputStreamer
 import google.genai.live
 import gllm_core.utils.logger_manager
 import mimetypes
-import uuid
 import pathlib
 import filetype
 import magic

{gllm_inference_binary-0.5.38.dist-info → gllm_inference_binary-0.5.41.dist-info}/METADATA RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: gllm-inference-binary
-Version: 0.5.38
+Version: 0.5.41
 Summary: A library containing components related to model inferences in Gen AI applications.
 Author-email: Henry Wicaksono <henry.wicaksono@gdplabs.id>, Resti Febrina <resti.febrina@gdplabs.id>
 Requires-Python: <3.14,>=3.11

{gllm_inference_binary-0.5.38.dist-info → gllm_inference_binary-0.5.41.dist-info}/RECORD RENAMED

@@ -1,5 +1,5 @@
-gllm_inference.cp312-win_amd64.pyd,sha256=
-gllm_inference.pyi,sha256=
+gllm_inference.cp312-win_amd64.pyd,sha256=OXf_qtyCqhGinm82k20ISwu1u4FCGYG9pL-s1SkrrOc,3558912
+gllm_inference.pyi,sha256=d5iZJ3btcsBjjKRCWz639Tu8HMMIpYliPmJCNCputQY,4730
 gllm_inference/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 gllm_inference/constants.pyi,sha256=1OBoHfeWfW9bXH9kStNEH__MGnGp--jLfyheAeQnogY,302
 gllm_inference/builder/__init__.pyi,sha256=-bw1uDx7CAM7pkvjvb1ZXku9zXlQ7aEAyC83KIn3bz8,506
@@ -12,15 +12,15 @@ gllm_inference/catalog/catalog.pyi,sha256=eWPqgQKi-SJGHabi_XOTEKpAj96OSRypKsb5ZE
 gllm_inference/catalog/lm_request_processor_catalog.pyi,sha256=FiveqPDkV58XbDO2znXL-Ix5tFbZwNiVnitlEa90YOY,5536
 gllm_inference/catalog/prompt_builder_catalog.pyi,sha256=iViWB4SaezzjQY4UY1YxeoXUNxqxa2cTJGaD9JSx4Q8,3279
 gllm_inference/em_invoker/__init__.pyi,sha256=pmbsjmsqXwfe4WPykMnrmasKrYuylJWnf2s0pbo0ioM,997
-gllm_inference/em_invoker/azure_openai_em_invoker.pyi,sha256=
-gllm_inference/em_invoker/bedrock_em_invoker.pyi,sha256=
+gllm_inference/em_invoker/azure_openai_em_invoker.pyi,sha256=TXC5Kgf1eZqK2FHKAyeG3LB1SEsSEStnbk9bI1mjC5k,5049
+gllm_inference/em_invoker/bedrock_em_invoker.pyi,sha256=kQETh2r-WR_H3APtt4QavmfwGOR3KB4k6USNYvFateY,5831
 gllm_inference/em_invoker/em_invoker.pyi,sha256=YDYJ8TGScsz5Gg-OBnEENN1tI1RYvwoddypxUr6SAWw,5191
-gllm_inference/em_invoker/google_em_invoker.pyi,sha256=
+gllm_inference/em_invoker/google_em_invoker.pyi,sha256=zZYjeLp9ncwIVM4UHqDJSVOFn1eXiaz9Ba24-_fCF2c,6953
 gllm_inference/em_invoker/langchain_em_invoker.pyi,sha256=nhX6LynrjhfySEt_44OlLoSBd15hoz3giWyNM9CYLKY,3544
 gllm_inference/em_invoker/openai_compatible_em_invoker.pyi,sha256=SbvCbOhdpkq6IyPhGd_IlxD8hbXDZID2rIehY6mJOIs,2923
-gllm_inference/em_invoker/openai_em_invoker.pyi,sha256=
-gllm_inference/em_invoker/twelevelabs_em_invoker.pyi,sha256=
-gllm_inference/em_invoker/voyage_em_invoker.pyi,sha256=
+gllm_inference/em_invoker/openai_em_invoker.pyi,sha256=dwZr9rjrjm060HEnyaPR9-jFJpxSi7fWx7i9ZB4aEY4,6313
+gllm_inference/em_invoker/twelevelabs_em_invoker.pyi,sha256=4E-xCtkkiry_tuMiI9jUk6l6iwy6iPQNxaq67AqHvjk,5448
+gllm_inference/em_invoker/voyage_em_invoker.pyi,sha256=nlcyjYnd3JvKy8UCGzjfXQLR4UmQIJnRbnNwnDK3xng,5621
 gllm_inference/em_invoker/langchain/__init__.pyi,sha256=aOTlRvS9aG1tBErjsmhe75s4Sq-g2z9ArfGqNW7QyEs,151
 gllm_inference/em_invoker/langchain/em_invoker_embeddings.pyi,sha256=BBSDazMOckO9Aw17tC3LGUTPqLb01my1xUZLtKZlwJY,3388
 gllm_inference/em_invoker/schema/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -36,18 +36,18 @@ gllm_inference/exceptions/error_parser.pyi,sha256=4aiJZhBzBOqlhdmpvaCvildGy7_Xxl
 gllm_inference/exceptions/exceptions.pyi,sha256=6y3ECgHAStqMGgQv8Dv-Ui-5PDD07mSj6qaRZeSWea4,5857
 gllm_inference/exceptions/provider_error_map.pyi,sha256=4AsAgbXAh91mxEW2YiomEuhBoeSNeAIo9WbT9WK8gQk,1233
 gllm_inference/lm_invoker/__init__.pyi,sha256=jG1xc5fTOeIgeKKVYSnsMzQThKk9kTW38yO_MYtv540,1387
-gllm_inference/lm_invoker/anthropic_lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/bedrock_lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/datasaur_lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/google_lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/langchain_lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/litellm_lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/openai_chat_completions_lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/openai_compatible_lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/openai_lm_invoker.pyi,sha256=
-gllm_inference/lm_invoker/xai_lm_invoker.pyi,sha256=
+gllm_inference/lm_invoker/anthropic_lm_invoker.pyi,sha256=JSgKUk9d1ZHlitv_ZjHlAk2hIW-J7u6yslVHflIeUro,16726
+gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi,sha256=FYfRNPG-oD4wIfitjTHnGib1uMZL7Pid0gbrRsymAHU,14601
+gllm_inference/lm_invoker/bedrock_lm_invoker.pyi,sha256=dsNxj3ZfHxUplg6nBLgxVGooGYq1QP89gYzCnmRCz3g,11810
+gllm_inference/lm_invoker/datasaur_lm_invoker.pyi,sha256=LR0EM4vTfufq9OWk8JVIwLyFeJFTguPNmPgJBUooSq4,8342
+gllm_inference/lm_invoker/google_lm_invoker.pyi,sha256=aSmEgoYj_V72Nb6erDResphw9RaHfbE5C6PhqpMfEeQ,17674
+gllm_inference/lm_invoker/langchain_lm_invoker.pyi,sha256=tJIxkFUKjLF-yz0niaDjN3L0QNCbn4sT8hmPKtERpog,12742
+gllm_inference/lm_invoker/litellm_lm_invoker.pyi,sha256=IJxRUkmgXY8oQwS7tJoskO8fiESB7M4pyvpE64pyXDo,12648
+gllm_inference/lm_invoker/lm_invoker.pyi,sha256=vUmMNEl7F__PavQJ42scoYGyWdEvZOw2Bwxhoqv_gKE,8659
+gllm_inference/lm_invoker/openai_chat_completions_lm_invoker.pyi,sha256=uYJFgi4tJGab77232IC1gdoU9h9AqoClIUj6tM6O47s,15177
+gllm_inference/lm_invoker/openai_compatible_lm_invoker.pyi,sha256=T9sShA_9fgEuaaAuT2gJZq_EYNbEhf3IkWwMCwfszY8,4244
+gllm_inference/lm_invoker/openai_lm_invoker.pyi,sha256=10iKCyleqHNbJc8M1rj3ogRcNlNxcVgyk0v6TcS6gf4,23452
+gllm_inference/lm_invoker/xai_lm_invoker.pyi,sha256=gyi12K7M9HkjNX6pU6NVv5Uq3-aHErixO-PVhHjioo8,14632
 gllm_inference/lm_invoker/batch/__init__.pyi,sha256=vJOTHRJ83oq8Bq0UsMdID9_HW5JAxr06gUs4aPRZfEE,130
 gllm_inference/lm_invoker/batch/batch_operations.pyi,sha256=o2U17M41RKVFW6j_oxy-SxU1JqUtVt75pKRxrqXzorE,5499
 gllm_inference/lm_invoker/schema/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -101,9 +101,9 @@ gllm_inference/schema/attachment.pyi,sha256=9zgAjGXBjLfzPGaKi68FMW6b5mXdEA352nDe
 gllm_inference/schema/code_exec_result.pyi,sha256=WQ-ARoGM9r6nyRX-A0Ro1XKiqrc9R3jRYXZpu_xo5S4,573
 gllm_inference/schema/config.pyi,sha256=NVmjQK6HipIE0dKSfx12hgIC0O-S1HEcAc-TWlXAF5A,689
 gllm_inference/schema/enums.pyi,sha256=U30RGvNFcNNJxTZZPt8vK7SFp3W4KSPVFxTZaiF1eLU,1375
-gllm_inference/schema/events.pyi,sha256=
+gllm_inference/schema/events.pyi,sha256=HMkGU1XoLDR6h9-L13LEn_27Z9jkmKk-kBTD-0qKzJY,4029
 gllm_inference/schema/lm_input.pyi,sha256=HxQiZgY7zcXh_Dw8nK8LSeBTZEHMPZVwmPmnfgSsAbs,197
-gllm_inference/schema/lm_output.pyi,sha256=
+gllm_inference/schema/lm_output.pyi,sha256=DIV8BiIOPaSnMKxzKzH_Mp7j7-MScWCvmllegJDLqFg,2479
 gllm_inference/schema/mcp.pyi,sha256=4SgQ83pEowfWm2p-w9lupV4NayqqVBOy7SuYxIFeWRs,1045
 gllm_inference/schema/message.pyi,sha256=jJV6A0ihEcun2OhzyMtNkiHnf7d6v5R-GdpTBGfJ0AQ,2272
 gllm_inference/schema/model_id.pyi,sha256=NuaS4XlKDRJJezj45CEzn8reDDeII9XeRARmM5SZPqA,5408
@@ -117,7 +117,7 @@ gllm_inference/utils/io_utils.pyi,sha256=Eg7dvHWdXslTKdjh1j3dG50i7r35XG2zTmJ9XXv
 gllm_inference/utils/langchain.pyi,sha256=4AwFiVAO0ZpdgmqeC4Pb5NJwBt8vVr0MSUqLeCdTscc,1194
 gllm_inference/utils/validation.pyi,sha256=-RdMmb8afH7F7q4Ao7x6FbwaDfxUHn3hA3WiOgzB-3s,397
 gllm_inference.build/.gitignore,sha256=aEiIwOuxfzdCmLZe4oB1JsBmCUxwG8x-u-HBCV9JT8E,1
-gllm_inference_binary-0.5.38.dist-info/METADATA,sha256=
-gllm_inference_binary-0.5.38.dist-info/WHEEL,sha256=
-gllm_inference_binary-0.5.38.dist-info/top_level.txt,sha256=
-gllm_inference_binary-0.5.38.dist-info/RECORD,,
+gllm_inference_binary-0.5.41.dist-info/METADATA,sha256=8KVy2fPehAniiRxbGTA0N14cLIFFttOQj9gtrTbupEI,5770
+gllm_inference_binary-0.5.41.dist-info/WHEEL,sha256=x5rgv--I0NI0IT1Lh9tN1VG2cI637p3deednwYLKnxc,96
+gllm_inference_binary-0.5.41.dist-info/top_level.txt,sha256=FpOjtN80F-qVNgbScXSEyqa0w09FYn6301iq6qt69IQ,15
+gllm_inference_binary-0.5.41.dist-info/RECORD,,
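
For readers unfamiliar with the RECORD file, each entry above follows the standard wheel format: a path, the file's sha256 digest encoded as unpadded URL-safe base64, and its size in bytes. The helper below is an illustrative sketch (not part of this package) showing how one such entry could be checked against an extracted wheel.

# Illustrative helper for verifying a single RECORD entry such as
#   gllm_inference.pyi,sha256=d5iZJ3btcsBjjKRCWz639Tu8HMMIpYliPmJCNCputQY,4730
import base64
import hashlib
from pathlib import Path

def matches_record_entry(entry: str, root: Path = Path(".")) -> bool:
    """Return True if the file named in a RECORD line matches its recorded hash and size."""
    path, hash_field, size = entry.rsplit(",", 2)
    if not hash_field:  # the RECORD file lists itself without a hash or size
        return True
    data = (root / path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return hash_field == f"sha256={digest}" and (not size or int(size) == len(data))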

{gllm_inference_binary-0.5.38.dist-info → gllm_inference_binary-0.5.41.dist-info}/WHEEL RENAMED

File without changes

{gllm_inference_binary-0.5.38.dist-info → gllm_inference_binary-0.5.41.dist-info}/top_level.txt RENAMED

File without changes