gllm-inference-binary 0.5.63-cp313-cp313-win_amd64.whl → 0.5.64-cp313-cp313-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of gllm-inference-binary might be problematic.
- gllm_inference/lm_invoker/openai_lm_invoker.pyi +3 -0
- gllm_inference/lm_invoker/schema/openai.pyi +1 -0
- gllm_inference.cp313-win_amd64.pyd +0 -0
- {gllm_inference_binary-0.5.63.dist-info → gllm_inference_binary-0.5.64.dist-info}/METADATA +1 -1
- {gllm_inference_binary-0.5.63.dist-info → gllm_inference_binary-0.5.64.dist-info}/RECORD +7 -7
- {gllm_inference_binary-0.5.63.dist-info → gllm_inference_binary-0.5.64.dist-info}/WHEEL +0 -0
- {gllm_inference_binary-0.5.63.dist-info → gllm_inference_binary-0.5.64.dist-info}/top_level.txt +0 -0
gllm_inference/lm_invoker/openai_lm_invoker.pyi
@@ -7,6 +7,9 @@ from gllm_inference.lm_invoker.lm_invoker import BaseLMInvoker as BaseLMInvoker
 from gllm_inference.lm_invoker.schema.openai import InputType as InputType, Key as Key, OutputType as OutputType, ReasoningEffort as ReasoningEffort, ReasoningSummary as ReasoningSummary
 from gllm_inference.schema import ActivityEvent as ActivityEvent, Attachment as Attachment, AttachmentType as AttachmentType, CodeEvent as CodeEvent, CodeExecResult as CodeExecResult, LMOutput as LMOutput, MCPCall as MCPCall, MCPCallActivity as MCPCallActivity, MCPListToolsActivity as MCPListToolsActivity, MCPServer as MCPServer, Message as Message, MessageRole as MessageRole, ModelId as ModelId, ModelProvider as ModelProvider, Reasoning as Reasoning, ResponseSchema as ResponseSchema, ThinkingEvent as ThinkingEvent, TokenUsage as TokenUsage, ToolCall as ToolCall, ToolResult as ToolResult, WebSearchActivity as WebSearchActivity
 from langchain_core.tools import Tool as LangChainTool
+from openai import AsyncStream as AsyncStream
+from openai.types.responses import Response as Response, ResponseFunctionWebSearch as ResponseFunctionWebSearch, ResponseOutputItem as ResponseOutputItem
+from openai.types.responses.response_output_item import McpCall as McpCall, McpListTools as McpListTools
 from typing import Any
 
 SUPPORTED_ATTACHMENTS: Incomplete
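The three added imports pull in OpenAI Responses API types (AsyncStream, Response, ResponseFunctionWebSearch, ResponseOutputItem) plus the MCP output-item models McpCall and McpListTools, which suggests the invoker now inspects MCP activity on Responses API results. The sketch below is not gllm_inference's implementation; it only illustrates, with the plain OpenAI SDK, how output items of these types are typically told apart (the client setup and model name are assumptions).

# Illustrative sketch only (not gllm_inference code): distinguishing the
# Responses API output-item types referenced by the new imports.
import asyncio

from openai import AsyncOpenAI
from openai.types.responses import Response, ResponseFunctionWebSearch
from openai.types.responses.response_output_item import McpCall, McpListTools


def summarize_output_items(response: Response) -> list[str]:
    """Label each output item of a Responses API result."""
    labels: list[str] = []
    for item in response.output:
        if isinstance(item, McpCall):
            labels.append(f"mcp_call: {item.name}")
        elif isinstance(item, McpListTools):
            labels.append(f"mcp_list_tools from server {item.server_label}")
        elif isinstance(item, ResponseFunctionWebSearch):
            labels.append("web_search_call")
        else:
            labels.append(item.type)
    return labels


async def main() -> None:
    client = AsyncOpenAI()  # assumes OPENAI_API_KEY is set in the environment
    response = await client.responses.create(model="gpt-4o-mini", input="Hello")
    print(summarize_output_items(response))


if __name__ == "__main__":
    asyncio.run(main())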
gllm_inference.cp313-win_amd64.pyd
Binary file
{gllm_inference_binary-0.5.63.dist-info → gllm_inference_binary-0.5.64.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: gllm-inference-binary
-Version: 0.5.63
+Version: 0.5.64
 Summary: A library containing components related to model inferences in Gen AI applications.
 Author-email: Henry Wicaksono <henry.wicaksono@gdplabs.id>, "Delfia N. A. Putri" <delfia.n.a.putri@gdplabs.id>
 Requires-Python: <3.14,>=3.11
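Only the Version field changes in METADATA; the surrounding header fields are context. For reference, these same fields can be read from an installed copy of the wheel with the standard library (assuming the package has been installed via pip):

# Reading the METADATA fields shown above from an installed wheel
# (assumes `pip install gllm-inference-binary==0.5.64` has been run).
from importlib import metadata

dist = metadata.distribution("gllm-inference-binary")
print(dist.version)                      # 0.5.64 for this release
print(dist.metadata["Summary"])          # package summary line
print(dist.metadata["Requires-Python"])  # <3.14,>=3.11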
{gllm_inference_binary-0.5.63.dist-info → gllm_inference_binary-0.5.64.dist-info}/RECORD
@@ -1,4 +1,4 @@
-gllm_inference.cp313-win_amd64.pyd,sha256=…
+gllm_inference.cp313-win_amd64.pyd,sha256=lw6clkti1GE34Nh5SPYV5oRRqOqiyDivjyRvndVk-k0,4032512
 gllm_inference.pyi,sha256=3UoxsyZnC_6PhNzIKIp37rL6CNEw-Ve0RJHiG9jLOBY,5193
 gllm_inference/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 gllm_inference/constants.pyi,sha256=8jIYOyxJYVWUYXSXF3vag9HhHwjq1iU9tzPiosRHkWk,328
@@ -51,7 +51,7 @@ gllm_inference/lm_invoker/litellm_lm_invoker.pyi,sha256=qG8pPTiDJZR2e7wr5Q2VyceC…
 gllm_inference/lm_invoker/lm_invoker.pyi,sha256=L_PHRCeHo0dNs6BjnB8H29irGib-qhxKYf7F7pZlU0E,8652
 gllm_inference/lm_invoker/openai_chat_completions_lm_invoker.pyi,sha256=qt9DAdJM7YBB4op-6SOJB0kCouPYVxtIamGUXLGLUeA,13888
 gllm_inference/lm_invoker/openai_compatible_lm_invoker.pyi,sha256=T9sShA_9fgEuaaAuT2gJZq_EYNbEhf3IkWwMCwfszY8,4244
-gllm_inference/lm_invoker/openai_lm_invoker.pyi,sha256=…
+gllm_inference/lm_invoker/openai_lm_invoker.pyi,sha256=GH-veLKBOnaumflfGj-128VXtQXkXVxYFSfFpliaZPI,23055
 gllm_inference/lm_invoker/portkey_lm_invoker.pyi,sha256=FYOp4BaDfOtompWIRhDqzMVVSK-TiFyw7JA4TznANQE,15236
 gllm_inference/lm_invoker/sea_lion_lm_invoker.pyi,sha256=Qqplj79JsnLGY6xjhbXYbz6Ramxur4sXfvv4svk19os,3533
 gllm_inference/lm_invoker/xai_lm_invoker.pyi,sha256=6beZsQjGUTo7TdzWBWksRzVGT58XyipErpGfiRq6NH0,13017
@@ -63,7 +63,7 @@ gllm_inference/lm_invoker/schema/bedrock.pyi,sha256=rB1AWfER2BBKZ5I219211YE2EUFP…
 gllm_inference/lm_invoker/schema/datasaur.pyi,sha256=8lmb1PRbkqBsF_l7iOffxW0K5Xxpi69GW9Z7KxyxHTE,279
 gllm_inference/lm_invoker/schema/google.pyi,sha256=LQ14PJyDOe3K5TYvE-gzE1fjpZCSAy-0Sy9Lmw6fICY,827
 gllm_inference/lm_invoker/schema/langchain.pyi,sha256=2OJOUQPlGdlUbIOTDOyiWDBOMm3MoVX-kU2nK0zQsF0,452
-gllm_inference/lm_invoker/schema/openai.pyi,sha256=…
+gllm_inference/lm_invoker/schema/openai.pyi,sha256=nR4re7BWfYFePvfCM-iCzP6jcPp5C0f1PLvpFmq4gcs,2419
 gllm_inference/lm_invoker/schema/openai_chat_completions.pyi,sha256=nNPb7ETC9IrJwkV5wfbGf6Co3-qdq4lhcXz0l_qYCE4,1261
 gllm_inference/lm_invoker/schema/portkey.pyi,sha256=V2q4JIwDAR7BidqfmO01u1_1mLOMtm5OCon6sN2zNt0,662
 gllm_inference/lm_invoker/schema/xai.pyi,sha256=jpC6ZSBDUltzm9GjD6zvSFIPwqizn_ywLnjvwSa7KuU,663
@@ -134,7 +134,7 @@ gllm_inference/utils/io_utils.pyi,sha256=Eg7dvHWdXslTKdjh1j3dG50i7r35XG2zTmJ9XXv…
 gllm_inference/utils/langchain.pyi,sha256=4AwFiVAO0ZpdgmqeC4Pb5NJwBt8vVr0MSUqLeCdTscc,1194
 gllm_inference/utils/validation.pyi,sha256=OWRZxeVGIuuvNU0LqLGB-9gNmypvbH-LcSJx91rnH1k,453
 gllm_inference.build/.gitignore,sha256=aEiIwOuxfzdCmLZe4oB1JsBmCUxwG8x-u-HBCV9JT8E,1
-gllm_inference_binary-0.5.63.dist-info/METADATA,sha256=…
-gllm_inference_binary-0.5.63.dist-info/WHEEL,sha256=…
-gllm_inference_binary-0.5.63.dist-info/top_level.txt,sha256=…
-gllm_inference_binary-0.5.63.dist-info/RECORD,,
+gllm_inference_binary-0.5.64.dist-info/METADATA,sha256=SDCtX8UTQF-0fX_doIZLwVuR9pL60A5YCawZxW3v2Zo,5955
+gllm_inference_binary-0.5.64.dist-info/WHEEL,sha256=O_u6PJIQ2pIcyIInxVQ9r-yArMuUZbBIaF1kpYVkYxA,96
+gllm_inference_binary-0.5.64.dist-info/top_level.txt,sha256=FpOjtN80F-qVNgbScXSEyqa0w09FYn6301iq6qt69IQ,15
+gllm_inference_binary-0.5.64.dist-info/RECORD,,
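Each RECORD line has the form path,sha256=<digest>,<size>, where the digest is the urlsafe base64 encoding of the file's SHA-256 hash with the trailing "=" padding stripped; the RECORD file itself is listed with an empty hash and size, hence the trailing ",,". A minimal sketch of how such an entry is produced (the path used is only an example):

# Sketch: building a wheel RECORD entry (urlsafe-base64 SHA-256 digest,
# padding stripped, followed by the file size in bytes).
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest())
    return f"{path},sha256={digest.rstrip(b'=').decode('ascii')},{len(data)}"


# Example with a hypothetical local file path:
print(record_entry("gllm_inference/constants.pyi"))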
{gllm_inference_binary-0.5.63.dist-info → gllm_inference_binary-0.5.64.dist-info}/WHEEL
RENAMED
File without changes
{gllm_inference_binary-0.5.63.dist-info → gllm_inference_binary-0.5.64.dist-info}/top_level.txt
RENAMED
File without changes