gllm-inference-binary 0.5.61__cp313-cp313-win_amd64.whl → 0.5.62__cp313-cp313-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of gllm-inference-binary might be problematic. Click here for more details.
- gllm_inference/lm_invoker/anthropic_lm_invoker.pyi +3 -1
- gllm_inference.cp313-win_amd64.pyd +0 -0
- gllm_inference.pyi +1 -1
- {gllm_inference_binary-0.5.61.dist-info → gllm_inference_binary-0.5.62.dist-info}/METADATA +1 -1
- {gllm_inference_binary-0.5.61.dist-info → gllm_inference_binary-0.5.62.dist-info}/RECORD +7 -7
- {gllm_inference_binary-0.5.61.dist-info → gllm_inference_binary-0.5.62.dist-info}/WHEEL +0 -0
- {gllm_inference_binary-0.5.61.dist-info → gllm_inference_binary-0.5.62.dist-info}/top_level.txt +0 -0
|
@@ -1,12 +1,14 @@
|
|
|
1
1
|
from _typeshed import Incomplete
|
|
2
|
+
from anthropic.types import ContentBlockStopEvent as ContentBlockStopEvent, Message as Message, RawContentBlockDeltaEvent as RawContentBlockDeltaEvent, RawContentBlockStartEvent as RawContentBlockStartEvent
|
|
2
3
|
from gllm_core.event import EventEmitter as EventEmitter
|
|
3
4
|
from gllm_core.schema.tool import Tool as Tool
|
|
4
5
|
from gllm_core.utils.retry import RetryConfig as RetryConfig
|
|
5
6
|
from gllm_inference.constants import INVOKER_PROPAGATED_MAX_RETRIES as INVOKER_PROPAGATED_MAX_RETRIES
|
|
6
7
|
from gllm_inference.lm_invoker.lm_invoker import BaseLMInvoker as BaseLMInvoker
|
|
7
8
|
from gllm_inference.lm_invoker.schema.anthropic import InputType as InputType, Key as Key, OutputType as OutputType
|
|
8
|
-
from gllm_inference.schema import Attachment as Attachment, AttachmentType as AttachmentType, BatchStatus as BatchStatus, LMInput as LMInput, LMOutput as LMOutput,
|
|
9
|
+
from gllm_inference.schema import Attachment as Attachment, AttachmentType as AttachmentType, BatchStatus as BatchStatus, LMInput as LMInput, LMOutput as LMOutput, ModelId as ModelId, ModelProvider as ModelProvider, Reasoning as Reasoning, ResponseSchema as ResponseSchema, ThinkingEvent as ThinkingEvent, TokenUsage as TokenUsage, ToolCall as ToolCall, ToolResult as ToolResult
|
|
9
10
|
from langchain_core.tools import Tool as LangChainTool
|
|
11
|
+
from pydantic import BaseModel as BaseModel
|
|
10
12
|
from typing import Any
|
|
11
13
|
|
|
12
14
|
SUPPORTED_ATTACHMENTS: Incomplete
|
|
Binary file
|
gllm_inference.pyi
CHANGED
|
@@ -83,6 +83,7 @@ import voyageai
|
|
|
83
83
|
import voyageai.client_async
|
|
84
84
|
import http
|
|
85
85
|
import http.HTTPStatus
|
|
86
|
+
import __future__
|
|
86
87
|
import uuid
|
|
87
88
|
import gllm_core.constants
|
|
88
89
|
import gllm_core.event
|
|
@@ -104,7 +105,6 @@ import anthropic.types
|
|
|
104
105
|
import anthropic.types.message_create_params
|
|
105
106
|
import anthropic.types.messages
|
|
106
107
|
import anthropic.types.messages.batch_create_params
|
|
107
|
-
import __future__
|
|
108
108
|
import gllm_inference.schema.MessageRole
|
|
109
109
|
import langchain_core.language_models
|
|
110
110
|
import langchain_core.messages
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.2
|
|
2
2
|
Name: gllm-inference-binary
|
|
3
|
-
Version: 0.5.61
|
|
3
|
+
Version: 0.5.62
|
|
4
4
|
Summary: A library containing components related to model inferences in Gen AI applications.
|
|
5
5
|
Author-email: Henry Wicaksono <henry.wicaksono@gdplabs.id>, Resti Febrina <resti.febrina@gdplabs.id>
|
|
6
6
|
Requires-Python: <3.14,>=3.11
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
gllm_inference.cp313-win_amd64.pyd,sha256=
|
|
2
|
-
gllm_inference.pyi,sha256=
|
|
1
|
+
gllm_inference.cp313-win_amd64.pyd,sha256=a8pnMNYmesZ6SgSG09bgFTzy17VS0WnUP8AV1cJQwqY,4013056
|
|
2
|
+
gllm_inference.pyi,sha256=kqoM8wZ2DtFx8sbbAOaKn6iolfDpeGHOWoDZbSV7LRo,5147
|
|
3
3
|
gllm_inference/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
4
|
gllm_inference/constants.pyi,sha256=8jIYOyxJYVWUYXSXF3vag9HhHwjq1iU9tzPiosRHkWk,328
|
|
5
5
|
gllm_inference/builder/__init__.pyi,sha256=-bw1uDx7CAM7pkvjvb1ZXku9zXlQ7aEAyC83KIn3bz8,506
|
|
@@ -41,7 +41,7 @@ gllm_inference/exceptions/error_parser.pyi,sha256=4aiJZhBzBOqlhdmpvaCvildGy7_Xxl
|
|
|
41
41
|
gllm_inference/exceptions/exceptions.pyi,sha256=6y3ECgHAStqMGgQv8Dv-Ui-5PDD07mSj6qaRZeSWea4,5857
|
|
42
42
|
gllm_inference/exceptions/provider_error_map.pyi,sha256=vWa4ZIHn7qIghECGvO-dS2KzOmf3c10GRWKZ4YDPnSQ,1267
|
|
43
43
|
gllm_inference/lm_invoker/__init__.pyi,sha256=Y0laMHRdUwGgfIXo278qe1dwj7ZMSCeO-wMToYim47c,1618
|
|
44
|
-
gllm_inference/lm_invoker/anthropic_lm_invoker.pyi,sha256=
|
|
44
|
+
gllm_inference/lm_invoker/anthropic_lm_invoker.pyi,sha256=b3VM_mcK20y4Fyo_vWDx95HA001l9h53ACo0caP1sTY,15729
|
|
45
45
|
gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi,sha256=uV98H2nJsElCTsxAuInZ9KSk1jOTq6SROAGQRPR-_r0,13173
|
|
46
46
|
gllm_inference/lm_invoker/bedrock_lm_invoker.pyi,sha256=qXmFK6zsOM3nPfueEhY5pAfG24bZytA1jqemPa63vLY,10951
|
|
47
47
|
gllm_inference/lm_invoker/datasaur_lm_invoker.pyi,sha256=FnpayOW_Zi0pWFSawLX8XahEnknbnpsRWrkhKZe8Y3U,8035
|
|
@@ -133,7 +133,7 @@ gllm_inference/utils/io_utils.pyi,sha256=Eg7dvHWdXslTKdjh1j3dG50i7r35XG2zTmJ9XXv
|
|
|
133
133
|
gllm_inference/utils/langchain.pyi,sha256=4AwFiVAO0ZpdgmqeC4Pb5NJwBt8vVr0MSUqLeCdTscc,1194
|
|
134
134
|
gllm_inference/utils/validation.pyi,sha256=OWRZxeVGIuuvNU0LqLGB-9gNmypvbH-LcSJx91rnH1k,453
|
|
135
135
|
gllm_inference.build/.gitignore,sha256=aEiIwOuxfzdCmLZe4oB1JsBmCUxwG8x-u-HBCV9JT8E,1
|
|
136
|
-
gllm_inference_binary-0.5.61.dist-info/METADATA,sha256=
|
|
137
|
-
gllm_inference_binary-0.5.61.dist-info/WHEEL,sha256=
|
|
138
|
-
gllm_inference_binary-0.5.61.dist-info/top_level.txt,sha256=
|
|
139
|
-
gllm_inference_binary-0.5.61.dist-info/RECORD,,
|
|
136
|
+
gllm_inference_binary-0.5.62.dist-info/METADATA,sha256=opW0bJ-huAATZSyiNRtx4SictbAKXH4ZLOqskzyWrJU,5945
|
|
137
|
+
gllm_inference_binary-0.5.62.dist-info/WHEEL,sha256=O_u6PJIQ2pIcyIInxVQ9r-yArMuUZbBIaF1kpYVkYxA,96
|
|
138
|
+
gllm_inference_binary-0.5.62.dist-info/top_level.txt,sha256=FpOjtN80F-qVNgbScXSEyqa0w09FYn6301iq6qt69IQ,15
|
|
139
|
+
gllm_inference_binary-0.5.62.dist-info/RECORD,,
|
|
File without changes
|
{gllm_inference_binary-0.5.61.dist-info → gllm_inference_binary-0.5.62.dist-info}/top_level.txt
RENAMED
|
File without changes
|