gllm-inference-binary 0.5.31__cp312-cp312-win_amd64.whl → 0.5.33__cp312-cp312-win_amd64.whl

This diff compares the contents of two publicly released package versions as they appear in a supported public registry; it is provided for informational purposes only.
gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi CHANGED
@@ -134,7 +134,12 @@ class AzureOpenAILMInvoker(OpenAILMInvoker):
     ```python
     LMOutput(
         response="Golden retriever is a good dog breed.",
-        token_usage=TokenUsage(input_tokens=100, output_tokens=50),
+        token_usage=TokenUsage(
+            input_tokens=1500,
+            output_tokens=200,
+            input_token_details=InputTokenDetails(cached_tokens=1200, uncached_tokens=300),
+            output_token_details=OutputTokenDetails(reasoning_tokens=180, response_tokens=20),
+        ),
         duration=0.729,
         finish_details={"status": "completed", "incomplete_details": {"reason": None}},
     )
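The updated docstring example shows `token_usage` carrying nested breakdowns (`InputTokenDetails` with cached/uncached prompt tokens, `OutputTokenDetails` with reasoning/response tokens) instead of a flat input/output count. As a minimal sketch of how a caller might consume that richer payload, the helper below (not part of the package; the attribute names are taken from the example above and the fallbacks are assumptions) flattens it into a simple report:

```python
def summarize_token_usage(token_usage) -> dict:
    """Flatten the nested token usage illustrated above into a flat report.

    Works on any object exposing input_tokens / output_tokens and, optionally,
    input_token_details / output_token_details with the field names shown in
    the docstring example; those names are assumptions based on that example.
    """
    input_details = getattr(token_usage, "input_token_details", None)
    output_details = getattr(token_usage, "output_token_details", None)

    cached = getattr(input_details, "cached_tokens", 0) if input_details else 0
    reasoning = getattr(output_details, "reasoning_tokens", 0) if output_details else 0

    return {
        "input_tokens": token_usage.input_tokens,
        "output_tokens": token_usage.output_tokens,
        # Share of the prompt that was served from the provider's prompt cache.
        "cache_hit_ratio": cached / token_usage.input_tokens if token_usage.input_tokens else 0.0,
        # Tokens spent by reasoning models before the visible response.
        "reasoning_tokens": reasoning,
    }
```

With the example values above (1,200 cached of 1,500 input tokens), `cache_hit_ratio` would come out to 0.8.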
gllm_inference/lm_invoker/openai_lm_invoker.pyi CHANGED
@@ -11,6 +11,8 @@ from langchain_core.tools import Tool as LangChainTool
 from typing import Any
 
 SUPPORTED_ATTACHMENTS: Incomplete
+STREAM_DATA_TRANSITION_TYPE_MAP: Incomplete
+STREAM_DATA_CONTENT_TYPE_MAP: Incomplete
 
 class OpenAILMInvoker(BaseLMInvoker):
     '''A language model invoker to interact with OpenAI language models.
@@ -135,7 +137,12 @@ class OpenAILMInvoker(BaseLMInvoker):
     ```python
     LMOutput(
         response="Golden retriever is a good dog breed.",
-        token_usage=TokenUsage(input_tokens=100, output_tokens=50),
+        token_usage=TokenUsage(
+            input_tokens=1500,
+            output_tokens=200,
+            input_token_details=InputTokenDetails(cached_tokens=1200, uncached_tokens=300),
+            output_token_details=OutputTokenDetails(reasoning_tokens=180, response_tokens=20),
+        ),
         duration=0.729,
         finish_details={"status": "completed", "incomplete_details": {"reason": None}},
     )
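The same docstring update appears in `OpenAILMInvoker`. The example values suggest the intended decomposition: cached plus uncached tokens account for the full input count, and reasoning plus response tokens account for the full output count. The stand-in dataclasses below (defined locally for illustration, not imported from `gllm_inference`, so the real classes may differ) just make that arithmetic explicit:

```python
from dataclasses import dataclass


@dataclass
class InputTokenDetails:  # stand-in mirroring the docstring example, not the package's class
    cached_tokens: int
    uncached_tokens: int


@dataclass
class OutputTokenDetails:  # stand-in mirroring the docstring example
    reasoning_tokens: int
    response_tokens: int


@dataclass
class TokenUsage:  # stand-in mirroring the docstring example
    input_tokens: int
    output_tokens: int
    input_token_details: InputTokenDetails
    output_token_details: OutputTokenDetails


usage = TokenUsage(
    input_tokens=1500,
    output_tokens=200,
    input_token_details=InputTokenDetails(cached_tokens=1200, uncached_tokens=300),
    output_token_details=OutputTokenDetails(reasoning_tokens=180, response_tokens=20),
)

# 1200 cached + 300 uncached = 1500 input tokens
assert usage.input_token_details.cached_tokens + usage.input_token_details.uncached_tokens == usage.input_tokens
# 180 reasoning + 20 response = 200 output tokens
assert usage.output_token_details.reasoning_tokens + usage.output_token_details.response_tokens == usage.output_tokens
```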
gllm_inference.cp312-win_amd64.pyd CHANGED (binary file; contents not shown)
gllm_inference.pyi CHANGED
@@ -109,6 +109,7 @@ import jsonschema
 import gllm_inference.lm_invoker.batch.BatchOperations
 import gllm_inference.schema.MessageContent
 import gllm_inference.utils.validate_string_enum
+import __future__
 import gllm_inference.schema.CodeExecResult
 import gllm_inference.schema.MCPCall
 import gllm_inference.schema.MCPServer
gllm_inference_binary-0.5.31.dist-info/METADATA → gllm_inference_binary-0.5.33.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: gllm-inference-binary
-Version: 0.5.31
+Version: 0.5.33
 Summary: A library containing components related to model inferences in Gen AI applications.
 Author-email: Henry Wicaksono <henry.wicaksono@gdplabs.id>, Resti Febrina <resti.febrina@gdplabs.id>
 Requires-Python: <3.14,>=3.11
gllm_inference_binary-0.5.31.dist-info/RECORD → gllm_inference_binary-0.5.33.dist-info/RECORD
@@ -1,5 +1,5 @@
-gllm_inference.cp312-win_amd64.pyd,sha256=avVBE5O0Qaxipz2kSjKBF0M5C4eb-onuqaQgD8-cGhw,3212800
-gllm_inference.pyi,sha256=_kg-gYI4Dx_w13ZGSP-2sC14z7u_GSLeanMYuRkfnZA,4181
+gllm_inference.cp312-win_amd64.pyd,sha256=vZtLh5mzjg5xWtPsQSq54ju3c13bxt6DBnknXYq85wo,3180032
+gllm_inference.pyi,sha256=bvMQNMzysfZtXgjW4ZX0KwSOV4uroNakpE0NUHKCMmk,4199
 gllm_inference/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 gllm_inference/constants.pyi,sha256=KQmondDEkHK2P249ymmce3SdutVrx8kYm4v1eTCkW9U,277
 gllm_inference/builder/__init__.pyi,sha256=-bw1uDx7CAM7pkvjvb1ZXku9zXlQ7aEAyC83KIn3bz8,506
@@ -37,7 +37,7 @@ gllm_inference/exceptions/exceptions.pyi,sha256=6y3ECgHAStqMGgQv8Dv-Ui-5PDD07mSj
 gllm_inference/exceptions/provider_error_map.pyi,sha256=4AsAgbXAh91mxEW2YiomEuhBoeSNeAIo9WbT9WK8gQk,1233
 gllm_inference/lm_invoker/__init__.pyi,sha256=eE_HDCl9A135mi6mtIV55q-T9J1O8OpbMcqWuny3w9A,1214
 gllm_inference/lm_invoker/anthropic_lm_invoker.pyi,sha256=5fscLpROscxjBNP13GmcU9I83YiZH-pb42FzQ2JzGBA,17575
-gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi,sha256=sZ-6AdbxBSZUPuzYJ11fUCis4-I6XWPbIPr2hwavSbY,15037
+gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi,sha256=Tcmr0OfEk3As4nmL_lCoQzVB9qTQ2BLASRFhcObvrPY,15286
 gllm_inference/lm_invoker/bedrock_lm_invoker.pyi,sha256=ptyHTm1szPJEpQObdrsxHpbTkchCWE6K-YmVTmbdhvM,13037
 gllm_inference/lm_invoker/datasaur_lm_invoker.pyi,sha256=sR1vSTifBykASzAGMYn7nJVxTEwXMFz-Xa0rQjXzb6A,9482
 gllm_inference/lm_invoker/google_lm_invoker.pyi,sha256=Zkt-BdOZT106mn07_7krBQ5GiXsp9z9aoHs_d96P4lg,17482
@@ -45,7 +45,7 @@ gllm_inference/lm_invoker/langchain_lm_invoker.pyi,sha256=2pghfayLQxNJ1UEJZGPCro
 gllm_inference/lm_invoker/litellm_lm_invoker.pyi,sha256=7OJSwv0FCg9Hf4wxtdHcblCcFYu4SqPiMACFH-ZM1c0,13489
 gllm_inference/lm_invoker/lm_invoker.pyi,sha256=JQFExiblFbCMQ3HXOE62Ho1VTMdmxf_CZ-edGSQbCrQ,8312
 gllm_inference/lm_invoker/openai_compatible_lm_invoker.pyi,sha256=7Tnq-4Cl46sPDLiRQLQBDFuk_INhm0BIknXZXqXYb-8,15316
-gllm_inference/lm_invoker/openai_lm_invoker.pyi,sha256=kQy7WsF-vACIVumJ2SxmUscNbER1onpbjdIYzK2XaGc,22176
+gllm_inference/lm_invoker/openai_lm_invoker.pyi,sha256=NK-HJXvRrQjDgfpqXGUBi_rf5iTiMjJvxfRsqj_JT_I,22512
 gllm_inference/lm_invoker/xai_lm_invoker.pyi,sha256=6TwO3KU1DBWoe4UAsz97MY1yKBf-N38WjbrBqCmWCNU,15992
 gllm_inference/lm_invoker/batch/__init__.pyi,sha256=vJOTHRJ83oq8Bq0UsMdID9_HW5JAxr06gUs4aPRZfEE,130
 gllm_inference/lm_invoker/batch/batch_operations.pyi,sha256=o2U17M41RKVFW6j_oxy-SxU1JqUtVt75pKRxrqXzorE,5499
@@ -103,7 +103,7 @@ gllm_inference/utils/io_utils.pyi,sha256=Eg7dvHWdXslTKdjh1j3dG50i7r35XG2zTmJ9XXv
 gllm_inference/utils/langchain.pyi,sha256=4AwFiVAO0ZpdgmqeC4Pb5NJwBt8vVr0MSUqLeCdTscc,1194
 gllm_inference/utils/validation.pyi,sha256=-RdMmb8afH7F7q4Ao7x6FbwaDfxUHn3hA3WiOgzB-3s,397
 gllm_inference.build/.gitignore,sha256=aEiIwOuxfzdCmLZe4oB1JsBmCUxwG8x-u-HBCV9JT8E,1
-gllm_inference_binary-0.5.31.dist-info/METADATA,sha256=ozQGz7vkUPZJBx5YZTUgD1nP17Zak200GAvvnPkO94g,4971
-gllm_inference_binary-0.5.31.dist-info/WHEEL,sha256=x5rgv--I0NI0IT1Lh9tN1VG2cI637p3deednwYLKnxc,96
-gllm_inference_binary-0.5.31.dist-info/top_level.txt,sha256=FpOjtN80F-qVNgbScXSEyqa0w09FYn6301iq6qt69IQ,15
-gllm_inference_binary-0.5.31.dist-info/RECORD,,
+gllm_inference_binary-0.5.33.dist-info/METADATA,sha256=1P1byafm__5eeo7uRyb-SxSySmI3QvThB1PNYLiRLoY,4971
+gllm_inference_binary-0.5.33.dist-info/WHEEL,sha256=x5rgv--I0NI0IT1Lh9tN1VG2cI637p3deednwYLKnxc,96
+gllm_inference_binary-0.5.33.dist-info/top_level.txt,sha256=FpOjtN80F-qVNgbScXSEyqa0w09FYn6301iq6qt69IQ,15
+gllm_inference_binary-0.5.33.dist-info/RECORD,,
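The remaining changes are the routine RECORD updates that accompany a rebuild: new hashes and sizes for the recompiled `.pyd`, the regenerated `.pyi` stubs, and the renamed `dist-info` files. Each RECORD line has the form `path,sha256=<digest>,<size>`, where the digest is the unpadded URL-safe base64 encoding of the file's SHA-256. The small sketch below (a standalone utility, not something shipped with the package) shows how such an entry can be recomputed locally and compared against the RECORD:

```python
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    """Recompute a wheel RECORD line ("path,sha256=<digest>,<size>") for a file."""
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode("ascii")
    return f"{path},sha256={digest},{len(data)}"


# Example: an empty file reproduces the digest listed above for gllm_inference/__init__.pyi
# (sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU, size 0).
```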