gllm-inference-binary 0.5.24__cp311-cp311-manylinux_2_31_x86_64.whl → 0.5.26b1__cp311-cp311-manylinux_2_31_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



gllm_inference/builder/build_em_invoker.pyi
@@ -86,7 +86,7 @@ def build_em_invoker(model_id: str | ModelId, credentials: str | dict[str, Any]
   # Using Azure OpenAI
   ```python
   em_invoker = build_em_invoker(
-      model_id="azure-openai/https://my-resource.openai.azure.com:my-deployment",
+      model_id="azure-openai/https://my-resource.openai.azure.com/openai/v1:my-deployment",
      credentials="azure-api-key"
   )
   ```
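For reference, a minimal usage sketch of the updated docstring example above. The `from gllm_inference.builder import build_em_invoker` import path is assumed from the builder package listed in RECORD; the resource name, deployment, and API key are the same placeholders used in the docstring.

```python
# Sketch based on the updated build_em_invoker docstring example (import path assumed).
from gllm_inference.builder import build_em_invoker

em_invoker = build_em_invoker(
    # The Azure OpenAI endpoint now carries the "/openai/v1" suffix before the deployment name.
    model_id="azure-openai/https://my-resource.openai.azure.com/openai/v1:my-deployment",
    credentials="azure-api-key",
)
```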
gllm_inference/constants.pyi
@@ -1,6 +1,6 @@
  from _typeshed import Incomplete

- DEFAULT_AZURE_OPENAI_API_VERSION: str
+ AZURE_OPENAI_URL_SUFFIX: str
  DOCUMENT_MIME_TYPES: Incomplete
  GOOGLE_SCOPES: Incomplete
  GRPC_ENABLE_RETRIES_KEY: str
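The rename suggests the Azure endpoint handling moved from pinning an API version to appending a URL suffix. Below is a hypothetical sketch of how such a suffix might be applied: the stub only declares `AZURE_OPENAI_URL_SUFFIX` as a `str`, so the `/openai/v1` value is inferred from the updated `build_em_invoker` docstring, and `normalize_azure_endpoint` is an illustrative helper, not part of the package.

```python
# Hypothetical sketch only: the constant's value is not shown in this diff; "/openai/v1"
# is inferred from the updated build_em_invoker docstring example above.
AZURE_OPENAI_URL_SUFFIX = "/openai/v1"

def normalize_azure_endpoint(azure_endpoint: str) -> str:
    """Append the OpenAI-compatible path suffix if the endpoint does not already end with it."""
    endpoint = azure_endpoint.rstrip("/")
    if not endpoint.endswith(AZURE_OPENAI_URL_SUFFIX):
        endpoint += AZURE_OPENAI_URL_SUFFIX
    return endpoint

# e.g. normalize_azure_endpoint("https://my-resource.openai.azure.com")
# -> "https://my-resource.openai.azure.com/openai/v1"
```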
gllm_inference/em_invoker/azure_openai_em_invoker.pyi
@@ -1,6 +1,6 @@
  from _typeshed import Incomplete
  from gllm_core.utils.retry import RetryConfig as RetryConfig
- from gllm_inference.constants import DEFAULT_AZURE_OPENAI_API_VERSION as DEFAULT_AZURE_OPENAI_API_VERSION, INVOKER_PROPAGATED_MAX_RETRIES as INVOKER_PROPAGATED_MAX_RETRIES
+ from gllm_inference.constants import AZURE_OPENAI_URL_SUFFIX as AZURE_OPENAI_URL_SUFFIX, INVOKER_PROPAGATED_MAX_RETRIES as INVOKER_PROPAGATED_MAX_RETRIES
  from gllm_inference.em_invoker.openai_em_invoker import OpenAIEMInvoker as OpenAIEMInvoker
  from gllm_inference.em_invoker.schema.openai import Key as Key
  from gllm_inference.schema import ModelId as ModelId, ModelProvider as ModelProvider, TruncationConfig as TruncationConfig
@@ -69,7 +69,7 @@ class AzureOpenAIEMInvoker(OpenAIEMInvoker):
      ```
      '''
      client: Incomplete
-     def __init__(self, azure_endpoint: str, azure_deployment: str, api_key: str | None = None, api_version: str = ..., model_kwargs: dict[str, Any] | None = None, default_hyperparameters: dict[str, Any] | None = None, retry_config: RetryConfig | None = None, truncation_config: TruncationConfig | None = None) -> None:
+     def __init__(self, azure_endpoint: str, azure_deployment: str, api_key: str | None = None, api_version: str | None = None, model_kwargs: dict[str, Any] | None = None, default_hyperparameters: dict[str, Any] | None = None, retry_config: RetryConfig | None = None, truncation_config: TruncationConfig | None = None) -> None:
      """Initializes a new instance of the AzureOpenAIEMInvoker class.

      Args:
@@ -77,8 +77,7 @@ class AzureOpenAIEMInvoker(OpenAIEMInvoker):
      azure_deployment (str): The deployment name of the Azure OpenAI service.
      api_key (str | None, optional): The API key for authenticating with Azure OpenAI. Defaults to None, in
          which case the `AZURE_OPENAI_API_KEY` environment variable will be used.
-     api_version (str, optional): The API version of the Azure OpenAI service. Defaults to
-         `DEFAULT_AZURE_OPENAI_API_VERSION`.
+     api_version (str | None, optional): Deprecated parameter to be removed in v0.6. Defaults to None.
      model_kwargs (dict[str, Any] | None, optional): Additional model parameters. Defaults to None.
      default_hyperparameters (dict[str, Any] | None, optional): Default hyperparameters for invoking the model.
          Defaults to None.
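Under the new signature, `api_version` can simply be omitted. A short sketch assuming the constructor shown above; the module import path is taken from the RECORD listing, and the endpoint, deployment, and key are placeholders.

```python
# Sketch of constructing the invoker with the new signature; api_version is deprecated
# (slated for removal in v0.6) and is left out. Endpoint, deployment, and key are placeholders.
from gllm_inference.em_invoker.azure_openai_em_invoker import AzureOpenAIEMInvoker

em_invoker = AzureOpenAIEMInvoker(
    azure_endpoint="https://my-resource.openai.azure.com",
    azure_deployment="my-deployment",
    api_key="azure-api-key",
)
```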
gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi
@@ -1,15 +1,13 @@
  from _typeshed import Incomplete
  from gllm_core.schema.tool import Tool as Tool
  from gllm_core.utils.retry import RetryConfig as RetryConfig
- from gllm_inference.constants import INVOKER_PROPAGATED_MAX_RETRIES as INVOKER_PROPAGATED_MAX_RETRIES
+ from gllm_inference.constants import AZURE_OPENAI_URL_SUFFIX as AZURE_OPENAI_URL_SUFFIX, INVOKER_PROPAGATED_MAX_RETRIES as INVOKER_PROPAGATED_MAX_RETRIES
  from gllm_inference.lm_invoker.openai_lm_invoker import OpenAILMInvoker as OpenAILMInvoker, ReasoningEffort as ReasoningEffort, ReasoningSummary as ReasoningSummary
  from gllm_inference.lm_invoker.schema.openai import Key as Key
  from gllm_inference.schema import ModelId as ModelId, ModelProvider as ModelProvider, ResponseSchema as ResponseSchema
  from langchain_core.tools import Tool as LangChainTool
  from typing import Any

- URL_SUFFIX: str
-
  class AzureOpenAILMInvoker(OpenAILMInvoker):
      '''A language model invoker to interact with Azure OpenAI language models.

gllm_inference_binary-0.5.24.dist-info/METADATA → gllm_inference_binary-0.5.26b1.dist-info/METADATA
@@ -1,47 +1,52 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: gllm-inference-binary
- Version: 0.5.24
+ Version: 0.5.26b1
  Summary: A library containing components related to model inferences in Gen AI applications.
- Author: Henry Wicaksono
- Author-email: henry.wicaksono@gdplabs.id
- Requires-Python: >=3.11,<3.14
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
+ Author-email: Henry Wicaksono <henry.wicaksono@gdplabs.id>, Resti Febrina <resti.febrina@gdplabs.id>
+ Requires-Python: <3.14,>=3.11
+ Description-Content-Type: text/markdown
+ Requires-Dist: poetry<3.0.0,>=2.1.3
+ Requires-Dist: gllm-core-binary<0.4.0,>=0.3.0
+ Requires-Dist: aiohttp<4.0.0,>=3.12.14
+ Requires-Dist: filetype<2.0.0,>=1.2.0
+ Requires-Dist: httpx<0.29.0,>=0.28.0
+ Requires-Dist: jinja2<4.0.0,>=3.1.4
+ Requires-Dist: jsonschema<5.0.0,>=4.24.0
+ Requires-Dist: langchain<0.4.0,>=0.3.0
+ Requires-Dist: pandas<3.0.0,>=2.2.3
+ Requires-Dist: protobuf<7.0.0,>=6.0.0
+ Requires-Dist: python-magic<0.5.0,>=0.4.27; sys_platform != "win32"
+ Requires-Dist: python-magic-bin<0.5.0,>=0.4.14; sys_platform == "win32"
+ Requires-Dist: sentencepiece<0.3.0,>=0.2.0
+ Provides-Extra: dev
+ Requires-Dist: coverage<8.0.0,>=7.4.4; extra == "dev"
+ Requires-Dist: mypy<2.0.0,>=1.15.0; extra == "dev"
+ Requires-Dist: pre-commit<4.0.0,>=3.7.0; extra == "dev"
+ Requires-Dist: pytest<9.0.0,>=8.1.1; extra == "dev"
+ Requires-Dist: pytest-asyncio<1.0.0,>=0.23.6; extra == "dev"
+ Requires-Dist: pytest-cov<6.0.0,>=5.0.0; extra == "dev"
+ Requires-Dist: ruff<1.0.0,>=0.6.7; extra == "dev"
  Provides-Extra: anthropic
+ Requires-Dist: anthropic<0.61.0,>=0.60.0; extra == "anthropic"
  Provides-Extra: bedrock
+ Requires-Dist: aioboto3<16.0.0,>=15.0.0; extra == "bedrock"
  Provides-Extra: datasaur
+ Requires-Dist: openai<2.0.0,>=1.98.0; extra == "datasaur"
  Provides-Extra: google
+ Requires-Dist: google-genai<=1.36,>=1.23; extra == "google"
  Provides-Extra: huggingface
- Provides-Extra: litellm
+ Requires-Dist: huggingface-hub<0.31.0,>=0.30.0; extra == "huggingface"
+ Requires-Dist: transformers==4.52.4; extra == "huggingface"
  Provides-Extra: openai
+ Requires-Dist: openai<2.0.0,>=1.98.0; extra == "openai"
+ Provides-Extra: litellm
+ Requires-Dist: litellm<2.0.0,>=1.69.2; extra == "litellm"
  Provides-Extra: twelvelabs
+ Requires-Dist: twelvelabs<0.5.0,>=0.4.4; extra == "twelvelabs"
  Provides-Extra: voyage
+ Requires-Dist: voyageai<0.4.0,>=0.3.0; python_version < "3.13" and extra == "voyage"
  Provides-Extra: xai
- Requires-Dist: aioboto3 (>=15.0.0,<16.0.0) ; extra == "bedrock"
- Requires-Dist: aiohttp (>=3.12.14,<4.0.0)
- Requires-Dist: anthropic (>=0.60.0,<0.61.0) ; extra == "anthropic"
- Requires-Dist: filetype (>=1.2.0,<2.0.0)
- Requires-Dist: gllm-core-binary (>=0.3.0,<0.4.0)
- Requires-Dist: google-genai (>=1.26.0,<2.0.0) ; extra == "google"
- Requires-Dist: httpx (>=0.28.0,<0.29.0)
- Requires-Dist: huggingface-hub (>=0.30.0,<0.31.0) ; extra == "huggingface"
- Requires-Dist: jinja2 (>=3.1.4,<4.0.0)
- Requires-Dist: jsonschema (>=4.24.0,<5.0.0)
- Requires-Dist: langchain (>=0.3.0,<0.4.0)
- Requires-Dist: litellm (>=1.69.2,<2.0.0) ; extra == "litellm"
- Requires-Dist: openai (>=1.98.0,<2.0.0) ; extra == "datasaur" or extra == "openai"
- Requires-Dist: pandas (>=2.2.3,<3.0.0)
- Requires-Dist: poetry (>=2.1.3,<3.0.0)
- Requires-Dist: protobuf (>=6.0.0,<7.0.0)
- Requires-Dist: python-magic (>=0.4.27,<0.5.0) ; sys_platform != "win32"
- Requires-Dist: python-magic-bin (>=0.4.14,<0.5.0) ; sys_platform == "win32"
- Requires-Dist: sentencepiece (>=0.2.0,<0.3.0)
- Requires-Dist: transformers (==4.52.4) ; extra == "huggingface"
- Requires-Dist: twelvelabs (>=0.4.4,<0.5.0) ; extra == "twelvelabs"
- Requires-Dist: voyageai (>=0.3.0,<0.4.0) ; (python_version < "3.13") and (extra == "voyage")
- Requires-Dist: xai_sdk (>=1.0.0,<2.0.0) ; extra == "xai"
- Description-Content-Type: text/markdown
+ Requires-Dist: xai_sdk<2.0.0,>=1.0.0; extra == "xai"

  # GLLM Inference

@@ -107,4 +112,3 @@ to get information about code style, documentation standard, and SCA that you ne
  poetry run pytest -s tests/unit_tests/
  ```

-
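One way to see the regenerated metadata after installing the wheel is the standard library's `importlib.metadata`. A short sketch; the distribution name and expected values are taken from the diff above.

```python
# Sketch: inspect the rewritten METADATA of an installed gllm-inference-binary wheel.
from importlib.metadata import metadata

meta = metadata("gllm-inference-binary")
print(meta["Metadata-Version"])        # 2.2 in this release
print(meta["Version"])                 # 0.5.26b1
print(meta.get_all("Provides-Extra"))  # now includes "dev" alongside the provider extras
for req in meta.get_all("Requires-Dist") or []:
    print(req)                         # e.g. 'openai<2.0.0,>=1.98.0; extra == "openai"'
```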
gllm_inference_binary-0.5.24.dist-info/RECORD → gllm_inference_binary-0.5.26b1.dist-info/RECORD
@@ -1,6 +1,9 @@
+ gllm_inference.cpython-311-x86_64-linux-gnu.so,sha256=acaJrdLmL4hy_06TSMuRDXch-Vh80BRNZQWsDI61zpY,4285856
+ gllm_inference.pyi,sha256=Pq6P04np3S3x7juGVCzC5sL2im4MsyligEvahVQNWzM,3820
  gllm_inference/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ gllm_inference/constants.pyi,sha256=EFVMtK3xDK2yjGoHp8EL3LeRZWhIefVKClI9jvbfQQ0,267
  gllm_inference/builder/__init__.pyi,sha256=usz2lvfwO4Yk-ZGKXbCWG1cEr3nlQXxMNDNC-2yc1NM,500
- gllm_inference/builder/build_em_invoker.pyi,sha256=7JTt8XpfqLQdv5cltIyi1r6Sf8MAUKgornHn5z57raA,5873
+ gllm_inference/builder/build_em_invoker.pyi,sha256=DbtDfY2sD-eWiYrOuZ3a7EoITgvrfUB5l94pF5Ly62g,5883
  gllm_inference/builder/build_lm_invoker.pyi,sha256=9-M0CJWhd0MVUYHHLD7QCfeW835JBaZ8_XQzUBJJdAc,6886
  gllm_inference/builder/build_lm_request_processor.pyi,sha256=33Gi3onftl-V2e_mkJios5zmXRKSoAVPX3UK7YBExjk,4491
  gllm_inference/builder/build_output_parser.pyi,sha256=_Lrq-bh1oPsb_Nwkkr_zyEUwIOMysRFZkvEtEM29LZM,936
@@ -8,17 +11,18 @@ gllm_inference/catalog/__init__.pyi,sha256=JBkPGTyiiZ30GECzJBW-mW8LekWyY2qyzal3e
  gllm_inference/catalog/catalog.pyi,sha256=a4RNG1lKv51GxQpOqh47tz-PAROMPaeP2o5XNLBSZaU,4790
  gllm_inference/catalog/lm_request_processor_catalog.pyi,sha256=ranHMbG9--DZj9FJRhIUa6U8e-L-Tm-_hSBpzJ6DDs4,5428
  gllm_inference/catalog/prompt_builder_catalog.pyi,sha256=OU8k_4HbqjZEzHZlzSM3uzGQZJmM2uGD76Csqom0CEQ,3197
- gllm_inference/constants.pyi,sha256=2C5tOEIVWCHg_SPhVmjWBigiLw4jUKAGSTBTdRcCtcI,276
  gllm_inference/em_invoker/__init__.pyi,sha256=83QVCkMjS2-jMKdAvmZska4LuJ-un755lAxjuVSLZ9o,987
- gllm_inference/em_invoker/azure_openai_em_invoker.pyi,sha256=eJd1Ygths8wu0w2S9VmF-2iEQj5zs7nACyNJXAsNXUA,4983
+ gllm_inference/em_invoker/azure_openai_em_invoker.pyi,sha256=Tp92kRqUmB2FpqdnWdJXDGZ_ibOzgFeZAEey4coaD5E,4933
  gllm_inference/em_invoker/bedrock_em_invoker.pyi,sha256=77omAUXnGU_62KPC5sKOnWTwdqsoEXX38IM1JScd6K0,5723
  gllm_inference/em_invoker/em_invoker.pyi,sha256=KGjLiAWGIA3ziV50zMwSzx6lTVUbYspQCl4LFxqbDlY,5101
  gllm_inference/em_invoker/google_em_invoker.pyi,sha256=VVpizDo47kznqRk07t6e-Lp_K6Ojfn5KDkIKoqn2boE,6822
- gllm_inference/em_invoker/langchain/__init__.pyi,sha256=VYGKE5OgU0my1RlhgzkU_A7-GLGnUDDnNFuctuRwILE,148
- gllm_inference/em_invoker/langchain/em_invoker_embeddings.pyi,sha256=VU3-Vhb9BCDhJo8PPdWHe2rBEOCs_HMXT6ZaWwjUzZE,3304
  gllm_inference/em_invoker/langchain_em_invoker.pyi,sha256=BhZjkYZoxQXPJjP0PgA8W0xrtwiqDkW5E6NpGit0h1E,3498
  gllm_inference/em_invoker/openai_compatible_em_invoker.pyi,sha256=GudWfL7QCAKLInMw94jTNogbyELlD9tDbrDErHB4RRI,5360
  gllm_inference/em_invoker/openai_em_invoker.pyi,sha256=vsfEmDNvwrlBhDxqsCKyMpMZbl_FaQUWHEgQc9yeo14,4656
+ gllm_inference/em_invoker/twelevelabs_em_invoker.pyi,sha256=9F37VuLYTH5xsPSS_uzhrsPJug0QifX_qh2GwG0jSTU,5345
+ gllm_inference/em_invoker/voyage_em_invoker.pyi,sha256=zJZqMvvFKu3sHdrNM773UYjfHVlnwE2w2BmvdFcHzV0,5515
+ gllm_inference/em_invoker/langchain/__init__.pyi,sha256=VYGKE5OgU0my1RlhgzkU_A7-GLGnUDDnNFuctuRwILE,148
+ gllm_inference/em_invoker/langchain/em_invoker_embeddings.pyi,sha256=VU3-Vhb9BCDhJo8PPdWHe2rBEOCs_HMXT6ZaWwjUzZE,3304
  gllm_inference/em_invoker/schema/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  gllm_inference/em_invoker/schema/bedrock.pyi,sha256=AHFW8uYOTS7RtqV1RmtY-XQK1xpMgsHxWg4RZhVgI_8,476
  gllm_inference/em_invoker/schema/google.pyi,sha256=ovDlvinu99QJhIxMkvVUoGBEFkkEoAZhadSuk0nI9N8,181
@@ -27,15 +31,13 @@ gllm_inference/em_invoker/schema/openai.pyi,sha256=Q_dsEcodkOXYXPdrkOkW0LnuLhfeq
  gllm_inference/em_invoker/schema/openai_compatible.pyi,sha256=gmvGtsWoOMBelke_tZjC6dKimFBW9f4Vrgv0Ig0OM9Q,150
  gllm_inference/em_invoker/schema/twelvelabs.pyi,sha256=F6wKHgG01bYskJpKoheBSpRpHUfFpteKn9sj9n5YfcU,372
  gllm_inference/em_invoker/schema/voyage.pyi,sha256=HVpor0fqNy-IwapCICfsgFmqf1FJXCOMIxS2vOXhHd8,289
- gllm_inference/em_invoker/twelevelabs_em_invoker.pyi,sha256=9F37VuLYTH5xsPSS_uzhrsPJug0QifX_qh2GwG0jSTU,5345
- gllm_inference/em_invoker/voyage_em_invoker.pyi,sha256=zJZqMvvFKu3sHdrNM773UYjfHVlnwE2w2BmvdFcHzV0,5515
  gllm_inference/exceptions/__init__.pyi,sha256=Upcuj7od2lkbdueQ0iMT2ktFYYi-KKTynTLAaxWDTjU,1214
  gllm_inference/exceptions/error_parser.pyi,sha256=IOfa--NpLUW5E9Qq0mwWi6ZpTAbUyyNe6iAqunBNGLI,1999
  gllm_inference/exceptions/exceptions.pyi,sha256=Bv996qLa_vju0Qjf4GewMxdkq8CV9LRZb0S6289DldA,5725
  gllm_inference/exceptions/provider_error_map.pyi,sha256=P1WnhWkM103FW6hqMfNZBOmYSWOmsJtll3VQV8DGb8E,1210
  gllm_inference/lm_invoker/__init__.pyi,sha256=NmQSqObPjevEP1KbbrNnaz4GMh175EVPERZ19vK5Emc,1202
  gllm_inference/lm_invoker/anthropic_lm_invoker.pyi,sha256=MsF3OmDo0L9aEHuTJYTgsoDILi2B_IgKtPpDcDMduWc,14925
- gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi,sha256=94SgOYmMW-hI3TeGRMslplC5xZPxKs3M7CMcAuAZyO0,14545
+ gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi,sha256=9gzto0yuZySR_8FII0PzbKLN_bCCdDP2vXQlVwnK9V8,14580
  gllm_inference/lm_invoker/bedrock_lm_invoker.pyi,sha256=fAJCLdOMcR4OJpNFj3vN0TiNBOR8PzC1xPvqJDEwlJc,12690
  gllm_inference/lm_invoker/datasaur_lm_invoker.pyi,sha256=QS84w3WpD3Oyl5HdxrucsadCmsHE8gn6Ewl3l01DCgI,9203
  gllm_inference/lm_invoker/google_lm_invoker.pyi,sha256=LG9lE8IXnObl2Uq9VPLeBT4WRqE5zUV_2gojSHiSqwQ,17052
@@ -44,6 +46,7 @@ gllm_inference/lm_invoker/litellm_lm_invoker.pyi,sha256=_c56ewpEQ-Ywj5ofFzRYBvQg
  gllm_inference/lm_invoker/lm_invoker.pyi,sha256=dQwYtVMCOmqvx68Znr3-pFkeA8upvk5wtRnkbKWyqY4,7881
  gllm_inference/lm_invoker/openai_compatible_lm_invoker.pyi,sha256=_hOAde_Faph3JoGYh7zLch6BRc2Lam8PXZvi5-PkL-E,14938
  gllm_inference/lm_invoker/openai_lm_invoker.pyi,sha256=wPTJr5DkXpoXpxw3MoaqEnzAOUanBRGUu954KdKDaVU,19649
+ gllm_inference/lm_invoker/xai_lm_invoker.pyi,sha256=rV8D3E730OUmwK7jELKSziMUl7MnpbfxMAvMuq8-Aew,15687
  gllm_inference/lm_invoker/schema/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  gllm_inference/lm_invoker/schema/anthropic.pyi,sha256=zNbm4RV454dBEEUUN-Vyl_4cO628wUMPZyrO27O_DfM,991
  gllm_inference/lm_invoker/schema/bedrock.pyi,sha256=FJLY-ZkkLUYDV48pfsLatnot4ev_xxz9xAayLK28CpU,1027
@@ -53,7 +56,6 @@ gllm_inference/lm_invoker/schema/langchain.pyi,sha256=rZcIxuvABI4pKfyVvkRBRqfJJo
  gllm_inference/lm_invoker/schema/openai.pyi,sha256=9KjOJMnDyPs4hsysD8qFEMObUkbnxp6U9PmRIiUa3h4,1926
  gllm_inference/lm_invoker/schema/openai_compatible.pyi,sha256=m3bL2hVpxI_crURIi1bGDUqMy1Z5OgKBVU_-BkhX1mg,1166
  gllm_inference/lm_invoker/schema/xai.pyi,sha256=cWnbJmDtllqRH3NXpQbiXgkNBcUXr8ksDSDywcgJebE,632
- gllm_inference/lm_invoker/xai_lm_invoker.pyi,sha256=rV8D3E730OUmwK7jELKSziMUl7MnpbfxMAvMuq8-Aew,15687
  gllm_inference/model/__init__.pyi,sha256=qClHIgljqhPPCKlGTKmHsWgYb4_hADybxtC2q1U8a5Q,593
  gllm_inference/model/em/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  gllm_inference/model/em/google_em.pyi,sha256=ZPN5LmReO0bcTfnZixFooUTzgD-daNFPzfxzZ-5WzQQ,471
@@ -97,8 +99,7 @@ gllm_inference/utils/io_utils.pyi,sha256=7kUTacHAVRYoemFUOjCH7-Qmw-YsQGd6rGYxjf_
  gllm_inference/utils/langchain.pyi,sha256=VluQiHkGigDdqLUbhB6vnXiISCP5hHqV0qokYY6dC1A,1164
  gllm_inference/utils/validation.pyi,sha256=toxBtRp-VItC_X7sNi-GDd7sjibBdWMrR0q01OI2D7k,385
  gllm_inference.build/.gitignore,sha256=aEiIwOuxfzdCmLZe4oB1JsBmCUxwG8x-u-HBCV9JT8E,1
- gllm_inference.cpython-311-x86_64-linux-gnu.so,sha256=4mN_sEEAaOyaPHUyNZ34NeZsVNqC9ujR8u4c2qi86ZI,4281760
- gllm_inference.pyi,sha256=Pq6P04np3S3x7juGVCzC5sL2im4MsyligEvahVQNWzM,3820
- gllm_inference_binary-0.5.24.dist-info/METADATA,sha256=VRF96O0qP_WUnJN5O6eyphnYi4zYXo06tfLHbSlJMtI,4615
- gllm_inference_binary-0.5.24.dist-info/WHEEL,sha256=IFe_ZNdNTT_i6vUiBlaFC_vwJqKup8CcDJ489_L8YrY,110
- gllm_inference_binary-0.5.24.dist-info/RECORD,,
+ gllm_inference_binary-0.5.26b1.dist-info/METADATA,sha256=FbAe_gVQLVeavkPnu_hj4dhWsTuiqjkz-Hh-a1-GUOk,4850
+ gllm_inference_binary-0.5.26b1.dist-info/WHEEL,sha256=WMelAR6z66VnlU3tu68fV-jM5qbG8iPyeTqaBcpU3pI,108
+ gllm_inference_binary-0.5.26b1.dist-info/top_level.txt,sha256=FpOjtN80F-qVNgbScXSEyqa0w09FYn6301iq6qt69IQ,15
+ gllm_inference_binary-0.5.26b1.dist-info/RECORD,,
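Each RECORD line above follows the wheel spec: `path,sha256=<urlsafe base64 digest without padding>,<size in bytes>`. A short sketch of how such an entry is produced; the helper name and example path are illustrative only.

```python
# Sketch: compute a RECORD-style entry for a file, i.e. urlsafe base64 of its SHA-256 digest
# with the "=" padding stripped, followed by the file size in bytes.
import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"

# e.g. record_entry("gllm_inference/constants.pyi")
```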
gllm_inference_binary-0.5.24.dist-info/WHEEL → gllm_inference_binary-0.5.26b1.dist-info/WHEEL
@@ -1,4 +1,5 @@
  Wheel-Version: 1.0
- Generator: poetry-core 1.9.0
+ Generator: Nuitka (2.6.9)
  Root-Is-Purelib: false
  Tag: cp311-cp311-manylinux_2_31_x86_64
+
gllm_inference_binary-0.5.26b1.dist-info/top_level.txt (new file)
@@ -0,0 +1 @@
+ gllm_inference