gllm-inference-binary 0.5.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of gllm-inference-binary might be problematic. Click here for more details.

Files changed (94) hide show
  1. gllm_inference/__init__.pyi +0 -0
  2. gllm_inference/builder/__init__.pyi +6 -0
  3. gllm_inference/builder/build_em_invoker.pyi +122 -0
  4. gllm_inference/builder/build_lm_invoker.pyi +150 -0
  5. gllm_inference/builder/build_lm_request_processor.pyi +93 -0
  6. gllm_inference/builder/build_output_parser.pyi +29 -0
  7. gllm_inference/catalog/__init__.pyi +4 -0
  8. gllm_inference/catalog/catalog.pyi +121 -0
  9. gllm_inference/catalog/lm_request_processor_catalog.pyi +112 -0
  10. gllm_inference/catalog/prompt_builder_catalog.pyi +82 -0
  11. gllm_inference/constants.pyi +10 -0
  12. gllm_inference/em_invoker/__init__.pyi +9 -0
  13. gllm_inference/em_invoker/azure_openai_em_invoker.pyi +86 -0
  14. gllm_inference/em_invoker/em_invoker.pyi +83 -0
  15. gllm_inference/em_invoker/google_em_invoker.pyi +124 -0
  16. gllm_inference/em_invoker/langchain/__init__.pyi +3 -0
  17. gllm_inference/em_invoker/langchain/em_invoker_embeddings.pyi +62 -0
  18. gllm_inference/em_invoker/langchain_em_invoker.pyi +41 -0
  19. gllm_inference/em_invoker/openai_compatible_em_invoker.pyi +92 -0
  20. gllm_inference/em_invoker/openai_em_invoker.pyi +86 -0
  21. gllm_inference/em_invoker/schema/__init__.pyi +0 -0
  22. gllm_inference/em_invoker/schema/google.pyi +7 -0
  23. gllm_inference/em_invoker/schema/langchain.pyi +4 -0
  24. gllm_inference/em_invoker/schema/openai.pyi +7 -0
  25. gllm_inference/em_invoker/schema/openai_compatible.pyi +7 -0
  26. gllm_inference/em_invoker/schema/twelvelabs.pyi +17 -0
  27. gllm_inference/em_invoker/schema/voyage.pyi +15 -0
  28. gllm_inference/em_invoker/twelevelabs_em_invoker.pyi +99 -0
  29. gllm_inference/em_invoker/voyage_em_invoker.pyi +101 -0
  30. gllm_inference/exceptions/__init__.pyi +4 -0
  31. gllm_inference/exceptions/error_parser.pyi +48 -0
  32. gllm_inference/exceptions/exceptions.pyi +120 -0
  33. gllm_inference/lm_invoker/__init__.pyi +11 -0
  34. gllm_inference/lm_invoker/anthropic_lm_invoker.pyi +275 -0
  35. gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi +253 -0
  36. gllm_inference/lm_invoker/bedrock_lm_invoker.pyi +232 -0
  37. gllm_inference/lm_invoker/datasaur_lm_invoker.pyi +166 -0
  38. gllm_inference/lm_invoker/google_lm_invoker.pyi +310 -0
  39. gllm_inference/lm_invoker/langchain_lm_invoker.pyi +258 -0
  40. gllm_inference/lm_invoker/litellm_lm_invoker.pyi +248 -0
  41. gllm_inference/lm_invoker/lm_invoker.pyi +151 -0
  42. gllm_inference/lm_invoker/openai_compatible_lm_invoker.pyi +265 -0
  43. gllm_inference/lm_invoker/openai_lm_invoker.pyi +361 -0
  44. gllm_inference/lm_invoker/schema/__init__.pyi +0 -0
  45. gllm_inference/lm_invoker/schema/anthropic.pyi +50 -0
  46. gllm_inference/lm_invoker/schema/bedrock.pyi +48 -0
  47. gllm_inference/lm_invoker/schema/datasaur.pyi +12 -0
  48. gllm_inference/lm_invoker/schema/google.pyi +24 -0
  49. gllm_inference/lm_invoker/schema/langchain.pyi +22 -0
  50. gllm_inference/lm_invoker/schema/openai.pyi +90 -0
  51. gllm_inference/lm_invoker/schema/openai_compatible.pyi +56 -0
  52. gllm_inference/model/__init__.pyi +9 -0
  53. gllm_inference/model/em/__init__.pyi +0 -0
  54. gllm_inference/model/em/google_em.pyi +16 -0
  55. gllm_inference/model/em/openai_em.pyi +15 -0
  56. gllm_inference/model/em/twelvelabs_em.pyi +13 -0
  57. gllm_inference/model/em/voyage_em.pyi +20 -0
  58. gllm_inference/model/lm/__init__.pyi +0 -0
  59. gllm_inference/model/lm/anthropic_lm.pyi +20 -0
  60. gllm_inference/model/lm/google_lm.pyi +17 -0
  61. gllm_inference/model/lm/openai_lm.pyi +27 -0
  62. gllm_inference/output_parser/__init__.pyi +3 -0
  63. gllm_inference/output_parser/json_output_parser.pyi +60 -0
  64. gllm_inference/output_parser/output_parser.pyi +27 -0
  65. gllm_inference/prompt_builder/__init__.pyi +3 -0
  66. gllm_inference/prompt_builder/prompt_builder.pyi +56 -0
  67. gllm_inference/prompt_formatter/__init__.pyi +7 -0
  68. gllm_inference/prompt_formatter/agnostic_prompt_formatter.pyi +49 -0
  69. gllm_inference/prompt_formatter/huggingface_prompt_formatter.pyi +55 -0
  70. gllm_inference/prompt_formatter/llama_prompt_formatter.pyi +59 -0
  71. gllm_inference/prompt_formatter/mistral_prompt_formatter.pyi +53 -0
  72. gllm_inference/prompt_formatter/openai_prompt_formatter.pyi +35 -0
  73. gllm_inference/prompt_formatter/prompt_formatter.pyi +30 -0
  74. gllm_inference/request_processor/__init__.pyi +4 -0
  75. gllm_inference/request_processor/lm_request_processor.pyi +92 -0
  76. gllm_inference/request_processor/uses_lm_mixin.pyi +50 -0
  77. gllm_inference/schema/__init__.pyi +13 -0
  78. gllm_inference/schema/attachment.pyi +88 -0
  79. gllm_inference/schema/code_exec_result.pyi +14 -0
  80. gllm_inference/schema/enums.pyi +24 -0
  81. gllm_inference/schema/lm_output.pyi +36 -0
  82. gllm_inference/schema/message.pyi +52 -0
  83. gllm_inference/schema/model_id.pyi +138 -0
  84. gllm_inference/schema/reasoning.pyi +15 -0
  85. gllm_inference/schema/token_usage.pyi +11 -0
  86. gllm_inference/schema/tool_call.pyi +14 -0
  87. gllm_inference/schema/tool_result.pyi +11 -0
  88. gllm_inference/schema/type_alias.pyi +15 -0
  89. gllm_inference/utils/__init__.pyi +4 -0
  90. gllm_inference/utils/langchain.pyi +30 -0
  91. gllm_inference/utils/validation.pyi +12 -0
  92. gllm_inference_binary-0.5.9.dist-info/METADATA +108 -0
  93. gllm_inference_binary-0.5.9.dist-info/RECORD +94 -0
  94. gllm_inference_binary-0.5.9.dist-info/WHEEL +4 -0
@@ -0,0 +1,108 @@
1
+ Metadata-Version: 2.1
2
+ Name: gllm-inference-binary
3
+ Version: 0.5.9
4
+ Summary: A library containing components related to model inferences in Gen AI applications.
5
+ Author: Henry Wicaksono
6
+ Author-email: henry.wicaksono@gdplabs.id
7
+ Requires-Python: >=3.11,<3.14
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: Programming Language :: Python :: 3.11
10
+ Classifier: Programming Language :: Python :: 3.12
11
+ Provides-Extra: anthropic
12
+ Provides-Extra: bedrock
13
+ Provides-Extra: datasaur
14
+ Provides-Extra: google
15
+ Provides-Extra: huggingface
16
+ Provides-Extra: litellm
17
+ Provides-Extra: openai
18
+ Provides-Extra: twelvelabs
19
+ Provides-Extra: voyage
20
+ Requires-Dist: aioboto3 (>=15.0.0,<16.0.0) ; extra == "bedrock"
21
+ Requires-Dist: aiohttp (>=3.12.14,<4.0.0)
22
+ Requires-Dist: anthropic (>=0.60.0,<0.61.0) ; extra == "anthropic"
23
+ Requires-Dist: filetype (>=1.2.0,<2.0.0)
24
+ Requires-Dist: gllm-core-binary (>=0.3.0,<0.4.0)
25
+ Requires-Dist: google-genai (==1.20.0) ; extra == "google"
26
+ Requires-Dist: httpx (>=0.28.0,<0.29.0)
27
+ Requires-Dist: huggingface-hub (>=0.30.0,<0.31.0) ; extra == "huggingface"
28
+ Requires-Dist: jinja2 (>=3.1.4,<4.0.0)
29
+ Requires-Dist: jsonschema (>=4.24.0,<5.0.0)
30
+ Requires-Dist: langchain (>=0.3.0,<0.4.0)
31
+ Requires-Dist: litellm (>=1.69.2,<2.0.0) ; extra == "litellm"
32
+ Requires-Dist: openai (>=1.98.0,<2.0.0) ; extra == "datasaur" or extra == "openai"
33
+ Requires-Dist: pandas (>=2.2.3,<3.0.0)
34
+ Requires-Dist: poetry (>=2.1.3,<3.0.0)
35
+ Requires-Dist: protobuf (>=5.28.2,<6.0.0)
36
+ Requires-Dist: python-magic (>=0.4.27,<0.5.0) ; sys_platform != "win32"
37
+ Requires-Dist: python-magic-bin (>=0.4.14,<0.5.0) ; sys_platform == "win32"
38
+ Requires-Dist: sentencepiece (>=0.2.0,<0.3.0)
39
+ Requires-Dist: transformers (==4.52.4) ; extra == "huggingface"
40
+ Requires-Dist: twelvelabs (>=0.4.4,<0.5.0) ; extra == "twelvelabs"
41
+ Requires-Dist: voyageai (>=0.3.0,<0.4.0) ; (python_version < "3.13") and (extra == "voyage")
42
+ Description-Content-Type: text/markdown
43
+
44
+ # GLLM Inference
45
+
46
+ ## Description
47
+
48
+ A library containing components related to model inferences in Gen AI applications.
49
+
50
+ ## Installation
51
+
52
+ ### Prerequisites
53
+ - Python 3.11+ - [Install here](https://www.python.org/downloads/)
54
+ - Pip (if using Pip) - [Install here](https://pip.pypa.io/en/stable/installation/)
55
+ - Poetry 1.8.1+ (if using Poetry) - [Install here](https://python-poetry.org/docs/#installation)
56
+ - Git (if using Git) - [Install here](https://git-scm.com/downloads)
57
+ - For git installation:
58
+ - Access to the [GDP Labs SDK github repository](https://github.com/GDP-ADMIN/gen-ai-internal)
59
+
60
+ ### 1. Installation from Artifact Registry
61
+ Choose one of the following methods to install the package:
62
+
63
+ #### Using pip
64
+ ```bash
65
+ pip install gllm-inference-binary
66
+ ```
67
+
68
+ #### Using Poetry
69
+ ```bash
70
+ poetry add gllm-inference-binary
71
+ ```
72
+
73
+ ### 2. Development Installation (Git)
74
+ For development purposes, you can install directly from the Git repository:
75
+ ```bash
76
+ poetry add "git+ssh://git@github.com/GDP-ADMIN/gen-ai-internal.git#subdirectory=libs/gllm-inference"
77
+ ```
78
+
79
+ Available extras:
80
+ - `anthropic`: Install Anthropic models dependencies
81
+ - `google-genai`: Install Google Generative AI models dependencies
82
+ - `google-vertexai`: Install Google Vertex AI models dependencies
83
+ - `huggingface`: Install HuggingFace models dependencies
84
+ - `openai`: Install OpenAI models dependencies
85
+ - `twelvelabs`: Install TwelveLabs models dependencies
86
+
87
+ ## Managing Dependencies
88
+ 1. Go to the root folder of the `gllm-inference` module, e.g. `cd libs/gllm-inference`.
89
+ 2. Run `poetry shell` to create a virtual environment.
90
+ 3. Run `poetry lock` to create a lock file if you haven't done it yet.
91
+ 4. Run `poetry install` to install the `gllm-inference` requirements for the first time.
92
+ 5. Run `poetry update` if you update any dependency module version at `pyproject.toml`.
93
+
94
+ ## Contributing
95
+ Please refer to this [Python Style Guide](https://docs.google.com/document/d/1uRggCrHnVfDPBnG641FyQBwUwLoFw0kTzNqRm92vUwM/edit?usp=sharing)
96
+ to get information about the code style, documentation standards, and SCA tools that you need to use when contributing to this project.
97
+
98
+ 1. Activate `pre-commit` hooks using `pre-commit install`
99
+ 2. Run `poetry shell` to create a virtual environment.
100
+ 3. Run `poetry lock` to create a lock file if you haven't done it yet.
101
+ 4. Run `poetry install` to install the `gllm-inference` requirements for the first time.
102
+ 5. Run `which python` to get the path to be set as the Visual Studio Code interpreter path (`Ctrl`+`Shift`+`P` or `Cmd`+`Shift`+`P`)
103
+ 6. Try running the unit test to see if it's working:
104
+ ```bash
105
+ poetry run pytest -s tests/unit_tests/
106
+ ```
107
+
108
+
@@ -0,0 +1,94 @@
1
+ gllm_inference/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ gllm_inference/builder/__init__.pyi,sha256=-bw1uDx7CAM7pkvjvb1ZXku9zXlQ7aEAyC83KIn3bz8,506
3
+ gllm_inference/builder/build_em_invoker.pyi,sha256=PGRHlmiQ-GUTDC51PwYFjVkXRxeN0immnaSBOI06Uno,5474
4
+ gllm_inference/builder/build_lm_invoker.pyi,sha256=6dQha47M19hllF5ID5xUeiNPmbWUpKyNbG9D78qFGck,6618
5
+ gllm_inference/builder/build_lm_request_processor.pyi,sha256=0pJINCP4nnXVwuhIbhsaiwzjX8gohQt2oqXFZhTFSUs,4584
6
+ gllm_inference/builder/build_output_parser.pyi,sha256=sgSTrzUmSRxPzUUum0fDU7A3NXYoYhpi6bEx4Q2XMnA,965
7
+ gllm_inference/catalog/__init__.pyi,sha256=HWgPKWIzprpMHRKe_qN9BZSIQhVhrqiyjLjIXwvj1ho,291
8
+ gllm_inference/catalog/catalog.pyi,sha256=eWPqgQKi-SJGHabi_XOTEKpAj96OSRypKsb5ZEC1VWU,4911
9
+ gllm_inference/catalog/lm_request_processor_catalog.pyi,sha256=GemCEjFRHNChtNOfbyXSVsJiA3klOCAe_X11fnymhYs,5540
10
+ gllm_inference/catalog/prompt_builder_catalog.pyi,sha256=iViWB4SaezzjQY4UY1YxeoXUNxqxa2cTJGaD9JSx4Q8,3279
11
+ gllm_inference/constants.pyi,sha256=q1bzWw4fcz2RbdqQs3WYpvWc5aeassX-uQ5F0pDcKw4,295
12
+ gllm_inference/em_invoker/__init__.pyi,sha256=eZifmg3ZS3YdFUwbGPTurrfF4oV_MAPvqErJe7oTpZI,882
13
+ gllm_inference/em_invoker/azure_openai_em_invoker.pyi,sha256=QimqPII-KN9OgsfH1Iubn_tCHhtWjPQ5rilZoT6Ir-U,4688
14
+ gllm_inference/em_invoker/em_invoker.pyi,sha256=KX4i0xBWR5j6z14nEL6T8at3StKfdf3miQ4xixtYhZk,4424
15
+ gllm_inference/em_invoker/google_em_invoker.pyi,sha256=c0XJuLS4ji-Y9aHmbb8PRLwrbH6QA2WtdS1-2HbWFq8,6306
16
+ gllm_inference/em_invoker/langchain/__init__.pyi,sha256=aOTlRvS9aG1tBErjsmhe75s4Sq-g2z9ArfGqNW7QyEs,151
17
+ gllm_inference/em_invoker/langchain/em_invoker_embeddings.pyi,sha256=gEX21gJLngUh9fZo8v6Vbh0gpWFFqS2S-dGNZSrDjFQ,2409
18
+ gllm_inference/em_invoker/langchain_em_invoker.pyi,sha256=sFmsRE89MIdnD8g0VSMsdLvtfZL6dfPkUtDhH_WfgLc,2823
19
+ gllm_inference/em_invoker/openai_compatible_em_invoker.pyi,sha256=S5lRg3MeLoenOkeAG079I22kPaFXAFrltSoWcQSDK4I,5070
20
+ gllm_inference/em_invoker/openai_em_invoker.pyi,sha256=1WTuPtu5RlZCUcBHMXR5xEkAufWCHshKA8_JW7oFakE,4321
21
+ gllm_inference/em_invoker/schema/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
22
+ gllm_inference/em_invoker/schema/google.pyi,sha256=lPzJ-f18qVar6dctdN4eQWrxWrOFHC9zJ4cuLXXMytw,153
23
+ gllm_inference/em_invoker/schema/langchain.pyi,sha256=JPEqA6naKL64jpW8NEKsEP-V9STY2h8wvyDsFtFEHos,96
24
+ gllm_inference/em_invoker/schema/openai.pyi,sha256=rNRqN62y5wHOKlr4T0n0m41ikAnSrD72CTnoHxo6kEM,146
25
+ gllm_inference/em_invoker/schema/openai_compatible.pyi,sha256=A9MOeBhI-IPuvewOk4YYOAGtgyKohERx6-9cEYtbwvs,157
26
+ gllm_inference/em_invoker/schema/twelvelabs.pyi,sha256=D3F9_1F-UTzE6Ymxj6u0IFdL6OFVGlc7noZJr3iuA6I,389
27
+ gllm_inference/em_invoker/schema/voyage.pyi,sha256=Aqvu6mhFkNb01aXAI5mChLKIgEnFnr-jNKq1lVWB54M,304
28
+ gllm_inference/em_invoker/twelevelabs_em_invoker.pyi,sha256=YGWQNxv3AJ9BpN6HrQSnATiW_p0dRakkqy-JgxNIlf4,5165
29
+ gllm_inference/em_invoker/voyage_em_invoker.pyi,sha256=R8IPBOEhIN84ukof-VkTPxPNbmbkwR_imTa5u6Qyjt0,5235
30
+ gllm_inference/exceptions/__init__.pyi,sha256=2F05RytXZIKaOJScb1pD0O0bATIQHVeEAYYNX4y5N2A,981
31
+ gllm_inference/exceptions/error_parser.pyi,sha256=ggmh8DJXdwFJInNLrP24WVJt_4raxbAVxzXRQgBpndA,2441
32
+ gllm_inference/exceptions/exceptions.pyi,sha256=ViXvIzm7tLcstjqfwC6nPziDg0UAmoUAWZVWrAJyp3w,4763
33
+ gllm_inference/lm_invoker/__init__.pyi,sha256=g-wu6W6ly_WAVPLDWKjt4J5cMo-CJ1x5unuObVSUnug,1115
34
+ gllm_inference/lm_invoker/anthropic_lm_invoker.pyi,sha256=85uvShLv4-eiGOpTMgwWpQGZXPW6XaB6GrexBmxg_sQ,15200
35
+ gllm_inference/lm_invoker/azure_openai_lm_invoker.pyi,sha256=N2TjGz5Gi6xiLkAgI6SzWq_V3tj66HJfMNff7d04uU0,14856
36
+ gllm_inference/lm_invoker/bedrock_lm_invoker.pyi,sha256=ae5P_9sjtcOgMIUaRchvp8F0FujoeP4e2F_OoHSe_go,12655
37
+ gllm_inference/lm_invoker/datasaur_lm_invoker.pyi,sha256=c4H3TOz0LIhWjokCCdQ4asiwQR4_LPyaimo4RAqU9es,9369
38
+ gllm_inference/lm_invoker/google_lm_invoker.pyi,sha256=Ph9QmTdFlar92iUDDcWwwVY_KDkt9Rumfi41zpTNfRg,16853
39
+ gllm_inference/lm_invoker/langchain_lm_invoker.pyi,sha256=bBGOxJfjnzOtDR4kH4PuCiOCKEPu8rTqzZodTXCHQ2k,13522
40
+ gllm_inference/lm_invoker/litellm_lm_invoker.pyi,sha256=HHwW7i8ryXHI23JZQwscyva6aPmPOB13Muhf7gaaMUM,13376
41
+ gllm_inference/lm_invoker/lm_invoker.pyi,sha256=YNJ0Sh_BOl1WbC69xvuxWM75qyByXJSXAYWSwtQ84cc,7960
42
+ gllm_inference/lm_invoker/openai_compatible_lm_invoker.pyi,sha256=U9dolHJT1pDsiiyrdpSAAdcBkil4_qeG_3BKfygq8GM,15193
43
+ gllm_inference/lm_invoker/openai_lm_invoker.pyi,sha256=SEHWAwpT8KmIQukurXtXOU2xyU2rp_HtM2SARsBF3dU,19892
44
+ gllm_inference/lm_invoker/schema/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
45
+ gllm_inference/lm_invoker/schema/anthropic.pyi,sha256=lGJ7xYLchdtv6003Is4GcaKiGdbmIOAzQsaldKG0Aww,1041
46
+ gllm_inference/lm_invoker/schema/bedrock.pyi,sha256=H3attoGWhBA725W4FpXw7Mty46N9jHKjw9PT-0lMEJs,975
47
+ gllm_inference/lm_invoker/schema/datasaur.pyi,sha256=GLv6XAwKtWyRrX6EsbEufYjkPffHNiEpXwJOn9HqxMA,242
48
+ gllm_inference/lm_invoker/schema/google.pyi,sha256=elXHrUMS46pbTsulk7hBXVVFcT022iD-_U_I590xeV8,529
49
+ gllm_inference/lm_invoker/schema/langchain.pyi,sha256=uEG0DSD0z4L_rDMkBm-TtUy5oTyEHEEJWiLsYvFf1sw,431
50
+ gllm_inference/lm_invoker/schema/openai.pyi,sha256=Cxp5QMkF6lspcVUgCNZR1qDK43Fj6OoEdOiQ1x5arsQ,1992
51
+ gllm_inference/lm_invoker/schema/openai_compatible.pyi,sha256=FnRfc3CiqY-y6WmZqi2OhxOnNrZENBEXCmk2WPADkBQ,1157
52
+ gllm_inference/model/__init__.pyi,sha256=JKQB0wVSVYD-_tdRkG7N_oEVAKGCcoBw0BUOUMLieFo,602
53
+ gllm_inference/model/em/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
54
+ gllm_inference/model/em/google_em.pyi,sha256=c53H-KNdNOK9ppPLyOSkmCA890eF5FsMd05upkPIzF0,487
55
+ gllm_inference/model/em/openai_em.pyi,sha256=b6ID1JsLZH9OAo9E37CkbgWNR_eI65eKXK6TYi_0ndA,457
56
+ gllm_inference/model/em/twelvelabs_em.pyi,sha256=5R2zkKDiEatdATFzF8TOoKW9XRkOsOoNGY5lORimueo,413
57
+ gllm_inference/model/em/voyage_em.pyi,sha256=kTInLttWfPqCNfBX-TK5VMMaFfPxwqqudBw1kz4hnxk,551
58
+ gllm_inference/model/lm/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
59
+ gllm_inference/model/lm/anthropic_lm.pyi,sha256=3rppksDF4nVAR3Konoj6nRi_T8vSaFPxLub1CzJh7Us,578
60
+ gllm_inference/model/lm/google_lm.pyi,sha256=yv5nXnLxuCGDUsh7QP9furSx-6sZj6FQi-pJ9lZbHAk,496
61
+ gllm_inference/model/lm/openai_lm.pyi,sha256=yj3AJj1xDYRkNIPHX2enw46AJ9wArPZruKsxg1ME9Rg,645
62
+ gllm_inference/output_parser/__init__.pyi,sha256=dhAeRTBxc6CfS8bhnHjbtrnyqJ1iyffvUZkGp4UrJNM,132
63
+ gllm_inference/output_parser/json_output_parser.pyi,sha256=YtgQh8Uzy8W_Tgh8DfuR7VFFS7qvLEasiTwRfaGZZEU,2993
64
+ gllm_inference/output_parser/output_parser.pyi,sha256=-Xu5onKCBDqShcO-VrQh5icqAmXdihGc3rkZxL93swg,975
65
+ gllm_inference/prompt_builder/__init__.pyi,sha256=mPsbiafzSNHsgN-CuzjhgZpfXfi1pPC3_gdsq2p0EM4,120
66
+ gllm_inference/prompt_builder/prompt_builder.pyi,sha256=ju52smKHT_Bh2EVMZBWe1Z0ZQjD5aPBDLI_xLaILcgo,3334
67
+ gllm_inference/prompt_formatter/__init__.pyi,sha256=q5sPPrnoCf-4tMGowh7hXxs63uyWfaZyEI-wjLBTGsA,747
68
+ gllm_inference/prompt_formatter/agnostic_prompt_formatter.pyi,sha256=qp4L3x7XK7oZaSYP8B4idewKpPioB4XELeKVV-dNi-Q,2067
69
+ gllm_inference/prompt_formatter/huggingface_prompt_formatter.pyi,sha256=kH60A_3DnHd3BrqbgS_FqQTCTHIjC9BTsk6_FNgcZw8,2784
70
+ gllm_inference/prompt_formatter/llama_prompt_formatter.pyi,sha256=m3m3gXLXbk1LMfWn7a6raKlnSjgMQJNlGsE6FlijogA,2907
71
+ gllm_inference/prompt_formatter/mistral_prompt_formatter.pyi,sha256=bpRXB26qw1REnnY1PqVm8D-Eo-dWYMVLb0s31-g0g_Y,2703
72
+ gllm_inference/prompt_formatter/openai_prompt_formatter.pyi,sha256=xGpytprs5W1TogHFYbsYxBPClIuQc0tXfZSzR9ypRC4,1321
73
+ gllm_inference/prompt_formatter/prompt_formatter.pyi,sha256=hAc6rxWc6JSYdD-OypLixGKXlPA8djE7zJqZpVKXcOs,1176
74
+ gllm_inference/request_processor/__init__.pyi,sha256=giEme2WFQhgyKiBZHhSet0_nKSCHwGy-_2p6NRzg0Zc,231
75
+ gllm_inference/request_processor/lm_request_processor.pyi,sha256=rInXhC95BvQnw9q98KZWpjPH8Q_TV4zC2ycNjypEBZ4,5516
76
+ gllm_inference/request_processor/uses_lm_mixin.pyi,sha256=znBG4AWWm_H70Qqrc1mO4ohmWotX9id81Fqe-x9Qa6Q,2371
77
+ gllm_inference/schema/__init__.pyi,sha256=-ldt0xJQJirVNdwLFev3bmzmFRw9HSUWBRmmIVH7uyU,1251
78
+ gllm_inference/schema/attachment.pyi,sha256=9zgAjGXBjLfzPGaKi68FMW6b5mXdEA352nDe-ynOSvY,3385
79
+ gllm_inference/schema/code_exec_result.pyi,sha256=WQ-ARoGM9r6nyRX-A0Ro1XKiqrc9R3jRYXZpu_xo5S4,573
80
+ gllm_inference/schema/enums.pyi,sha256=SQ9mXt8j7uK333uUnUHRs-mkRxf0Z5NCtkAkgQZPIb4,629
81
+ gllm_inference/schema/lm_output.pyi,sha256=WP2LQrY0D03OJtFoaW_dGoJ_-yFUh2HbVlllgjzpYv4,1992
82
+ gllm_inference/schema/message.pyi,sha256=jJV6A0ihEcun2OhzyMtNkiHnf7d6v5R-GdpTBGfJ0AQ,2272
83
+ gllm_inference/schema/model_id.pyi,sha256=3prO19l-FCSecRupe93ruXe91-Xw3GJOpbuQ66bijo0,5368
84
+ gllm_inference/schema/reasoning.pyi,sha256=jbPxkDRHt0Vt-zdcc8lTT1l2hIE1Jm3HIHeNd0hfXGo,577
85
+ gllm_inference/schema/token_usage.pyi,sha256=Eevs8S-yXoM7kQkkzhXHEvORU8DMGzdQynAamqtIoX4,323
86
+ gllm_inference/schema/tool_call.pyi,sha256=OWT9LUqs_xfUcOkPG0aokAAqzLYYDkfnjTa0zOWvugk,403
87
+ gllm_inference/schema/tool_result.pyi,sha256=IJsU3n8y0Q9nFMEiq4RmLEIHueSiim0Oz_DlhKrTqto,287
88
+ gllm_inference/schema/type_alias.pyi,sha256=qAljeBoeQEfT601maGe_mEpXD9inNzbGte1i6joQafc,740
89
+ gllm_inference/utils/__init__.pyi,sha256=RBTWDu1TDPpTd17fixcPYFv2L_vp4-IAOX0IsxgCsD4,299
90
+ gllm_inference/utils/langchain.pyi,sha256=4AwFiVAO0ZpdgmqeC4Pb5NJwBt8vVr0MSUqLeCdTscc,1194
91
+ gllm_inference/utils/validation.pyi,sha256=-RdMmb8afH7F7q4Ao7x6FbwaDfxUHn3hA3WiOgzB-3s,397
92
+ gllm_inference_binary-0.5.9.dist-info/METADATA,sha256=C4ovZJsiSKuRWp_2DbNV57oJwtxC0sI8sPrKXlCHFN4,4531
93
+ gllm_inference_binary-0.5.9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
94
+ gllm_inference_binary-0.5.9.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: poetry-core 1.9.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any