google-genai 1.55.0__py3-none-any.whl → 1.56.0__py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (241)
  1. google/genai/_interactions/_base_client.py +8 -2
  2. google/genai/_interactions/resources/interactions.py +6 -6
  3. google/genai/_interactions/types/__init__.py +2 -0
  4. google/genai/_interactions/types/audio_content.py +0 -1
  5. google/genai/_interactions/types/audio_content_param.py +0 -1
  6. google/genai/_interactions/types/code_execution_call_content.py +0 -1
  7. google/genai/_interactions/types/code_execution_call_content_param.py +0 -1
  8. google/genai/_interactions/types/code_execution_result_content.py +0 -1
  9. google/genai/_interactions/types/code_execution_result_content_param.py +0 -1
  10. google/genai/_interactions/types/content_delta.py +7 -23
  11. google/genai/_interactions/types/deep_research_agent_config.py +0 -1
  12. google/genai/_interactions/types/deep_research_agent_config_param.py +0 -1
  13. google/genai/_interactions/types/document_content.py +3 -2
  14. google/genai/_interactions/types/document_content_param.py +3 -2
  15. google/genai/{tests/__init__.py → _interactions/types/document_mime_type.py} +5 -3
  16. google/genai/{tests/afc/__init__.py → _interactions/types/document_mime_type_param.py} +8 -4
  17. google/genai/_interactions/types/dynamic_agent_config.py +0 -1
  18. google/genai/_interactions/types/dynamic_agent_config_param.py +0 -1
  19. google/genai/_interactions/types/file_search_result_content.py +0 -1
  20. google/genai/_interactions/types/file_search_result_content_param.py +0 -1
  21. google/genai/_interactions/types/function_call_content.py +0 -1
  22. google/genai/_interactions/types/function_call_content_param.py +0 -1
  23. google/genai/_interactions/types/function_result_content.py +1 -2
  24. google/genai/_interactions/types/function_result_content_param.py +1 -2
  25. google/genai/_interactions/types/google_search_call_content.py +0 -1
  26. google/genai/_interactions/types/google_search_call_content_param.py +0 -1
  27. google/genai/_interactions/types/google_search_result_content.py +0 -1
  28. google/genai/_interactions/types/google_search_result_content_param.py +0 -1
  29. google/genai/_interactions/types/image_content.py +1 -2
  30. google/genai/_interactions/types/image_content_param.py +1 -2
  31. google/genai/_interactions/types/mcp_server_tool_call_content.py +0 -1
  32. google/genai/_interactions/types/mcp_server_tool_call_content_param.py +0 -1
  33. google/genai/_interactions/types/mcp_server_tool_result_content.py +1 -2
  34. google/genai/_interactions/types/mcp_server_tool_result_content_param.py +1 -2
  35. google/genai/_interactions/types/text_content.py +0 -1
  36. google/genai/_interactions/types/text_content_param.py +0 -1
  37. google/genai/_interactions/types/thinking_level.py +1 -1
  38. google/genai/_interactions/types/thought_content.py +0 -1
  39. google/genai/_interactions/types/thought_content_param.py +0 -1
  40. google/genai/_interactions/types/url_context_call_content.py +0 -1
  41. google/genai/_interactions/types/url_context_call_content_param.py +0 -1
  42. google/genai/_interactions/types/url_context_result_content.py +0 -1
  43. google/genai/_interactions/types/url_context_result_content_param.py +0 -1
  44. google/genai/_interactions/types/video_content.py +1 -2
  45. google/genai/_interactions/types/video_content_param.py +1 -2
  46. google/genai/_live_converters.py +2 -30
  47. google/genai/client.py +3 -1
  48. google/genai/models.py +2 -29
  49. google/genai/tunings.py +1 -27
  50. google/genai/types.py +20 -22
  51. google/genai/version.py +1 -1
  52. {google_genai-1.55.0.dist-info → google_genai-1.56.0.dist-info}/METADATA +224 -22
  53. google_genai-1.56.0.dist-info/RECORD +162 -0
  54. google/genai/tests/afc/test_convert_if_exist_pydantic_model.py +0 -309
  55. google/genai/tests/afc/test_convert_number_values_for_function_call_args.py +0 -63
  56. google/genai/tests/afc/test_find_afc_incompatible_tool_indexes.py +0 -240
  57. google/genai/tests/afc/test_generate_content_stream_afc.py +0 -530
  58. google/genai/tests/afc/test_generate_content_stream_afc_thoughts.py +0 -77
  59. google/genai/tests/afc/test_get_function_map.py +0 -176
  60. google/genai/tests/afc/test_get_function_response_parts.py +0 -277
  61. google/genai/tests/afc/test_get_max_remote_calls_for_afc.py +0 -130
  62. google/genai/tests/afc/test_invoke_function_from_dict_args.py +0 -241
  63. google/genai/tests/afc/test_raise_error_for_afc_incompatible_config.py +0 -159
  64. google/genai/tests/afc/test_should_append_afc_history.py +0 -53
  65. google/genai/tests/afc/test_should_disable_afc.py +0 -214
  66. google/genai/tests/batches/__init__.py +0 -17
  67. google/genai/tests/batches/test_cancel.py +0 -77
  68. google/genai/tests/batches/test_create.py +0 -78
  69. google/genai/tests/batches/test_create_with_bigquery.py +0 -113
  70. google/genai/tests/batches/test_create_with_file.py +0 -82
  71. google/genai/tests/batches/test_create_with_gcs.py +0 -125
  72. google/genai/tests/batches/test_create_with_inlined_requests.py +0 -255
  73. google/genai/tests/batches/test_delete.py +0 -86
  74. google/genai/tests/batches/test_embedding.py +0 -157
  75. google/genai/tests/batches/test_get.py +0 -78
  76. google/genai/tests/batches/test_list.py +0 -79
  77. google/genai/tests/caches/__init__.py +0 -17
  78. google/genai/tests/caches/constants.py +0 -29
  79. google/genai/tests/caches/test_create.py +0 -210
  80. google/genai/tests/caches/test_create_custom_url.py +0 -105
  81. google/genai/tests/caches/test_delete.py +0 -54
  82. google/genai/tests/caches/test_delete_custom_url.py +0 -52
  83. google/genai/tests/caches/test_get.py +0 -94
  84. google/genai/tests/caches/test_get_custom_url.py +0 -52
  85. google/genai/tests/caches/test_list.py +0 -68
  86. google/genai/tests/caches/test_update.py +0 -70
  87. google/genai/tests/caches/test_update_custom_url.py +0 -58
  88. google/genai/tests/chats/__init__.py +0 -1
  89. google/genai/tests/chats/test_get_history.py +0 -597
  90. google/genai/tests/chats/test_send_message.py +0 -844
  91. google/genai/tests/chats/test_validate_response.py +0 -90
  92. google/genai/tests/client/__init__.py +0 -17
  93. google/genai/tests/client/test_async_stream.py +0 -427
  94. google/genai/tests/client/test_client_close.py +0 -197
  95. google/genai/tests/client/test_client_initialization.py +0 -1687
  96. google/genai/tests/client/test_client_requests.py +0 -355
  97. google/genai/tests/client/test_custom_client.py +0 -77
  98. google/genai/tests/client/test_http_options.py +0 -178
  99. google/genai/tests/client/test_replay_client_equality.py +0 -168
  100. google/genai/tests/client/test_retries.py +0 -846
  101. google/genai/tests/client/test_upload_errors.py +0 -136
  102. google/genai/tests/common/__init__.py +0 -17
  103. google/genai/tests/common/test_common.py +0 -954
  104. google/genai/tests/conftest.py +0 -162
  105. google/genai/tests/documents/__init__.py +0 -17
  106. google/genai/tests/documents/test_delete.py +0 -51
  107. google/genai/tests/documents/test_get.py +0 -85
  108. google/genai/tests/documents/test_list.py +0 -72
  109. google/genai/tests/errors/__init__.py +0 -1
  110. google/genai/tests/errors/test_api_error.py +0 -417
  111. google/genai/tests/file_search_stores/__init__.py +0 -17
  112. google/genai/tests/file_search_stores/test_create.py +0 -66
  113. google/genai/tests/file_search_stores/test_delete.py +0 -64
  114. google/genai/tests/file_search_stores/test_get.py +0 -94
  115. google/genai/tests/file_search_stores/test_import_file.py +0 -112
  116. google/genai/tests/file_search_stores/test_list.py +0 -57
  117. google/genai/tests/file_search_stores/test_upload_to_file_search_store.py +0 -141
  118. google/genai/tests/files/__init__.py +0 -17
  119. google/genai/tests/files/test_delete.py +0 -46
  120. google/genai/tests/files/test_download.py +0 -85
  121. google/genai/tests/files/test_get.py +0 -46
  122. google/genai/tests/files/test_list.py +0 -72
  123. google/genai/tests/files/test_upload.py +0 -255
  124. google/genai/tests/imports/test_no_optional_imports.py +0 -28
  125. google/genai/tests/interactions/test_integration.py +0 -80
  126. google/genai/tests/live/__init__.py +0 -16
  127. google/genai/tests/live/test_live.py +0 -2177
  128. google/genai/tests/live/test_live_music.py +0 -362
  129. google/genai/tests/live/test_live_response.py +0 -163
  130. google/genai/tests/live/test_send_client_content.py +0 -147
  131. google/genai/tests/live/test_send_realtime_input.py +0 -268
  132. google/genai/tests/live/test_send_tool_response.py +0 -222
  133. google/genai/tests/local_tokenizer/__init__.py +0 -17
  134. google/genai/tests/local_tokenizer/test_local_tokenizer.py +0 -343
  135. google/genai/tests/local_tokenizer/test_local_tokenizer_loader.py +0 -235
  136. google/genai/tests/mcp/__init__.py +0 -17
  137. google/genai/tests/mcp/test_has_mcp_tool_usage.py +0 -89
  138. google/genai/tests/mcp/test_mcp_to_gemini_tools.py +0 -191
  139. google/genai/tests/mcp/test_parse_config_for_mcp_sessions.py +0 -201
  140. google/genai/tests/mcp/test_parse_config_for_mcp_usage.py +0 -130
  141. google/genai/tests/mcp/test_set_mcp_usage_header.py +0 -72
  142. google/genai/tests/models/__init__.py +0 -17
  143. google/genai/tests/models/constants.py +0 -8
  144. google/genai/tests/models/test_compute_tokens.py +0 -120
  145. google/genai/tests/models/test_count_tokens.py +0 -159
  146. google/genai/tests/models/test_delete.py +0 -107
  147. google/genai/tests/models/test_edit_image.py +0 -264
  148. google/genai/tests/models/test_embed_content.py +0 -94
  149. google/genai/tests/models/test_function_call_streaming.py +0 -442
  150. google/genai/tests/models/test_generate_content.py +0 -2502
  151. google/genai/tests/models/test_generate_content_cached_content.py +0 -132
  152. google/genai/tests/models/test_generate_content_config_zero_value.py +0 -103
  153. google/genai/tests/models/test_generate_content_from_apikey.py +0 -44
  154. google/genai/tests/models/test_generate_content_http_options.py +0 -40
  155. google/genai/tests/models/test_generate_content_image_generation.py +0 -143
  156. google/genai/tests/models/test_generate_content_mcp.py +0 -343
  157. google/genai/tests/models/test_generate_content_media_resolution.py +0 -97
  158. google/genai/tests/models/test_generate_content_model.py +0 -139
  159. google/genai/tests/models/test_generate_content_part.py +0 -821
  160. google/genai/tests/models/test_generate_content_thought.py +0 -76
  161. google/genai/tests/models/test_generate_content_tools.py +0 -1761
  162. google/genai/tests/models/test_generate_images.py +0 -191
  163. google/genai/tests/models/test_generate_videos.py +0 -759
  164. google/genai/tests/models/test_get.py +0 -104
  165. google/genai/tests/models/test_list.py +0 -233
  166. google/genai/tests/models/test_recontext_image.py +0 -189
  167. google/genai/tests/models/test_segment_image.py +0 -148
  168. google/genai/tests/models/test_update.py +0 -95
  169. google/genai/tests/models/test_upscale_image.py +0 -157
  170. google/genai/tests/operations/__init__.py +0 -17
  171. google/genai/tests/operations/test_get.py +0 -38
  172. google/genai/tests/public_samples/__init__.py +0 -17
  173. google/genai/tests/public_samples/test_gemini_text_only.py +0 -34
  174. google/genai/tests/pytest_helper.py +0 -229
  175. google/genai/tests/shared/__init__.py +0 -16
  176. google/genai/tests/shared/batches/__init__.py +0 -14
  177. google/genai/tests/shared/batches/test_create_delete.py +0 -57
  178. google/genai/tests/shared/batches/test_create_get_cancel.py +0 -56
  179. google/genai/tests/shared/batches/test_list.py +0 -40
  180. google/genai/tests/shared/caches/__init__.py +0 -14
  181. google/genai/tests/shared/caches/test_create_get_delete.py +0 -67
  182. google/genai/tests/shared/caches/test_create_update_get.py +0 -71
  183. google/genai/tests/shared/caches/test_list.py +0 -40
  184. google/genai/tests/shared/chats/__init__.py +0 -14
  185. google/genai/tests/shared/chats/test_send_message.py +0 -48
  186. google/genai/tests/shared/chats/test_send_message_stream.py +0 -50
  187. google/genai/tests/shared/files/__init__.py +0 -14
  188. google/genai/tests/shared/files/test_list.py +0 -41
  189. google/genai/tests/shared/files/test_upload_get_delete.py +0 -54
  190. google/genai/tests/shared/models/__init__.py +0 -14
  191. google/genai/tests/shared/models/test_compute_tokens.py +0 -41
  192. google/genai/tests/shared/models/test_count_tokens.py +0 -40
  193. google/genai/tests/shared/models/test_edit_image.py +0 -67
  194. google/genai/tests/shared/models/test_embed.py +0 -40
  195. google/genai/tests/shared/models/test_generate_content.py +0 -39
  196. google/genai/tests/shared/models/test_generate_content_stream.py +0 -54
  197. google/genai/tests/shared/models/test_generate_images.py +0 -40
  198. google/genai/tests/shared/models/test_generate_videos.py +0 -38
  199. google/genai/tests/shared/models/test_list.py +0 -37
  200. google/genai/tests/shared/models/test_recontext_image.py +0 -55
  201. google/genai/tests/shared/models/test_segment_image.py +0 -52
  202. google/genai/tests/shared/models/test_upscale_image.py +0 -52
  203. google/genai/tests/shared/tunings/__init__.py +0 -16
  204. google/genai/tests/shared/tunings/test_create.py +0 -46
  205. google/genai/tests/shared/tunings/test_create_get_cancel.py +0 -56
  206. google/genai/tests/shared/tunings/test_list.py +0 -39
  207. google/genai/tests/tokens/__init__.py +0 -16
  208. google/genai/tests/tokens/test_create.py +0 -154
  209. google/genai/tests/transformers/__init__.py +0 -17
  210. google/genai/tests/transformers/test_blobs.py +0 -71
  211. google/genai/tests/transformers/test_bytes.py +0 -15
  212. google/genai/tests/transformers/test_duck_type.py +0 -96
  213. google/genai/tests/transformers/test_function_responses.py +0 -72
  214. google/genai/tests/transformers/test_schema.py +0 -653
  215. google/genai/tests/transformers/test_t_batch.py +0 -286
  216. google/genai/tests/transformers/test_t_content.py +0 -160
  217. google/genai/tests/transformers/test_t_contents.py +0 -398
  218. google/genai/tests/transformers/test_t_part.py +0 -85
  219. google/genai/tests/transformers/test_t_parts.py +0 -87
  220. google/genai/tests/transformers/test_t_tool.py +0 -157
  221. google/genai/tests/transformers/test_t_tools.py +0 -195
  222. google/genai/tests/tunings/__init__.py +0 -16
  223. google/genai/tests/tunings/test_cancel.py +0 -39
  224. google/genai/tests/tunings/test_end_to_end.py +0 -106
  225. google/genai/tests/tunings/test_get.py +0 -67
  226. google/genai/tests/tunings/test_list.py +0 -75
  227. google/genai/tests/tunings/test_tune.py +0 -268
  228. google/genai/tests/types/__init__.py +0 -16
  229. google/genai/tests/types/test_bytes_internal.py +0 -271
  230. google/genai/tests/types/test_bytes_type.py +0 -152
  231. google/genai/tests/types/test_future.py +0 -101
  232. google/genai/tests/types/test_optional_types.py +0 -36
  233. google/genai/tests/types/test_part_type.py +0 -616
  234. google/genai/tests/types/test_schema_from_json_schema.py +0 -417
  235. google/genai/tests/types/test_schema_json_schema.py +0 -468
  236. google/genai/tests/types/test_types.py +0 -2903
  237. google_genai-1.55.0.dist-info/RECORD +0 -345
  238. /google/genai/{tests/interactions/__init__.py → _interactions/py.typed} +0 -0
  239. {google_genai-1.55.0.dist-info → google_genai-1.56.0.dist-info}/WHEEL +0 -0
  240. {google_genai-1.55.0.dist-info → google_genai-1.56.0.dist-info}/licenses/LICENSE +0 -0
  241. {google_genai-1.55.0.dist-info → google_genai-1.56.0.dist-info}/top_level.txt +0 -0
google/genai/tests/local_tokenizer/test_local_tokenizer.py
@@ -1,343 +0,0 @@
- # Copyright 2025 Google LLC
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #
-
- import unittest
- from unittest.mock import MagicMock, patch
-
- from sentencepiece import sentencepiece_model_pb2
-
- from ... import local_tokenizer
- from ... import types
-
-
- class TestLocalTokenizer(unittest.TestCase):
-
- def setUp(self):
- # This setup will be used by all tests
- self.mock_load_model_proto = patch(
- 'genai._local_tokenizer_loader.load_model_proto'
- ).start()
- self.mock_get_sentencepiece = patch(
- 'genai._local_tokenizer_loader.get_sentencepiece'
- ).start()
-
- self.mock_load_model_proto.return_value = MagicMock()
- self.mock_tokenizer = MagicMock()
- self.mock_get_sentencepiece.return_value = self.mock_tokenizer
-
- self.tokenizer = local_tokenizer.LocalTokenizer(model_name='gemini-2.5-flash')
-
- def tearDown(self):
- patch.stopall()
-
- def test_count_tokens_simple_string(self):
- self.mock_tokenizer.encode.return_value = [[1, 2, 3]]
- result = self.tokenizer.count_tokens('Hello world')
- self.assertEqual(result.total_tokens, 3)
- self.mock_tokenizer.encode.assert_called_once_with(['Hello world'])
-
- def test_count_tokens_list_of_strings(self):
- self.mock_tokenizer.encode.return_value = [[1, 2], [3]]
- result = self.tokenizer.count_tokens(['Hello', 'world'])
- self.assertEqual(result.total_tokens, 3)
- self.mock_tokenizer.encode.assert_called_once_with(['Hello', 'world'])
-
- def test_count_tokens_with_content_object(self):
- self.mock_tokenizer.encode.return_value = [[1, 2, 3]]
- content = types.Content(parts=[types.Part(text='Hello world')])
- result = self.tokenizer.count_tokens(content)
- self.assertEqual(result.total_tokens, 3)
- self.mock_tokenizer.encode.assert_called_once_with(['Hello world'])
-
- def test_count_tokens_with_chat_history(self):
- self.mock_tokenizer.encode.return_value = [[1, 2], [3, 4, 5]]
- history = [
- types.Content(role='user', parts=[types.Part(text='Hello')]),
- types.Content(role='model', parts=[types.Part(text='Hi there!')]),
- ]
- result = self.tokenizer.count_tokens(history)
- self.assertEqual(result.total_tokens, 5)
- self.mock_tokenizer.encode.assert_called_once_with(['Hello', 'Hi there!'])
-
- def test_count_tokens_with_tools(self):
- self.mock_tokenizer.encode.return_value = [
- [1],
- [1, 2],
- [1, 2, 3],
- [1, 2, 3, 4],
- [1, 2, 3, 4, 5],
- [1, 2, 3, 4, 5, 6],
- ]
- tool = types.Tool(
- function_declarations=[
- types.FunctionDeclaration(
- name='get_weather',
- description='Get the weather for a location',
- parameters=types.Schema(
- type=types.Type.OBJECT,
- properties={
- 'location': types.Schema(
- type=types.Type.STRING, description='The location'
- )
- },
- required=['location'],
- ),
- )
- ]
- )
- config = types.CountTokensConfig(tools=[tool])
- result = self.tokenizer.count_tokens(
- 'What is the weather in Boston?', config=config
- )
- self.assertEqual(result.total_tokens, 21)
- self.mock_tokenizer.encode.assert_called_once_with([
- 'What is the weather in Boston?',
- 'get_weather',
- 'Get the weather for a location',
- 'location',
- 'location',
- 'The location',
- ])
-
- def test_count_tokens_with_function_call(self):
- self.mock_tokenizer.encode.return_value = [[1, 2], [3], [4, 5]]
- content = types.Content(
- role='model',
- parts=[
- types.Part(
- function_call=types.FunctionCall(
- name='get_weather', args={'location': 'Boston'}
- )
- )
- ],
- )
- result = self.tokenizer.count_tokens(content)
- self.assertEqual(result.total_tokens, 5)
- self.mock_tokenizer.encode.assert_called_once_with(
- ['get_weather', 'location', 'Boston']
- )
-
- def test_count_tokens_with_function_response(self):
- self.mock_tokenizer.encode.return_value = [[1, 2], [3], [4, 5]]
- content = types.Content(
- role='user',
- parts=[
- types.Part(
- function_response=types.FunctionResponse(
- name='get_weather', response={'weather': 'sunny'}
- )
- )
- ],
- )
- result = self.tokenizer.count_tokens(content)
- self.assertEqual(result.total_tokens, 5)
- self.mock_tokenizer.encode.assert_called_once_with(
- ['get_weather', 'weather', 'sunny']
- )
-
- def test_count_tokens_with_unsupported_content(self):
- with self.assertRaises(ValueError):
- self.tokenizer.count_tokens(
- [
- types.Content(
- parts=[
- types.Part(
- inline_data=types.Blob(
- data=b'test', mime_type='image/png'
- )
- )
- ]
- )
- ]
- )
-
- def test_count_tokens_with_system_instruction(self):
- self.mock_tokenizer.encode.return_value = [[1, 2, 3], [4, 5]]
- config = types.CountTokensConfig(
- system_instruction=types.Content(
- parts=[types.Part(text='You are a helpful assistant.')]
- )
- )
- result = self.tokenizer.count_tokens('Hello', config=config)
- self.assertEqual(result.total_tokens, 5)
- self.mock_tokenizer.encode.assert_called_once_with(
- ['Hello', 'You are a helpful assistant.']
- )
-
- def test_count_tokens_with_response_schema(self):
- self.mock_tokenizer.encode.return_value = [
- [1],
- [1, 2],
- [1, 2, 3],
- [1, 2, 3, 4],
- [1, 2, 3, 4, 5],
- ]
- schema = types.Schema(
- type=types.Type.OBJECT,
- format='schema_format',
- description='Recipe schema',
- enum=['schema_enum1', 'schema_enum2'],
- properties={
- 'recipe_name': types.Schema(
- type=types.Type.STRING,
- description='Name of the recipe',
- )
- },
- items=types.Schema(
- type=types.Type.STRING,
- description='Item in the recipe',
- ),
- example={
- 'recipe_example': types.Schema(
- type=types.Type.STRING,
- description='example in the recipe',
- )
- },
- required=['recipe_name'],
- )
- config = types.CountTokensConfig(
- generation_config=types.GenerationConfig(response_schema=schema)
- )
- result = self.tokenizer.count_tokens(
- 'Generate a recipe for chocolate chip cookies.', config=config
- )
- self.assertEqual(result.total_tokens, 15)
- self.mock_tokenizer.encode.assert_called_once_with([
- 'Generate a recipe for chocolate chip cookies.',
- 'schema_format',
- 'Recipe schema',
- 'schema_enum1',
- 'schema_enum2',
- 'recipe_name',
- 'Item in the recipe',
- 'recipe_name',
- 'Name of the recipe',
- 'recipe_example',
- ])
-
- def test_count_tokens_with_unsupported_fields_logs_warning(self):
- self.mock_tokenizer.encode.return_value = [[1, 2, 3]]
- content_with_unsupported = types.Content(
- role='user',
- parts=[
- types.Part(text='hello'),
- # executable_code is not supported by _TextsAccumulator
- types.Part(
- executable_code=types.ExecutableCode(
- language='PYTHON', code='print(1)'
- )
- ),
- ],
- )
- with self.assertLogs('google_genai.local_tokenizer', level='WARNING') as cm:
- self.tokenizer.count_tokens(content_with_unsupported)
- self.assertIn(
- 'Content contains unsupported types for token counting', cm.output[0]
- )
-
- def test_compute_tokens_simple_string(self):
- mock_spt = MagicMock()
- mock_spt.pieces = [
- MagicMock(id=1, piece='He'),
- MagicMock(id=2, piece='llo'),
- MagicMock(id=3, piece=' world'),
- ]
- self.mock_tokenizer.EncodeAsImmutableProto.return_value = [mock_spt]
- result = self.tokenizer.compute_tokens('Hello world')
- self.assertEqual(len(result.tokens_info), 1)
- self.assertEqual(result.tokens_info[0].token_ids, [1, 2, 3])
- self.assertEqual(result.tokens_info[0].tokens, [b'He', b'llo', b' world'])
- self.assertEqual(result.tokens_info[0].role, 'user')
- self.mock_tokenizer.EncodeAsImmutableProto.assert_called_once_with(
- ['Hello world']
- )
-
- def test_compute_tokens_with_chat_history(self):
- mock_spt1 = MagicMock()
- mock_spt1.pieces = [MagicMock(id=1, piece='Hello')]
- mock_spt2 = MagicMock()
- mock_spt2.pieces = [
- MagicMock(id=2, piece='Hi'),
- MagicMock(id=3, piece=' there!'),
- ]
- self.mock_tokenizer.EncodeAsImmutableProto.return_value = [
- mock_spt1,
- mock_spt2,
- ]
- history = [
- types.Content(role='user', parts=[types.Part(text='Hello')]),
- types.Content(role='model', parts=[types.Part(text='Hi there!')]),
- ]
- result = self.tokenizer.compute_tokens(history)
- self.assertEqual(len(result.tokens_info), 2)
- self.assertEqual(result.tokens_info[0].token_ids, [1])
- self.assertEqual(result.tokens_info[0].tokens, [b'Hello'])
- self.assertEqual(result.tokens_info[0].role, 'user')
- self.assertEqual(result.tokens_info[1].token_ids, [2, 3])
- self.assertEqual(result.tokens_info[1].tokens, [b'Hi', b' there!'])
- self.assertEqual(result.tokens_info[1].role, 'model')
- self.mock_tokenizer.EncodeAsImmutableProto.assert_called_once_with(
- ['Hello', 'Hi there!']
- )
-
- def test_compute_tokens_with_byte_tokens(self):
- mock_spt = MagicMock()
- mock_spt.pieces = [
- MagicMock(id=1, piece='<0x48>'),
- MagicMock(id=2, piece='ello'),
- ]
- self.mock_tokenizer.EncodeAsImmutableProto.return_value = [mock_spt]
- self.tokenizer._model_proto = sentencepiece_model_pb2.ModelProto(
- pieces=[
- sentencepiece_model_pb2.ModelProto.SentencePiece(),
- sentencepiece_model_pb2.ModelProto.SentencePiece(
- type=sentencepiece_model_pb2.ModelProto.SentencePiece.Type.BYTE
- ),
- sentencepiece_model_pb2.ModelProto.SentencePiece(
- type=sentencepiece_model_pb2.ModelProto.SentencePiece.Type.NORMAL
- ),
- ]
- )
- result = self.tokenizer.compute_tokens('Hello')
- self.assertEqual(len(result.tokens_info), 1)
- self.assertEqual(result.tokens_info[0].token_ids, [1, 2])
- self.assertEqual(result.tokens_info[0].tokens, [b'H', b'ello'])
- self.mock_tokenizer.EncodeAsImmutableProto.assert_called_once_with(
- ['Hello']
- )
-
-
- class TestParseHexByte(unittest.TestCase):
-
- def test_valid_hex(self):
- self.assertEqual(local_tokenizer._parse_hex_byte('<0x41>'), 65)
- self.assertEqual(local_tokenizer._parse_hex_byte('<0xFF>'), 255)
- self.assertEqual(local_tokenizer._parse_hex_byte('<0x00>'), 0)
-
- def test_invalid_length(self):
- with self.assertRaisesRegex(ValueError, 'Invalid byte length'):
- local_tokenizer._parse_hex_byte('<0x41')
- with self.assertRaisesRegex(ValueError, 'Invalid byte length'):
- local_tokenizer._parse_hex_byte('<0x411>')
-
- def test_invalid_format(self):
- with self.assertRaisesRegex(ValueError, 'Invalid byte format'):
- local_tokenizer._parse_hex_byte(' 0x41>')
- with self.assertRaisesRegex(ValueError, 'Invalid byte format'):
- local_tokenizer._parse_hex_byte('<0x41 ')
-
- def test_invalid_hex_value(self):
- with self.assertRaisesRegex(ValueError, 'Invalid hex value'):
- local_tokenizer._parse_hex_byte('<0xFG>')
google/genai/tests/local_tokenizer/test_local_tokenizer_loader.py
@@ -1,235 +0,0 @@
- # Copyright 2025 Google LLC
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #
-
- import unittest
- from unittest.mock import MagicMock, mock_open, patch
-
- import sentencepiece as spm
- from sentencepiece import sentencepiece_model_pb2
-
- from ... import _local_tokenizer_loader as loader
-
- # A minimal valid sentencepiece model proto
- FAKE_MODEL_CONTENT = sentencepiece_model_pb2.ModelProto(
- pieces=[
- sentencepiece_model_pb2.ModelProto.SentencePiece(
- piece="<unk>",
- score=0,
- type=sentencepiece_model_pb2.ModelProto.SentencePiece.Type.UNKNOWN,
- ),
- sentencepiece_model_pb2.ModelProto.SentencePiece(
- piece="<s>",
- score=0,
- type=sentencepiece_model_pb2.ModelProto.SentencePiece.Type.CONTROL,
- ),
- sentencepiece_model_pb2.ModelProto.SentencePiece(
- piece="</s>",
- score=0,
- type=sentencepiece_model_pb2.ModelProto.SentencePiece.Type.CONTROL,
- ),
- sentencepiece_model_pb2.ModelProto.SentencePiece(
- piece="a",
- score=0,
- type=sentencepiece_model_pb2.ModelProto.SentencePiece.Type.NORMAL,
- ),
- ]
- ).SerializeToString()
-
- GEMMA2_HASH = "61a7b147390c64585d6c3543dd6fc636906c9af3865a5548f27f31aee1d4c8e2"
-
-
- class TestGetTokenizerName(unittest.TestCase):
-
- def test_get_tokenizer_name_success(self):
- self.assertEqual(loader.get_tokenizer_name("gemini-2.5-pro"), "gemma3")
- self.assertEqual(
- loader.get_tokenizer_name("gemini-2.5-pro-preview-06-05"), "gemma3"
- )
-
- def test_get_tokenizer_name_unsupported(self):
- with self.assertRaisesRegex(
- ValueError, "Model unsupported-model is not supported"
- ):
- loader.get_tokenizer_name("unsupported-model")
-
-
- @patch("genai._local_tokenizer_loader.os.rename")
- @patch("genai._local_tokenizer_loader.os.makedirs")
- @patch("genai._local_tokenizer_loader.os.remove")
- @patch("genai._local_tokenizer_loader.open", new_callable=mock_open)
- @patch("genai._local_tokenizer_loader.os.path.exists")
- @patch("genai._local_tokenizer_loader.requests.get")
- @patch("genai._local_tokenizer_loader.hashlib.sha256")
- class TestLoaderFunctions(unittest.TestCase):
-
- def setUp(self):
- # Clear caches before each test
- loader.load_model_proto.cache_clear()
- loader.get_sentencepiece.cache_clear()
- # Patch tempfile.gettempdir to control cache location
- self.tempdir_patcher = patch(
- "tempfile.gettempdir", return_value="/tmp/fake_temp_dir"
- )
- self.mock_tempdir = self.tempdir_patcher.start()
-
- def tearDown(self):
- self.tempdir_patcher.stop()
-
- def _setup_get_mock(self, mock_get):
- mock_response = MagicMock()
- mock_response.content = FAKE_MODEL_CONTENT
- mock_response.raise_for_status.return_value = None
- mock_get.return_value = mock_response
-
- def test_load_model_proto_from_url(
- self,
- mock_sha256,
- mock_get,
- mock_exists,
- mock_open_func,
- mock_remove,
- mock_makedirs,
- mock_rename,
- ):
- mock_exists.return_value = False # Don't use cache
- self._setup_get_mock(mock_get)
- mock_sha256.return_value.hexdigest.return_value = GEMMA2_HASH
-
- proto = loader.load_model_proto("gemma2")
-
- self.assertIsInstance(proto, sentencepiece_model_pb2.ModelProto)
- self.assertEqual(len(proto.pieces), 4)
- mock_get.assert_called_once()
- mock_makedirs.assert_called_once()
- mock_open_func.assert_called()
- mock_rename.assert_called_once()
-
- def test_load_model_proto_from_cache(
- self,
- mock_sha256,
- mock_get,
- mock_exists,
- mock_open_func,
- mock_remove,
- mock_makedirs,
- mock_rename,
- ):
- mock_exists.return_value = True # Use cache
- mock_open_func.return_value.read.return_value = FAKE_MODEL_CONTENT
- mock_sha256.return_value.hexdigest.return_value = GEMMA2_HASH
-
- proto = loader.load_model_proto("gemma2")
-
- self.assertIsInstance(proto, sentencepiece_model_pb2.ModelProto)
- mock_get.assert_not_called()
-
- def test_load_model_proto_corrupted_cache(
- self,
- mock_sha256,
- mock_get,
- mock_exists,
- mock_open_func,
- mock_remove,
- mock_makedirs,
- mock_rename,
- ):
- mock_exists.return_value = True # Use cache initially
- self._setup_get_mock(mock_get)
- mock_open_func.return_value.__enter__.return_value.read.return_value = (
- b"corrupted"
- )
-
- # First hash for corrupted cache, second for good download
- mock_sha256.side_effect = [
- MagicMock(hexdigest=MagicMock(return_value="wrong_hash")),
- MagicMock(hexdigest=MagicMock(return_value=GEMMA2_HASH)),
- ]
-
- proto = loader.load_model_proto("gemma2")
-
- self.assertIsInstance(proto, sentencepiece_model_pb2.ModelProto)
- mock_remove.assert_called_once()
- mock_get.assert_called_once()
-
- def test_load_model_proto_bad_hash_from_url(
- self,
- mock_sha256,
- mock_get,
- mock_exists,
- mock_open_func,
- mock_remove,
- mock_makedirs,
- mock_rename,
- ):
- mock_exists.return_value = False
- self._setup_get_mock(mock_get)
- mock_sha256.return_value.hexdigest.return_value = "wrong_hash"
-
- with self.assertRaisesRegex(
- ValueError, "Downloaded model file is corrupted"
- ):
- loader.load_model_proto("gemma2")
-
- def test_load_model_proto_unsupported(self, *args):
- with self.assertRaisesRegex(
- ValueError, "Tokenizer unsupported is not supported"
- ):
- loader.load_model_proto("unsupported")
-
- def test_get_sentencepiece_success(
- self,
- mock_sha256,
- mock_get,
- mock_exists,
- mock_open_func,
- mock_remove,
- mock_makedirs,
- mock_rename,
- ):
- mock_exists.return_value = False
- self._setup_get_mock(mock_get)
- mock_sha256.return_value.hexdigest.return_value = GEMMA2_HASH
-
- processor = loader.get_sentencepiece("gemma2")
-
- self.assertIsInstance(processor, spm.SentencePieceProcessor)
- mock_get.assert_called_once()
-
- def test_get_sentencepiece_unsupported(self, *args):
- with self.assertRaisesRegex(
- ValueError, "Tokenizer unsupported is not supported"
- ):
- loader.get_sentencepiece("unsupported")
-
- def test_get_sentencepiece_caching(
- self,
- mock_sha256,
- mock_get,
- mock_exists,
- mock_open_func,
- mock_remove,
- mock_makedirs,
- mock_rename,
- ):
- mock_exists.return_value = False
- self._setup_get_mock(mock_get)
- mock_sha256.return_value.hexdigest.return_value = GEMMA2_HASH
-
- # Call twice
- loader.get_sentencepiece("gemma2")
- loader.get_sentencepiece("gemma2")
-
- # Should only be loaded once due to lru_cache
- mock_get.assert_called_once()
google/genai/tests/mcp/__init__.py
@@ -1,17 +0,0 @@
- # Copyright 2025 Google LLC
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #
-
-
- """Tests for the Google GenAI SDK's _mcp_utils module."""
google/genai/tests/mcp/test_has_mcp_tool_usage.py
@@ -1,89 +0,0 @@
- # Copyright 2025 Google LLC
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #
-
- import typing
- from typing import Any
-
- from ... import _mcp_utils
- from ... import types
-
- if typing.TYPE_CHECKING:
- from mcp.types import Tool as McpTool
- from mcp import ClientSession as McpClientSession
- else:
- McpTool: typing.Type = Any
- McpClientSession: typing.Type = Any
- try:
- from mcp.types import Tool as McpTool
- from mcp import ClientSession as McpClientSession
- except ImportError:
- McpTool = None
- McpClientSession = None
-
-
- def test_mcp_tools():
- """Test whether the list of tools contains any MCP tools."""
- if McpTool is None:
- return
- mcp_tools = [
- McpTool(
- name='tool',
- description='tool-description',
- inputSchema={
- 'type': 'OBJECT',
- 'properties': {
- 'key1': {'type': 'STRING'},
- 'key2': {'type': 'NUMBER'},
- },
- },
- ),
- ]
- assert _mcp_utils.has_mcp_tool_usage(mcp_tools)
-
-
- def test_mcp_client_session():
- """Test whether the list of tools contains any MCP tools."""
-
- class MockMcpClientSession(McpClientSession):
-
- def __init__(self):
- self._read_stream = None
- self._write_stream = None
-
- mcp_tools = [
- MockMcpClientSession(),
- ]
- assert _mcp_utils.has_mcp_tool_usage(mcp_tools)
-
-
- def test_no_mcp_tools():
- if McpClientSession is None:
- return
- """Test whether the list of tools contains any MCP tools."""
- gemini_tools = [
- types.Tool(
- function_declarations=[
- types.FunctionDeclaration(
- name='tool',
- description='tool-description',
- parameters=types.Schema(
- type='OBJECT',
- properties={},
- ),
- ),
- ],
- ),
- ]
- assert not _mcp_utils.has_mcp_tool_usage(gemini_tools)