LLM-Bridge 1.11.3-py3-none-any.whl → 1.11.4-py3-none-any.whl

This diff compares two publicly available versions of the package as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
--- llm_bridge/client/implementations/gemini/gemini_response_handler.py (1.11.3)
+++ llm_bridge/client/implementations/gemini/gemini_response_handler.py (1.11.4)
@@ -3,6 +3,7 @@ import mimetypes
 from typing import Optional
 
 from google.genai import types
+from google.genai.types import Part
 
 from llm_bridge.client.implementations.gemini.gemini_token_counter import count_gemini_tokens
 from llm_bridge.client.implementations.printing_status import PrintingStatus
@@ -28,35 +29,39 @@ class GeminiResponseHandler:
         citations: list[Citation] = extract_citations(response)
         input_tokens, stage_output_tokens = await count_gemini_tokens(response)
 
-        printing_status = None
+        parts: list[Part] = []
         if candidates := response.candidates:
-            if candidates[0].content.parts:
-                for part in response.candidates[0].content.parts:
-                    if part.text is not None:
-                        # Thought
-                        if part.thought:
-                            printing_status = PrintingStatus.Thought
-                            thought += part.text
-                        # Text
-                        elif not part.thought:
-                            printing_status = PrintingStatus.Response
-                            text += part.text
-                    # Code
-                    if part.executable_code is not None:
-                        code += part.executable_code.code
-                    # Code Output
-                    if part.code_execution_result is not None:
-                        code_output += part.code_execution_result.output
-                    # File
-                    if part.inline_data is not None:
-                        mime_type = part.inline_data.mime_type
-                        extension = mimetypes.guess_extension(mime_type) or ""
-                        file = File(
-                            name=f"generated_file{extension}",
-                            data=base64.b64encode(part.inline_data.data).decode('utf-8'),
-                            type=mime_type,
-                        )
-                        files.append(file)
+            if content := candidates[0].content:
+                if content.parts:
+                    parts = content.parts
+
+        printing_status: PrintingStatus | None = None
+        for part in parts:
+            if part.text is not None:
+                # Thought
+                if part.thought:
+                    printing_status = PrintingStatus.Thought
+                    thought += part.text
+                # Text
+                elif not part.thought:
+                    printing_status = PrintingStatus.Response
+                    text += part.text
+            # Code
+            if part.executable_code is not None:
+                code += part.executable_code.code
+            # Code Output
+            if part.code_execution_result is not None:
+                code_output += part.code_execution_result.output
+            # File
+            if part.inline_data is not None:
+                mime_type = part.inline_data.mime_type
+                extension = mimetypes.guess_extension(mime_type) or ""
+                file = File(
+                    name=f"generated_file{extension}",
+                    data=base64.b64encode(part.inline_data.data).decode('utf-8'),
+                    type=mime_type,
+                )
+                files.append(file)
 
         # Grounding Sources
         if candidates := response.candidates:
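Aside from the new `Part` import and the explicit `PrintingStatus | None` annotation, the substantive change in this hunk is defensive: 1.11.3 dereferenced `candidates[0].content.parts` directly, which raises `AttributeError` whenever a candidate arrives without `content`, while 1.11.4 collects `parts` behind walrus-guarded checks and then iterates over the (possibly empty) list. Below is a minimal, self-contained sketch of that guard pattern; the `Fake*` dataclasses are hypothetical stand-ins for the google-genai response objects, modelling only the attributes the handler touches, not the SDK's real types.

```python
# Sketch of the 1.11.4 guard pattern, using stand-in dataclasses instead of
# the google-genai types (candidates -> content -> parts is the only shape
# the handler relies on).
from dataclasses import dataclass
from typing import Optional


@dataclass
class FakePart:
    text: Optional[str] = None
    thought: Optional[bool] = None


@dataclass
class FakeContent:
    parts: Optional[list[FakePart]] = None


@dataclass
class FakeCandidate:
    content: Optional[FakeContent] = None


@dataclass
class FakeResponse:
    candidates: Optional[list[FakeCandidate]] = None


def extract_parts(response: FakeResponse) -> list[FakePart]:
    # 1.11.3 assumed candidates[0].content was always set; 1.11.4 only
    # collects parts after each level has been checked, so a content-less
    # candidate yields an empty list instead of raising AttributeError.
    parts: list[FakePart] = []
    if candidates := response.candidates:
        if content := candidates[0].content:
            if content.parts:
                parts = content.parts
    return parts


# A response with no candidates, or a candidate without content, no longer crashes.
assert extract_parts(FakeResponse()) == []
assert extract_parts(FakeResponse(candidates=[FakeCandidate()])) == []
ok = extract_parts(
    FakeResponse(candidates=[FakeCandidate(content=FakeContent(parts=[FakePart(text="hi")]))])
)
assert ok[0].text == "hi"
```

With this shape, a response whose first candidate carries no content simply produces no parts, and the loop over parts is skipped rather than aborting the whole handler.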
--- llm_bridge-1.11.3.dist-info/METADATA
+++ llm_bridge-1.11.4.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.11.3
+Version: 1.11.4
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
--- llm_bridge-1.11.3.dist-info/RECORD
+++ llm_bridge-1.11.4.dist-info/RECORD
@@ -9,7 +9,7 @@ llm_bridge/client/implementations/claude/claude_token_counter.py,sha256=m_aoLJkF
 llm_bridge/client/implementations/claude/non_stream_claude_client.py,sha256=1khCk0vJkCQ09Q8wuCqX1ZUV54qcwtTGa21ij8ziyak,2990
 llm_bridge/client/implementations/claude/stream_claude_client.py,sha256=gOvdoSa_pNAbZ882pG4NAOOwNtjth-X4M3Gt34orXww,2005
 llm_bridge/client/implementations/gemini/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-llm_bridge/client/implementations/gemini/gemini_response_handler.py,sha256=LADXq5h_bbuCclp4RTf70YtQ0_9JFRRzo7j4V5Ts7y4,4388
+llm_bridge/client/implementations/gemini/gemini_response_handler.py,sha256=_bejFAjo07s4jBpXBGF5djPs3nxZZjHaDkr4w1S8lTs,4321
 llm_bridge/client/implementations/gemini/gemini_token_counter.py,sha256=GdnwJWPhGZMB_xC0fz88zQRparIHzTemkQoqfDcxVEA,687
 llm_bridge/client/implementations/gemini/non_stream_gemini_client.py,sha256=JGNNpeln42SoXg2vGIC9xG5GGlBh6dIhz4BzYIkgraA,1302
 llm_bridge/client/implementations/gemini/stream_gemini_client.py,sha256=vqPhQdr-jaHXzn-_1PSZfpo96zM-_89XOEXIx7UBBIw,1545
@@ -55,8 +55,8 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
 llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
 llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
 llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
-llm_bridge-1.11.3.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
-llm_bridge-1.11.3.dist-info/METADATA,sha256=iyJlwk0I2O3Qm6hEwEGhTFpsYxKK7GUJZE8X9Zw9Z9c,7849
-llm_bridge-1.11.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-llm_bridge-1.11.3.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
-llm_bridge-1.11.3.dist-info/RECORD,,
+llm_bridge-1.11.4.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
+llm_bridge-1.11.4.dist-info/METADATA,sha256=II8Hf2JeAGqC2jfKWnxlTDS_Q2czQggnn6TuXm9CyF4,7849
+llm_bridge-1.11.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+llm_bridge-1.11.4.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
+llm_bridge-1.11.4.dist-info/RECORD,,