LLM-Bridge 1.9.0a0 → 1.9.0a1 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- a/llm_bridge/client/implementations/gemini/gemini_response_handler.py
+++ b/llm_bridge/client/implementations/gemini/gemini_response_handler.py
@@ -18,13 +18,13 @@ class GeminiResponseHandler:
         self,
         response: types.GenerateContentResponse,
     ) -> ChatResponse:
-        text = ""
-        thought = ""
-        code = ""
-        code_output = ""
-        display = None
-        image_base64 = None
-        citations = extract_citations(response)
+        text: str = ""
+        thought: str = ""
+        code: str = ""
+        code_output: str = ""
+        image: Optional[str] = None
+        display: Optional[str] = None
+        citations: list[Citation] = extract_citations(response)

        input_tokens, stage_output_tokens = await count_gemini_tokens(response)

        printing_status = None
@@ -48,7 +48,7 @@ class GeminiResponseHandler:
                code_output += part.code_execution_result.output
            # Image
            if part.inline_data is not None:
-                image_base64 = base64.b64encode(part.inline_data.data).decode('utf-8')
+                image = base64.b64encode(part.inline_data.data).decode('utf-8')

        # Grounding Sources
        if candidates := response.candidates:
@@ -74,7 +74,7 @@ class GeminiResponseHandler:
            thought=thought,
            code=code,
            code_output=code_output,
-            image=image_base64,
+            image=image,
            display=display,
            citations=citations,
            input_tokens=input_tokens,
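In the gemini_response_handler.py hunks above, the accumulator variables gain explicit annotations and the local `image_base64` is renamed to `image`, matching the ChatResponse keyword it ultimately fills. A minimal sketch of the encoding step, assuming a google-genai part object shaped like the `inline_data` parts shown in the hunk (the standalone helper and its name are illustrative, not part of the package):

```python
import base64
from typing import Optional


def encode_inline_image(part) -> Optional[str]:
    # Gemini returns raw bytes in part.inline_data.data; the handler
    # base64-encodes them into the ChatResponse "image" field.
    if part.inline_data is None:
        return None
    return base64.b64encode(part.inline_data.data).decode("utf-8")
```

Only the local variable name changes; the `image=` keyword passed to ChatResponse in the third hunk is the same before and after.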
--- a/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py
+++ b/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py
@@ -1,6 +1,7 @@
 import logging
 import re
 from pprint import pprint
+from typing import Optional

 import httpx
 import openai
@@ -22,8 +23,8 @@ def process_openai_responses_non_stream_response(

    output_list = response.output

-    text = ""
-    image = None
+    text: str = ""
+    image: Optional[str] = None
    citations: list[Citation] = []

    for output in output_list:
@@ -40,7 +41,7 @@ def process_openai_responses_non_stream_response(
        # url=annotation.url
        # )
        # )
-        # Unable to test due to organization verification requirement
+        # Image Generation untestable due to organization verification requirement
        # if output.type == "image_generation_call":
        # image = output.result
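The comment rewrite in non_stream_openai_responses_client.py clarifies that it is specifically image generation that cannot be exercised without organization verification. If that branch were enabled, the commented-out code above implies an accumulation like the following sketch (a hypothetical helper; `output.type` and `output.result` are taken from the commented lines, not verified against the live API):

```python
from typing import Optional


def extract_generated_image(output_list) -> Optional[str]:
    # Keep the last image_generation_call result, if any (a base64 string),
    # mirroring the commented-out branch in the diff above.
    image: Optional[str] = None
    for output in output_list:
        if getattr(output, "type", None) == "image_generation_call":
            image = output.result
    return image
```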
 
--- a/llm_bridge/client/implementations/openai/steam_openai_responses_client.py
+++ b/llm_bridge/client/implementations/openai/steam_openai_responses_client.py
@@ -1,7 +1,7 @@
 import logging
 import re
 from pprint import pprint
-from typing import AsyncGenerator
+from typing import AsyncGenerator, Optional

 import httpx
 import openai
@@ -17,7 +17,8 @@ from llm_bridge.type.serializer import serialize


 def process_delta(event: ResponseStreamEvent) -> ChatResponse:
-    text = ""
+    text: str = ""
+    image: Optional[str] = None
    citations: list[Citation] = []

    if event.type == "response.output_text.delta":
@@ -25,9 +26,14 @@ def process_delta(event: ResponseStreamEvent) -> ChatResponse:
    # Citation is unavailable in OpenAI Responses API
    if event.type == "response.output_text.annotation.added":
        pass
+    # Image Generation untestable due to organization verification requirement
+    # if event.type == "response.image_generation_call.partial_image":
+    #     image = event.partial_image_b64

    chat_response = ChatResponse(
        text=text,
+        image=image,
+        citations=citations,
    )
    return chat_response

@@ -42,6 +48,8 @@ async def generate_chunk(
        output_tokens = count_openai_output_tokens(chat_response)
        yield ChatResponse(
            text=chat_response.text,
+            image=chat_response.image,
+            citations=chat_response.citations,
            input_tokens=input_tokens,
            output_tokens=output_tokens,
        )
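With process_delta and generate_chunk now forwarding `image` and `citations` on every chunk, a downstream consumer can fold the stream into one aggregate response. A hypothetical consumer sketch, assuming `chunks` is an async iterable of ChatResponse objects carrying the fields shown in the hunks (this accumulator is illustrative, not part of LLM-Bridge):

```python
from typing import Optional


async def fold_stream(chunks) -> dict:
    # chunks: async iterable of ChatResponse-like objects with text, image,
    # citations, input_tokens, output_tokens (see generate_chunk above).
    text = ""
    image: Optional[str] = None
    citations: list = []
    input_tokens = 0
    output_tokens = 0
    async for chunk in chunks:
        text += chunk.text or ""
        if getattr(chunk, "image", None) is not None:
            image = chunk.image
        citations.extend(getattr(chunk, "citations", None) or [])
        input_tokens = getattr(chunk, "input_tokens", None) or input_tokens
        output_tokens = getattr(chunk, "output_tokens", None) or output_tokens
    return {
        "text": text,
        "image": image,
        "citations": citations,
        "input_tokens": input_tokens,
        "output_tokens": output_tokens,
    }
```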
--- a/llm_bridge-1.9.0a0.dist-info/METADATA
+++ b/llm_bridge-1.9.0a1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.9.0a0
+Version: 1.9.0a1
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
@@ -13,10 +13,10 @@ License-File: LICENSE
 Requires-Dist: fastapi
 Requires-Dist: httpx
 Requires-Dist: tenacity
-Requires-Dist: openai==1.99.6
+Requires-Dist: openai==1.106.1
 Requires-Dist: tiktoken==0.11.0
 Requires-Dist: google-genai==1.28.0
-Requires-Dist: anthropic==0.62.0
+Requires-Dist: anthropic==0.66.0
 Requires-Dist: PyMuPDF
 Requires-Dist: docxlatex>=1.1.1
 Requires-Dist: openpyxl
@@ -58,6 +58,12 @@ The features listed represent the maximum capabilities of each API type supporte
 | Claude | Text, Image, PDF | Thinking, Web Search | Text |
 | Grok | Text, Image | | Text |

+#### Planned Features
+
+- OpenAI: Web Search: Citations, Image Output
+- Gemini: Code Execution: Code, Code Output
+- Claude: Code Execution, File Output
+
 ## Installation

 ```bash
--- a/llm_bridge-1.9.0a0.dist-info/RECORD
+++ b/llm_bridge-1.9.0a1.dist-info/RECORD
@@ -9,15 +9,15 @@ llm_bridge/client/implementations/claude/claude_token_counter.py,sha256=g8M7BFY2
 llm_bridge/client/implementations/claude/non_stream_claude_client.py,sha256=xnge1J-j_Er4K4L1UxhjuxAs_Pl6vralxTKk9yItwjI,2500
 llm_bridge/client/implementations/claude/stream_claude_client.py,sha256=q4w1UYc1yZJw5UFOtnxCoeg8MFp5soc1d57YiCTCCGE,2109
 llm_bridge/client/implementations/gemini/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-llm_bridge/client/implementations/gemini/gemini_response_handler.py,sha256=2fnvtNecRlkwxwht93k93r-0uY1Zz86ffGbRAFNnyjk,3989
+llm_bridge/client/implementations/gemini/gemini_response_handler.py,sha256=eqXpIx1xJK5VZtuUlye5kIVjLr1YWBU9koq2HEEUX9s,4034
 llm_bridge/client/implementations/gemini/gemini_token_counter.py,sha256=M_mlrtu_dZTgEG9JgRaPDVyXqFtHSSVAIhsknhOaVrs,504
 llm_bridge/client/implementations/gemini/non_stream_gemini_client.py,sha256=JGNNpeln42SoXg2vGIC9xG5GGlBh6dIhz4BzYIkgraA,1302
 llm_bridge/client/implementations/gemini/stream_gemini_client.py,sha256=vqPhQdr-jaHXzn-_1PSZfpo96zM-_89XOEXIx7UBBIw,1545
 llm_bridge/client/implementations/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/client/implementations/openai/non_stream_openai_client.py,sha256=aceJm6FF6VdzVRECzJyTY8-aQjCekhhbrMPEcUN24fo,2171
-llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py,sha256=xUQqeDTwsf61IV5V0MFN0C-q1KGK5vlNJFetoWehfEk,3534
+llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py,sha256=Ecd0dLLDXMqNEBut_t74VPDZlLObbEOoyt3vkttJEug,3595
 llm_bridge/client/implementations/openai/openai_token_couter.py,sha256=pWsuaUjoqXjnptVlRma-dItczEo9DMw2o_9uF7FPVAk,1449
-llm_bridge/client/implementations/openai/steam_openai_responses_client.py,sha256=SbfRNE03JacBCD2_u0pGt4JSXqGchDLAxbKvP1KIVhU,3177
+llm_bridge/client/implementations/openai/steam_openai_responses_client.py,sha256=52re50oU1ArIwWuocDSUN6TNDtZIP348qt4wjO3qj30,3560
 llm_bridge/client/implementations/openai/stream_openai_client.py,sha256=Izq4xH9EuLjUCBJsuSr6U4Kj6FN5c7w_oHf9wmQatXE,2988
 llm_bridge/client/model_client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/client/model_client/claude_client.py,sha256=cuYORseQY8HVt-COh2J0C_mhqPehDB3A4G4vrunoSFA,1352
@@ -55,8 +55,8 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
 llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
 llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
 llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
-llm_bridge-1.9.0a0.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
-llm_bridge-1.9.0a0.dist-info/METADATA,sha256=J2G_cF3GpKT83q7MTyUYkA-2Fi2Bgq1iKh0zc43Ydaw,7697
-llm_bridge-1.9.0a0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-llm_bridge-1.9.0a0.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
-llm_bridge-1.9.0a0.dist-info/RECORD,,
+llm_bridge-1.9.0a1.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
+llm_bridge-1.9.0a1.dist-info/METADATA,sha256=Dw18PNKM4wo3O2xeRTG_edhsi0GNd2jP1ss-rOxAtL4,7850
+llm_bridge-1.9.0a1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+llm_bridge-1.9.0a1.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
+llm_bridge-1.9.0a1.dist-info/RECORD,,