LLM-Bridge 1.9.0a0__tar.gz → 1.9.0a1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1/LLM_Bridge.egg-info}/PKG-INFO +9 -3
  2. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/LLM_Bridge.egg-info/requires.txt +2 -2
  3. {llm_bridge-1.9.0a0/LLM_Bridge.egg-info → llm_bridge-1.9.0a1}/PKG-INFO +9 -3
  4. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/README.md +6 -0
  5. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +9 -9
  6. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +4 -3
  7. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +10 -2
  8. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/pyproject.toml +3 -3
  9. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/LICENSE +0 -0
  10. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/LLM_Bridge.egg-info/SOURCES.txt +0 -0
  11. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/LLM_Bridge.egg-info/dependency_links.txt +0 -0
  12. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/LLM_Bridge.egg-info/top_level.txt +0 -0
  13. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/MANIFEST.in +0 -0
  14. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/__init__.py +0 -0
  15. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/__init__.py +0 -0
  16. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/chat_client.py +0 -0
  17. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/__init__.py +0 -0
  18. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/claude/__init__.py +0 -0
  19. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/claude/claude_stream_response_handler.py +0 -0
  20. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
  21. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
  22. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
  23. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
  24. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
  25. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
  26. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
  27. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/openai/__init__.py +0 -0
  28. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
  29. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/openai/openai_token_couter.py +0 -0
  30. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
  31. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/implementations/printing_status.py +0 -0
  32. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/model_client/__init__.py +0 -0
  33. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/model_client/claude_client.py +0 -0
  34. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/model_client/gemini_client.py +0 -0
  35. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/client/model_client/openai_client.py +0 -0
  36. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/__init__.py +0 -0
  37. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/__init__.py +0 -0
  38. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/chat_client_factory.py +0 -0
  39. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
  40. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/media_processor.py +0 -0
  41. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
  42. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +0 -0
  43. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +0 -0
  44. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +0 -0
  45. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
  46. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
  47. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
  48. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
  49. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +0 -0
  50. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/file_fetch.py +0 -0
  51. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
  52. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
  53. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
  54. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/message_preprocess/file_type_checker.py +0 -0
  55. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/message_preprocess/message_preprocessor.py +0 -0
  56. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/logic/model_prices.py +0 -0
  57. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/resources/__init__.py +0 -0
  58. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/resources/model_prices.json +0 -0
  59. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/type/__init__.py +0 -0
  60. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/type/chat_response.py +0 -0
  61. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/type/message.py +0 -0
  62. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/type/model_message/__init__.py +0 -0
  63. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/type/model_message/claude_message.py +0 -0
  64. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/type/model_message/gemini_message.py +0 -0
  65. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/type/model_message/openai_message.py +0 -0
  66. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
  67. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/llm_bridge/type/serializer.py +0 -0
  68. {llm_bridge-1.9.0a0 → llm_bridge-1.9.0a1}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: LLM-Bridge
3
- Version: 1.9.0a0
3
+ Version: 1.9.0a1
4
4
  Summary: A Bridge for LLMs
5
5
  Author-email: windsnow1025 <windsnow1025@gmail.com>
6
6
  License-Expression: MIT
@@ -13,10 +13,10 @@ License-File: LICENSE
13
13
  Requires-Dist: fastapi
14
14
  Requires-Dist: httpx
15
15
  Requires-Dist: tenacity
16
- Requires-Dist: openai==1.99.6
16
+ Requires-Dist: openai==1.106.1
17
17
  Requires-Dist: tiktoken==0.11.0
18
18
  Requires-Dist: google-genai==1.28.0
19
- Requires-Dist: anthropic==0.62.0
19
+ Requires-Dist: anthropic==0.66.0
20
20
  Requires-Dist: PyMuPDF
21
21
  Requires-Dist: docxlatex>=1.1.1
22
22
  Requires-Dist: openpyxl
@@ -58,6 +58,12 @@ The features listed represent the maximum capabilities of each API type supporte
58
58
  | Claude | Text, Image, PDF | Thinking, Web Search | Text |
59
59
  | Grok | Text, Image | | Text |
60
60
 
61
+ #### Planned Features
62
+
63
+ - OpenAI: Web Search: Citations, Image Output
64
+ - Gemini: Code Execution: Code, Code Output
65
+ - Claude: Code Execution, File Output
66
+
61
67
  ## Installation
62
68
 
63
69
  ```bash
@@ -1,10 +1,10 @@
1
1
  fastapi
2
2
  httpx
3
3
  tenacity
4
- openai==1.99.6
4
+ openai==1.106.1
5
5
  tiktoken==0.11.0
6
6
  google-genai==1.28.0
7
- anthropic==0.62.0
7
+ anthropic==0.66.0
8
8
  PyMuPDF
9
9
  docxlatex>=1.1.1
10
10
  openpyxl
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: LLM-Bridge
3
- Version: 1.9.0a0
3
+ Version: 1.9.0a1
4
4
  Summary: A Bridge for LLMs
5
5
  Author-email: windsnow1025 <windsnow1025@gmail.com>
6
6
  License-Expression: MIT
@@ -13,10 +13,10 @@ License-File: LICENSE
13
13
  Requires-Dist: fastapi
14
14
  Requires-Dist: httpx
15
15
  Requires-Dist: tenacity
16
- Requires-Dist: openai==1.99.6
16
+ Requires-Dist: openai==1.106.1
17
17
  Requires-Dist: tiktoken==0.11.0
18
18
  Requires-Dist: google-genai==1.28.0
19
- Requires-Dist: anthropic==0.62.0
19
+ Requires-Dist: anthropic==0.66.0
20
20
  Requires-Dist: PyMuPDF
21
21
  Requires-Dist: docxlatex>=1.1.1
22
22
  Requires-Dist: openpyxl
@@ -58,6 +58,12 @@ The features listed represent the maximum capabilities of each API type supporte
58
58
  | Claude | Text, Image, PDF | Thinking, Web Search | Text |
59
59
  | Grok | Text, Image | | Text |
60
60
 
61
+ #### Planned Features
62
+
63
+ - OpenAI: Web Search: Citations, Image Output
64
+ - Gemini: Code Execution: Code, Code Output
65
+ - Claude: Code Execution, File Output
66
+
61
67
  ## Installation
62
68
 
63
69
  ```bash
@@ -28,6 +28,12 @@ The features listed represent the maximum capabilities of each API type supporte
28
28
  | Claude | Text, Image, PDF | Thinking, Web Search | Text |
29
29
  | Grok | Text, Image | | Text |
30
30
 
31
+ #### Planned Features
32
+
33
+ - OpenAI: Web Search: Citations, Image Output
34
+ - Gemini: Code Execution: Code, Code Output
35
+ - Claude: Code Execution, File Output
36
+
31
37
  ## Installation
32
38
 
33
39
  ```bash
@@ -18,13 +18,13 @@ class GeminiResponseHandler:
18
18
  self,
19
19
  response: types.GenerateContentResponse,
20
20
  ) -> ChatResponse:
21
- text = ""
22
- thought = ""
23
- code = ""
24
- code_output = ""
25
- display = None
26
- image_base64 = None
27
- citations = extract_citations(response)
21
+ text: str = ""
22
+ thought: str = ""
23
+ code: str = ""
24
+ code_output: str = ""
25
+ image: Optional[str] = None
26
+ display: Optional[str] = None
27
+ citations: list[Citation] = extract_citations(response)
28
28
  input_tokens, stage_output_tokens = await count_gemini_tokens(response)
29
29
 
30
30
  printing_status = None
@@ -48,7 +48,7 @@ class GeminiResponseHandler:
48
48
  code_output += part.code_execution_result.output
49
49
  # Image
50
50
  if part.inline_data is not None:
51
- image_base64 = base64.b64encode(part.inline_data.data).decode('utf-8')
51
+ image = base64.b64encode(part.inline_data.data).decode('utf-8')
52
52
 
53
53
  # Grounding Sources
54
54
  if candidates := response.candidates:
@@ -74,7 +74,7 @@ class GeminiResponseHandler:
74
74
  thought=thought,
75
75
  code=code,
76
76
  code_output=code_output,
77
- image=image_base64,
77
+ image=image,
78
78
  display=display,
79
79
  citations=citations,
80
80
  input_tokens=input_tokens,
@@ -1,6 +1,7 @@
1
1
  import logging
2
2
  import re
3
3
  from pprint import pprint
4
+ from typing import Optional
4
5
 
5
6
  import httpx
6
7
  import openai
@@ -22,8 +23,8 @@ def process_openai_responses_non_stream_response(
22
23
 
23
24
  output_list = response.output
24
25
 
25
- text = ""
26
- image = None
26
+ text: str = ""
27
+ image: Optional[str] = None
27
28
  citations: list[Citation] = []
28
29
 
29
30
  for output in output_list:
@@ -40,7 +41,7 @@ def process_openai_responses_non_stream_response(
40
41
  # url=annotation.url
41
42
  # )
42
43
  # )
43
- # Unable to test due to organization verification requirement
44
+ # Image Generation untestable due to organization verification requirement
44
45
  # if output.type == "image_generation_call":
45
46
  # image = output.result
46
47
 
@@ -1,7 +1,7 @@
1
1
  import logging
2
2
  import re
3
3
  from pprint import pprint
4
- from typing import AsyncGenerator
4
+ from typing import AsyncGenerator, Optional
5
5
 
6
6
  import httpx
7
7
  import openai
@@ -17,7 +17,8 @@ from llm_bridge.type.serializer import serialize
17
17
 
18
18
 
19
19
  def process_delta(event: ResponseStreamEvent) -> ChatResponse:
20
- text = ""
20
+ text: str = ""
21
+ image: Optional[str] = None
21
22
  citations: list[Citation] = []
22
23
 
23
24
  if event.type == "response.output_text.delta":
@@ -25,9 +26,14 @@ def process_delta(event: ResponseStreamEvent) -> ChatResponse:
25
26
  # Citation is unavailable in OpenAI Responses API
26
27
  if event.type == "response.output_text.annotation.added":
27
28
  pass
29
+ # Image Generation untestable due to organization verification requirement
30
+ # if event.type == "response.image_generation_call.partial_image":
31
+ # image = event.partial_image_b64
28
32
 
29
33
  chat_response = ChatResponse(
30
34
  text=text,
35
+ image=image,
36
+ citations=citations,
31
37
  )
32
38
  return chat_response
33
39
 
@@ -42,6 +48,8 @@ async def generate_chunk(
42
48
  output_tokens = count_openai_output_tokens(chat_response)
43
49
  yield ChatResponse(
44
50
  text=chat_response.text,
51
+ image=chat_response.image,
52
+ citations=chat_response.citations,
45
53
  input_tokens=input_tokens,
46
54
  output_tokens=output_tokens,
47
55
  )
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "LLM-Bridge"
7
- version = "1.9.0-alpha.0"
7
+ version = "1.9.0-alpha.1"
8
8
  authors = [
9
9
  {name = "windsnow1025", email = "windsnow1025@gmail.com"}
10
10
  ]
@@ -21,10 +21,10 @@ dependencies = [
21
21
  "fastapi",
22
22
  "httpx",
23
23
  "tenacity",
24
- "openai==1.99.6",
24
+ "openai==1.106.1",
25
25
  "tiktoken==0.11.0",
26
26
  "google-genai==1.28.0",
27
- "anthropic==0.62.0",
27
+ "anthropic==0.66.0",
28
28
  "PyMuPDF",
29
29
  "docxlatex>=1.1.1",
30
30
  "openpyxl",
File without changes
File without changes
File without changes