LLM-Bridge 1.15.0.tar.gz → 1.15.0a0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/PKG-INFO +2 -2
  2. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/README.md +1 -1
  3. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/pyproject.toml +1 -1
  4. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/uv.lock +1 -1
  5. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/.gitattributes +0 -0
  6. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/.github/workflows/python-publish.yml +0 -0
  7. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/.gitignore +0 -0
  8. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/LICENSE +0 -0
  9. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/MANIFEST.in +0 -0
  10. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/__init__.py +0 -0
  11. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/__init__.py +0 -0
  12. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/chat_client.py +0 -0
  13. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/__init__.py +0 -0
  14. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/claude/__init__.py +0 -0
  15. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/claude/claude_response_handler.py +0 -0
  16. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
  17. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
  18. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
  19. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
  20. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +0 -0
  21. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
  22. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
  23. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
  24. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/openai/__init__.py +0 -0
  25. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
  26. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +0 -0
  27. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/openai/openai_token_couter.py +0 -0
  28. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +0 -0
  29. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
  30. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/implementations/printing_status.py +0 -0
  31. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/model_client/__init__.py +0 -0
  32. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/model_client/claude_client.py +0 -0
  33. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/model_client/gemini_client.py +0 -0
  34. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/client/model_client/openai_client.py +0 -0
  35. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/__init__.py +0 -0
  36. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/__init__.py +0 -0
  37. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/chat_client_factory.py +0 -0
  38. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
  39. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/media_processor.py +0 -0
  40. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
  41. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +0 -0
  42. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +0 -0
  43. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +0 -0
  44. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/model_client_factory/schema_converter.py +0 -0
  45. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
  46. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
  47. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
  48. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
  49. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +0 -0
  50. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/file_fetch.py +0 -0
  51. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
  52. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
  53. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
  54. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/message_preprocess/file_type_checker.py +0 -0
  55. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/message_preprocess/message_preprocessor.py +0 -0
  56. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/logic/model_prices.py +0 -0
  57. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/resources/__init__.py +0 -0
  58. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/resources/model_prices.json +17 -17
  59. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/type/__init__.py +0 -0
  60. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/type/chat_response.py +0 -0
  61. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/type/message.py +0 -0
  62. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/type/model_message/__init__.py +0 -0
  63. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/type/model_message/claude_message.py +0 -0
  64. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/type/model_message/gemini_message.py +0 -0
  65. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/type/model_message/openai_message.py +0 -0
  66. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
  67. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/type/serializer.py +0 -0
  68. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/tests/__init__.py +0 -0
  69. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/tests/chat_client_factory_test.py +0 -0
  70. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/tests/message_preprocessor_test.py +0 -0
  71. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/usage/.env.example +0 -0
  72. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/usage/main.py +0 -0
  73. {llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/usage/workflow.py +0 -0

{llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: LLM-Bridge
- Version: 1.15.0
+ Version: 1.15.0a0
  Summary: A Bridge for LLMs
  Author-email: windsnow1025 <windsnow1025@gmail.com>
  License-Expression: MIT
@@ -48,7 +48,7 @@ The features listed represent the maximum capabilities of each API type supporte

  | API Type | Input Format | Capabilities | Output Format |
  |----------|--------------------------------|---------------------------------------------------------|-------------------|
- | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text, Image |
+ | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
  | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search, Code Execution, Structured Output | Text, Image, File |
  | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
  | Grok | Text, Image | | Text |

{llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/README.md
@@ -24,7 +24,7 @@ The features listed represent the maximum capabilities of each API type supporte

  | API Type | Input Format | Capabilities | Output Format |
  |----------|--------------------------------|---------------------------------------------------------|-------------------|
- | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text, Image |
+ | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
  | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search, Code Execution, Structured Output | Text, Image, File |
  | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
  | Grok | Text, Image | | Text |

{llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

  [project]
  name = "LLM-Bridge"
- version = "1.15.0"
+ version = "1.15.0-alpha.0"
  dependencies = [
  "fastapi",
  "httpx",

{llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/uv.lock
@@ -357,7 +357,7 @@ wheels = [

  [[package]]
  name = "llm-bridge"
- version = "1.15.0a0"
+ version = "1.14.1"
  source = { editable = "." }
  dependencies = [
  { name = "anthropic" },

{llm_bridge-1.15.0 → llm_bridge-1.15.0a0}/llm_bridge/resources/model_prices.json
@@ -1,51 +1,51 @@
  [
  {
- "apiType": "Gemini-Paid",
+ "apiType": "Gemini-Vertex",
  "model": "gemini-3-pro-preview",
  "input": 4,
  "output": 18
  },
  {
- "apiType": "Gemini-Paid",
+ "apiType": "Gemini-Vertex",
  "model": "gemini-3-flash-preview",
  "input": 1,
  "output": 3
  },
  {
- "apiType": "Gemini-Paid",
+ "apiType": "Gemini-Vertex",
  "model": "gemini-3-pro-image-preview",
  "input": 2,
  "output": 120
  },
+ {
+ "apiType": "Gemini-Free",
+ "model": "gemini-3-flash-preview",
+ "input": 0,
+ "output": 0
+ },
  {
  "apiType": "Gemini-Paid",
- "model": "gemini-flash-latest",
+ "model": "gemini-3-flash-preview",
  "input": 1,
- "output": 2.5
+ "output": 3
  },
  {
- "apiType": "Gemini-Vertex",
+ "apiType": "Gemini-Paid",
  "model": "gemini-3-pro-preview",
  "input": 4,
  "output": 18
  },
  {
- "apiType": "Gemini-Vertex",
- "model": "gemini-3-flash-preview",
- "input": 1,
- "output": 3
- },
- {
- "apiType": "Gemini-Vertex",
+ "apiType": "Gemini-Paid",
  "model": "gemini-3-pro-image-preview",
  "input": 2,
  "output": 120
  },
  {
- "apiType": "Gemini-Free",
- "model": "gemini-3-flash-preview",
- "input": 0,
- "output": 0
+ "apiType": "Gemini-Paid",
+ "model": "gemini-flash-latest",
+ "input": 1,
+ "output": 2.5
  },
  {
  "apiType": "OpenAI",