LLM-Bridge 1.7.16.tar.gz → 1.7.18.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. {llm_bridge-1.7.16 → llm_bridge-1.7.18/LLM_Bridge.egg-info}/PKG-INFO +11 -6
  2. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/LLM_Bridge.egg-info/requires.txt +4 -4
  3. {llm_bridge-1.7.16/LLM_Bridge.egg-info → llm_bridge-1.7.18}/PKG-INFO +11 -6
  4. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/README.md +6 -1
  5. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/media_processor.py +4 -3
  6. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +1 -1
  7. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +10 -2
  8. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/message_preprocess/file_type_checker.py +2 -1
  9. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/message_preprocess/message_preprocessor.py +1 -1
  10. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/pyproject.toml +5 -5
  11. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/LICENSE +0 -0
  12. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/LLM_Bridge.egg-info/SOURCES.txt +0 -0
  13. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/LLM_Bridge.egg-info/dependency_links.txt +0 -0
  14. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/LLM_Bridge.egg-info/top_level.txt +0 -0
  15. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/MANIFEST.in +0 -0
  16. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/__init__.py +0 -0
  17. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/__init__.py +0 -0
  18. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/chat_client.py +0 -0
  19. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/__init__.py +0 -0
  20. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/claude/__init__.py +0 -0
  21. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/claude/claude_stream_response_handler.py +0 -0
  22. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
  23. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
  24. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
  25. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
  26. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +0 -0
  27. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
  28. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
  29. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
  30. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/openai/__init__.py +0 -0
  31. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
  32. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +0 -0
  33. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/openai/openai_token_couter.py +0 -0
  34. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +0 -0
  35. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
  36. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/implementations/printing_status.py +0 -0
  37. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/model_client/__init__.py +0 -0
  38. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/model_client/claude_client.py +0 -0
  39. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/model_client/gemini_client.py +0 -0
  40. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/client/model_client/openai_client.py +0 -0
  41. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/__init__.py +0 -0
  42. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/__init__.py +0 -0
  43. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/chat_client_factory.py +0 -0
  44. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
  45. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
  46. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +0 -0
  47. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +0 -0
  48. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
  49. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
  50. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
  51. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
  52. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/file_fetch.py +0 -0
  53. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
  54. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
  55. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
  56. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/logic/model_prices.py +0 -0
  57. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/resources/__init__.py +0 -0
  58. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/resources/model_prices.json +0 -0
  59. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/type/__init__.py +0 -0
  60. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/type/chat_response.py +0 -0
  61. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/type/message.py +0 -0
  62. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/type/model_message/__init__.py +0 -0
  63. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/type/model_message/claude_message.py +0 -0
  64. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/type/model_message/gemini_message.py +0 -0
  65. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/type/model_message/openai_message.py +0 -0
  66. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
  67. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/llm_bridge/type/serializer.py +0 -0
  68. {llm_bridge-1.7.16 → llm_bridge-1.7.18}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: LLM-Bridge
3
- Version: 1.7.16
3
+ Version: 1.7.18
4
4
  Summary: A Bridge for LLMs
5
5
  Author-email: windsnow1025 <windsnow1025@gmail.com>
6
6
  License-Expression: MIT
@@ -13,10 +13,10 @@ License-File: LICENSE
13
13
  Requires-Dist: fastapi
14
14
  Requires-Dist: httpx
15
15
  Requires-Dist: tenacity
16
- Requires-Dist: openai
17
- Requires-Dist: tiktoken
18
- Requires-Dist: google-genai
19
- Requires-Dist: anthropic
16
+ Requires-Dist: openai==1.99.6
17
+ Requires-Dist: tiktoken==0.11.0
18
+ Requires-Dist: google-genai==1.28.0
19
+ Requires-Dist: anthropic==0.62.0
20
20
  Requires-Dist: PyMuPDF
21
21
  Requires-Dist: docxlatex>=1.1.1
22
22
  Requires-Dist: openpyxl
@@ -72,10 +72,15 @@ pytest
72
72
 
73
73
  ## Quick Start
74
74
 
75
- See `./usage/`
75
+ ### Setup
76
+
77
+ 1. Copy `./.env.example` and rename it to `./.env`, then fill in the environment variables.
78
+ 2. Install requirements: `pip install -r requirements.txt`
76
79
 
77
80
  ### Workflow
78
81
 
82
+ See `./usage/`
83
+
79
84
  ```python
80
85
  from typing import AsyncGenerator
81
86
 
@@ -1,10 +1,10 @@
1
1
  fastapi
2
2
  httpx
3
3
  tenacity
4
- openai
5
- tiktoken
6
- google-genai
7
- anthropic
4
+ openai==1.99.6
5
+ tiktoken==0.11.0
6
+ google-genai==1.28.0
7
+ anthropic==0.62.0
8
8
  PyMuPDF
9
9
  docxlatex>=1.1.1
10
10
  openpyxl
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: LLM-Bridge
3
- Version: 1.7.16
3
+ Version: 1.7.18
4
4
  Summary: A Bridge for LLMs
5
5
  Author-email: windsnow1025 <windsnow1025@gmail.com>
6
6
  License-Expression: MIT
@@ -13,10 +13,10 @@ License-File: LICENSE
13
13
  Requires-Dist: fastapi
14
14
  Requires-Dist: httpx
15
15
  Requires-Dist: tenacity
16
- Requires-Dist: openai
17
- Requires-Dist: tiktoken
18
- Requires-Dist: google-genai
19
- Requires-Dist: anthropic
16
+ Requires-Dist: openai==1.99.6
17
+ Requires-Dist: tiktoken==0.11.0
18
+ Requires-Dist: google-genai==1.28.0
19
+ Requires-Dist: anthropic==0.62.0
20
20
  Requires-Dist: PyMuPDF
21
21
  Requires-Dist: docxlatex>=1.1.1
22
22
  Requires-Dist: openpyxl
@@ -72,10 +72,15 @@ pytest
72
72
 
73
73
  ## Quick Start
74
74
 
75
- See `./usage/`
75
+ ### Setup
76
+
77
+ 1. Copy `./.env.example` and rename it to `./.env`, then fill in the environment variables.
78
+ 2. Install requirements: `pip install -r requirements.txt`
76
79
 
77
80
  ### Workflow
78
81
 
82
+ See `./usage/`
83
+
79
84
  ```python
80
85
  from typing import AsyncGenerator
81
86
 
@@ -42,10 +42,15 @@ pytest
42
42
 
43
43
  ## Quick Start
44
44
 
45
- See `./usage/`
45
+ ### Setup
46
+
47
+ 1. Copy `./.env.example` and rename it to `./.env`, then fill in the environment variables.
48
+ 2. Install requirements: `pip install -r requirements.txt`
46
49
 
47
50
  ### Workflow
48
51
 
52
+ See `./usage/`
53
+
49
54
  ```python
50
55
  from typing import AsyncGenerator
51
56
 
@@ -8,10 +8,11 @@ async def get_raw_content_from_url(req_url: str) -> tuple[bytes, str]:
8
8
  return file_data, media_type
9
9
 
10
10
 
11
+ # Base64 Encoded
11
12
  async def get_encoded_content_from_url(req_url: str) -> tuple[str, str]:
12
- img_data, media_type = await get_raw_content_from_url(req_url)
13
- base64_image = base64.b64encode(img_data).decode('utf-8')
14
- return base64_image, media_type
13
+ media_data, media_type = await get_raw_content_from_url(req_url)
14
+ base64_media = base64.b64encode(media_data).decode('utf-8')
15
+ return base64_media, media_type
15
16
 
16
17
 
17
18
  async def get_openai_image_content_from_url(req_img_url: str) -> str:
@@ -44,7 +44,7 @@ async def create_openai_client(
44
44
  else:
45
45
  raise HTTPException(status_code=500, detail="API Type not matched")
46
46
 
47
- if api_type in ("OpenAI", "OpenAI-Azure"):
47
+ if api_type in ("OpenAI", "OpenAI-Azure", "Grok"):
48
48
  use_responses_api = True
49
49
  else:
50
50
  use_responses_api = False
@@ -1,8 +1,8 @@
1
1
  from openai.types.responses import ResponseInputTextParam, ResponseInputImageParam, ResponseOutputTextParam, \
2
- ResponseInputContentParam, EasyInputMessageParam, ResponseOutputMessageParam
2
+ ResponseInputContentParam, EasyInputMessageParam, ResponseOutputMessageParam, ResponseInputFileParam
3
3
 
4
4
  from llm_bridge.logic.chat_generate import media_processor
5
- from llm_bridge.logic.message_preprocess.file_type_checker import get_file_type
5
+ from llm_bridge.logic.message_preprocess.file_type_checker import get_file_type, get_file_name
6
6
  from llm_bridge.type.message import Message, ContentType
7
7
  from llm_bridge.type.model_message.openai_responses_message import OpenAIResponsesMessage
8
8
 
@@ -29,6 +29,14 @@ async def convert_message_to_openai_responses(message: Message) -> OpenAIRespons
29
29
  detail="auto"
30
30
  )
31
31
  content.append(image_content)
32
+ elif sub_type == "pdf":
33
+ file_data, _ = await media_processor.get_encoded_content_from_url(file_url)
34
+ pdf_content = ResponseInputFileParam(
35
+ type="input_file",
36
+ filename=get_file_name(file_url),
37
+ file_data=f"data:application/pdf;base64,{file_data}",
38
+ )
39
+ content.append(pdf_content)
32
40
  # TODO: Responses API is currently unsupported for audio input
33
41
  # elif file_type == "audio":
34
42
  # encoded_string = await media_processor.get_gpt_audio_content_from_url(file_url)
@@ -41,7 +41,8 @@ async def get_file_type(file_url: str) -> tuple[str, str]:
41
41
  return 'unknown', 'unknown'
42
42
 
43
43
 
44
- def get_file_name(file_url) -> str:
44
+ # Without Timestamp
45
+ def get_file_name(file_url: str) -> str:
45
46
  base_name = os.path.basename(file_url)
46
47
  match = re.search(r'-(.+)', base_name)
47
48
  if match:
@@ -21,7 +21,7 @@ async def extract_text_files_to_message(message: Message, api_type: str) -> None
21
21
  if file_type != "text" and file_type != "application":
22
22
  continue
23
23
 
24
- if sub_type == "pdf" and api_type in ("Gemini-Free", "Gemini-Paid", "Claude"):
24
+ if sub_type == "pdf" and api_type in ("OpenAI", "OpenAI-Azure", "Gemini-Free", "Gemini-Paid", "Claude"):
25
25
  continue
26
26
 
27
27
  filename = get_file_name(file_url)
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "LLM-Bridge"
7
- version = "1.7.16"
7
+ version = "1.7.18"
8
8
  authors = [
9
9
  {name = "windsnow1025", email = "windsnow1025@gmail.com"}
10
10
  ]
@@ -21,10 +21,10 @@ dependencies = [
21
21
  "fastapi",
22
22
  "httpx",
23
23
  "tenacity",
24
- "openai",
25
- "tiktoken",
26
- "google-genai",
27
- "anthropic",
24
+ "openai==1.99.6",
25
+ "tiktoken==0.11.0",
26
+ "google-genai==1.28.0",
27
+ "anthropic==0.62.0",
28
28
  "PyMuPDF",
29
29
  "docxlatex>=1.1.1",
30
30
  "openpyxl",
File without changes
File without changes
File without changes