local-openai2anthropic 0.3.3__py3-none-any.whl → 0.3.5__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- local_openai2anthropic/__init__.py +1 -1
- local_openai2anthropic/config.py +1 -0
- local_openai2anthropic/converter.py +8 -4
- local_openai2anthropic/main.py +75 -6
- local_openai2anthropic/protocol.py +1 -1
- local_openai2anthropic/router.py +35 -840
- local_openai2anthropic/streaming/__init__.py +6 -0
- local_openai2anthropic/streaming/handler.py +444 -0
- local_openai2anthropic/tools/__init__.py +14 -0
- local_openai2anthropic/tools/handler.py +357 -0
- local_openai2anthropic/utils/__init__.py +18 -0
- local_openai2anthropic/utils/tokens.py +96 -0
- {local_openai2anthropic-0.3.3.dist-info → local_openai2anthropic-0.3.5.dist-info}/METADATA +1 -1
- local_openai2anthropic-0.3.5.dist-info/RECORD +25 -0
- local_openai2anthropic-0.3.3.dist-info/RECORD +0 -19
- {local_openai2anthropic-0.3.3.dist-info → local_openai2anthropic-0.3.5.dist-info}/WHEEL +0 -0
- {local_openai2anthropic-0.3.3.dist-info → local_openai2anthropic-0.3.5.dist-info}/entry_points.txt +0 -0
- {local_openai2anthropic-0.3.3.dist-info → local_openai2anthropic-0.3.5.dist-info}/licenses/LICENSE +0 -0
local_openai2anthropic/converter.py
CHANGED

@@ -17,13 +17,11 @@ from anthropic.types import (
 from anthropic.types.message_create_params import MessageCreateParams
 from openai.types.chat import (
     ChatCompletion,
-    ChatCompletionChunk,
     ChatCompletionToolParam,
 )
 from openai.types.chat.completion_create_params import CompletionCreateParams

 from local_openai2anthropic.protocol import UsageWithCache
-from local_openai2anthropic.server_tools import ServerToolRegistry

 logger = logging.getLogger(__name__)

@@ -54,6 +52,7 @@ def convert_anthropic_to_openai(
     tools = anthropic_params.get("tools")
     top_k = anthropic_params.get("top_k")
     top_p = anthropic_params.get("top_p", 0.95)
+    repetition_penalty = anthropic_params.get("repetition_penalty", 1.1)
     thinking = anthropic_params.get("thinking")
     # metadata is accepted but not forwarded to OpenAI

@@ -102,6 +101,7 @@ def convert_anthropic_to_openai(
         "messages": openai_messages,
         "max_tokens": max_tokens,
         "stream": stream,
+        "repetition_penalty": repetition_penalty,
     }

     # Always include usage in stream for accurate token counting
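The second and third hunks above add a repetition_penalty passthrough: a value supplied in the Anthropic-style request body is copied into the payload sent to the OpenAI-compatible backend, defaulting to 1.1 when absent. Below is a minimal standalone sketch of that mapping; build_openai_payload is a hypothetical helper, not the package's convert_anthropic_to_openai, and the default max_tokens shown is illustrative only.

from typing import Any


def build_openai_payload(anthropic_params: dict[str, Any]) -> dict[str, Any]:
    # Mirror the mapping shown in the hunks above: copy the request fields
    # through and fall back to 1.1 when repetition_penalty is not supplied.
    return {
        "messages": anthropic_params.get("messages", []),
        "max_tokens": anthropic_params.get("max_tokens", 1024),
        "stream": anthropic_params.get("stream", False),
        "repetition_penalty": anthropic_params.get("repetition_penalty", 1.1),
    }


print(build_openai_payload({"messages": [], "max_tokens": 256}))
# {'messages': [], 'max_tokens': 256, 'stream': False, 'repetition_penalty': 1.1}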
@@ -288,11 +288,11 @@ def _convert_anthropic_message_to_openai(
         if isinstance(block, dict):
             tool_use_id = block.get("tool_use_id", "")
             result_content = block.get("content", "")
-
+            # Note: is_error is not directly supported in OpenAI API
         else:
             tool_use_id = block.tool_use_id
             result_content = block.content
-            is_error
+            # Note: is_error is not directly supported in OpenAI API

         # Handle content that might be a list or string
         if isinstance(result_content, list):

@@ -404,6 +404,10 @@ def convert_openai_to_anthropic(
     # Convert tool calls
     if message.tool_calls:
         for tc in message.tool_calls:
+            # Handle case where function might be None
+            if not tc.function:
+                continue
+
             tool_input: dict[str, Any] = {}
             try:
                 tool_input = json.loads(tc.function.arguments)
local_openai2anthropic/main.py
CHANGED
@@ -5,7 +5,10 @@ Main entry point for the local-openai2anthropic proxy server.

 import argparse
 import logging
+import os
 import sys
+from logging.handlers import TimedRotatingFileHandler
+from pathlib import Path

 import uvicorn
 from fastapi import FastAPI, HTTPException, Request

@@ -17,22 +20,88 @@ from local_openai2anthropic.protocol import AnthropicError, AnthropicErrorRespon
 from local_openai2anthropic.router import router


+def get_default_log_dir() -> str:
+    """Get default log directory based on platform.
+
+    Returns:
+        Path to log directory
+    """
+    if sys.platform == 'win32':
+        # Windows: use %LOCALAPPDATA%\local-openai2anthropic\logs
+        base_dir = os.environ.get('LOCALAPPDATA', os.path.expanduser('~\\AppData\\Local'))
+        return os.path.join(base_dir, 'local-openai2anthropic', 'logs')
+    else:
+        # macOS/Linux: use ~/.local/share/local-openai2anthropic/logs
+        return os.path.expanduser("~/.local/share/local-openai2anthropic/logs")
+
+
+def setup_logging(log_level: str, log_dir: str | None = None) -> None:
+    """Setup logging with daily rotation, keeping only today's logs.
+
+    Args:
+        log_level: Logging level (DEBUG, INFO, WARNING, ERROR)
+        log_dir: Directory for log files (platform-specific default)
+    """
+    # Default log directory based on platform
+    if log_dir is None:
+        log_dir = get_default_log_dir()
+
+    # Expand user directory if specified
+    log_dir = os.path.expanduser(log_dir)
+
+    # Create log directory if it doesn't exist
+    Path(log_dir).mkdir(parents=True, exist_ok=True)
+
+    log_file = os.path.join(log_dir, "server.log")
+
+    # Create formatter
+    formatter = logging.Formatter(
+        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+    )
+
+    # Setup root logger
+    root_logger = logging.getLogger()
+    root_logger.setLevel(getattr(logging, log_level.upper()))
+
+    # Clear existing handlers
+    root_logger.handlers = []
+
+    # Console handler
+    console_handler = logging.StreamHandler(sys.stdout)
+    console_handler.setFormatter(formatter)
+    root_logger.addHandler(console_handler)
+
+    # File handler with daily rotation
+    # backupCount=0 means no backup files are kept (only today's log)
+    # when='midnight' rotates at midnight
+    file_handler = TimedRotatingFileHandler(
+        log_file,
+        when='midnight',
+        interval=1,
+        backupCount=0,  # Keep only today's log
+        encoding='utf-8'
+    )
+    file_handler.setFormatter(formatter)
+    root_logger.addHandler(file_handler)
+
+    logging.info(f"Logging configured. Log file: {log_file}")
+
+
 def create_app(settings: Settings | None = None) -> FastAPI:
     """Create and configure the FastAPI application."""
     if settings is None:
         settings = get_settings()

-    # Configure logging
-
-
-
-    )
+    # Configure logging with daily rotation
+    # Use platform-specific default if log_dir is not set
+    log_dir = settings.log_dir if settings.log_dir else None
+    setup_logging(settings.log_level, log_dir)

     # Create FastAPI app
     app = FastAPI(
         title="local-openai2anthropic",
         description="A proxy server that converts Anthropic Messages API to OpenAI API",
-        version="0.
+        version="0.3.5",
         docs_url="/docs",
         redoc_url="/redoc",
     )