local-openai2anthropic 0.3.3__py3-none-any.whl → 0.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,7 +3,7 @@
 local-openai2anthropic: A proxy server that converts Anthropic Messages API to OpenAI API.
 """
 
-__version__ = "0.2.5"
+__version__ = "0.3.5"
 
 from local_openai2anthropic.protocol import (
     AnthropicError,
@@ -42,6 +42,7 @@ class Settings(BaseSettings):
 
     # Logging
     log_level: str = "DEBUG"
+    log_dir: str = ""  # Empty means use platform-specific default
 
     # Tavily Web Search Configuration
    tavily_api_key: Optional[str] = None
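
The new `log_dir` field defaults to an empty string, which the entry point later treats as "use the platform default" (see the `create_app` change further down). A minimal sketch of that contract; the module path and the `LOG_DIR` environment-variable name are assumptions, since the diff does not show the `Settings` module or any `env_prefix`:

```python
# Sketch only: config module path and LOG_DIR env-var name are assumed.
import os

os.environ["LOG_DIR"] = "~/proxy-logs"  # hypothetical override; "" keeps the platform default

from local_openai2anthropic.config import get_settings  # path assumed

settings = get_settings()
log_dir = settings.log_dir if settings.log_dir else None  # mirrors the create_app change below
```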
@@ -17,13 +17,11 @@ from anthropic.types import (
 from anthropic.types.message_create_params import MessageCreateParams
 from openai.types.chat import (
     ChatCompletion,
-    ChatCompletionChunk,
     ChatCompletionToolParam,
 )
 from openai.types.chat.completion_create_params import CompletionCreateParams
 
 from local_openai2anthropic.protocol import UsageWithCache
-from local_openai2anthropic.server_tools import ServerToolRegistry
 
 logger = logging.getLogger(__name__)
 
@@ -54,6 +52,7 @@ def convert_anthropic_to_openai(
     tools = anthropic_params.get("tools")
     top_k = anthropic_params.get("top_k")
     top_p = anthropic_params.get("top_p", 0.95)
+    repetition_penalty = anthropic_params.get("repetition_penalty", 1.1)
     thinking = anthropic_params.get("thinking")
     # metadata is accepted but not forwarded to OpenAI
 
@@ -102,6 +101,7 @@ def convert_anthropic_to_openai(
         "messages": openai_messages,
         "max_tokens": max_tokens,
         "stream": stream,
+        "repetition_penalty": repetition_penalty,
     }
 
     # Always include usage in stream for accurate token counting
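
`repetition_penalty` is not part of the official OpenAI Chat Completions parameters; it is an extension accepted by local OpenAI-compatible backends such as vLLM, which is presumably why the proxy now forwards it in the request body with a default of 1.1. A client talking to such a backend directly would pass the same field via the openai SDK's `extra_body`, as in this sketch (base URL and model name are placeholders):

```python
# Sketch: equivalent direct request to a local OpenAI-compatible server.
# "repetition_penalty" rides along in the JSON body as a non-standard field.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8000/v1", api_key="not-needed")  # placeholder backend
resp = client.chat.completions.create(
    model="local-model",                        # placeholder
    messages=[{"role": "user", "content": "Hello"}],
    max_tokens=128,
    extra_body={"repetition_penalty": 1.1},     # matches the proxy's new default
)
print(resp.choices[0].message.content)
```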
@@ -288,11 +288,11 @@ def _convert_anthropic_message_to_openai(
        if isinstance(block, dict):
            tool_use_id = block.get("tool_use_id", "")
            result_content = block.get("content", "")
-           is_error = block.get("is_error", False)
+           # Note: is_error is not directly supported in OpenAI API
        else:
            tool_use_id = block.tool_use_id
            result_content = block.content
-           is_error = getattr(block, "is_error", False)
+           # Note: is_error is not directly supported in OpenAI API
 
        # Handle content that might be a list or string
        if isinstance(result_content, list):
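
With these two replacements the converter simply drops the Anthropic `is_error` flag, since an OpenAI `tool` message has no equivalent field. Roughly, a `tool_result` block maps onto an OpenAI tool message like this (a sketch of the shape, not the package's exact code):

```python
# Sketch of the mapping performed by this branch (illustrative only).
anthropic_block = {
    "type": "tool_result",
    "tool_use_id": "toolu_123",   # hypothetical id
    "content": "file not found",
    "is_error": True,             # no OpenAI equivalent; dropped by the converter
}

openai_message = {
    "role": "tool",
    "tool_call_id": anthropic_block["tool_use_id"],
    "content": anthropic_block["content"],
}
```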
@@ -404,6 +404,10 @@ def convert_openai_to_anthropic(
     # Convert tool calls
     if message.tool_calls:
         for tc in message.tool_calls:
+            # Handle case where function might be None
+            if not tc.function:
+                continue
+
             tool_input: dict[str, Any] = {}
             try:
                 tool_input = json.loads(tc.function.arguments)
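
The new guard skips tool calls whose `function` payload is missing before `tc.function.arguments` is touched; the surrounding `try` (truncated in this hunk) then protects against arguments that are not valid JSON. The combined defensive pattern looks roughly like this sketch (the exception handling shown here is an assumption, since the `except` clause is not visible in the diff):

```python
import json
from typing import Any

def parse_tool_arguments(tc) -> dict[str, Any] | None:
    """Sketch of the defensive parsing pattern; not the package's exact helper."""
    if not getattr(tc, "function", None):
        return None                      # nothing to convert for this tool call
    try:
        return json.loads(tc.function.arguments)
    except (json.JSONDecodeError, TypeError):
        return {}                        # fall back to empty input on malformed arguments
```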
@@ -5,7 +5,10 @@ Main entry point for the local-openai2anthropic proxy server.
 
 import argparse
 import logging
+import os
 import sys
+from logging.handlers import TimedRotatingFileHandler
+from pathlib import Path
 
 import uvicorn
 from fastapi import FastAPI, HTTPException, Request
@@ -17,22 +17,88 @@ from local_openai2anthropic.protocol import AnthropicError, AnthropicErrorRespon
 from local_openai2anthropic.router import router
 
 
+def get_default_log_dir() -> str:
+    """Get default log directory based on platform.
+
+    Returns:
+        Path to log directory
+    """
+    if sys.platform == 'win32':
+        # Windows: use %LOCALAPPDATA%\local-openai2anthropic\logs
+        base_dir = os.environ.get('LOCALAPPDATA', os.path.expanduser('~\\AppData\\Local'))
+        return os.path.join(base_dir, 'local-openai2anthropic', 'logs')
+    else:
+        # macOS/Linux: use ~/.local/share/local-openai2anthropic/logs
+        return os.path.expanduser("~/.local/share/local-openai2anthropic/logs")
+
+
+def setup_logging(log_level: str, log_dir: str | None = None) -> None:
+    """Setup logging with daily rotation, keeping only today's logs.
+
+    Args:
+        log_level: Logging level (DEBUG, INFO, WARNING, ERROR)
+        log_dir: Directory for log files (platform-specific default)
+    """
+    # Default log directory based on platform
+    if log_dir is None:
+        log_dir = get_default_log_dir()
+
+    # Expand user directory if specified
+    log_dir = os.path.expanduser(log_dir)
+
+    # Create log directory if it doesn't exist
+    Path(log_dir).mkdir(parents=True, exist_ok=True)
+
+    log_file = os.path.join(log_dir, "server.log")
+
+    # Create formatter
+    formatter = logging.Formatter(
+        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+    )
+
+    # Setup root logger
+    root_logger = logging.getLogger()
+    root_logger.setLevel(getattr(logging, log_level.upper()))
+
+    # Clear existing handlers
+    root_logger.handlers = []
+
+    # Console handler
+    console_handler = logging.StreamHandler(sys.stdout)
+    console_handler.setFormatter(formatter)
+    root_logger.addHandler(console_handler)
+
+    # File handler with daily rotation
+    # backupCount=0 means no backup files are kept (only today's log)
+    # when='midnight' rotates at midnight
+    file_handler = TimedRotatingFileHandler(
+        log_file,
+        when='midnight',
+        interval=1,
+        backupCount=0,  # Keep only today's log
+        encoding='utf-8'
+    )
+    file_handler.setFormatter(formatter)
+    root_logger.addHandler(file_handler)
+
+    logging.info(f"Logging configured. Log file: {log_file}")
+
+
 def create_app(settings: Settings | None = None) -> FastAPI:
     """Create and configure the FastAPI application."""
     if settings is None:
         settings = get_settings()
 
-    # Configure logging
-    logging.basicConfig(
-        level=getattr(logging, settings.log_level.upper()),
-        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-    )
+    # Configure logging with daily rotation
+    # Use platform-specific default if log_dir is not set
+    log_dir = settings.log_dir if settings.log_dir else None
+    setup_logging(settings.log_level, log_dir)
 
     # Create FastAPI app
     app = FastAPI(
         title="local-openai2anthropic",
         description="A proxy server that converts Anthropic Messages API to OpenAI API",
-        version="0.2.5",
+        version="0.3.5",
         docs_url="/docs",
         redoc_url="/redoc",
     )
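
Taken together, `create_app` now routes everything through `setup_logging`, which writes to both stdout and a `server.log` file that rotates at midnight. A minimal way to exercise it programmatically; the module path for `create_app` is an assumption based on the "Main entry point" docstring, and host/port are placeholders:

```python
# Sketch: start the proxy programmatically; logging is configured inside create_app.
import uvicorn
from local_openai2anthropic.__main__ import create_app  # module path assumed

app = create_app()  # sets up console + daily-rotating file handlers, then builds the FastAPI app
uvicorn.run(app, host="127.0.0.1", port=8080)
```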
@@ -6,7 +6,7 @@ Uses Anthropic SDK types for request/response models.
 
 from typing import Any, Literal, Optional
 
-from pydantic import BaseModel, Field
+from pydantic import BaseModel
 
 # Re-export all Anthropic types for convenience
 from anthropic.types import (