router-maestro 0.1.4.tar.gz → 0.1.6.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. router_maestro-0.1.6/.github/workflows/ci.yml +55 -0
  2. router_maestro-0.1.6/.github/workflows/release.yml +167 -0
  3. {router_maestro-0.1.4 → router_maestro-0.1.6}/.gitignore +11 -0
  4. {router_maestro-0.1.4 → router_maestro-0.1.6}/CLAUDE.md +2 -2
  5. {router_maestro-0.1.4 → router_maestro-0.1.6}/PKG-INFO +4 -1
  6. {router_maestro-0.1.4 → router_maestro-0.1.6}/README.md +3 -0
  7. {router_maestro-0.1.4 → router_maestro-0.1.6}/pyproject.toml +1 -1
  8. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/__init__.py +1 -1
  9. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/cli/config.py +19 -9
  10. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/routes/anthropic.py +99 -5
  11. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/schemas/anthropic.py +21 -0
  12. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/utils/tokens.py +10 -2
  13. router_maestro-0.1.6/tests/test_anthropic_models.py +220 -0
  14. {router_maestro-0.1.4 → router_maestro-0.1.6}/tests/test_auth.py +8 -7
  15. {router_maestro-0.1.4 → router_maestro-0.1.6}/tests/test_providers.py +0 -2
  16. {router_maestro-0.1.4 → router_maestro-0.1.6}/tests/test_router.py +1 -3
  17. {router_maestro-0.1.4 → router_maestro-0.1.6}/tests/test_translation.py +5 -7
  18. {router_maestro-0.1.4 → router_maestro-0.1.6}/tests/test_utils.py +0 -2
  19. {router_maestro-0.1.4 → router_maestro-0.1.6}/uv.lock +1 -1
  20. {router_maestro-0.1.4 → router_maestro-0.1.6}/.env.example +0 -0
  21. {router_maestro-0.1.4 → router_maestro-0.1.6}/.markdownlint.json +0 -0
  22. {router_maestro-0.1.4 → router_maestro-0.1.6}/Dockerfile +0 -0
  23. {router_maestro-0.1.4 → router_maestro-0.1.6}/LICENSE +0 -0
  24. {router_maestro-0.1.4 → router_maestro-0.1.6}/Makefile +0 -0
  25. {router_maestro-0.1.4 → router_maestro-0.1.6}/docker-compose.yml +0 -0
  26. {router_maestro-0.1.4 → router_maestro-0.1.6}/docs/deployment.md +0 -0
  27. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/__main__.py +0 -0
  28. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/auth/__init__.py +0 -0
  29. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/auth/github_oauth.py +0 -0
  30. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/auth/manager.py +0 -0
  31. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/auth/storage.py +0 -0
  32. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/cli/__init__.py +0 -0
  33. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/cli/auth.py +0 -0
  34. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/cli/client.py +0 -0
  35. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/cli/context.py +0 -0
  36. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/cli/main.py +0 -0
  37. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/cli/model.py +0 -0
  38. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/cli/server.py +0 -0
  39. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/config/__init__.py +0 -0
  40. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/config/contexts.py +0 -0
  41. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/config/paths.py +0 -0
  42. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/config/priorities.py +0 -0
  43. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/config/providers.py +0 -0
  44. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/config/server.py +0 -0
  45. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/config/settings.py +0 -0
  46. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/providers/__init__.py +0 -0
  47. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/providers/anthropic.py +0 -0
  48. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/providers/base.py +0 -0
  49. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/providers/copilot.py +0 -0
  50. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/providers/openai.py +0 -0
  51. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/providers/openai_compat.py +0 -0
  52. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/routing/__init__.py +0 -0
  53. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/routing/router.py +0 -0
  54. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/__init__.py +0 -0
  55. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/app.py +0 -0
  56. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/middleware/__init__.py +0 -0
  57. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/middleware/auth.py +0 -0
  58. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/oauth_sessions.py +0 -0
  59. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/routes/__init__.py +0 -0
  60. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/routes/admin.py +0 -0
  61. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/routes/chat.py +0 -0
  62. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/routes/models.py +0 -0
  63. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/schemas/__init__.py +0 -0
  64. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/schemas/admin.py +0 -0
  65. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/schemas/openai.py +0 -0
  66. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/server/translation.py +0 -0
  67. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/utils/__init__.py +0 -0
  68. {router_maestro-0.1.4 → router_maestro-0.1.6}/src/router_maestro/utils/logging.py +0 -0
  69. {router_maestro-0.1.4 → router_maestro-0.1.6}/tests/__init__.py +0 -0
  70. {router_maestro-0.1.4 → router_maestro-0.1.6}/tests/test_config.py +0 -0

.github/workflows/ci.yml
@@ -0,0 +1,55 @@
+ name: CI
+
+ on:
+   push:
+     branches: [master, main]
+   pull_request:
+     branches: [master, main]
+
+ concurrency:
+   group: ${{ github.workflow }}-${{ github.ref }}
+   cancel-in-progress: true
+
+ jobs:
+   lint:
+     name: Lint
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Install uv
+         uses: astral-sh/setup-uv@v5
+
+       - name: Set up Python
+         run: uv python install 3.12
+
+       - name: Install dependencies
+         run: uv sync --extra dev
+
+       - name: Run ruff check
+         run: uv run ruff check src/ tests/
+
+       - name: Run ruff format check
+         run: uv run ruff format --check src/ tests/
+
+   test:
+     name: Test (Python ${{ matrix.python-version }})
+     runs-on: ubuntu-latest
+     strategy:
+       fail-fast: false
+       matrix:
+         python-version: ["3.11", "3.12"]
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Install uv
+         uses: astral-sh/setup-uv@v5
+
+       - name: Set up Python ${{ matrix.python-version }}
+         run: uv python install ${{ matrix.python-version }}
+
+       - name: Install dependencies
+         run: uv sync --extra dev
+
+       - name: Run tests
+         run: uv run pytest tests/ -v

.github/workflows/release.yml
@@ -0,0 +1,167 @@
+ name: Release
+
+ on:
+   push:
+     tags:
+       - "v*.*.*"
+   workflow_dispatch:
+     inputs:
+       version:
+         description: "Version to release (e.g., 0.1.6). Leave empty to use pyproject.toml version."
+         required: false
+         type: string
+       skip_pypi:
+         description: "Skip PyPI publishing"
+         required: false
+         type: boolean
+         default: false
+       skip_docker:
+         description: "Skip Docker publishing"
+         required: false
+         type: boolean
+         default: false
+
+ concurrency:
+   group: release
+   cancel-in-progress: false
+
+ jobs:
+   test:
+     name: Test
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Install uv
+         uses: astral-sh/setup-uv@v5
+
+       - name: Set up Python
+         run: uv python install 3.12
+
+       - name: Install dependencies
+         run: uv sync --extra dev
+
+       - name: Run ruff check
+         run: uv run ruff check src/ tests/
+
+       - name: Run tests
+         run: uv run pytest tests/ -v
+
+   prepare:
+     name: Prepare Release
+     runs-on: ubuntu-latest
+     needs: test
+     outputs:
+       version: ${{ steps.version.outputs.version }}
+       docker_tags: ${{ steps.docker.outputs.tags }}
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Determine version
+         id: version
+         run: |
+           if [[ "${{ github.event_name }}" == "push" && "${{ github.ref_type }}" == "tag" ]]; then
+             # Extract version from tag (v1.2.3 -> 1.2.3)
+             VERSION="${GITHUB_REF_NAME#v}"
+           elif [[ -n "${{ inputs.version }}" ]]; then
+             # Use workflow_dispatch input
+             VERSION="${{ inputs.version }}"
+           else
+             # Read from pyproject.toml
+             VERSION=$(grep -Po '(?<=^version = ")[^"]*' pyproject.toml)
+           fi
+           echo "version=$VERSION" >> $GITHUB_OUTPUT
+           echo "Determined version: $VERSION"
+
+       - name: Prepare Docker tags
+         id: docker
+         run: |
+           VERSION="${{ steps.version.outputs.version }}"
+           TAGS="likanwen/router-maestro:${VERSION},likanwen/router-maestro:latest"
+           echo "tags=$TAGS" >> $GITHUB_OUTPUT
+           echo "Docker tags: $TAGS"
+
+   publish-pypi:
+     name: Publish to PyPI
+     runs-on: ubuntu-latest
+     needs: prepare
+     if: ${{ !inputs.skip_pypi }}
+     environment:
+       name: pypi
+       url: https://pypi.org/project/router-maestro/
+     permissions:
+       id-token: write
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Install uv
+         uses: astral-sh/setup-uv@v5
+
+       - name: Set up Python
+         run: uv python install 3.12
+
+       - name: Build package
+         run: uv build
+
+       - name: Verify build version
+         run: |
+           EXPECTED_VERSION="${{ needs.prepare.outputs.version }}"
+           BUILT_VERSION=$(ls dist/*.tar.gz | grep -Po 'router_maestro-\K[0-9]+\.[0-9]+\.[0-9]+')
+           if [[ "$BUILT_VERSION" != "$EXPECTED_VERSION" ]]; then
+             echo "Version mismatch! Expected: $EXPECTED_VERSION, Built: $BUILT_VERSION"
+             exit 1
+           fi
+           echo "Version verified: $BUILT_VERSION"
+
+       - name: Publish to PyPI
+         uses: pypa/gh-action-pypi-publish@release/v1
+         with:
+           verbose: true
+
+   publish-docker:
+     name: Publish Docker Image
+     runs-on: ubuntu-latest
+     needs: prepare
+     if: ${{ !inputs.skip_docker }}
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Set up QEMU
+         uses: docker/setup-qemu-action@v3
+
+       - name: Set up Docker Buildx
+         uses: docker/setup-buildx-action@v3
+
+       - name: Login to Docker Hub
+         uses: docker/login-action@v3
+         with:
+           username: ${{ secrets.DOCKER_USERNAME }}
+           password: ${{ secrets.DOCKER_TOKEN }}
+
+       - name: Build and push
+         uses: docker/build-push-action@v6
+         with:
+           context: .
+           platforms: linux/amd64,linux/arm64
+           push: true
+           tags: ${{ needs.prepare.outputs.docker_tags }}
+           cache-from: type=gha
+           cache-to: type=gha,mode=max
+
+   create-release:
+     name: Create GitHub Release
+     runs-on: ubuntu-latest
+     needs: [prepare, publish-pypi, publish-docker]
+     if: ${{ github.event_name == 'push' && github.ref_type == 'tag' }}
+     permissions:
+       contents: write
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Create GitHub Release
+         uses: softprops/action-gh-release@v2
+         with:
+           name: v${{ needs.prepare.outputs.version }}
+           generate_release_notes: true
+           draft: false
+           prerelease: false
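
A note on the release workflow above: the `prepare` job resolves the version with a fixed precedence (tag push, then the `workflow_dispatch` input, then `pyproject.toml`), and `publish-pypi` aborts if the built sdist disagrees with that result. The precedence can be sketched in Python; this is a standalone illustration rather than code from the package, and `resolve_version` is a hypothetical helper name:

```python
import re
from pathlib import Path


def resolve_version(ref_name: str | None, dispatch_input: str | None, pyproject: Path) -> str:
    """Mirror the workflow's precedence: git tag > workflow_dispatch input > pyproject.toml."""
    if ref_name and ref_name.startswith("v"):
        return ref_name[1:]  # v1.2.3 -> 1.2.3
    if dispatch_input:
        return dispatch_input
    match = re.search(r'^version = "([^"]+)"', pyproject.read_text(), re.MULTILINE)
    if not match:
        raise ValueError("version not found in pyproject.toml")
    return match.group(1)


# Example: a tag push wins over everything else, so pyproject.toml is never read here.
print(resolve_version("v0.1.6", None, Path("pyproject.toml")))  # -> 0.1.6
```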

.gitignore
@@ -101,3 +101,14 @@ dmypy.json
  *.tmp
  *.bak
  *~
+
+ # Auto Claude data directory
+ .auto-claude/
+
+ # Auto Claude generated files
+ .auto-claude-security.json
+ .auto-claude-status
+ .claude_settings.json
+ .worktrees/
+ .security-key
+ logs/security/

CLAUDE.md
@@ -57,7 +57,7 @@ Router-Maestro is a multi-model routing system that exposes both OpenAI-compatib
  - `schemas/` - Pydantic models for both API formats

  **CLI (`src/router_maestro/cli/`)**
- - Typer-based CLI with subcommands: `server`, `auth`, `model`, `context`, `config`, `stats`
+ - Typer-based CLI with subcommands: `server`, `auth`, `model`, `context`, `config`
  - Each subcommand in its own module registered in `main.py`

  ### Data Flow
@@ -72,7 +72,7 @@ Router-Maestro is a multi-model routing system that exposes both OpenAI-compatib

  Configuration and data files follow XDG conventions:
  - **Config** (`~/.config/router-maestro/`): `providers.json`, `priorities.json`, `contexts.json`
- - **Data** (`~/.local/share/router-maestro/`): `auth.json`, `server.json`, `stats.db`
+ - **Data** (`~/.local/share/router-maestro/`): `auth.json`, `server.json`

  ### Model Identification

PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: router-maestro
- Version: 0.1.4
+ Version: 0.1.6
  Summary: Multi-model routing and load balancing system with OpenAI-compatible API
  Author-email: Kanwen Li <likanwen@icloud.com>
  License-Expression: MIT
@@ -37,6 +37,9 @@ Description-Content-Type: text/markdown

  # Router-Maestro

+ [![CI](https://github.com/MadSkittles/Router-Maestro/actions/workflows/ci.yml/badge.svg)](https://github.com/MadSkittles/Router-Maestro/actions/workflows/ci.yml)
+ [![Release](https://github.com/MadSkittles/Router-Maestro/actions/workflows/release.yml/badge.svg)](https://github.com/MadSkittles/Router-Maestro/actions/workflows/release.yml)
+
  Multi-model routing router with OpenAI-compatible and Anthropic-compatible APIs. Route LLM requests across GitHub Copilot, OpenAI, Anthropic, and custom providers with intelligent fallback and priority-based selection.

  ## TL;DR

README.md
@@ -1,5 +1,8 @@
  # Router-Maestro

+ [![CI](https://github.com/MadSkittles/Router-Maestro/actions/workflows/ci.yml/badge.svg)](https://github.com/MadSkittles/Router-Maestro/actions/workflows/ci.yml)
+ [![Release](https://github.com/MadSkittles/Router-Maestro/actions/workflows/release.yml/badge.svg)](https://github.com/MadSkittles/Router-Maestro/actions/workflows/release.yml)
+
  Multi-model routing router with OpenAI-compatible and Anthropic-compatible APIs. Route LLM requests across GitHub Copilot, OpenAI, Anthropic, and custom providers with intelligent fallback and priority-based selection.

  ## TL;DR

pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "router-maestro"
- version = "0.1.4"
+ version = "0.1.6"
  description = "Multi-model routing and load balancing system with OpenAI-compatible API"
  readme = "README.md"
  license = "MIT"

src/router_maestro/__init__.py
@@ -1,3 +1,3 @@
  """Router-Maestro: Multi-model routing and load balancing system."""

- __version__ = "0.1.4"
+ __version__ = "0.1.6"

src/router_maestro/cli/config.py
@@ -139,19 +139,29 @@ def claude_code_config() -> None:
      )
      anthropic_url = f"{base_url}/api/anthropic"

-     config = {
-         "env": {
-             "ANTHROPIC_BASE_URL": anthropic_url,
-             "ANTHROPIC_AUTH_TOKEN": auth_token,
-             "ANTHROPIC_MODEL": main_model,
-             "ANTHROPIC_SMALL_FAST_MODEL": fast_model,
-             "CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC": "1",
-         }
+     env_config = {
+         "ANTHROPIC_BASE_URL": anthropic_url,
+         "ANTHROPIC_AUTH_TOKEN": auth_token,
+         "ANTHROPIC_MODEL": main_model,
+         "ANTHROPIC_SMALL_FAST_MODEL": fast_model,
+         "CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC": "1",
      }

+     # Load existing settings to preserve other sections (e.g., MCP servers)
+     existing_config: dict = {}
+     if settings_path.exists():
+         try:
+             with open(settings_path, encoding="utf-8") as f:
+                 existing_config = json.load(f)
+         except (json.JSONDecodeError, OSError):
+             pass  # If file is corrupted, start fresh
+
+     # Merge: update env section while preserving other sections
+     existing_config["env"] = env_config
+
      settings_path.parent.mkdir(parents=True, exist_ok=True)
      with open(settings_path, "w", encoding="utf-8") as f:
-         json.dump(config, f, indent=2)
+         json.dump(existing_config, f, indent=2)

      console.print(
          Panel(
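
The config.py change above stops `router-maestro config claude-code` from clobbering an existing Claude Code settings file: only the `env` section is replaced, and everything else already in the file is carried over. A standalone sketch of that merge behavior, using a throwaway temp file and made-up keys rather than the real settings path:

```python
import json
import tempfile
from pathlib import Path

# Throwaway file standing in for the real Claude Code settings.json (path is hypothetical).
settings_path = Path(tempfile.gettempdir()) / "claude_settings_example.json"
settings_path.write_text(
    json.dumps({"mcpServers": {"docs": {"command": "docs-server"}}, "env": {"STALE_VAR": "1"}})
)

# Same pattern as the diff: load what exists, replace only the "env" section, write back.
existing: dict = json.loads(settings_path.read_text())
existing["env"] = {"ANTHROPIC_BASE_URL": "http://localhost:8000/api/anthropic"}
settings_path.write_text(json.dumps(existing, indent=2))

print(json.loads(settings_path.read_text())["mcpServers"])  # preserved alongside the new env
```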

src/router_maestro/server/routes/anthropic.py
@@ -3,6 +3,7 @@
  import json
  import uuid
  from collections.abc import AsyncGenerator
+ from datetime import UTC, datetime

  from fastapi import APIRouter, HTTPException
  from fastapi.responses import StreamingResponse
@@ -13,6 +14,8 @@ from router_maestro.server.schemas.anthropic import (
      AnthropicCountTokensRequest,
      AnthropicMessagesRequest,
      AnthropicMessagesResponse,
+     AnthropicModelInfo,
+     AnthropicModelList,
      AnthropicStreamState,
      AnthropicTextBlock,
      AnthropicUsage,
@@ -26,6 +29,7 @@ from router_maestro.utils import (
      get_logger,
      map_openai_stop_reason_to_anthropic,
  )
+ from router_maestro.utils.tokens import AnthropicStopReason

  logger = get_logger("server.routes.anthropic")

@@ -106,7 +110,7 @@ async def count_tokens(request: AnthropicCountTokensRequest):

      # Count messages
      for msg in request.messages:
-         content = msg.content if hasattr(msg, "content") else msg.get("content", "")
+         content = msg.content
          if isinstance(content, str):
              total_chars += len(content)
          elif isinstance(content, list):
@@ -115,12 +119,12 @@ async def count_tokens(request: AnthropicCountTokensRequest):
                      if block.get("type") == "text":
                          total_chars += len(block.get("text", ""))
                  elif hasattr(block, "text"):
-                     total_chars += len(block.text)
+                     total_chars += len(block.text)  # type: ignore[union-attr]

      return {"input_tokens": estimate_tokens_from_char_count(total_chars)}


- def _map_finish_reason(reason: str | None) -> str | None:
+ def _map_finish_reason(reason: str | None) -> AnthropicStopReason | None:
      """Map OpenAI finish reason to Anthropic stop reason."""
      return map_openai_stop_reason_to_anthropic(reason)

@@ -144,7 +148,7 @@ def _estimate_input_tokens(request: AnthropicMessagesRequest) -> int:

      # Count messages
      for msg in request.messages:
-         content = msg.content if hasattr(msg, "content") else msg.get("content", "")
+         content = msg.content
          if isinstance(content, str):
              total_chars += len(content)
          elif isinstance(content, list):
@@ -161,7 +165,7 @@
                      if isinstance(tc, dict) and tc.get("type") == "text":
                          total_chars += len(tc.get("text", ""))
                  elif hasattr(block, "text"):
-                     total_chars += len(block.text)
+                     total_chars += len(block.text)  # type: ignore[union-attr]

      # Count tools definitions if present
      if request.tools:
@@ -226,3 +230,93 @@ async def stream_response(
              },
          }
          yield f"event: error\ndata: {json.dumps(error_event)}\n\n"
+
+
+ def _generate_display_name(model_id: str) -> str:
+     """Generate a human-readable display name from model ID.
+
+     Transforms model IDs like 'github-copilot/claude-sonnet-4' into
+     'Claude Sonnet 4 (github-copilot)'.
+     """
+     if "/" in model_id:
+         provider, model_name = model_id.split("/", 1)
+     else:
+         provider = ""
+         model_name = model_id
+
+     # Capitalize words and handle common patterns
+     words = model_name.replace("-", " ").replace("_", " ").split()
+     display_words = []
+     for word in words:
+         # Keep version numbers as-is
+         if word.replace(".", "").isdigit():
+             display_words.append(word)
+         else:
+             display_words.append(word.capitalize())
+
+     display_name = " ".join(display_words)
+     if provider:
+         display_name = f"{display_name} ({provider})"
+
+     return display_name
+
+
+ @router.get("/api/anthropic/v1/models")
+ async def list_models(
+     limit: int = 20,
+     after_id: str | None = None,
+     before_id: str | None = None,
+ ) -> AnthropicModelList:
+     """List available models in Anthropic format.
+
+     Args:
+         limit: Maximum number of models to return (default 20)
+         after_id: Return models after this ID (for forward pagination)
+         before_id: Return models before this ID (for backward pagination)
+     """
+     model_router = get_router()
+     models = await model_router.list_models()
+
+     # Generate ISO 8601 timestamp for created_at
+     # Using current time since actual creation dates aren't tracked
+     created_at = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+     # Convert to Anthropic format
+     anthropic_models = [
+         AnthropicModelInfo(
+             id=model.id,
+             created_at=created_at,
+             display_name=_generate_display_name(model.id),
+             type="model",
+         )
+         for model in models
+     ]
+
+     # Handle pagination
+     start_idx = 0
+     if after_id:
+         for i, model in enumerate(anthropic_models):
+             if model.id == after_id:
+                 start_idx = i + 1
+                 break
+
+     end_idx = len(anthropic_models)
+     if before_id:
+         for i, model in enumerate(anthropic_models):
+             if model.id == before_id:
+                 end_idx = i
+                 break
+
+     # Apply limit
+     paginated = anthropic_models[start_idx : min(start_idx + limit, end_idx)]
+
+     first_id = paginated[0].id if paginated else None
+     last_id = paginated[-1].id if paginated else None
+     has_more = (start_idx + limit) < end_idx
+
+     return AnthropicModelList(
+         data=paginated,
+         first_id=first_id,
+         last_id=last_id,
+         has_more=has_more,
+     )
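
The new `/api/anthropic/v1/models` endpoint above mirrors Anthropic's models list, including the cursor-style `limit`/`after_id`/`before_id` pagination and the generated `display_name`. A client-side sketch of paging through it follows; it assumes a router-maestro server on localhost:8000 and the `httpx` package, neither of which is implied by the diff itself:

```python
import httpx

BASE_URL = "http://localhost:8000"  # assumed local router-maestro server


def iter_models(page_size: int = 2):
    """Walk /api/anthropic/v1/models with the after_id cursor until has_more is False."""
    after_id: str | None = None
    while True:
        params: dict[str, str | int] = {"limit": page_size}
        if after_id:
            params["after_id"] = after_id
        page = httpx.get(f"{BASE_URL}/api/anthropic/v1/models", params=params).json()
        yield from page["data"]
        if not page["has_more"]:
            break
        after_id = page["last_id"]


for model in iter_models():
    print(model["id"], "->", model["display_name"])
```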

src/router_maestro/server/schemas/anthropic.py
@@ -244,3 +244,24 @@ class AnthropicStreamState(BaseModel):
      estimated_input_tokens: int = 0  # Estimated input tokens from request
      last_usage: dict | None = None  # Track the latest usage from stream chunks
      message_complete: bool = False  # Track if message_stop was sent
+
+
+ # Models API types
+
+
+ class AnthropicModelInfo(BaseModel):
+     """Anthropic model object."""
+
+     id: str
+     created_at: str  # ISO 8601 datetime
+     display_name: str
+     type: Literal["model"] = "model"
+
+
+ class AnthropicModelList(BaseModel):
+     """Anthropic models list response with pagination."""
+
+     data: list[AnthropicModelInfo]
+     first_id: str | None = None
+     last_id: str | None = None
+     has_more: bool = False
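
For reference, the two new schemas serialize to the same JSON shape the endpoint returns. A quick construction, assuming the project is on Pydantic v2 (`model_dump_json`; on v1 the equivalent would be `.json()`), with illustrative values:

```python
from router_maestro.server.schemas.anthropic import AnthropicModelInfo, AnthropicModelList

page = AnthropicModelList(
    data=[
        AnthropicModelInfo(
            id="github-copilot/claude-sonnet-4",
            created_at="2025-02-02T00:00:00Z",
            display_name="Claude Sonnet 4 (github-copilot)",
        )
    ],
    first_id="github-copilot/claude-sonnet-4",
    last_id="github-copilot/claude-sonnet-4",
    has_more=False,
)
print(page.model_dump_json(indent=2))  # Pydantic v2 assumed
```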

src/router_maestro/utils/tokens.py
@@ -1,8 +1,14 @@
  """Token estimation utilities."""

+ from typing import Literal
+
  # Approximate characters per token for English text
  CHARS_PER_TOKEN = 4

+ AnthropicStopReason = Literal[
+     "end_turn", "max_tokens", "stop_sequence", "tool_use", "pause_turn", "refusal"
+ ]
+

  def estimate_tokens(text: str) -> int:
      """Estimate token count from text.
@@ -31,7 +37,9 @@ def estimate_tokens_from_char_count(char_count: int) -> int:
      return char_count // CHARS_PER_TOKEN


- def map_openai_stop_reason_to_anthropic(openai_reason: str | None) -> str | None:
+ def map_openai_stop_reason_to_anthropic(
+     openai_reason: str | None,
+ ) -> AnthropicStopReason | None:
      """Map OpenAI finish reason to Anthropic stop reason.

      Args:
@@ -42,7 +50,7 @@ def map_openai_stop_reason_to_anthropic(openai_reason: str | None) -> str | None
      """
      if openai_reason is None:
          return None
-     mapping = {
+     mapping: dict[str, AnthropicStopReason] = {
          "stop": "end_turn",
          "length": "max_tokens",
          "tool_calls": "tool_use",

tests/test_anthropic_models.py
@@ -0,0 +1,220 @@
+ """Tests for the Anthropic models endpoint."""
+
+ from unittest.mock import AsyncMock, patch
+
+ import pytest
+ from fastapi import FastAPI
+ from fastapi.testclient import TestClient
+
+ from router_maestro.providers.base import ModelInfo
+ from router_maestro.server.routes.anthropic import (
+     _generate_display_name,
+     list_models,
+     router,
+ )
+ from router_maestro.server.schemas.anthropic import AnthropicModelInfo, AnthropicModelList
+
+
+ class TestGenerateDisplayName:
+     """Tests for display name generation."""
+
+     def test_simple_model_name(self):
+         """Test display name for simple model ID."""
+         result = _generate_display_name("claude-sonnet-4")
+         assert result == "Claude Sonnet 4"
+
+     def test_model_with_provider(self):
+         """Test display name includes provider."""
+         result = _generate_display_name("github-copilot/claude-sonnet-4")
+         assert result == "Claude Sonnet 4 (github-copilot)"
+
+     def test_model_with_version_number(self):
+         """Test display name preserves version numbers."""
+         result = _generate_display_name("openai/gpt-4o")
+         assert result == "Gpt 4o (openai)"
+
+     def test_model_with_underscores(self):
+         """Test display name handles underscores."""
+         result = _generate_display_name("provider/some_model_name")
+         assert result == "Some Model Name (provider)"
+
+
+ class TestAnthropicModelInfoSchema:
+     """Tests for AnthropicModelInfo schema."""
+
+     def test_model_info_fields(self):
+         """Test that model info has required fields."""
+         model = AnthropicModelInfo(
+             id="claude-sonnet-4",
+             created_at="2025-02-02T00:00:00Z",
+             display_name="Claude Sonnet 4",
+             type="model",
+         )
+         assert model.id == "claude-sonnet-4"
+         assert model.created_at == "2025-02-02T00:00:00Z"
+         assert model.display_name == "Claude Sonnet 4"
+         assert model.type == "model"
+
+     def test_model_info_default_type(self):
+         """Test that type defaults to 'model'."""
+         model = AnthropicModelInfo(
+             id="test-model",
+             created_at="2025-02-02T00:00:00Z",
+             display_name="Test Model",
+         )
+         assert model.type == "model"
+
+
+ class TestAnthropicModelListSchema:
+     """Tests for AnthropicModelList schema."""
+
+     def test_model_list_with_data(self):
+         """Test model list with data."""
+         models = AnthropicModelList(
+             data=[
+                 AnthropicModelInfo(
+                     id="model-1",
+                     created_at="2025-02-02T00:00:00Z",
+                     display_name="Model 1",
+                 )
+             ],
+             first_id="model-1",
+             last_id="model-1",
+             has_more=False,
+         )
+         assert len(models.data) == 1
+         assert models.first_id == "model-1"
+         assert models.last_id == "model-1"
+         assert models.has_more is False
+
+     def test_model_list_empty(self):
+         """Test empty model list."""
+         models = AnthropicModelList(data=[])
+         assert len(models.data) == 0
+         assert models.first_id is None
+         assert models.last_id is None
+         assert models.has_more is False
+
+     def test_model_list_pagination(self):
+         """Test model list with pagination."""
+         models = AnthropicModelList(
+             data=[],
+             first_id="first",
+             last_id="last",
+             has_more=True,
+         )
+         assert models.has_more is True
+
+
+ @pytest.fixture
+ def mock_router():
+     """Create a mock router."""
+     mock = AsyncMock()
+     mock.list_models = AsyncMock(
+         return_value=[
+             ModelInfo(
+                 id="github-copilot/claude-sonnet-4",
+                 name="claude-sonnet-4",
+                 provider="github-copilot",
+             ),
+             ModelInfo(
+                 id="github-copilot/gpt-4o",
+                 name="gpt-4o",
+                 provider="github-copilot",
+             ),
+             ModelInfo(id="openai/gpt-4o", name="gpt-4o", provider="openai"),
+         ]
+     )
+     return mock
+
+
+ @pytest.fixture
+ def app():
+     """Create a test FastAPI app."""
+     app = FastAPI()
+     app.include_router(router)
+     return app
+
+
+ @pytest.fixture
+ def client(app):
+     """Create a test client."""
+     return TestClient(app)
+
+
+ class TestListModelsEndpoint:
+     """Tests for the /api/anthropic/v1/models endpoint."""
+
+     @pytest.mark.anyio
+     async def test_list_models_response_format(self, mock_router):
+         """Test that response matches Anthropic format."""
+         with patch("router_maestro.server.routes.anthropic.get_router", return_value=mock_router):
+             response = await list_models()
+
+         assert isinstance(response, AnthropicModelList)
+         assert len(response.data) == 3
+         assert response.first_id == "github-copilot/claude-sonnet-4"
+         assert response.last_id == "openai/gpt-4o"
+         assert response.has_more is False
+
+     @pytest.mark.anyio
+     async def test_list_models_model_fields(self, mock_router):
+         """Test that each model has required Anthropic fields."""
+         with patch("router_maestro.server.routes.anthropic.get_router", return_value=mock_router):
+             response = await list_models()
+
+         model = response.data[0]
+         assert model.id == "github-copilot/claude-sonnet-4"
+         assert model.type == "model"
+         assert model.display_name == "Claude Sonnet 4 (github-copilot)"
+         # created_at should be ISO 8601 format
+         assert "T" in model.created_at
+         assert model.created_at.endswith("Z")
+
+     @pytest.mark.anyio
+     async def test_list_models_pagination_limit(self, mock_router):
+         """Test pagination with limit parameter."""
+         with patch("router_maestro.server.routes.anthropic.get_router", return_value=mock_router):
+             response = await list_models(limit=2)
+
+         assert len(response.data) == 2
+         assert response.has_more is True
+         assert response.first_id == "github-copilot/claude-sonnet-4"
+         assert response.last_id == "github-copilot/gpt-4o"
+
+     @pytest.mark.anyio
+     async def test_list_models_pagination_after_id(self, mock_router):
+         """Test pagination with after_id parameter."""
+         with patch("router_maestro.server.routes.anthropic.get_router", return_value=mock_router):
+             response = await list_models(after_id="github-copilot/claude-sonnet-4")
+
+         assert len(response.data) == 2
+         assert response.data[0].id == "github-copilot/gpt-4o"
+         assert response.has_more is False
+
+     @pytest.mark.anyio
+     async def test_list_models_empty(self):
+         """Test response when no models available."""
+         mock = AsyncMock()
+         mock.list_models = AsyncMock(return_value=[])
+
+         with patch("router_maestro.server.routes.anthropic.get_router", return_value=mock):
+             response = await list_models()
+
+         assert len(response.data) == 0
+         assert response.first_id is None
+         assert response.last_id is None
+         assert response.has_more is False
+
+     def test_http_endpoint(self, client, mock_router):
+         """Test the HTTP endpoint via test client."""
+         with patch("router_maestro.server.routes.anthropic.get_router", return_value=mock_router):
+             response = client.get("/api/anthropic/v1/models")
+
+         assert response.status_code == 200
+         data = response.json()
+         assert "data" in data
+         assert "first_id" in data
+         assert "last_id" in data
+         assert "has_more" in data
+         assert len(data["data"]) == 3

tests/test_auth.py
@@ -3,8 +3,6 @@
  import tempfile
  from pathlib import Path

- import pytest
-
  from router_maestro.auth.storage import ApiKeyCredential, AuthStorage, AuthType, OAuthCredential


@@ -74,11 +72,14 @@ class TestAuthStorage:
          # Create and save
          storage = AuthStorage()
          storage.set("openai", ApiKeyCredential(key="test-key"))
-         storage.set("github-copilot", OAuthCredential(
-             refresh="refresh",
-             access="access",
-             expires=12345,
-         ))
+         storage.set(
+             "github-copilot",
+             OAuthCredential(
+                 refresh="refresh",
+                 access="access",
+                 expires=12345,
+             ),
+         )
          storage.save(path)

          # Load and verify

tests/test_providers.py
@@ -1,7 +1,5 @@
  """Tests for providers module."""

- import pytest
-
  from router_maestro.providers import (
      AnthropicProvider,
      ChatRequest,

tests/test_router.py
@@ -19,9 +19,7 @@ class MockProvider(BaseProvider):
      ):
          self._name = name
          self._authenticated = authenticated
-         self._models = models or [
-             ModelInfo(id="test-model", name="Test Model", provider=name)
-         ]
+         self._models = models or [ModelInfo(id="test-model", name="Test Model", provider=name)]
          self._fail_on_request = fail_on_request

      @property

tests/test_translation.py
@@ -1,7 +1,10 @@
  """Tests for the translation module."""

- import pytest
-
+ from router_maestro.server.schemas.anthropic import (
+     AnthropicMessagesRequest,
+     AnthropicTextBlock,
+     AnthropicUserMessage,
+ )
  from router_maestro.server.translation import (
      _extract_text_content,
      _map_stop_reason,
@@ -10,11 +13,6 @@ from router_maestro.server.translation import (
      _translate_tools,
      translate_anthropic_to_openai,
  )
- from router_maestro.server.schemas.anthropic import (
-     AnthropicMessagesRequest,
-     AnthropicTextBlock,
-     AnthropicUserMessage,
- )


  class TestModelNameTranslation:

tests/test_utils.py
@@ -1,7 +1,5 @@
  """Tests for utility functions."""

- import pytest
-
  from router_maestro.utils import (
      estimate_tokens,
      estimate_tokens_from_char_count,

uv.lock
@@ -925,7 +925,7 @@ wheels = [

  [[package]]
  name = "router-maestro"
- version = "0.1.4"
+ version = "0.1.6"
  source = { editable = "." }
  dependencies = [
      { name = "aiosqlite" },