ccproxy-api 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ccproxy/__init__.py +4 -0
- ccproxy/__main__.py +7 -0
- ccproxy/_version.py +21 -0
- ccproxy/adapters/__init__.py +11 -0
- ccproxy/adapters/base.py +80 -0
- ccproxy/adapters/openai/__init__.py +43 -0
- ccproxy/adapters/openai/adapter.py +915 -0
- ccproxy/adapters/openai/models.py +412 -0
- ccproxy/adapters/openai/streaming.py +449 -0
- ccproxy/api/__init__.py +28 -0
- ccproxy/api/app.py +225 -0
- ccproxy/api/dependencies.py +140 -0
- ccproxy/api/middleware/__init__.py +11 -0
- ccproxy/api/middleware/auth.py +0 -0
- ccproxy/api/middleware/cors.py +55 -0
- ccproxy/api/middleware/errors.py +703 -0
- ccproxy/api/middleware/headers.py +51 -0
- ccproxy/api/middleware/logging.py +175 -0
- ccproxy/api/middleware/request_id.py +69 -0
- ccproxy/api/middleware/server_header.py +62 -0
- ccproxy/api/responses.py +84 -0
- ccproxy/api/routes/__init__.py +16 -0
- ccproxy/api/routes/claude.py +181 -0
- ccproxy/api/routes/health.py +489 -0
- ccproxy/api/routes/metrics.py +1033 -0
- ccproxy/api/routes/proxy.py +238 -0
- ccproxy/auth/__init__.py +75 -0
- ccproxy/auth/bearer.py +68 -0
- ccproxy/auth/credentials_adapter.py +93 -0
- ccproxy/auth/dependencies.py +229 -0
- ccproxy/auth/exceptions.py +79 -0
- ccproxy/auth/manager.py +102 -0
- ccproxy/auth/models.py +118 -0
- ccproxy/auth/oauth/__init__.py +26 -0
- ccproxy/auth/oauth/models.py +49 -0
- ccproxy/auth/oauth/routes.py +396 -0
- ccproxy/auth/oauth/storage.py +0 -0
- ccproxy/auth/storage/__init__.py +12 -0
- ccproxy/auth/storage/base.py +57 -0
- ccproxy/auth/storage/json_file.py +159 -0
- ccproxy/auth/storage/keyring.py +192 -0
- ccproxy/claude_sdk/__init__.py +20 -0
- ccproxy/claude_sdk/client.py +169 -0
- ccproxy/claude_sdk/converter.py +331 -0
- ccproxy/claude_sdk/options.py +120 -0
- ccproxy/cli/__init__.py +14 -0
- ccproxy/cli/commands/__init__.py +8 -0
- ccproxy/cli/commands/auth.py +553 -0
- ccproxy/cli/commands/config/__init__.py +14 -0
- ccproxy/cli/commands/config/commands.py +766 -0
- ccproxy/cli/commands/config/schema_commands.py +119 -0
- ccproxy/cli/commands/serve.py +630 -0
- ccproxy/cli/docker/__init__.py +34 -0
- ccproxy/cli/docker/adapter_factory.py +157 -0
- ccproxy/cli/docker/params.py +278 -0
- ccproxy/cli/helpers.py +144 -0
- ccproxy/cli/main.py +193 -0
- ccproxy/cli/options/__init__.py +14 -0
- ccproxy/cli/options/claude_options.py +216 -0
- ccproxy/cli/options/core_options.py +40 -0
- ccproxy/cli/options/security_options.py +48 -0
- ccproxy/cli/options/server_options.py +117 -0
- ccproxy/config/__init__.py +40 -0
- ccproxy/config/auth.py +154 -0
- ccproxy/config/claude.py +124 -0
- ccproxy/config/cors.py +79 -0
- ccproxy/config/discovery.py +87 -0
- ccproxy/config/docker_settings.py +265 -0
- ccproxy/config/loader.py +108 -0
- ccproxy/config/observability.py +158 -0
- ccproxy/config/pricing.py +88 -0
- ccproxy/config/reverse_proxy.py +31 -0
- ccproxy/config/scheduler.py +89 -0
- ccproxy/config/security.py +14 -0
- ccproxy/config/server.py +81 -0
- ccproxy/config/settings.py +534 -0
- ccproxy/config/validators.py +231 -0
- ccproxy/core/__init__.py +274 -0
- ccproxy/core/async_utils.py +675 -0
- ccproxy/core/constants.py +97 -0
- ccproxy/core/errors.py +256 -0
- ccproxy/core/http.py +328 -0
- ccproxy/core/http_transformers.py +428 -0
- ccproxy/core/interfaces.py +247 -0
- ccproxy/core/logging.py +189 -0
- ccproxy/core/middleware.py +114 -0
- ccproxy/core/proxy.py +143 -0
- ccproxy/core/system.py +38 -0
- ccproxy/core/transformers.py +259 -0
- ccproxy/core/types.py +129 -0
- ccproxy/core/validators.py +288 -0
- ccproxy/docker/__init__.py +67 -0
- ccproxy/docker/adapter.py +588 -0
- ccproxy/docker/docker_path.py +207 -0
- ccproxy/docker/middleware.py +103 -0
- ccproxy/docker/models.py +228 -0
- ccproxy/docker/protocol.py +192 -0
- ccproxy/docker/stream_process.py +264 -0
- ccproxy/docker/validators.py +173 -0
- ccproxy/models/__init__.py +123 -0
- ccproxy/models/errors.py +42 -0
- ccproxy/models/messages.py +243 -0
- ccproxy/models/requests.py +85 -0
- ccproxy/models/responses.py +227 -0
- ccproxy/models/types.py +102 -0
- ccproxy/observability/__init__.py +51 -0
- ccproxy/observability/access_logger.py +400 -0
- ccproxy/observability/context.py +447 -0
- ccproxy/observability/metrics.py +539 -0
- ccproxy/observability/pushgateway.py +366 -0
- ccproxy/observability/sse_events.py +303 -0
- ccproxy/observability/stats_printer.py +755 -0
- ccproxy/observability/storage/__init__.py +1 -0
- ccproxy/observability/storage/duckdb_simple.py +665 -0
- ccproxy/observability/storage/models.py +55 -0
- ccproxy/pricing/__init__.py +19 -0
- ccproxy/pricing/cache.py +212 -0
- ccproxy/pricing/loader.py +267 -0
- ccproxy/pricing/models.py +106 -0
- ccproxy/pricing/updater.py +309 -0
- ccproxy/scheduler/__init__.py +39 -0
- ccproxy/scheduler/core.py +335 -0
- ccproxy/scheduler/exceptions.py +34 -0
- ccproxy/scheduler/manager.py +186 -0
- ccproxy/scheduler/registry.py +150 -0
- ccproxy/scheduler/tasks.py +484 -0
- ccproxy/services/__init__.py +10 -0
- ccproxy/services/claude_sdk_service.py +614 -0
- ccproxy/services/credentials/__init__.py +55 -0
- ccproxy/services/credentials/config.py +105 -0
- ccproxy/services/credentials/manager.py +562 -0
- ccproxy/services/credentials/oauth_client.py +482 -0
- ccproxy/services/proxy_service.py +1536 -0
- ccproxy/static/.keep +0 -0
- ccproxy/testing/__init__.py +34 -0
- ccproxy/testing/config.py +148 -0
- ccproxy/testing/content_generation.py +197 -0
- ccproxy/testing/mock_responses.py +262 -0
- ccproxy/testing/response_handlers.py +161 -0
- ccproxy/testing/scenarios.py +241 -0
- ccproxy/utils/__init__.py +6 -0
- ccproxy/utils/cost_calculator.py +210 -0
- ccproxy/utils/streaming_metrics.py +199 -0
- ccproxy_api-0.1.0.dist-info/METADATA +253 -0
- ccproxy_api-0.1.0.dist-info/RECORD +148 -0
- ccproxy_api-0.1.0.dist-info/WHEEL +4 -0
- ccproxy_api-0.1.0.dist-info/entry_points.txt +2 -0
- ccproxy_api-0.1.0.dist-info/licenses/LICENSE +21 -0
ccproxy/config/docker_settings.py
ADDED
@@ -0,0 +1,265 @@
"""Docker settings configuration for CCProxy API."""

import os
from typing import Any

from pydantic import BaseModel, Field, field_validator, model_validator

from ccproxy import __version__
from ccproxy.core.async_utils import format_version, get_claude_docker_home_dir


# Docker validation functions moved here to avoid utils dependency


def validate_host_path(path: str) -> str:
    """Validate host path for Docker volume mounting."""
    import os
    from pathlib import Path

    if not path:
        raise ValueError("Path cannot be empty")

    # Expand environment variables and user home directory
    expanded_path = os.path.expandvars(str(Path(path).expanduser()))

    # Convert to absolute path and normalize
    abs_path = Path(expanded_path).resolve()
    return str(abs_path)


def validate_volumes_list(volumes: list[str]) -> list[str]:
    """Validate Docker volumes list format."""
    validated = []

    for volume in volumes:
        if not volume:
            continue

        # Use validate_volume_format for comprehensive validation
        validated_volume = validate_volume_format(volume)
        validated.append(validated_volume)

    return validated


def validate_volume_format(volume: str) -> str:
    """Validate individual Docker volume format.

    Args:
        volume: Volume mount string in format 'host:container[:options]'

    Returns:
        Validated volume string with normalized host path

    Raises:
        ValueError: If volume format is invalid or host path doesn't exist
    """
    import os
    from pathlib import Path

    if not volume:
        raise ValueError("Volume cannot be empty")

    # Expected format: "host_path:container_path" or "host_path:container_path:options"
    parts = volume.split(":")
    if len(parts) < 2:
        raise ValueError(
            f"Invalid volume format: {volume}. Expected 'host:container' or 'host:container:options'"
        )

    host_path = parts[0]
    container_path = parts[1]
    options = ":".join(parts[2:]) if len(parts) > 2 else ""

    if not host_path or not container_path:
        raise ValueError(
            f"Invalid volume format: {volume}. Expected 'host:container' or 'host:container:options'"
        )

    # Expand environment variables and user home directory
    expanded_host_path = os.path.expandvars(str(Path(host_path).expanduser()))

    # Convert to absolute path
    abs_host_path = Path(expanded_host_path).resolve()

    # Check if the path exists
    if not abs_host_path.exists():
        raise ValueError(f"Host path does not exist: {expanded_host_path}")

    # Validate container path (should be absolute)
    if not container_path.startswith("/"):
        raise ValueError(f"Container path must be absolute: {container_path}")

    # Reconstruct the volume string with normalized host path
    result = f"{abs_host_path}:{container_path}"
    if options:
        result += f":{options}"

    return result


def validate_environment_variable(env_var: str) -> tuple[str, str]:
    """Validate environment variable format.

    Args:
        env_var: Environment variable string in format 'KEY=VALUE'

    Returns:
        Tuple of (key, value)

    Raises:
        ValueError: If environment variable format is invalid
    """
    if not env_var:
        raise ValueError("Environment variable cannot be empty")

    if "=" not in env_var:
        raise ValueError(
            f"Invalid environment variable format: {env_var}. Expected KEY=VALUE format"
        )

    # Split on first equals sign only (value may contain equals)
    key, value = env_var.split("=", 1)

    if not key:
        raise ValueError(
            f"Invalid environment variable format: {env_var}. Expected KEY=VALUE format"
        )

    return key, value


def validate_docker_volumes(volumes: list[str]) -> list[str]:
    """Validate Docker volumes list format.

    Args:
        volumes: List of volume mount strings

    Returns:
        List of validated volume strings with normalized host paths

    Raises:
        ValueError: If any volume format is invalid
    """
    validated = []

    for volume in volumes:
        if not volume:
            continue

        validated_volume = validate_volume_format(volume)
        validated.append(validated_volume)

    return validated


class DockerSettings(BaseModel):
    """Docker configuration settings for running Claude commands in containers."""

    docker_image: str = Field(
        default=f"ghcr.io/caddyglow/ccproxy:{format_version(__version__, level='docker')}",
        description="Docker image to use for Claude commands",
    )

    docker_volumes: list[str] = Field(
        default_factory=list,
        description="List of volume mounts in 'host:container[:options]' format",
    )

    docker_environment: dict[str, str] = Field(
        default_factory=dict,
        description="Environment variables to pass to Docker container",
    )

    docker_additional_args: list[str] = Field(
        default_factory=list,
        description="Additional arguments to pass to docker run command",
    )

    docker_home_directory: str | None = Field(
        default=None,
        description="Local host directory to mount as the home directory in container",
    )

    docker_workspace_directory: str | None = Field(
        default=None,
        description="Local host directory to mount as the workspace directory in container",
    )

    user_mapping_enabled: bool = Field(
        default=True,
        description="Enable/disable UID/GID mapping for container user",
    )

    user_uid: int | None = Field(
        default=None,
        description="User ID to run container as (auto-detect current user if None)",
        ge=0,
    )

    user_gid: int | None = Field(
        default=None,
        description="Group ID to run container as (auto-detect current user if None)",
        ge=0,
    )

    @field_validator("docker_volumes")
    @classmethod
    def validate_docker_volumes(cls, v: list[str]) -> list[str]:
        """Validate Docker volume mount format."""
        return validate_volumes_list(v)

    @field_validator("docker_home_directory")
    @classmethod
    def validate_docker_home_directory(cls, v: str | None) -> str | None:
        """Validate and normalize Docker home directory (host path)."""
        if v is None:
            return None
        return validate_host_path(v)

    @field_validator("docker_workspace_directory")
    @classmethod
    def validate_docker_workspace_directory(cls, v: str | None) -> str | None:
        """Validate and normalize Docker workspace directory (host path)."""
        if v is None:
            return None
        return validate_host_path(v)

    @model_validator(mode="after")
    def setup_docker_configuration(self) -> "DockerSettings":
        """Set up Docker volumes and user mapping configuration."""
        # Set up Docker volumes based on home and workspace directories
        if (
            not self.docker_volumes
            and not self.docker_home_directory
            and not self.docker_workspace_directory
        ):
            # Use XDG config directory for Claude CLI data
            claude_config_dir = get_claude_docker_home_dir()
            home_host_path = str(claude_config_dir)
            workspace_host_path = os.path.expandvars("$PWD")

            self.docker_volumes = [
                f"{home_host_path}:/data/home",
                f"{workspace_host_path}:/data/workspace",
            ]

            # Update environment variables to point to container paths
            if "CLAUDE_HOME" not in self.docker_environment:
                self.docker_environment["CLAUDE_HOME"] = "/data/home"
            if "CLAUDE_WORKSPACE" not in self.docker_environment:
                self.docker_environment["CLAUDE_WORKSPACE"] = "/data/workspace"

        # Set up user mapping with auto-detection if enabled but not configured
        if self.user_mapping_enabled and os.name == "posix":
            # Auto-detect current user UID/GID if not explicitly set
            if self.user_uid is None:
                self.user_uid = os.getuid()
            if self.user_gid is None:
                self.user_gid = os.getgid()
        elif self.user_mapping_enabled and os.name != "posix":
            # Disable user mapping on non-Unix systems (Windows)
            self.user_mapping_enabled = False

        return self
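
A minimal usage sketch (not part of the packaged code; paths are illustrative and POSIX-style) showing how the volume validators and the model validator above behave:

# Illustrative only: exercises the validators defined in docker_settings.py.
from pathlib import Path

from ccproxy.config.docker_settings import DockerSettings, validate_volume_format

home = str(Path.home())  # an existing host directory (POSIX-style path assumed)

# The host side is expanded and resolved; the container side must be absolute.
print(validate_volume_format(f"{home}:/data/home:ro"))

try:
    validate_volume_format(f"{home}:relative/path")  # relative container path is rejected
except ValueError as exc:
    print(f"rejected: {exc}")

# With no explicit volumes or directories, the model validator mounts the Claude
# config dir at /data/home and $PWD at /data/workspace, and on POSIX systems
# fills user_uid/user_gid from the current user.
settings = DockerSettings()
print(settings.docker_volumes, settings.user_uid, settings.user_gid)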
ccproxy/config/loader.py
ADDED
@@ -0,0 +1,108 @@
"""Configuration file loader for ccproxy."""

import os
from pathlib import Path
from typing import Any, Optional

from pydantic import BaseModel

from ccproxy.config.discovery import find_toml_config_file
from ccproxy.config.settings import Settings


class ConfigurationError(Exception):
    """Configuration loading error."""

    pass


class ConfigLoader:
    """Load configuration from multiple sources."""

    def __init__(self) -> None:
        self._cached_config: dict[str, Any] | None = None

    def load(self, config_file: Path | None = None) -> Settings:
        """Load configuration from multiple sources.

        Priority: ENV > config file > defaults

        Args:
            config_file: Optional path to config file

        Returns:
            Settings instance with loaded configuration

        Raises:
            ConfigurationError: If config file is invalid or cannot be loaded
        """
        config_data = self._load_config_file(config_file)

        # Environment variables take precedence over config file
        return Settings(**config_data) if config_data else Settings()

    def _load_config_file(self, config_file: Path | None = None) -> dict[str, Any]:
        """Load configuration from file.

        Args:
            config_file: Optional path to config file

        Returns:
            Configuration dictionary

        Raises:
            ConfigurationError: If config file is invalid
        """
        if config_file is None:
            config_file = find_toml_config_file()

        if config_file is None or not config_file.exists():
            return {}

        try:
            if config_file.suffix.lower() in [".toml", ".tml"]:
                return self._load_toml_config(config_file)
            else:
                raise ConfigurationError(
                    f"Unsupported config file format: {config_file.suffix}. Only TOML (.toml) files are supported."
                )
        except Exception as e:
            raise ConfigurationError(
                f"Failed to load config file {config_file}: {e}"
            ) from e

    def _load_toml_config(self, config_file: Path) -> dict[str, Any]:
        """Load TOML configuration file."""
        try:
            import tomllib
        except ImportError:
            try:
                import tomli as tomllib  # type: ignore
            except ImportError:
                raise ConfigurationError(
                    "TOML support not available. Install 'tomli' for Python < 3.11"
                ) from None

        with config_file.open("rb") as f:
            data = tomllib.load(f)
            return data if isinstance(data, dict) else {}

    def clear_cache(self) -> None:
        """Clear cached configuration."""
        self._cached_config = None


# Global config loader instance
config_loader = ConfigLoader()


def load_config(config_file: Path | None = None) -> Settings:
    """Load configuration using the global loader.

    Args:
        config_file: Optional path to config file

    Returns:
        Settings instance with loaded configuration
    """
    return config_loader.load(config_file)
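
A small usage sketch (not from the package; the config filename is hypothetical). The loader only understands TOML; a missing file silently falls back to plain Settings() defaults, while an existing file with another suffix raises ConfigurationError:

# Illustrative only; ".ccproxy.toml" is a hypothetical path.
from pathlib import Path

from ccproxy.config.loader import load_config

# No argument: find_toml_config_file() searches the default locations;
# if nothing is found, Settings() is built from defaults and environment.
settings = load_config()

# Explicit path: missing file -> defaults; existing non-.toml file -> ConfigurationError.
settings = load_config(Path(".ccproxy.toml"))
print(type(settings).__name__)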
ccproxy/config/observability.py
ADDED
@@ -0,0 +1,158 @@
"""Observability configuration settings."""

from __future__ import annotations

import os
from pathlib import Path
from typing import Literal

from pydantic import BaseModel, Field, field_validator, model_validator


class ObservabilitySettings(BaseModel):
    """Observability configuration settings."""

    # Endpoint Controls
    metrics_endpoint_enabled: bool = Field(
        default=False,
        description="Enable Prometheus /metrics endpoint",
    )

    logs_endpoints_enabled: bool = Field(
        default=False,
        description="Enable logs query/analytics/streaming endpoints (/logs/*)",
    )

    dashboard_enabled: bool = Field(
        default=False,
        description="Enable metrics dashboard endpoint (/dashboard)",
    )

    # Data Collection & Storage
    logs_collection_enabled: bool = Field(
        default=False,
        description="Enable collection of request/response logs to storage backend",
    )

    log_storage_backend: Literal["duckdb", "none"] = Field(
        default="duckdb",
        description="Storage backend for logs ('duckdb' or 'none')",
    )

    # Storage Configuration
    duckdb_path: str = Field(
        default_factory=lambda: str(
            Path(os.environ.get("XDG_DATA_HOME", Path.home() / ".local" / "share"))
            / "ccproxy"
            / "metrics.duckdb"
        ),
        description="Path to DuckDB database file",
    )

    # Pushgateway Configuration
    pushgateway_url: str | None = Field(
        default=None,
        description="Pushgateway URL (e.g., http://pushgateway:9091)",
    )

    pushgateway_job: str = Field(
        default="ccproxy",
        description="Job name for Pushgateway metrics",
    )

    # Stats printing configuration
    stats_printing_format: str = Field(
        default="console",
        description="Format for stats output: 'console', 'rich', 'log', 'json'",
    )

    # Enhanced logging integration
    logging_pipeline_enabled: bool = Field(
        default=True,
        description="Enable structlog pipeline integration for observability",
    )

    logging_format: str = Field(
        default="auto",
        description="Logging format for observability: 'rich', 'json', 'auto' (auto-detects based on environment)",
    )

    @model_validator(mode="after")
    def check_feature_dependencies(self) -> ObservabilitySettings:
        """Validate feature dependencies to prevent invalid configurations."""
        # Dashboard requires logs endpoints (functional dependency)
        if self.dashboard_enabled and not self.logs_endpoints_enabled:
            raise ValueError(
                "Cannot enable 'dashboard_enabled' without 'logs_endpoints_enabled'. "
                "Dashboard needs logs API to function."
            )

        # Logs endpoints require storage to query from
        if self.logs_endpoints_enabled and self.log_storage_backend == "none":
            raise ValueError(
                "Cannot enable 'logs_endpoints_enabled' when 'log_storage_backend' is 'none'. "
                "Logs endpoints need storage backend to query data."
            )

        # Log collection requires storage to write to
        if self.logs_collection_enabled and self.log_storage_backend == "none":
            raise ValueError(
                "Cannot enable 'logs_collection_enabled' when 'log_storage_backend' is 'none'. "
                "Collection needs storage backend to persist data."
            )

        return self

    @field_validator("stats_printing_format")
    @classmethod
    def validate_stats_printing_format(cls, v: str) -> str:
        """Validate and normalize stats printing format."""
        lower_v = v.lower()
        valid_formats = ["console", "rich", "log", "json"]
        if lower_v not in valid_formats:
            raise ValueError(
                f"Invalid stats printing format: {v}. Must be one of {valid_formats}"
            )
        return lower_v

    @field_validator("logging_format")
    @classmethod
    def validate_logging_format(cls, v: str) -> str:
        """Validate and normalize logging format."""
        lower_v = v.lower()
        valid_formats = ["auto", "rich", "json", "plain"]
        if lower_v not in valid_formats:
            raise ValueError(
                f"Invalid logging format: {v}. Must be one of {valid_formats}"
            )
        return lower_v

    @property
    def needs_storage_backend(self) -> bool:
        """Check if any feature requires storage backend initialization."""
        return self.logs_endpoints_enabled or self.logs_collection_enabled

    @property
    def any_endpoint_enabled(self) -> bool:
        """Check if any observability endpoint is enabled."""
        return (
            self.metrics_endpoint_enabled
            or self.logs_endpoints_enabled
            or self.dashboard_enabled
        )

    # Backward compatibility properties
    @property
    def metrics_enabled(self) -> bool:
        """Backward compatibility: True if any metrics feature is enabled."""
        return self.any_endpoint_enabled

    @property
    def duckdb_enabled(self) -> bool:
        """Backward compatibility: True if DuckDB storage backend is selected."""
        return self.log_storage_backend == "duckdb"

    @property
    def enabled(self) -> bool:
        """Check if observability is enabled (backward compatibility property)."""
        return self.any_endpoint_enabled or self.logging_pipeline_enabled
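
A short sketch (illustrative, not shipped with the package) of how the cross-field validation above plays out when constructing the model:

# Illustrative only: exercises ObservabilitySettings' feature-dependency checks.
from pydantic import ValidationError

from ccproxy.config.observability import ObservabilitySettings

# Defaults: all endpoints off, only the structlog pipeline enabled.
obs = ObservabilitySettings()
print(obs.enabled, obs.needs_storage_backend)   # True False

# Dashboard without the logs endpoints is rejected by the model validator.
try:
    ObservabilitySettings(dashboard_enabled=True)
except ValidationError as exc:
    print(exc.errors()[0]["msg"])

# A consistent combination: collection plus endpoints backed by DuckDB.
obs = ObservabilitySettings(
    logs_collection_enabled=True,
    logs_endpoints_enabled=True,
    log_storage_backend="duckdb",
)
print(obs.duckdb_enabled, obs.any_endpoint_enabled)  # True True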
ccproxy/config/pricing.py
ADDED
@@ -0,0 +1,88 @@
"""Pricing configuration settings."""

from pathlib import Path

from pydantic import Field, field_validator
from pydantic_settings import BaseSettings

from ccproxy.core.system import get_xdg_cache_home


class PricingSettings(BaseSettings):
    """
    Configuration settings for the pricing system.

    Controls pricing cache behavior, data sources, and update mechanisms.
    Settings can be configured via environment variables with PRICING__ prefix.
    """

    # Cache settings
    cache_dir: Path = Field(
        default_factory=lambda: get_xdg_cache_home() / "ccproxy",
        description="Directory for pricing cache files (defaults to XDG_CACHE_HOME/ccproxy)",
    )

    cache_ttl_hours: int = Field(
        default=24,
        ge=1,
        le=168,  # Max 1 week
        description="Hours before pricing cache expires",
    )

    # Data source settings
    source_url: str = Field(
        default="https://raw.githubusercontent.com/BerriAI/litellm/main/model_prices_and_context_window.json",
        description="URL to download pricing data from",
    )

    download_timeout: int = Field(
        default=30,
        ge=1,
        le=300,  # Max 5 minutes
        description="Request timeout in seconds for downloading pricing data",
    )

    # Update behavior settings
    auto_update: bool = Field(
        default=True,
        description="Whether to automatically update stale cache",
    )

    fallback_to_embedded: bool = Field(
        default=True,
        description="Whether to fallback to embedded pricing on failure",
    )

    # Memory cache settings
    memory_cache_ttl: int = Field(
        default=300,
        ge=1,
        le=3600,  # Max 1 hour
        description="Time to live for in-memory pricing cache in seconds",
    )

    @field_validator("cache_dir", mode="before")
    @classmethod
    def validate_cache_dir(cls, v: str | Path | None) -> Path:
        """Validate and convert cache directory path."""
        if v is None:
            return get_xdg_cache_home() / "ccproxy"
        if isinstance(v, str):
            if v.startswith("~/"):
                return Path(v).expanduser()
            return Path(v)
        return v

    @field_validator("source_url")
    @classmethod
    def validate_source_url(cls, v: str) -> str:
        """Validate source URL format."""
        if not v.startswith(("http://", "https://")):
            raise ValueError("Source URL must start with http:// or https://")
        return v

    class Config:
        """Pydantic configuration."""

        env_prefix = "PRICING__"
        case_sensitive = False
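
A brief sketch (illustrative; the environment values are examples, not project defaults) of overriding PricingSettings through the PRICING__ environment prefix declared above:

# Illustrative only: demonstrates the PRICING__ env_prefix and the cache_dir validator.
import os

from ccproxy.config.pricing import PricingSettings

os.environ["PRICING__CACHE_TTL_HOURS"] = "48"
os.environ["PRICING__CACHE_DIR"] = "~/pricing-cache"  # "~/" is expanded by validate_cache_dir

settings = PricingSettings()
print(settings.cache_ttl_hours)  # 48
print(settings.cache_dir)        # e.g. /home/user/pricing-cache
print(settings.source_url)       # LiteLLM model_prices_and_context_window.json by default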
ccproxy/config/reverse_proxy.py
ADDED
@@ -0,0 +1,31 @@
"""Reverse proxy configuration settings."""

from typing import Literal

from pydantic import BaseModel, Field


class ReverseProxySettings(BaseModel):
    """Reverse proxy configuration settings."""

    target_url: str = Field(
        default="https://api.anthropic.com",
        description="Target URL for reverse proxy requests",
    )

    timeout: float = Field(
        default=120.0,
        description="Timeout for reverse proxy requests in seconds",
        ge=1.0,
        le=600.0,
    )

    default_mode: Literal["claude_code", "full", "minimal"] = Field(
        default="claude_code",
        description="Default transformation mode for root path reverse proxy, over claude code or auth injection with full",
    )

    claude_code_prefix: str = Field(
        default="/cc",
        description="URL prefix for Claude Code SDK endpoints",
    )