ccproxy-api 0.1.6__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ccproxy/api/__init__.py +1 -15
- ccproxy/api/app.py +439 -212
- ccproxy/api/bootstrap.py +30 -0
- ccproxy/api/decorators.py +85 -0
- ccproxy/api/dependencies.py +145 -176
- ccproxy/api/format_validation.py +54 -0
- ccproxy/api/middleware/cors.py +6 -3
- ccproxy/api/middleware/errors.py +402 -530
- ccproxy/api/middleware/hooks.py +563 -0
- ccproxy/api/middleware/normalize_headers.py +59 -0
- ccproxy/api/middleware/request_id.py +35 -16
- ccproxy/api/middleware/streaming_hooks.py +292 -0
- ccproxy/api/routes/__init__.py +5 -14
- ccproxy/api/routes/health.py +39 -672
- ccproxy/api/routes/plugins.py +277 -0
- ccproxy/auth/__init__.py +2 -19
- ccproxy/auth/bearer.py +25 -15
- ccproxy/auth/dependencies.py +123 -157
- ccproxy/auth/exceptions.py +0 -12
- ccproxy/auth/manager.py +35 -49
- ccproxy/auth/managers/__init__.py +10 -0
- ccproxy/auth/managers/base.py +523 -0
- ccproxy/auth/managers/base_enhanced.py +63 -0
- ccproxy/auth/managers/token_snapshot.py +77 -0
- ccproxy/auth/models/base.py +65 -0
- ccproxy/auth/models/credentials.py +40 -0
- ccproxy/auth/oauth/__init__.py +4 -18
- ccproxy/auth/oauth/base.py +533 -0
- ccproxy/auth/oauth/cli_errors.py +37 -0
- ccproxy/auth/oauth/flows.py +430 -0
- ccproxy/auth/oauth/protocol.py +366 -0
- ccproxy/auth/oauth/registry.py +408 -0
- ccproxy/auth/oauth/router.py +396 -0
- ccproxy/auth/oauth/routes.py +186 -113
- ccproxy/auth/oauth/session.py +151 -0
- ccproxy/auth/oauth/templates.py +342 -0
- ccproxy/auth/storage/__init__.py +2 -5
- ccproxy/auth/storage/base.py +279 -5
- ccproxy/auth/storage/generic.py +134 -0
- ccproxy/cli/__init__.py +1 -2
- ccproxy/cli/_settings_help.py +351 -0
- ccproxy/cli/commands/auth.py +1519 -793
- ccproxy/cli/commands/config/commands.py +209 -276
- ccproxy/cli/commands/plugins.py +669 -0
- ccproxy/cli/commands/serve.py +75 -810
- ccproxy/cli/commands/status.py +254 -0
- ccproxy/cli/decorators.py +83 -0
- ccproxy/cli/helpers.py +22 -60
- ccproxy/cli/main.py +359 -10
- ccproxy/cli/options/claude_options.py +0 -25
- ccproxy/config/__init__.py +7 -11
- ccproxy/config/core.py +227 -0
- ccproxy/config/env_generator.py +232 -0
- ccproxy/config/runtime.py +67 -0
- ccproxy/config/security.py +36 -3
- ccproxy/config/settings.py +382 -441
- ccproxy/config/toml_generator.py +299 -0
- ccproxy/config/utils.py +452 -0
- ccproxy/core/__init__.py +7 -271
- ccproxy/{_version.py → core/_version.py} +16 -3
- ccproxy/core/async_task_manager.py +516 -0
- ccproxy/core/async_utils.py +47 -14
- ccproxy/core/auth/__init__.py +6 -0
- ccproxy/core/constants.py +16 -50
- ccproxy/core/errors.py +53 -0
- ccproxy/core/id_utils.py +20 -0
- ccproxy/core/interfaces.py +16 -123
- ccproxy/core/logging.py +473 -18
- ccproxy/core/plugins/__init__.py +77 -0
- ccproxy/core/plugins/cli_discovery.py +211 -0
- ccproxy/core/plugins/declaration.py +455 -0
- ccproxy/core/plugins/discovery.py +604 -0
- ccproxy/core/plugins/factories.py +967 -0
- ccproxy/core/plugins/hooks/__init__.py +30 -0
- ccproxy/core/plugins/hooks/base.py +58 -0
- ccproxy/core/plugins/hooks/events.py +46 -0
- ccproxy/core/plugins/hooks/implementations/__init__.py +16 -0
- ccproxy/core/plugins/hooks/implementations/formatters/__init__.py +11 -0
- ccproxy/core/plugins/hooks/implementations/formatters/json.py +552 -0
- ccproxy/core/plugins/hooks/implementations/formatters/raw.py +370 -0
- ccproxy/core/plugins/hooks/implementations/http_tracer.py +431 -0
- ccproxy/core/plugins/hooks/layers.py +44 -0
- ccproxy/core/plugins/hooks/manager.py +186 -0
- ccproxy/core/plugins/hooks/registry.py +139 -0
- ccproxy/core/plugins/hooks/thread_manager.py +203 -0
- ccproxy/core/plugins/hooks/types.py +22 -0
- ccproxy/core/plugins/interfaces.py +416 -0
- ccproxy/core/plugins/loader.py +166 -0
- ccproxy/core/plugins/middleware.py +233 -0
- ccproxy/core/plugins/models.py +59 -0
- ccproxy/core/plugins/protocol.py +180 -0
- ccproxy/core/plugins/runtime.py +519 -0
- ccproxy/{observability/context.py → core/request_context.py} +137 -94
- ccproxy/core/status_report.py +211 -0
- ccproxy/core/transformers.py +13 -8
- ccproxy/data/claude_headers_fallback.json +558 -0
- ccproxy/data/codex_headers_fallback.json +121 -0
- ccproxy/http/__init__.py +30 -0
- ccproxy/http/base.py +95 -0
- ccproxy/http/client.py +323 -0
- ccproxy/http/hooks.py +642 -0
- ccproxy/http/pool.py +279 -0
- ccproxy/llms/formatters/__init__.py +7 -0
- ccproxy/llms/formatters/anthropic_to_openai/__init__.py +55 -0
- ccproxy/llms/formatters/anthropic_to_openai/errors.py +65 -0
- ccproxy/llms/formatters/anthropic_to_openai/requests.py +356 -0
- ccproxy/llms/formatters/anthropic_to_openai/responses.py +153 -0
- ccproxy/llms/formatters/anthropic_to_openai/streams.py +1546 -0
- ccproxy/llms/formatters/base.py +140 -0
- ccproxy/llms/formatters/base_model.py +33 -0
- ccproxy/llms/formatters/common/__init__.py +51 -0
- ccproxy/llms/formatters/common/identifiers.py +48 -0
- ccproxy/llms/formatters/common/streams.py +254 -0
- ccproxy/llms/formatters/common/thinking.py +74 -0
- ccproxy/llms/formatters/common/usage.py +135 -0
- ccproxy/llms/formatters/constants.py +55 -0
- ccproxy/llms/formatters/context.py +116 -0
- ccproxy/llms/formatters/mapping.py +33 -0
- ccproxy/llms/formatters/openai_to_anthropic/__init__.py +55 -0
- ccproxy/llms/formatters/openai_to_anthropic/_helpers.py +141 -0
- ccproxy/llms/formatters/openai_to_anthropic/errors.py +53 -0
- ccproxy/llms/formatters/openai_to_anthropic/requests.py +674 -0
- ccproxy/llms/formatters/openai_to_anthropic/responses.py +285 -0
- ccproxy/llms/formatters/openai_to_anthropic/streams.py +530 -0
- ccproxy/llms/formatters/openai_to_openai/__init__.py +53 -0
- ccproxy/llms/formatters/openai_to_openai/_helpers.py +325 -0
- ccproxy/llms/formatters/openai_to_openai/errors.py +6 -0
- ccproxy/llms/formatters/openai_to_openai/requests.py +388 -0
- ccproxy/llms/formatters/openai_to_openai/responses.py +594 -0
- ccproxy/llms/formatters/openai_to_openai/streams.py +1832 -0
- ccproxy/llms/formatters/utils.py +306 -0
- ccproxy/llms/models/__init__.py +9 -0
- ccproxy/llms/models/anthropic.py +619 -0
- ccproxy/llms/models/openai.py +844 -0
- ccproxy/llms/streaming/__init__.py +26 -0
- ccproxy/llms/streaming/accumulators.py +1074 -0
- ccproxy/llms/streaming/formatters.py +251 -0
- ccproxy/{adapters/openai/streaming.py → llms/streaming/processors.py} +193 -240
- ccproxy/models/__init__.py +8 -159
- ccproxy/models/detection.py +92 -193
- ccproxy/models/provider.py +75 -0
- ccproxy/plugins/access_log/README.md +32 -0
- ccproxy/plugins/access_log/__init__.py +20 -0
- ccproxy/plugins/access_log/config.py +33 -0
- ccproxy/plugins/access_log/formatter.py +126 -0
- ccproxy/plugins/access_log/hook.py +763 -0
- ccproxy/plugins/access_log/logger.py +254 -0
- ccproxy/plugins/access_log/plugin.py +137 -0
- ccproxy/plugins/access_log/writer.py +109 -0
- ccproxy/plugins/analytics/README.md +24 -0
- ccproxy/plugins/analytics/__init__.py +1 -0
- ccproxy/plugins/analytics/config.py +5 -0
- ccproxy/plugins/analytics/ingest.py +85 -0
- ccproxy/plugins/analytics/models.py +97 -0
- ccproxy/plugins/analytics/plugin.py +121 -0
- ccproxy/plugins/analytics/routes.py +163 -0
- ccproxy/plugins/analytics/service.py +284 -0
- ccproxy/plugins/claude_api/README.md +29 -0
- ccproxy/plugins/claude_api/__init__.py +10 -0
- ccproxy/plugins/claude_api/adapter.py +829 -0
- ccproxy/plugins/claude_api/config.py +52 -0
- ccproxy/plugins/claude_api/detection_service.py +461 -0
- ccproxy/plugins/claude_api/health.py +175 -0
- ccproxy/plugins/claude_api/hooks.py +284 -0
- ccproxy/plugins/claude_api/models.py +256 -0
- ccproxy/plugins/claude_api/plugin.py +298 -0
- ccproxy/plugins/claude_api/routes.py +118 -0
- ccproxy/plugins/claude_api/streaming_metrics.py +68 -0
- ccproxy/plugins/claude_api/tasks.py +84 -0
- ccproxy/plugins/claude_sdk/README.md +35 -0
- ccproxy/plugins/claude_sdk/__init__.py +80 -0
- ccproxy/plugins/claude_sdk/adapter.py +749 -0
- ccproxy/plugins/claude_sdk/auth.py +57 -0
- ccproxy/{claude_sdk → plugins/claude_sdk}/client.py +63 -39
- ccproxy/plugins/claude_sdk/config.py +210 -0
- ccproxy/{claude_sdk → plugins/claude_sdk}/converter.py +6 -6
- ccproxy/plugins/claude_sdk/detection_service.py +163 -0
- ccproxy/{services/claude_sdk_service.py → plugins/claude_sdk/handler.py} +123 -304
- ccproxy/plugins/claude_sdk/health.py +113 -0
- ccproxy/plugins/claude_sdk/hooks.py +115 -0
- ccproxy/{claude_sdk → plugins/claude_sdk}/manager.py +42 -32
- ccproxy/{claude_sdk → plugins/claude_sdk}/message_queue.py +8 -8
- ccproxy/{models/claude_sdk.py → plugins/claude_sdk/models.py} +64 -16
- ccproxy/plugins/claude_sdk/options.py +154 -0
- ccproxy/{claude_sdk → plugins/claude_sdk}/parser.py +23 -5
- ccproxy/plugins/claude_sdk/plugin.py +269 -0
- ccproxy/plugins/claude_sdk/routes.py +104 -0
- ccproxy/{claude_sdk → plugins/claude_sdk}/session_client.py +124 -12
- ccproxy/plugins/claude_sdk/session_pool.py +700 -0
- ccproxy/{claude_sdk → plugins/claude_sdk}/stream_handle.py +48 -43
- ccproxy/{claude_sdk → plugins/claude_sdk}/stream_worker.py +22 -18
- ccproxy/{claude_sdk → plugins/claude_sdk}/streaming.py +50 -16
- ccproxy/plugins/claude_sdk/tasks.py +97 -0
- ccproxy/plugins/claude_shared/README.md +18 -0
- ccproxy/plugins/claude_shared/__init__.py +12 -0
- ccproxy/plugins/claude_shared/model_defaults.py +171 -0
- ccproxy/plugins/codex/README.md +35 -0
- ccproxy/plugins/codex/__init__.py +6 -0
- ccproxy/plugins/codex/adapter.py +635 -0
- ccproxy/{config/codex.py → plugins/codex/config.py} +78 -12
- ccproxy/plugins/codex/detection_service.py +544 -0
- ccproxy/plugins/codex/health.py +162 -0
- ccproxy/plugins/codex/hooks.py +263 -0
- ccproxy/plugins/codex/model_defaults.py +39 -0
- ccproxy/plugins/codex/models.py +263 -0
- ccproxy/plugins/codex/plugin.py +275 -0
- ccproxy/plugins/codex/routes.py +129 -0
- ccproxy/plugins/codex/streaming_metrics.py +324 -0
- ccproxy/plugins/codex/tasks.py +106 -0
- ccproxy/plugins/codex/utils/__init__.py +1 -0
- ccproxy/plugins/codex/utils/sse_parser.py +106 -0
- ccproxy/plugins/command_replay/README.md +34 -0
- ccproxy/plugins/command_replay/__init__.py +17 -0
- ccproxy/plugins/command_replay/config.py +133 -0
- ccproxy/plugins/command_replay/formatter.py +432 -0
- ccproxy/plugins/command_replay/hook.py +294 -0
- ccproxy/plugins/command_replay/plugin.py +161 -0
- ccproxy/plugins/copilot/README.md +39 -0
- ccproxy/plugins/copilot/__init__.py +11 -0
- ccproxy/plugins/copilot/adapter.py +465 -0
- ccproxy/plugins/copilot/config.py +155 -0
- ccproxy/plugins/copilot/data/copilot_fallback.json +41 -0
- ccproxy/plugins/copilot/detection_service.py +255 -0
- ccproxy/plugins/copilot/manager.py +275 -0
- ccproxy/plugins/copilot/model_defaults.py +284 -0
- ccproxy/plugins/copilot/models.py +148 -0
- ccproxy/plugins/copilot/oauth/__init__.py +16 -0
- ccproxy/plugins/copilot/oauth/client.py +494 -0
- ccproxy/plugins/copilot/oauth/models.py +385 -0
- ccproxy/plugins/copilot/oauth/provider.py +602 -0
- ccproxy/plugins/copilot/oauth/storage.py +170 -0
- ccproxy/plugins/copilot/plugin.py +360 -0
- ccproxy/plugins/copilot/routes.py +294 -0
- ccproxy/plugins/credential_balancer/README.md +124 -0
- ccproxy/plugins/credential_balancer/__init__.py +6 -0
- ccproxy/plugins/credential_balancer/config.py +270 -0
- ccproxy/plugins/credential_balancer/factory.py +415 -0
- ccproxy/plugins/credential_balancer/hook.py +51 -0
- ccproxy/plugins/credential_balancer/manager.py +587 -0
- ccproxy/plugins/credential_balancer/plugin.py +146 -0
- ccproxy/plugins/dashboard/README.md +25 -0
- ccproxy/plugins/dashboard/__init__.py +1 -0
- ccproxy/plugins/dashboard/config.py +8 -0
- ccproxy/plugins/dashboard/plugin.py +71 -0
- ccproxy/plugins/dashboard/routes.py +67 -0
- ccproxy/plugins/docker/README.md +32 -0
- ccproxy/{docker → plugins/docker}/__init__.py +3 -0
- ccproxy/{docker → plugins/docker}/adapter.py +108 -10
- ccproxy/plugins/docker/config.py +82 -0
- ccproxy/{docker → plugins/docker}/docker_path.py +4 -3
- ccproxy/{docker → plugins/docker}/middleware.py +2 -2
- ccproxy/plugins/docker/plugin.py +198 -0
- ccproxy/{docker → plugins/docker}/stream_process.py +3 -3
- ccproxy/plugins/duckdb_storage/README.md +26 -0
- ccproxy/plugins/duckdb_storage/__init__.py +1 -0
- ccproxy/plugins/duckdb_storage/config.py +22 -0
- ccproxy/plugins/duckdb_storage/plugin.py +128 -0
- ccproxy/plugins/duckdb_storage/routes.py +51 -0
- ccproxy/plugins/duckdb_storage/storage.py +633 -0
- ccproxy/plugins/max_tokens/README.md +38 -0
- ccproxy/plugins/max_tokens/__init__.py +12 -0
- ccproxy/plugins/max_tokens/adapter.py +235 -0
- ccproxy/plugins/max_tokens/config.py +86 -0
- ccproxy/plugins/max_tokens/models.py +53 -0
- ccproxy/plugins/max_tokens/plugin.py +200 -0
- ccproxy/plugins/max_tokens/service.py +271 -0
- ccproxy/plugins/max_tokens/token_limits.json +54 -0
- ccproxy/plugins/metrics/README.md +35 -0
- ccproxy/plugins/metrics/__init__.py +10 -0
- ccproxy/{observability/metrics.py → plugins/metrics/collector.py} +20 -153
- ccproxy/plugins/metrics/config.py +85 -0
- ccproxy/plugins/metrics/grafana/dashboards/ccproxy-dashboard.json +1720 -0
- ccproxy/plugins/metrics/hook.py +403 -0
- ccproxy/plugins/metrics/plugin.py +268 -0
- ccproxy/{observability → plugins/metrics}/pushgateway.py +57 -59
- ccproxy/plugins/metrics/routes.py +107 -0
- ccproxy/plugins/metrics/tasks.py +117 -0
- ccproxy/plugins/oauth_claude/README.md +35 -0
- ccproxy/plugins/oauth_claude/__init__.py +14 -0
- ccproxy/plugins/oauth_claude/client.py +270 -0
- ccproxy/plugins/oauth_claude/config.py +84 -0
- ccproxy/plugins/oauth_claude/manager.py +482 -0
- ccproxy/plugins/oauth_claude/models.py +266 -0
- ccproxy/plugins/oauth_claude/plugin.py +149 -0
- ccproxy/plugins/oauth_claude/provider.py +571 -0
- ccproxy/plugins/oauth_claude/storage.py +212 -0
- ccproxy/plugins/oauth_codex/README.md +38 -0
- ccproxy/plugins/oauth_codex/__init__.py +14 -0
- ccproxy/plugins/oauth_codex/client.py +224 -0
- ccproxy/plugins/oauth_codex/config.py +95 -0
- ccproxy/plugins/oauth_codex/manager.py +256 -0
- ccproxy/plugins/oauth_codex/models.py +239 -0
- ccproxy/plugins/oauth_codex/plugin.py +146 -0
- ccproxy/plugins/oauth_codex/provider.py +574 -0
- ccproxy/plugins/oauth_codex/storage.py +92 -0
- ccproxy/plugins/permissions/README.md +28 -0
- ccproxy/plugins/permissions/__init__.py +22 -0
- ccproxy/plugins/permissions/config.py +28 -0
- ccproxy/{cli/commands/permission_handler.py → plugins/permissions/handlers/cli.py} +49 -25
- ccproxy/plugins/permissions/handlers/protocol.py +33 -0
- ccproxy/plugins/permissions/handlers/terminal.py +675 -0
- ccproxy/{api/routes → plugins/permissions}/mcp.py +34 -7
- ccproxy/{models/permissions.py → plugins/permissions/models.py} +65 -1
- ccproxy/plugins/permissions/plugin.py +153 -0
- ccproxy/{api/routes/permissions.py → plugins/permissions/routes.py} +20 -16
- ccproxy/{api/services/permission_service.py → plugins/permissions/service.py} +65 -11
- ccproxy/{api → plugins/permissions}/ui/permission_handler_protocol.py +1 -1
- ccproxy/{api → plugins/permissions}/ui/terminal_permission_handler.py +66 -10
- ccproxy/plugins/pricing/README.md +34 -0
- ccproxy/plugins/pricing/__init__.py +6 -0
- ccproxy/{pricing → plugins/pricing}/cache.py +7 -6
- ccproxy/{config/pricing.py → plugins/pricing/config.py} +32 -6
- ccproxy/plugins/pricing/exceptions.py +35 -0
- ccproxy/plugins/pricing/loader.py +440 -0
- ccproxy/{pricing → plugins/pricing}/models.py +13 -23
- ccproxy/plugins/pricing/plugin.py +169 -0
- ccproxy/plugins/pricing/service.py +191 -0
- ccproxy/plugins/pricing/tasks.py +300 -0
- ccproxy/{pricing → plugins/pricing}/updater.py +86 -72
- ccproxy/plugins/pricing/utils.py +99 -0
- ccproxy/plugins/request_tracer/README.md +40 -0
- ccproxy/plugins/request_tracer/__init__.py +7 -0
- ccproxy/plugins/request_tracer/config.py +120 -0
- ccproxy/plugins/request_tracer/hook.py +415 -0
- ccproxy/plugins/request_tracer/plugin.py +255 -0
- ccproxy/scheduler/__init__.py +2 -14
- ccproxy/scheduler/core.py +26 -41
- ccproxy/scheduler/manager.py +63 -107
- ccproxy/scheduler/registry.py +6 -32
- ccproxy/scheduler/tasks.py +346 -314
- ccproxy/services/__init__.py +0 -1
- ccproxy/services/adapters/__init__.py +11 -0
- ccproxy/services/adapters/base.py +123 -0
- ccproxy/services/adapters/chain_composer.py +88 -0
- ccproxy/services/adapters/chain_validation.py +44 -0
- ccproxy/services/adapters/chat_accumulator.py +200 -0
- ccproxy/services/adapters/delta_utils.py +142 -0
- ccproxy/services/adapters/format_adapter.py +136 -0
- ccproxy/services/adapters/format_context.py +11 -0
- ccproxy/services/adapters/format_registry.py +158 -0
- ccproxy/services/adapters/http_adapter.py +1045 -0
- ccproxy/services/adapters/mock_adapter.py +118 -0
- ccproxy/services/adapters/protocols.py +35 -0
- ccproxy/services/adapters/simple_converters.py +571 -0
- ccproxy/services/auth_registry.py +180 -0
- ccproxy/services/cache/__init__.py +6 -0
- ccproxy/services/cache/response_cache.py +261 -0
- ccproxy/services/cli_detection.py +437 -0
- ccproxy/services/config/__init__.py +6 -0
- ccproxy/services/config/proxy_configuration.py +111 -0
- ccproxy/services/container.py +256 -0
- ccproxy/services/factories.py +380 -0
- ccproxy/services/handler_config.py +76 -0
- ccproxy/services/interfaces.py +298 -0
- ccproxy/services/mocking/__init__.py +6 -0
- ccproxy/services/mocking/mock_handler.py +291 -0
- ccproxy/services/tracing/__init__.py +7 -0
- ccproxy/services/tracing/interfaces.py +61 -0
- ccproxy/services/tracing/null_tracer.py +57 -0
- ccproxy/streaming/__init__.py +23 -0
- ccproxy/streaming/buffer.py +1056 -0
- ccproxy/streaming/deferred.py +897 -0
- ccproxy/streaming/handler.py +117 -0
- ccproxy/streaming/interfaces.py +77 -0
- ccproxy/streaming/simple_adapter.py +39 -0
- ccproxy/streaming/sse.py +109 -0
- ccproxy/streaming/sse_parser.py +127 -0
- ccproxy/templates/__init__.py +6 -0
- ccproxy/templates/plugin_scaffold.py +695 -0
- ccproxy/testing/endpoints/__init__.py +33 -0
- ccproxy/testing/endpoints/cli.py +215 -0
- ccproxy/testing/endpoints/config.py +874 -0
- ccproxy/testing/endpoints/console.py +57 -0
- ccproxy/testing/endpoints/models.py +100 -0
- ccproxy/testing/endpoints/runner.py +1903 -0
- ccproxy/testing/endpoints/tools.py +308 -0
- ccproxy/testing/mock_responses.py +70 -1
- ccproxy/testing/response_handlers.py +20 -0
- ccproxy/utils/__init__.py +0 -6
- ccproxy/utils/binary_resolver.py +476 -0
- ccproxy/utils/caching.py +327 -0
- ccproxy/utils/cli_logging.py +101 -0
- ccproxy/utils/command_line.py +251 -0
- ccproxy/utils/headers.py +228 -0
- ccproxy/utils/model_mapper.py +120 -0
- ccproxy/utils/startup_helpers.py +95 -342
- ccproxy/utils/version_checker.py +279 -6
- ccproxy_api-0.2.0.dist-info/METADATA +212 -0
- ccproxy_api-0.2.0.dist-info/RECORD +417 -0
- {ccproxy_api-0.1.6.dist-info → ccproxy_api-0.2.0.dist-info}/WHEEL +1 -1
- ccproxy_api-0.2.0.dist-info/entry_points.txt +24 -0
- ccproxy/__init__.py +0 -4
- ccproxy/adapters/__init__.py +0 -11
- ccproxy/adapters/base.py +0 -80
- ccproxy/adapters/codex/__init__.py +0 -11
- ccproxy/adapters/openai/__init__.py +0 -42
- ccproxy/adapters/openai/adapter.py +0 -953
- ccproxy/adapters/openai/models.py +0 -412
- ccproxy/adapters/openai/response_adapter.py +0 -355
- ccproxy/adapters/openai/response_models.py +0 -178
- ccproxy/api/middleware/headers.py +0 -49
- ccproxy/api/middleware/logging.py +0 -180
- ccproxy/api/middleware/request_content_logging.py +0 -297
- ccproxy/api/middleware/server_header.py +0 -58
- ccproxy/api/responses.py +0 -89
- ccproxy/api/routes/claude.py +0 -371
- ccproxy/api/routes/codex.py +0 -1231
- ccproxy/api/routes/metrics.py +0 -1029
- ccproxy/api/routes/proxy.py +0 -211
- ccproxy/api/services/__init__.py +0 -6
- ccproxy/auth/conditional.py +0 -84
- ccproxy/auth/credentials_adapter.py +0 -93
- ccproxy/auth/models.py +0 -118
- ccproxy/auth/oauth/models.py +0 -48
- ccproxy/auth/openai/__init__.py +0 -13
- ccproxy/auth/openai/credentials.py +0 -166
- ccproxy/auth/openai/oauth_client.py +0 -334
- ccproxy/auth/openai/storage.py +0 -184
- ccproxy/auth/storage/json_file.py +0 -158
- ccproxy/auth/storage/keyring.py +0 -189
- ccproxy/claude_sdk/__init__.py +0 -18
- ccproxy/claude_sdk/options.py +0 -194
- ccproxy/claude_sdk/session_pool.py +0 -550
- ccproxy/cli/docker/__init__.py +0 -34
- ccproxy/cli/docker/adapter_factory.py +0 -157
- ccproxy/cli/docker/params.py +0 -274
- ccproxy/config/auth.py +0 -153
- ccproxy/config/claude.py +0 -348
- ccproxy/config/cors.py +0 -79
- ccproxy/config/discovery.py +0 -95
- ccproxy/config/docker_settings.py +0 -264
- ccproxy/config/observability.py +0 -158
- ccproxy/config/reverse_proxy.py +0 -31
- ccproxy/config/scheduler.py +0 -108
- ccproxy/config/server.py +0 -86
- ccproxy/config/validators.py +0 -231
- ccproxy/core/codex_transformers.py +0 -389
- ccproxy/core/http.py +0 -328
- ccproxy/core/http_transformers.py +0 -812
- ccproxy/core/proxy.py +0 -143
- ccproxy/core/validators.py +0 -288
- ccproxy/models/errors.py +0 -42
- ccproxy/models/messages.py +0 -269
- ccproxy/models/requests.py +0 -107
- ccproxy/models/responses.py +0 -270
- ccproxy/models/types.py +0 -102
- ccproxy/observability/__init__.py +0 -51
- ccproxy/observability/access_logger.py +0 -457
- ccproxy/observability/sse_events.py +0 -303
- ccproxy/observability/stats_printer.py +0 -753
- ccproxy/observability/storage/__init__.py +0 -1
- ccproxy/observability/storage/duckdb_simple.py +0 -677
- ccproxy/observability/storage/models.py +0 -70
- ccproxy/observability/streaming_response.py +0 -107
- ccproxy/pricing/__init__.py +0 -19
- ccproxy/pricing/loader.py +0 -251
- ccproxy/services/claude_detection_service.py +0 -269
- ccproxy/services/codex_detection_service.py +0 -263
- ccproxy/services/credentials/__init__.py +0 -55
- ccproxy/services/credentials/config.py +0 -105
- ccproxy/services/credentials/manager.py +0 -561
- ccproxy/services/credentials/oauth_client.py +0 -481
- ccproxy/services/proxy_service.py +0 -1827
- ccproxy/static/.keep +0 -0
- ccproxy/utils/cost_calculator.py +0 -210
- ccproxy/utils/disconnection_monitor.py +0 -83
- ccproxy/utils/model_mapping.py +0 -199
- ccproxy/utils/models_provider.py +0 -150
- ccproxy/utils/simple_request_logger.py +0 -284
- ccproxy/utils/streaming_metrics.py +0 -199
- ccproxy_api-0.1.6.dist-info/METADATA +0 -615
- ccproxy_api-0.1.6.dist-info/RECORD +0 -189
- ccproxy_api-0.1.6.dist-info/entry_points.txt +0 -4
- /ccproxy/{api/middleware/auth.py → auth/models/__init__.py} +0 -0
- /ccproxy/{claude_sdk → plugins/claude_sdk}/exceptions.py +0 -0
- /ccproxy/{docker → plugins/docker}/models.py +0 -0
- /ccproxy/{docker → plugins/docker}/protocol.py +0 -0
- /ccproxy/{docker → plugins/docker}/validators.py +0 -0
- /ccproxy/{auth/oauth/storage.py → plugins/permissions/handlers/__init__.py} +0 -0
- /ccproxy/{api → plugins/permissions}/ui/__init__.py +0 -0
- {ccproxy_api-0.1.6.dist-info → ccproxy_api-0.2.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
"""Request conversion entry points for Anthropic→OpenAI adapters."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from ccproxy.llms.formatters.context import register_request
|
|
9
|
+
from ccproxy.llms.models import anthropic as anthropic_models
|
|
10
|
+
from ccproxy.llms.models import openai as openai_models
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def _build_responses_payload_from_anthropic_request(
    request: anthropic_models.CreateMessageRequest,
) -> tuple[dict[str, Any], str | None]:
    """Project an Anthropic message request into Responses payload fields.

    Returns a tuple of (payload dict ready for ResponseRequest validation,
    the extracted system/instructions text or None).
    """

    payload_data: dict[str, Any] = {"model": request.model}
    instructions_text: str | None = None

    # Scalar limit / sampling options map one-to-one onto the Responses payload.
    if request.max_tokens is not None:
        payload_data["max_output_tokens"] = int(request.max_tokens)
    if request.stream:
        payload_data["stream"] = True

    if request.service_tier is not None:
        payload_data["service_tier"] = request.service_tier
    if request.temperature is not None:
        payload_data["temperature"] = request.temperature
    if request.top_p is not None:
        payload_data["top_p"] = request.top_p

    # Metadata is forwarded as a plain dict when the object supports
    # pydantic-style dumping (hasattr guard keeps non-model values out).
    if request.metadata is not None and hasattr(request.metadata, "model_dump"):
        meta_dump = request.metadata.model_dump()
        payload_data["metadata"] = meta_dump

    # The Anthropic system prompt becomes the Responses "instructions" field;
    # list-form system blocks are concatenated into a single string.
    if request.system:
        if isinstance(request.system, str):
            instructions_text = request.system
            payload_data["instructions"] = request.system
        else:
            joined = "".join(block.text for block in request.system if block.text)
            instructions_text = joined or None
            if joined:
                payload_data["instructions"] = joined

    # Find the text of the newest user message; both dict-shaped blocks and
    # typed blocks with a .text attribute are accepted.
    # NOTE(review): the break below stops the scan after the newest user
    # message — confirm its exact nesting against the upstream source, since
    # indentation was lost in extraction.
    last_user_text: str | None = None
    for msg in reversed(request.messages):
        if msg.role != "user":
            continue
        if isinstance(msg.content, str):
            last_user_text = msg.content
        elif isinstance(msg.content, list):
            texts: list[str] = []
            for block in msg.content:
                if isinstance(block, dict):
                    if block.get("type") == "text" and isinstance(
                        block.get("text"), str
                    ):
                        texts.append(block.get("text") or "")
                elif (
                    getattr(block, "type", None) == "text"
                    and hasattr(block, "text")
                    and isinstance(getattr(block, "text", None), str)
                ):
                    texts.append(block.text or "")
            if texts:
                last_user_text = " ".join(texts)
        break

    # The Responses "input" carries at most one user message built from the
    # text extracted above; otherwise it is an empty list.
    if last_user_text:
        payload_data["input"] = [
            {
                "type": "message",
                "role": "user",
                "content": [
                    {"type": "input_text", "text": last_user_text},
                ],
            }
        ]
    else:
        payload_data["input"] = []

    # Typed Anthropic tools become Responses function tools; entries that are
    # not anthropic_models.Tool instances are silently skipped.
    if request.tools:
        tools: list[dict[str, Any]] = []
        for tool in request.tools:
            if isinstance(tool, anthropic_models.Tool):
                tools.append(
                    {
                        "type": "function",
                        "name": tool.name,
                        "description": tool.description,
                        "parameters": tool.input_schema,
                    }
                )
        if tools:
            payload_data["tools"] = tools

    # Map Anthropic tool_choice semantics onto the OpenAI equivalents:
    # "any" -> "required", a named tool -> a function selector object.
    tc = request.tool_choice
    if tc is not None:
        tc_type = getattr(tc, "type", None)
        if tc_type == "none":
            payload_data["tool_choice"] = "none"
        elif tc_type == "auto":
            payload_data["tool_choice"] = "auto"
        elif tc_type == "any":
            payload_data["tool_choice"] = "required"
        elif tc_type == "tool":
            name = getattr(tc, "name", None)
            if name:
                payload_data["tool_choice"] = {
                    "type": "function",
                    "function": {"name": name},
                }
        # Anthropic's disable_parallel_tool_use is the inverse of OpenAI's
        # parallel_tool_calls flag.
        disable_parallel = getattr(tc, "disable_parallel_tool_use", None)
        if isinstance(disable_parallel, bool):
            payload_data["parallel_tool_calls"] = not disable_parallel

    payload_data.setdefault("background", None)

    return payload_data, instructions_text
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def convert__anthropic_message_to_openai_responses__request(
    request: anthropic_models.CreateMessageRequest,
) -> openai_models.ResponseRequest:
    """Convert Anthropic CreateMessageRequest to OpenAI ResponseRequest using typed models.

    Builds the Responses payload from the Anthropic request, validates it into
    a typed ResponseRequest, and records the originating request (with its
    extracted instructions text) in the formatter context before returning.
    """
    payload, instructions = _build_responses_payload_from_anthropic_request(request)

    # Validate the projected payload into the typed OpenAI model.
    converted = openai_models.ResponseRequest.model_validate(payload)

    # Remember the source request so downstream formatters can consult it.
    register_request(request, instructions)

    return converted
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def convert__anthropic_message_to_openai_chat__request(
    request: anthropic_models.CreateMessageRequest,
) -> openai_models.ChatCompletionRequest:
    """Convert Anthropic CreateMessageRequest to OpenAI ChatCompletionRequest using typed models.

    Mapping performed:
      * ``system`` (string or list of text blocks) -> one OpenAI ``system`` message.
      * Assistant messages containing ``tool_use`` blocks -> ``tool_calls`` entries.
      * User messages containing ``tool_result`` blocks -> ``role="tool"`` messages.
      * Text + base64 images -> plain string content or multi-part content.
      * Anthropic ``tools`` / ``tool_choice`` -> OpenAI function tools / tool_choice.

    The assembled dict is validated through
    ``openai_models.ChatCompletionRequest.model_validate`` before returning.
    """
    openai_messages: list[dict[str, Any]] = []
    # System prompt
    if request.system:
        if isinstance(request.system, str):
            sys_content = request.system
        else:
            # List form: concatenate the text of every system block.
            sys_content = "".join(block.text for block in request.system)
        if sys_content:
            openai_messages.append({"role": "system", "content": sys_content})

    # User/assistant messages with text + data-url images
    for msg in request.messages:
        role = msg.role
        content = msg.content

        # Handle tool usage and results
        if role == "assistant" and isinstance(content, list):
            tool_calls = []
            text_parts = []
            for block in content:
                block_type = getattr(block, "type", None)
                if block_type == "tool_use":
                    # Type guard for ToolUseBlock
                    if hasattr(block, "id") and hasattr(block, "name"):
                        # Safely get input with fallback to empty dict
                        tool_input = getattr(block, "input", {}) or {}

                        # Ensure input is properly serialized as JSON
                        try:
                            args_str = json.dumps(tool_input)
                        except Exception:
                            # Non-JSON-serializable input: wrap its str() form.
                            args_str = json.dumps({"arguments": str(tool_input)})

                        tool_calls.append(
                            {
                                "id": block.id,
                                "type": "function",
                                "function": {
                                    "name": block.name,
                                    "arguments": args_str,
                                },
                            }
                        )
                elif block_type == "text":
                    # Type guard for TextBlock
                    if hasattr(block, "text"):
                        text_parts.append(block.text)
            if tool_calls:
                # OpenAI allows content alongside tool_calls; None when no text.
                assistant_msg: dict[str, Any] = {
                    "role": "assistant",
                    "tool_calls": tool_calls,
                }
                assistant_msg["content"] = " ".join(text_parts) if text_parts else None
                openai_messages.append(assistant_msg)
                continue
            # No tool_use blocks: fall through to the generic list handling below.
        elif role == "user" and isinstance(content, list):
            is_tool_result = any(
                getattr(b, "type", None) == "tool_result" for b in content
            )
            if is_tool_result:
                # NOTE(review): the `continue` below means non-tool_result blocks
                # in the same user message are dropped — confirm this is intended.
                for block in content:
                    if getattr(block, "type", None) == "tool_result":
                        # Type guard for ToolResultBlock
                        if hasattr(block, "tool_use_id"):
                            # Get content with an empty string fallback
                            result_content = getattr(block, "content", "")

                            # Convert complex content to string representation
                            if not isinstance(result_content, str):
                                try:
                                    if isinstance(result_content, list):
                                        # Handle list of text blocks
                                        text_parts = []
                                        for part in result_content:
                                            if (
                                                hasattr(part, "text")
                                                and hasattr(part, "type")
                                                and part.type == "text"
                                            ):
                                                text_parts.append(part.text)
                                        if text_parts:
                                            result_content = " ".join(text_parts)
                                        else:
                                            result_content = json.dumps(result_content)
                                    else:
                                        # Convert other non-string content to JSON
                                        result_content = json.dumps(result_content)
                                except Exception:
                                    # Fallback to string representation
                                    result_content = str(result_content)

                            openai_messages.append(
                                {
                                    "role": "tool",
                                    "tool_call_id": block.tool_use_id,
                                    "content": result_content,
                                }
                            )
                continue

        # Generic list content: collect text and base64 images.
        if isinstance(content, list):
            parts: list[dict[str, Any]] = []
            text_accum: list[str] = []
            for block in content:
                # Support both raw dicts and Anthropic model instances
                if isinstance(block, dict):
                    btype = block.get("type")
                    if btype == "text" and isinstance(block.get("text"), str):
                        text_accum.append(block.get("text") or "")
                    elif btype == "image":
                        source = block.get("source") or {}
                        # Only base64 sources with both media_type and data are mapped.
                        if (
                            isinstance(source, dict)
                            and source.get("type") == "base64"
                            and isinstance(source.get("media_type"), str)
                            and isinstance(source.get("data"), str)
                        ):
                            url = f"data:{source['media_type']};base64,{source['data']}"
                            parts.append(
                                {
                                    "type": "image_url",
                                    "image_url": {"url": url},
                                }
                            )
                else:
                    # Pydantic models
                    btype = getattr(block, "type", None)
                    if (
                        btype == "text"
                        and hasattr(block, "text")
                        and isinstance(getattr(block, "text", None), str)
                    ):
                        text_accum.append(block.text or "")
                    elif btype == "image":
                        source = getattr(block, "source", None)
                        if (
                            source is not None
                            and getattr(source, "type", None) == "base64"
                            and isinstance(getattr(source, "media_type", None), str)
                            and isinstance(getattr(source, "data", None), str)
                        ):
                            url = f"data:{source.media_type};base64,{source.data}"
                            parts.append(
                                {
                                    "type": "image_url",
                                    "image_url": {"url": url},
                                }
                            )
            # Multi-part content only when images are present or there are
            # multiple text blocks; otherwise a plain string is emitted.
            if parts or len(text_accum) > 1:
                if text_accum:
                    parts.insert(0, {"type": "text", "text": " ".join(text_accum)})
                openai_messages.append({"role": role, "content": parts})
            else:
                openai_messages.append(
                    {"role": role, "content": (text_accum[0] if text_accum else "")}
                )
        else:
            # Plain string content passes through unchanged.
            openai_messages.append({"role": role, "content": content})

    # Tools mapping (custom tools -> function tools)
    tools: list[dict[str, Any]] = []
    if request.tools:
        for tool in request.tools:
            if isinstance(tool, anthropic_models.Tool):
                tools.append(
                    {
                        "type": "function",
                        "function": {
                            "name": tool.name,
                            "description": tool.description,
                            "parameters": tool.input_schema,
                        },
                    }
                )

    params: dict[str, Any] = {
        "model": request.model,
        "messages": openai_messages,
        "max_completion_tokens": request.max_tokens,
        # stream=False is normalized to None (field omitted after validation).
        "stream": request.stream or None,
    }
    if tools:
        params["tools"] = tools

    # tool_choice mapping
    tc = request.tool_choice
    if tc is not None:
        tc_type = getattr(tc, "type", None)
        if tc_type == "none":
            params["tool_choice"] = "none"
        elif tc_type == "auto":
            params["tool_choice"] = "auto"
        elif tc_type == "any":
            # Anthropic "any" (must use some tool) maps to OpenAI "required".
            params["tool_choice"] = "required"
        elif tc_type == "tool":
            name = getattr(tc, "name", None)
            if name:
                params["tool_choice"] = {
                    "type": "function",
                    "function": {"name": name},
                }
        # parallel_tool_calls from disable_parallel_tool_use
        disable_parallel = getattr(tc, "disable_parallel_tool_use", None)
        if isinstance(disable_parallel, bool):
            params["parallel_tool_calls"] = not disable_parallel

    # Validate against OpenAI model
    return openai_models.ChatCompletionRequest.model_validate(params)
|
|
351
|
+
|
|
352
|
+
|
|
353
|
+
# Public request-conversion API of this module.
__all__ = [
    "convert__anthropic_message_to_openai_chat__request",
    "convert__anthropic_message_to_openai_responses__request",
]
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
"""Response conversion entry points for Anthropic→OpenAI adapters."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import time
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
import ccproxy.core.logging
|
|
9
|
+
from ccproxy.llms.formatters.common import (
|
|
10
|
+
convert_anthropic_usage_to_openai_completion_usage,
|
|
11
|
+
convert_anthropic_usage_to_openai_responses_usage,
|
|
12
|
+
)
|
|
13
|
+
from ccproxy.llms.formatters.constants import ANTHROPIC_TO_OPENAI_FINISH_REASON
|
|
14
|
+
from ccproxy.llms.models import anthropic as anthropic_models
|
|
15
|
+
from ccproxy.llms.models import openai as openai_models
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
logger = ccproxy.core.logging.get_logger(__name__)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def convert__anthropic_usage_to_openai_completion__usage(
    usage: anthropic_models.Usage,
) -> openai_models.CompletionUsage:
    """Map Anthropic usage counters onto the OpenAI chat-completions usage model.

    Thin alias that delegates to the shared converter in ``formatters.common``.
    """
    return convert_anthropic_usage_to_openai_completion_usage(usage)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def convert__anthropic_usage_to_openai_responses__usage(
    usage: anthropic_models.Usage,
) -> openai_models.ResponseUsage:
    """Map Anthropic usage counters onto the OpenAI Responses usage model.

    Thin alias that delegates to the shared converter in ``formatters.common``.
    """
    return convert_anthropic_usage_to_openai_responses_usage(usage)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def convert__anthropic_message_to_openai_responses__response(
    response: anthropic_models.MessageResponse,
) -> openai_models.ResponseObject:
    """Convert Anthropic MessageResponse to an OpenAI ResponseObject.

    Text and thinking blocks are flattened into a single ``output_text``
    content item (thinking is wrapped in ``<thinking>`` tags, preserving the
    signature when present); ``tool_use`` blocks are appended as raw dicts.

    Args:
        response: The Anthropic message to convert.

    Returns:
        A ``ResponseObject`` with a single assistant message output, marked
        ``completed``, carrying converted usage when the source has usage.
    """
    text_parts: list[str] = []
    tool_contents: list[dict[str, Any]] = []
    for block in response.content:
        block_type = getattr(block, "type", None)
        if block_type == "text":
            text_parts.append(getattr(block, "text", ""))
        elif block_type == "thinking":
            thinking = getattr(block, "thinking", None) or ""
            signature = getattr(block, "signature", None)
            # Only embed a signature attribute for a non-empty string signature.
            sig_attr = (
                f' signature="{signature}"'
                if isinstance(signature, str) and signature
                else ""
            )
            text_parts.append(f"<thinking{sig_attr}>{thinking}</thinking>")
        elif block_type == "tool_use":
            # Defensive getattr fallbacks keep conversion alive on partial blocks.
            tool_contents.append(
                {
                    "type": "tool_use",
                    "id": getattr(block, "id", "tool_1"),
                    "name": getattr(block, "name", "function"),
                    "arguments": getattr(block, "input", {}) or {},
                }
            )

    message_content: list[dict[str, Any]] = []
    if text_parts:
        message_content.append(
            openai_models.OutputTextContent(
                type="output_text",
                text="".join(text_parts),
            ).model_dump()
        )
    message_content.extend(tool_contents)

    usage_model = None
    if response.usage is not None:
        usage_model = convert__anthropic_usage_to_openai_responses__usage(
            response.usage
        )

    return openai_models.ResponseObject(
        id=response.id,
        object="response",
        # Stamp the conversion time, consistent with the chat converter in
        # this module (previously hard-coded to 0).
        created_at=int(time.time()),
        status="completed",
        model=response.model,
        output=[
            openai_models.MessageOutput(
                type="message",
                id=f"{response.id}_msg_0",
                status="completed",
                role="assistant",
                content=message_content,  # type: ignore[arg-type]
            )
        ],
        parallel_tool_calls=False,
        usage=usage_model,
    )
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def convert__anthropic_message_to_openai_chat__response(
    response: anthropic_models.MessageResponse,
) -> openai_models.ChatCompletionResponse:
    """Convert Anthropic MessageResponse to an OpenAI ChatCompletionResponse.

    Text blocks pass through as-is; thinking blocks are rendered inline as
    ``<thinking>`` tags (with the signature attribute when present). The
    Anthropic stop reason is translated via the shared finish-reason map.
    """
    rendered: list[str] = []
    for item in response.content:
        kind = getattr(item, "type", None)
        if kind == "text":
            value = getattr(item, "text", None)
            if isinstance(value, str):
                rendered.append(value)
        elif kind == "thinking":
            thought = getattr(item, "thinking", None)
            signature = getattr(item, "signature", None)
            if isinstance(thought, str):
                has_sig = isinstance(signature, str) and signature
                sig_attr = f' signature="{signature}"' if has_sig else ""
                rendered.append(f"<thinking{sig_attr}>{thought}</thinking>")

    # Unmapped stop reasons (and None) fall back to "stop".
    finish_reason = ANTHROPIC_TO_OPENAI_FINISH_REASON.get(
        response.stop_reason or "end_turn", "stop"
    )

    usage_model = convert__anthropic_usage_to_openai_completion__usage(response.usage)

    choice = {
        "index": 0,
        "message": {"role": "assistant", "content": "".join(rendered)},
        "finish_reason": finish_reason,
    }
    return openai_models.ChatCompletionResponse.model_validate(
        {
            "id": response.id,
            "choices": [choice],
            "created": int(time.time()),
            "model": response.model,
            "object": "chat.completion",
            "usage": usage_model.model_dump(),
        }
    )
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
# Public response/usage-conversion API of this module.
__all__ = [
    "convert__anthropic_message_to_openai_chat__response",
    "convert__anthropic_message_to_openai_responses__response",
    "convert__anthropic_usage_to_openai_completion__usage",
    "convert__anthropic_usage_to_openai_responses__usage",
]
|