ccproxy-api 0.1.6__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (481)
  1. ccproxy/api/__init__.py +1 -15
  2. ccproxy/api/app.py +439 -212
  3. ccproxy/api/bootstrap.py +30 -0
  4. ccproxy/api/decorators.py +85 -0
  5. ccproxy/api/dependencies.py +145 -176
  6. ccproxy/api/format_validation.py +54 -0
  7. ccproxy/api/middleware/cors.py +6 -3
  8. ccproxy/api/middleware/errors.py +402 -530
  9. ccproxy/api/middleware/hooks.py +563 -0
  10. ccproxy/api/middleware/normalize_headers.py +59 -0
  11. ccproxy/api/middleware/request_id.py +35 -16
  12. ccproxy/api/middleware/streaming_hooks.py +292 -0
  13. ccproxy/api/routes/__init__.py +5 -14
  14. ccproxy/api/routes/health.py +39 -672
  15. ccproxy/api/routes/plugins.py +277 -0
  16. ccproxy/auth/__init__.py +2 -19
  17. ccproxy/auth/bearer.py +25 -15
  18. ccproxy/auth/dependencies.py +123 -157
  19. ccproxy/auth/exceptions.py +0 -12
  20. ccproxy/auth/manager.py +35 -49
  21. ccproxy/auth/managers/__init__.py +10 -0
  22. ccproxy/auth/managers/base.py +523 -0
  23. ccproxy/auth/managers/base_enhanced.py +63 -0
  24. ccproxy/auth/managers/token_snapshot.py +77 -0
  25. ccproxy/auth/models/base.py +65 -0
  26. ccproxy/auth/models/credentials.py +40 -0
  27. ccproxy/auth/oauth/__init__.py +4 -18
  28. ccproxy/auth/oauth/base.py +533 -0
  29. ccproxy/auth/oauth/cli_errors.py +37 -0
  30. ccproxy/auth/oauth/flows.py +430 -0
  31. ccproxy/auth/oauth/protocol.py +366 -0
  32. ccproxy/auth/oauth/registry.py +408 -0
  33. ccproxy/auth/oauth/router.py +396 -0
  34. ccproxy/auth/oauth/routes.py +186 -113
  35. ccproxy/auth/oauth/session.py +151 -0
  36. ccproxy/auth/oauth/templates.py +342 -0
  37. ccproxy/auth/storage/__init__.py +2 -5
  38. ccproxy/auth/storage/base.py +279 -5
  39. ccproxy/auth/storage/generic.py +134 -0
  40. ccproxy/cli/__init__.py +1 -2
  41. ccproxy/cli/_settings_help.py +351 -0
  42. ccproxy/cli/commands/auth.py +1519 -793
  43. ccproxy/cli/commands/config/commands.py +209 -276
  44. ccproxy/cli/commands/plugins.py +669 -0
  45. ccproxy/cli/commands/serve.py +75 -810
  46. ccproxy/cli/commands/status.py +254 -0
  47. ccproxy/cli/decorators.py +83 -0
  48. ccproxy/cli/helpers.py +22 -60
  49. ccproxy/cli/main.py +359 -10
  50. ccproxy/cli/options/claude_options.py +0 -25
  51. ccproxy/config/__init__.py +7 -11
  52. ccproxy/config/core.py +227 -0
  53. ccproxy/config/env_generator.py +232 -0
  54. ccproxy/config/runtime.py +67 -0
  55. ccproxy/config/security.py +36 -3
  56. ccproxy/config/settings.py +382 -441
  57. ccproxy/config/toml_generator.py +299 -0
  58. ccproxy/config/utils.py +452 -0
  59. ccproxy/core/__init__.py +7 -271
  60. ccproxy/{_version.py → core/_version.py} +16 -3
  61. ccproxy/core/async_task_manager.py +516 -0
  62. ccproxy/core/async_utils.py +47 -14
  63. ccproxy/core/auth/__init__.py +6 -0
  64. ccproxy/core/constants.py +16 -50
  65. ccproxy/core/errors.py +53 -0
  66. ccproxy/core/id_utils.py +20 -0
  67. ccproxy/core/interfaces.py +16 -123
  68. ccproxy/core/logging.py +473 -18
  69. ccproxy/core/plugins/__init__.py +77 -0
  70. ccproxy/core/plugins/cli_discovery.py +211 -0
  71. ccproxy/core/plugins/declaration.py +455 -0
  72. ccproxy/core/plugins/discovery.py +604 -0
  73. ccproxy/core/plugins/factories.py +967 -0
  74. ccproxy/core/plugins/hooks/__init__.py +30 -0
  75. ccproxy/core/plugins/hooks/base.py +58 -0
  76. ccproxy/core/plugins/hooks/events.py +46 -0
  77. ccproxy/core/plugins/hooks/implementations/__init__.py +16 -0
  78. ccproxy/core/plugins/hooks/implementations/formatters/__init__.py +11 -0
  79. ccproxy/core/plugins/hooks/implementations/formatters/json.py +552 -0
  80. ccproxy/core/plugins/hooks/implementations/formatters/raw.py +370 -0
  81. ccproxy/core/plugins/hooks/implementations/http_tracer.py +431 -0
  82. ccproxy/core/plugins/hooks/layers.py +44 -0
  83. ccproxy/core/plugins/hooks/manager.py +186 -0
  84. ccproxy/core/plugins/hooks/registry.py +139 -0
  85. ccproxy/core/plugins/hooks/thread_manager.py +203 -0
  86. ccproxy/core/plugins/hooks/types.py +22 -0
  87. ccproxy/core/plugins/interfaces.py +416 -0
  88. ccproxy/core/plugins/loader.py +166 -0
  89. ccproxy/core/plugins/middleware.py +233 -0
  90. ccproxy/core/plugins/models.py +59 -0
  91. ccproxy/core/plugins/protocol.py +180 -0
  92. ccproxy/core/plugins/runtime.py +519 -0
  93. ccproxy/{observability/context.py → core/request_context.py} +137 -94
  94. ccproxy/core/status_report.py +211 -0
  95. ccproxy/core/transformers.py +13 -8
  96. ccproxy/data/claude_headers_fallback.json +558 -0
  97. ccproxy/data/codex_headers_fallback.json +121 -0
  98. ccproxy/http/__init__.py +30 -0
  99. ccproxy/http/base.py +95 -0
  100. ccproxy/http/client.py +323 -0
  101. ccproxy/http/hooks.py +642 -0
  102. ccproxy/http/pool.py +279 -0
  103. ccproxy/llms/formatters/__init__.py +7 -0
  104. ccproxy/llms/formatters/anthropic_to_openai/__init__.py +55 -0
  105. ccproxy/llms/formatters/anthropic_to_openai/errors.py +65 -0
  106. ccproxy/llms/formatters/anthropic_to_openai/requests.py +356 -0
  107. ccproxy/llms/formatters/anthropic_to_openai/responses.py +153 -0
  108. ccproxy/llms/formatters/anthropic_to_openai/streams.py +1546 -0
  109. ccproxy/llms/formatters/base.py +140 -0
  110. ccproxy/llms/formatters/base_model.py +33 -0
  111. ccproxy/llms/formatters/common/__init__.py +51 -0
  112. ccproxy/llms/formatters/common/identifiers.py +48 -0
  113. ccproxy/llms/formatters/common/streams.py +254 -0
  114. ccproxy/llms/formatters/common/thinking.py +74 -0
  115. ccproxy/llms/formatters/common/usage.py +135 -0
  116. ccproxy/llms/formatters/constants.py +55 -0
  117. ccproxy/llms/formatters/context.py +116 -0
  118. ccproxy/llms/formatters/mapping.py +33 -0
  119. ccproxy/llms/formatters/openai_to_anthropic/__init__.py +55 -0
  120. ccproxy/llms/formatters/openai_to_anthropic/_helpers.py +141 -0
  121. ccproxy/llms/formatters/openai_to_anthropic/errors.py +53 -0
  122. ccproxy/llms/formatters/openai_to_anthropic/requests.py +674 -0
  123. ccproxy/llms/formatters/openai_to_anthropic/responses.py +285 -0
  124. ccproxy/llms/formatters/openai_to_anthropic/streams.py +530 -0
  125. ccproxy/llms/formatters/openai_to_openai/__init__.py +53 -0
  126. ccproxy/llms/formatters/openai_to_openai/_helpers.py +325 -0
  127. ccproxy/llms/formatters/openai_to_openai/errors.py +6 -0
  128. ccproxy/llms/formatters/openai_to_openai/requests.py +388 -0
  129. ccproxy/llms/formatters/openai_to_openai/responses.py +594 -0
  130. ccproxy/llms/formatters/openai_to_openai/streams.py +1832 -0
  131. ccproxy/llms/formatters/utils.py +306 -0
  132. ccproxy/llms/models/__init__.py +9 -0
  133. ccproxy/llms/models/anthropic.py +619 -0
  134. ccproxy/llms/models/openai.py +844 -0
  135. ccproxy/llms/streaming/__init__.py +26 -0
  136. ccproxy/llms/streaming/accumulators.py +1074 -0
  137. ccproxy/llms/streaming/formatters.py +251 -0
  138. ccproxy/{adapters/openai/streaming.py → llms/streaming/processors.py} +193 -240
  139. ccproxy/models/__init__.py +8 -159
  140. ccproxy/models/detection.py +92 -193
  141. ccproxy/models/provider.py +75 -0
  142. ccproxy/plugins/access_log/README.md +32 -0
  143. ccproxy/plugins/access_log/__init__.py +20 -0
  144. ccproxy/plugins/access_log/config.py +33 -0
  145. ccproxy/plugins/access_log/formatter.py +126 -0
  146. ccproxy/plugins/access_log/hook.py +763 -0
  147. ccproxy/plugins/access_log/logger.py +254 -0
  148. ccproxy/plugins/access_log/plugin.py +137 -0
  149. ccproxy/plugins/access_log/writer.py +109 -0
  150. ccproxy/plugins/analytics/README.md +24 -0
  151. ccproxy/plugins/analytics/__init__.py +1 -0
  152. ccproxy/plugins/analytics/config.py +5 -0
  153. ccproxy/plugins/analytics/ingest.py +85 -0
  154. ccproxy/plugins/analytics/models.py +97 -0
  155. ccproxy/plugins/analytics/plugin.py +121 -0
  156. ccproxy/plugins/analytics/routes.py +163 -0
  157. ccproxy/plugins/analytics/service.py +284 -0
  158. ccproxy/plugins/claude_api/README.md +29 -0
  159. ccproxy/plugins/claude_api/__init__.py +10 -0
  160. ccproxy/plugins/claude_api/adapter.py +829 -0
  161. ccproxy/plugins/claude_api/config.py +52 -0
  162. ccproxy/plugins/claude_api/detection_service.py +461 -0
  163. ccproxy/plugins/claude_api/health.py +175 -0
  164. ccproxy/plugins/claude_api/hooks.py +284 -0
  165. ccproxy/plugins/claude_api/models.py +256 -0
  166. ccproxy/plugins/claude_api/plugin.py +298 -0
  167. ccproxy/plugins/claude_api/routes.py +118 -0
  168. ccproxy/plugins/claude_api/streaming_metrics.py +68 -0
  169. ccproxy/plugins/claude_api/tasks.py +84 -0
  170. ccproxy/plugins/claude_sdk/README.md +35 -0
  171. ccproxy/plugins/claude_sdk/__init__.py +80 -0
  172. ccproxy/plugins/claude_sdk/adapter.py +749 -0
  173. ccproxy/plugins/claude_sdk/auth.py +57 -0
  174. ccproxy/{claude_sdk → plugins/claude_sdk}/client.py +63 -39
  175. ccproxy/plugins/claude_sdk/config.py +210 -0
  176. ccproxy/{claude_sdk → plugins/claude_sdk}/converter.py +6 -6
  177. ccproxy/plugins/claude_sdk/detection_service.py +163 -0
  178. ccproxy/{services/claude_sdk_service.py → plugins/claude_sdk/handler.py} +123 -304
  179. ccproxy/plugins/claude_sdk/health.py +113 -0
  180. ccproxy/plugins/claude_sdk/hooks.py +115 -0
  181. ccproxy/{claude_sdk → plugins/claude_sdk}/manager.py +42 -32
  182. ccproxy/{claude_sdk → plugins/claude_sdk}/message_queue.py +8 -8
  183. ccproxy/{models/claude_sdk.py → plugins/claude_sdk/models.py} +64 -16
  184. ccproxy/plugins/claude_sdk/options.py +154 -0
  185. ccproxy/{claude_sdk → plugins/claude_sdk}/parser.py +23 -5
  186. ccproxy/plugins/claude_sdk/plugin.py +269 -0
  187. ccproxy/plugins/claude_sdk/routes.py +104 -0
  188. ccproxy/{claude_sdk → plugins/claude_sdk}/session_client.py +124 -12
  189. ccproxy/plugins/claude_sdk/session_pool.py +700 -0
  190. ccproxy/{claude_sdk → plugins/claude_sdk}/stream_handle.py +48 -43
  191. ccproxy/{claude_sdk → plugins/claude_sdk}/stream_worker.py +22 -18
  192. ccproxy/{claude_sdk → plugins/claude_sdk}/streaming.py +50 -16
  193. ccproxy/plugins/claude_sdk/tasks.py +97 -0
  194. ccproxy/plugins/claude_shared/README.md +18 -0
  195. ccproxy/plugins/claude_shared/__init__.py +12 -0
  196. ccproxy/plugins/claude_shared/model_defaults.py +171 -0
  197. ccproxy/plugins/codex/README.md +35 -0
  198. ccproxy/plugins/codex/__init__.py +6 -0
  199. ccproxy/plugins/codex/adapter.py +635 -0
  200. ccproxy/{config/codex.py → plugins/codex/config.py} +78 -12
  201. ccproxy/plugins/codex/detection_service.py +544 -0
  202. ccproxy/plugins/codex/health.py +162 -0
  203. ccproxy/plugins/codex/hooks.py +263 -0
  204. ccproxy/plugins/codex/model_defaults.py +39 -0
  205. ccproxy/plugins/codex/models.py +263 -0
  206. ccproxy/plugins/codex/plugin.py +275 -0
  207. ccproxy/plugins/codex/routes.py +129 -0
  208. ccproxy/plugins/codex/streaming_metrics.py +324 -0
  209. ccproxy/plugins/codex/tasks.py +106 -0
  210. ccproxy/plugins/codex/utils/__init__.py +1 -0
  211. ccproxy/plugins/codex/utils/sse_parser.py +106 -0
  212. ccproxy/plugins/command_replay/README.md +34 -0
  213. ccproxy/plugins/command_replay/__init__.py +17 -0
  214. ccproxy/plugins/command_replay/config.py +133 -0
  215. ccproxy/plugins/command_replay/formatter.py +432 -0
  216. ccproxy/plugins/command_replay/hook.py +294 -0
  217. ccproxy/plugins/command_replay/plugin.py +161 -0
  218. ccproxy/plugins/copilot/README.md +39 -0
  219. ccproxy/plugins/copilot/__init__.py +11 -0
  220. ccproxy/plugins/copilot/adapter.py +465 -0
  221. ccproxy/plugins/copilot/config.py +155 -0
  222. ccproxy/plugins/copilot/data/copilot_fallback.json +41 -0
  223. ccproxy/plugins/copilot/detection_service.py +255 -0
  224. ccproxy/plugins/copilot/manager.py +275 -0
  225. ccproxy/plugins/copilot/model_defaults.py +284 -0
  226. ccproxy/plugins/copilot/models.py +148 -0
  227. ccproxy/plugins/copilot/oauth/__init__.py +16 -0
  228. ccproxy/plugins/copilot/oauth/client.py +494 -0
  229. ccproxy/plugins/copilot/oauth/models.py +385 -0
  230. ccproxy/plugins/copilot/oauth/provider.py +602 -0
  231. ccproxy/plugins/copilot/oauth/storage.py +170 -0
  232. ccproxy/plugins/copilot/plugin.py +360 -0
  233. ccproxy/plugins/copilot/routes.py +294 -0
  234. ccproxy/plugins/credential_balancer/README.md +124 -0
  235. ccproxy/plugins/credential_balancer/__init__.py +6 -0
  236. ccproxy/plugins/credential_balancer/config.py +270 -0
  237. ccproxy/plugins/credential_balancer/factory.py +415 -0
  238. ccproxy/plugins/credential_balancer/hook.py +51 -0
  239. ccproxy/plugins/credential_balancer/manager.py +587 -0
  240. ccproxy/plugins/credential_balancer/plugin.py +146 -0
  241. ccproxy/plugins/dashboard/README.md +25 -0
  242. ccproxy/plugins/dashboard/__init__.py +1 -0
  243. ccproxy/plugins/dashboard/config.py +8 -0
  244. ccproxy/plugins/dashboard/plugin.py +71 -0
  245. ccproxy/plugins/dashboard/routes.py +67 -0
  246. ccproxy/plugins/docker/README.md +32 -0
  247. ccproxy/{docker → plugins/docker}/__init__.py +3 -0
  248. ccproxy/{docker → plugins/docker}/adapter.py +108 -10
  249. ccproxy/plugins/docker/config.py +82 -0
  250. ccproxy/{docker → plugins/docker}/docker_path.py +4 -3
  251. ccproxy/{docker → plugins/docker}/middleware.py +2 -2
  252. ccproxy/plugins/docker/plugin.py +198 -0
  253. ccproxy/{docker → plugins/docker}/stream_process.py +3 -3
  254. ccproxy/plugins/duckdb_storage/README.md +26 -0
  255. ccproxy/plugins/duckdb_storage/__init__.py +1 -0
  256. ccproxy/plugins/duckdb_storage/config.py +22 -0
  257. ccproxy/plugins/duckdb_storage/plugin.py +128 -0
  258. ccproxy/plugins/duckdb_storage/routes.py +51 -0
  259. ccproxy/plugins/duckdb_storage/storage.py +633 -0
  260. ccproxy/plugins/max_tokens/README.md +38 -0
  261. ccproxy/plugins/max_tokens/__init__.py +12 -0
  262. ccproxy/plugins/max_tokens/adapter.py +235 -0
  263. ccproxy/plugins/max_tokens/config.py +86 -0
  264. ccproxy/plugins/max_tokens/models.py +53 -0
  265. ccproxy/plugins/max_tokens/plugin.py +200 -0
  266. ccproxy/plugins/max_tokens/service.py +271 -0
  267. ccproxy/plugins/max_tokens/token_limits.json +54 -0
  268. ccproxy/plugins/metrics/README.md +35 -0
  269. ccproxy/plugins/metrics/__init__.py +10 -0
  270. ccproxy/{observability/metrics.py → plugins/metrics/collector.py} +20 -153
  271. ccproxy/plugins/metrics/config.py +85 -0
  272. ccproxy/plugins/metrics/grafana/dashboards/ccproxy-dashboard.json +1720 -0
  273. ccproxy/plugins/metrics/hook.py +403 -0
  274. ccproxy/plugins/metrics/plugin.py +268 -0
  275. ccproxy/{observability → plugins/metrics}/pushgateway.py +57 -59
  276. ccproxy/plugins/metrics/routes.py +107 -0
  277. ccproxy/plugins/metrics/tasks.py +117 -0
  278. ccproxy/plugins/oauth_claude/README.md +35 -0
  279. ccproxy/plugins/oauth_claude/__init__.py +14 -0
  280. ccproxy/plugins/oauth_claude/client.py +270 -0
  281. ccproxy/plugins/oauth_claude/config.py +84 -0
  282. ccproxy/plugins/oauth_claude/manager.py +482 -0
  283. ccproxy/plugins/oauth_claude/models.py +266 -0
  284. ccproxy/plugins/oauth_claude/plugin.py +149 -0
  285. ccproxy/plugins/oauth_claude/provider.py +571 -0
  286. ccproxy/plugins/oauth_claude/storage.py +212 -0
  287. ccproxy/plugins/oauth_codex/README.md +38 -0
  288. ccproxy/plugins/oauth_codex/__init__.py +14 -0
  289. ccproxy/plugins/oauth_codex/client.py +224 -0
  290. ccproxy/plugins/oauth_codex/config.py +95 -0
  291. ccproxy/plugins/oauth_codex/manager.py +256 -0
  292. ccproxy/plugins/oauth_codex/models.py +239 -0
  293. ccproxy/plugins/oauth_codex/plugin.py +146 -0
  294. ccproxy/plugins/oauth_codex/provider.py +574 -0
  295. ccproxy/plugins/oauth_codex/storage.py +92 -0
  296. ccproxy/plugins/permissions/README.md +28 -0
  297. ccproxy/plugins/permissions/__init__.py +22 -0
  298. ccproxy/plugins/permissions/config.py +28 -0
  299. ccproxy/{cli/commands/permission_handler.py → plugins/permissions/handlers/cli.py} +49 -25
  300. ccproxy/plugins/permissions/handlers/protocol.py +33 -0
  301. ccproxy/plugins/permissions/handlers/terminal.py +675 -0
  302. ccproxy/{api/routes → plugins/permissions}/mcp.py +34 -7
  303. ccproxy/{models/permissions.py → plugins/permissions/models.py} +65 -1
  304. ccproxy/plugins/permissions/plugin.py +153 -0
  305. ccproxy/{api/routes/permissions.py → plugins/permissions/routes.py} +20 -16
  306. ccproxy/{api/services/permission_service.py → plugins/permissions/service.py} +65 -11
  307. ccproxy/{api → plugins/permissions}/ui/permission_handler_protocol.py +1 -1
  308. ccproxy/{api → plugins/permissions}/ui/terminal_permission_handler.py +66 -10
  309. ccproxy/plugins/pricing/README.md +34 -0
  310. ccproxy/plugins/pricing/__init__.py +6 -0
  311. ccproxy/{pricing → plugins/pricing}/cache.py +7 -6
  312. ccproxy/{config/pricing.py → plugins/pricing/config.py} +32 -6
  313. ccproxy/plugins/pricing/exceptions.py +35 -0
  314. ccproxy/plugins/pricing/loader.py +440 -0
  315. ccproxy/{pricing → plugins/pricing}/models.py +13 -23
  316. ccproxy/plugins/pricing/plugin.py +169 -0
  317. ccproxy/plugins/pricing/service.py +191 -0
  318. ccproxy/plugins/pricing/tasks.py +300 -0
  319. ccproxy/{pricing → plugins/pricing}/updater.py +86 -72
  320. ccproxy/plugins/pricing/utils.py +99 -0
  321. ccproxy/plugins/request_tracer/README.md +40 -0
  322. ccproxy/plugins/request_tracer/__init__.py +7 -0
  323. ccproxy/plugins/request_tracer/config.py +120 -0
  324. ccproxy/plugins/request_tracer/hook.py +415 -0
  325. ccproxy/plugins/request_tracer/plugin.py +255 -0
  326. ccproxy/scheduler/__init__.py +2 -14
  327. ccproxy/scheduler/core.py +26 -41
  328. ccproxy/scheduler/manager.py +63 -107
  329. ccproxy/scheduler/registry.py +6 -32
  330. ccproxy/scheduler/tasks.py +346 -314
  331. ccproxy/services/__init__.py +0 -1
  332. ccproxy/services/adapters/__init__.py +11 -0
  333. ccproxy/services/adapters/base.py +123 -0
  334. ccproxy/services/adapters/chain_composer.py +88 -0
  335. ccproxy/services/adapters/chain_validation.py +44 -0
  336. ccproxy/services/adapters/chat_accumulator.py +200 -0
  337. ccproxy/services/adapters/delta_utils.py +142 -0
  338. ccproxy/services/adapters/format_adapter.py +136 -0
  339. ccproxy/services/adapters/format_context.py +11 -0
  340. ccproxy/services/adapters/format_registry.py +158 -0
  341. ccproxy/services/adapters/http_adapter.py +1045 -0
  342. ccproxy/services/adapters/mock_adapter.py +118 -0
  343. ccproxy/services/adapters/protocols.py +35 -0
  344. ccproxy/services/adapters/simple_converters.py +571 -0
  345. ccproxy/services/auth_registry.py +180 -0
  346. ccproxy/services/cache/__init__.py +6 -0
  347. ccproxy/services/cache/response_cache.py +261 -0
  348. ccproxy/services/cli_detection.py +437 -0
  349. ccproxy/services/config/__init__.py +6 -0
  350. ccproxy/services/config/proxy_configuration.py +111 -0
  351. ccproxy/services/container.py +256 -0
  352. ccproxy/services/factories.py +380 -0
  353. ccproxy/services/handler_config.py +76 -0
  354. ccproxy/services/interfaces.py +298 -0
  355. ccproxy/services/mocking/__init__.py +6 -0
  356. ccproxy/services/mocking/mock_handler.py +291 -0
  357. ccproxy/services/tracing/__init__.py +7 -0
  358. ccproxy/services/tracing/interfaces.py +61 -0
  359. ccproxy/services/tracing/null_tracer.py +57 -0
  360. ccproxy/streaming/__init__.py +23 -0
  361. ccproxy/streaming/buffer.py +1056 -0
  362. ccproxy/streaming/deferred.py +897 -0
  363. ccproxy/streaming/handler.py +117 -0
  364. ccproxy/streaming/interfaces.py +77 -0
  365. ccproxy/streaming/simple_adapter.py +39 -0
  366. ccproxy/streaming/sse.py +109 -0
  367. ccproxy/streaming/sse_parser.py +127 -0
  368. ccproxy/templates/__init__.py +6 -0
  369. ccproxy/templates/plugin_scaffold.py +695 -0
  370. ccproxy/testing/endpoints/__init__.py +33 -0
  371. ccproxy/testing/endpoints/cli.py +215 -0
  372. ccproxy/testing/endpoints/config.py +874 -0
  373. ccproxy/testing/endpoints/console.py +57 -0
  374. ccproxy/testing/endpoints/models.py +100 -0
  375. ccproxy/testing/endpoints/runner.py +1903 -0
  376. ccproxy/testing/endpoints/tools.py +308 -0
  377. ccproxy/testing/mock_responses.py +70 -1
  378. ccproxy/testing/response_handlers.py +20 -0
  379. ccproxy/utils/__init__.py +0 -6
  380. ccproxy/utils/binary_resolver.py +476 -0
  381. ccproxy/utils/caching.py +327 -0
  382. ccproxy/utils/cli_logging.py +101 -0
  383. ccproxy/utils/command_line.py +251 -0
  384. ccproxy/utils/headers.py +228 -0
  385. ccproxy/utils/model_mapper.py +120 -0
  386. ccproxy/utils/startup_helpers.py +95 -342
  387. ccproxy/utils/version_checker.py +279 -6
  388. ccproxy_api-0.2.0.dist-info/METADATA +212 -0
  389. ccproxy_api-0.2.0.dist-info/RECORD +417 -0
  390. {ccproxy_api-0.1.6.dist-info → ccproxy_api-0.2.0.dist-info}/WHEEL +1 -1
  391. ccproxy_api-0.2.0.dist-info/entry_points.txt +24 -0
  392. ccproxy/__init__.py +0 -4
  393. ccproxy/adapters/__init__.py +0 -11
  394. ccproxy/adapters/base.py +0 -80
  395. ccproxy/adapters/codex/__init__.py +0 -11
  396. ccproxy/adapters/openai/__init__.py +0 -42
  397. ccproxy/adapters/openai/adapter.py +0 -953
  398. ccproxy/adapters/openai/models.py +0 -412
  399. ccproxy/adapters/openai/response_adapter.py +0 -355
  400. ccproxy/adapters/openai/response_models.py +0 -178
  401. ccproxy/api/middleware/headers.py +0 -49
  402. ccproxy/api/middleware/logging.py +0 -180
  403. ccproxy/api/middleware/request_content_logging.py +0 -297
  404. ccproxy/api/middleware/server_header.py +0 -58
  405. ccproxy/api/responses.py +0 -89
  406. ccproxy/api/routes/claude.py +0 -371
  407. ccproxy/api/routes/codex.py +0 -1231
  408. ccproxy/api/routes/metrics.py +0 -1029
  409. ccproxy/api/routes/proxy.py +0 -211
  410. ccproxy/api/services/__init__.py +0 -6
  411. ccproxy/auth/conditional.py +0 -84
  412. ccproxy/auth/credentials_adapter.py +0 -93
  413. ccproxy/auth/models.py +0 -118
  414. ccproxy/auth/oauth/models.py +0 -48
  415. ccproxy/auth/openai/__init__.py +0 -13
  416. ccproxy/auth/openai/credentials.py +0 -166
  417. ccproxy/auth/openai/oauth_client.py +0 -334
  418. ccproxy/auth/openai/storage.py +0 -184
  419. ccproxy/auth/storage/json_file.py +0 -158
  420. ccproxy/auth/storage/keyring.py +0 -189
  421. ccproxy/claude_sdk/__init__.py +0 -18
  422. ccproxy/claude_sdk/options.py +0 -194
  423. ccproxy/claude_sdk/session_pool.py +0 -550
  424. ccproxy/cli/docker/__init__.py +0 -34
  425. ccproxy/cli/docker/adapter_factory.py +0 -157
  426. ccproxy/cli/docker/params.py +0 -274
  427. ccproxy/config/auth.py +0 -153
  428. ccproxy/config/claude.py +0 -348
  429. ccproxy/config/cors.py +0 -79
  430. ccproxy/config/discovery.py +0 -95
  431. ccproxy/config/docker_settings.py +0 -264
  432. ccproxy/config/observability.py +0 -158
  433. ccproxy/config/reverse_proxy.py +0 -31
  434. ccproxy/config/scheduler.py +0 -108
  435. ccproxy/config/server.py +0 -86
  436. ccproxy/config/validators.py +0 -231
  437. ccproxy/core/codex_transformers.py +0 -389
  438. ccproxy/core/http.py +0 -328
  439. ccproxy/core/http_transformers.py +0 -812
  440. ccproxy/core/proxy.py +0 -143
  441. ccproxy/core/validators.py +0 -288
  442. ccproxy/models/errors.py +0 -42
  443. ccproxy/models/messages.py +0 -269
  444. ccproxy/models/requests.py +0 -107
  445. ccproxy/models/responses.py +0 -270
  446. ccproxy/models/types.py +0 -102
  447. ccproxy/observability/__init__.py +0 -51
  448. ccproxy/observability/access_logger.py +0 -457
  449. ccproxy/observability/sse_events.py +0 -303
  450. ccproxy/observability/stats_printer.py +0 -753
  451. ccproxy/observability/storage/__init__.py +0 -1
  452. ccproxy/observability/storage/duckdb_simple.py +0 -677
  453. ccproxy/observability/storage/models.py +0 -70
  454. ccproxy/observability/streaming_response.py +0 -107
  455. ccproxy/pricing/__init__.py +0 -19
  456. ccproxy/pricing/loader.py +0 -251
  457. ccproxy/services/claude_detection_service.py +0 -269
  458. ccproxy/services/codex_detection_service.py +0 -263
  459. ccproxy/services/credentials/__init__.py +0 -55
  460. ccproxy/services/credentials/config.py +0 -105
  461. ccproxy/services/credentials/manager.py +0 -561
  462. ccproxy/services/credentials/oauth_client.py +0 -481
  463. ccproxy/services/proxy_service.py +0 -1827
  464. ccproxy/static/.keep +0 -0
  465. ccproxy/utils/cost_calculator.py +0 -210
  466. ccproxy/utils/disconnection_monitor.py +0 -83
  467. ccproxy/utils/model_mapping.py +0 -199
  468. ccproxy/utils/models_provider.py +0 -150
  469. ccproxy/utils/simple_request_logger.py +0 -284
  470. ccproxy/utils/streaming_metrics.py +0 -199
  471. ccproxy_api-0.1.6.dist-info/METADATA +0 -615
  472. ccproxy_api-0.1.6.dist-info/RECORD +0 -189
  473. ccproxy_api-0.1.6.dist-info/entry_points.txt +0 -4
  474. /ccproxy/{api/middleware/auth.py → auth/models/__init__.py} +0 -0
  475. /ccproxy/{claude_sdk → plugins/claude_sdk}/exceptions.py +0 -0
  476. /ccproxy/{docker → plugins/docker}/models.py +0 -0
  477. /ccproxy/{docker → plugins/docker}/protocol.py +0 -0
  478. /ccproxy/{docker → plugins/docker}/validators.py +0 -0
  479. /ccproxy/{auth/oauth/storage.py → plugins/permissions/handlers/__init__.py} +0 -0
  480. /ccproxy/{api → plugins/permissions}/ui/__init__.py +0 -0
  481. {ccproxy_api-0.1.6.dist-info → ccproxy_api-0.2.0.dist-info}/licenses/LICENSE +0 -0
ccproxy/adapters/openai/response_adapter.py (removed)
@@ -1,355 +0,0 @@
- """Adapter for converting between OpenAI Chat Completions and Response API formats.
-
- This adapter handles bidirectional conversion between:
- - OpenAI Chat Completions API (used by most OpenAI clients)
- - OpenAI Response API (used by Codex/ChatGPT backend)
- """
-
- from __future__ import annotations
-
- import json
- import time
- import uuid
- from collections.abc import AsyncIterator
- from typing import Any
-
- import structlog
-
- from ccproxy.adapters.openai.models import (
-     OpenAIChatCompletionRequest,
-     OpenAIChatCompletionResponse,
-     OpenAIChoice,
-     OpenAIResponseMessage,
-     OpenAIUsage,
- )
- from ccproxy.adapters.openai.response_models import (
-     ResponseCompleted,
-     ResponseMessage,
-     ResponseMessageContent,
-     ResponseReasoning,
-     ResponseRequest,
- )
-
-
- logger = structlog.get_logger(__name__)
-
-
- class ResponseAdapter:
-     """Adapter for OpenAI Response API format conversion."""
-
-     def chat_to_response_request(
-         self, chat_request: dict[str, Any] | OpenAIChatCompletionRequest
-     ) -> ResponseRequest:
-         """Convert Chat Completions request to Response API format.
-
-         Args:
-             chat_request: OpenAI Chat Completions request
-
-         Returns:
-             Response API formatted request
-         """
-         if isinstance(chat_request, OpenAIChatCompletionRequest):
-             chat_dict = chat_request.model_dump()
-         else:
-             chat_dict = chat_request
-
-         # Extract messages and convert to Response API format
-         messages = chat_dict.get("messages", [])
-         response_input = []
-         instructions = None
-
-         for msg in messages:
-             role = msg.get("role", "user")
-             content = msg.get("content", "")
-
-             # System messages become instructions
-             if role == "system":
-                 instructions = content
-                 continue
-
-             # Convert user/assistant messages to Response API format
-             response_msg = ResponseMessage(
-                 type="message",
-                 id=None,
-                 role=role if role in ["user", "assistant"] else "user",
-                 content=[
-                     ResponseMessageContent(
-                         type="input_text" if role == "user" else "output_text",
-                         text=content if isinstance(content, str) else str(content),
-                     )
-                 ],
-             )
-             response_input.append(response_msg)
-
-         # Leave instructions field unset to let codex_transformers inject them
-         # The backend validates instructions and needs the full Codex ones
-         instructions = None
-         # Actually, we need to not include the field at all if it's None
-         # Otherwise the backend complains "Instructions are required"
-
-         # Map model (Codex uses gpt-5)
-         model = chat_dict.get("model", "gpt-4")
-         # For Codex, we typically use gpt-5
-         response_model = (
-             "gpt-5" if "codex" in model.lower() or "gpt-5" in model.lower() else model
-         )
-
-         # Build Response API request
-         # Note: Response API always requires stream=true and store=false
-         # Also, Response API doesn't support temperature and other OpenAI-specific parameters
-         request = ResponseRequest(
-             model=response_model,
-             instructions=instructions,
-             input=response_input,
-             stream=True,  # Always use streaming for Response API
-             tool_choice="auto",
-             parallel_tool_calls=chat_dict.get("parallel_tool_calls", False),
-             reasoning=ResponseReasoning(effort="medium", summary="auto"),
-             store=False,  # Must be false for Response API
-             # The following parameters are not supported by Response API:
-             # temperature, max_output_tokens, top_p, frequency_penalty, presence_penalty
-         )
-
-         return request
-
-     def response_to_chat_completion(
-         self, response_data: dict[str, Any] | ResponseCompleted
-     ) -> OpenAIChatCompletionResponse:
-         """Convert Response API response to Chat Completions format.
-
-         Args:
-             response_data: Response API response
-
-         Returns:
-             Chat Completions formatted response
-         """
-         # Extract the actual response data
-         response_dict: dict[str, Any]
-         if isinstance(response_data, ResponseCompleted):
-             # Convert Pydantic model to dict
-             response_dict = response_data.response.model_dump()
-         else:  # isinstance(response_data, dict)
-             if "response" in response_data:
-                 response_dict = response_data["response"]
-             else:
-                 response_dict = response_data
-
-         # Extract content from Response API output
-         content = ""
-         output = response_dict.get("output", [])
-         # Look for message type output (skip reasoning)
-         for output_item in output:
-             if output_item.get("type") == "message":
-                 output_content = output_item.get("content", [])
-                 for content_block in output_content:
-                     if content_block.get("type") in ["output_text", "text"]:
-                         content += content_block.get("text", "")
-
-         # Build Chat Completions response
-         usage_data = response_dict.get("usage")
-         converted_usage = self._convert_usage(usage_data) if usage_data else None
-
-         return OpenAIChatCompletionResponse(
-             id=response_dict.get("id", f"resp_{uuid.uuid4().hex}"),
-             object="chat.completion",
-             created=response_dict.get("created_at", int(time.time())),
-             model=response_dict.get("model", "gpt-5"),
-             choices=[
-                 OpenAIChoice(
-                     index=0,
-                     message=OpenAIResponseMessage(
-                         role="assistant", content=content or None
-                     ),
-                     finish_reason="stop",
-                 )
-             ],
-             usage=converted_usage,
-             system_fingerprint=response_dict.get("safety_identifier"),
-         )
-
-     async def stream_response_to_chat(
-         self, response_stream: AsyncIterator[bytes]
-     ) -> AsyncIterator[dict[str, Any]]:
-         """Convert Response API SSE stream to Chat Completions format.
-
-         Args:
-             response_stream: Async iterator of SSE bytes from Response API
-
-         Yields:
-             Chat Completions formatted streaming chunks
-         """
-         stream_id = f"chatcmpl_{uuid.uuid4().hex[:29]}"
-         created = int(time.time())
-         accumulated_content = ""
-         buffer = ""
-
-         logger.debug("response_adapter_stream_started", stream_id=stream_id)
-         raw_chunk_count = 0
-         event_count = 0
-
-         async for chunk in response_stream:
-             raw_chunk_count += 1
-             chunk_size = len(chunk)
-             logger.debug(
-                 "response_adapter_raw_chunk_received",
-                 chunk_number=raw_chunk_count,
-                 chunk_size=chunk_size,
-                 buffer_size_before=len(buffer),
-             )
-
-             # Add chunk to buffer
-             buffer += chunk.decode("utf-8")
-
-             # Process complete SSE events (separated by double newlines)
-             while "\n\n" in buffer:
-                 event_str, buffer = buffer.split("\n\n", 1)
-                 event_count += 1
-
-                 # Parse the SSE event
-                 event_type = None
-                 event_data = None
-
-                 for line in event_str.strip().split("\n"):
-                     if not line:
-                         continue
-
-                     if line.startswith("event:"):
-                         event_type = line[6:].strip()
-                     elif line.startswith("data:"):
-                         data_str = line[5:].strip()
-                         if data_str == "[DONE]":
-                             logger.debug(
-                                 "response_adapter_done_marker_found",
-                                 event_number=event_count,
-                             )
-                             continue
-                         try:
-                             event_data = json.loads(data_str)
-                         except json.JSONDecodeError:
-                             logger.debug(
-                                 "response_adapter_sse_parse_failed",
-                                 data_preview=data_str[:100],
-                                 event_number=event_count,
-                             )
-                             continue
-
-                 # Process complete events
-                 if event_type and event_data:
-                     logger.debug(
-                         "response_adapter_sse_event_parsed",
-                         event_type=event_type,
-                         event_number=event_count,
-                         has_output="output" in str(event_data),
-                     )
-                     if event_type in [
-                         "response.output.delta",
-                         "response.output_text.delta",
-                     ]:
-                         # Extract delta content
-                         delta_content = ""
-
-                         # Handle different event structures
-                         if event_type == "response.output_text.delta":
-                             # Direct text delta event
-                             delta_content = event_data.get("delta", "")
-                         else:
-                             # Standard output delta with nested structure
-                             output = event_data.get("output", [])
-                             if output:
-                                 for output_item in output:
-                                     if output_item.get("type") == "message":
-                                         content_blocks = output_item.get("content", [])
-                                         for block in content_blocks:
-                                             if block.get("type") in [
-                                                 "output_text",
-                                                 "text",
-                                             ]:
-                                                 delta_content += block.get("text", "")
-
-                         if delta_content:
-                             accumulated_content += delta_content
-
-                             logger.debug(
-                                 "response_adapter_yielding_content",
-                                 content_length=len(delta_content),
-                                 accumulated_length=len(accumulated_content),
-                             )
-
-                             # Create Chat Completions streaming chunk
-                             yield {
-                                 "id": stream_id,
-                                 "object": "chat.completion.chunk",
-                                 "created": created,
-                                 "model": event_data.get("model", "gpt-5"),
-                                 "choices": [
-                                     {
-                                         "index": 0,
-                                         "delta": {"content": delta_content},
-                                         "finish_reason": None,
-                                     }
-                                 ],
-                             }
-
-                     elif event_type == "response.completed":
-                         # Final chunk with usage info
-                         response = event_data.get("response", {})
-                         usage = response.get("usage")
-
-                         logger.debug(
-                             "response_adapter_stream_completed",
-                             total_content_length=len(accumulated_content),
-                             has_usage=usage is not None,
-                         )
-
-                         chunk_data = {
-                             "id": stream_id,
-                             "object": "chat.completion.chunk",
-                             "created": created,
-                             "model": response.get("model", "gpt-5"),
-                             "choices": [
-                                 {"index": 0, "delta": {}, "finish_reason": "stop"}
-                             ],
-                         }
-
-                         # Add usage if available
-                         converted_usage = self._convert_usage(usage) if usage else None
-                         if converted_usage:
-                             chunk_data["usage"] = converted_usage.model_dump()
-
-                         yield chunk_data
-
-         logger.debug(
-             "response_adapter_stream_finished",
-             stream_id=stream_id,
-             total_raw_chunks=raw_chunk_count,
-             total_events=event_count,
-             final_buffer_size=len(buffer),
-         )
-
-     def _convert_usage(
-         self, response_usage: dict[str, Any] | None
-     ) -> OpenAIUsage | None:
-         """Convert Response API usage to Chat Completions format."""
-         if not response_usage:
-             return None
-
-         return OpenAIUsage(
-             prompt_tokens=response_usage.get("input_tokens", 0),
-             completion_tokens=response_usage.get("output_tokens", 0),
-             total_tokens=response_usage.get("total_tokens", 0),
-         )
-
-     def _get_default_codex_instructions(self) -> str:
-         """Get default Codex CLI instructions."""
-         return (
-             "You are a coding agent running in the Codex CLI, a terminal-based coding assistant. "
-             "Codex CLI is an open source project led by OpenAI. You are expected to be precise, safe, and helpful.\n\n"
-             "Your capabilities:\n"
-             "- Receive user prompts and other context provided by the harness, such as files in the workspace.\n"
-             "- Communicate with the user by streaming thinking & responses, and by making & updating plans.\n"
-             "- Emit function calls to run terminal commands and apply patches. Depending on how this specific run is configured, "
-             "you can request that these function calls be escalated to the user for approval before running. "
-             'More on this in the "Sandbox and approvals" section.\n\n'
-             "Within this context, Codex refers to the open-source agentic coding interface "
-             "(not the old Codex language model built by OpenAI)."
-         )
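For context only (not part of the package diff): the removed ResponseAdapter took no constructor arguments and exposed the three conversion entry points shown above. A minimal, illustrative sketch of driving it under 0.1.6 follows; the request payload and response dict are invented for the example, while the class, method names, and import path come from the deleted module.

# Illustrative sketch only: exercising the 0.1.6 ResponseAdapter removed above.
from ccproxy.adapters.openai.response_adapter import ResponseAdapter

adapter = ResponseAdapter()

# Chat Completions -> Response API request (the system message is folded into
# instructions, then deliberately reset to None so downstream code can inject
# the full Codex instructions).
chat_request = {
    "model": "gpt-5",
    "messages": [
        {"role": "system", "content": "You are terse."},
        {"role": "user", "content": "Hello"},
    ],
}
response_request = adapter.chat_to_response_request(chat_request)

# Response API completion -> Chat Completions response (payload is made up).
completed = {
    "response": {
        "id": "resp_123",
        "model": "gpt-5",
        "output": [
            {
                "type": "message",
                "content": [{"type": "output_text", "text": "Hi there"}],
            }
        ],
        "usage": {"input_tokens": 3, "output_tokens": 2, "total_tokens": 5},
    }
}
chat_response = adapter.response_to_chat_completion(completed)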
ccproxy/adapters/openai/response_models.py (removed)
@@ -1,178 +0,0 @@
- """OpenAI Response API models.
-
- This module contains data models for OpenAI's Response API format
- used by Codex/ChatGPT backend.
- """
-
- from __future__ import annotations
-
- from typing import Any, Literal
-
- from pydantic import BaseModel
-
-
- # Request Models
-
-
- class ResponseMessageContent(BaseModel):
-     """Content block in a Response API message."""
-
-     type: Literal["input_text", "output_text"]
-     text: str
-
-
- class ResponseMessage(BaseModel):
-     """Message in Response API format."""
-
-     type: Literal["message"]
-     id: str | None = None
-     role: Literal["user", "assistant", "system"]
-     content: list[ResponseMessageContent]
-
-
- class ResponseReasoning(BaseModel):
-     """Reasoning configuration for Response API."""
-
-     effort: Literal["low", "medium", "high"] = "medium"
-     summary: Literal["auto", "none"] | None = "auto"
-
-
- class ResponseRequest(BaseModel):
-     """OpenAI Response API request format."""
-
-     model: str
-     instructions: str | None = None
-     input: list[ResponseMessage]
-     stream: bool = True
-     tool_choice: Literal["auto", "none", "required"] | str = "auto"
-     parallel_tool_calls: bool = False
-     reasoning: ResponseReasoning | None = None
-     store: bool = False
-     include: list[str] | None = None
-     prompt_cache_key: str | None = None
-     # Note: The following OpenAI parameters are not supported by Response API (Codex backend):
-     # temperature, max_output_tokens, top_p, frequency_penalty, presence_penalty, metadata
-     # If included, they'll cause "Unsupported parameter" errors
-
-
- # Response Models
-
-
- class ResponseOutput(BaseModel):
-     """Output content in Response API."""
-
-     id: str
-     type: Literal["message"]
-     status: Literal["completed", "in_progress"]
-     content: list[ResponseMessageContent]
-     role: Literal["assistant"]
-
-
- class ResponseUsage(BaseModel):
-     """Usage statistics in Response API."""
-
-     input_tokens: int
-     output_tokens: int
-     total_tokens: int
-     input_tokens_details: dict[str, Any] | None = None
-     output_tokens_details: dict[str, Any] | None = None
-
-
- class ResponseReasoningContent(BaseModel):
-     """Reasoning content in response."""
-
-     effort: Literal["low", "medium", "high"]
-     summary: str | None = None
-     encrypted_content: str | None = None
-
-
- class ResponseData(BaseModel):
-     """Complete response data structure."""
-
-     id: str
-     object: Literal["response"]
-     created_at: int
-     status: Literal["completed", "failed", "cancelled"]
-     background: bool = False
-     error: dict[str, Any] | None = None
-     incomplete_details: dict[str, Any] | None = None
-     instructions: str | None = None
-     max_output_tokens: int | None = None
-     model: str
-     output: list[ResponseOutput]
-     parallel_tool_calls: bool = False
-     previous_response_id: str | None = None
-     prompt_cache_key: str | None = None
-     reasoning: ResponseReasoningContent | None = None
-     safety_identifier: str | None = None
-     service_tier: str | None = None
-     store: bool = False
-     temperature: float | None = None
-     text: dict[str, Any] | None = None
-     tool_choice: str | None = None
-     tools: list[dict[str, Any]] | None = None
-     top_logprobs: int | None = None
-     top_p: float | None = None
-     truncation: str | None = None
-     usage: ResponseUsage | None = None
-     user: str | None = None
-     metadata: dict[str, Any] | None = None
-
-
- class ResponseCompleted(BaseModel):
-     """Complete response from Response API."""
-
-     type: Literal["response.completed"]
-     sequence_number: int
-     response: ResponseData
-
-
- # Streaming Models
-
-
- class StreamingDelta(BaseModel):
-     """Delta content in streaming response."""
-
-     content: str | None = None
-     role: Literal["assistant"] | None = None
-     reasoning_content: str | None = None
-     output: list[dict[str, Any]] | None = None
-
-
- class StreamingChoice(BaseModel):
-     """Choice in streaming response."""
-
-     index: int
-     delta: StreamingDelta
-     finish_reason: Literal["stop", "length", "tool_calls", "content_filter"] | None = (
-         None
-     )
-
-
- class StreamingChunk(BaseModel):
-     """Streaming chunk from Response API."""
-
-     id: str
-     object: Literal["response.chunk", "chat.completion.chunk"]
-     created: int
-     model: str
-     choices: list[StreamingChoice]
-     usage: ResponseUsage | None = None
-     system_fingerprint: str | None = None
-
-
- class StreamingEvent(BaseModel):
-     """Server-sent event wrapper for streaming."""
-
-     event: (
-         Literal[
-             "response.created",
-             "response.output.started",
-             "response.output.delta",
-             "response.output.completed",
-             "response.completed",
-             "response.failed",
-         ]
-         | None
-     ) = None
-     data: dict[str, Any] | str
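For context only (not part of the package diff): the removed request models compose roughly as sketched below. Field values are invented for the example; using model_dump(exclude_none=True) to drop an unset instructions field is an assumption that follows the comments in the deleted adapter, not something shown in the diff itself.

# Illustrative sketch only: building a request from the 0.1.6 models removed above.
from ccproxy.adapters.openai.response_models import (
    ResponseMessage,
    ResponseMessageContent,
    ResponseReasoning,
    ResponseRequest,
)

request = ResponseRequest(
    model="gpt-5",
    input=[
        ResponseMessage(
            type="message",
            role="user",
            content=[ResponseMessageContent(type="input_text", text="Hello")],
        )
    ],
    reasoning=ResponseReasoning(effort="medium", summary="auto"),
    stream=True,
    store=False,
)

# One way to omit None-valued optional fields such as instructions, per the
# note in the deleted adapter about not sending the field at all.
payload = request.model_dump(exclude_none=True)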
ccproxy/api/middleware/headers.py (removed)
@@ -1,49 +0,0 @@
- """Header preservation middleware to maintain proxy response headers."""
-
- from fastapi import Request, Response
- from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
- from starlette.types import ASGIApp
-
-
- class HeaderPreservationMiddleware(BaseHTTPMiddleware):
-     """Middleware to preserve certain headers from proxy responses.
-
-     This middleware ensures that headers like 'server' from the upstream
-     API are preserved and not overridden by Uvicorn/Starlette.
-     """
-
-     def __init__(self, app: ASGIApp):
-         """Initialize the header preservation middleware.
-
-         Args:
-             app: The ASGI application
-         """
-         super().__init__(app)
-
-     async def dispatch(
-         self, request: Request, call_next: RequestResponseEndpoint
-     ) -> Response:
-         """Process the request and preserve specific headers.
-
-         Args:
-             request: The incoming HTTP request
-             call_next: The next middleware/handler in the chain
-
-         Returns:
-             The HTTP response with preserved headers
-         """
-         # Process the request
-         response = await call_next(request)
-
-         # Check if we have a stored server header to preserve
-         # This would be set by the proxy service if we want to preserve it
-         if hasattr(request.state, "preserve_headers"):
-             for header_name, header_value in request.state.preserve_headers.items():
-                 # Force set the header to override any default values
-                 response.headers[header_name] = header_value
-                 # Also try raw header setting for more control
-                 response.raw_headers.append(
-                     (header_name.encode(), header_value.encode())
-                 )
-
-         return response
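For context only (not part of the package diff): a sketch of how the removed middleware would be registered on a FastAPI app. The deleted docstring says the proxy service set request.state.preserve_headers; the plain route and header value below are hypothetical stand-ins for that.

# Illustrative sketch only: wiring up the 0.1.6 middleware removed above.
from fastapi import FastAPI, Request

from ccproxy.api.middleware.headers import HeaderPreservationMiddleware

app = FastAPI()
app.add_middleware(HeaderPreservationMiddleware)


@app.get("/proxied")
async def proxied(request: Request) -> dict[str, str]:
    # Stand-in for the proxy service: stash upstream headers for the
    # middleware to re-apply onto the outgoing response.
    request.state.preserve_headers = {"server": "upstream-server"}
    return {"status": "ok"}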