ccproxy_api-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (148)
  1. ccproxy/__init__.py +4 -0
  2. ccproxy/__main__.py +7 -0
  3. ccproxy/_version.py +21 -0
  4. ccproxy/adapters/__init__.py +11 -0
  5. ccproxy/adapters/base.py +80 -0
  6. ccproxy/adapters/openai/__init__.py +43 -0
  7. ccproxy/adapters/openai/adapter.py +915 -0
  8. ccproxy/adapters/openai/models.py +412 -0
  9. ccproxy/adapters/openai/streaming.py +449 -0
  10. ccproxy/api/__init__.py +28 -0
  11. ccproxy/api/app.py +225 -0
  12. ccproxy/api/dependencies.py +140 -0
  13. ccproxy/api/middleware/__init__.py +11 -0
  14. ccproxy/api/middleware/auth.py +0 -0
  15. ccproxy/api/middleware/cors.py +55 -0
  16. ccproxy/api/middleware/errors.py +703 -0
  17. ccproxy/api/middleware/headers.py +51 -0
  18. ccproxy/api/middleware/logging.py +175 -0
  19. ccproxy/api/middleware/request_id.py +69 -0
  20. ccproxy/api/middleware/server_header.py +62 -0
  21. ccproxy/api/responses.py +84 -0
  22. ccproxy/api/routes/__init__.py +16 -0
  23. ccproxy/api/routes/claude.py +181 -0
  24. ccproxy/api/routes/health.py +489 -0
  25. ccproxy/api/routes/metrics.py +1033 -0
  26. ccproxy/api/routes/proxy.py +238 -0
  27. ccproxy/auth/__init__.py +75 -0
  28. ccproxy/auth/bearer.py +68 -0
  29. ccproxy/auth/credentials_adapter.py +93 -0
  30. ccproxy/auth/dependencies.py +229 -0
  31. ccproxy/auth/exceptions.py +79 -0
  32. ccproxy/auth/manager.py +102 -0
  33. ccproxy/auth/models.py +118 -0
  34. ccproxy/auth/oauth/__init__.py +26 -0
  35. ccproxy/auth/oauth/models.py +49 -0
  36. ccproxy/auth/oauth/routes.py +396 -0
  37. ccproxy/auth/oauth/storage.py +0 -0
  38. ccproxy/auth/storage/__init__.py +12 -0
  39. ccproxy/auth/storage/base.py +57 -0
  40. ccproxy/auth/storage/json_file.py +159 -0
  41. ccproxy/auth/storage/keyring.py +192 -0
  42. ccproxy/claude_sdk/__init__.py +20 -0
  43. ccproxy/claude_sdk/client.py +169 -0
  44. ccproxy/claude_sdk/converter.py +331 -0
  45. ccproxy/claude_sdk/options.py +120 -0
  46. ccproxy/cli/__init__.py +14 -0
  47. ccproxy/cli/commands/__init__.py +8 -0
  48. ccproxy/cli/commands/auth.py +553 -0
  49. ccproxy/cli/commands/config/__init__.py +14 -0
  50. ccproxy/cli/commands/config/commands.py +766 -0
  51. ccproxy/cli/commands/config/schema_commands.py +119 -0
  52. ccproxy/cli/commands/serve.py +630 -0
  53. ccproxy/cli/docker/__init__.py +34 -0
  54. ccproxy/cli/docker/adapter_factory.py +157 -0
  55. ccproxy/cli/docker/params.py +278 -0
  56. ccproxy/cli/helpers.py +144 -0
  57. ccproxy/cli/main.py +193 -0
  58. ccproxy/cli/options/__init__.py +14 -0
  59. ccproxy/cli/options/claude_options.py +216 -0
  60. ccproxy/cli/options/core_options.py +40 -0
  61. ccproxy/cli/options/security_options.py +48 -0
  62. ccproxy/cli/options/server_options.py +117 -0
  63. ccproxy/config/__init__.py +40 -0
  64. ccproxy/config/auth.py +154 -0
  65. ccproxy/config/claude.py +124 -0
  66. ccproxy/config/cors.py +79 -0
  67. ccproxy/config/discovery.py +87 -0
  68. ccproxy/config/docker_settings.py +265 -0
  69. ccproxy/config/loader.py +108 -0
  70. ccproxy/config/observability.py +158 -0
  71. ccproxy/config/pricing.py +88 -0
  72. ccproxy/config/reverse_proxy.py +31 -0
  73. ccproxy/config/scheduler.py +89 -0
  74. ccproxy/config/security.py +14 -0
  75. ccproxy/config/server.py +81 -0
  76. ccproxy/config/settings.py +534 -0
  77. ccproxy/config/validators.py +231 -0
  78. ccproxy/core/__init__.py +274 -0
  79. ccproxy/core/async_utils.py +675 -0
  80. ccproxy/core/constants.py +97 -0
  81. ccproxy/core/errors.py +256 -0
  82. ccproxy/core/http.py +328 -0
  83. ccproxy/core/http_transformers.py +428 -0
  84. ccproxy/core/interfaces.py +247 -0
  85. ccproxy/core/logging.py +189 -0
  86. ccproxy/core/middleware.py +114 -0
  87. ccproxy/core/proxy.py +143 -0
  88. ccproxy/core/system.py +38 -0
  89. ccproxy/core/transformers.py +259 -0
  90. ccproxy/core/types.py +129 -0
  91. ccproxy/core/validators.py +288 -0
  92. ccproxy/docker/__init__.py +67 -0
  93. ccproxy/docker/adapter.py +588 -0
  94. ccproxy/docker/docker_path.py +207 -0
  95. ccproxy/docker/middleware.py +103 -0
  96. ccproxy/docker/models.py +228 -0
  97. ccproxy/docker/protocol.py +192 -0
  98. ccproxy/docker/stream_process.py +264 -0
  99. ccproxy/docker/validators.py +173 -0
  100. ccproxy/models/__init__.py +123 -0
  101. ccproxy/models/errors.py +42 -0
  102. ccproxy/models/messages.py +243 -0
  103. ccproxy/models/requests.py +85 -0
  104. ccproxy/models/responses.py +227 -0
  105. ccproxy/models/types.py +102 -0
  106. ccproxy/observability/__init__.py +51 -0
  107. ccproxy/observability/access_logger.py +400 -0
  108. ccproxy/observability/context.py +447 -0
  109. ccproxy/observability/metrics.py +539 -0
  110. ccproxy/observability/pushgateway.py +366 -0
  111. ccproxy/observability/sse_events.py +303 -0
  112. ccproxy/observability/stats_printer.py +755 -0
  113. ccproxy/observability/storage/__init__.py +1 -0
  114. ccproxy/observability/storage/duckdb_simple.py +665 -0
  115. ccproxy/observability/storage/models.py +55 -0
  116. ccproxy/pricing/__init__.py +19 -0
  117. ccproxy/pricing/cache.py +212 -0
  118. ccproxy/pricing/loader.py +267 -0
  119. ccproxy/pricing/models.py +106 -0
  120. ccproxy/pricing/updater.py +309 -0
  121. ccproxy/scheduler/__init__.py +39 -0
  122. ccproxy/scheduler/core.py +335 -0
  123. ccproxy/scheduler/exceptions.py +34 -0
  124. ccproxy/scheduler/manager.py +186 -0
  125. ccproxy/scheduler/registry.py +150 -0
  126. ccproxy/scheduler/tasks.py +484 -0
  127. ccproxy/services/__init__.py +10 -0
  128. ccproxy/services/claude_sdk_service.py +614 -0
  129. ccproxy/services/credentials/__init__.py +55 -0
  130. ccproxy/services/credentials/config.py +105 -0
  131. ccproxy/services/credentials/manager.py +562 -0
  132. ccproxy/services/credentials/oauth_client.py +482 -0
  133. ccproxy/services/proxy_service.py +1536 -0
  134. ccproxy/static/.keep +0 -0
  135. ccproxy/testing/__init__.py +34 -0
  136. ccproxy/testing/config.py +148 -0
  137. ccproxy/testing/content_generation.py +197 -0
  138. ccproxy/testing/mock_responses.py +262 -0
  139. ccproxy/testing/response_handlers.py +161 -0
  140. ccproxy/testing/scenarios.py +241 -0
  141. ccproxy/utils/__init__.py +6 -0
  142. ccproxy/utils/cost_calculator.py +210 -0
  143. ccproxy/utils/streaming_metrics.py +199 -0
  144. ccproxy_api-0.1.0.dist-info/METADATA +253 -0
  145. ccproxy_api-0.1.0.dist-info/RECORD +148 -0
  146. ccproxy_api-0.1.0.dist-info/WHEEL +4 -0
  147. ccproxy_api-0.1.0.dist-info/entry_points.txt +2 -0
  148. ccproxy_api-0.1.0.dist-info/licenses/LICENSE +21 -0
ccproxy/core/async_utils.py
@@ -0,0 +1,675 @@
"""Async utilities for the CCProxy API."""

import asyncio
import re
from collections.abc import AsyncIterator, Awaitable, Callable, Iterator
from contextlib import asynccontextmanager, contextmanager
from pathlib import Path
from typing import Any, TypeVar


T = TypeVar("T")


# Extract the typing fix from utils/helper.py
@contextmanager
def patched_typing() -> Iterator[None]:
    """Fix for typing.TypedDict not supported in older Python versions.

    This patches typing.TypedDict to use typing_extensions.TypedDict.
    """
    import typing

    import typing_extensions

    original = typing.TypedDict
    typing.TypedDict = typing_extensions.TypedDict
    try:
        yield
    finally:
        typing.TypedDict = original

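A minimal usage sketch (not part of the packaged module): the patch is only active inside the `with` block, and the name `PatchedTypedDict` below is purely illustrative.

from ccproxy.core.async_utils import patched_typing

with patched_typing():
    import typing

    # Inside the block, typing.TypedDict refers to typing_extensions.TypedDict,
    # so any imports performed here see the patched class.
    PatchedTypedDict = typing.TypedDict

# On exit the original typing.TypedDict is restored.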

def get_package_dir() -> Path:
    """Get the package directory path.

    Returns:
        Path to the package directory
    """
    try:
        import importlib.util

        # Get the path to the ccproxy package and resolve it
        spec = importlib.util.find_spec(get_root_package_name())
        if spec and spec.origin:
            package_dir = Path(spec.origin).parent.parent.resolve()
        else:
            package_dir = Path(__file__).parent.parent.parent.resolve()
    except Exception:
        package_dir = Path(__file__).parent.parent.parent.resolve()

    return package_dir


def get_root_package_name() -> str:
    """Get the root package name.

    Returns:
        The root package name
    """
    if __package__:
        return __package__.split(".")[0]
    return __name__.split(".")[0]


async def run_in_executor(func: Callable[..., T], *args: Any, **kwargs: Any) -> T:
    """Run a synchronous function in an executor.

    Args:
        func: The synchronous function to run
        *args: Positional arguments to pass to the function
        **kwargs: Keyword arguments to pass to the function

    Returns:
        The result of the function call
    """
    loop = asyncio.get_event_loop()

    # Create a partial function if we have kwargs
    if kwargs:
        from functools import partial

        func = partial(func, **kwargs)

    return await loop.run_in_executor(None, func, *args)

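A small usage sketch, assuming it runs inside an event loop; `hash_payload` is a made-up blocking function standing in for any synchronous call.

import asyncio
import hashlib

from ccproxy.core.async_utils import run_in_executor


def hash_payload(data: bytes, algorithm: str = "sha256") -> str:
    # Blocking/CPU-bound work that should not run on the event loop.
    return hashlib.new(algorithm, data).hexdigest()


async def main() -> None:
    # Keyword arguments are wrapped with functools.partial internally.
    digest = await run_in_executor(hash_payload, b"hello", algorithm="sha256")
    print(digest)


asyncio.run(main())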

async def safe_await(awaitable: Awaitable[T], timeout: float | None = None) -> T | None:
    """Safely await an awaitable with optional timeout.

    Args:
        awaitable: The awaitable to wait for
        timeout: Optional timeout in seconds

    Returns:
        The result of the awaitable or None if timeout/error
    """
    try:
        if timeout is not None:
            return await asyncio.wait_for(awaitable, timeout=timeout)
        return await awaitable
    except TimeoutError:
        return None
    except Exception:
        return None


async def gather_with_concurrency(
    limit: int, *awaitables: Awaitable[T], return_exceptions: bool = False
) -> list[T | BaseException] | list[T]:
    """Gather awaitables with concurrency limit.

    Args:
        limit: Maximum number of concurrent operations
        *awaitables: Awaitables to execute
        return_exceptions: Whether to return exceptions as results

    Returns:
        List of results from the awaitables
    """
    semaphore = asyncio.Semaphore(limit)

    async def _limited_awaitable(awaitable: Awaitable[T]) -> T:
        async with semaphore:
            return await awaitable

    limited_awaitables = [_limited_awaitable(aw) for aw in awaitables]
    if return_exceptions:
        return await asyncio.gather(*limited_awaitables, return_exceptions=True)
    else:
        return await asyncio.gather(*limited_awaitables)

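A sketch of how the two helpers above might be combined; `fetch` is an illustrative stand-in coroutine, and at most three of the ten calls run at once.

import asyncio

from ccproxy.core.async_utils import gather_with_concurrency, safe_await


async def fetch(i: int) -> int:
    await asyncio.sleep(0.01 * i)  # placeholder for real I/O
    return i * i


async def main() -> None:
    # Limit of 3 concurrent coroutines; results keep input order.
    results = await gather_with_concurrency(3, *(fetch(i) for i in range(10)))
    print(results)

    # safe_await returns None instead of raising on timeout.
    print(await safe_await(asyncio.sleep(10), timeout=0.1))  # -> None


asyncio.run(main())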

@asynccontextmanager
async def async_timer() -> AsyncIterator[Callable[[], float]]:
    """Context manager for timing async operations.

    Yields:
        Function that returns elapsed time in seconds
    """
    import time

    start_time = time.perf_counter()

    def get_elapsed() -> float:
        return time.perf_counter() - start_time

    yield get_elapsed

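A usage sketch: the yielded callable can be read repeatedly, both inside and after the block, and keeps measuring from the original start time.

import asyncio

from ccproxy.core.async_utils import async_timer


async def main() -> None:
    async with async_timer() as elapsed:
        await asyncio.sleep(0.2)
        print(f"after sleep: {elapsed():.3f}s")
    print(f"total: {elapsed():.3f}s")


asyncio.run(main())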

async def retry_async(
    func: Callable[..., Awaitable[T]],
    *args: Any,
    max_retries: int = 3,
    delay: float = 1.0,
    backoff: float = 2.0,
    exceptions: tuple[type[Exception], ...] = (Exception,),
    **kwargs: Any,
) -> T:
    """Retry an async function with exponential backoff.

    Args:
        func: The async function to retry
        *args: Positional arguments to pass to the function
        max_retries: Maximum number of retries
        delay: Initial delay between retries
        backoff: Backoff multiplier
        exceptions: Exception types to catch and retry on
        **kwargs: Keyword arguments to pass to the function

    Returns:
        The result of the successful function call

    Raises:
        The last exception if all retries fail
    """
    last_exception = None
    current_delay = delay

    for attempt in range(max_retries + 1):
        try:
            return await func(*args, **kwargs)
        except exceptions as e:
            last_exception = e
            if attempt < max_retries:
                await asyncio.sleep(current_delay)
                current_delay *= backoff
            else:
                raise

    # This should never be reached, but just in case
    raise last_exception if last_exception else Exception("Retry failed")

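A sketch of the retry behaviour; `flaky` is a made-up coroutine that fails twice before succeeding, so the call sleeps 1 s, then 2 s, then returns on the third attempt.

import asyncio

from ccproxy.core.async_utils import retry_async

attempts = 0


async def flaky() -> str:
    global attempts
    attempts += 1
    if attempts < 3:
        raise ConnectionError("transient failure")
    return "ok"


async def main() -> None:
    result = await retry_async(
        flaky, max_retries=3, delay=1.0, backoff=2.0, exceptions=(ConnectionError,)
    )
    print(result, attempts)  # -> ok 3


asyncio.run(main())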

async def wait_for_condition(
    condition: Callable[[], bool | Awaitable[bool]],
    timeout: float = 30.0,
    interval: float = 0.1,
) -> bool:
    """Wait for a condition to become true.

    Args:
        condition: Function that returns True when condition is met
        timeout: Maximum time to wait in seconds
        interval: Check interval in seconds

    Returns:
        True if condition was met, False if timeout occurred
    """
    start_time = asyncio.get_event_loop().time()

    while True:
        try:
            result = condition()
            if asyncio.iscoroutine(result):
                result = await result
            if result:
                return True
        except Exception:
            pass

        if asyncio.get_event_loop().time() - start_time > timeout:
            return False

        await asyncio.sleep(interval)

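A sketch assuming a condition that becomes true after roughly half a second; `server_is_ready` is a hypothetical probe, and the return value distinguishes success from timeout.

import asyncio
import time

from ccproxy.core.async_utils import wait_for_condition


async def main() -> None:
    ready_at = time.monotonic() + 0.5

    def server_is_ready() -> bool:
        # Stand-in for a real readiness probe (e.g. an HTTP health check).
        return time.monotonic() >= ready_at

    ok = await wait_for_condition(server_is_ready, timeout=5.0, interval=0.1)
    print("ready" if ok else "timed out")


asyncio.run(main())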

_cache: dict[str, tuple[float, Any]] = {}


async def async_cache_result(
    func: Callable[..., Awaitable[T]],
    cache_key: str,
    cache_duration: float = 300.0,
    *args: Any,
    **kwargs: Any,
) -> T:
    """Cache the result of an async function call.

    Args:
        func: The async function to cache
        cache_key: Unique key for caching
        cache_duration: Cache duration in seconds
        *args: Positional arguments to pass to the function
        **kwargs: Keyword arguments to pass to the function

    Returns:
        The cached or computed result
    """
    import time

    current_time = time.time()

    # Check if we have a valid cached result
    if cache_key in _cache:
        cached_time, cached_result = _cache[cache_key]
        if current_time - cached_time < cache_duration:
            return cached_result  # type: ignore[no-any-return]

    # Compute and cache the result
    result = await func(*args, **kwargs)
    _cache[cache_key] = (current_time, result)

    return result

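A sketch of the module-level cache: two calls with the same cache_key inside cache_duration return the first result without re-running the coroutine. `load_pricing` is illustrative only.

import asyncio

from ccproxy.core.async_utils import async_cache_result

calls = 0


async def load_pricing() -> dict[str, float]:
    global calls
    calls += 1
    await asyncio.sleep(0.1)  # placeholder for a network fetch
    return {"input": 3.0, "output": 15.0}


async def main() -> None:
    first = await async_cache_result(load_pricing, "pricing", cache_duration=300.0)
    second = await async_cache_result(load_pricing, "pricing", cache_duration=300.0)
    print(first == second, calls)  # -> True 1


asyncio.run(main())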

def parse_version(version_string: str) -> tuple[int, int, int, str]:
    """
    Parse version string into components.

    Handles various formats:
    - 1.2.3
    - 1.2.3-dev
    - 1.2.3.dev59+g1624e1e.d19800101
    - 0.1.dev59+g1624e1e.d19800101
    """
    # Clean up setuptools-scm dev versions
    clean_version = re.sub(r"\.dev\d+\+.*", "", version_string)

    # Handle dev versions without patch number
    if ".dev" in version_string:
        base_version = version_string.split(".dev")[0]
        parts = base_version.split(".")
        if len(parts) == 2:
            # 0.1.dev59 -> 0.1.0-dev
            major, minor = int(parts[0]), int(parts[1])
            patch = 0
            suffix = "dev"
        else:
            # 1.2.3.dev59 -> 1.2.3-dev
            major, minor, patch = int(parts[0]), int(parts[1]), int(parts[2])
            suffix = "dev"
    else:
        # Regular version, possibly with a textual suffix (e.g. 1.2.3-dev)
        if "-" in clean_version:
            clean_version, suffix = clean_version.split("-", 1)
        else:
            suffix = ""
        parts = clean_version.split(".")
        if len(parts) < 3:
            parts.extend(["0"] * (3 - len(parts)))

        major, minor, patch = int(parts[0]), int(parts[1]), int(parts[2])

    return major, minor, patch, suffix


def format_version(version: str, level: str) -> str:
    """Format version according to specified level."""
    major, minor, patch, suffix = parse_version(version)

    base_version = f"{major}.{minor}.{patch}"

    if level == "major":
        return str(major)
    elif level == "minor":
        return f"{major}.{minor}"
    elif level == "patch" or level == "full":
        if suffix:
            return f"{base_version}-{suffix}"
        return base_version
    elif level == "docker":
        # Docker-compatible version (no + characters)
        if suffix:
            return f"{base_version}-{suffix}"
        return base_version
    elif level == "npm":
        # NPM-compatible version
        if suffix:
            return f"{base_version}-{suffix}.0"
        return base_version
    elif level == "python":
        # Python-compatible version
        if suffix:
            return f"{base_version}.{suffix}0"
        return base_version
    else:
        raise ValueError(f"Unknown version level: {level}")

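A few illustrative inputs and outputs, derived from the parsing rules above rather than from package documentation:

from ccproxy.core.async_utils import format_version, parse_version

print(parse_version("1.2.3"))                         # (1, 2, 3, "")
print(parse_version("0.1.dev59+g1624e1e.d19800101"))  # (0, 1, 0, "dev")

print(format_version("1.2.3", "minor"))       # 1.2
print(format_version("0.1.dev59", "npm"))     # 0.1.0-dev.0
print(format_version("0.1.dev59", "python"))  # 0.1.0.dev0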
def get_claude_docker_home_dir() -> str:
    """Get the Claude Docker home directory path.

    Returns:
        Path to Claude Docker home directory
    """
    import os
    from pathlib import Path

    # Use XDG_DATA_HOME if available, otherwise default to ~/.local/share
    xdg_data_home = os.environ.get("XDG_DATA_HOME")
    if xdg_data_home:
        base_dir = Path(xdg_data_home)
    else:
        base_dir = Path.home() / ".local" / "share"

    claude_dir = base_dir / "claude"
    claude_dir.mkdir(parents=True, exist_ok=True)

    return str(claude_dir)

def generate_schema_files(output_dir: Path | None = None) -> list[Path]:
    """Generate JSON Schema files for TOML configuration validation.

    Args:
        output_dir: Directory to write schema files to. If None, uses current directory.

    Returns:
        List of generated schema file paths

    Raises:
        ImportError: If required dependencies are not available
        OSError: If unable to write files
    """
    if output_dir is None:
        output_dir = Path.cwd()

    output_dir = Path(output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)

    generated_files: list[Path] = []

    # Generate schema for main Settings model
    schema = generate_json_schema()
    main_schema_path = output_dir / "ccproxy-schema.json"
    save_schema_file(schema, main_schema_path)
    generated_files.append(main_schema_path)

    # Generate a combined schema file that can be used for complete config validation
    combined_schema_path = output_dir / ".ccproxy-schema.json"
    save_schema_file(schema, combined_schema_path)
    generated_files.append(combined_schema_path)

    return generated_files

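A usage sketch that writes both schema files into a scratch directory (the "build/schemas" path is arbitrary); it requires the ccproxy package to be importable, since the schema is derived from its Settings model.

from pathlib import Path

from ccproxy.core.async_utils import generate_schema_files

for schema_path in generate_schema_files(Path("build/schemas")):
    # Expected: build/schemas/ccproxy-schema.json and build/schemas/.ccproxy-schema.json
    print(schema_path)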
def generate_taplo_config(output_dir: Path | None = None) -> Path:
    """Generate taplo configuration for TOML editor support.

    Args:
        output_dir: Directory to write taplo config to. If None, uses current directory.

    Returns:
        Path to generated .taplo.toml file

    Raises:
        OSError: If unable to write file
    """
    if output_dir is None:
        output_dir = Path.cwd()

    output_dir = Path(output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)

    taplo_config_path = output_dir / ".taplo.toml"

    # Generate taplo configuration that references our schema files
    taplo_config = """# Taplo configuration for Claude Code Proxy TOML files
# This configuration enables schema validation and autocomplete in editors

[[rule]]
name = "ccproxy-config"
include = [
    ".ccproxy.toml",
    "ccproxy.toml",
    "config.toml",
    "**/ccproxy*.toml",
    "**/config*.toml"
]
schema = "ccproxy-schema.json"

[formatting]
# Standard TOML formatting options
indent_string = " "
trailing_newline = true
crlf = false

[schema]
# Enable schema validation
enabled = true
# Show completions from schema
completion = true
"""

    taplo_config_path.write_text(taplo_config, encoding="utf-8")

    return taplo_config_path

def validate_config_with_schema(
    config_path: Path, schema_path: Path | None = None
) -> bool:
    """Validate a config file against the schema.

    Args:
        config_path: Path to configuration file to validate
        schema_path: Optional path to schema file. If None, generates schema from Settings

    Returns:
        True if validation passes, False otherwise

    Raises:
        ImportError: If check-jsonschema is not available
        FileNotFoundError: If config file doesn't exist
        tomllib.TOMLDecodeError: If TOML file has invalid syntax
        ValueError: For other validation errors
    """
    import json
    import subprocess
    import tempfile
    from typing import Any

    # Import tomllib for Python 3.11+ or fallback to tomli
    try:
        import tomllib
    except ImportError:
        import tomli as tomllib  # type: ignore[import-not-found,no-redef]

    from ccproxy.config.settings import Settings

    config_path = Path(config_path)

    if not config_path.exists():
        raise FileNotFoundError(f"Configuration file not found: {config_path}")

    # Determine the file type
    suffix = config_path.suffix.lower()

    if suffix == ".toml":
        # Read and parse TOML - let TOML parse errors bubble up
        with config_path.open("rb") as f:
            toml_data = tomllib.load(f)

        # Get or generate schema
        if schema_path:
            with schema_path.open("r", encoding="utf-8") as f:
                schema = json.load(f)
        else:
            schema = generate_json_schema()

        # Create temporary files for validation
        with tempfile.NamedTemporaryFile(
            mode="w", suffix=".json", delete=False, encoding="utf-8"
        ) as schema_file:
            json.dump(schema, schema_file, indent=2)
            temp_schema_path = schema_file.name

        with tempfile.NamedTemporaryFile(
            mode="w", suffix=".json", delete=False, encoding="utf-8"
        ) as json_file:
            json.dump(toml_data, json_file, indent=2)
            temp_json_path = json_file.name

        try:
            # Use check-jsonschema to validate
            result = subprocess.run(
                ["check-jsonschema", "--schemafile", temp_schema_path, temp_json_path],
                capture_output=True,
                text=True,
                check=False,
            )

            # Clean up temporary files
            Path(temp_schema_path).unlink(missing_ok=True)
            Path(temp_json_path).unlink(missing_ok=True)

            return result.returncode == 0

        except FileNotFoundError as e:
            # Clean up temporary files
            Path(temp_schema_path).unlink(missing_ok=True)
            Path(temp_json_path).unlink(missing_ok=True)
            raise ImportError(
                "check-jsonschema command not found. "
                "Install with: pip install check-jsonschema"
            ) from e
        except Exception as e:
            # Clean up temporary files in case of error
            Path(temp_schema_path).unlink(missing_ok=True)
            Path(temp_json_path).unlink(missing_ok=True)
            raise ValueError(f"Validation error: {e}") from e

    elif suffix == ".json":
        # Parse JSON to validate it's well-formed - let JSON parse errors bubble up
        with config_path.open("r", encoding="utf-8") as f:
            json.load(f)

        # Get or generate schema
        if schema_path:
            temp_schema_path = str(schema_path)
            cleanup_schema = False
        else:
            schema = generate_json_schema()
            with tempfile.NamedTemporaryFile(
                mode="w", suffix=".json", delete=False, encoding="utf-8"
            ) as schema_file:
                json.dump(schema, schema_file, indent=2)
                temp_schema_path = schema_file.name
            cleanup_schema = True

        try:
            result = subprocess.run(
                [
                    "check-jsonschema",
                    "--schemafile",
                    temp_schema_path,
                    str(config_path),
                ],
                capture_output=True,
                text=True,
                check=False,
            )

            if cleanup_schema:
                Path(temp_schema_path).unlink(missing_ok=True)

            return result.returncode == 0

        except FileNotFoundError as e:
            if cleanup_schema:
                Path(temp_schema_path).unlink(missing_ok=True)
            raise ImportError(
                "check-jsonschema command not found. "
                "Install with: pip install check-jsonschema"
            ) from e
        except Exception as e:
            if cleanup_schema:
                Path(temp_schema_path).unlink(missing_ok=True)
            raise ValueError(f"Validation error: {e}") from e

    else:
        raise ValueError(
            f"Unsupported config file format: {suffix}. "
            "Only TOML (.toml) and JSON (.json) files are supported."
        )

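A sketch of calling the validator; it shells out to the check-jsonschema CLI, so that tool must be installed (pip install check-jsonschema), and "ccproxy.toml" is an assumed file name.

from pathlib import Path

from ccproxy.core.async_utils import validate_config_with_schema

try:
    ok = validate_config_with_schema(Path("ccproxy.toml"))
    print("valid" if ok else "invalid")
except FileNotFoundError:
    print("config file not found")
except ImportError:
    print("check-jsonschema is not installed")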
# TODO: Remove this function or update this function
def generate_json_schema() -> dict[str, Any]:
    """Generate JSON Schema from Settings model.

    Returns:
        JSON Schema dictionary

    Raises:
        ImportError: If required dependencies are not available
    """
    try:
        from ccproxy.config.settings import Settings
    except ImportError as e:
        raise ImportError(f"Required dependencies not available: {e}") from e

    schema = Settings.model_json_schema()

    # Add schema metadata
    schema["$schema"] = "https://json-schema.org/draft/2020-12/schema"
    schema["title"] = "CCProxy API Configuration"

    # Add examples for common properties
    properties = schema.get("properties", {})
    if "host" in properties:
        properties["host"]["examples"] = ["127.0.0.1", "0.0.0.0", "localhost"]
    if "port" in properties:
        properties["port"]["examples"] = [8000, 8080, 3000]
    if "log_level" in properties:
        properties["log_level"]["examples"] = ["DEBUG", "INFO", "WARNING", "ERROR"]
    if "cors_origins" in properties:
        properties["cors_origins"]["examples"] = [
            ["*"],
            ["https://example.com", "https://app.example.com"],
            ["http://localhost:3000"],
        ]

    return schema


def save_schema_file(schema: dict[str, Any], output_path: Path) -> None:
    """Save JSON Schema to a file.

    Args:
        schema: JSON Schema dictionary to save
        output_path: Path to write schema file to

    Raises:
        OSError: If unable to write file
    """
    import json

    output_path = Path(output_path)
    output_path.parent.mkdir(parents=True, exist_ok=True)

    with output_path.open("w", encoding="utf-8") as f:
        json.dump(schema, f, indent=2, ensure_ascii=False)

def validate_toml_with_schema(
    config_path: Path, schema_path: Path | None = None
) -> bool:
    """Validate a TOML config file against JSON Schema.

    Args:
        config_path: Path to TOML configuration file
        schema_path: Optional path to schema file. If None, generates schema from Settings

    Returns:
        True if validation passes, False otherwise

    Raises:
        ImportError: If check-jsonschema is not available
        FileNotFoundError: If config file doesn't exist
        ValueError: If unable to parse or validate file
    """
    # This is a thin wrapper around validate_config_with_schema for TOML files
    config_path = Path(config_path)

    if config_path.suffix.lower() != ".toml":
        raise ValueError(f"Expected TOML file, got: {config_path.suffix}")

    return validate_config_with_schema(config_path, schema_path)
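Putting the schema helpers together, one plausible project-setup flow might look like the sketch below; the paths are assumptions, and the file names follow the defaults used by the functions above.

from pathlib import Path

from ccproxy.core.async_utils import (
    generate_schema_files,
    generate_taplo_config,
    validate_toml_with_schema,
)

project = Path(".")

# 1. Emit ccproxy-schema.json / .ccproxy-schema.json for editors and CI.
generate_schema_files(project)

# 2. Emit .taplo.toml so TOML-aware editors pick up the schema.
generate_taplo_config(project)

# 3. Validate the actual config file against the generated schema.
if validate_toml_with_schema(project / ".ccproxy.toml", project / "ccproxy-schema.json"):
    print("configuration is valid")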