@sdk-it/python 0.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +62 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +1974 -0
- package/dist/index.js.map +7 -0
- package/dist/lib/generate.d.ts +18 -0
- package/dist/lib/generate.d.ts.map +1 -0
- package/dist/lib/index.d.ts +3 -0
- package/dist/lib/index.d.ts.map +1 -0
- package/dist/lib/python-emitter.d.ts +28 -0
- package/dist/lib/python-emitter.d.ts.map +1 -0
- package/package.json +37 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1974 @@
|
|
|
1
|
+
// packages/python/src/lib/generate.ts
|
|
2
|
+
import { readdir } from "node:fs/promises";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { snakecase as snakecase2 } from "stringcase";
|
|
5
|
+
import { isEmpty, isRef as isRef2, pascalcase as pascalcase2 } from "@sdk-it/core";
|
|
6
|
+
import {
|
|
7
|
+
createWriterProxy,
|
|
8
|
+
writeFiles
|
|
9
|
+
} from "@sdk-it/core/file-system.js";
|
|
10
|
+
import {
|
|
11
|
+
augmentSpec,
|
|
12
|
+
cleanFiles,
|
|
13
|
+
forEachOperation,
|
|
14
|
+
isSuccessStatusCode,
|
|
15
|
+
parseJsonContentType,
|
|
16
|
+
readWriteMetadata
|
|
17
|
+
} from "@sdk-it/spec";
|
|
18
|
+
|
|
19
|
+
// packages/python/src/lib/http/dispatcher.txt
|
|
20
|
+
var dispatcher_default = `"""HTTP dispatcher for making API requests."""
|
|
21
|
+
|
|
22
|
+
import asyncio
|
|
23
|
+
import logging
|
|
24
|
+
from typing import Any, Dict, List, Optional, Union
|
|
25
|
+
from urllib.parse import urljoin, urlparse
|
|
26
|
+
|
|
27
|
+
import httpx
|
|
28
|
+
from pydantic import BaseModel
|
|
29
|
+
|
|
30
|
+
from .interceptors import Interceptor
|
|
31
|
+
from .responses import ApiResponse, ErrorResponse
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class RequestConfig(BaseModel):
|
|
35
|
+
"""Configuration for an HTTP request."""
|
|
36
|
+
|
|
37
|
+
method: str
|
|
38
|
+
url: str
|
|
39
|
+
headers: Optional[Dict[str, str]] = None
|
|
40
|
+
params: Optional[Dict[str, Any]] = None
|
|
41
|
+
json_data: Optional[Dict[str, Any]] = None
|
|
42
|
+
form_data: Optional[Dict[str, Any]] = None
|
|
43
|
+
files: Optional[Dict[str, Any]] = None
|
|
44
|
+
timeout: Optional[Union[float, httpx.Timeout]] = None
|
|
45
|
+
|
|
46
|
+
class Config:
|
|
47
|
+
"""Pydantic configuration."""
|
|
48
|
+
arbitrary_types_allowed = True
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class Dispatcher:
|
|
52
|
+
"""HTTP client dispatcher with interceptor support."""
|
|
53
|
+
|
|
54
|
+
def __init__(
|
|
55
|
+
self,
|
|
56
|
+
interceptors: Optional[List[Interceptor]] = None,
|
|
57
|
+
client: Optional[httpx.AsyncClient] = None,
|
|
58
|
+
timeout: Optional[Union[float, httpx.Timeout]] = None
|
|
59
|
+
):
|
|
60
|
+
"""Initialize the dispatcher.
|
|
61
|
+
|
|
62
|
+
Args:
|
|
63
|
+
interceptors: List of interceptors to apply to requests/responses
|
|
64
|
+
client: Custom httpx.AsyncClient instance (creates default if None)
|
|
65
|
+
timeout: Default timeout for requests
|
|
66
|
+
"""
|
|
67
|
+
self.interceptors = interceptors or []
|
|
68
|
+
self.client = client or httpx.AsyncClient(timeout=timeout)
|
|
69
|
+
self.logger = logging.getLogger(__name__)
|
|
70
|
+
|
|
71
|
+
async def __aenter__(self):
|
|
72
|
+
"""Async context manager entry."""
|
|
73
|
+
return self
|
|
74
|
+
|
|
75
|
+
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
|
76
|
+
"""Async context manager exit."""
|
|
77
|
+
await self.client.aclose()
|
|
78
|
+
|
|
79
|
+
async def request(self, config: RequestConfig) -> httpx.Response:
|
|
80
|
+
"""Execute an HTTP request with interceptor processing.
|
|
81
|
+
|
|
82
|
+
Args:
|
|
83
|
+
config: Request configuration
|
|
84
|
+
|
|
85
|
+
Returns:
|
|
86
|
+
HTTP response after processing through interceptors
|
|
87
|
+
|
|
88
|
+
Raises:
|
|
89
|
+
httpx.HTTPError: For HTTP-related errors
|
|
90
|
+
ValueError: For invalid request configuration
|
|
91
|
+
"""
|
|
92
|
+
# Process request interceptors
|
|
93
|
+
processed_config = config
|
|
94
|
+
for interceptor in self.interceptors:
|
|
95
|
+
processed_config = await interceptor.process_request(processed_config)
|
|
96
|
+
|
|
97
|
+
# Prepare request arguments
|
|
98
|
+
request_kwargs = self._prepare_request_kwargs(processed_config)
|
|
99
|
+
|
|
100
|
+
try:
|
|
101
|
+
# Execute request
|
|
102
|
+
response = await self.client.request(**request_kwargs)
|
|
103
|
+
|
|
104
|
+
# Process response interceptors (in reverse order)
|
|
105
|
+
for interceptor in reversed(self.interceptors):
|
|
106
|
+
response = await interceptor.process_response(response)
|
|
107
|
+
|
|
108
|
+
return response
|
|
109
|
+
|
|
110
|
+
except httpx.RequestError as e:
|
|
111
|
+
self.logger.error(f"Request failed: {e}")
|
|
112
|
+
raise
|
|
113
|
+
except Exception as e:
|
|
114
|
+
self.logger.error(f"Unexpected error during request: {e}")
|
|
115
|
+
raise
|
|
116
|
+
|
|
117
|
+
def _prepare_request_kwargs(self, config: RequestConfig) -> Dict[str, Any]:
|
|
118
|
+
"""Prepare keyword arguments for httpx request.
|
|
119
|
+
|
|
120
|
+
Args:
|
|
121
|
+
config: Request configuration
|
|
122
|
+
|
|
123
|
+
Returns:
|
|
124
|
+
Dictionary of kwargs for httpx.request
|
|
125
|
+
|
|
126
|
+
Raises:
|
|
127
|
+
ValueError: If request configuration is invalid
|
|
128
|
+
"""
|
|
129
|
+
if not config.method:
|
|
130
|
+
raise ValueError("Request method cannot be empty")
|
|
131
|
+
|
|
132
|
+
if not config.url:
|
|
133
|
+
raise ValueError("Request URL cannot be empty")
|
|
134
|
+
|
|
135
|
+
request_kwargs = {
|
|
136
|
+
'method': config.method.upper(),
|
|
137
|
+
'url': config.url,
|
|
138
|
+
'headers': config.headers or {},
|
|
139
|
+
'params': config.params,
|
|
140
|
+
'timeout': config.timeout,
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
# Handle different content types
|
|
144
|
+
content_type_set = False
|
|
145
|
+
|
|
146
|
+
if config.json_data is not None:
|
|
147
|
+
request_kwargs['json'] = config.json_data
|
|
148
|
+
if 'Content-Type' not in request_kwargs['headers']:
|
|
149
|
+
request_kwargs['headers']['Content-Type'] = 'application/json'
|
|
150
|
+
content_type_set = True
|
|
151
|
+
|
|
152
|
+
elif config.form_data is not None:
|
|
153
|
+
request_kwargs['data'] = config.form_data
|
|
154
|
+
if 'Content-Type' not in request_kwargs['headers']:
|
|
155
|
+
request_kwargs['headers']['Content-Type'] = 'application/x-www-form-urlencoded'
|
|
156
|
+
content_type_set = True
|
|
157
|
+
|
|
158
|
+
elif config.files is not None:
|
|
159
|
+
request_kwargs['files'] = config.files
|
|
160
|
+
# Don't set Content-Type for multipart/form-data - httpx will handle it automatically
|
|
161
|
+
content_type_set = True
|
|
162
|
+
|
|
163
|
+
# Validate that only one content type is set
|
|
164
|
+
content_fields = [config.json_data, config.form_data, config.files]
|
|
165
|
+
non_none_count = sum(1 for field in content_fields if field is not None)
|
|
166
|
+
|
|
167
|
+
if non_none_count > 1:
|
|
168
|
+
raise ValueError(
|
|
169
|
+
"Only one of json_data, form_data, or files can be set in a single request"
|
|
170
|
+
)
|
|
171
|
+
|
|
172
|
+
return request_kwargs
|
|
173
|
+
|
|
174
|
+
async def json(self, config: RequestConfig) -> httpx.Response:
|
|
175
|
+
"""Make a JSON request.
|
|
176
|
+
|
|
177
|
+
Args:
|
|
178
|
+
config: Request configuration
|
|
179
|
+
|
|
180
|
+
Returns:
|
|
181
|
+
HTTP response
|
|
182
|
+
"""
|
|
183
|
+
return await self.request(config)
|
|
184
|
+
|
|
185
|
+
async def form(self, config: RequestConfig) -> httpx.Response:
|
|
186
|
+
"""Make a form-encoded request.
|
|
187
|
+
|
|
188
|
+
Args:
|
|
189
|
+
config: Request configuration
|
|
190
|
+
|
|
191
|
+
Returns:
|
|
192
|
+
HTTP response
|
|
193
|
+
"""
|
|
194
|
+
return await self.request(config)
|
|
195
|
+
|
|
196
|
+
async def multipart(self, config: RequestConfig) -> httpx.Response:
|
|
197
|
+
"""Make a multipart/form-data request.
|
|
198
|
+
|
|
199
|
+
Args:
|
|
200
|
+
config: Request configuration
|
|
201
|
+
|
|
202
|
+
Returns:
|
|
203
|
+
HTTP response
|
|
204
|
+
"""
|
|
205
|
+
return await self.request(config)
|
|
206
|
+
|
|
207
|
+
async def close(self):
|
|
208
|
+
"""Close the HTTP client and clean up resources."""
|
|
209
|
+
await self.client.aclose()
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
class Receiver:
|
|
213
|
+
"""Response processor with interceptor support."""
|
|
214
|
+
|
|
215
|
+
def __init__(
|
|
216
|
+
self,
|
|
217
|
+
interceptors: Optional[List[Interceptor]] = None,
|
|
218
|
+
logger: Optional[logging.Logger] = None
|
|
219
|
+
):
|
|
220
|
+
"""Initialize the receiver.
|
|
221
|
+
|
|
222
|
+
Args:
|
|
223
|
+
interceptors: List of interceptors to apply to responses
|
|
224
|
+
logger: Custom logger instance
|
|
225
|
+
"""
|
|
226
|
+
self.interceptors = interceptors or []
|
|
227
|
+
self.logger = logger or logging.getLogger(__name__)
|
|
228
|
+
|
|
229
|
+
async def json(
|
|
230
|
+
self,
|
|
231
|
+
response: httpx.Response,
|
|
232
|
+
success_model: Optional[type] = None,
|
|
233
|
+
error_model: Optional[type] = None
|
|
234
|
+
) -> Any:
|
|
235
|
+
"""Process a JSON response.
|
|
236
|
+
|
|
237
|
+
Args:
|
|
238
|
+
response: HTTP response to process
|
|
239
|
+
success_model: Pydantic model for successful responses
|
|
240
|
+
error_model: Pydantic model for error responses
|
|
241
|
+
|
|
242
|
+
Returns:
|
|
243
|
+
Parsed response data, optionally as model instances
|
|
244
|
+
|
|
245
|
+
Raises:
|
|
246
|
+
ErrorResponse: For HTTP error status codes
|
|
247
|
+
ValueError: For response parsing errors
|
|
248
|
+
"""
|
|
249
|
+
# Process response interceptors
|
|
250
|
+
processed_response = response
|
|
251
|
+
for interceptor in self.interceptors:
|
|
252
|
+
processed_response = await interceptor.process_response(processed_response)
|
|
253
|
+
|
|
254
|
+
# Handle different status codes
|
|
255
|
+
if 200 <= processed_response.status_code < 300:
|
|
256
|
+
return await self._handle_success_response(
|
|
257
|
+
processed_response, success_model
|
|
258
|
+
)
|
|
259
|
+
else:
|
|
260
|
+
await self._handle_error_response(
|
|
261
|
+
processed_response, error_model
|
|
262
|
+
)
|
|
263
|
+
|
|
264
|
+
async def _handle_success_response(
|
|
265
|
+
self,
|
|
266
|
+
response: httpx.Response,
|
|
267
|
+
success_model: Optional[type] = None
|
|
268
|
+
) -> Any:
|
|
269
|
+
"""Handle successful response.
|
|
270
|
+
|
|
271
|
+
Args:
|
|
272
|
+
response: HTTP response
|
|
273
|
+
success_model: Pydantic model for successful responses
|
|
274
|
+
|
|
275
|
+
Returns:
|
|
276
|
+
Parsed response data
|
|
277
|
+
|
|
278
|
+
Raises:
|
|
279
|
+
ValueError: For parsing errors
|
|
280
|
+
"""
|
|
281
|
+
if not response.content:
|
|
282
|
+
return None
|
|
283
|
+
|
|
284
|
+
try:
|
|
285
|
+
data = response.json()
|
|
286
|
+
|
|
287
|
+
if success_model:
|
|
288
|
+
if isinstance(data, list):
|
|
289
|
+
return [success_model(**item) for item in data]
|
|
290
|
+
else:
|
|
291
|
+
return success_model(**data)
|
|
292
|
+
|
|
293
|
+
return data
|
|
294
|
+
|
|
295
|
+
except Exception as e:
|
|
296
|
+
self.logger.error(f"Failed to parse success response: {e}")
|
|
297
|
+
raise ValueError(f"Failed to parse response: {e}")
|
|
298
|
+
|
|
299
|
+
async def _handle_error_response(
|
|
300
|
+
self,
|
|
301
|
+
response: httpx.Response,
|
|
302
|
+
error_model: Optional[type] = None
|
|
303
|
+
) -> None:
|
|
304
|
+
"""Handle error response.
|
|
305
|
+
|
|
306
|
+
Args:
|
|
307
|
+
response: HTTP response
|
|
308
|
+
error_model: Pydantic model for error responses
|
|
309
|
+
|
|
310
|
+
Raises:
|
|
311
|
+
ErrorResponse: Always raises with error details
|
|
312
|
+
"""
|
|
313
|
+
error_data = {}
|
|
314
|
+
|
|
315
|
+
if response.content:
|
|
316
|
+
try:
|
|
317
|
+
error_data = response.json()
|
|
318
|
+
except Exception:
|
|
319
|
+
# Fallback to text content if JSON parsing fails
|
|
320
|
+
error_data = {'message': response.text}
|
|
321
|
+
|
|
322
|
+
if error_model:
|
|
323
|
+
try:
|
|
324
|
+
error = error_model(**error_data)
|
|
325
|
+
raise ErrorResponse(error, response.status_code, dict(response.headers))
|
|
326
|
+
except Exception as e:
|
|
327
|
+
self.logger.warning(f"Failed to parse error with model {error_model}: {e}")
|
|
328
|
+
|
|
329
|
+
raise ErrorResponse(error_data, response.status_code, dict(response.headers))
|
|
330
|
+
|
|
331
|
+
async def stream(self, response: httpx.Response) -> httpx.Response:
|
|
332
|
+
"""Return streaming response as-is.
|
|
333
|
+
|
|
334
|
+
Args:
|
|
335
|
+
response: HTTP response
|
|
336
|
+
|
|
337
|
+
Returns:
|
|
338
|
+
The unmodified streaming response
|
|
339
|
+
"""
|
|
340
|
+
return response
|
|
341
|
+
|
|
342
|
+
async def text(self, response: httpx.Response) -> str:
|
|
343
|
+
"""Get response as text.
|
|
344
|
+
|
|
345
|
+
Args:
|
|
346
|
+
response: HTTP response
|
|
347
|
+
|
|
348
|
+
Returns:
|
|
349
|
+
Response body as text
|
|
350
|
+
|
|
351
|
+
Raises:
|
|
352
|
+
ErrorResponse: For HTTP error status codes
|
|
353
|
+
"""
|
|
354
|
+
# Process response interceptors
|
|
355
|
+
processed_response = response
|
|
356
|
+
for interceptor in self.interceptors:
|
|
357
|
+
processed_response = await interceptor.process_response(processed_response)
|
|
358
|
+
|
|
359
|
+
if 200 <= processed_response.status_code < 300:
|
|
360
|
+
return processed_response.text
|
|
361
|
+
else:
|
|
362
|
+
error_data = {'message': processed_response.text}
|
|
363
|
+
raise ErrorResponse(error_data, processed_response.status_code, dict(processed_response.headers))
|
|
364
|
+
|
|
365
|
+
async def bytes(self, response: httpx.Response) -> bytes:
|
|
366
|
+
"""Get response as bytes.
|
|
367
|
+
|
|
368
|
+
Args:
|
|
369
|
+
response: HTTP response
|
|
370
|
+
|
|
371
|
+
Returns:
|
|
372
|
+
Response body as bytes
|
|
373
|
+
|
|
374
|
+
Raises:
|
|
375
|
+
ErrorResponse: For HTTP error status codes
|
|
376
|
+
"""
|
|
377
|
+
# Process response interceptors
|
|
378
|
+
processed_response = response
|
|
379
|
+
for interceptor in self.interceptors:
|
|
380
|
+
processed_response = await interceptor.process_response(processed_response)
|
|
381
|
+
|
|
382
|
+
if 200 <= processed_response.status_code < 300:
|
|
383
|
+
return processed_response.content
|
|
384
|
+
else:
|
|
385
|
+
error_data = {'message': 'Binary response error'}
|
|
386
|
+
raise ErrorResponse(error_data, processed_response.status_code, dict(processed_response.headers))
|
|
387
|
+
|
|
388
|
+
|
|
389
|
+
# Convenience functions for common use cases
|
|
390
|
+
async def quick_request(
|
|
391
|
+
method: str,
|
|
392
|
+
url: str,
|
|
393
|
+
interceptors: Optional[List[Interceptor]] = None,
|
|
394
|
+
**kwargs
|
|
395
|
+
) -> httpx.Response:
|
|
396
|
+
"""Make a quick HTTP request with interceptors.
|
|
397
|
+
|
|
398
|
+
Args:
|
|
399
|
+
method: HTTP method
|
|
400
|
+
url: Request URL
|
|
401
|
+
interceptors: List of interceptors to apply
|
|
402
|
+
**kwargs: Additional request configuration
|
|
403
|
+
|
|
404
|
+
Returns:
|
|
405
|
+
HTTP response
|
|
406
|
+
"""
|
|
407
|
+
config = RequestConfig(method=method, url=url, **kwargs)
|
|
408
|
+
|
|
409
|
+
async with Dispatcher(interceptors=interceptors) as dispatcher:
|
|
410
|
+
return await dispatcher.request(config)
|
|
411
|
+
|
|
412
|
+
|
|
413
|
+
async def quick_json_request(
|
|
414
|
+
method: str,
|
|
415
|
+
url: str,
|
|
416
|
+
json_data: Optional[Dict[str, Any]] = None,
|
|
417
|
+
interceptors: Optional[List[Interceptor]] = None,
|
|
418
|
+
success_model: Optional[type] = None,
|
|
419
|
+
error_model: Optional[type] = None,
|
|
420
|
+
**kwargs
|
|
421
|
+
) -> Any:
|
|
422
|
+
"""Make a quick JSON HTTP request with interceptors.
|
|
423
|
+
|
|
424
|
+
Args:
|
|
425
|
+
method: HTTP method
|
|
426
|
+
url: Request URL
|
|
427
|
+
json_data: JSON data to send
|
|
428
|
+
interceptors: List of interceptors to apply
|
|
429
|
+
success_model: Pydantic model for successful responses
|
|
430
|
+
error_model: Pydantic model for error responses
|
|
431
|
+
**kwargs: Additional request configuration
|
|
432
|
+
|
|
433
|
+
Returns:
|
|
434
|
+
Parsed JSON response
|
|
435
|
+
"""
|
|
436
|
+
config = RequestConfig(method=method, url=url, json_data=json_data, **kwargs)
|
|
437
|
+
|
|
438
|
+
async with Dispatcher(interceptors=interceptors) as dispatcher:
|
|
439
|
+
response = await dispatcher.request(config)
|
|
440
|
+
receiver = Receiver(interceptors=interceptors)
|
|
441
|
+
return await receiver.json(response, success_model, error_model)
|
|
442
|
+
`;
|
|
443
|
+
|
|
444
|
+
// packages/python/src/lib/http/interceptors.txt
|
|
445
|
+
var interceptors_default = `"""HTTP interceptors for request/response processing."""
|
|
446
|
+
|
|
447
|
+
import asyncio
|
|
448
|
+
import logging
|
|
449
|
+
import time
|
|
450
|
+
from abc import ABC, abstractmethod
|
|
451
|
+
from typing import Dict, Optional, List, Any, Union
|
|
452
|
+
from urllib.parse import urljoin
|
|
453
|
+
|
|
454
|
+
import httpx
|
|
455
|
+
|
|
456
|
+
from .dispatcher import RequestConfig
|
|
457
|
+
|
|
458
|
+
|
|
459
|
+
class Interceptor(ABC):
|
|
460
|
+
"""Base class for HTTP interceptors."""
|
|
461
|
+
|
|
462
|
+
@abstractmethod
|
|
463
|
+
async def process_request(self, config: RequestConfig) -> RequestConfig:
|
|
464
|
+
"""Process an outgoing request.
|
|
465
|
+
|
|
466
|
+
Args:
|
|
467
|
+
config: The request configuration to process
|
|
468
|
+
|
|
469
|
+
Returns:
|
|
470
|
+
The modified request configuration
|
|
471
|
+
"""
|
|
472
|
+
pass
|
|
473
|
+
|
|
474
|
+
@abstractmethod
|
|
475
|
+
async def process_response(self, response: httpx.Response) -> httpx.Response:
|
|
476
|
+
"""Process an incoming response.
|
|
477
|
+
|
|
478
|
+
Args:
|
|
479
|
+
response: The HTTP response to process
|
|
480
|
+
|
|
481
|
+
Returns:
|
|
482
|
+
The processed response
|
|
483
|
+
"""
|
|
484
|
+
pass
|
|
485
|
+
|
|
486
|
+
|
|
487
|
+
class BaseUrlInterceptor(Interceptor):
|
|
488
|
+
"""Interceptor that prepends base URL to relative URLs."""
|
|
489
|
+
|
|
490
|
+
def __init__(self, base_url: str):
|
|
491
|
+
"""Initialize the base URL interceptor.
|
|
492
|
+
|
|
493
|
+
Args:
|
|
494
|
+
base_url: The base URL to prepend to relative URLs
|
|
495
|
+
"""
|
|
496
|
+
self.base_url = base_url.rstrip('/')
|
|
497
|
+
|
|
498
|
+
async def process_request(self, config: RequestConfig) -> RequestConfig:
|
|
499
|
+
"""Prepend base URL if the request URL is relative.
|
|
500
|
+
|
|
501
|
+
Args:
|
|
502
|
+
config: The request configuration
|
|
503
|
+
|
|
504
|
+
Returns:
|
|
505
|
+
The modified request configuration with absolute URL
|
|
506
|
+
"""
|
|
507
|
+
if not config.url.startswith(('http://', 'https://')):
|
|
508
|
+
# Use urljoin for proper URL joining, ensuring single slash
|
|
509
|
+
config.url = urljoin(self.base_url + '/', config.url.lstrip('/'))
|
|
510
|
+
return config
|
|
511
|
+
|
|
512
|
+
async def process_response(self, response: httpx.Response) -> httpx.Response:
|
|
513
|
+
"""Pass through response unchanged.
|
|
514
|
+
|
|
515
|
+
Args:
|
|
516
|
+
response: The HTTP response
|
|
517
|
+
|
|
518
|
+
Returns:
|
|
519
|
+
The unmodified response
|
|
520
|
+
"""
|
|
521
|
+
return response
|
|
522
|
+
|
|
523
|
+
|
|
524
|
+
class LoggingInterceptor(Interceptor):
|
|
525
|
+
"""Interceptor that logs requests and responses using Python's logging module."""
|
|
526
|
+
|
|
527
|
+
def __init__(
|
|
528
|
+
self,
|
|
529
|
+
enabled: bool = True,
|
|
530
|
+
logger: Optional[logging.Logger] = None,
|
|
531
|
+
log_level: int = logging.INFO,
|
|
532
|
+
include_headers: bool = True,
|
|
533
|
+
include_sensitive_headers: bool = False
|
|
534
|
+
):
|
|
535
|
+
"""Initialize the logging interceptor.
|
|
536
|
+
|
|
537
|
+
Args:
|
|
538
|
+
enabled: Whether logging is enabled
|
|
539
|
+
logger: Custom logger instance (creates default if None)
|
|
540
|
+
log_level: Logging level to use
|
|
541
|
+
include_headers: Whether to log request/response headers
|
|
542
|
+
include_sensitive_headers: Whether to log sensitive headers like Authorization
|
|
543
|
+
"""
|
|
544
|
+
self.enabled = enabled
|
|
545
|
+
self.logger = logger or logging.getLogger(__name__)
|
|
546
|
+
self.log_level = log_level
|
|
547
|
+
self.include_headers = include_headers
|
|
548
|
+
self.include_sensitive_headers = include_sensitive_headers
|
|
549
|
+
self._sensitive_headers = {'authorization', 'x-api-key', 'cookie', 'set-cookie'}
|
|
550
|
+
|
|
551
|
+
async def process_request(self, config: RequestConfig) -> RequestConfig:
|
|
552
|
+
"""Log outgoing request.
|
|
553
|
+
|
|
554
|
+
Args:
|
|
555
|
+
config: The request configuration
|
|
556
|
+
|
|
557
|
+
Returns:
|
|
558
|
+
The unmodified request configuration
|
|
559
|
+
"""
|
|
560
|
+
if not self.enabled:
|
|
561
|
+
return config
|
|
562
|
+
|
|
563
|
+
self.logger.log(self.log_level, f"\u2192 {config.method.upper()} {config.url}")
|
|
564
|
+
|
|
565
|
+
if self.include_headers and config.headers:
|
|
566
|
+
for key, value in config.headers.items():
|
|
567
|
+
if (key.lower() in self._sensitive_headers and
|
|
568
|
+
not self.include_sensitive_headers):
|
|
569
|
+
self.logger.log(self.log_level, f" {key}: [REDACTED]")
|
|
570
|
+
else:
|
|
571
|
+
self.logger.log(self.log_level, f" {key}: {value}")
|
|
572
|
+
|
|
573
|
+
return config
|
|
574
|
+
|
|
575
|
+
async def process_response(self, response: httpx.Response) -> httpx.Response:
|
|
576
|
+
"""Log incoming response.
|
|
577
|
+
|
|
578
|
+
Args:
|
|
579
|
+
response: The HTTP response
|
|
580
|
+
|
|
581
|
+
Returns:
|
|
582
|
+
The unmodified response
|
|
583
|
+
"""
|
|
584
|
+
if not self.enabled:
|
|
585
|
+
return response
|
|
586
|
+
|
|
587
|
+
status_icon = "\u2713" if 200 <= response.status_code < 300 else "\u2717"
|
|
588
|
+
self.logger.log(
|
|
589
|
+
self.log_level,
|
|
590
|
+
f"\u2190 {status_icon} {response.status_code} {response.reason_phrase or ''}"
|
|
591
|
+
)
|
|
592
|
+
|
|
593
|
+
if self.include_headers and response.headers:
|
|
594
|
+
for key, value in response.headers.items():
|
|
595
|
+
if (key.lower() in self._sensitive_headers and
|
|
596
|
+
not self.include_sensitive_headers):
|
|
597
|
+
self.logger.log(self.log_level, f" {key}: [REDACTED]")
|
|
598
|
+
else:
|
|
599
|
+
self.logger.log(self.log_level, f" {key}: {value}")
|
|
600
|
+
|
|
601
|
+
return response
|
|
602
|
+
|
|
603
|
+
|
|
604
|
+
class AuthInterceptor(Interceptor):
|
|
605
|
+
"""Interceptor that adds authentication headers."""
|
|
606
|
+
|
|
607
|
+
def __init__(
|
|
608
|
+
self,
|
|
609
|
+
token: Optional[str] = None,
|
|
610
|
+
api_key: Optional[str] = None,
|
|
611
|
+
api_key_header: str = 'X-API-Key',
|
|
612
|
+
auth_type: str = 'Bearer'
|
|
613
|
+
):
|
|
614
|
+
"""Initialize the authentication interceptor.
|
|
615
|
+
|
|
616
|
+
Args:
|
|
617
|
+
token: Bearer token for Authorization header
|
|
618
|
+
api_key: API key value
|
|
619
|
+
api_key_header: Header name for API key
|
|
620
|
+
auth_type: Type of authentication (Bearer, Basic, etc.)
|
|
621
|
+
"""
|
|
622
|
+
self.token = token
|
|
623
|
+
self.api_key = api_key
|
|
624
|
+
self.api_key_header = api_key_header
|
|
625
|
+
self.auth_type = auth_type
|
|
626
|
+
|
|
627
|
+
async def process_request(self, config: RequestConfig) -> RequestConfig:
|
|
628
|
+
"""Add authentication headers.
|
|
629
|
+
|
|
630
|
+
Args:
|
|
631
|
+
config: The request configuration
|
|
632
|
+
|
|
633
|
+
Returns:
|
|
634
|
+
The modified request configuration with auth headers
|
|
635
|
+
"""
|
|
636
|
+
if config.headers is None:
|
|
637
|
+
config.headers = {}
|
|
638
|
+
|
|
639
|
+
if self.token:
|
|
640
|
+
config.headers['Authorization'] = f'{self.auth_type} {self.token}'
|
|
641
|
+
elif self.api_key:
|
|
642
|
+
config.headers[self.api_key_header] = self.api_key
|
|
643
|
+
|
|
644
|
+
return config
|
|
645
|
+
|
|
646
|
+
async def process_response(self, response: httpx.Response) -> httpx.Response:
|
|
647
|
+
"""Pass through response unchanged.
|
|
648
|
+
|
|
649
|
+
Args:
|
|
650
|
+
response: The HTTP response
|
|
651
|
+
|
|
652
|
+
Returns:
|
|
653
|
+
The unmodified response
|
|
654
|
+
"""
|
|
655
|
+
return response
|
|
656
|
+
|
|
657
|
+
|
|
658
|
+
class RetryInterceptor(Interceptor):
|
|
659
|
+
"""Interceptor that retries failed requests with exponential backoff."""
|
|
660
|
+
|
|
661
|
+
def __init__(
|
|
662
|
+
self,
|
|
663
|
+
max_retries: int = 3,
|
|
664
|
+
retry_delay: float = 1.0,
|
|
665
|
+
backoff_factor: float = 2.0,
|
|
666
|
+
retry_on_status: Optional[List[int]] = None,
|
|
667
|
+
retry_on_exceptions: Optional[List[type]] = None
|
|
668
|
+
):
|
|
669
|
+
"""Initialize the retry interceptor.
|
|
670
|
+
|
|
671
|
+
Args:
|
|
672
|
+
max_retries: Maximum number of retry attempts
|
|
673
|
+
retry_delay: Initial delay between retries in seconds
|
|
674
|
+
backoff_factor: Exponential backoff multiplier
|
|
675
|
+
retry_on_status: HTTP status codes that should trigger retries
|
|
676
|
+
retry_on_exceptions: Exception types that should trigger retries
|
|
677
|
+
"""
|
|
678
|
+
self.max_retries = max_retries
|
|
679
|
+
self.retry_delay = retry_delay
|
|
680
|
+
self.backoff_factor = backoff_factor
|
|
681
|
+
self.retry_on_status = retry_on_status or [500, 502, 503, 504, 408, 429]
|
|
682
|
+
self.retry_on_exceptions = retry_on_exceptions or [
|
|
683
|
+
httpx.TimeoutException,
|
|
684
|
+
httpx.ConnectError,
|
|
685
|
+
httpx.RemoteProtocolError
|
|
686
|
+
]
|
|
687
|
+
self._original_request_func = None
|
|
688
|
+
self.logger = logging.getLogger(__name__)
|
|
689
|
+
|
|
690
|
+
async def process_request(self, config: RequestConfig) -> RequestConfig:
|
|
691
|
+
"""Store original request for potential retries.
|
|
692
|
+
|
|
693
|
+
Args:
|
|
694
|
+
config: The request configuration
|
|
695
|
+
|
|
696
|
+
Returns:
|
|
697
|
+
The unmodified request configuration
|
|
698
|
+
"""
|
|
699
|
+
# Store the original config for retries
|
|
700
|
+
self._original_config = config.model_copy() if hasattr(config, 'model_copy') else config
|
|
701
|
+
return config
|
|
702
|
+
|
|
703
|
+
async def process_response(self, response: httpx.Response) -> httpx.Response:
|
|
704
|
+
"""Check if response needs retry and handle accordingly.
|
|
705
|
+
|
|
706
|
+
Args:
|
|
707
|
+
response: The HTTP response
|
|
708
|
+
|
|
709
|
+
Returns:
|
|
710
|
+
The response (possibly after retries)
|
|
711
|
+
"""
|
|
712
|
+
# For retry logic to work properly, it needs to be integrated at the dispatcher level
|
|
713
|
+
# This is a simplified version that just passes through
|
|
714
|
+
# In a full implementation, the retry logic would need access to the original request method
|
|
715
|
+
return response
|
|
716
|
+
|
|
717
|
+
async def execute_with_retry(self, request_func, *args, **kwargs) -> httpx.Response:
|
|
718
|
+
"""Execute a request function with retry logic.
|
|
719
|
+
|
|
720
|
+
Args:
|
|
721
|
+
request_func: Function that executes the HTTP request
|
|
722
|
+
*args: Arguments to pass to request_func
|
|
723
|
+
**kwargs: Keyword arguments to pass to request_func
|
|
724
|
+
|
|
725
|
+
Returns:
|
|
726
|
+
The HTTP response after potential retries
|
|
727
|
+
|
|
728
|
+
Raises:
|
|
729
|
+
The last exception encountered if all retries fail
|
|
730
|
+
"""
|
|
731
|
+
last_exception = None
|
|
732
|
+
|
|
733
|
+
for attempt in range(self.max_retries + 1):
|
|
734
|
+
try:
|
|
735
|
+
response = await request_func(*args, **kwargs)
|
|
736
|
+
|
|
737
|
+
# Check if response status requires retry
|
|
738
|
+
if response.status_code not in self.retry_on_status:
|
|
739
|
+
return response
|
|
740
|
+
|
|
741
|
+
if attempt == self.max_retries:
|
|
742
|
+
self.logger.warning(
|
|
743
|
+
f"Max retries ({self.max_retries}) reached for request. "
|
|
744
|
+
f"Final status: {response.status_code}"
|
|
745
|
+
)
|
|
746
|
+
return response
|
|
747
|
+
|
|
748
|
+
# Wait before retry
|
|
749
|
+
delay = self.retry_delay * (self.backoff_factor ** attempt)
|
|
750
|
+
self.logger.info(
|
|
751
|
+
f"Retrying request (attempt {attempt + 1}/{self.max_retries + 1}) "
|
|
752
|
+
f"after {delay:.2f}s due to status {response.status_code}"
|
|
753
|
+
)
|
|
754
|
+
await asyncio.sleep(delay)
|
|
755
|
+
|
|
756
|
+
except Exception as e:
|
|
757
|
+
# Check if exception type requires retry
|
|
758
|
+
if not any(isinstance(e, exc_type) for exc_type in self.retry_on_exceptions):
|
|
759
|
+
raise e
|
|
760
|
+
|
|
761
|
+
last_exception = e
|
|
762
|
+
|
|
763
|
+
if attempt == self.max_retries:
|
|
764
|
+
self.logger.error(
|
|
765
|
+
f"Max retries ({self.max_retries}) reached. "
|
|
766
|
+
f"Final exception: {type(e).__name__}: {e}"
|
|
767
|
+
)
|
|
768
|
+
raise e
|
|
769
|
+
|
|
770
|
+
# Wait before retry
|
|
771
|
+
delay = self.retry_delay * (self.backoff_factor ** attempt)
|
|
772
|
+
self.logger.info(
|
|
773
|
+
f"Retrying request (attempt {attempt + 1}/{self.max_retries + 1}) "
|
|
774
|
+
f"after {delay:.2f}s due to {type(e).__name__}: {e}"
|
|
775
|
+
)
|
|
776
|
+
await asyncio.sleep(delay)
|
|
777
|
+
|
|
778
|
+
|
|
779
|
+
class UserAgentInterceptor(Interceptor):
|
|
780
|
+
"""Interceptor that adds a User-Agent header."""
|
|
781
|
+
|
|
782
|
+
def __init__(self, user_agent: str):
|
|
783
|
+
"""Initialize the User-Agent interceptor.
|
|
784
|
+
|
|
785
|
+
Args:
|
|
786
|
+
user_agent: The User-Agent string to set
|
|
787
|
+
"""
|
|
788
|
+
self.user_agent = user_agent
|
|
789
|
+
|
|
790
|
+
async def process_request(self, config: RequestConfig) -> RequestConfig:
|
|
791
|
+
"""Add User-Agent header if not already present.
|
|
792
|
+
|
|
793
|
+
Args:
|
|
794
|
+
config: The request configuration
|
|
795
|
+
|
|
796
|
+
Returns:
|
|
797
|
+
The modified request configuration with User-Agent header
|
|
798
|
+
"""
|
|
799
|
+
if config.headers is None:
|
|
800
|
+
config.headers = {}
|
|
801
|
+
|
|
802
|
+
# Only set User-Agent if not already present (case-insensitive check)
|
|
803
|
+
has_user_agent = any(
|
|
804
|
+
key.lower() == 'user-agent'
|
|
805
|
+
for key in config.headers.keys()
|
|
806
|
+
)
|
|
807
|
+
|
|
808
|
+
if not has_user_agent:
|
|
809
|
+
config.headers['User-Agent'] = self.user_agent
|
|
810
|
+
|
|
811
|
+
return config
|
|
812
|
+
|
|
813
|
+
async def process_response(self, response: httpx.Response) -> httpx.Response:
|
|
814
|
+
"""Pass through response unchanged.
|
|
815
|
+
|
|
816
|
+
Args:
|
|
817
|
+
response: The HTTP response
|
|
818
|
+
|
|
819
|
+
Returns:
|
|
820
|
+
The unmodified response
|
|
821
|
+
"""
|
|
822
|
+
return response
|
|
823
|
+
|
|
824
|
+
|
|
825
|
+
class TimeoutInterceptor(Interceptor):
|
|
826
|
+
"""Interceptor that sets request timeouts."""
|
|
827
|
+
|
|
828
|
+
def __init__(self, timeout: Union[float, httpx.Timeout]):
|
|
829
|
+
"""Initialize the timeout interceptor.
|
|
830
|
+
|
|
831
|
+
Args:
|
|
832
|
+
timeout: Timeout value in seconds or httpx.Timeout object
|
|
833
|
+
"""
|
|
834
|
+
self.timeout = timeout
|
|
835
|
+
|
|
836
|
+
async def process_request(self, config: RequestConfig) -> RequestConfig:
|
|
837
|
+
"""Set timeout for the request.
|
|
838
|
+
|
|
839
|
+
Args:
|
|
840
|
+
config: The request configuration
|
|
841
|
+
|
|
842
|
+
Returns:
|
|
843
|
+
The modified request configuration with timeout
|
|
844
|
+
"""
|
|
845
|
+
if config.timeout is None:
|
|
846
|
+
config.timeout = self.timeout
|
|
847
|
+
return config
|
|
848
|
+
|
|
849
|
+
async def process_response(self, response: httpx.Response) -> httpx.Response:
|
|
850
|
+
"""Pass through response unchanged.
|
|
851
|
+
|
|
852
|
+
Args:
|
|
853
|
+
response: The HTTP response
|
|
854
|
+
|
|
855
|
+
Returns:
|
|
856
|
+
The unmodified response
|
|
857
|
+
"""
|
|
858
|
+
return response
|
|
859
|
+
|
|
860
|
+
|
|
861
|
+
class RateLimitInterceptor(Interceptor):
|
|
862
|
+
"""Interceptor that implements client-side rate limiting."""
|
|
863
|
+
|
|
864
|
+
def __init__(self, max_requests: int, time_window: float = 60.0):
|
|
865
|
+
"""Initialize the rate limit interceptor.
|
|
866
|
+
|
|
867
|
+
Args:
|
|
868
|
+
max_requests: Maximum number of requests allowed in the time window
|
|
869
|
+
time_window: Time window in seconds
|
|
870
|
+
"""
|
|
871
|
+
self.max_requests = max_requests
|
|
872
|
+
self.time_window = time_window
|
|
873
|
+
self.requests = []
|
|
874
|
+
self._lock = asyncio.Lock()
|
|
875
|
+
|
|
876
|
+
async def process_request(self, config: RequestConfig) -> RequestConfig:
|
|
877
|
+
"""Apply rate limiting before request.
|
|
878
|
+
|
|
879
|
+
Args:
|
|
880
|
+
config: The request configuration
|
|
881
|
+
|
|
882
|
+
Returns:
|
|
883
|
+
The unmodified request configuration
|
|
884
|
+
"""
|
|
885
|
+
async with self._lock:
|
|
886
|
+
now = time.time()
|
|
887
|
+
|
|
888
|
+
# Remove requests outside the time window
|
|
889
|
+
self.requests = [req_time for req_time in self.requests
|
|
890
|
+
if now - req_time < self.time_window]
|
|
891
|
+
|
|
892
|
+
# Check if we've exceeded the rate limit
|
|
893
|
+
if len(self.requests) >= self.max_requests:
|
|
894
|
+
# Calculate how long to wait
|
|
895
|
+
oldest_request = min(self.requests)
|
|
896
|
+
wait_time = self.time_window - (now - oldest_request)
|
|
897
|
+
|
|
898
|
+
if wait_time > 0:
|
|
899
|
+
await asyncio.sleep(wait_time)
|
|
900
|
+
|
|
901
|
+
# Record this request
|
|
902
|
+
self.requests.append(now)
|
|
903
|
+
|
|
904
|
+
return config
|
|
905
|
+
|
|
906
|
+
async def process_response(self, response: httpx.Response) -> httpx.Response:
|
|
907
|
+
"""Pass through response unchanged.
|
|
908
|
+
|
|
909
|
+
Args:
|
|
910
|
+
response: The HTTP response
|
|
911
|
+
|
|
912
|
+
Returns:
|
|
913
|
+
The unmodified response
|
|
914
|
+
"""
|
|
915
|
+
return response
|
|
916
|
+
|
|
917
|
+
|
|
918
|
+
# Factory functions for convenient interceptor creation
|
|
919
|
+
def create_base_url_interceptor(base_url: str) -> BaseUrlInterceptor:
|
|
920
|
+
"""Create a BaseUrlInterceptor instance.
|
|
921
|
+
|
|
922
|
+
Args:
|
|
923
|
+
base_url: The base URL to prepend to relative URLs
|
|
924
|
+
|
|
925
|
+
Returns:
|
|
926
|
+
Configured BaseUrlInterceptor instance
|
|
927
|
+
"""
|
|
928
|
+
return BaseUrlInterceptor(base_url)
|
|
929
|
+
|
|
930
|
+
|
|
931
|
+
def create_logging_interceptor(
|
|
932
|
+
enabled: bool = True,
|
|
933
|
+
log_level: int = logging.INFO,
|
|
934
|
+
include_headers: bool = True,
|
|
935
|
+
include_sensitive_headers: bool = False
|
|
936
|
+
) -> LoggingInterceptor:
|
|
937
|
+
"""Create a LoggingInterceptor instance.
|
|
938
|
+
|
|
939
|
+
Args:
|
|
940
|
+
enabled: Whether logging is enabled
|
|
941
|
+
log_level: Logging level to use
|
|
942
|
+
include_headers: Whether to log headers
|
|
943
|
+
include_sensitive_headers: Whether to log sensitive headers
|
|
944
|
+
|
|
945
|
+
Returns:
|
|
946
|
+
Configured LoggingInterceptor instance
|
|
947
|
+
"""
|
|
948
|
+
return LoggingInterceptor(
|
|
949
|
+
enabled=enabled,
|
|
950
|
+
log_level=log_level,
|
|
951
|
+
include_headers=include_headers,
|
|
952
|
+
include_sensitive_headers=include_sensitive_headers
|
|
953
|
+
)
|
|
954
|
+
|
|
955
|
+
|
|
956
|
+
def create_auth_interceptor(
|
|
957
|
+
token: Optional[str] = None,
|
|
958
|
+
api_key: Optional[str] = None,
|
|
959
|
+
api_key_header: str = 'X-API-Key',
|
|
960
|
+
auth_type: str = 'Bearer'
|
|
961
|
+
) -> AuthInterceptor:
|
|
962
|
+
"""Create an AuthInterceptor instance.
|
|
963
|
+
|
|
964
|
+
Args:
|
|
965
|
+
token: Bearer token for Authorization header
|
|
966
|
+
api_key: API key value
|
|
967
|
+
api_key_header: Header name for API key
|
|
968
|
+
auth_type: Type of authentication
|
|
969
|
+
|
|
970
|
+
Returns:
|
|
971
|
+
Configured AuthInterceptor instance
|
|
972
|
+
"""
|
|
973
|
+
return AuthInterceptor(
|
|
974
|
+
token=token,
|
|
975
|
+
api_key=api_key,
|
|
976
|
+
api_key_header=api_key_header,
|
|
977
|
+
auth_type=auth_type
|
|
978
|
+
)
|
|
979
|
+
|
|
980
|
+
|
|
981
|
+
def create_retry_interceptor(
|
|
982
|
+
max_retries: int = 3,
|
|
983
|
+
retry_delay: float = 1.0,
|
|
984
|
+
backoff_factor: float = 2.0,
|
|
985
|
+
retry_on_status: Optional[List[int]] = None
|
|
986
|
+
) -> RetryInterceptor:
|
|
987
|
+
"""Create a RetryInterceptor instance.
|
|
988
|
+
|
|
989
|
+
Args:
|
|
990
|
+
max_retries: Maximum number of retry attempts
|
|
991
|
+
retry_delay: Initial delay between retries in seconds
|
|
992
|
+
backoff_factor: Exponential backoff multiplier
|
|
993
|
+
retry_on_status: HTTP status codes that should trigger retries
|
|
994
|
+
|
|
995
|
+
Returns:
|
|
996
|
+
Configured RetryInterceptor instance
|
|
997
|
+
"""
|
|
998
|
+
return RetryInterceptor(
|
|
999
|
+
max_retries=max_retries,
|
|
1000
|
+
retry_delay=retry_delay,
|
|
1001
|
+
backoff_factor=backoff_factor,
|
|
1002
|
+
retry_on_status=retry_on_status
|
|
1003
|
+
)
|
|
1004
|
+
|
|
1005
|
+
|
|
1006
|
+
def create_user_agent_interceptor(user_agent: str) -> UserAgentInterceptor:
|
|
1007
|
+
"""Create a UserAgentInterceptor instance.
|
|
1008
|
+
|
|
1009
|
+
Args:
|
|
1010
|
+
user_agent: The User-Agent string to set
|
|
1011
|
+
|
|
1012
|
+
Returns:
|
|
1013
|
+
Configured UserAgentInterceptor instance
|
|
1014
|
+
"""
|
|
1015
|
+
return UserAgentInterceptor(user_agent)
|
|
1016
|
+
`;
|
|
1017
|
+
|
|
1018
|
+
// packages/python/src/lib/http/responses.txt
// Python source template for the generated SDK's HTTP response layer.
// The entire value is emitted verbatim into the generated package
// (presumably as http/responses.py — confirm against generate()), so any
// change here changes the code every generated SDK ships with.
// NOTE(review): the template defines TimeoutError / ConnectionError classes
// that shadow Python built-ins of the same name inside the generated module.
var responses_default = `"""HTTP response models and exceptions."""

from typing import Any, Dict, Optional, Union

import httpx
from pydantic import BaseModel


class ApiResponse(BaseModel):
    """Base class for API responses."""

    status_code: int
    headers: Dict[str, str]
    data: Any

    class Config:
        """Pydantic configuration."""
        arbitrary_types_allowed = True


class SuccessResponse(ApiResponse):
    """Represents a successful API response."""

    def __init__(self, data: Any, status_code: int = 200, headers: Optional[Dict[str, str]] = None):
        """Initialize success response.

        Args:
            data: Response data
            status_code: HTTP status code
            headers: Response headers
        """
        super().__init__(
            status_code=status_code,
            headers=headers or {},
            data=data
        )


class ErrorResponse(Exception):
    """Exception raised for HTTP error responses."""

    def __init__(
        self,
        data: Any,
        status_code: int,
        headers: Optional[Dict[str, str]] = None,
        message: Optional[str] = None
    ):
        """Initialize error response.

        Args:
            data: Error response data
            status_code: HTTP status code
            headers: Response headers
            message: Custom error message
        """
        self.data = data
        self.status_code = status_code
        self.headers = headers or {}
        self.message = message or f"HTTP {status_code} Error"

        super().__init__(self.message)

    def __str__(self) -> str:
        """String representation of the error."""
        return f"ErrorResponse(status_code={self.status_code}, message='{self.message}')"

    def __repr__(self) -> str:
        """Detailed string representation of the error."""
        return (
            f"ErrorResponse(status_code={self.status_code}, "
            f"message='{self.message}', data={self.data})"
        )


class TimeoutError(ErrorResponse):
    """Exception raised for request timeouts."""

    def __init__(self, message: str = "Request timed out"):
        """Initialize timeout error.

        Args:
            message: Error message
        """
        super().__init__(
            data={'error': 'timeout'},
            status_code=408,
            message=message
        )


class ConnectionError(ErrorResponse):
    """Exception raised for connection errors."""

    def __init__(self, message: str = "Connection failed"):
        """Initialize connection error.

        Args:
            message: Error message
        """
        super().__init__(
            data={'error': 'connection'},
            status_code=503,
            message=message
        )


class BadRequestError(ErrorResponse):
    """Exception raised for 400 Bad Request errors."""

    def __init__(self, data: Any = None, message: str = "Bad Request"):
        """Initialize bad request error.

        Args:
            data: Error data
            message: Error message
        """
        super().__init__(
            data=data or {'error': 'bad_request'},
            status_code=400,
            message=message
        )


class UnauthorizedError(ErrorResponse):
    """Exception raised for 401 Unauthorized errors."""

    def __init__(self, data: Any = None, message: str = "Unauthorized"):
        """Initialize unauthorized error.

        Args:
            data: Error data
            message: Error message
        """
        super().__init__(
            data=data or {'error': 'unauthorized'},
            status_code=401,
            message=message
        )


class ForbiddenError(ErrorResponse):
    """Exception raised for 403 Forbidden errors."""

    def __init__(self, data: Any = None, message: str = "Forbidden"):
        """Initialize forbidden error.

        Args:
            data: Error data
            message: Error message
        """
        super().__init__(
            data=data or {'error': 'forbidden'},
            status_code=403,
            message=message
        )


class NotFoundError(ErrorResponse):
    """Exception raised for 404 Not Found errors."""

    def __init__(self, data: Any = None, message: str = "Not Found"):
        """Initialize not found error.

        Args:
            data: Error data
            message: Error message
        """
        super().__init__(
            data=data or {'error': 'not_found'},
            status_code=404,
            message=message
        )


class InternalServerError(ErrorResponse):
    """Exception raised for 500 Internal Server Error."""

    def __init__(self, data: Any = None, message: str = "Internal Server Error"):
        """Initialize internal server error.

        Args:
            data: Error data
            message: Error message
        """
        super().__init__(
            data=data or {'error': 'internal_server_error'},
            status_code=500,
            message=message
        )


def create_error_from_response(response: httpx.Response) -> ErrorResponse:
    """Create appropriate error exception from HTTP response.

    Args:
        response: HTTP response

    Returns:
        Appropriate error exception
    """
    status_code = response.status_code
    headers = dict(response.headers)

    # Try to parse error data
    try:
        data = response.json()
    except Exception:
        data = {'message': response.text}

    # Create specific error types based on status code
    error_classes = {
        400: BadRequestError,
        401: UnauthorizedError,
        403: ForbiddenError,
        404: NotFoundError,
        500: InternalServerError,
    }

    error_class = error_classes.get(status_code, ErrorResponse)

    if error_class == ErrorResponse:
        return ErrorResponse(data, status_code, headers)
    else:
        return error_class(data)
`;
|
|
1245
|
+
|
|
1246
|
+
// packages/python/src/lib/python-emitter.ts
|
|
1247
|
+
import { snakecase } from "stringcase";
|
|
1248
|
+
import { isRef, notRef, parseRef, pascalcase } from "@sdk-it/core";
|
|
1249
|
+
import { isPrimitiveSchema } from "@sdk-it/spec";
|
|
1250
|
+
/**
 * Normalize an OpenAPI schema object by folding the vendor extensions
 * `x-properties` and `x-required` into the standard `properties` and
 * `required` fields.
 *
 * The caller's schema is never mutated: a deep copy is made first and the
 * merged copy is returned. `x-required` entries are de-duplicated against
 * any existing `required` list while preserving first-seen order.
 */
function coerceObject(schema) {
  const copy = structuredClone(schema);

  const extraProperties = copy["x-properties"];
  if (extraProperties) {
    // Extension-declared properties win over same-named standard ones.
    copy.properties = Object.assign({}, copy.properties ?? {}, extraProperties ?? {});
  }

  const extraRequired = copy["x-required"];
  if (extraRequired) {
    const merged = new Set();
    if (Array.isArray(copy.required)) {
      for (const name of copy.required) {
        merged.add(name);
      }
    }
    for (const name of extraRequired || []) {
      merged.add(name);
    }
    copy.required = [...merged];
  }

  return copy;
}
|
|
1268
|
+
// Emits Python (pydantic v1 style) source for OpenAPI schemas. Every schema
// kind (ref, const, enum, array, union, object, primitive) resolves to a
// descriptor object: { type, content, use, fromJson, simple } — `type`/`use`
// are the Python type expression, `content` is generated class source (empty
// for inline types), `fromJson` is the deserialization expression, and
// `simple` marks types that need no emitted class.
var PythonEmitter = class {
  // NOTE(review): #spec is stored by the constructor but never read anywhere
  // in this class.
  #spec;
  // Callback invoked with (name, content, schema) each time a class is emitted.
  #emitHandler;
  // Class bodies already emitted, keyed by exact content — de-duplicates
  // identical generated classes.
  #emitHistory = /* @__PURE__ */ new Set();
  #typeCache = /* @__PURE__ */ new Map();
  // Cache for resolved types
  // Forward a generated class to the registered handler, skipping exact
  // duplicates of previously emitted content.
  #emit(name, content, schema) {
    if (this.#emitHistory.has(content)) {
      return;
    }
    this.#emitHistory.add(content);
    this.#emitHandler?.(name, content, schema);
  }
  constructor(spec) {
    this.#spec = spec;
  }
  // Register the callback that receives each emitted (name, content, schema).
  onEmit(emit) {
    this.#emitHandler = emit;
  }
  // Convert a property name to snake_case and suffix it with "_" when it
  // collides with a Python reserved word, so the generated field is a legal
  // identifier.
  #formatFieldName(name) {
    let fieldName = snakecase(name);
    const reservedKeywords = [
      "class",
      "def",
      "if",
      "else",
      "elif",
      "while",
      "for",
      "try",
      "except",
      "finally",
      "with",
      "as",
      "import",
      "from",
      "global",
      "nonlocal",
      "lambda",
      "yield",
      "return",
      "pass",
      "break",
      "continue",
      "True",
      "False",
      "None",
      "and",
      "or",
      "not",
      "in",
      "is"
    ];
    if (reservedKeywords.includes(fieldName)) {
      fieldName = `${fieldName}_`;
    }
    return fieldName;
  }
  // Resolve a $ref to a descriptor naming the referenced model's PascalCase
  // class. Results are memoized per $ref string in #typeCache.
  #ref(ref) {
    const cacheKey = ref.$ref;
    if (this.#typeCache.has(cacheKey)) {
      return this.#typeCache.get(cacheKey);
    }
    const refInfo = parseRef(ref.$ref);
    const refName = refInfo.model;
    const className = pascalcase(refName);
    const result = {
      type: className,
      content: "",
      use: className,
      fromJson: `${className}.parse_obj`,
      simple: false
    };
    this.#typeCache.set(cacheKey, result);
    return result;
  }
  // Map oneOf/anyOf variants to a Python type: "Any" when empty, the single
  // type when only one distinct variant remains after de-duplication,
  // otherwise Union[...] over the distinct variant types.
  #oneOf(variants, context) {
    const variantTypes = variants.map((variant) => this.handle(variant, context)).map((result) => result.type || "Any").filter((type, index, arr) => arr.indexOf(type) === index);
    if (variantTypes.length === 0) {
      return {
        type: "Any",
        content: "",
        use: "Any",
        fromJson: "Any",
        simple: true
      };
    }
    if (variantTypes.length === 1) {
      return {
        type: variantTypes[0],
        content: "",
        use: variantTypes[0],
        fromJson: variantTypes[0],
        simple: true
      };
    }
    const unionType = `Union[${variantTypes.join(", ")}]`;
    return {
      type: unionType,
      content: "",
      use: unionType,
      fromJson: unionType,
      simple: true
    };
  }
  // Emit a pydantic model class for an object schema: base class from the
  // first non-ref allOf member (default BaseModel), one field per property
  // (Optional + "= None" when not required, Field(alias=...) when the Python
  // name differs from the wire name), plus extra fields for oneOf/anyOf
  // ("value") and typed additionalProperties. Schemas tagged x-inputname also
  // get a to_request_config() method that splices values into path/query
  // parameters.
  #object(className, schema, context) {
    const { properties = {}, required = [] } = coerceObject(schema);
    const fields = [];
    let baseClass = "BaseModel";
    if (schema.allOf) {
      const bases = schema.allOf.filter(notRef).map((s) => this.handle(s, context)).filter((result) => result.type).map((result) => result.type);
      if (bases.length > 0 && bases[0]) {
        baseClass = bases[0];
      }
    }
    for (const [propName, propSchema] of Object.entries(properties)) {
      if (isRef(propSchema)) {
        const refResult = this.#ref(propSchema);
        const refInfo = parseRef(propSchema.$ref);
        const refName = refInfo.model;
        const pythonType = pascalcase(refName);
        const fieldName = this.#formatFieldName(propName);
        const isRequired = required.includes(propName);
        const fieldType = isRequired ? pythonType : `Optional[${pythonType}]`;
        const defaultValue = isRequired ? "" : " = None";
        fields.push(`    ${fieldName}: ${fieldType}${defaultValue}`);
      } else {
        const result = this.handle(propSchema, { ...context, name: propName });
        const fieldName = this.#formatFieldName(propName);
        const isRequired = required.includes(propName);
        let fieldType = result.type || "Any";
        if (!isRequired) {
          fieldType = `Optional[${fieldType}]`;
        }
        const defaultValue = isRequired ? "" : " = None";
        let fieldDef = `    ${fieldName}: ${fieldType}${defaultValue}`;
        if (fieldName !== propName) {
          // Python name was normalized — keep the wire name via Field(alias=...).
          fieldDef = `    ${fieldName}: ${fieldType} = Field(alias='${propName}'${defaultValue ? ", default=None" : ""})`;
        }
        if (propSchema.description) {
          fieldDef += ` # ${propSchema.description}`;
        }
        fields.push(fieldDef);
      }
    }
    if (schema.oneOf || schema.anyOf) {
      const unionResult = this.#oneOf(
        schema.oneOf || schema.anyOf || [],
        context
      );
      fields.push(`    value: ${unionResult.type}`);
    }
    if (schema.additionalProperties && typeof schema.additionalProperties === "object") {
      const addlResult = this.handle(schema.additionalProperties, context);
      fields.push(
        `    additional_properties: Optional[Dict[str, ${addlResult.type || "Any"}]] = None`
      );
    }
    const docstring = schema.description ? `    """${schema.description}"""
` : "";
    let requestConfigMethod = "";
    if (schema["x-inputname"]) {
      requestConfigMethod = `
    def to_request_config(self, config: RequestConfig) -> RequestConfig:
        """Convert this input model to request configuration."""
        # Handle path parameters
        path_params = {}
        for key, value in self.dict(exclude_none=True).items():
            if key in config.url:
                path_params[key] = str(value)
                config.url = config.url.replace(f'{{{key}}}', str(value))

        # Handle query parameters
        query_params = {k: v for k, v in self.dict(exclude_none=True).items()
                        if k not in path_params}
        if query_params:
            config.params = query_params

        return config
`;
    }
    const content = `class ${className}(${baseClass}):
${docstring}${fields.length > 0 ? fields.join("\n") : "    pass"}${requestConfigMethod}
`;
    this.#emit(className, content, schema);
    return {
      type: className,
      content,
      use: className,
      fromJson: `${className}.parse_obj`,
      simple: false
    };
  }
  // Map a primitive schema (string/integer/number/boolean, with formats) to
  // its Python type; wraps in Optional[...] when the schema is nullable.
  #primitive(schema, context) {
    const { type, format } = schema;
    const nullable = schema.nullable;
    let pythonType = "Any";
    switch (type) {
      case "string":
        if (format === "date-time") {
          pythonType = "datetime";
        } else if (format === "date") {
          pythonType = "date";
        } else if (format === "uuid") {
          pythonType = "UUID";
        } else if (format === "binary" || format === "byte") {
          pythonType = "bytes";
        } else {
          pythonType = "str";
        }
        break;
      case "integer":
        // Both branches yield "int": Python ints are arbitrary precision, so
        // int32 and int64 need no distinction.
        if (format === "int64") {
          pythonType = "int";
        } else {
          pythonType = "int";
        }
        break;
      case "number":
        pythonType = "float";
        break;
      case "boolean":
        pythonType = "bool";
        break;
      default:
        pythonType = "Any";
    }
    if (nullable) {
      pythonType = `Optional[${pythonType}]`;
    }
    return {
      type: pythonType,
      content: "",
      use: pythonType,
      fromJson: pythonType,
      simple: true,
      nullable
    };
  }
  // Map an array schema to List[item type]; List[Any] when no items schema.
  #array(schema, context) {
    const itemsSchema = schema.items;
    if (!itemsSchema) {
      return {
        type: "List[Any]",
        content: "",
        use: "List[Any]",
        fromJson: "list",
        simple: true
      };
    }
    const itemsResult = this.handle(itemsSchema, context);
    const listType = `List[${itemsResult.type || "Any"}]`;
    return {
      type: listType,
      content: itemsResult.content,
      use: listType,
      fromJson: `List[${itemsResult.fromJson || itemsResult.type}]`,
      simple: true
    };
  }
  // Emit a Python Enum class for an enum schema. String values become
  // upper-cased member names (non-alphanumerics replaced with "_"); non-string
  // values get positional VALUE_<index> names. Falls back to #primitive when
  // the enum list is empty; requires context.name to name the class.
  // NOTE(review): string values that start with a digit would produce an
  // invalid Python identifier — confirm upstream spec normalization prevents
  // this.
  #enum(schema, context) {
    const { enum: enumValues } = schema;
    if (!enumValues || enumValues.length === 0) {
      return this.#primitive(schema, context);
    }
    if (!context.name) {
      throw new Error("Enum schemas must have a name in context");
    }
    const className = pascalcase(context.name);
    const enumItems = enumValues.map((value, index) => {
      const name = typeof value === "string" ? value.toUpperCase().replace(/[^A-Z0-9]/g, "_") : `VALUE_${index}`;
      const pythonValue = typeof value === "string" ? `'${value}'` : String(value);
      return `    ${name} = ${pythonValue}`;
    });
    const content = `class ${className}(Enum):
    """Enumeration for ${context.name}."""
${enumItems.join("\n")}
`;
    this.#emit(className, content, schema);
    return {
      type: className,
      content,
      use: className,
      fromJson: className,
      simple: false
    };
  }
  // Map a const schema to a typing.Literal[...] type; strings are quoted,
  // other values rendered via JSON.stringify.
  #const(schema, context) {
    const { const: constValue } = schema;
    if (typeof constValue === "string") {
      return {
        type: `Literal['${constValue}']`,
        content: "",
        use: `Literal['${constValue}']`,
        fromJson: `'${constValue}'`,
        simple: true,
        literal: constValue
      };
    }
    return {
      type: `Literal[${JSON.stringify(constValue)}]`,
      content: "",
      use: `Literal[${JSON.stringify(constValue)}]`,
      fromJson: JSON.stringify(constValue),
      simple: true,
      literal: constValue
    };
  }
  // Dispatch a schema to the matching handler. Order matters: $ref, const,
  // enum, array, oneOf/anyOf, object, then primitive; anything unrecognized
  // falls through to Any.
  handle(schema, context = {}) {
    if (isRef(schema)) {
      return this.#ref(schema);
    }
    if ("const" in schema && schema.const !== void 0) {
      return this.#const(schema, context);
    }
    if (schema.enum) {
      return this.#enum(schema, context);
    }
    if (schema.type === "array") {
      return this.#array(schema, context);
    }
    if (schema.oneOf || schema.anyOf) {
      return this.#oneOf(schema.oneOf || schema.anyOf || [], context);
    }
    // NOTE(review): the oneOf/anyOf terms in this condition are unreachable —
    // the branch above already returned for any schema with oneOf/anyOf.
    if (schema.type === "object" || schema.properties || schema.allOf || schema.oneOf || schema.anyOf) {
      if (!context.name) {
        throw new Error("Object schemas must have a name in context");
      }
      const className = pascalcase(context.name);
      return this.#object(className, schema, context);
    }
    if (isPrimitiveSchema(schema)) {
      return this.#primitive(schema, context);
    }
    return {
      type: "Any",
      content: "",
      use: "Any",
      fromJson: "Any",
      simple: true
    };
  }
};
|
|
1611
|
+
|
|
1612
|
+
// packages/python/src/lib/generate.ts
|
|
1613
|
+
async function generate(openapi, settings) {
|
|
1614
|
+
const spec = augmentSpec({ spec: openapi }, true);
|
|
1615
|
+
const clientName = settings.name || "Client";
|
|
1616
|
+
const output = settings.output;
|
|
1617
|
+
const { writer, files: writtenFiles } = createWriterProxy(
|
|
1618
|
+
settings.writer ?? writeFiles,
|
|
1619
|
+
settings.output
|
|
1620
|
+
);
|
|
1621
|
+
settings.writer = writer;
|
|
1622
|
+
settings.readFolder ??= async (folder) => {
|
|
1623
|
+
const files = await readdir(folder, { withFileTypes: true });
|
|
1624
|
+
return files.map((file) => ({
|
|
1625
|
+
fileName: file.name,
|
|
1626
|
+
filePath: join(file.parentPath, file.name),
|
|
1627
|
+
isFolder: file.isDirectory()
|
|
1628
|
+
}));
|
|
1629
|
+
};
|
|
1630
|
+
const groups = {};
|
|
1631
|
+
forEachOperation(spec, (entry, operation) => {
|
|
1632
|
+
console.log(`Processing ${entry.method} ${entry.path}`);
|
|
1633
|
+
const group = groups[entry.groupName] ??= {
|
|
1634
|
+
className: `${pascalcase2(entry.groupName)}Api`,
|
|
1635
|
+
methods: []
|
|
1636
|
+
};
|
|
1637
|
+
const input = toInputs(spec, { entry, operation });
|
|
1638
|
+
const response = toOutput(spec, operation);
|
|
1639
|
+
const methodName = snakecase2(
|
|
1640
|
+
operation.operationId || `${entry.method}_${entry.path.replace(/[^a-zA-Z0-9]/g, "_")}`
|
|
1641
|
+
);
|
|
1642
|
+
const returnType = response ? response.returnType : "httpx.Response";
|
|
1643
|
+
const docstring = operation.summary || operation.description ? ` """${operation.summary || operation.description}"""` : "";
|
|
1644
|
+
group.methods.push(`
|
|
1645
|
+
async def ${methodName}(self${input.haveInput ? `, input_data: ${input.inputName}` : ""}) -> ${returnType}:
|
|
1646
|
+
${docstring}
|
|
1647
|
+
config = RequestConfig(
|
|
1648
|
+
method='${entry.method.toUpperCase()}',
|
|
1649
|
+
url='${entry.path}',
|
|
1650
|
+
)
|
|
1651
|
+
|
|
1652
|
+
${input.haveInput ? "config = input_data.to_request_config(config)" : ""}
|
|
1653
|
+
|
|
1654
|
+
response = await self.dispatcher.${input.contentType}(config)
|
|
1655
|
+
${response ? `return await self.receiver.json(response, ${response.successModel || "None"}, ${response.errorModel || "None"})` : "return response"}
|
|
1656
|
+
`);
|
|
1657
|
+
});
|
|
1658
|
+
const emitter = new PythonEmitter(spec);
|
|
1659
|
+
const models = await serializeModels(spec, emitter);
|
|
1660
|
+
const apiClasses = Object.entries(groups).reduce(
|
|
1661
|
+
(acc, [name, { className, methods }]) => {
|
|
1662
|
+
const fileName = `api/${snakecase2(name)}_api.py`;
|
|
1663
|
+
const imports = [
|
|
1664
|
+
"from typing import Optional",
|
|
1665
|
+
"import httpx",
|
|
1666
|
+
"",
|
|
1667
|
+
"from ..http.dispatcher import Dispatcher, RequestConfig",
|
|
1668
|
+
"from ..http.responses import Receiver",
|
|
1669
|
+
"from ..inputs import *",
|
|
1670
|
+
"from ..outputs import *",
|
|
1671
|
+
"from ..models import *",
|
|
1672
|
+
""
|
|
1673
|
+
].join("\n");
|
|
1674
|
+
acc[fileName] = `${imports}
|
|
1675
|
+
class ${className}:
|
|
1676
|
+
"""API client for ${name} operations."""
|
|
1677
|
+
|
|
1678
|
+
def __init__(self, dispatcher: Dispatcher, receiver: Receiver):
|
|
1679
|
+
self.dispatcher = dispatcher
|
|
1680
|
+
self.receiver = receiver
|
|
1681
|
+
${methods.join("\n")}
|
|
1682
|
+
`;
|
|
1683
|
+
return acc;
|
|
1684
|
+
},
|
|
1685
|
+
{}
|
|
1686
|
+
);
|
|
1687
|
+
const apiImports = Object.keys(groups).map(
|
|
1688
|
+
(name) => `from .api.${snakecase2(name)}_api import ${pascalcase2(name)}Api`
|
|
1689
|
+
).join("\\n");
|
|
1690
|
+
const apiProperties = Object.keys(groups).map(
|
|
1691
|
+
(name) => ` self.${snakecase2(name)} = ${pascalcase2(name)}Api(dispatcher, receiver)`
|
|
1692
|
+
).join("\\n");
|
|
1693
|
+
const clientCode = `"""Main API client."""
|
|
1694
|
+
|
|
1695
|
+
from typing import Optional, List
|
|
1696
|
+
import httpx
|
|
1697
|
+
|
|
1698
|
+
${apiImports}
|
|
1699
|
+
from .http.dispatcher import Dispatcher, RequestConfig
|
|
1700
|
+
from .http.responses import Receiver
|
|
1701
|
+
from .http.interceptors import (
|
|
1702
|
+
Interceptor,
|
|
1703
|
+
BaseUrlInterceptor,
|
|
1704
|
+
LoggingInterceptor,
|
|
1705
|
+
AuthInterceptor,
|
|
1706
|
+
UserAgentInterceptor,
|
|
1707
|
+
)
|
|
1708
|
+
|
|
1709
|
+
|
|
1710
|
+
class ${clientName}:
|
|
1711
|
+
"""Main API client for the SDK."""
|
|
1712
|
+
|
|
1713
|
+
def __init__(
|
|
1714
|
+
self,
|
|
1715
|
+
base_url: str,
|
|
1716
|
+
token: Optional[str] = None,
|
|
1717
|
+
api_key: Optional[str] = None,
|
|
1718
|
+
api_key_header: str = 'X-API-Key',
|
|
1719
|
+
enable_logging: bool = False,
|
|
1720
|
+
user_agent: Optional[str] = None,
|
|
1721
|
+
custom_interceptors: Optional[List[Interceptor]] = None,
|
|
1722
|
+
):
|
|
1723
|
+
"""
|
|
1724
|
+
Initialize the API client.
|
|
1725
|
+
|
|
1726
|
+
Args:
|
|
1727
|
+
base_url: Base URL for the API
|
|
1728
|
+
token: Bearer token for authentication
|
|
1729
|
+
api_key: API key for authentication
|
|
1730
|
+
api_key_header: Header name for API key authentication
|
|
1731
|
+
enable_logging: Enable request/response logging
|
|
1732
|
+
user_agent: Custom User-Agent header
|
|
1733
|
+
custom_interceptors: Additional custom interceptors
|
|
1734
|
+
"""
|
|
1735
|
+
self.base_url = base_url
|
|
1736
|
+
|
|
1737
|
+
# Build interceptor chain
|
|
1738
|
+
interceptors = []
|
|
1739
|
+
|
|
1740
|
+
# Base URL interceptor (always first)
|
|
1741
|
+
interceptors.append(BaseUrlInterceptor(base_url))
|
|
1742
|
+
|
|
1743
|
+
# Authentication interceptor
|
|
1744
|
+
if token or api_key:
|
|
1745
|
+
interceptors.append(AuthInterceptor(token=token, api_key=api_key, api_key_header=api_key_header))
|
|
1746
|
+
|
|
1747
|
+
# User agent interceptor
|
|
1748
|
+
if user_agent:
|
|
1749
|
+
interceptors.append(UserAgentInterceptor(user_agent))
|
|
1750
|
+
|
|
1751
|
+
# Logging interceptor
|
|
1752
|
+
if enable_logging:
|
|
1753
|
+
interceptors.append(LoggingInterceptor())
|
|
1754
|
+
|
|
1755
|
+
# Custom interceptors
|
|
1756
|
+
if custom_interceptors:
|
|
1757
|
+
interceptors.extend(custom_interceptors)
|
|
1758
|
+
|
|
1759
|
+
# Initialize dispatcher and receiver
|
|
1760
|
+
self.dispatcher = Dispatcher(interceptors)
|
|
1761
|
+
self.receiver = Receiver(interceptors)
|
|
1762
|
+
|
|
1763
|
+
# Initialize API clients
|
|
1764
|
+
${apiProperties}
|
|
1765
|
+
|
|
1766
|
+
async def __aenter__(self):
|
|
1767
|
+
return self
|
|
1768
|
+
|
|
1769
|
+
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
|
1770
|
+
await self.close()
|
|
1771
|
+
|
|
1772
|
+
async def close(self):
|
|
1773
|
+
"""Close the HTTP client."""
|
|
1774
|
+
await self.dispatcher.close()
|
|
1775
|
+
`;
|
|
1776
|
+
await settings.writer(output, {
|
|
1777
|
+
...models,
|
|
1778
|
+
...apiClasses,
|
|
1779
|
+
"client.py": clientCode,
|
|
1780
|
+
"http/dispatcher.py": dispatcher_default,
|
|
1781
|
+
"http/interceptors.py": interceptors_default,
|
|
1782
|
+
"http/responses.py": responses_default,
|
|
1783
|
+
"__init__.py": `"""SDK package."""
|
|
1784
|
+
|
|
1785
|
+
from .client import ${clientName}
|
|
1786
|
+
|
|
1787
|
+
__all__ = ['${clientName}']
|
|
1788
|
+
`
|
|
1789
|
+
});
|
|
1790
|
+
if (settings.mode === "full") {
|
|
1791
|
+
const requirements = `# HTTP client
|
|
1792
|
+
httpx>=0.24.0,<1.0.0
|
|
1793
|
+
|
|
1794
|
+
# Data validation and serialization
|
|
1795
|
+
pydantic>=2.0.0,<3.0.0
|
|
1796
|
+
|
|
1797
|
+
# Enhanced type hints
|
|
1798
|
+
typing-extensions>=4.0.0
|
|
1799
|
+
|
|
1800
|
+
# Optional: For better datetime handling
|
|
1801
|
+
python-dateutil>=2.8.0
|
|
1802
|
+
`;
|
|
1803
|
+
await settings.writer(output, {
|
|
1804
|
+
"requirements.txt": requirements
|
|
1805
|
+
});
|
|
1806
|
+
}
|
|
1807
|
+
const metadata = await readWriteMetadata(
|
|
1808
|
+
settings.output,
|
|
1809
|
+
Array.from(writtenFiles)
|
|
1810
|
+
);
|
|
1811
|
+
if (settings.cleanup !== false && writtenFiles.size > 0) {
|
|
1812
|
+
await cleanFiles(metadata.content, settings.output, [
|
|
1813
|
+
"/__init__.py",
|
|
1814
|
+
"requirements.txt",
|
|
1815
|
+
"/metadata.json"
|
|
1816
|
+
]);
|
|
1817
|
+
}
|
|
1818
|
+
await settings.writer(output, {
|
|
1819
|
+
"models/__init__.py": await generateModuleInit(
|
|
1820
|
+
join(output, "models"),
|
|
1821
|
+
settings.readFolder
|
|
1822
|
+
),
|
|
1823
|
+
"inputs/__init__.py": await generateModuleInit(
|
|
1824
|
+
join(output, "inputs"),
|
|
1825
|
+
settings.readFolder
|
|
1826
|
+
),
|
|
1827
|
+
"outputs/__init__.py": await generateModuleInit(
|
|
1828
|
+
join(output, "outputs"),
|
|
1829
|
+
settings.readFolder
|
|
1830
|
+
),
|
|
1831
|
+
"api/__init__.py": await generateModuleInit(
|
|
1832
|
+
join(output, "api"),
|
|
1833
|
+
settings.readFolder
|
|
1834
|
+
),
|
|
1835
|
+
"http/__init__.py": `"""HTTP utilities."""
|
|
1836
|
+
|
|
1837
|
+
from .dispatcher import Dispatcher, RequestConfig
|
|
1838
|
+
from .interceptors import *
|
|
1839
|
+
from .responses import *
|
|
1840
|
+
|
|
1841
|
+
__all__ = [
|
|
1842
|
+
'Dispatcher',
|
|
1843
|
+
'RequestConfig',
|
|
1844
|
+
'ApiResponse',
|
|
1845
|
+
'ErrorResponse',
|
|
1846
|
+
'Interceptor',
|
|
1847
|
+
'BaseUrlInterceptor',
|
|
1848
|
+
'LoggingInterceptor',
|
|
1849
|
+
'AuthInterceptor',
|
|
1850
|
+
]
|
|
1851
|
+
`
|
|
1852
|
+
});
|
|
1853
|
+
if (settings.formatCode) {
|
|
1854
|
+
await settings.formatCode({ output: settings.output });
|
|
1855
|
+
}
|
|
1856
|
+
}
|
|
1857
|
+
// Build a Python package `__init__.py` that re-exports every module in `folder`.
//
// Scans `folder` via the caller-supplied async `readFolder`, collects every
// `.py` file except `__init__.py` itself, and emits a `from .<mod> import *`
// line per module. Falls back to a bare docstring module when the folder is
// empty or cannot be read (e.g. it was never generated for this spec).
async function generateModuleInit(folder, readFolder) {
  try {
    const files = await readFolder(folder);
    const moduleNames = files
      .filter(
        (file) =>
          file.fileName.endsWith(".py") && file.fileName !== "__init__.py"
      )
      // Strip only the trailing ".py" extension. The previous
      // `replace(".py", "")` removed the FIRST occurrence, which mangled
      // names such as "my.python_utils.py" -> "my_utils.py".
      .map((file) => file.fileName.slice(0, -".py".length));
    if (moduleNames.length === 0) {
      return '"""Package module."""\n';
    }
    const imports = moduleNames
      .map((name) => `from .${name} import *`)
      .join("\n");
    return `"""Package module."""

${imports}
`;
  } catch {
    // Folder missing or unreadable: still emit a syntactically valid module.
    // Bug fix: this branch previously returned '...\\n' — the two literal
    // characters "\" and "n" — which would corrupt the generated Python file;
    // it must be a real newline, matching the empty-folder branch above.
    return '"""Package module."""\n';
  }
}
|
|
1875
|
+
// Derive request-input metadata for one operation: the generated input class
// name, whether the operation accepts any input at all (parameters or body),
// and which body encoding the request uses ("json" | "multipart" | "form").
function toInputs(spec, { entry, operation }) {
  const inputName = entry.inputName || "Input";
  const haveInput =
    !isEmpty(operation.parameters) || !isEmpty(operation.requestBody);

  // Default to JSON; only an inline (non-$ref) request body with declared
  // content can switch the encoding.
  let contentType = "json";
  const body = operation.requestBody;
  if (body && !isRef2(body) && body.content) {
    const mediaTypes = Object.keys(body.content);
    const declares = (needle) =>
      mediaTypes.some((type) => type.includes(needle));
    // multipart wins over form-urlencoded when both are declared.
    if (declares("multipart")) {
      contentType = "multipart";
    } else if (declares("form")) {
      contentType = "form";
    }
  }

  return { inputName, haveInput, contentType };
}
|
|
1896
|
+
// Resolve the Python return type for an operation's first successful (2xx)
// response. Returns null when there is no usable success response, otherwise
// an object with the Python `returnType`, the emitted success model name (if
// a concrete JSON schema was available), and a placeholder error model.
function toOutput(spec, operation) {
  if (!operation.responses) {
    return null;
  }

  const success = Object.entries(operation.responses).find(([status]) =>
    isSuccessStatusCode(Number(status))
  );
  // No 2xx entry, or the response itself is a $ref we don't resolve here.
  if (!success || isRef2(success[1])) {
    return null;
  }

  const response = success[1];
  // No declared content at all: the endpoint returns nothing.
  if (!response.content) {
    return { returnType: "None", successModel: null, errorModel: null };
  }

  const jsonEntry = Object.entries(response.content).find(([mediaType]) =>
    parseJsonContentType(mediaType)
  );
  // Non-JSON payloads are handed back to the caller as the raw HTTP response.
  if (!jsonEntry) {
    return {
      returnType: "httpx.Response",
      successModel: null,
      errorModel: null
    };
  }

  const schema = jsonEntry[1].schema;
  // Missing or $ref schema: fall back to an untyped payload.
  if (!schema || isRef2(schema)) {
    return { returnType: "Any", successModel: null, errorModel: null };
  }

  const emitted = new PythonEmitter(spec).handle(schema, {});
  return {
    returnType: emitted.type || "Any",
    successModel: emitted.type,
    errorModel: null // TODO: Handle error models
  };
}
|
|
1938
|
+
// Emit Pydantic model source files for every named schema in the spec.
//
// Registers an emitter callback that routes each generated class into
// inputs/, outputs/ or models/ based on the schema's x-inputname /
// x-response-name markers, prefixing every file with a shared block of
// typing / pydantic / datetime imports. Returns a map of relative file
// path -> Python source.
async function serializeModels(spec, emitter) {
  const files = {};
  const header = [
    "from typing import Any, Dict, List, Optional, Union, Literal",
    "from pydantic import BaseModel, Field",
    "from datetime import datetime, date",
    "from uuid import UUID",
    "from enum import Enum"
  ].join("\n");

  emitter.onEmit((name, content, schema) => {
    // Input models additionally need RequestConfig from the dispatcher.
    const extraImport = schema["x-inputname"]
      ? "from ..http.dispatcher import RequestConfig"
      : "";
    const source = `${header}
${extraImport}


${content}`;
    let folder;
    if (schema["x-inputname"]) {
      folder = "inputs";
    } else if (schema["x-response-name"]) {
      folder = "outputs";
    } else {
      folder = "models";
    }
    files[`${folder}/${snakecase2(name)}.py`] = source;
  });

  const schemas = spec.components?.schemas;
  if (schemas) {
    for (const [name, schema] of Object.entries(schemas)) {
      // $ref entries point at other schemas and produce no file of their own.
      if (!isRef2(schema)) {
        emitter.handle(schema, { name });
      }
    }
  }
  return files;
}
|
|
1970
|
+
export {
|
|
1971
|
+
PythonEmitter,
|
|
1972
|
+
generate
|
|
1973
|
+
};
|
|
1974
|
+
//# sourceMappingURL=index.js.map
|