miso-client 0.2.0-py3-none-any.whl → 0.5.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of miso-client might be problematic.
- miso_client/__init__.py +59 -3
- miso_client/errors.py +22 -1
- miso_client/models/__init__.py +4 -0
- miso_client/models/error_response.py +50 -0
- miso_client/models/filter.py +140 -0
- miso_client/models/pagination.py +66 -0
- miso_client/models/sort.py +25 -0
- miso_client/services/logger.py +7 -6
- miso_client/utils/data_masker.py +77 -5
- miso_client/utils/error_utils.py +104 -0
- miso_client/utils/filter.py +256 -0
- miso_client/utils/http_client.py +517 -212
- miso_client/utils/internal_http_client.py +471 -0
- miso_client/utils/pagination.py +157 -0
- miso_client/utils/sensitive_fields_loader.py +116 -0
- miso_client/utils/sort.py +116 -0
- {miso_client-0.2.0.dist-info → miso_client-0.5.0.dist-info}/METADATA +348 -3
- miso_client-0.5.0.dist-info/RECORD +33 -0
- miso_client-0.2.0.dist-info/RECORD +0 -23
- {miso_client-0.2.0.dist-info → miso_client-0.5.0.dist-info}/WHEEL +0 -0
- {miso_client-0.2.0.dist-info → miso_client-0.5.0.dist-info}/licenses/LICENSE +0 -0
- {miso_client-0.2.0.dist-info → miso_client-0.5.0.dist-info}/top_level.txt +0 -0
miso_client/utils/http_client.py
CHANGED
@@ -1,160 +1,353 @@
 """
-HTTP client utility for controller communication.
+Public HTTP client utility for controller communication with ISO 27001 compliant logging.
 
-This module provides
-
-
+This module provides the public HTTP client interface that wraps InternalHttpClient
+and adds automatic audit and debug logging for all HTTP requests. All sensitive
+data is automatically masked using DataMasker before logging to comply with ISO 27001.
 """
 
-import
-from datetime import datetime, timedelta
+import time
 from typing import Any, Dict, Literal, Optional
+from urllib.parse import parse_qs, urlparse
 
-import
-
-from ..
-from ..
+from ..models.config import MisoClientConfig
+from ..services.logger import LoggerService
+from ..utils.data_masker import DataMasker
+from ..utils.jwt_tools import decode_token
+from .internal_http_client import InternalHttpClient
 
 
 class HttpClient:
-    """
+    """
+    Public HTTP client for Miso Controller communication with ISO 27001 compliant logging.
+
+    This class wraps InternalHttpClient and adds:
+    - Automatic audit logging for all requests
+    - Debug logging when log_level is 'debug'
+    - Automatic data masking for all sensitive information
 
-
+    All sensitive data (headers, bodies, query params) is masked using DataMasker
+    before logging to ensure ISO 27001 compliance.
+    """
+
+    def __init__(self, config: MisoClientConfig, logger: LoggerService):
         """
-        Initialize HTTP client with configuration.
+        Initialize public HTTP client with configuration and logger.
 
         Args:
             config: MisoClient configuration
+            logger: LoggerService instance for audit and debug logging
         """
         self.config = config
-        self.
-        self.
-
-
-
-
-
-
-
-
-
-
-
-
-        )
+        self.logger = logger
+        self._internal_client = InternalHttpClient(config)
+
+    async def close(self):
+        """Close the HTTP client."""
+        await self._internal_client.close()
+
+    async def __aenter__(self):
+        """Async context manager entry."""
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        """Async context manager exit."""
+        await self.close()
 
-    async def
+    async def get_environment_token(self) -> str:
         """
-        Get
+        Get environment token using client credentials.
 
-
+        This is called automatically by HttpClient but can be called manually.
 
         Returns:
            Client token string
+        """
+        return await self._internal_client.get_environment_token()
 
-
-            AuthenticationError: If token fetch fails
+    def _should_skip_logging(self, url: str) -> bool:
         """
-
-
-
-
-
-
-
-
-
-        ):
-            assert self.client_token is not None
-            return self.client_token
-
-        # Acquire lock to prevent concurrent token fetches
-        async with self.token_refresh_lock:
-            # Double-check after acquiring lock
-            if (
-                self.client_token
-                and self.token_expires_at
-                and self.token_expires_at > now + timedelta(seconds=60)
-            ):
-                assert self.client_token is not None
-                return self.client_token
-
-            # Fetch new token
-            await self._fetch_client_token()
-            assert self.client_token is not None
-            return self.client_token
-
-    async def _fetch_client_token(self) -> None:
+        Check if logging should be skipped for this URL.
+
+        Skips logging for /api/logs and /api/auth/token to prevent infinite loops.
+
+        Args:
+            url: Request URL
+
+        Returns:
+            True if logging should be skipped, False otherwise
         """
-
+        # Skip logging for log endpoint (prevent infinite audit loops)
+        if url == "/api/logs" or url.startswith("/api/logs"):
+            return True
 
-
-
+        # Skip logging for token endpoint (client token fetch, prevent loops)
+        if url == "/api/auth/token" or url.startswith("/api/auth/token"):
+            return True
+
+        return False
+
+    def _extract_user_id_from_headers(self, headers: Dict[str, Any]) -> Optional[str]:
         """
-
+        Extract user ID from JWT token in Authorization header.
 
-
-
-        temp_client = httpx.AsyncClient(
-            base_url=self.config.controller_url,
-            timeout=30.0,
-            headers={
-                "Content-Type": "application/json",
-                "x-client-id": self.config.client_id,
-                "x-client-secret": self.config.client_secret,
-            },
-        )
+        Args:
+            headers: Request headers dictionary
 
-
-
+        Returns:
+            User ID if found, None otherwise
+        """
+        auth_header = headers.get("authorization") or headers.get("Authorization")
+        if not auth_header or not isinstance(auth_header, str):
+            return None
 
-
-
-
-
-
+        # Extract token (Bearer <token> format)
+        if auth_header.startswith("Bearer "):
+            token = auth_header[7:]
+        else:
+            token = auth_header
 
-
-
+        try:
+            decoded = decode_token(token)
+            if decoded:
+                return decoded.get("sub") or decoded.get("userId") or decoded.get("user_id")
+        except Exception:
+            pass
 
-
-        raise AuthenticationError("Failed to get client token: Invalid response")
+        return None
 
-
-
-
-
+    async def _log_http_request_audit(
+        self,
+        method: str,
+        url: str,
+        response: Optional[Any] = None,
+        error: Optional[Exception] = None,
+        start_time: float = 0.0,
+        request_data: Optional[Dict[str, Any]] = None,
+        request_headers: Optional[Dict[str, Any]] = None,
+        **kwargs,
+    ) -> None:
+        """
+        Log HTTP request audit event with ISO 27001 compliant data masking.
 
-
-
-
-
-
-
+        Args:
+            method: HTTP method
+            url: Request URL
+            response: Response data (if successful)
+            error: Exception (if request failed)
+            start_time: Request start time
+            request_data: Request body data
+            request_headers: Request headers
+            **kwargs: Additional request parameters
+        """
+        try:
+            # Skip logging for certain endpoints
+            if self._should_skip_logging(url):
+                return
+
+            # Calculate duration
+            duration_ms = int((time.perf_counter() - start_time) * 1000)
+
+            # Extract status code
+            status_code: Optional[int] = None
+            response_size: Optional[int] = None
+            if response is not None:
+                # Response is already parsed JSON from InternalHttpClient
+                # We don't have direct access to status code from parsed response
+                # But we can infer success (no error means success)
+                status_code = 200  # Default assumption if response exists
+                # Estimate response size
+                try:
+                    response_str = str(response)
+                    response_size = len(response_str.encode("utf-8"))
+                except Exception:
+                    pass
+
+            if error is not None:
+                # Extract status code from error if available
+                if hasattr(error, "status_code"):
+                    status_code = error.status_code
+                else:
+                    status_code = 500  # Default for errors
+
+            # Extract user ID from headers
+            user_id: Optional[str] = None
+            if request_headers:
+                user_id = self._extract_user_id_from_headers(request_headers)
+
+            # Calculate request size
+            request_size: Optional[int] = None
+            if request_data is not None:
+                try:
+                    request_str = str(request_data)
+                    request_size = len(request_str.encode("utf-8"))
+                except Exception:
+                    pass
+
+            # Mask sensitive data in error message
+            error_message: Optional[str] = None
+            if error is not None:
+                error_message = str(error)
+                # Mask error message if it contains sensitive data
+                try:
+                    # Try to mask if error message looks like it contains structured data
+                    if isinstance(error_message, str) and any(
+                        keyword in error_message.lower()
+                        for keyword in ["password", "token", "secret", "key"]
+                    ):
+                        error_message = DataMasker.MASKED_VALUE
+                except Exception:
+                    pass
+
+            # Build audit context (all sensitive data must be masked)
+            audit_context: Dict[str, Any] = {
+                "method": method,
+                "url": url,
+                "statusCode": status_code,
+                "duration": duration_ms,
+            }
+
+            if user_id:
+                audit_context["userId"] = user_id
+            if request_size is not None:
+                audit_context["requestSize"] = request_size
+            if response_size is not None:
+                audit_context["responseSize"] = response_size
+            if error_message:
+                audit_context["error"] = error_message
+
+            # Log audit event
+            action = f"http.request.{method.upper()}"
+            await self.logger.audit(action, url, audit_context)
+
+            # Log debug details if log level is debug
+            if self.config.log_level == "debug":
+                await self._log_http_request_debug(
+                    method,
+                    url,
+                    response,
+                    error,
+                    duration_ms,
+                    status_code,
+                    user_id,
+                    request_data,
+                    request_headers,
+                    **kwargs,
+                )
 
-
-
-
-        if self.client:
-            self.client.headers["x-client-token"] = token
+        except Exception:
+            # Silently swallow all logging errors - never break HTTP requests
+            pass
 
-    async def
-
-
-
-
+    async def _log_http_request_debug(
+        self,
+        method: str,
+        url: str,
+        response: Optional[Any],
+        error: Optional[Exception],
+        duration_ms: int,
+        status_code: Optional[int],
+        user_id: Optional[str],
+        request_data: Optional[Dict[str, Any]],
+        request_headers: Optional[Dict[str, Any]],
+        **kwargs,
+    ) -> None:
+        """
+        Log detailed debug information for HTTP request.
 
-
-        """Async context manager entry."""
-        return self
+        All sensitive data is masked before logging.
 
-
-
-
+        Args:
+            method: HTTP method
+            url: Request URL
+            response: Response data
+            error: Exception if request failed
+            duration_ms: Request duration in milliseconds
+            status_code: HTTP status code
+            user_id: User ID if available
+            request_data: Request body data
+            request_headers: Request headers
+            **kwargs: Additional request parameters
+        """
+        try:
+            # Mask request headers
+            masked_request_headers: Optional[Dict[str, Any]] = None
+            if request_headers:
+                masked_request_headers = DataMasker.mask_sensitive_data(request_headers)
+
+            # Mask request body
+            masked_request_body: Optional[Any] = None
+            if request_data is not None:
+                masked_request_body = DataMasker.mask_sensitive_data(request_data)
+
+            # Mask response body (limit to first 1000 characters)
+            # Note: Response headers not available from InternalHttpClient (returns parsed JSON)
+            masked_response_body: Optional[str] = None
+            if response is not None:
+                try:
+                    response_str = str(response)
+                    # Limit to first 1000 characters
+                    if len(response_str) > 1000:
+                        response_str = response_str[:1000] + "..."
+                    # Mask sensitive data
+                    try:
+                        # Try to mask if response is a dict
+                        if isinstance(response, dict):
+                            masked_dict = DataMasker.mask_sensitive_data(response)
+                            masked_response_body = str(masked_dict)
+                        else:
+                            masked_response_body = response_str
+                    except Exception:
+                        masked_response_body = response_str
+                except Exception:
+                    pass
+
+            # Extract query parameters from URL and mask
+            query_params: Optional[Dict[str, Any]] = None
+            try:
+                parsed_url = urlparse(url)
+                if parsed_url.query:
+                    query_dict = parse_qs(parsed_url.query)
+                    # Convert lists to single values for simplicity
+                    query_simple: Dict[str, Any] = {
+                        k: v[0] if len(v) == 1 else v for k, v in query_dict.items()
+                    }
+                    query_params = DataMasker.mask_sensitive_data(query_simple)
+            except Exception:
+                pass
+
+            # Build debug context (all sensitive data must be masked)
+            debug_context: Dict[str, Any] = {
+                "method": method,
+                "url": url,
+                "statusCode": status_code,
+                "duration": duration_ms,
+                "baseURL": self.config.controller_url,
+                "timeout": 30.0,  # Default timeout
+            }
+
+            if user_id:
+                debug_context["userId"] = user_id
+            if masked_request_headers:
+                debug_context["requestHeaders"] = masked_request_headers
+            if masked_request_body is not None:
+                debug_context["requestBody"] = masked_request_body
+            if masked_response_body:
+                debug_context["responseBody"] = masked_response_body
+            if query_params:
+                debug_context["queryParams"] = query_params
+
+            # Log debug message
+            message = f"HTTP {method} {url} - Status: {status_code}, Duration: {duration_ms}ms"
+            await self.logger.debug(message, debug_context)
+
+        except Exception:
+            # Silently swallow all logging errors - never break HTTP requests
+            pass
 
     async def get(self, url: str, **kwargs) -> Any:
         """
-        Make GET request.
+        Make GET request with automatic audit and debug logging.
 
         Args:
             url: Request URL
@@ -166,33 +359,37 @@ class HttpClient:
         Raises:
             MisoClientError: If request fails
         """
-
-
+        start_time = time.perf_counter()
+        request_headers = kwargs.get("headers", {})
         try:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            response = await self._internal_client.get(url, **kwargs)
+            await self._log_http_request_audit(
+                "GET",
+                url,
+                response=response,
+                error=None,
+                start_time=start_time,
+                request_data=None,
+                request_headers=request_headers,
+                **kwargs,
+            )
+            return response
+        except Exception as e:
+            await self._log_http_request_audit(
+                "GET",
+                url,
+                response=None,
+                error=e,
+                start_time=start_time,
+                request_data=None,
+                request_headers=request_headers,
+                **kwargs,
            )
-
-            raise ConnectionError(f"Request failed: {str(e)}")
+            raise
 
     async def post(self, url: str, data: Optional[Dict[str, Any]] = None, **kwargs) -> Any:
         """
-        Make POST request.
+        Make POST request with automatic audit and debug logging.
 
         Args:
             url: Request URL
@@ -205,32 +402,37 @@ class HttpClient:
         Raises:
             MisoClientError: If request fails
         """
-
-
+        start_time = time.perf_counter()
+        request_headers = kwargs.get("headers", {})
        try:
-
-
-
-
-
-
-
-
-
-
-            raise MisoClientError(
-                f"HTTP {e.response.status_code}: {e.response.text}",
-                status_code=e.response.status_code,
-                error_body=e.response.json()
-                if e.response.headers.get("content-type", "").startswith("application/json")
-                else {},
+            response = await self._internal_client.post(url, data, **kwargs)
+            await self._log_http_request_audit(
+                "POST",
+                url,
+                response=response,
+                error=None,
+                start_time=start_time,
+                request_data=data,
+                request_headers=request_headers,
+                **kwargs,
            )
-
-
+            return response
+        except Exception as e:
+            await self._log_http_request_audit(
+                "POST",
+                url,
+                response=None,
+                error=e,
+                start_time=start_time,
+                request_data=data,
+                request_headers=request_headers,
+                **kwargs,
+            )
+            raise
 
     async def put(self, url: str, data: Optional[Dict[str, Any]] = None, **kwargs) -> Any:
         """
-        Make PUT request.
+        Make PUT request with automatic audit and debug logging.
 
         Args:
             url: Request URL
@@ -243,32 +445,37 @@ class HttpClient:
         Raises:
             MisoClientError: If request fails
         """
-
-
+        start_time = time.perf_counter()
+        request_headers = kwargs.get("headers", {})
        try:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            response = await self._internal_client.put(url, data, **kwargs)
+            await self._log_http_request_audit(
+                "PUT",
+                url,
+                response=response,
+                error=None,
+                start_time=start_time,
+                request_data=data,
+                request_headers=request_headers,
+                **kwargs,
+            )
+            return response
+        except Exception as e:
+            await self._log_http_request_audit(
+                "PUT",
+                url,
+                response=None,
+                error=e,
+                start_time=start_time,
+                request_data=data,
+                request_headers=request_headers,
+                **kwargs,
            )
-
-            raise ConnectionError(f"Request failed: {str(e)}")
+            raise
 
     async def delete(self, url: str, **kwargs) -> Any:
         """
-        Make DELETE request.
+        Make DELETE request with automatic audit and debug logging.
 
         Args:
             url: Request URL
@@ -280,28 +487,33 @@ class HttpClient:
         Raises:
             MisoClientError: If request fails
         """
-
-
+        start_time = time.perf_counter()
+        request_headers = kwargs.get("headers", {})
        try:
-
-
-
-
-
-
-
-
-
-
-            raise MisoClientError(
-                f"HTTP {e.response.status_code}: {e.response.text}",
-                status_code=e.response.status_code,
-                error_body=e.response.json()
-                if e.response.headers.get("content-type", "").startswith("application/json")
-                else {},
+            response = await self._internal_client.delete(url, **kwargs)
+            await self._log_http_request_audit(
+                "DELETE",
+                url,
+                response=response,
+                error=None,
+                start_time=start_time,
+                request_data=None,
+                request_headers=request_headers,
+                **kwargs,
            )
-
-
+            return response
+        except Exception as e:
+            await self._log_http_request_audit(
+                "DELETE",
+                url,
+                response=None,
+                error=e,
+                start_time=start_time,
+                request_data=None,
+                request_headers=request_headers,
+                **kwargs,
+            )
+            raise
 
     async def request(
         self,
@@ -311,7 +523,7 @@ class HttpClient:
         **kwargs,
     ) -> Any:
         """
-        Generic request method.
+        Generic request method with automatic audit and debug logging.
 
         Args:
             method: HTTP method
@@ -346,9 +558,9 @@ class HttpClient:
         **kwargs,
    ) -> Any:
         """
-        Make authenticated request with Bearer token.
+        Make authenticated request with Bearer token and automatic audit/debug logging.
 
-        IMPORTANT: Client token is sent as x-client-token header (via
+        IMPORTANT: Client token is sent as x-client-token header (via InternalHttpClient)
        User token is sent as Authorization: Bearer header (this method parameter)
        These are two separate tokens for different purposes.
 
@@ -365,23 +577,116 @@ class HttpClient:
         Raises:
             MisoClientError: If request fails
         """
-
-
-        # Add Bearer token for user authentication
-        # x-client-token is automatically added by _ensure_client_token
+        # Add Bearer token to headers for logging context
        headers = kwargs.get("headers", {})
        headers["Authorization"] = f"Bearer {token}"
        kwargs["headers"] = headers
 
        return await self.request(method, url, data, **kwargs)
 
-    async def
+    async def get_with_filters(
+        self,
+        url: str,
+        filter_builder: Optional[Any] = None,
+        **kwargs,
+    ) -> Any:
         """
-
+        Make GET request with filter builder support.
 
-
+        Args:
+            url: Request URL
+            filter_builder: Optional FilterBuilder instance with filters
+            **kwargs: Additional httpx request parameters
 
         Returns:
-
+            Response data (JSON parsed)
+
+        Raises:
+            MisoClientError: If request fails
+
+        Examples:
+            >>> from miso_client.models.filter import FilterBuilder
+            >>> filter_builder = FilterBuilder().add('status', 'eq', 'active')
+            >>> response = await client.http_client.get_with_filters('/api/items', filter_builder)
         """
-
+        from ..models.filter import FilterQuery
+        from ..utils.filter import build_query_string
+
+        # Build query string from filter builder
+        if filter_builder:
+            # Create FilterQuery from FilterBuilder
+            filter_query = FilterQuery(filters=filter_builder.build())
+            query_string = build_query_string(filter_query)
+
+            # Parse query string into params
+            if query_string:
+                from urllib.parse import parse_qs
+
+                # Parse query string into dict
+                query_params = parse_qs(query_string)
+                # Convert single-item lists to values
+                params = {k: v[0] if len(v) == 1 else v for k, v in query_params.items()}
+
+                # Merge with existing params
+                existing_params = kwargs.get("params", {})
+                if existing_params:
+                    # Merge params (filter builder takes precedence)
+                    merged_params = {**existing_params, **params}
+                else:
+                    merged_params = params
+
+                kwargs["params"] = merged_params
+
+        return await self.get(url, **kwargs)
+
+    async def get_paginated(
+        self,
+        url: str,
+        page: Optional[int] = None,
+        page_size: Optional[int] = None,
+        **kwargs,
+    ) -> Any:
+        """
+        Make GET request with pagination support.
+
+        Args:
+            url: Request URL
+            page: Optional page number (1-based)
+            page_size: Optional number of items per page
+            **kwargs: Additional httpx request parameters
+
+        Returns:
+            PaginatedListResponse with meta and data (or raw response if format doesn't match)
+
+        Raises:
+            MisoClientError: If request fails
+
+        Examples:
+            >>> response = await client.http_client.get_paginated('/api/items', page=1, page_size=25)
+            >>> response.meta.total_items
+            120
+            >>> len(response.data)
+            25
+        """
+        from ..models.pagination import PaginatedListResponse
+
+        # Add pagination params
+        params = kwargs.get("params", {})
+        if page is not None:
+            params["page"] = page
+        if page_size is not None:
+            params["page_size"] = page_size
+
+        if params:
+            kwargs["params"] = params
+
+        # Make request
+        response_data = await self.get(url, **kwargs)
+
+        # Try to parse as PaginatedListResponse
+        try:
+            return PaginatedListResponse(**response_data)
+        except Exception:
+            # If response doesn't match PaginatedListResponse format, return as-is
+            # This allows flexibility for different response formats
+            return response_data