aiqa-client 0.3.1__tar.gz → 0.3.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aiqa_client-0.3.1/aiqa_client.egg-info → aiqa_client-0.3.4}/PKG-INFO +1 -1
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa/__init__.py +1 -1
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa/aiqa_exporter.py +138 -51
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa/object_serialiser.py +54 -19
- {aiqa_client-0.3.1 → aiqa_client-0.3.4/aiqa_client.egg-info}/PKG-INFO +1 -1
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/pyproject.toml +1 -1
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/LICENSE +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/MANIFEST.in +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/README.md +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa/client.py +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa/experiment_runner.py +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa/py.typed +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa/test_experiment_runner.py +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa/test_tracing.py +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa/tracing.py +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa_client.egg-info/SOURCES.txt +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa_client.egg-info/dependency_links.txt +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa_client.egg-info/requires.txt +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/aiqa_client.egg-info/top_level.txt +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/setup.cfg +0 -0
- {aiqa_client-0.3.1 → aiqa_client-0.3.4}/setup.py +0 -0
aiqa/aiqa_exporter.py

@@ -27,6 +27,7 @@ class AIQASpanExporter(SpanExporter):
         server_url: Optional[str] = None,
         api_key: Optional[str] = None,
         flush_interval_seconds: float = 5.0,
+        max_batch_size_bytes: int = 5 * 1024 * 1024,  # 5MB default
     ):
         """
         Initialize the AIQA span exporter.
@@ -35,10 +36,12 @@ class AIQASpanExporter(SpanExporter):
             server_url: URL of the AIQA server (defaults to AIQA_SERVER_URL env var)
             api_key: API key for authentication (defaults to AIQA_API_KEY env var)
             flush_interval_seconds: How often to flush spans to the server
+            max_batch_size_bytes: Maximum size of a single batch in bytes (default: 5mb)
         """
         self._server_url = server_url
         self._api_key = api_key
         self.flush_interval_ms = flush_interval_seconds * 1000
+        self.max_batch_size_bytes = max_batch_size_bytes
         self.buffer: List[Dict[str, Any]] = []
         self.buffer_span_keys: set = set()  # Track (traceId, spanId) tuples to prevent duplicates (Python 3.8 compatible)
         self.buffer_lock = threading.Lock()
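The new `max_batch_size_bytes` argument caps how large a single upload to the server can grow. Below is a minimal sketch of passing it at construction time, assuming the exporter is wired into the OpenTelemetry SDK in the usual way; the URL, key, and processor choice are illustrative and not taken from this diff.

```python
# Illustrative only: shows passing the new max_batch_size_bytes argument.
# The OpenTelemetry wiring here is an assumption, not part of this package diff.
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

from aiqa.aiqa_exporter import AIQASpanExporter

exporter = AIQASpanExporter(
    server_url="https://aiqa.example.com",  # hypothetical URL
    api_key="YOUR_API_KEY",                 # placeholder
    flush_interval_seconds=5.0,
    max_batch_size_bytes=2 * 1024 * 1024,   # cap each upload at ~2MB instead of the 5MB default
)

provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(exporter))
```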
@@ -243,6 +246,59 @@ class AIQASpanExporter(SpanExporter):
             self.buffer.clear()
             self.buffer_span_keys.clear()
 
+    def _split_into_batches(self, spans: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]:
+        """
+        Split spans into batches based on max_batch_size_bytes.
+        Each batch will be as large as possible without exceeding the limit.
+        If a single span exceeds the limit, it will be sent in its own batch with a warning.
+        """
+        if not spans:
+            return []
+
+        batches = []
+        current_batch = []
+        current_batch_size = 0
+
+        for span in spans:
+            # Estimate size of this span when serialized
+            span_json = json.dumps(span)
+            span_size = len(span_json.encode('utf-8'))
+
+            # Check if this single span exceeds the limit
+            if span_size > self.max_batch_size_bytes:
+                # If we have a current batch, save it first
+                if current_batch:
+                    batches.append(current_batch)
+                    current_batch = []
+                    current_batch_size = 0
+
+                # Log warning about oversized span
+                span_name = span.get('name', 'unknown')
+                span_trace_id = span.get('traceId', 'unknown')
+                logger.warning(
+                    f"Span '{span_name}' (traceId={span_trace_id}) exceeds max_batch_size_bytes "
+                    f"({span_size} bytes > {self.max_batch_size_bytes} bytes). "
+                    f"Will attempt to send it anyway - may fail if server/nginx limit is exceeded."
+                )
+                # Still create a batch with just this span - we'll try to send it
+                batches.append([span])
+                continue
+
+            # If adding this span would exceed the limit, start a new batch
+            if current_batch and current_batch_size + span_size > self.max_batch_size_bytes:
+                batches.append(current_batch)
+                current_batch = []
+                current_batch_size = 0
+
+            current_batch.append(span)
+            current_batch_size += span_size
+
+        # Add the last batch if it has any spans
+        if current_batch:
+            batches.append(current_batch)
+
+        return batches
+
     async def flush(self) -> None:
         """
         Flush buffered spans to the server. Thread-safe: ensures only one flush operation runs at a time.
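`_split_into_batches` is a single greedy pass: spans are appended to the current batch until the next one would push the serialized size past the limit, and an oversized span is emitted as a batch of one with a warning. A self-contained sketch of the same greedy idea, with invented names and a made-up size limit (not the package's own method):

```python
import json
from typing import Any, Dict, List

def greedy_size_batches(items: List[Dict[str, Any]], max_bytes: int) -> List[List[Dict[str, Any]]]:
    """Greedy size-based batching, mirroring the approach used by _split_into_batches."""
    batches: List[List[Dict[str, Any]]] = []
    current: List[Dict[str, Any]] = []
    current_size = 0
    for item in items:
        size = len(json.dumps(item).encode("utf-8"))
        if size > max_bytes:                 # oversized item gets a batch of its own
            if current:
                batches.append(current)
                current, current_size = [], 0
            batches.append([item])
            continue
        if current and current_size + size > max_bytes:
            batches.append(current)          # close the current batch and start a new one
            current, current_size = [], 0
        current.append(item)
        current_size += size
    if current:
        batches.append(current)
    return batches

# Example: with a 100-byte limit, three ~43-byte spans split into batches of 2 and 1.
spans = [{"name": f"span-{i}", "payload": "x" * 10} for i in range(3)]
print([len(b) for b in greedy_size_batches(spans, max_bytes=100)])  # [2, 1]
```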
@@ -345,77 +401,108 @@ class AIQASpanExporter(SpanExporter):
         logger.info(f"Auto-flush thread started: {flush_thread.name} (daemon={flush_thread.daemon})")
 
     async def _send_spans(self, spans: List[Dict[str, Any]]) -> None:
-        """Send spans to the server API (async)."""
+        """Send spans to the server API (async). Batches large payloads automatically."""
         import aiohttp
 
+        # Split into batches if needed
+        batches = self._split_into_batches(spans)
+        if len(batches) > 1:
+            logger.info(f"_send_spans() splitting {len(spans)} spans into {len(batches)} batches")
+
         url = self._get_span_url()
         headers = self._build_request_headers()
-
+
         if self.api_key:
             logger.debug("_send_spans() using API key authentication")
         else:
             logger.debug("_send_spans() no API key provided")
 
-        … (previous single-request send implementation removed; the original lines are not preserved in this diff rendering)
+        errors = []
+        async with aiohttp.ClientSession() as session:
+            for batch_idx, batch in enumerate(batches):
+                try:
+                    logger.debug(f"_send_spans() sending batch {batch_idx + 1}/{len(batches)} with {len(batch)} spans to {url}")
+                    # Pre-serialize JSON to bytes and wrap in BytesIO to avoid blocking event loop
+                    json_bytes = json.dumps(batch).encode('utf-8')
+                    data = io.BytesIO(json_bytes)
+
+                    async with session.post(url, data=data, headers=headers) as response:
+                        logger.debug(f"_send_spans() batch {batch_idx + 1} received response: status={response.status}")
+                        if not response.ok:
+                            error_text = await response.text()
+                            error_msg = f"Failed to send batch {batch_idx + 1}/{len(batches)}: {response.status} {response.reason} - {error_text[:200]}"
+                            logger.error(f"_send_spans() {error_msg}")
+                            errors.append((batch_idx + 1, error_msg))
+                            # Continue with other batches even if one fails
+                            continue
+                        logger.debug(f"_send_spans() batch {batch_idx + 1} successfully sent {len(batch)} spans")
+                except RuntimeError as e:
+                    if self._is_interpreter_shutdown_error(e):
+                        if self.shutdown_requested:
+                            logger.debug(f"_send_spans() skipped due to interpreter shutdown: {e}")
+                        else:
+                            logger.warning(f"_send_spans() interrupted by interpreter shutdown: {e}")
+                        raise
+                    error_msg = f"RuntimeError in batch {batch_idx + 1}: {type(e).__name__}: {e}"
+                    logger.error(f"_send_spans() {error_msg}")
+                    errors.append((batch_idx + 1, error_msg))
+                    # Continue with other batches
+                except Exception as e:
+                    error_msg = f"Exception in batch {batch_idx + 1}: {type(e).__name__}: {e}"
+                    logger.error(f"_send_spans() {error_msg}")
+                    errors.append((batch_idx + 1, error_msg))
+                    # Continue with other batches
+
+        # If any batches failed, raise an exception with details
+        if errors:
+            error_summary = "; ".join([f"batch {idx}: {msg}" for idx, msg in errors])
+            raise Exception(f"Failed to send some spans: {error_summary}")
+
+        logger.debug(f"_send_spans() successfully sent all {len(spans)} spans in {len(batches)} batch(es)")
 
     def _send_spans_sync(self, spans: List[Dict[str, Any]]) -> None:
-        """Send spans to the server API (synchronous, for shutdown scenarios)."""
+        """Send spans to the server API (synchronous, for shutdown scenarios). Batches large payloads automatically."""
         import requests
 
+        # Split into batches if needed
+        batches = self._split_into_batches(spans)
+        if len(batches) > 1:
+            logger.info(f"_send_spans_sync() splitting {len(spans)} spans into {len(batches)} batches")
+
         url = self._get_span_url()
         headers = self._build_request_headers()
-
+
         if self.api_key:
             logger.debug("_send_spans_sync() using API key authentication")
         else:
             logger.debug("_send_spans_sync() no API key provided")
 
-        … (previous single-request send implementation removed; the original lines are not preserved in this diff rendering)
+        errors = []
+        for batch_idx, batch in enumerate(batches):
+            try:
+                logger.debug(f"_send_spans_sync() sending batch {batch_idx + 1}/{len(batches)} with {len(batch)} spans to {url}")
+                response = requests.post(url, json=batch, headers=headers, timeout=10.0)
+                logger.debug(f"_send_spans_sync() batch {batch_idx + 1} received response: status={response.status_code}")
+                if not response.ok:
+                    error_text = response.text[:200] if response.text else ""
+                    error_msg = f"Failed to send batch {batch_idx + 1}/{len(batches)}: {response.status_code} {response.reason} - {error_text}"
+                    logger.error(f"_send_spans_sync() {error_msg}")
+                    errors.append((batch_idx + 1, error_msg))
+                    # Continue with other batches even if one fails
+                    continue
+                logger.debug(f"_send_spans_sync() batch {batch_idx + 1} successfully sent {len(batch)} spans")
+            except Exception as e:
+                error_msg = f"Exception in batch {batch_idx + 1}: {type(e).__name__}: {e}"
+                logger.error(f"_send_spans_sync() {error_msg}")
+                errors.append((batch_idx + 1, error_msg))
+                # Continue with other batches
+
+        # If any batches failed, raise an exception with details
+        if errors:
+            error_summary = "; ".join([f"batch {idx}: {msg}" for idx, msg in errors])
+            raise Exception(f"Failed to send some spans: {error_summary}")
+
+        logger.debug(f"_send_spans_sync() successfully sent all {len(spans)} spans in {len(batches)} batch(es)")
 
     def shutdown(self) -> None:
         """Shutdown the exporter, flushing any remaining spans. Call before process exit."""
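Both `_send_spans` and `_send_spans_sync` now follow the same error-handling shape: attempt every batch, record per-batch failures, and raise a single summary exception only after all batches have been tried, so one bad batch cannot block the rest. A generic sketch of that pattern (the `send` callable and batch values here are placeholders, not the package's API):

```python
from typing import Any, Callable, List, Tuple

def send_all_batches(batches: List[Any], send: Callable[[Any], None]) -> None:
    """Attempt every batch; collect failures; raise one summary error at the end."""
    errors: List[Tuple[int, str]] = []
    for idx, batch in enumerate(batches, start=1):
        try:
            send(batch)                      # placeholder for the real HTTP call
        except Exception as e:               # one failed batch must not block the rest
            errors.append((idx, f"{type(e).__name__}: {e}"))
    if errors:
        summary = "; ".join(f"batch {i}: {msg}" for i, msg in errors)
        raise Exception(f"Failed to send some batches: {summary}")

# Usage sketch: batch 2 fails, but batches 1 and 3 are still attempted.
def fake_send(batch):
    if batch == "b2":
        raise ValueError("simulated server error")

try:
    send_all_batches(["b1", "b2", "b3"], fake_send)
except Exception as e:
    print(e)  # Failed to send some batches: batch 2: ValueError: simulated server error
```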
aiqa/object_serialiser.py

@@ -6,9 +6,12 @@ Handles objects, dataclasses, circular references, and size limits.
 import json
 import os
 import dataclasses
+import logging
 from datetime import datetime, date, time
 from typing import Any, Callable, Set
 
+logger = logging.getLogger("aiqa")
+
 def toNumber(value: str|int|None) -> int:
     """Convert string to number. handling units like g, m, k, (also mb kb gb though these should be avoided)"""
     if value is None:
@@ -195,24 +198,39 @@ def object_to_dict(obj: Any, visited: Set[int], max_depth: int = 10, current_dep
         try:
             result = {}
             for k, v in obj.items():
-                … (3 removed lines; originals not preserved in this diff rendering)
+                try:
+                    key_str = str(k) if not isinstance(k, (str, int, float, bool)) else k
+                    filtered_value = _apply_data_filters(key_str, v)
+                    result[key_str] = object_to_dict(filtered_value, visited, max_depth, current_depth + 1)
+                except Exception as e:
+                    # If one key-value pair fails, log and use string representation for the value
+                    key_str = str(k) if not isinstance(k, (str, int, float, bool)) else k
+                    logger.debug(f"Failed to convert dict value for key '{key_str}': {e}")
+                    result[key_str] = safe_str_repr(v)
             visited.remove(obj_id)
             return result
-        except Exception:
+        except Exception as e:
             visited.discard(obj_id)
+            logger.debug(f"Failed to convert dict to dict: {e}")
             return safe_str_repr(obj)
 
     # Handle list/tuple
     if isinstance(obj, (list, tuple)):
         visited.add(obj_id)
         try:
-            result = [ … (rest of line not preserved in this diff rendering)
+            result = []
+            for item in obj:
+                try:
+                    result.append(object_to_dict(item, visited, max_depth, current_depth + 1))
+                except Exception as e:
+                    # If one item fails, log and use its string representation
+                    logger.debug(f"Failed to convert list item {type(item).__name__} to dict: {e}")
+                    result.append(safe_str_repr(item))
             visited.remove(obj_id)
             return result
-        except Exception:
+        except Exception as e:
             visited.discard(obj_id)
+            logger.debug(f"Failed to convert list/tuple to dict: {e}")
             return safe_str_repr(obj)
 
     # Handle dataclasses
@@ -221,13 +239,19 @@ def object_to_dict(obj: Any, visited: Set[int], max_depth: int = 10, current_dep
         try:
             result = {}
             for field in dataclasses.fields(obj):
-                … (3 removed lines; originals not preserved in this diff rendering)
+                try:
+                    value = getattr(obj, field.name, None)
+                    filtered_value = _apply_data_filters(field.name, value)
+                    result[field.name] = object_to_dict(filtered_value, visited, max_depth, current_depth + 1)
+                except Exception as e:
+                    # If accessing a field fails, log and skip it
+                    logger.debug(f"Failed to access field {field.name} on {type(obj).__name__}: {e}")
+                    result[field.name] = "<error accessing field>"
             visited.remove(obj_id)
             return result
-        except Exception:
+        except Exception as e:
             visited.discard(obj_id)
+            logger.debug(f"Failed to convert dataclass {type(obj).__name__} to dict: {e}")
             return safe_str_repr(obj)
 
     # Handle objects with __dict__
@@ -242,8 +266,10 @@ def object_to_dict(obj: Any, visited: Set[int], max_depth: int = 10, current_dep
                 result[key] = object_to_dict(filtered_value, visited, max_depth, current_depth + 1)
             visited.remove(obj_id)
             return result
-        except Exception:
+        except Exception as e:
             visited.discard(obj_id)
+            # Log the error for debugging, but still return string representation
+            logger.debug(f"Failed to convert object {type(obj).__name__} to dict: {e}")
             return safe_str_repr(obj)
 
     # Handle objects with __slots__
@@ -252,14 +278,20 @@ def object_to_dict(obj: Any, visited: Set[int], max_depth: int = 10, current_dep
         try:
             result = {}
             for slot in obj.__slots__:
-                … (4 removed lines; originals not preserved in this diff rendering)
+                try:
+                    if hasattr(obj, slot):
+                        value = getattr(obj, slot, None)
+                        filtered_value = _apply_data_filters(slot, value)
+                        result[slot] = object_to_dict(filtered_value, visited, max_depth, current_depth + 1)
+                except Exception as e:
+                    # If accessing a slot fails, log and skip it
+                    logger.debug(f"Failed to access slot {slot} on {type(obj).__name__}: {e}")
+                    result[slot] = "<error accessing slot>"
             visited.remove(obj_id)
             return result
-        except Exception:
+        except Exception as e:
             visited.discard(obj_id)
+            logger.debug(f"Failed to convert slotted object {type(obj).__name__} to dict: {e}")
             return safe_str_repr(obj)
 
     # Fallback: try to get a few common attributes
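The net effect of these `object_to_dict` changes is that one bad element no longer poisons the whole structure: each dict entry, list item, dataclass field, or slot is converted independently and falls back to a string representation on failure, with the reason logged at debug level. A minimal standalone sketch of that per-item fallback idea (not the package's own function):

```python
import logging
from typing import Any, Dict

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("demo")

def to_plain_dict(d: Dict[str, Any]) -> Dict[str, Any]:
    """Convert values one by one; a failing value becomes a marker instead of failing the whole dict."""
    out: Dict[str, Any] = {}
    for key, value in d.items():
        try:
            out[key] = value if isinstance(value, (str, int, float, bool, type(None))) else str(value)
        except Exception as e:
            log.debug("Failed to convert value for key %r: %s", key, e)
            out[key] = "<unserialisable>"
    return out

class Broken:
    def __str__(self):  # simulate a value whose conversion raises
        raise RuntimeError("boom")

print(to_plain_dict({"ok": 1, "bad": Broken()}))
# {'ok': 1, 'bad': '<unserialisable>'}
```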
@@ -301,14 +333,16 @@ def safe_json_dumps(value: Any) -> str:
     # across the whole object graph
     try:
         converted = object_to_dict(value, visited)
-    except Exception:
+    except Exception as e:
         # If conversion fails, try with a fresh visited set and json default handler
+        logger.debug(f"object_to_dict failed for {type(value).__name__}, trying json.dumps with default handler: {e}")
         try:
             json_str = json.dumps(value, default=json_default_handler_factory(set()))
             if len(json_str) > max_size_chars:
                 return f"<object {type(value)} too large: {len(json_str)} chars (limit: {max_size_chars} chars) begins: {json_str[:100]}... conversion error: {e}>"
             return json_str
-        except Exception:
+        except Exception as e2:
+            logger.debug(f"json.dumps with default handler also failed for {type(value).__name__}: {e2}")
             return safe_str_repr(value)
 
     # Try JSON serialization of the converted structure
@@ -318,7 +352,8 @@ def safe_json_dumps(value: Any) -> str:
         if len(json_str) > max_size_chars:
             return f"<object {type(value)} too large: {len(json_str)} chars (limit: {max_size_chars} chars) begins: {json_str[:100]}...>"
         return json_str
-    except Exception:
+    except Exception as e:
+        logger.debug(f"json.dumps total fail for {type(value).__name__}: {e2}")
         # Final fallback
         return safe_str_repr(value)
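`safe_json_dumps` thus keeps its layered fallback chain, now with logging at each step: structured conversion via `object_to_dict` first, a plain `json.dumps` with a default handler second, and a size-capped string representation last. A compact sketch of that layering, with invented names and a simplified final layer:

```python
import json
from datetime import datetime
from typing import Any

def dumps_with_fallbacks(value: Any, max_chars: int = 10_000) -> str:
    """Three-layer serialization: plain json.dumps, then default=str coercion, then a truncated repr."""
    try:
        text = json.dumps(value)                   # layer 1: plain JSON
    except Exception:
        try:
            text = json.dumps(value, default=str)  # layer 2: coerce unknown types to strings
        except Exception:
            return repr(value)[:max_chars]         # layer 3: last-resort repr, size-capped
    return text if len(text) <= max_chars else text[:max_chars]

print(dumps_with_fallbacks({"when": datetime(2024, 1, 1)}))
# {"when": "2024-01-01 00:00:00"}
```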
pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "aiqa-client"
-version = "0.3.1"
+version = "0.3.4"
 description = "OpenTelemetry-based Python client for tracing functions and sending traces to the AIQA server"
 readme = "README.md"
 requires-python = ">=3.8"