aiqa-client 0.1.0__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
aiqa/__init__.py CHANGED
@@ -13,7 +13,7 @@ from .tracing import (
  exporter,
  )

- __version__ = "0.1.0"
+ __version__ = "0.1.1"

  __all__ = [
  "WithTracing",
aiqa/aiqa_exporter.py CHANGED
@@ -43,6 +43,11 @@ class AIQASpanExporter(SpanExporter):
  self.flush_lock = threading.Lock()
  self.shutdown_requested = False
  self.flush_timer: Optional[threading.Thread] = None
+
+ logger.info(
+ f"Initializing AIQASpanExporter: server_url={self.server_url or 'not set'}, "
+ f"flush_interval={flush_interval_seconds}s"
+ )
  self._start_auto_flush()

  @property
@@ -58,12 +63,19 @@ class AIQASpanExporter(SpanExporter):
  Export spans to the AIQA server. Adds spans to buffer for async flushing.
  """
  if not spans:
+ logger.debug("export() called with empty spans list")
  return SpanExportResult.SUCCESS

  # Serialize and add to buffer
  with self.buffer_lock:
  serialized_spans = [self._serialize_span(span) for span in spans]
  self.buffer.extend(serialized_spans)
+ buffer_size = len(self.buffer)
+
+ logger.debug(
+ f"export() added {len(spans)} span(s) to buffer. "
+ f"Total buffered: {buffer_size}"
+ )

  return SpanExportResult.SUCCESS

@@ -141,13 +153,17 @@ class AIQASpanExporter(SpanExporter):
  """
  Flush buffered spans to the server. Thread-safe: ensures only one flush operation runs at a time.
  """
+ logger.debug("flush() called - attempting to acquire flush lock")
  with self.flush_lock:
+ logger.debug("flush() acquired flush lock")
  # Get current buffer and clear it atomically
  with self.buffer_lock:
  spans_to_flush = self.buffer[:]
  self.buffer.clear()
+ logger.debug(f"flush() extracted {len(spans_to_flush)} span(s) from buffer")

  if not spans_to_flush:
+ logger.debug("flush() completed: no spans to flush")
  return

  # Skip sending if server URL is not configured
@@ -157,43 +173,60 @@ class AIQASpanExporter(SpanExporter):
  )
  return

+ logger.info(f"flush() sending {len(spans_to_flush)} span(s) to server")
  try:
  await self._send_spans(spans_to_flush)
+ logger.info(f"flush() successfully sent {len(spans_to_flush)} span(s) to server")
  except Exception as error:
- logger.error(f"Error flushing spans to server: {error}")
+ logger.error(f"Error flushing spans to server: {error}", exc_info=True)
  if self.shutdown_requested:
  raise

  def _start_auto_flush(self) -> None:
  """Start the auto-flush timer."""
  if self.shutdown_requested:
+ logger.warning("_start_auto_flush() called but shutdown already requested")
  return

+ logger.info(f"Starting auto-flush thread with interval {self.flush_interval_ms / 1000.0}s")
+
  def flush_worker():
  import asyncio
+ logger.debug("Auto-flush worker thread started")
  loop = asyncio.new_event_loop()
  asyncio.set_event_loop(loop)

+ cycle_count = 0
  while not self.shutdown_requested:
+ cycle_count += 1
+ logger.debug(f"Auto-flush cycle #{cycle_count} starting")
  try:
  loop.run_until_complete(self.flush())
+ logger.debug(f"Auto-flush cycle #{cycle_count} completed, sleeping {self.flush_interval_ms / 1000.0}s")
  time.sleep(self.flush_interval_ms / 1000.0)
  except Exception as e:
- logger.error(f"Error in auto-flush: {e}")
+ logger.error(f"Error in auto-flush cycle #{cycle_count}: {e}", exc_info=True)
+ logger.debug(f"Auto-flush cycle #{cycle_count} error handled, sleeping {self.flush_interval_ms / 1000.0}s")
  time.sleep(self.flush_interval_ms / 1000.0)

+ logger.info(f"Auto-flush worker thread stopping (shutdown requested). Completed {cycle_count} cycles.")
+
  # Final flush on shutdown
  if self.shutdown_requested:
+ logger.info("Performing final flush on shutdown")
  try:
  loop.run_until_complete(self.flush())
+ logger.info("Final flush completed successfully")
  except Exception as e:
- logger.error(f"Error in final flush: {e}")
+ logger.error(f"Error in final flush: {e}", exc_info=True)
  finally:
  loop.close()
+ logger.debug("Auto-flush worker thread event loop closed")

- flush_thread = threading.Thread(target=flush_worker, daemon=True)
+ flush_thread = threading.Thread(target=flush_worker, daemon=True, name="AIQA-AutoFlush")
  flush_thread.start()
  self.flush_timer = flush_thread
+ logger.info(f"Auto-flush thread started: {flush_thread.name} (daemon={flush_thread.daemon})")

  async def _send_spans(self, spans: List[Dict[str, Any]]) -> None:
  """Send spans to the server API."""
@@ -202,47 +235,88 @@ class AIQASpanExporter(SpanExporter):

  import aiohttp

- logger.debug(f"Sending {len(spans)} spans to server: {self.server_url}")
+ url = f"{self.server_url}/span"
+ logger.debug(f"_send_spans() sending {len(spans)} spans to {url}")

  headers = {
  "Content-Type": "application/json",
  }
  if self.api_key:
- headers["Authorization"] = f"ApiKey {self.api_key}"
-
- async with aiohttp.ClientSession() as session:
- async with session.post(
- f"{self.server_url}/span",
- json=spans,
- headers=headers,
- ) as response:
- if not response.ok:
- error_text = await response.text()
- raise Exception(
- f"Failed to send spans: {response.status} {response.reason} - {error_text}"
- )
+ headers["Authorization"] = f"ApiKey {self.api_key[:10]}..." # Log partial key for security
+ logger.debug("_send_spans() using API key authentication")
+ else:
+ logger.debug("_send_spans() no API key provided")
+
+ try:
+ async with aiohttp.ClientSession() as session:
+ logger.debug(f"_send_spans() POST request starting to {url}")
+ async with session.post(
+ url,
+ json=spans,
+ headers=headers,
+ ) as response:
+ logger.debug(f"_send_spans() received response: status={response.status}")
+ if not response.ok:
+ error_text = await response.text()
+ logger.error(
+ f"_send_spans() failed: status={response.status}, "
+ f"reason={response.reason}, error={error_text[:200]}"
+ )
+ raise Exception(
+ f"Failed to send spans: {response.status} {response.reason} - {error_text}"
+ )
+ logger.debug(f"_send_spans() successfully sent {len(spans)} spans")
+ except Exception as e:
+ logger.error(f"_send_spans() exception: {type(e).__name__}: {e}", exc_info=True)
+ raise

  def shutdown(self) -> None:
  """Shutdown the exporter, flushing any remaining spans. Call before process exit."""
+ logger.info("shutdown() called - initiating exporter shutdown")
  self.shutdown_requested = True

+ # Check buffer state before shutdown
+ with self.buffer_lock:
+ buffer_size = len(self.buffer)
+ logger.info(f"shutdown() buffer contains {buffer_size} span(s) before shutdown")
+
  # Wait for flush thread to finish (it will do final flush)
  if self.flush_timer and self.flush_timer.is_alive():
+ logger.info("shutdown() waiting for auto-flush thread to complete (timeout=10s)")
  self.flush_timer.join(timeout=10.0)
+ if self.flush_timer.is_alive():
+ logger.warning("shutdown() auto-flush thread did not complete within timeout")
+ else:
+ logger.info("shutdown() auto-flush thread completed")
+ else:
+ logger.debug("shutdown() no active auto-flush thread to wait for")

  # Final flush attempt (synchronous)
  import asyncio
  try:
  loop = asyncio.get_event_loop()
  if loop.is_running():
+ logger.debug("shutdown() event loop is running, using ThreadPoolExecutor for final flush")
  # If loop is running, schedule flush
  import concurrent.futures
  with concurrent.futures.ThreadPoolExecutor() as executor:
  future = executor.submit(asyncio.run, self.flush())
  future.result(timeout=10.0)
  else:
+ logger.debug("shutdown() event loop exists but not running, using run_until_complete")
  loop.run_until_complete(self.flush())
  except RuntimeError:
  # No event loop, create one
+ logger.debug("shutdown() no event loop found, creating new one for final flush")
  asyncio.run(self.flush())
+
+ # Check buffer state after shutdown
+ with self.buffer_lock:
+ buffer_size = len(self.buffer)
+ if buffer_size > 0:
+ logger.warning(f"shutdown() buffer still contains {buffer_size} span(s) after shutdown")
+ else:
+ logger.info("shutdown() buffer is empty after shutdown")
+
+ logger.info("shutdown() completed")

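The exporter changes above are purely additive logging around buffering, flushing, and shutdown. A minimal sketch of how a consumer could surface the new messages, assuming the module follows the usual `logging.getLogger(__name__)` pattern (so the logger would live under the `aiqa` namespace; the exact logger name is not shown in this diff):

    # Hypothetical logging setup to see the new exporter output; standard library only.
    # The "aiqa" logger name is an assumption based on the package layout, not confirmed here.
    import logging

    logging.basicConfig(level=logging.INFO)            # init, flush, and shutdown info messages
    logging.getLogger("aiqa").setLevel(logging.DEBUG)  # per-cycle and per-request debug detail
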
aiqa/tracing.py CHANGED
@@ -181,9 +181,10 @@ def WithTracing(
  input_data = _prepare_input(args, kwargs)
  if filter_input:
  input_data = filter_input(input_data)
- if ignore_input and isinstance(input_data, dict):
- # TODO: implement ignore_input logic
- pass
+ if ignore_input and isinstance(input_data, dict):
+ for key in ignore_input:
+ if key in input_data:
+ del input_data[key]

  if input_data is not None:
  # Serialize for span attributes (OpenTelemetry only accepts primitives or JSON strings)
@@ -201,10 +202,13 @@ def WithTracing(
  # Prepare output
  output_data = result
  if filter_output:
- output_data = filter_output(output_data)
+ output_data = filter_output(output_data)
  if ignore_output and isinstance(output_data, dict):
- # TODO: implement ignore_output logic
- pass
+ # Make a copy of output_data to avoid modifying the original
+ output_data = output_data.copy()
+ for key in ignore_output:
+ if key in output_data:
+ del output_data[key]

  span.set_attribute("output", _serialize_for_span(output_data))
  span.set_status(Status(StatusCode.OK))
@@ -231,8 +235,9 @@ def WithTracing(
  if filter_input:
  input_data = filter_input(input_data)
  if ignore_input and isinstance(input_data, dict):
- # TODO: implement ignore_input logic
- pass
+ for key in ignore_input:
+ if key in input_data:
+ del input_data[key]

  if input_data is not None:
  # Serialize for span attributes (OpenTelemetry only accepts primitives or JSON strings)
@@ -252,8 +257,11 @@ def WithTracing(
  if filter_output:
  output_data = filter_output(output_data)
  if ignore_output and isinstance(output_data, dict):
- # TODO: implement ignore_output logic
- pass
+ # Make a copy of output_data to avoid modifying the original
+ output_data = output_data.copy()
+ for key in ignore_output:
+ if key in output_data:
+ del output_data[key]

  span.set_attribute("output", _serialize_for_span(output_data))
  span.set_status(Status(StatusCode.OK))
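
These hunks replace the `ignore_input`/`ignore_output` TODOs with real key removal: ignored keys are deleted from `input_data` in place, while the output dict is copied before keys are removed. A standalone sketch of that dict-filtering behavior (plain Python, independent of the `WithTracing` decorator; the key and value names are illustrative only):

    # Mirrors the new ignore_output logic from the diff above.
    ignore_output = ["api_key"]
    result = {"answer": 42, "api_key": "secret"}

    output_data = result.copy()          # copy first, so the caller's dict is untouched
    for key in ignore_output:
        if key in output_data:
            del output_data[key]

    print(output_data)  # {'answer': 42} -- what would be recorded on the span
    print(result)       # {'answer': 42, 'api_key': 'secret'} -- original preserved

Note that the input path in the diff deletes keys directly from `input_data` without making a copy.
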
aiqa_client-0.1.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: aiqa-client
- Version: 0.1.0
+ Version: 0.1.1
  Summary: OpenTelemetry-based Python client for tracing functions and sending traces to the AIQA server
  Author-email: AIQA <info@aiqa.dev>
  License: MIT
aiqa_client-0.1.1.dist-info/RECORD ADDED
@@ -0,0 +1,9 @@
+ aiqa/__init__.py,sha256=LEONMsfGaQXePRZN9XxdULWsufbfJnTJ1t1-LU9c-9o,470
+ aiqa/aiqa_exporter.py,sha256=Y0VrZqnb3LG4WSb9XQYct9ABwunG04T9aM8I6EH0qQQ,13781
+ aiqa/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ aiqa/tracing.py,sha256=RwcHk9P_dKoQUv545VVHqnJ4x8jaZkqA4YhdVUXGywc,11889
+ aiqa_client-0.1.1.dist-info/licenses/LICENSE,sha256=kIzkzLuzG0HHaWYm4F4W5FeJ1Yxut3Ec6bhLWyw798A,1062
+ aiqa_client-0.1.1.dist-info/METADATA,sha256=NcmuXu5XyQsq2X-MRn_qaq4pCgnwH-1khBuZruYAwKQ,3740
+ aiqa_client-0.1.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ aiqa_client-0.1.1.dist-info/top_level.txt,sha256=nwcsuVVSuWu27iLxZd4n1evVzv1W6FVTrSnCXCc-NQs,5
+ aiqa_client-0.1.1.dist-info/RECORD,,
aiqa_client-0.1.0.dist-info/RECORD REMOVED
@@ -1,9 +0,0 @@
- aiqa/__init__.py,sha256=FMWyKfD9ZiEd1LtgIPaTSU1LTcvCFqHCw8oth2aO5js,470
- aiqa/aiqa_exporter.py,sha256=hxnvdjCIebVFAqpeb9nXLYEi8A1oDysp7DebXcHA4po,9396
- aiqa/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- aiqa/tracing.py,sha256=Rk_XLVbBZqxiQRZVVi3Sedo9mkDTLUn9rZiUPdC7PTw,11325
- aiqa_client-0.1.0.dist-info/licenses/LICENSE,sha256=kIzkzLuzG0HHaWYm4F4W5FeJ1Yxut3Ec6bhLWyw798A,1062
- aiqa_client-0.1.0.dist-info/METADATA,sha256=gQgQZ6-LLbWucsVxSKQgR856JSgJWWtdUZA064pr-T0,3740
- aiqa_client-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- aiqa_client-0.1.0.dist-info/top_level.txt,sha256=nwcsuVVSuWu27iLxZd4n1evVzv1W6FVTrSnCXCc-NQs,5
- aiqa_client-0.1.0.dist-info/RECORD,,