opentelemetry-instrumentation-groq 0.38.6__py3-none-any.whl → 0.38.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of opentelemetry-instrumentation-groq has been flagged as potentially problematic.
- opentelemetry/instrumentation/groq/__init__.py +119 -9
- opentelemetry/instrumentation/groq/version.py +1 -1
- {opentelemetry_instrumentation_groq-0.38.6.dist-info → opentelemetry_instrumentation_groq-0.38.7.dist-info}/METADATA +1 -1
- opentelemetry_instrumentation_groq-0.38.7.dist-info/RECORD +8 -0
- opentelemetry_instrumentation_groq-0.38.6.dist-info/RECORD +0 -8
- {opentelemetry_instrumentation_groq-0.38.6.dist-info → opentelemetry_instrumentation_groq-0.38.7.dist-info}/WHEEL +0 -0
- {opentelemetry_instrumentation_groq-0.38.6.dist-info → opentelemetry_instrumentation_groq-0.38.7.dist-info}/entry_points.txt +0 -0
opentelemetry/instrumentation/groq/__init__.py

@@ -21,7 +21,9 @@ from opentelemetry.instrumentation.groq.version import __version__
 from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
 from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY, unwrap
 from opentelemetry.metrics import Counter, Histogram, Meter, get_meter
-from opentelemetry.semconv._incubating.attributes.gen_ai_attributes import
+from opentelemetry.semconv._incubating.attributes.gen_ai_attributes import (
+    GEN_AI_RESPONSE_ID,
+)
 from opentelemetry.semconv_ai import (
     SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY,
     LLMRequestTypeValues,
@@ -97,7 +99,9 @@ def _set_input_attributes(span, kwargs):
     set_span_attribute(
         span, SpanAttributes.LLM_PRESENCE_PENALTY, kwargs.get("presence_penalty")
     )
-    set_span_attribute(
+    set_span_attribute(
+        span, SpanAttributes.LLM_IS_STREAMING, kwargs.get("stream") or False
+    )
 
     if should_send_prompts():
         if kwargs.get("prompt") is not None:
@@ -124,9 +128,7 @@ def _set_completions(span, choices):
     for choice in choices:
         index = choice.get("index")
         prefix = f"{SpanAttributes.LLM_COMPLETIONS}.{index}"
-        set_span_attribute(
-            span, f"{prefix}.finish_reason", choice.get("finish_reason")
-        )
+        set_span_attribute(span, f"{prefix}.finish_reason", choice.get("finish_reason"))
 
         if choice.get("content_filter_results"):
             set_span_attribute(
@@ -268,6 +270,96 @@ def _create_metrics(meter: Meter):
     return token_histogram, choice_counter, duration_histogram
 
 
+def _process_streaming_chunk(chunk):
+    """Extract content, finish_reason and usage from a streaming chunk."""
+    if not chunk.choices:
+        return None, None, None
+
+    delta = chunk.choices[0].delta
+    content = delta.content if hasattr(delta, "content") else None
+    finish_reason = chunk.choices[0].finish_reason
+
+    # Extract usage from x_groq if present in the final chunk
+    usage = None
+    if hasattr(chunk, "x_groq") and chunk.x_groq and chunk.x_groq.usage:
+        usage = chunk.x_groq.usage
+
+    return content, finish_reason, usage
+
+
+def _set_streaming_response_attributes(
+    span, accumulated_content, finish_reason=None, usage=None
+):
+    """Set span attributes for accumulated streaming response."""
+    if not span.is_recording():
+        return
+
+    prefix = f"{SpanAttributes.LLM_COMPLETIONS}.0"
+    set_span_attribute(span, f"{prefix}.role", "assistant")
+    set_span_attribute(span, f"{prefix}.content", accumulated_content)
+    if finish_reason:
+        set_span_attribute(span, f"{prefix}.finish_reason", finish_reason)
+
+    if usage:
+        set_span_attribute(
+            span, SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, usage.completion_tokens
+        )
+        set_span_attribute(
+            span, SpanAttributes.LLM_USAGE_PROMPT_TOKENS, usage.prompt_tokens
+        )
+        set_span_attribute(
+            span, SpanAttributes.LLM_USAGE_TOTAL_TOKENS, usage.total_tokens
+        )
+
+
+def _create_stream_processor(response, span):
+    """Create a generator that processes a stream while collecting telemetry."""
+    accumulated_content = ""
+    finish_reason = None
+    usage = None
+
+    for chunk in response:
+        content, chunk_finish_reason, chunk_usage = _process_streaming_chunk(chunk)
+        if content:
+            accumulated_content += content
+        if chunk_finish_reason:
+            finish_reason = chunk_finish_reason
+        if chunk_usage:
+            usage = chunk_usage
+        yield chunk
+
+    if span.is_recording():
+        _set_streaming_response_attributes(
+            span, accumulated_content, finish_reason, usage
+        )
+        span.set_status(Status(StatusCode.OK))
+    span.end()
+
+
+async def _create_async_stream_processor(response, span):
+    """Create an async generator that processes a stream while collecting telemetry."""
+    accumulated_content = ""
+    finish_reason = None
+    usage = None
+
+    async for chunk in response:
+        content, chunk_finish_reason, chunk_usage = _process_streaming_chunk(chunk)
+        if content:
+            accumulated_content += content
+        if chunk_finish_reason:
+            finish_reason = chunk_finish_reason
+        if chunk_usage:
+            usage = chunk_usage
+        yield chunk
+
+    if span.is_recording():
+        _set_streaming_response_attributes(
+            span, accumulated_content, finish_reason, usage
+        )
+        span.set_status(Status(StatusCode.OK))
+    span.end()
+
+
 @_with_chat_telemetry_wrapper
 def _wrap(
     tracer: Tracer,
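The accumulation logic in the new helpers can be eyeballed without a real stream. The sketch below is illustration only, not part of the package: it builds hypothetical chunk objects with types.SimpleNamespace in the shape _process_streaming_chunk reads (choices[0].delta.content, choices[0].finish_reason, optional x_groq.usage) and replays the same accumulation loop the stream processors use.

from types import SimpleNamespace

from opentelemetry.instrumentation.groq import _process_streaming_chunk  # private helper added above

# Hypothetical streaming chunks mimicking the Groq SDK objects the helper expects.
chunks = [
    SimpleNamespace(
        choices=[SimpleNamespace(delta=SimpleNamespace(content="Hel"), finish_reason=None)],
        x_groq=None,
    ),
    SimpleNamespace(
        choices=[SimpleNamespace(delta=SimpleNamespace(content="lo"), finish_reason="stop")],
        x_groq=SimpleNamespace(
            usage=SimpleNamespace(prompt_tokens=5, completion_tokens=2, total_tokens=7)
        ),
    ),
]

accumulated, finish_reason, usage = "", None, None
for chunk in chunks:
    content, chunk_finish, chunk_usage = _process_streaming_chunk(chunk)
    if content:
        accumulated += content          # concatenate the streamed text deltas
    if chunk_finish:
        finish_reason = chunk_finish    # keep the last finish_reason seen
    if chunk_usage:
        usage = chunk_usage             # usage arrives only on the final chunk

assert accumulated == "Hello"
assert finish_reason == "stop"
assert usage.total_tokens == 7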
@@ -315,8 +407,16 @@ def _wrap(
     end_time = time.time()
 
     if is_streaming_response(response):
-
-
+        try:
+            return _create_stream_processor(response, span)
+        except Exception as ex:
+            logger.warning(
+                "Failed to process streaming response for groq span, error: %s",
+                str(ex),
+            )
+            span.set_status(Status(StatusCode.ERROR))
+            span.end()
+            raise
     elif response:
         try:
             metric_attributes = shared_metrics_attributes(response)
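For context, a minimal usage sketch of how this path is reached, assuming the package's GroqInstrumentor entry point and the Groq Python SDK; the model name and prompt are illustrative. Once instrumented, a streamed chat completion is returned wrapped by _create_stream_processor, so the span is only closed after the caller drains the stream.

from groq import Groq
from opentelemetry.instrumentation.groq import GroqInstrumentor

GroqInstrumentor().instrument()  # installs the _wrap/_awrap wrappers shown in this diff

client = Groq()  # assumes GROQ_API_KEY is set in the environment
stream = client.chat.completions.create(
    model="llama3-8b-8192",  # illustrative model name
    messages=[{"role": "user", "content": "Say hello"}],
    stream=True,
)
for chunk in stream:  # each chunk passes through the telemetry generator before reaching the caller
    if chunk.choices and chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="")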
@@ -391,9 +491,19 @@ async def _awrap(
 
         raise e
 
+    end_time = time.time()
+
     if is_streaming_response(response):
-
-
+        try:
+            return await _create_async_stream_processor(response, span)
+        except Exception as ex:
+            logger.warning(
+                "Failed to process streaming response for groq span, error: %s",
+                str(ex),
+            )
+            span.set_status(Status(StatusCode.ERROR))
+            span.end()
+            raise
     elif response:
         metric_attributes = shared_metrics_attributes(response)
 
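The async path mirrors the sync one; a hedged sketch using the SDK's AsyncGroq client (same illustrative model and prompt as above), where chunks flow through _create_async_stream_processor as the caller iterates.

import asyncio

from groq import AsyncGroq

async def main():
    client = AsyncGroq()  # assumes GROQ_API_KEY is set in the environment
    stream = await client.chat.completions.create(
        model="llama3-8b-8192",  # illustrative model name
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,
    )
    async for chunk in stream:  # each chunk passes through the async telemetry generator
        if chunk.choices and chunk.choices[0].delta.content:
            print(chunk.choices[0].delta.content, end="")

asyncio.run(main())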
opentelemetry/instrumentation/groq/version.py

@@ -1 +1 @@
-__version__ = "0.38.6"
+__version__ = "0.38.7"
opentelemetry_instrumentation_groq-0.38.7.dist-info/RECORD

@@ -0,0 +1,8 @@
+opentelemetry/instrumentation/groq/__init__.py,sha256=fayNsnzV4_JH6gBkh5AJWcx0VuwKMITTw8cvmX2MXio,19609
+opentelemetry/instrumentation/groq/config.py,sha256=eN2YxQdWlAF-qWPwZZr0xFM-8tx9zUjmiparuB64jcU,170
+opentelemetry/instrumentation/groq/utils.py,sha256=1ESL4NCp8Mjww8cGEzQO_AEqGiSK4JSiMFYUhwBnuao,2151
+opentelemetry/instrumentation/groq/version.py,sha256=RanUfcT5k652mRHEv7z2fcayUpT7ohxKS4Y7VRiWOuY,23
+opentelemetry_instrumentation_groq-0.38.7.dist-info/METADATA,sha256=hoTOgXWHilBqEBjRnUD8BL3pog_ip-eaClovZMUgJCA,2117
+opentelemetry_instrumentation_groq-0.38.7.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
+opentelemetry_instrumentation_groq-0.38.7.dist-info/entry_points.txt,sha256=uezQe06CpIK8xTZZSK0lF29nOKkz_w6VR4sQnb4IAFQ,87
+opentelemetry_instrumentation_groq-0.38.7.dist-info/RECORD,,
opentelemetry_instrumentation_groq-0.38.6.dist-info/RECORD

@@ -1,8 +0,0 @@
-opentelemetry/instrumentation/groq/__init__.py,sha256=RgXnJKztm990BbUtJvOcliYqU-c975JF1CRv9wSalcc,16048
-opentelemetry/instrumentation/groq/config.py,sha256=eN2YxQdWlAF-qWPwZZr0xFM-8tx9zUjmiparuB64jcU,170
-opentelemetry/instrumentation/groq/utils.py,sha256=1ESL4NCp8Mjww8cGEzQO_AEqGiSK4JSiMFYUhwBnuao,2151
-opentelemetry/instrumentation/groq/version.py,sha256=uJq0GABwRg0CsFCDLTKi9uMtcv-BL9qIEL5QqdBeXXE,23
-opentelemetry_instrumentation_groq-0.38.6.dist-info/METADATA,sha256=Yiphzu4bN1WwFwrnOc8vPDne4aiS3yKXe9mcuny6NKk,2117
-opentelemetry_instrumentation_groq-0.38.6.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
-opentelemetry_instrumentation_groq-0.38.6.dist-info/entry_points.txt,sha256=uezQe06CpIK8xTZZSK0lF29nOKkz_w6VR4sQnb4IAFQ,87
-opentelemetry_instrumentation_groq-0.38.6.dist-info/RECORD,,

WHEEL and entry_points.txt: file without changes.