sentry-sdk 2.37.1__tar.gz → 2.38.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: the registry flags this version of sentry-sdk as possibly problematic.
- {sentry_sdk-2.37.1/sentry_sdk.egg-info → sentry_sdk-2.38.0}/PKG-INFO +1 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/__init__.py +4 -2
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/_types.py +1 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/ai/utils.py +11 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/consts.py +2 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/envelope.py +1 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/__init__.py +1 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/anthropic.py +47 -12
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/asyncio.py +2 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/dedupe.py +3 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/gql.py +22 -5
- sentry_sdk-2.38.0/sentry_sdk/integrations/huggingface_hub.py +377 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/langchain.py +5 -3
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/patches/agent_run.py +4 -4
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/spans/agent_workflow.py +2 -2
- sentry_sdk-2.38.0/sentry_sdk/integrations/openai_agents/spans/invoke_agent.py +78 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/utils.py +0 -10
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/threading.py +1 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/profiler/continuous_profiler.py +13 -3
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/tracing.py +1 -2
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/tracing_utils.py +18 -22
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/utils.py +6 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0/sentry_sdk.egg-info}/PKG-INFO +1 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/setup.py +1 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_dsc.py +1 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_envelope.py +22 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_monitor.py +1 -1
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_propagationcontext.py +10 -7
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_transport.py +38 -1
- sentry_sdk-2.37.1/sentry_sdk/integrations/huggingface_hub.py +0 -181
- sentry_sdk-2.37.1/sentry_sdk/integrations/openai_agents/spans/invoke_agent.py +0 -34
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/LICENSE +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/MANIFEST.in +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/README.md +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/pyproject.toml +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/_compat.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/_init_implementation.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/_log_batcher.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/_lru_cache.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/_queue.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/_werkzeug.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/ai/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/ai/monitoring.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/api.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/attachments.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/client.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/crons/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/crons/api.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/crons/consts.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/crons/decorator.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/debug.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/feature_flags.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/hub.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/_asgi_common.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/_wsgi_common.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/aiohttp.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/argv.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/ariadne.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/arq.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/asgi.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/asyncpg.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/atexit.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/aws_lambda.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/beam.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/boto3.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/bottle.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/celery/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/celery/beat.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/celery/utils.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/chalice.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/clickhouse_driver.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/cloud_resource_context.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/cohere.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/django/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/django/asgi.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/django/caching.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/django/middleware.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/django/signals_handlers.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/django/templates.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/django/transactions.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/django/views.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/dramatiq.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/excepthook.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/executing.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/falcon.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/fastapi.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/flask.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/gcp.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/gnu_backtrace.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/graphene.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/grpc/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/grpc/aio/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/grpc/aio/client.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/grpc/aio/server.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/grpc/client.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/grpc/consts.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/grpc/server.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/httpx.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/huey.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/langgraph.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/launchdarkly.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/litestar.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/logging.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/loguru.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/modules.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/consts.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/patches/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/patches/models.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/patches/runner.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/patches/tools.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/spans/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/spans/ai_client.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/spans/execute_tool.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openai_agents/spans/handoff.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/openfeature.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/opentelemetry/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/opentelemetry/consts.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/opentelemetry/integration.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/opentelemetry/propagator.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/opentelemetry/span_processor.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/pure_eval.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/pymongo.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/pyramid.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/quart.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/ray.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/_async_common.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/_sync_common.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/consts.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/modules/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/modules/caches.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/modules/queries.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/rb.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/redis.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/redis_cluster.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/redis/utils.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/rq.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/rust_tracing.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/sanic.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/serverless.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/socket.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/spark/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/spark/spark_driver.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/spark/spark_worker.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/sqlalchemy.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/starlette.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/starlite.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/statsig.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/stdlib.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/strawberry.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/sys_exit.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/tornado.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/trytond.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/typer.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/unleash.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/unraisablehook.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/wsgi.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/logger.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/metrics.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/monitor.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/profiler/__init__.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/profiler/transaction_profiler.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/profiler/utils.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/py.typed +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/scope.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/scrubber.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/serializer.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/session.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/sessions.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/spotlight.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/transport.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/types.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/worker.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk.egg-info/SOURCES.txt +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk.egg-info/dependency_links.txt +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk.egg-info/entry_points.txt +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk.egg-info/not-zip-safe +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk.egg-info/requires.txt +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk.egg-info/top_level.txt +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/setup.cfg +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_ai_monitoring.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_api.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_basics.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_client.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_conftest.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_crons.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_exceptiongroup.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_feature_flags.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_full_stack_frames.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_gevent.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_import.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_logs.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_lru_cache.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_metrics.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_scope.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_scrubber.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_serializer.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_sessions.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_spotlight.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_tracing_utils.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_types.py +0 -0
- {sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/tests/test_utils.py +0 -0
{sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/__init__.py
@@ -1,10 +1,10 @@
+from sentry_sdk import profiler
 from sentry_sdk.scope import Scope
 from sentry_sdk.transport import Transport, HttpTransport
 from sentry_sdk.client import Client

 from sentry_sdk.api import *  # noqa
-
-from sentry_sdk.consts import VERSION  # noqa
+from sentry_sdk.consts import VERSION

 __all__ = [  # noqa
     "Hub",
@@ -12,6 +12,7 @@ __all__ = [  # noqa
     "Client",
     "Transport",
     "HttpTransport",
+    "VERSION",
     "integrations",
     # From sentry_sdk.api
     "init",
@@ -47,6 +48,7 @@ __all__ = [  # noqa
     "trace",
     "monitor",
     "logger",
+    "profiler",
     "start_session",
     "end_session",
     "set_transaction_name",
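The upshot of this change is that `profiler` and `VERSION` are now re-exported from the package root. A minimal illustration:

import sentry_sdk

# VERSION is now listed in __all__ and readable straight from the package root.
print(sentry_sdk.VERSION)

# The profiler module is imported eagerly, so it is reachable as an attribute
# without a separate `import sentry_sdk.profiler`.
print(sentry_sdk.profiler)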
{sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/ai/utils.py
@@ -3,9 +3,10 @@ import json
 from typing import TYPE_CHECKING

 if TYPE_CHECKING:
-    from typing import Any
+    from typing import Any, Callable
     from sentry_sdk.tracing import Span

+import sentry_sdk
 from sentry_sdk.utils import logger


@@ -37,3 +38,12 @@ def set_data_normalized(span, key, value, unpack=True):
         span.set_data(key, normalized)
     else:
         span.set_data(key, json.dumps(normalized))
+
+
+def get_start_span_function():
+    # type: () -> Callable[..., Any]
+    current_span = sentry_sdk.get_current_span()
+    transaction_exists = (
+        current_span is not None and current_span.containing_transaction is not None
+    )
+    return sentry_sdk.start_span if transaction_exists else sentry_sdk.start_transaction
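The new `get_start_span_function()` helper lets AI integrations open a child span when a transaction is already active and fall back to starting a new transaction otherwise. A sketch of how a caller might use it (the op and name values are illustrative, not prescribed by the SDK):

import sentry_sdk
from sentry_sdk.ai.utils import get_start_span_function
from sentry_sdk.consts import OP

# Inside an active transaction this resolves to sentry_sdk.start_span, so the
# AI call becomes a child span; outside a transaction it resolves to
# sentry_sdk.start_transaction, so the call still produces a trace of its own.
start = get_start_span_function()
with start(op=OP.GEN_AI_CHAT, name="chat my-model"):
    ...  # run the model call here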
{sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/consts.py
@@ -795,6 +795,7 @@ class OP:
     GEN_AI_CREATE_AGENT = "gen_ai.create_agent"
     GEN_AI_EMBEDDINGS = "gen_ai.embeddings"
     GEN_AI_EXECUTE_TOOL = "gen_ai.execute_tool"
+    GEN_AI_GENERATE_TEXT = "gen_ai.generate_text"
     GEN_AI_HANDOFF = "gen_ai.handoff"
     GEN_AI_PIPELINE = "gen_ai.pipeline"
     GEN_AI_INVOKE_AGENT = "gen_ai.invoke_agent"
@@ -1330,4 +1331,4 @@ DEFAULT_OPTIONS = _get_default_options()
 del _get_default_options


-VERSION = "2.37.1"
+VERSION = "2.38.0"
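The new `OP.GEN_AI_GENERATE_TEXT` constant ("gen_ai.generate_text") is what the Hugging Face integration further down uses for `text_generation` spans. For orientation, a manually created span with the same op would look like this (the span name is illustrative):

import sentry_sdk
from sentry_sdk.consts import OP

with sentry_sdk.start_span(op=OP.GEN_AI_GENERATE_TEXT, name="generate_text my-model"):
    ...  # the wrapped text-generation call runs here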
{sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/anthropic.py
@@ -1,10 +1,9 @@
 from functools import wraps
-import json
 from typing import TYPE_CHECKING

 import sentry_sdk
 from sentry_sdk.ai.monitoring import record_token_usage
-from sentry_sdk.ai.utils import set_data_normalized
+from sentry_sdk.ai.utils import set_data_normalized, get_start_span_function
 from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
 from sentry_sdk.scope import should_send_default_pii
@@ -117,8 +116,29 @@ def _set_input_data(span, kwargs, integration):
         and should_send_default_pii()
         and integration.include_prompts
     ):
+        normalized_messages = []
+        for message in messages:
+            if (
+                message.get("role") == "user"
+                and "content" in message
+                and isinstance(message["content"], (list, tuple))
+            ):
+                for item in message["content"]:
+                    if item.get("type") == "tool_result":
+                        normalized_messages.append(
+                            {
+                                "role": "tool",
+                                "content": {
+                                    "tool_use_id": item.get("tool_use_id"),
+                                    "output": item.get("content"),
+                                },
+                            }
+                        )
+            else:
+                normalized_messages.append(message)
+
         set_data_normalized(
-            span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages, unpack=False
+            span, SPANDATA.GEN_AI_REQUEST_MESSAGES, normalized_messages, unpack=False
         )

     set_data_normalized(
@@ -159,12 +179,29 @@ def _set_output_data(
     Set output data for the span based on the AI response."""
     span.set_data(SPANDATA.GEN_AI_RESPONSE_MODEL, model)
     if should_send_default_pii() and integration.include_prompts:
-
-
-
-
-
-
+        output_messages = {
+            "response": [],
+            "tool": [],
+        }  # type: (dict[str, list[Any]])
+
+        for output in content_blocks:
+            if output["type"] == "text":
+                output_messages["response"].append(output["text"])
+            elif output["type"] == "tool_use":
+                output_messages["tool"].append(output)
+
+        if len(output_messages["tool"]) > 0:
+            set_data_normalized(
+                span,
+                SPANDATA.GEN_AI_RESPONSE_TOOL_CALLS,
+                output_messages["tool"],
+                unpack=False,
+            )
+
+        if len(output_messages["response"]) > 0:
+            set_data_normalized(
+                span, SPANDATA.GEN_AI_RESPONSE_TEXT, output_messages["response"]
+            )

     record_token_usage(
         span,
@@ -172,8 +209,6 @@ def _set_output_data(
         output_tokens=output_tokens,
     )

-    # TODO: GEN_AI_RESPONSE_TOOL_CALLS ?
-
     if finish_span:
         span.__exit__(None, None, None)

@@ -194,7 +229,7 @@ def _sentry_patched_create_common(f, *args, **kwargs):

     model = kwargs.get("model", "")

-    span = sentry_sdk.start_span(
+    span = get_start_span_function()(
         op=OP.GEN_AI_CHAT,
         name=f"chat {model}".strip(),
         origin=AnthropicIntegration.origin,
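On the input side, the Anthropic integration now rewrites `tool_result` content blocks in user messages into synthetic `role: "tool"` entries before attaching them to the span; on the output side, `tool_use` blocks are reported under `GEN_AI_RESPONSE_TOOL_CALLS` and text blocks under `GEN_AI_RESPONSE_TEXT`. A small illustration of the input normalization on made-up data:

messages = [
    {"role": "user", "content": "What is the weather in Paris?"},
    {
        "role": "user",
        "content": [
            {"type": "tool_result", "tool_use_id": "toolu_123", "content": "21°C, sunny"},
        ],
    },
]

# After _set_input_data normalizes this, gen_ai.request.messages contains the
# first message unchanged plus, in place of the second message:
#   {"role": "tool", "content": {"tool_use_id": "toolu_123", "output": "21°C, sunny"}}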
{sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/dedupe.py
@@ -1,5 +1,5 @@
 import sentry_sdk
-from sentry_sdk.utils import ContextVar
+from sentry_sdk.utils import ContextVar, logger
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor

@@ -37,7 +37,9 @@ class DedupeIntegration(Integration):

             exc = exc_info[1]
             if integration._last_seen.get(None) is exc:
+                logger.info("DedupeIntegration dropped duplicated error event %s", exc)
                 return None
+
             integration._last_seen.set(exc)
             return event

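Deduplication behavior itself is unchanged; the integration now just logs when it drops a duplicate. The message goes through the SDK's internal logger, so it typically only shows up when debug output is enabled. A sketch (the DSN is a placeholder):

import sentry_sdk

# With debug=True the SDK prints its internal log, including the new
# "DedupeIntegration dropped duplicated error event ..." message.
sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0", debug=True)

try:
    raise ValueError("boom")
except ValueError as exc:
    sentry_sdk.capture_exception(exc)
    sentry_sdk.capture_exception(exc)  # same exception object -> dropped by DedupeIntegration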
{sentry_sdk-2.37.1 → sentry_sdk-2.38.0}/sentry_sdk/integrations/gql.py
@@ -18,6 +18,13 @@ try:
     )
     from gql.transport import Transport, AsyncTransport  # type: ignore[import-not-found]
     from gql.transport.exceptions import TransportQueryError  # type: ignore[import-not-found]
+
+    try:
+        # gql 4.0+
+        from gql import GraphQLRequest
+    except ImportError:
+        GraphQLRequest = None
+
 except ImportError:
     raise DidNotEnable("gql is not installed")

@@ -92,13 +99,13 @@ def _patch_execute():
     real_execute = gql.Client.execute

     @ensure_integration_enabled(GQLIntegration, real_execute)
-    def sentry_patched_execute(self, document, *args, **kwargs):
+    def sentry_patched_execute(self, document_or_request, *args, **kwargs):
         # type: (gql.Client, DocumentNode, Any, Any) -> Any
         scope = sentry_sdk.get_isolation_scope()
-        scope.add_event_processor(_make_gql_event_processor(self, document))
+        scope.add_event_processor(_make_gql_event_processor(self, document_or_request))

         try:
-            return real_execute(self, document, *args, **kwargs)
+            return real_execute(self, document_or_request, *args, **kwargs)
         except TransportQueryError as e:
             event, hint = event_from_exception(
                 e,
@@ -112,8 +119,8 @@ def _patch_execute():
     gql.Client.execute = sentry_patched_execute


-def _make_gql_event_processor(client, document):
-    # type: (gql.Client, DocumentNode) -> EventProcessor
+def _make_gql_event_processor(client, document_or_request):
+    # type: (gql.Client, Union[DocumentNode, gql.GraphQLRequest]) -> EventProcessor
     def processor(event, hint):
         # type: (Event, dict[str, Any]) -> Event
         try:
@@ -130,6 +137,16 @@ def _make_gql_event_processor(client, document):
         )

         if should_send_default_pii():
+            if GraphQLRequest is not None and isinstance(
+                document_or_request, GraphQLRequest
+            ):
+                # In v4.0.0, gql moved to using GraphQLRequest instead of
+                # DocumentNode in execute
+                # https://github.com/graphql-python/gql/pull/556
+                document = document_or_request.document
+            else:
+                document = document_or_request
+
             request["data"] = _data_from_document(document)
             contexts = event.setdefault("contexts", {})
             response = contexts.setdefault("response", {})
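In gql 4.0, `Client.execute()` takes a `GraphQLRequest` rather than a bare `DocumentNode` (graphql-python/gql#556), so the patched `execute` and its event processor now accept either shape and unwrap `.document` before reading the query. A sketch of the two shapes the integration now handles (the query is illustrative, and the `GraphQLRequest(...)` construction assumes the gql 4 API; check its docs for the exact signature):

from gql import gql

try:
    from gql import GraphQLRequest  # gql 4.0+ only
except ImportError:
    GraphQLRequest = None

document_or_request = gql("query { hello }")  # DocumentNode, gql 3.x style
if GraphQLRequest is not None:
    document_or_request = GraphQLRequest(document_or_request)  # gql 4.x style

# Mirrors what the event processor does before extracting the query text:
if GraphQLRequest is not None and isinstance(document_or_request, GraphQLRequest):
    document = document_or_request.document
else:
    document = document_or_request

In application code either object is what gets passed to `Client.execute()`, and the resulting error events carry the same `request.data` in both cases.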
sentry_sdk-2.38.0/sentry_sdk/integrations/huggingface_hub.py
@@ -0,0 +1,377 @@
+import inspect
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk.ai.monitoring import record_token_usage
+from sentry_sdk.ai.utils import set_data_normalized
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Iterable
+
+try:
+    import huggingface_hub.inference._client
+except ImportError:
+    raise DidNotEnable("Huggingface not installed")
+
+
+class HuggingfaceHubIntegration(Integration):
+    identifier = "huggingface_hub"
+    origin = f"auto.ai.{identifier}"
+
+    def __init__(self, include_prompts=True):
+        # type: (HuggingfaceHubIntegration, bool) -> None
+        self.include_prompts = include_prompts
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        # Other tasks that can be called: https://huggingface.co/docs/huggingface_hub/guides/inference#supported-providers-and-tasks
+        huggingface_hub.inference._client.InferenceClient.text_generation = (
+            _wrap_huggingface_task(
+                huggingface_hub.inference._client.InferenceClient.text_generation,
+                OP.GEN_AI_GENERATE_TEXT,
+            )
+        )
+        huggingface_hub.inference._client.InferenceClient.chat_completion = (
+            _wrap_huggingface_task(
+                huggingface_hub.inference._client.InferenceClient.chat_completion,
+                OP.GEN_AI_CHAT,
+            )
+        )
+
+
+def _capture_exception(exc):
+    # type: (Any) -> None
+    event, hint = event_from_exception(
+        exc,
+        client_options=sentry_sdk.get_client().options,
+        mechanism={"type": "huggingface_hub", "handled": False},
+    )
+    sentry_sdk.capture_event(event, hint=hint)
+
+
+def _wrap_huggingface_task(f, op):
+    # type: (Callable[..., Any], str) -> Callable[..., Any]
+    @wraps(f)
+    def new_huggingface_task(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        integration = sentry_sdk.get_client().get_integration(HuggingfaceHubIntegration)
+        if integration is None:
+            return f(*args, **kwargs)
+
+        prompt = None
+        if "prompt" in kwargs:
+            prompt = kwargs["prompt"]
+        elif "messages" in kwargs:
+            prompt = kwargs["messages"]
+        elif len(args) >= 2:
+            if isinstance(args[1], str) or isinstance(args[1], list):
+                prompt = args[1]
+
+        if prompt is None:
+            # invalid call, dont instrument, let it return error
+            return f(*args, **kwargs)
+
+        client = args[0]
+        model = client.model or kwargs.get("model") or ""
+        operation_name = op.split(".")[-1]
+
+        span = sentry_sdk.start_span(
+            op=op,
+            name=f"{operation_name} {model}",
+            origin=HuggingfaceHubIntegration.origin,
+        )
+        span.__enter__()
+
+        span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, operation_name)
+
+        if model:
+            span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model)
+
+        # Input attributes
+        if should_send_default_pii() and integration.include_prompts:
+            set_data_normalized(
+                span, SPANDATA.GEN_AI_REQUEST_MESSAGES, prompt, unpack=False
+            )
+
+        attribute_mapping = {
+            "tools": SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS,
+            "frequency_penalty": SPANDATA.GEN_AI_REQUEST_FREQUENCY_PENALTY,
+            "max_tokens": SPANDATA.GEN_AI_REQUEST_MAX_TOKENS,
+            "presence_penalty": SPANDATA.GEN_AI_REQUEST_PRESENCE_PENALTY,
+            "temperature": SPANDATA.GEN_AI_REQUEST_TEMPERATURE,
+            "top_p": SPANDATA.GEN_AI_REQUEST_TOP_P,
+            "top_k": SPANDATA.GEN_AI_REQUEST_TOP_K,
+            "stream": SPANDATA.GEN_AI_RESPONSE_STREAMING,
+        }
+
+        for attribute, span_attribute in attribute_mapping.items():
+            value = kwargs.get(attribute, None)
+            if value is not None:
+                if isinstance(value, (int, float, bool, str)):
+                    span.set_data(span_attribute, value)
+                else:
+                    set_data_normalized(span, span_attribute, value, unpack=False)
+
+        # LLM Execution
+        try:
+            res = f(*args, **kwargs)
+        except Exception as e:
+            # Error Handling
+            span.set_status("error")
+            _capture_exception(e)
+            span.__exit__(None, None, None)
+            raise e from None
+
+        # Output attributes
+        finish_reason = None
+        response_model = None
+        response_text_buffer: list[str] = []
+        tokens_used = 0
+        tool_calls = None
+        usage = None
+
+        with capture_internal_exceptions():
+            if isinstance(res, str) and res is not None:
+                response_text_buffer.append(res)
+
+            if hasattr(res, "generated_text") and res.generated_text is not None:
+                response_text_buffer.append(res.generated_text)
+
+            if hasattr(res, "model") and res.model is not None:
+                response_model = res.model
+
+            if hasattr(res, "details") and hasattr(res.details, "finish_reason"):
+                finish_reason = res.details.finish_reason
+
+            if (
+                hasattr(res, "details")
+                and hasattr(res.details, "generated_tokens")
+                and res.details.generated_tokens is not None
+            ):
+                tokens_used = res.details.generated_tokens
+
+            if hasattr(res, "usage") and res.usage is not None:
+                usage = res.usage
+
+            if hasattr(res, "choices") and res.choices is not None:
+                for choice in res.choices:
+                    if hasattr(choice, "finish_reason"):
+                        finish_reason = choice.finish_reason
+                    if hasattr(choice, "message") and hasattr(
+                        choice.message, "tool_calls"
+                    ):
+                        tool_calls = choice.message.tool_calls
+                    if (
+                        hasattr(choice, "message")
+                        and hasattr(choice.message, "content")
+                        and choice.message.content is not None
+                    ):
+                        response_text_buffer.append(choice.message.content)
+
+            if response_model is not None:
+                span.set_data(SPANDATA.GEN_AI_RESPONSE_MODEL, response_model)
+
+            if finish_reason is not None:
+                set_data_normalized(
+                    span,
+                    SPANDATA.GEN_AI_RESPONSE_FINISH_REASONS,
+                    finish_reason,
+                )
+
+            if should_send_default_pii() and integration.include_prompts:
+                if tool_calls is not None and len(tool_calls) > 0:
+                    set_data_normalized(
+                        span,
+                        SPANDATA.GEN_AI_RESPONSE_TOOL_CALLS,
+                        tool_calls,
+                        unpack=False,
+                    )
+
+                if len(response_text_buffer) > 0:
+                    text_response = "".join(response_text_buffer)
+                    if text_response:
+                        set_data_normalized(
+                            span,
+                            SPANDATA.GEN_AI_RESPONSE_TEXT,
+                            text_response,
+                        )
+
+            if usage is not None:
+                record_token_usage(
+                    span,
+                    input_tokens=usage.prompt_tokens,
+                    output_tokens=usage.completion_tokens,
+                    total_tokens=usage.total_tokens,
+                )
+            elif tokens_used > 0:
+                record_token_usage(
+                    span,
+                    total_tokens=tokens_used,
+                )
+
+        # If the response is not a generator (meaning a streaming response)
+        # we are done and can return the response
+        if not inspect.isgenerator(res):
+            span.__exit__(None, None, None)
+            return res
+
+        if kwargs.get("details", False):
+            # text-generation stream output
+            def new_details_iterator():
+                # type: () -> Iterable[Any]
+                finish_reason = None
+                response_text_buffer: list[str] = []
+                tokens_used = 0
+
+                with capture_internal_exceptions():
+                    for chunk in res:
+                        if (
+                            hasattr(chunk, "token")
+                            and hasattr(chunk.token, "text")
+                            and chunk.token.text is not None
+                        ):
+                            response_text_buffer.append(chunk.token.text)
+
+                        if hasattr(chunk, "details") and hasattr(
+                            chunk.details, "finish_reason"
+                        ):
+                            finish_reason = chunk.details.finish_reason
+
+                        if (
+                            hasattr(chunk, "details")
+                            and hasattr(chunk.details, "generated_tokens")
+                            and chunk.details.generated_tokens is not None
+                        ):
+                            tokens_used = chunk.details.generated_tokens
+
+                        yield chunk
+
+                    if finish_reason is not None:
+                        set_data_normalized(
+                            span,
+                            SPANDATA.GEN_AI_RESPONSE_FINISH_REASONS,
+                            finish_reason,
+                        )
+
+                    if should_send_default_pii() and integration.include_prompts:
+                        if len(response_text_buffer) > 0:
+                            text_response = "".join(response_text_buffer)
+                            if text_response:
+                                set_data_normalized(
+                                    span,
+                                    SPANDATA.GEN_AI_RESPONSE_TEXT,
+                                    text_response,
+                                )
+
+                    if tokens_used > 0:
+                        record_token_usage(
+                            span,
+                            total_tokens=tokens_used,
+                        )
+
+                span.__exit__(None, None, None)
+
+            return new_details_iterator()
+
+        else:
+            # chat-completion stream output
+            def new_iterator():
+                # type: () -> Iterable[str]
+                finish_reason = None
+                response_model = None
+                response_text_buffer: list[str] = []
+                tool_calls = None
+                usage = None
+
+                with capture_internal_exceptions():
+                    for chunk in res:
+                        if hasattr(chunk, "model") and chunk.model is not None:
+                            response_model = chunk.model
+
+                        if hasattr(chunk, "usage") and chunk.usage is not None:
+                            usage = chunk.usage
+
+                        if isinstance(chunk, str):
+                            if chunk is not None:
+                                response_text_buffer.append(chunk)
+
+                        if hasattr(chunk, "choices") and chunk.choices is not None:
+                            for choice in chunk.choices:
+                                if (
+                                    hasattr(choice, "delta")
+                                    and hasattr(choice.delta, "content")
+                                    and choice.delta.content is not None
+                                ):
+                                    response_text_buffer.append(
+                                        choice.delta.content
+                                    )
+
+                                if (
+                                    hasattr(choice, "finish_reason")
+                                    and choice.finish_reason is not None
+                                ):
+                                    finish_reason = choice.finish_reason
+
+                                if (
+                                    hasattr(choice, "delta")
+                                    and hasattr(choice.delta, "tool_calls")
+                                    and choice.delta.tool_calls is not None
+                                ):
+                                    tool_calls = choice.delta.tool_calls
+
+                        yield chunk
+
+                    if response_model is not None:
+                        span.set_data(
+                            SPANDATA.GEN_AI_RESPONSE_MODEL, response_model
+                        )
+
+                    if finish_reason is not None:
+                        set_data_normalized(
+                            span,
+                            SPANDATA.GEN_AI_RESPONSE_FINISH_REASONS,
+                            finish_reason,
+                        )
+
+                    if should_send_default_pii() and integration.include_prompts:
+                        if tool_calls is not None and len(tool_calls) > 0:
+                            set_data_normalized(
+                                span,
+                                SPANDATA.GEN_AI_RESPONSE_TOOL_CALLS,
+                                tool_calls,
+                                unpack=False,
+                            )
+
+                        if len(response_text_buffer) > 0:
+                            text_response = "".join(response_text_buffer)
+                            if text_response:
+                                set_data_normalized(
+                                    span,
+                                    SPANDATA.GEN_AI_RESPONSE_TEXT,
+                                    text_response,
+                                )
+
+                    if usage is not None:
+                        record_token_usage(
+                            span,
+                            input_tokens=usage.prompt_tokens,
+                            output_tokens=usage.completion_tokens,
+                            total_tokens=usage.total_tokens,
+                        )
+
+                span.__exit__(None, None, None)
+
+            return new_iterator()
+
+    return new_huggingface_task
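The rewritten integration funnels both `InferenceClient.text_generation` (reported as `gen_ai.generate_text` spans) and `InferenceClient.chat_completion` (reported as `gen_ai.chat` spans) through the single `_wrap_huggingface_task` wrapper above, including their streaming variants. A hedged usage sketch (the DSN and model name are placeholders; `send_default_pii=True` is needed for prompts and responses to be attached to the spans):

import sentry_sdk
from sentry_sdk.integrations.huggingface_hub import HuggingfaceHubIntegration
from huggingface_hub import InferenceClient

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    send_default_pii=True,
    integrations=[HuggingfaceHubIntegration(include_prompts=True)],
)

client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta")  # placeholder model

with sentry_sdk.start_transaction(name="hf-demo"):
    # gen_ai.chat span with request messages, response text and token usage.
    client.chat_completion(
        messages=[{"role": "user", "content": "Hello!"}], max_tokens=32
    )

    # gen_ai.generate_text span; with stream=True the span is finished only after
    # the returned generator has been fully consumed.
    for _ in client.text_generation("Say hi", max_new_tokens=16, stream=True):
        pass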