sentry-sdk 0.7.5__py2.py3-none-any.whl → 2.46.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (193)
  1. sentry_sdk/__init__.py +48 -30
  2. sentry_sdk/_compat.py +74 -61
  3. sentry_sdk/_init_implementation.py +84 -0
  4. sentry_sdk/_log_batcher.py +172 -0
  5. sentry_sdk/_lru_cache.py +47 -0
  6. sentry_sdk/_metrics_batcher.py +167 -0
  7. sentry_sdk/_queue.py +289 -0
  8. sentry_sdk/_types.py +338 -0
  9. sentry_sdk/_werkzeug.py +98 -0
  10. sentry_sdk/ai/__init__.py +7 -0
  11. sentry_sdk/ai/monitoring.py +137 -0
  12. sentry_sdk/ai/utils.py +144 -0
  13. sentry_sdk/api.py +496 -80
  14. sentry_sdk/attachments.py +75 -0
  15. sentry_sdk/client.py +1023 -103
  16. sentry_sdk/consts.py +1438 -66
  17. sentry_sdk/crons/__init__.py +10 -0
  18. sentry_sdk/crons/api.py +62 -0
  19. sentry_sdk/crons/consts.py +4 -0
  20. sentry_sdk/crons/decorator.py +135 -0
  21. sentry_sdk/debug.py +15 -14
  22. sentry_sdk/envelope.py +369 -0
  23. sentry_sdk/feature_flags.py +71 -0
  24. sentry_sdk/hub.py +611 -280
  25. sentry_sdk/integrations/__init__.py +276 -49
  26. sentry_sdk/integrations/_asgi_common.py +108 -0
  27. sentry_sdk/integrations/_wsgi_common.py +180 -44
  28. sentry_sdk/integrations/aiohttp.py +291 -42
  29. sentry_sdk/integrations/anthropic.py +439 -0
  30. sentry_sdk/integrations/argv.py +9 -8
  31. sentry_sdk/integrations/ariadne.py +161 -0
  32. sentry_sdk/integrations/arq.py +247 -0
  33. sentry_sdk/integrations/asgi.py +341 -0
  34. sentry_sdk/integrations/asyncio.py +144 -0
  35. sentry_sdk/integrations/asyncpg.py +208 -0
  36. sentry_sdk/integrations/atexit.py +17 -10
  37. sentry_sdk/integrations/aws_lambda.py +377 -62
  38. sentry_sdk/integrations/beam.py +176 -0
  39. sentry_sdk/integrations/boto3.py +137 -0
  40. sentry_sdk/integrations/bottle.py +221 -0
  41. sentry_sdk/integrations/celery/__init__.py +529 -0
  42. sentry_sdk/integrations/celery/beat.py +293 -0
  43. sentry_sdk/integrations/celery/utils.py +43 -0
  44. sentry_sdk/integrations/chalice.py +134 -0
  45. sentry_sdk/integrations/clickhouse_driver.py +177 -0
  46. sentry_sdk/integrations/cloud_resource_context.py +280 -0
  47. sentry_sdk/integrations/cohere.py +274 -0
  48. sentry_sdk/integrations/dedupe.py +48 -14
  49. sentry_sdk/integrations/django/__init__.py +584 -191
  50. sentry_sdk/integrations/django/asgi.py +245 -0
  51. sentry_sdk/integrations/django/caching.py +204 -0
  52. sentry_sdk/integrations/django/middleware.py +187 -0
  53. sentry_sdk/integrations/django/signals_handlers.py +91 -0
  54. sentry_sdk/integrations/django/templates.py +79 -5
  55. sentry_sdk/integrations/django/transactions.py +49 -22
  56. sentry_sdk/integrations/django/views.py +96 -0
  57. sentry_sdk/integrations/dramatiq.py +226 -0
  58. sentry_sdk/integrations/excepthook.py +50 -13
  59. sentry_sdk/integrations/executing.py +67 -0
  60. sentry_sdk/integrations/falcon.py +272 -0
  61. sentry_sdk/integrations/fastapi.py +141 -0
  62. sentry_sdk/integrations/flask.py +142 -88
  63. sentry_sdk/integrations/gcp.py +239 -0
  64. sentry_sdk/integrations/gnu_backtrace.py +99 -0
  65. sentry_sdk/integrations/google_genai/__init__.py +301 -0
  66. sentry_sdk/integrations/google_genai/consts.py +16 -0
  67. sentry_sdk/integrations/google_genai/streaming.py +155 -0
  68. sentry_sdk/integrations/google_genai/utils.py +576 -0
  69. sentry_sdk/integrations/gql.py +162 -0
  70. sentry_sdk/integrations/graphene.py +151 -0
  71. sentry_sdk/integrations/grpc/__init__.py +168 -0
  72. sentry_sdk/integrations/grpc/aio/__init__.py +7 -0
  73. sentry_sdk/integrations/grpc/aio/client.py +95 -0
  74. sentry_sdk/integrations/grpc/aio/server.py +100 -0
  75. sentry_sdk/integrations/grpc/client.py +91 -0
  76. sentry_sdk/integrations/grpc/consts.py +1 -0
  77. sentry_sdk/integrations/grpc/server.py +66 -0
  78. sentry_sdk/integrations/httpx.py +178 -0
  79. sentry_sdk/integrations/huey.py +174 -0
  80. sentry_sdk/integrations/huggingface_hub.py +378 -0
  81. sentry_sdk/integrations/langchain.py +1132 -0
  82. sentry_sdk/integrations/langgraph.py +337 -0
  83. sentry_sdk/integrations/launchdarkly.py +61 -0
  84. sentry_sdk/integrations/litellm.py +287 -0
  85. sentry_sdk/integrations/litestar.py +315 -0
  86. sentry_sdk/integrations/logging.py +307 -96
  87. sentry_sdk/integrations/loguru.py +213 -0
  88. sentry_sdk/integrations/mcp.py +566 -0
  89. sentry_sdk/integrations/modules.py +14 -31
  90. sentry_sdk/integrations/openai.py +725 -0
  91. sentry_sdk/integrations/openai_agents/__init__.py +61 -0
  92. sentry_sdk/integrations/openai_agents/consts.py +1 -0
  93. sentry_sdk/integrations/openai_agents/patches/__init__.py +5 -0
  94. sentry_sdk/integrations/openai_agents/patches/agent_run.py +140 -0
  95. sentry_sdk/integrations/openai_agents/patches/error_tracing.py +77 -0
  96. sentry_sdk/integrations/openai_agents/patches/models.py +50 -0
  97. sentry_sdk/integrations/openai_agents/patches/runner.py +45 -0
  98. sentry_sdk/integrations/openai_agents/patches/tools.py +77 -0
  99. sentry_sdk/integrations/openai_agents/spans/__init__.py +5 -0
  100. sentry_sdk/integrations/openai_agents/spans/agent_workflow.py +21 -0
  101. sentry_sdk/integrations/openai_agents/spans/ai_client.py +42 -0
  102. sentry_sdk/integrations/openai_agents/spans/execute_tool.py +48 -0
  103. sentry_sdk/integrations/openai_agents/spans/handoff.py +19 -0
  104. sentry_sdk/integrations/openai_agents/spans/invoke_agent.py +86 -0
  105. sentry_sdk/integrations/openai_agents/utils.py +199 -0
  106. sentry_sdk/integrations/openfeature.py +35 -0
  107. sentry_sdk/integrations/opentelemetry/__init__.py +7 -0
  108. sentry_sdk/integrations/opentelemetry/consts.py +5 -0
  109. sentry_sdk/integrations/opentelemetry/integration.py +58 -0
  110. sentry_sdk/integrations/opentelemetry/propagator.py +117 -0
  111. sentry_sdk/integrations/opentelemetry/span_processor.py +391 -0
  112. sentry_sdk/integrations/otlp.py +82 -0
  113. sentry_sdk/integrations/pure_eval.py +141 -0
  114. sentry_sdk/integrations/pydantic_ai/__init__.py +47 -0
  115. sentry_sdk/integrations/pydantic_ai/consts.py +1 -0
  116. sentry_sdk/integrations/pydantic_ai/patches/__init__.py +4 -0
  117. sentry_sdk/integrations/pydantic_ai/patches/agent_run.py +215 -0
  118. sentry_sdk/integrations/pydantic_ai/patches/graph_nodes.py +110 -0
  119. sentry_sdk/integrations/pydantic_ai/patches/model_request.py +40 -0
  120. sentry_sdk/integrations/pydantic_ai/patches/tools.py +98 -0
  121. sentry_sdk/integrations/pydantic_ai/spans/__init__.py +3 -0
  122. sentry_sdk/integrations/pydantic_ai/spans/ai_client.py +246 -0
  123. sentry_sdk/integrations/pydantic_ai/spans/execute_tool.py +49 -0
  124. sentry_sdk/integrations/pydantic_ai/spans/invoke_agent.py +112 -0
  125. sentry_sdk/integrations/pydantic_ai/utils.py +223 -0
  126. sentry_sdk/integrations/pymongo.py +214 -0
  127. sentry_sdk/integrations/pyramid.py +112 -68
  128. sentry_sdk/integrations/quart.py +237 -0
  129. sentry_sdk/integrations/ray.py +165 -0
  130. sentry_sdk/integrations/redis/__init__.py +48 -0
  131. sentry_sdk/integrations/redis/_async_common.py +116 -0
  132. sentry_sdk/integrations/redis/_sync_common.py +119 -0
  133. sentry_sdk/integrations/redis/consts.py +19 -0
  134. sentry_sdk/integrations/redis/modules/__init__.py +0 -0
  135. sentry_sdk/integrations/redis/modules/caches.py +118 -0
  136. sentry_sdk/integrations/redis/modules/queries.py +65 -0
  137. sentry_sdk/integrations/redis/rb.py +32 -0
  138. sentry_sdk/integrations/redis/redis.py +69 -0
  139. sentry_sdk/integrations/redis/redis_cluster.py +107 -0
  140. sentry_sdk/integrations/redis/redis_py_cluster_legacy.py +50 -0
  141. sentry_sdk/integrations/redis/utils.py +148 -0
  142. sentry_sdk/integrations/rq.py +95 -37
  143. sentry_sdk/integrations/rust_tracing.py +284 -0
  144. sentry_sdk/integrations/sanic.py +294 -123
  145. sentry_sdk/integrations/serverless.py +48 -19
  146. sentry_sdk/integrations/socket.py +96 -0
  147. sentry_sdk/integrations/spark/__init__.py +4 -0
  148. sentry_sdk/integrations/spark/spark_driver.py +316 -0
  149. sentry_sdk/integrations/spark/spark_worker.py +116 -0
  150. sentry_sdk/integrations/sqlalchemy.py +142 -0
  151. sentry_sdk/integrations/starlette.py +737 -0
  152. sentry_sdk/integrations/starlite.py +292 -0
  153. sentry_sdk/integrations/statsig.py +37 -0
  154. sentry_sdk/integrations/stdlib.py +235 -29
  155. sentry_sdk/integrations/strawberry.py +394 -0
  156. sentry_sdk/integrations/sys_exit.py +70 -0
  157. sentry_sdk/integrations/threading.py +158 -28
  158. sentry_sdk/integrations/tornado.py +84 -52
  159. sentry_sdk/integrations/trytond.py +50 -0
  160. sentry_sdk/integrations/typer.py +60 -0
  161. sentry_sdk/integrations/unleash.py +33 -0
  162. sentry_sdk/integrations/unraisablehook.py +53 -0
  163. sentry_sdk/integrations/wsgi.py +201 -119
  164. sentry_sdk/logger.py +96 -0
  165. sentry_sdk/metrics.py +81 -0
  166. sentry_sdk/monitor.py +120 -0
  167. sentry_sdk/profiler/__init__.py +49 -0
  168. sentry_sdk/profiler/continuous_profiler.py +730 -0
  169. sentry_sdk/profiler/transaction_profiler.py +839 -0
  170. sentry_sdk/profiler/utils.py +195 -0
  171. sentry_sdk/py.typed +0 -0
  172. sentry_sdk/scope.py +1713 -85
  173. sentry_sdk/scrubber.py +177 -0
  174. sentry_sdk/serializer.py +405 -0
  175. sentry_sdk/session.py +177 -0
  176. sentry_sdk/sessions.py +275 -0
  177. sentry_sdk/spotlight.py +242 -0
  178. sentry_sdk/tracing.py +1486 -0
  179. sentry_sdk/tracing_utils.py +1236 -0
  180. sentry_sdk/transport.py +806 -134
  181. sentry_sdk/types.py +52 -0
  182. sentry_sdk/utils.py +1625 -465
  183. sentry_sdk/worker.py +54 -25
  184. sentry_sdk-2.46.0.dist-info/METADATA +268 -0
  185. sentry_sdk-2.46.0.dist-info/RECORD +189 -0
  186. {sentry_sdk-0.7.5.dist-info → sentry_sdk-2.46.0.dist-info}/WHEEL +1 -1
  187. sentry_sdk-2.46.0.dist-info/entry_points.txt +2 -0
  188. sentry_sdk-2.46.0.dist-info/licenses/LICENSE +21 -0
  189. sentry_sdk/integrations/celery.py +0 -119
  190. sentry_sdk-0.7.5.dist-info/LICENSE +0 -9
  191. sentry_sdk-0.7.5.dist-info/METADATA +0 -36
  192. sentry_sdk-0.7.5.dist-info/RECORD +0 -39
  193. {sentry_sdk-0.7.5.dist-info → sentry_sdk-2.46.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,378 @@
1
+ import inspect
2
+ from functools import wraps
3
+
4
+ import sentry_sdk
5
+ from sentry_sdk.ai.monitoring import record_token_usage
6
+ from sentry_sdk.ai.utils import set_data_normalized
7
+ from sentry_sdk.consts import OP, SPANDATA
8
+ from sentry_sdk.integrations import DidNotEnable, Integration
9
+ from sentry_sdk.scope import should_send_default_pii
10
+ from sentry_sdk.tracing_utils import set_span_errored
11
+ from sentry_sdk.utils import (
12
+ capture_internal_exceptions,
13
+ event_from_exception,
14
+ )
15
+
16
+ from typing import TYPE_CHECKING
17
+
18
+ if TYPE_CHECKING:
19
+ from typing import Any, Callable, Iterable
20
+
21
+ try:
22
+ import huggingface_hub.inference._client
23
+ except ImportError:
24
+ raise DidNotEnable("Huggingface not installed")
25
+
26
+
27
class HuggingfaceHubIntegration(Integration):
    """Sentry integration for the ``huggingface_hub`` inference client."""

    identifier = "huggingface_hub"
    origin = f"auto.ai.{identifier}"

    def __init__(self, include_prompts=True):
        # type: (HuggingfaceHubIntegration, bool) -> None
        # When True (and PII sending is enabled), prompts and responses
        # are attached to spans.
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        # Other tasks that can be called: https://huggingface.co/docs/huggingface_hub/guides/inference#supported-providers-and-tasks
        inference_client = huggingface_hub.inference._client.InferenceClient
        for method_name, operation in (
            ("text_generation", OP.GEN_AI_GENERATE_TEXT),
            ("chat_completion", OP.GEN_AI_CHAT),
        ):
            original_method = getattr(inference_client, method_name)
            setattr(
                inference_client,
                method_name,
                _wrap_huggingface_task(original_method, operation),
            )
52
+
53
+
54
def _capture_exception(exc):
    # type: (Any) -> None
    """Mark the active span as errored and report *exc* to Sentry."""
    set_span_errored()

    client = sentry_sdk.get_client()
    event, hint = event_from_exception(
        exc,
        client_options=client.options,
        mechanism={"type": "huggingface_hub", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)
64
+
65
+
66
def _wrap_huggingface_task(f, op):
    # type: (Callable[..., Any], str) -> Callable[..., Any]
    """Wrap a Huggingface ``InferenceClient`` task method in a Sentry span.

    The wrapper records request attributes (model, sampling parameters and,
    when PII sending is allowed, the prompt/messages), executes ``f``, and
    then records response attributes (response model, finish reason, text,
    tool calls, token usage). For streaming responses (generators) the span
    is kept open and is closed only once the stream has been exhausted.

    :param f: the original task method (e.g. ``text_generation``).
    :param op: the Sentry span ``op`` to use for this task.
    :return: the wrapped method with an unchanged call signature.
    """

    @wraps(f)
    def new_huggingface_task(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(HuggingfaceHubIntegration)
        if integration is None:
            return f(*args, **kwargs)

        # The prompt is either a keyword argument ("prompt" for text
        # generation, "messages" for chat completion) or the first
        # positional argument after ``self``.
        prompt = None
        if "prompt" in kwargs:
            prompt = kwargs["prompt"]
        elif "messages" in kwargs:
            prompt = kwargs["messages"]
        elif len(args) >= 2:
            if isinstance(args[1], str) or isinstance(args[1], list):
                prompt = args[1]

        if prompt is None:
            # invalid call, don't instrument, let it return error
            return f(*args, **kwargs)

        client = args[0]
        model = client.model or kwargs.get("model") or ""
        operation_name = op.split(".")[-1]

        # Enter the span manually (no ``with``) because for streaming
        # responses it must outlive this function; the iterator wrappers
        # below are responsible for calling ``__exit__``.
        span = sentry_sdk.start_span(
            op=op,
            name=f"{operation_name} {model}",
            origin=HuggingfaceHubIntegration.origin,
        )
        span.__enter__()

        span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, operation_name)

        if model:
            span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model)

        # Input attributes
        if should_send_default_pii() and integration.include_prompts:
            set_data_normalized(
                span, SPANDATA.GEN_AI_REQUEST_MESSAGES, prompt, unpack=False
            )

        # Optional request kwargs that map 1:1 onto span attributes.
        attribute_mapping = {
            "tools": SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS,
            "frequency_penalty": SPANDATA.GEN_AI_REQUEST_FREQUENCY_PENALTY,
            "max_tokens": SPANDATA.GEN_AI_REQUEST_MAX_TOKENS,
            "presence_penalty": SPANDATA.GEN_AI_REQUEST_PRESENCE_PENALTY,
            "temperature": SPANDATA.GEN_AI_REQUEST_TEMPERATURE,
            "top_p": SPANDATA.GEN_AI_REQUEST_TOP_P,
            "top_k": SPANDATA.GEN_AI_REQUEST_TOP_K,
            "stream": SPANDATA.GEN_AI_RESPONSE_STREAMING,
        }

        for attribute, span_attribute in attribute_mapping.items():
            value = kwargs.get(attribute, None)
            if value is not None:
                # Scalars are stored as-is; anything else is normalized.
                if isinstance(value, (int, float, bool, str)):
                    span.set_data(span_attribute, value)
                else:
                    set_data_normalized(span, span_attribute, value, unpack=False)

        # LLM Execution
        try:
            res = f(*args, **kwargs)
        except Exception as e:
            _capture_exception(e)
            span.__exit__(None, None, None)
            raise e from None

        # Output attributes (non-streaming responses). All attribute
        # extraction is best-effort and guarded by hasattr checks because
        # the response shape differs per task and huggingface_hub version.
        finish_reason = None
        response_model = None
        response_text_buffer: list[str] = []
        tokens_used = 0
        tool_calls = None
        usage = None

        with capture_internal_exceptions():
            # text_generation may return the generated text directly.
            if isinstance(res, str):
                response_text_buffer.append(res)

            if hasattr(res, "generated_text") and res.generated_text is not None:
                response_text_buffer.append(res.generated_text)

            if hasattr(res, "model") and res.model is not None:
                response_model = res.model

            if hasattr(res, "details") and hasattr(res.details, "finish_reason"):
                finish_reason = res.details.finish_reason

            if (
                hasattr(res, "details")
                and hasattr(res.details, "generated_tokens")
                and res.details.generated_tokens is not None
            ):
                tokens_used = res.details.generated_tokens

            if hasattr(res, "usage") and res.usage is not None:
                usage = res.usage

            if hasattr(res, "choices") and res.choices is not None:
                for choice in res.choices:
                    if hasattr(choice, "finish_reason"):
                        finish_reason = choice.finish_reason
                    if hasattr(choice, "message") and hasattr(
                        choice.message, "tool_calls"
                    ):
                        tool_calls = choice.message.tool_calls
                    if (
                        hasattr(choice, "message")
                        and hasattr(choice.message, "content")
                        and choice.message.content is not None
                    ):
                        response_text_buffer.append(choice.message.content)

            if response_model is not None:
                span.set_data(SPANDATA.GEN_AI_RESPONSE_MODEL, response_model)

            if finish_reason is not None:
                set_data_normalized(
                    span,
                    SPANDATA.GEN_AI_RESPONSE_FINISH_REASONS,
                    finish_reason,
                )

            if should_send_default_pii() and integration.include_prompts:
                if tool_calls is not None and len(tool_calls) > 0:
                    set_data_normalized(
                        span,
                        SPANDATA.GEN_AI_RESPONSE_TOOL_CALLS,
                        tool_calls,
                        unpack=False,
                    )

                if len(response_text_buffer) > 0:
                    text_response = "".join(response_text_buffer)
                    if text_response:
                        set_data_normalized(
                            span,
                            SPANDATA.GEN_AI_RESPONSE_TEXT,
                            text_response,
                        )

            # Prefer a full usage object; fall back to the token count from
            # text-generation details if that is all we have.
            if usage is not None:
                record_token_usage(
                    span,
                    input_tokens=usage.prompt_tokens,
                    output_tokens=usage.completion_tokens,
                    total_tokens=usage.total_tokens,
                )
            elif tokens_used > 0:
                record_token_usage(
                    span,
                    total_tokens=tokens_used,
                )

        # If the response is not a generator (i.e. not a streaming
        # response) we are done and can return the response.
        if not inspect.isgenerator(res):
            span.__exit__(None, None, None)
            return res

        if kwargs.get("details", False):
            # text-generation stream output

            def new_details_iterator():
                # type: () -> Iterable[Any]
                # Re-yield every chunk while accumulating output
                # attributes; record them and close the span once the
                # stream is exhausted.
                finish_reason = None
                response_text_buffer: list[str] = []
                tokens_used = 0

                with capture_internal_exceptions():
                    for chunk in res:
                        if (
                            hasattr(chunk, "token")
                            and hasattr(chunk.token, "text")
                            and chunk.token.text is not None
                        ):
                            response_text_buffer.append(chunk.token.text)

                        if hasattr(chunk, "details") and hasattr(
                            chunk.details, "finish_reason"
                        ):
                            finish_reason = chunk.details.finish_reason

                        if (
                            hasattr(chunk, "details")
                            and hasattr(chunk.details, "generated_tokens")
                            and chunk.details.generated_tokens is not None
                        ):
                            tokens_used = chunk.details.generated_tokens

                        yield chunk

                    if finish_reason is not None:
                        set_data_normalized(
                            span,
                            SPANDATA.GEN_AI_RESPONSE_FINISH_REASONS,
                            finish_reason,
                        )

                    if should_send_default_pii() and integration.include_prompts:
                        if len(response_text_buffer) > 0:
                            text_response = "".join(response_text_buffer)
                            if text_response:
                                set_data_normalized(
                                    span,
                                    SPANDATA.GEN_AI_RESPONSE_TEXT,
                                    text_response,
                                )

                    if tokens_used > 0:
                        record_token_usage(
                            span,
                            total_tokens=tokens_used,
                        )

                span.__exit__(None, None, None)

            return new_details_iterator()

        else:
            # chat-completion stream output

            def new_iterator():
                # type: () -> Iterable[str]
                # Re-yield every chunk while accumulating output
                # attributes; record them and close the span once the
                # stream is exhausted.
                finish_reason = None
                response_model = None
                response_text_buffer: list[str] = []
                tool_calls = None
                usage = None

                with capture_internal_exceptions():
                    for chunk in res:
                        if hasattr(chunk, "model") and chunk.model is not None:
                            response_model = chunk.model

                        if hasattr(chunk, "usage") and chunk.usage is not None:
                            usage = chunk.usage

                        if isinstance(chunk, str):
                            response_text_buffer.append(chunk)

                        if hasattr(chunk, "choices") and chunk.choices is not None:
                            for choice in chunk.choices:
                                if (
                                    hasattr(choice, "delta")
                                    and hasattr(choice.delta, "content")
                                    and choice.delta.content is not None
                                ):
                                    response_text_buffer.append(
                                        choice.delta.content
                                    )

                                if (
                                    hasattr(choice, "finish_reason")
                                    and choice.finish_reason is not None
                                ):
                                    finish_reason = choice.finish_reason

                                if (
                                    hasattr(choice, "delta")
                                    and hasattr(choice.delta, "tool_calls")
                                    and choice.delta.tool_calls is not None
                                ):
                                    tool_calls = choice.delta.tool_calls

                        yield chunk

                    if response_model is not None:
                        span.set_data(
                            SPANDATA.GEN_AI_RESPONSE_MODEL, response_model
                        )

                    if finish_reason is not None:
                        set_data_normalized(
                            span,
                            SPANDATA.GEN_AI_RESPONSE_FINISH_REASONS,
                            finish_reason,
                        )

                    if should_send_default_pii() and integration.include_prompts:
                        if tool_calls is not None and len(tool_calls) > 0:
                            set_data_normalized(
                                span,
                                SPANDATA.GEN_AI_RESPONSE_TOOL_CALLS,
                                tool_calls,
                                unpack=False,
                            )

                        if len(response_text_buffer) > 0:
                            text_response = "".join(response_text_buffer)
                            if text_response:
                                set_data_normalized(
                                    span,
                                    SPANDATA.GEN_AI_RESPONSE_TEXT,
                                    text_response,
                                )

                    if usage is not None:
                        record_token_usage(
                            span,
                            input_tokens=usage.prompt_tokens,
                            output_tokens=usage.completion_tokens,
                            total_tokens=usage.total_tokens,
                        )

                span.__exit__(None, None, None)

            return new_iterator()

    return new_huggingface_task