arize-phoenix 10.0.4__py3-none-any.whl → 10.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arize-phoenix might be problematic.

Files changed (25)
  1. {arize_phoenix-10.0.4.dist-info → arize_phoenix-10.2.0.dist-info}/METADATA +3 -3
  2. {arize_phoenix-10.0.4.dist-info → arize_phoenix-10.2.0.dist-info}/RECORD +25 -25
  3. phoenix/db/insertion/dataset.py +2 -1
  4. phoenix/db/types/model_provider.py +1 -0
  5. phoenix/server/api/helpers/playground_clients.py +33 -0
  6. phoenix/server/api/helpers/prompts/models.py +20 -0
  7. phoenix/server/api/routers/v1/datasets.py +8 -2
  8. phoenix/server/api/routers/v1/spans.py +521 -4
  9. phoenix/server/api/types/GenerativeProvider.py +5 -0
  10. phoenix/server/static/.vite/manifest.json +44 -44
  11. phoenix/server/static/assets/{components-DULKeDfL.js → components-ClD3sHta.js} +384 -336
  12. phoenix/server/static/assets/{index-E0M82BdE.js → index-CXawXHw0.js} +6 -2
  13. phoenix/server/static/assets/{pages-Cl0A-0U2.js → pages-BFtNRfTL.js} +534 -544
  14. phoenix/server/static/assets/{vendor-oB4u9zuV.js → vendor-DOUbLVp5.js} +1 -1
  15. phoenix/server/static/assets/{vendor-arizeai-Dy-0mSNw.js → vendor-arizeai-DHqMQzfV.js} +6 -6
  16. phoenix/server/static/assets/{vendor-codemirror-DBtifKNr.js → vendor-codemirror-DWdZV1Is.js} +1 -1
  17. phoenix/server/static/assets/{vendor-recharts-D-T4KPz2.js → vendor-recharts-BfHdRd1Y.js} +1 -1
  18. phoenix/server/static/assets/{vendor-shiki-BMn4O_9F.js → vendor-shiki-CHu75YVL.js} +1 -1
  19. phoenix/session/evaluation.py +6 -1
  20. phoenix/trace/dsl/helpers.py +90 -1
  21. phoenix/version.py +1 -1
  22. {arize_phoenix-10.0.4.dist-info → arize_phoenix-10.2.0.dist-info}/WHEEL +0 -0
  23. {arize_phoenix-10.0.4.dist-info → arize_phoenix-10.2.0.dist-info}/entry_points.txt +0 -0
  24. {arize_phoenix-10.0.4.dist-info → arize_phoenix-10.2.0.dist-info}/licenses/IP_NOTICE +0 -0
  25. {arize_phoenix-10.0.4.dist-info → arize_phoenix-10.2.0.dist-info}/licenses/LICENSE +0 -0
@@ -2,12 +2,13 @@ import warnings
  from asyncio import get_running_loop
  from collections.abc import AsyncIterator
  from datetime import datetime, timezone
+ from enum import Enum
  from secrets import token_urlsafe
- from typing import Any, Literal, Optional
+ from typing import Annotated, Any, Literal, Optional, Union

  import pandas as pd
- from fastapi import APIRouter, Header, HTTPException, Query
- from pydantic import Field
+ from fastapi import APIRouter, Header, HTTPException, Path, Query
+ from pydantic import BaseModel, Field
  from sqlalchemy import select
  from starlette.requests import Request
  from starlette.responses import Response, StreamingResponse
@@ -23,11 +24,18 @@ from phoenix.db.insertion.types import Precursors
  from phoenix.server.api.routers.utils import df_to_bytes
  from phoenix.server.bearer_auth import PhoenixUser
  from phoenix.server.dml_event import SpanAnnotationInsertEvent
+ from phoenix.trace.attributes import flatten
  from phoenix.trace.dsl import SpanQuery as SpanQuery_
  from phoenix.utilities.json import encode_df_as_json_string

  from .models import V1RoutesBaseModel
- from .utils import RequestBody, ResponseBody, add_errors_to_responses
+ from .utils import (
+     PaginatedResponseBody,
+     RequestBody,
+     ResponseBody,
+     _get_project_by_identifier,
+     add_errors_to_responses,
+ )

  DEFAULT_SPAN_LIMIT = 1000

@@ -68,6 +76,306 @@ class QuerySpansRequestBody(V1RoutesBaseModel):
      )


+ ################################################################################
+ # Autogenerated OTLP models
+
+ # These models are autogenerated from the OTLP v1 protobuf schemas
+ # Source: https://github.com/open-telemetry/opentelemetry-proto/
+ # ...blob/main/opentelemetry/proto/trace/v1/trace.proto
+ # ...blob/main/opentelemetry/proto/common/v1/common.proto
+ # ...blob/main/opentelemetry/proto/resource/v1/resource.proto
+
+ # The autogeneration is done using the `protoc` tool and the `protoc-gen-jsonschema` go plugin
+ # The generated JSON schemas are coverted to Pydantic using `datamodel-codegen`
+ ################################################################################
+
+
+ class OtlpDoubleValue(Enum):
+     Infinity = "Infinity"
+     field_Infinity = "-Infinity"
+     NaN = "NaN"
+
+
+ class OtlpArrayValue(BaseModel):
+     model_config = {"extra": "forbid"}
+
+     values: Optional[list["OtlpAnyValue"]] = Field(
+         None,
+         description="Array of values. The array may be empty (contain 0 elements).",
+     )
+
+
+ class OtlpAnyValue(BaseModel):
+     model_config = {"extra": "forbid"}
+
+     array_value: Optional[OtlpArrayValue] = None
+     bool_value: Optional[bool] = None
+     bytes_value: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = None
+     double_value: Optional[Union[float, OtlpDoubleValue, str]] = None
+     int_value: Optional[
+         Union[
+             Annotated[int, Field(ge=-9223372036854775808, lt=9223372036854775808)],
+             Annotated[str, Field(pattern=r"^-?[0-9]+$")],
+         ]
+     ] = None
+     kvlist_value: None = None  # TODO: Add KeyValueList model
+     string_value: Optional[str] = None
+
+
+ class OtlpKeyValue(BaseModel):
+     model_config = {"extra": "forbid"}
+
+     key: Optional[str] = None
+     value: Optional[OtlpAnyValue] = None
+
+
+ class StatusCode(str, Enum):
+     # This is not autogenerated, but used to convert the status code in our DB to an OTLP
+     # status code integer
+     UNSET = "UNSET"
+     OK = "OK"
+     ERROR = "ERROR"
+
+     def to_int(self) -> int:
+         return {
+             "UNSET": 0,
+             "OK": 1,
+             "ERROR": 2,
+         }[self.value]
+
+
+ class OtlpStatus(BaseModel):
+     model_config = {"extra": "forbid"}
+
+     code: Optional[Annotated[int, Field(ge=-2147483648, le=2147483647)]] = Field(
+         None, description="The status code."
+     )
+     message: Optional[str] = Field(
+         None, description="A developer-facing human readable error message."
+     )
+
+
+ class OtlpKind(Enum):
+     SPAN_KIND_UNSPECIFIED = "SPAN_KIND_UNSPECIFIED"
+     SPAN_KIND_INTERNAL = "SPAN_KIND_INTERNAL"
+     SPAN_KIND_SERVER = "SPAN_KIND_SERVER"
+     SPAN_KIND_CLIENT = "SPAN_KIND_CLIENT"
+     SPAN_KIND_PRODUCER = "SPAN_KIND_PRODUCER"
+     SPAN_KIND_CONSUMER = "SPAN_KIND_CONSUMER"
+
+
+ class OtlpEvent(BaseModel):
+     model_config = {"extra": "forbid"}
+
+     attributes: Optional[list[OtlpKeyValue]] = Field(
+         None,
+         description=(
+             "attributes is a collection of attribute key/value pairs on the event. "
+             "Attribute keys MUST be unique (it is not allowed to have more than one "
+             "attribute with the same key)."
+         ),
+     )
+     dropped_attributes_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
+         None,
+         description=(
+             "dropped_attributes_count is the number of dropped attributes. If the value is 0, "
+             "then no attributes were dropped."
+         ),
+     )
+     name: Optional[str] = Field(
+         None,
+         description=(
+             "name of the event. "
+             "This field is semantically required to be set to non-empty string."
+         ),
+     )
+     time_unix_nano: Optional[
+         Union[
+             Annotated[int, Field(ge=0, lt=18446744073709551616)],
+             Annotated[str, Field(pattern=r"^[0-9]+$")],
+         ]
+     ] = Field(
+         None,
+         description=(
+             "time_unix_nano is the time the event occurred. "
+             "Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970."
+         ),
+     )
+
+
+ class OtlpSpan(BaseModel):
+     model_config = {"extra": "forbid"}
+
+     attributes: Optional[list[OtlpKeyValue]] = Field(
+         None,
+         description=(
+             "attributes is a collection of key/value pairs. Note, global attributes like server "
+             "name can be set using the resource API. Examples of attributes:\n\n"
+             ' "/http/user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) '
+             'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"\n'
+             ' "/http/server_latency": 300\n'
+             ' "example.com/myattribute": true\n'
+             ' "example.com/score": 10.239\n\n'
+             "The OpenTelemetry API specification further restricts the allowed value types:\n"
+             "https://github.com/open-telemetry/opentelemetry-specification/blob/main/"
+             "specification/common/README.md#attribute\n"
+             "Attribute keys MUST be unique (it is not allowed to have more than one attribute "
+             "with the same key)."
+         ),
+     )
+     dropped_attributes_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
+         None,
+         description=(
+             "dropped_attributes_count is the number of attributes that were discarded. Attributes "
+             "can be discarded because their keys are too long or because there are too many "
+             "attributes. If this value is 0, then no attributes were dropped."
+         ),
+     )
+     dropped_events_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
+         None,
+         description=(
+             "dropped_events_count is the number of dropped events. If the value is 0, then no "
+             "events were dropped."
+         ),
+     )
+     dropped_links_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
+         None,
+         description=(
+             "dropped_links_count is the number of dropped links after the maximum size was "
+             "enforced. If this value is 0, then no links were dropped."
+         ),
+     )
+     end_time_unix_nano: Optional[
+         Union[
+             Annotated[int, Field(ge=0, lt=18446744073709551616)],
+             Annotated[str, Field(pattern=r"^[0-9]+$")],
+         ]
+     ] = Field(
+         None,
+         description=(
+             "end_time_unix_nano is the end time of the span. On the client side, this is the time "
+             "kept by the local machine where the span execution ends. On the server side, this is "
+             "the time when the server application handler stops running.\n"
+             "Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.\n\n"
+             "This field is semantically required and it is expected that end_time >= start_time."
+         ),
+     )
+     events: Optional[list[OtlpEvent]] = Field(
+         None,
+         description=("events is a collection of Event items. " "A span with no events is valid."),
+     )
+     flags: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
+         None,
+         description=(
+             "Flags, a bit field.\n\n"
+             "Bits 0-7 (8 least significant bits) are the trace flags as defined in W3C Trace "
+             "Context specification. To read the 8-bit W3C trace flag, use "
+             "`flags & SPAN_FLAGS_TRACE_FLAGS_MASK`.\n\n"
+             "See https://www.w3.org/TR/trace-context-2/#trace-flags for the flag definitions.\n\n"
+             "Bits 8 and 9 represent the 3 states of whether a span's parent is remote. The states "
+             "are (unknown, is not remote, is remote).\n"
+             "To read whether the value is known, use "
+             "`(flags & SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK) != 0`.\n"
+             "To read whether the span is remote, use "
+             "`(flags & SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK) != 0`.\n\n"
+             "When creating span messages, if the message is logically forwarded from another "
+             "source with an equivalent flags fields (i.e., usually another OTLP span message), the "
+             "field SHOULD be copied as-is. If creating from a source that does not have an "
+             "equivalent flags field (such as a runtime representation of an OpenTelemetry span), "
+             "the high 22 bits MUST be set to zero.\n"
+             "Readers MUST NOT assume that bits 10-31 (22 most significant bits) will be zero.\n\n"
+             "[Optional]."
+         ),
+     )
+     kind: Optional[Union[OtlpKind, Annotated[int, Field(ge=-2147483648, le=2147483647)]]] = Field(
+         OtlpKind.SPAN_KIND_INTERNAL,  # INTERNAL because OpenInference uses its own SpanKind
+         description=(
+             "Distinguishes between spans generated in a particular context. For example, two spans "
+             "with the same name may be distinguished using `CLIENT` (caller) and `SERVER` (callee) "
+             "to identify queueing latency associated with the span."
+         ),
+     )
+     links: None = None  # TODO: Add Link model
+     name: Optional[str] = Field(
+         None,
+         description=(
+             "A description of the span's operation.\n\n"
+             "For example, the name can be a qualified method name or a file name and a line number "
+             "where the operation is called. A best practice is to use the same display name at the "
+             "same call point in an application. This makes it easier to correlate spans in "
+             "different traces.\n\n"
+             "This field is semantically required to be set to non-empty string. Empty value is "
+             "equivalent to an unknown span name.\n\n"
+             "This field is required."
+         ),
+     )
+     parent_span_id: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = Field(
+         None,
+         description=(
+             "The `span_id` of this span's parent span. If this is a root span, then this field "
+             "must be empty. The ID is an 8-byte array."
+         ),
+     )
+     span_id: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = Field(
+         None,
+         description=(
+             "A unique identifier for a span within a trace, assigned when the span is created. The "
+             "ID is an 8-byte array. An ID with all zeroes OR of length other than 8 bytes is "
+             "considered invalid (empty string in OTLP/JSON is zero-length and thus is also "
+             "invalid).\n\n"
+             "This field is required."
+         ),
+     )
+     start_time_unix_nano: Optional[
+         Union[
+             Annotated[int, Field(ge=0, lt=18446744073709551616)],
+             Annotated[str, Field(pattern=r"^[0-9]+$")],
+         ]
+     ] = Field(
+         None,
+         description=(
+             "start_time_unix_nano is the start time of the span. On the client side, this is the "
+             "time kept by the local machine where the span execution starts. On the server side, "
+             "this is the time when the server's application handler starts running.\n"
+             "Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.\n\n"
+             "This field is semantically required and it is expected that end_time >= start_time."
+         ),
+     )
+     status: Optional[OtlpStatus] = Field(
+         None,
+         description=(
+             "An optional final status for this span. Semantically when Status isn't set, it means "
+             "span's status code is unset, i.e. assume STATUS_CODE_UNSET (code = 0)."
+         ),
+     )
+     trace_id: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = Field(
+         None,
+         description=(
+             "A unique identifier for a trace. All spans from the same trace share the same "
+             "`trace_id`. The ID is a 16-byte array. An ID with all zeroes OR of length other than "
+             "16 bytes is considered invalid (empty string in OTLP/JSON is zero-length and thus is "
+             "also invalid).\n\n"
+             "This field is required."
+         ),
+     )
+     trace_state: Optional[str] = Field(
+         None,
+         description=(
+             "trace_state conveys information about request position in multiple distributed "
+             "tracing graphs. It is a trace_state in w3c-trace-context format: "
+             "https://www.w3.org/TR/trace-context/#tracestate-header\n"
+             "See also https://github.com/w3c/distributed-tracing for more details about this "
+             "field."
+         ),
+     )
+
+
+ class SpanSearchResponseBody(PaginatedResponseBody[OtlpSpan]):
+     """Paginated response where each span follows OTLP JSON structure."""
+
+     pass
+
+
  # TODO: Add property details to SpanQuery schema
  @router.post(
      "/spans",
@@ -154,6 +462,215 @@ async def _json_multipart(
      yield f"--{boundary_token}--\r\n"


+ def _to_array_value(values: list[Any]) -> OtlpArrayValue:
+     """Convert a list of values to an OtlpArrayValue.
+
+     If the values are not all of the same type, they will be coerced to strings.
+     Nested lists/tuples are not allowed and will be stringified.
+     """
+     if not values:
+         return OtlpArrayValue(values=[])
+
+     # Convert any list/tuple values to strings to prevent nesting
+     processed_values = [str(v) if isinstance(v, (list, tuple)) else v for v in values]
+
+     # Check if all values are of the same type
+     first_type = type(processed_values[0])
+     if all(isinstance(v, first_type) for v in processed_values):
+         # All values are of the same type, convert normally
+         return OtlpArrayValue(values=[_to_any_value(v) for v in processed_values])
+
+     # Values are not homogeneous, convert everything to strings
+     return OtlpArrayValue(values=[OtlpAnyValue(string_value=str(v)) for v in processed_values])
+
+
+ def _to_any_value(value: Any) -> OtlpAnyValue:
+     if value is None:
+         return OtlpAnyValue()
+     elif isinstance(value, bool):
+         return OtlpAnyValue(bool_value=value)
+     elif isinstance(value, int):
+         return OtlpAnyValue(int_value=value)
+     elif isinstance(value, float):
+         if value in (float("inf"), float("-inf"), float("nan")):
+             return OtlpAnyValue(double_value=str(value))
+         return OtlpAnyValue(double_value=value)
+     elif isinstance(value, str):
+         return OtlpAnyValue(string_value=value)
+     elif isinstance(value, bytes):
+         return OtlpAnyValue(bytes_value=value.hex())
+     elif isinstance(value, (list, tuple)):
+         return OtlpAnyValue(array_value=_to_array_value(list(value)))
+     elif isinstance(value, dict):
+         # TODO: Implement kvlist_value when KeyValueList model is added
+         return OtlpAnyValue()
+     else:
+         # For any other type, convert to string
+         return OtlpAnyValue(string_value=str(value))
+
+
+ @router.get(
+     "/projects/{project_identifier}/spans/otlpv1",
+     operation_id="spanSearch",
+     summary="Search spans with simple filters (no DSL)",
+     description="Return spans within a project filtered by time range, annotation names, "
+     "and ordered by start_time. Supports cursor-based pagination.",
+     responses=add_errors_to_responses([HTTP_404_NOT_FOUND, HTTP_422_UNPROCESSABLE_ENTITY]),
+ )
+ async def span_search(
+     request: Request,
+     project_identifier: str = Path(
+         description=(
+             "The project identifier: either project ID or project name. If using a project name, "
+             "it cannot contain slash (/), question mark (?), or pound sign (#) characters."
+         )
+     ),
+     cursor: Optional[str] = Query(default=None, description="Pagination cursor (GlobalID of Span)"),
+     limit: int = Query(default=100, gt=0, le=1000, description="Maximum number of spans to return"),
+     sort_direction: Literal["asc", "desc"] = Query(
+         default="desc",
+         description="Sort direction for the sort field",
+     ),
+     start_time: Optional[datetime] = Query(default=None, description="Inclusive lower bound time"),
+     end_time: Optional[datetime] = Query(default=None, description="Exclusive upper bound time"),
+     annotation_names: Optional[list[str]] = Query(
+         default=None,
+         description=(
+             "If provided, only include spans that have at least one annotation with one "
+             "of these names."
+         ),
+         alias="annotationNames",
+     ),
+ ) -> SpanSearchResponseBody:
+     """Search spans with minimal filters instead of the old SpanQuery DSL."""
+
+     async with request.app.state.db() as session:
+         project = await _get_project_by_identifier(session, project_identifier)
+
+     project_id: int = project.id
+     order_by = [models.Span.id.asc() if sort_direction == "asc" else models.Span.id.desc()]
+
+     stmt = (
+         select(
+             models.Span,
+             models.Trace.trace_id,
+         )
+         .join(models.Trace, onclause=models.Trace.id == models.Span.trace_rowid)
+         .join(models.Project, onclause=models.Project.id == project_id)
+         .order_by(*order_by)
+     )
+
+     if start_time:
+         stmt = stmt.where(models.Span.start_time >= normalize_datetime(start_time, timezone.utc))
+     if end_time:
+         stmt = stmt.where(models.Span.start_time < normalize_datetime(end_time, timezone.utc))
+
+     if annotation_names:
+         stmt = (
+             stmt.join(
+                 models.SpanAnnotation,
+                 onclause=models.SpanAnnotation.span_rowid == models.Span.id,
+             )
+             .where(models.SpanAnnotation.name.in_(annotation_names))
+             .group_by(models.Span.id, models.Trace.trace_id)
+         )
+
+     if cursor:
+         try:
+             cursor_rowid = int(GlobalID.from_id(cursor).node_id)
+             if sort_direction == "asc":
+                 stmt = stmt.where(models.Span.id >= cursor_rowid)
+             else:
+                 stmt = stmt.where(models.Span.id <= cursor_rowid)
+         except Exception:
+             raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail="Invalid cursor")
+
+     stmt = stmt.limit(limit + 1)
+
+     async with request.app.state.db() as session:
+         rows: list[tuple[models.Span, str]] = [r async for r in await session.stream(stmt)]
+
+     if not rows:
+         return SpanSearchResponseBody(next_cursor=None, data=[])
+
+     next_cursor: Optional[str] = None
+     if len(rows) == limit + 1:
+         *rows, extra = rows  # extra is first item of next page
+         span_extra, _ = extra
+         next_cursor = str(GlobalID("Span", str(span_extra.id)))
+
+     # Convert ORM rows -> OTLP-style spans
+     result_spans: list[OtlpSpan] = []
+     for span_orm, trace_id in rows:
+         try:
+             status_code_enum = StatusCode(span_orm.status_code or "UNSET")
+         except ValueError:
+             status_code_enum = StatusCode.UNSET
+
+         # Convert attributes to KeyValue list
+         attributes_kv: list[OtlpKeyValue] = []
+         if span_orm.attributes:
+             for k, v in flatten(span_orm.attributes or {}, recurse_on_sequence=True):
+                 attributes_kv.append(OtlpKeyValue(key=k, value=_to_any_value(v)))
+
+         # Convert events to OTLP Event list
+         events: Optional[list[OtlpEvent]] = None
+         if span_orm.events:
+             events = []
+             for event in span_orm.events:
+                 event_attributes: list[OtlpKeyValue] = []
+                 if event.get("attributes"):
+                     for k, v in flatten(event["attributes"], recurse_on_sequence=True):
+                         event_attributes.append(OtlpKeyValue(key=k, value=_to_any_value(v)))
+
+                 # Convert event timestamp to nanoseconds
+                 event_time = event.get("timestamp")
+                 time_unix_nano = None
+                 if event_time:
+                     if isinstance(event_time, datetime):
+                         time_unix_nano = int(event_time.timestamp() * 1_000_000_000)
+                     elif isinstance(event_time, str):
+                         try:
+                             dt = datetime.fromisoformat(event_time)
+                             time_unix_nano = int(dt.timestamp() * 1_000_000_000)
+                         except ValueError:
+                             pass
+                     elif isinstance(event_time, (int, float)):
+                         time_unix_nano = int(event_time)
+
+                 events.append(
+                     OtlpEvent(
+                         name=event.get("name"),
+                         attributes=event_attributes,
+                         time_unix_nano=time_unix_nano,
+                         dropped_attributes_count=event.get("dropped_attributes_count"),
+                     )
+                 )
+
+         start_ns = (
+             int(span_orm.start_time.timestamp() * 1_000_000_000) if span_orm.start_time else None
+         )
+         end_ns = int(span_orm.end_time.timestamp() * 1_000_000_000) if span_orm.end_time else None
+
+         result_spans.append(
+             OtlpSpan(
+                 trace_id=trace_id,
+                 span_id=span_orm.span_id,
+                 parent_span_id=span_orm.parent_id,
+                 name=span_orm.name,
+                 start_time_unix_nano=start_ns,
+                 end_time_unix_nano=end_ns,
+                 attributes=attributes_kv,
+                 events=events,
+                 status=OtlpStatus(
+                     code=status_code_enum.to_int(), message=span_orm.status_message or None
+                 ),
+             )
+         )
+
+     return SpanSearchResponseBody(next_cursor=next_cursor, data=result_spans)
+
+
  @router.get("/spans", include_in_schema=False, deprecated=True)
  async def get_spans_handler(
      request: Request,
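
The new spanSearch route above returns OTLP/JSON-shaped spans wrapped in a paginated envelope with `data` and `next_cursor` fields. As a rough illustration only (not part of this diff), a client could page through it along these lines; the base URL, project name, and annotation name are placeholders, and the sketch assumes the v1 router is mounted under /v1:

    # Hypothetical client sketch: only the path, query parameters, and the
    # data/next_cursor envelope come from the diff above; everything else is made up.
    import httpx

    BASE_URL = "http://localhost:6006"  # assumed local Phoenix server
    params: dict = {
        "limit": 100,
        "sort_direction": "desc",
        "annotationNames": ["correctness"],  # note the camelCase alias
    }
    cursor = None
    while True:
        if cursor is not None:
            params["cursor"] = cursor
        resp = httpx.get(f"{BASE_URL}/v1/projects/my-project/spans/otlpv1", params=params)
        resp.raise_for_status()
        page = resp.json()
        for span in page["data"]:  # each item follows the OtlpSpan / OTLP JSON shape
            print(span.get("name"), span.get("span_id"), (span.get("status") or {}).get("code"))
        cursor = page.get("next_cursor")
        if cursor is None:
            break
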
@@ -14,6 +14,7 @@ class GenerativeProviderKey(Enum):
      ANTHROPIC = "Anthropic"
      AZURE_OPENAI = "Azure OpenAI"
      GOOGLE = "Google AI Studio"
+     DEEPSEEK = "DeepSeek"


  @strawberry.type
@@ -26,6 +27,7 @@ class GenerativeProvider:
          GenerativeProviderKey.ANTHROPIC: ["claude"],
          GenerativeProviderKey.OPENAI: ["gpt", "o1"],
          GenerativeProviderKey.GOOGLE: ["gemini"],
+         GenerativeProviderKey.DEEPSEEK: ["deepseek"],
      }

      attribute_provider_to_generative_provider_map: ClassVar[dict[str, GenerativeProviderKey]] = {
@@ -33,6 +35,8 @@ class GenerativeProvider:
          OpenInferenceLLMProviderValues.ANTHROPIC.value: GenerativeProviderKey.ANTHROPIC,
          OpenInferenceLLMProviderValues.AZURE.value: GenerativeProviderKey.AZURE_OPENAI,
          OpenInferenceLLMProviderValues.GOOGLE.value: GenerativeProviderKey.GOOGLE,
+         # Note: DeepSeek uses OpenAI compatibility but we can't duplicate the key in the dict
+         # The provider will be determined through model name prefix matching instead
      }

      model_provider_to_api_key_env_var_map: ClassVar[dict[GenerativeProviderKey, str]] = {
@@ -40,6 +44,7 @@ class GenerativeProvider:
          GenerativeProviderKey.ANTHROPIC: "ANTHROPIC_API_KEY",
          GenerativeProviderKey.OPENAI: "OPENAI_API_KEY",
          GenerativeProviderKey.GOOGLE: "GEMINI_API_KEY",
+         GenerativeProviderKey.DEEPSEEK: "DEEPSEEK_API_KEY",
      }

      @strawberry.field
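
On the playground side, DeepSeek is added as another provider: it gets its own GenerativeProviderKey value, a "deepseek" model-name prefix, and a DEEPSEEK_API_KEY environment variable, while the OpenInference attribute map is left untouched because DeepSeek rides on OpenAI compatibility. Purely as an illustration of the prefix map above (this is not the library's actual lookup code), provider resolution by model name could look like:

    # Illustrative sketch only; mirrors the prefix map added in this diff, not Phoenix's own lookup.
    from typing import Optional

    MODEL_NAME_PREFIXES = {
        "Anthropic": ["claude"],
        "OpenAI": ["gpt", "o1"],
        "Google AI Studio": ["gemini"],
        "DeepSeek": ["deepseek"],
    }

    def guess_provider(model_name: str) -> Optional[str]:
        lowered = model_name.lower()
        for provider, prefixes in MODEL_NAME_PREFIXES.items():
            if any(lowered.startswith(prefix) for prefix in prefixes):
                return provider
        return None

    assert guess_provider("deepseek-chat") == "DeepSeek"  # would read DEEPSEEK_API_KEY
    assert guess_provider("gpt-4o-mini") == "OpenAI"
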