arize-phoenix 11.18.0__py3-none-any.whl → 11.20.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arize-phoenix has been flagged as potentially problematic; review the changes below for details.

@@ -8,9 +8,10 @@ from secrets import token_urlsafe
8
8
  from typing import Annotated, Any, Literal, Optional, Union
9
9
 
10
10
  import pandas as pd
11
+ import sqlalchemy as sa
11
12
  from fastapi import APIRouter, Depends, Header, HTTPException, Path, Query
12
13
  from pydantic import BaseModel, Field
13
- from sqlalchemy import select
14
+ from sqlalchemy import exists, select, update
14
15
  from starlette.requests import Request
15
16
  from starlette.responses import Response, StreamingResponse
16
17
  from starlette.status import (
@@ -24,13 +25,14 @@ from strawberry.relay import GlobalID
24
25
  from phoenix.config import DEFAULT_PROJECT_NAME
25
26
  from phoenix.datetime_utils import normalize_datetime
26
27
  from phoenix.db import models
27
- from phoenix.db.helpers import SupportedSQLDialect
28
+ from phoenix.db.helpers import SupportedSQLDialect, get_ancestor_span_rowids
28
29
  from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
29
30
  from phoenix.db.insertion.types import Precursors
30
31
  from phoenix.server.api.routers.utils import df_to_bytes
32
+ from phoenix.server.api.types.node import from_global_id_with_expected_type
31
33
  from phoenix.server.authorization import is_not_locked
32
34
  from phoenix.server.bearer_auth import PhoenixUser
33
- from phoenix.server.dml_event import SpanAnnotationInsertEvent
35
+ from phoenix.server.dml_event import SpanAnnotationInsertEvent, SpanDeleteEvent
34
36
  from phoenix.trace.attributes import flatten
35
37
  from phoenix.trace.dsl import SpanQuery as SpanQuery_
36
38
  from phoenix.trace.schemas import (
@@ -1119,3 +1121,126 @@ async def create_spans(
1119
1121
  total_received=total_received,
1120
1122
  total_queued=len(spans_to_queue),
1121
1123
  )
1124
+
1125
+
1126
+ @router.delete(
1127
+ "/spans/{span_identifier}",
1128
+ dependencies=[Depends(is_not_locked)],
1129
+ operation_id="deleteSpan",
1130
+ summary="Delete a span by span_identifier",
1131
+ description=(
1132
+ """
1133
+ Delete a single span by identifier.
1134
+
1135
+ **Important**: This operation deletes ONLY the specified span itself and does NOT
1136
+ delete its descendants/children. All child spans will remain in the trace and
1137
+ become orphaned (their parent_id will point to a non-existent span).
1138
+
1139
+ Behavior:
1140
+ - Deletes only the target span (preserves all descendant spans)
1141
+ - If this was the last span in the trace, the trace record is also deleted
1142
+ - If the deleted span had a parent, its cumulative metrics (error count, token counts)
1143
+ are subtracted from all ancestor spans in the chain
1144
+
1145
+ **Note**: This operation is irreversible and may create orphaned spans.
1146
+ """
1147
+ ),
1148
+ responses=add_errors_to_responses([HTTP_404_NOT_FOUND]),
1149
+ status_code=204, # No Content for successful deletion
1150
+ )
1151
+ async def delete_span(
1152
+ request: Request,
1153
+ span_identifier: str = Path(
1154
+ description="The span identifier: either a relay GlobalID or OpenTelemetry span_id"
1155
+ ),
1156
+ ) -> None:
1157
+ """
1158
+ Delete a single span by identifier.
1159
+
1160
+ This operation deletes ONLY the specified span and preserves all its descendants,
1161
+ which may become orphaned (parent_id pointing to non-existent span).
1162
+
1163
+ Steps:
1164
+ 1. Find the target span to delete (supports both GlobalID and OpenTelemetry span_id)
1165
+ 2. Delete only the target span (all descendants remain untouched)
1166
+ 3. If trace becomes empty, delete the trace record
1167
+ 4. If deleted span had a parent, subtract its cumulative metrics from ancestor chain
1168
+ 5. Return 204 No Content on success
1169
+
1170
+ Args:
1171
+ request: FastAPI request object
1172
+ span_identifier: Either relay GlobalID or OpenTelemetry span_id
1173
+
1174
+ Raises:
1175
+ HTTPException(404): If span not found
1176
+
1177
+ Returns:
1178
+ None (204 No Content status)
1179
+ """
1180
+ async with request.app.state.db() as session:
1181
+ # Determine the predicate for deletion based on identifier type
1182
+ try:
1183
+ span_rowid = from_global_id_with_expected_type(
1184
+ GlobalID.from_id(span_identifier),
1185
+ "Span",
1186
+ )
1187
+ predicate = models.Span.id == span_rowid
1188
+ error_detail = f"Span with relay ID '{span_identifier}' not found"
1189
+ except Exception:
1190
+ predicate = models.Span.span_id == span_identifier
1191
+ error_detail = f"Span with span_id '{span_identifier}' not found"
1192
+
1193
+ # Delete the span and return its data in one operation
1194
+ target_span = await session.scalar(
1195
+ sa.delete(models.Span).where(predicate).returning(models.Span)
1196
+ )
1197
+
1198
+ if target_span is None:
1199
+ raise HTTPException(
1200
+ status_code=HTTP_404_NOT_FOUND,
1201
+ detail=error_detail,
1202
+ )
1203
+
1204
+ # Store values needed for later operations
1205
+ trace_rowid = target_span.trace_rowid
1206
+ parent_id = target_span.parent_id
1207
+ cumulative_error_count = target_span.cumulative_error_count
1208
+ cumulative_llm_token_count_prompt = target_span.cumulative_llm_token_count_prompt
1209
+ cumulative_llm_token_count_completion = target_span.cumulative_llm_token_count_completion
1210
+
1211
+ # Step 3: Check if trace is empty—if so, delete the trace record
1212
+ trace_is_empty = await session.scalar(
1213
+ select(~exists().where(models.Span.trace_rowid == trace_rowid))
1214
+ )
1215
+
1216
+ if trace_is_empty:
1217
+ # Trace is empty, delete the trace record
1218
+ await session.execute(sa.delete(models.Trace).where(models.Trace.id == trace_rowid))
1219
+
1220
+ # Step 4: Propagate negative cumulative values up ancestor chain if parent_id is not null
1221
+ if not trace_is_empty and parent_id is not None:
1222
+ # Use the helper function to get all ancestor span IDs
1223
+ ancestor_ids_query = get_ancestor_span_rowids(parent_id)
1224
+
1225
+ # Propagate negative cumulative values to ancestors
1226
+ await session.execute(
1227
+ update(models.Span)
1228
+ .where(models.Span.id.in_(ancestor_ids_query))
1229
+ .values(
1230
+ cumulative_error_count=(
1231
+ models.Span.cumulative_error_count - cumulative_error_count
1232
+ ),
1233
+ cumulative_llm_token_count_prompt=(
1234
+ models.Span.cumulative_llm_token_count_prompt
1235
+ - cumulative_llm_token_count_prompt
1236
+ ),
1237
+ cumulative_llm_token_count_completion=(
1238
+ models.Span.cumulative_llm_token_count_completion
1239
+ - cumulative_llm_token_count_completion
1240
+ ),
1241
+ )
1242
+ )
1243
+ # Trigger cache invalidation event
1244
+ request.state.event_queue.put(SpanDeleteEvent((trace_rowid,)))
1245
+
1246
+ return None
@@ -12,6 +12,7 @@ from strawberry.types import Info
12
12
 
13
13
  from phoenix.db import models
14
14
  from phoenix.server.api.context import Context
15
+ from phoenix.server.api.exceptions import BadRequest
15
16
  from phoenix.server.api.input_types.DatasetVersionSort import DatasetVersionSort
16
17
  from phoenix.server.api.types.DatasetExample import DatasetExample
17
18
  from phoenix.server.api.types.DatasetVersion import DatasetVersion
@@ -217,6 +218,9 @@ class Dataset(Node):
217
218
  after: Optional[CursorString] = UNSET,
218
219
  before: Optional[CursorString] = UNSET,
219
220
  filter_condition: Optional[str] = UNSET,
221
+ filter_ids: Optional[
222
+ list[GlobalID]
223
+ ] = UNSET, # this is a stopgap until a query DSL is implemented
220
224
  ) -> Connection[Experiment]:
221
225
  args = ConnectionArgs(
222
226
  first=first,
@@ -238,6 +242,21 @@ class Dataset(Node):
238
242
  models.Experiment.description.ilike(f"%{filter_condition}%"),
239
243
  )
240
244
  query = query.where(search_filter)
245
+
246
+ if filter_ids:
247
+ filter_rowids = []
248
+ for filter_id in filter_ids:
249
+ try:
250
+ filter_rowids.append(
251
+ from_global_id_with_expected_type(
252
+ global_id=filter_id,
253
+ expected_type_name=Experiment.__name__,
254
+ )
255
+ )
256
+ except ValueError:
257
+ raise BadRequest(f"Invalid filter ID: {filter_id}")
258
+ query = query.where(models.Experiment.id.in_(filter_rowids))
259
+
241
260
  async with info.context.db() as session:
242
261
  experiments = [
243
262
  to_gql_experiment(experiment, sequence_number)
@@ -232,6 +232,7 @@ def estimate_cpu_usage_percent() -> Optional[float]:
232
232
  except Exception:
233
233
  pass
234
234
  return psutil.cpu_percent(interval=None)
235
+ return None
235
236
 
236
237
 
237
238
  @lru_cache(maxsize=1)
@@ -1,22 +1,22 @@
1
1
  {
2
- "_components-B1Ec5V_g.js": {
3
- "file": "assets/components-B1Ec5V_g.js",
2
+ "_components-BNcxEjYs.js": {
3
+ "file": "assets/components-BNcxEjYs.js",
4
4
  "name": "components",
5
5
  "imports": [
6
6
  "_vendor-BbqekBfb.js",
7
- "_pages-B77OHHSB.js",
7
+ "_pages-3RoC-adr.js",
8
8
  "_vendor-arizeai-CEwHhYfL.js",
9
9
  "_vendor-codemirror-CHApHLLJ.js",
10
10
  "_vendor-three-BLWp5bic.js"
11
11
  ]
12
12
  },
13
- "_pages-B77OHHSB.js": {
14
- "file": "assets/pages-B77OHHSB.js",
13
+ "_pages-3RoC-adr.js": {
14
+ "file": "assets/pages-3RoC-adr.js",
15
15
  "name": "pages",
16
16
  "imports": [
17
17
  "_vendor-BbqekBfb.js",
18
18
  "_vendor-arizeai-CEwHhYfL.js",
19
- "_components-B1Ec5V_g.js",
19
+ "_components-BNcxEjYs.js",
20
20
  "_vendor-codemirror-CHApHLLJ.js",
21
21
  "_vendor-recharts-Bqf7C6Cm.js"
22
22
  ]
@@ -75,15 +75,15 @@
75
75
  "name": "vendor-three"
76
76
  },
77
77
  "index.tsx": {
78
- "file": "assets/index-DYx39hbu.js",
78
+ "file": "assets/index-CKIBKnVD.js",
79
79
  "name": "index",
80
80
  "src": "index.tsx",
81
81
  "isEntry": true,
82
82
  "imports": [
83
83
  "_vendor-BbqekBfb.js",
84
84
  "_vendor-arizeai-CEwHhYfL.js",
85
- "_pages-B77OHHSB.js",
86
- "_components-B1Ec5V_g.js",
85
+ "_pages-3RoC-adr.js",
86
+ "_components-BNcxEjYs.js",
87
87
  "_vendor-three-BLWp5bic.js",
88
88
  "_vendor-codemirror-CHApHLLJ.js",
89
89
  "_vendor-shiki-BQ88Q1b1.js",