arize-phoenix 4.20.0__py3-none-any.whl → 4.20.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of arize-phoenix might be problematic.

Files changed (46)
  1. {arize_phoenix-4.20.0.dist-info → arize_phoenix-4.20.2.dist-info}/METADATA +2 -1
  2. {arize_phoenix-4.20.0.dist-info → arize_phoenix-4.20.2.dist-info}/RECORD +45 -43
  3. phoenix/db/bulk_inserter.py +24 -98
  4. phoenix/db/insertion/document_annotation.py +13 -0
  5. phoenix/db/insertion/span.py +9 -0
  6. phoenix/db/insertion/span_annotation.py +13 -0
  7. phoenix/db/insertion/trace_annotation.py +13 -0
  8. phoenix/db/insertion/types.py +34 -28
  9. phoenix/db/migrations/versions/10460e46d750_datasets.py +28 -2
  10. phoenix/db/migrations/versions/3be8647b87d8_add_token_columns_to_spans_table.py +134 -0
  11. phoenix/db/migrations/versions/cf03bd6bae1d_init.py +28 -2
  12. phoenix/db/models.py +9 -1
  13. phoenix/server/api/context.py +8 -6
  14. phoenix/server/api/dataloaders/__init__.py +0 -47
  15. phoenix/server/api/dataloaders/token_counts.py +2 -7
  16. phoenix/server/api/input_types/SpanSort.py +3 -8
  17. phoenix/server/api/mutations/dataset_mutations.py +9 -3
  18. phoenix/server/api/mutations/experiment_mutations.py +2 -0
  19. phoenix/server/api/mutations/project_mutations.py +5 -5
  20. phoenix/server/api/mutations/span_annotations_mutations.py +10 -2
  21. phoenix/server/api/mutations/trace_annotations_mutations.py +10 -2
  22. phoenix/server/api/queries.py +9 -0
  23. phoenix/server/api/routers/v1/datasets.py +2 -0
  24. phoenix/server/api/routers/v1/experiment_evaluations.py +2 -0
  25. phoenix/server/api/routers/v1/experiment_runs.py +2 -0
  26. phoenix/server/api/routers/v1/experiments.py +2 -0
  27. phoenix/server/api/routers/v1/spans.py +12 -8
  28. phoenix/server/api/routers/v1/traces.py +12 -10
  29. phoenix/server/api/types/Dataset.py +6 -1
  30. phoenix/server/api/types/Experiment.py +6 -1
  31. phoenix/server/api/types/Project.py +4 -1
  32. phoenix/server/api/types/Span.py +5 -17
  33. phoenix/server/app.py +25 -8
  34. phoenix/server/dml_event.py +136 -0
  35. phoenix/server/dml_event_handler.py +272 -0
  36. phoenix/server/static/.vite/manifest.json +14 -14
  37. phoenix/server/static/assets/{components-CAummAJx.js → components-BSw2e1Zr.js} +108 -100
  38. phoenix/server/static/assets/{index-Cg5hdf3g.js → index-BYUFcdtx.js} +1 -1
  39. phoenix/server/static/assets/{pages-BU__X1UX.js → pages-p_fuED5k.js} +251 -237
  40. phoenix/server/static/assets/{vendor-arizeai-CkyzG9Wl.js → vendor-arizeai-CIETbKDq.js} +28 -28
  41. phoenix/server/types.py +106 -1
  42. phoenix/version.py +1 -1
  43. phoenix/db/migrations/types.py +0 -29
  44. {arize_phoenix-4.20.0.dist-info → arize_phoenix-4.20.2.dist-info}/WHEEL +0 -0
  45. {arize_phoenix-4.20.0.dist-info → arize_phoenix-4.20.2.dist-info}/licenses/IP_NOTICE +0 -0
  46. {arize_phoenix-4.20.0.dist-info → arize_phoenix-4.20.2.dist-info}/licenses/LICENSE +0 -0
phoenix/server/api/mutations/span_annotations_mutations.py CHANGED
@@ -14,6 +14,7 @@ from phoenix.server.api.mutations.auth import IsAuthenticated
 from phoenix.server.api.queries import Query
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.types.SpanAnnotation import SpanAnnotation, to_gql_span_annotation
+from phoenix.server.dml_event import SpanAnnotationDeleteEvent, SpanAnnotationInsertEvent


 @strawberry.type
@@ -47,7 +48,10 @@ class SpanAnnotationMutationMixin:
         )
         result = await session.scalars(stmt)
         inserted_annotations = result.all()
-
+        if inserted_annotations:
+            info.context.event_queue.put(
+                SpanAnnotationInsertEvent(tuple(anno.id for anno in inserted_annotations))
+            )
         return SpanAnnotationMutationPayload(
             span_annotations=[
                 to_gql_span_annotation(annotation) for annotation in inserted_annotations
@@ -92,7 +96,7 @@ class SpanAnnotationMutationMixin:
             )
             if span_annotation is not None:
                 patched_annotations.append(to_gql_span_annotation(span_annotation))
-
+                info.context.event_queue.put(SpanAnnotationInsertEvent((span_annotation.id,)))
         return SpanAnnotationMutationPayload(span_annotations=patched_annotations, query=Query())

     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -115,6 +119,10 @@ class SpanAnnotationMutationMixin:
         deleted_annotations_gql = [
             to_gql_span_annotation(annotation) for annotation in deleted_annotations
         ]
+        if deleted_annotations:
+            info.context.event_queue.put(
+                SpanAnnotationDeleteEvent(tuple(anno.id for anno in deleted_annotations))
+            )
         return SpanAnnotationMutationPayload(
             span_annotations=deleted_annotations_gql, query=Query()
         )
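
Both mutation hunks above follow one pattern: collect the affected primary keys, then put a single typed event on the context's event queue, skipping the put when nothing changed. A minimal runnable sketch of that contract, using a stand-in event class and a plain queue rather than the Phoenix types:

    from dataclasses import dataclass, field
    from queue import SimpleQueue
    from typing import Tuple

    @dataclass(frozen=True)
    class SpanAnnotationInsertEvent:
        # stand-in for phoenix.server.dml_event: a frozen dataclass carrying row ids
        ids: Tuple[int, ...] = field(default_factory=tuple)

    event_queue: "SimpleQueue[SpanAnnotationInsertEvent]" = SimpleQueue()

    def emit_after_insert(inserted_ids: Tuple[int, ...]) -> None:
        # mirror the mutation: only emit when rows were actually written
        if inserted_ids:
            event_queue.put(SpanAnnotationInsertEvent(tuple(inserted_ids)))

    emit_after_insert(())         # no-op, nothing was inserted
    emit_after_insert((1, 2, 3))
    print(event_queue.get().ids)  # (1, 2, 3)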
phoenix/server/api/mutations/trace_annotations_mutations.py CHANGED
@@ -14,6 +14,7 @@ from phoenix.server.api.mutations.auth import IsAuthenticated
 from phoenix.server.api.queries import Query
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.types.TraceAnnotation import TraceAnnotation, to_gql_trace_annotation
+from phoenix.server.dml_event import TraceAnnotationDeleteEvent, TraceAnnotationInsertEvent


 @strawberry.type
@@ -47,7 +48,10 @@ class TraceAnnotationMutationMixin:
         )
         result = await session.scalars(stmt)
         inserted_annotations = result.all()
-
+        if inserted_annotations:
+            info.context.event_queue.put(
+                TraceAnnotationInsertEvent(tuple(anno.id for anno in inserted_annotations))
+            )
         return TraceAnnotationMutationPayload(
             trace_annotations=[
                 to_gql_trace_annotation(annotation) for annotation in inserted_annotations
@@ -91,7 +95,7 @@ class TraceAnnotationMutationMixin:
             )
             if trace_annotation:
                 patched_annotations.append(to_gql_trace_annotation(trace_annotation))
-
+                info.context.event_queue.put(TraceAnnotationInsertEvent((trace_annotation.id,)))
         return TraceAnnotationMutationPayload(trace_annotations=patched_annotations, query=Query())

     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -114,6 +118,10 @@ class TraceAnnotationMutationMixin:
         deleted_annotations_gql = [
             to_gql_trace_annotation(annotation) for annotation in deleted_annotations
         ]
+        if deleted_annotations:
+            info.context.event_queue.put(
+                TraceAnnotationDeleteEvent(tuple(anno.id for anno in deleted_annotations))
+            )
         return TraceAnnotationMutationPayload(
             trace_annotations=deleted_annotations_gql, query=Query()
         )
phoenix/server/api/queries.py CHANGED
@@ -1,4 +1,5 @@
 from collections import defaultdict
+from datetime import datetime
 from typing import DefaultDict, Dict, List, Optional, Set, Union

 import numpy as np
@@ -107,6 +108,10 @@ class Query:
         ]
         return connection_from_list(data=data, args=args)

+    @strawberry.field
+    def projects_last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(models.Project)
+
     @strawberry.field
     async def datasets(
         self,
@@ -133,6 +138,10 @@ class Query:
             data=[to_gql_dataset(dataset) for dataset in datasets], args=args
         )

+    @strawberry.field
+    def datasets_last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(models.Dataset)
+
     @strawberry.field
     async def compare_experiments(
         self,
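
These two root fields give clients a cheap polling target: compare the returned timestamp against the last fetch instead of re-querying the full connection. The store itself (LastUpdatedAt in phoenix/server/types.py) is not shown in this diff; a hypothetical sketch of the table-keyed lookup the fields rely on:

    from datetime import datetime, timezone
    from typing import Dict, Optional, Type

    class LastUpdatedAtSketch:
        """Hypothetical store: latest DML timestamp per ORM table class."""

        def __init__(self) -> None:
            self._by_table: Dict[type, datetime] = {}

        def set(self, table: Type) -> None:
            self._by_table[table] = datetime.now(timezone.utc)

        def get(self, table: Type) -> Optional[datetime]:
            return self._by_table.get(table)

    class Project: ...  # stand-in for models.Project

    store = LastUpdatedAtSketch()
    print(store.get(Project))  # None: nothing written yet, the field returns null
    store.set(Project)
    print(store.get(Project))  # a UTC timestamp clients can compare against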
phoenix/server/api/routers/v1/datasets.py CHANGED
@@ -60,6 +60,7 @@ from phoenix.server.api.types.DatasetExample import DatasetExample as DatasetExa
 from phoenix.server.api.types.DatasetVersion import DatasetVersion as DatasetVersionNodeType
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.utils import delete_projects, delete_traces
+from phoenix.server.dml_event import DatasetInsertEvent

 from .pydantic_compat import V1RoutesBaseModel
 from .utils import (
@@ -481,6 +482,7 @@ async def upload_dataset(
     if sync:
         async with request.app.state.db() as session:
             dataset_id = (await operation(session)).dataset_id
+            request.state.event_queue.put(DatasetInsertEvent((dataset_id,)))
     return UploadDatasetResponseBody(
         data=UploadDatasetData(dataset_id=str(GlobalID(Dataset.__name__, str(dataset_id))))
     )
phoenix/server/api/routers/v1/experiment_evaluations.py CHANGED
@@ -11,6 +11,7 @@ from phoenix.db import models
 from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import insert_on_conflict
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ExperimentRunAnnotationInsertEvent

 from .pydantic_compat import V1RoutesBaseModel
 from .utils import ResponseBody, add_errors_to_responses
@@ -108,6 +109,7 @@ async def upsert_experiment_evaluation(
         ).returning(models.ExperimentRunAnnotation)
     )
     evaluation_gid = GlobalID("ExperimentEvaluation", str(exp_eval_run.id))
+    request.state.event_queue.put(ExperimentRunAnnotationInsertEvent((exp_eval_run.id,)))
     return UpsertExperimentEvaluationResponseBody(
         data=UpsertExperimentEvaluationResponseBodyData(id=str(evaluation_gid))
     )
phoenix/server/api/routers/v1/experiment_runs.py CHANGED
@@ -11,6 +11,7 @@ from strawberry.relay import GlobalID
 from phoenix.db import models
 from phoenix.db.models import ExperimentRunOutput
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ExperimentRunInsertEvent

 from .pydantic_compat import V1RoutesBaseModel
 from .utils import ResponseBody, add_errors_to_responses
@@ -102,6 +103,7 @@ async def create_experiment_run(
     )
     session.add(exp_run)
     await session.flush()
+    request.state.event_queue.put(ExperimentRunInsertEvent((exp_run.id,)))
     run_gid = GlobalID("ExperimentRun", str(exp_run.id))
     return CreateExperimentResponseBody(data=CreateExperimentRunResponseBodyData(id=str(run_gid)))

phoenix/server/api/routers/v1/experiments.py CHANGED
@@ -13,6 +13,7 @@ from phoenix.db import models
 from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import insert_on_conflict
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ExperimentInsertEvent

 from .pydantic_compat import V1RoutesBaseModel
 from .utils import ResponseBody, add_errors_to_responses
@@ -188,6 +189,7 @@ async def create_experiment(
     dataset_version_globalid = GlobalID(
         "DatasetVersion", str(experiment.dataset_version_id)
     )
+    request.state.event_queue.put(ExperimentInsertEvent((experiment.id,)))
     return CreateExperimentResponseBody(
         data=Experiment(
             id=str(experiment_globalid),
phoenix/server/api/routers/v1/spans.py CHANGED
@@ -16,6 +16,7 @@ from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
 from phoenix.db.insertion.types import Precursors
 from phoenix.server.api.routers.utils import df_to_bytes
+from phoenix.server.dml_event import SpanAnnotationInsertEvent
 from phoenix.trace.dsl import SpanQuery as SpanQuery_

 from .pydantic_compat import V1RoutesBaseModel
@@ -197,6 +198,8 @@ async def annotate_spans(
     request_body: AnnotateSpansRequestBody,
     sync: bool = Query(default=True, description="If true, fulfill request synchronously."),
 ) -> AnnotateSpansResponseBody:
+    if not request_body.data:
+        return AnnotateSpansResponseBody(data=[])
     precursors = [d.as_precursor() for d in request_body.data]
     if not sync:
         await request.state.enqueue(*precursors)
@@ -217,9 +220,7 @@ async def annotate_spans(
                 detail=f"Spans with IDs {', '.join(missing_span_ids)} do not exist.",
                 status_code=HTTP_404_NOT_FOUND,
             )
-
-        inserted_annotations = []
-
+        inserted_ids = []
         dialect = SupportedSQLDialect(session.bind.dialect.name)
         for p in precursors:
             values = dict(as_kv(p.as_insertable(existing_spans[p.span_id]).row))
@@ -231,8 +232,11 @@ async def annotate_spans(
                     unique_by=("name", "span_rowid"),
                 ).returning(models.SpanAnnotation.id)
             )
-            inserted_annotations.append(
-                InsertedSpanAnnotation(id=str(GlobalID("SpanAnnotation", str(span_annotation_id))))
-            )
-
-    return AnnotateSpansResponseBody(data=inserted_annotations)
+            inserted_ids.append(span_annotation_id)
+    request.state.event_queue.put(SpanAnnotationInsertEvent(tuple(inserted_ids)))
+    return AnnotateSpansResponseBody(
+        data=[
+            InsertedSpanAnnotation(id=str(GlobalID("SpanAnnotation", str(id_))))
+            for id_ in inserted_ids
+        ]
+    )
phoenix/server/api/routers/v1/traces.py CHANGED
@@ -24,6 +24,7 @@ from phoenix.db import models
 from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
 from phoenix.db.insertion.types import Precursors
+from phoenix.server.dml_event import TraceAnnotationInsertEvent
 from phoenix.trace.otel import decode_otlp_span
 from phoenix.utilities.project import get_project_name

@@ -153,6 +154,8 @@ async def annotate_traces(
     request_body: AnnotateTracesRequestBody,
     sync: bool = Query(default=True, description="If true, fulfill request synchronously."),
 ) -> AnnotateTracesResponseBody:
+    if not request_body.data:
+        return AnnotateTracesResponseBody(data=[])
     precursors = [d.as_precursor() for d in request_body.data]
     if not sync:
         await request.state.enqueue(*precursors)
@@ -173,9 +176,7 @@ async def annotate_traces(
                 detail=f"Traces with IDs {', '.join(missing_trace_ids)} do not exist.",
                 status_code=HTTP_404_NOT_FOUND,
             )
-
-        inserted_annotations = []
-
+        inserted_ids = []
         dialect = SupportedSQLDialect(session.bind.dialect.name)
         for p in precursors:
             values = dict(as_kv(p.as_insertable(existing_traces[p.trace_id]).row))
@@ -187,13 +188,14 @@ async def annotate_traces(
                     unique_by=("name", "trace_rowid"),
                 ).returning(models.TraceAnnotation.id)
             )
-            inserted_annotations.append(
-                InsertedTraceAnnotation(
-                    id=str(GlobalID("TraceAnnotation", str(trace_annotation_id)))
-                )
-            )
-
-    return AnnotateTracesResponseBody(data=inserted_annotations)
+            inserted_ids.append(trace_annotation_id)
+    request.state.event_queue.put(TraceAnnotationInsertEvent(tuple(inserted_ids)))
+    return AnnotateTracesResponseBody(
+        data=[
+            InsertedTraceAnnotation(id=str(GlobalID("TraceAnnotation", str(id_))))
+            for id_ in inserted_ids
+        ]
+    )


 async def _add_spans(req: ExportTraceServiceRequest, state: State) -> None:
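
The span and trace annotation routes now share one shape: guard against an empty request body, upsert row by row while collecting the returned primary keys, emit a single batched event, and build the response from the collected ids. The insert_on_conflict helper itself is not shown in this diff; a standalone sqlite3 sketch of the kind of upsert-with-RETURNING statement it appears to build, assuming SQLite 3.35+ for RETURNING support:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE span_annotations ("
        " id INTEGER PRIMARY KEY,"
        " span_rowid INTEGER NOT NULL,"
        " name TEXT NOT NULL,"
        " label TEXT,"
        " UNIQUE (name, span_rowid))"
    )

    def upsert(span_rowid: int, name: str, label: str) -> int:
        # one upsert per precursor, unique on (name, span_rowid), returning the
        # row id so a single batched event can be emitted after the loop
        (annotation_id,) = conn.execute(
            "INSERT INTO span_annotations (span_rowid, name, label)"
            " VALUES (?, ?, ?)"
            " ON CONFLICT (name, span_rowid) DO UPDATE SET label = excluded.label"
            " RETURNING id",
            (span_rowid, name, label),
        ).fetchone()
        return annotation_id

    inserted_ids = [upsert(1, "quality", "good"), upsert(1, "quality", "bad")]
    print(inserted_ids)  # the same id twice: the second call updated in place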
phoenix/server/api/types/Dataset.py CHANGED
@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import AsyncIterable, List, Optional, Tuple, cast
+from typing import AsyncIterable, ClassVar, List, Optional, Tuple, Type, cast

 import strawberry
 from sqlalchemy import and_, func, select
@@ -27,6 +27,7 @@ from phoenix.server.api.types.SortDir import SortDir

 @strawberry.type
 class Dataset(Node):
+    _table: ClassVar[Type[models.Base]] = models.Experiment
     id_attr: NodeID[int]
     name: str
     description: Optional[str]
@@ -284,6 +285,10 @@ class Dataset(Node):
             ) in await session.stream(query)
         ]

+    @strawberry.field
+    def last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(self._table, self.id_attr)
+

 def to_gql_dataset(dataset: models.Dataset) -> Dataset:
     """
phoenix/server/api/types/Experiment.py CHANGED
@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import List, Optional
+from typing import ClassVar, List, Optional, Type

 import strawberry
 from sqlalchemy import select
@@ -23,6 +23,7 @@ from phoenix.server.api.types.Project import Project

 @strawberry.type
 class Experiment(Node):
+    _table: ClassVar[Type[models.Base]] = models.Experiment
     cached_sequence_number: Private[Optional[int]] = None
     id_attr: NodeID[int]
     name: str
@@ -127,6 +128,10 @@ class Experiment(Node):
             gradient_end_color=db_project.gradient_end_color,
         )

+    @strawberry.field
+    def last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(self._table, self.id_attr)
+

 def to_gql_experiment(
     experiment: models.Experiment,
phoenix/server/api/types/Project.py CHANGED
@@ -2,8 +2,10 @@ import operator
 from datetime import datetime
 from typing import (
     Any,
+    ClassVar,
     List,
     Optional,
+    Type,
 )

 import strawberry
@@ -38,6 +40,7 @@ from phoenix.trace.dsl import SpanFilter

 @strawberry.type
 class Project(Node):
+    _table: ClassVar[Type[models.Base]] = models.Project
     id_attr: NodeID[int]
     name: str
     gradient_start_color: str
@@ -397,7 +400,7 @@ class Project(Node):
         self,
         info: Info[Context, None],
     ) -> Optional[datetime]:
-        return info.context.streaming_last_updated_at(self.id_attr)
+        return info.context.last_updated_at.get(self._table, self.id_attr)

     @strawberry.field
     async def validate_span_filter_condition(self, condition: str) -> ValidationResult:
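
Dataset, Experiment, and Project each gain a _table ClassVar so that last_updated_at resolves uniformly: the class picks the ORM table, id_attr picks the row, and the per-row overload of last_updated_at.get takes that (table, id) pair. A toy sketch of how the node types build the key, with stand-in model classes:

    from typing import ClassVar, Tuple, Type

    class Base: ...                   # stand-in for models.Base
    class ProjectModel(Base): ...     # stand-in for models.Project
    class ExperimentModel(Base): ...  # stand-in for models.Experiment

    class Node:
        _table: ClassVar[Type[Base]]

        def __init__(self, id_attr: int) -> None:
            self.id_attr = id_attr

    class Project(Node):
        _table = ProjectModel

    class Experiment(Node):
        _table = ExperimentModel

    def last_updated_key(node: Node) -> Tuple[Type[Base], int]:
        # every node type resolves last_updated_at through the same two-part key
        return (node._table, node.id_attr)

    print(last_updated_key(Project(7)))     # (ProjectModel, 7)
    print(last_updated_key(Experiment(3)))  # (ExperimentModel, 3)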
phoenix/server/api/types/Span.py CHANGED
@@ -40,9 +40,6 @@ EMBEDDING_EMBEDDINGS = SpanAttributes.EMBEDDING_EMBEDDINGS
 EMBEDDING_VECTOR = EmbeddingAttributes.EMBEDDING_VECTOR
 INPUT_MIME_TYPE = SpanAttributes.INPUT_MIME_TYPE
 INPUT_VALUE = SpanAttributes.INPUT_VALUE
-LLM_TOKEN_COUNT_COMPLETION = SpanAttributes.LLM_TOKEN_COUNT_COMPLETION
-LLM_TOKEN_COUNT_PROMPT = SpanAttributes.LLM_TOKEN_COUNT_PROMPT
-LLM_TOKEN_COUNT_TOTAL = SpanAttributes.LLM_TOKEN_COUNT_TOTAL
 LLM_PROMPT_TEMPLATE_VARIABLES = SpanAttributes.LLM_PROMPT_TEMPLATE_VARIABLES
 LLM_INPUT_MESSAGES = SpanAttributes.LLM_INPUT_MESSAGES
 LLM_OUTPUT_MESSAGES = SpanAttributes.LLM_OUTPUT_MESSAGES
@@ -194,8 +191,8 @@ class Span(Node):
     ) -> List[SpanAnnotation]:
         span_id = self.id_attr
         annotations = await info.context.data_loaders.span_annotations.load(span_id)
-        sort_key = SpanAnnotationColumn.createdAt.value
-        sort_descending = True
+        sort_key = SpanAnnotationColumn.name.value
+        sort_descending = False
         if sort:
             sort_key = sort.col.value
             sort_descending = sort.dir is SortDir.desc
@@ -308,18 +305,9 @@ def to_gql_span(span: models.Span) -> Span:
         attributes=json.dumps(_hide_embedding_vectors(span.attributes), cls=_JSONEncoder),
         metadata=_convert_metadata_to_string(get_attribute_value(span.attributes, METADATA)),
         num_documents=num_documents,
-        token_count_total=cast(
-            Optional[int],
-            get_attribute_value(span.attributes, LLM_TOKEN_COUNT_TOTAL),
-        ),
-        token_count_prompt=cast(
-            Optional[int],
-            get_attribute_value(span.attributes, LLM_TOKEN_COUNT_PROMPT),
-        ),
-        token_count_completion=cast(
-            Optional[int],
-            get_attribute_value(span.attributes, LLM_TOKEN_COUNT_COMPLETION),
-        ),
+        token_count_total=span.llm_token_count_total,
+        token_count_prompt=span.llm_token_count_prompt,
+        token_count_completion=span.llm_token_count_completion,
         cumulative_token_count_total=span.cumulative_llm_token_count_prompt
         + span.cumulative_llm_token_count_completion,
         cumulative_token_count_prompt=span.cumulative_llm_token_count_prompt,
phoenix/server/app.py CHANGED
@@ -2,7 +2,6 @@ import asyncio
 import contextlib
 import json
 import logging
-from datetime import datetime
 from functools import cached_property
 from pathlib import Path
 from typing import (
@@ -87,9 +86,16 @@ from phoenix.server.api.dataloaders import (
 from phoenix.server.api.routers.v1 import REST_API_VERSION
 from phoenix.server.api.routers.v1 import router as v1_router
 from phoenix.server.api.schema import schema
+from phoenix.server.dml_event import DmlEvent
+from phoenix.server.dml_event_handler import DmlEventHandler
 from phoenix.server.grpc_server import GrpcServer
 from phoenix.server.telemetry import initialize_opentelemetry_tracer_provider
-from phoenix.server.types import DbSessionFactory
+from phoenix.server.types import (
+    CanGetLastUpdatedAt,
+    CanPutItem,
+    DbSessionFactory,
+    LastUpdatedAt,
+)
 from phoenix.trace.schemas import Span
 from phoenix.utilities.client import PHOENIX_SERVER_VERSION_HEADER

@@ -220,6 +226,7 @@ def _lifespan(
     *,
     dialect: SupportedSQLDialect,
     bulk_inserter: BulkInserter,
+    dml_event_handler: DmlEventHandler,
     tracer_provider: Optional["TracerProvider"] = None,
     enable_prometheus: bool = False,
     clean_ups: Iterable[Callable[[], None]] = (),
@@ -239,8 +246,9 @@ def _lifespan(
         disabled=read_only,
         tracer_provider=tracer_provider,
         enable_prometheus=enable_prometheus,
-    ):
+    ), dml_event_handler:
         yield {
+            "event_queue": dml_event_handler,
             "enqueue": enqueue,
             "queue_span_for_bulk_insert": queue_span,
             "queue_evaluation_for_bulk_insert": queue_evaluation,
@@ -263,9 +271,10 @@ def create_graphql_router(
     db: DbSessionFactory,
     model: Model,
     export_path: Path,
+    last_updated_at: CanGetLastUpdatedAt,
     corpus: Optional[Model] = None,
-    streaming_last_updated_at: Callable[[ProjectRowId], Optional[datetime]] = lambda _: None,
     cache_for_dataloaders: Optional[CacheForDataLoaders] = None,
+    event_queue: CanPutItem[DmlEvent],
     read_only: bool = False,
 ) -> GraphQLRouter:  # type: ignore[type-arg]
     def get_context() -> Context:
@@ -274,7 +283,8 @@ def create_graphql_router(
             model=model,
             corpus=corpus,
             export_path=export_path,
-            streaming_last_updated_at=streaming_last_updated_at,
+            last_updated_at=last_updated_at,
+            event_queue=event_queue,
             data_loaders=DataLoaders(
                 average_experiment_run_latency=AverageExperimentRunLatencyDataLoader(db),
                 dataset_example_revisions=DatasetExampleRevisionsDataLoader(db),
@@ -420,11 +430,16 @@ def create_app(
     cache_for_dataloaders = (
         CacheForDataLoaders() if db.dialect is SupportedSQLDialect.SQLITE else None
     )
-
+    last_updated_at = LastUpdatedAt()
+    dml_event_handler = DmlEventHandler(
+        db=db,
+        cache_for_dataloaders=cache_for_dataloaders,
+        last_updated_at=last_updated_at,
+    )
     bulk_inserter = BulkInserter(
         db,
         enable_prometheus=enable_prometheus,
-        cache_for_dataloaders=cache_for_dataloaders,
+        event_queue=dml_event_handler,
         initial_batch_of_spans=initial_batch_of_spans,
         initial_batch_of_evaluations=initial_batch_of_evaluations,
     )
@@ -460,7 +475,8 @@ def create_app(
         model=model,
         corpus=corpus,
         export_path=export_path,
-        streaming_last_updated_at=bulk_inserter.last_updated_at,
+        last_updated_at=last_updated_at,
+        event_queue=dml_event_handler,
         cache_for_dataloaders=cache_for_dataloaders,
         read_only=read_only,
     )
@@ -477,6 +493,7 @@ def create_app(
         dialect=db.dialect,
         read_only=read_only,
         bulk_inserter=bulk_inserter,
+        dml_event_handler=dml_event_handler,
         tracer_provider=tracer_provider,
         enable_prometheus=enable_prometheus,
         clean_ups=clean_ups,
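
The wiring above treats DmlEventHandler as two things at once: a CanPutItem sink handed to the routes and the bulk inserter, and a context manager entered in _lifespan so its consumer runs for the life of the app. A hypothetical sketch of such a dual-role object; the real class lives in phoenix/server/dml_event_handler.py (+272 lines, not shown in this diff) and will differ:

    import asyncio
    from typing import Any, Optional, Protocol, TypeVar

    _ItemT = TypeVar("_ItemT", contravariant=True)

    class CanPutItem(Protocol[_ItemT]):
        def put(self, item: _ItemT) -> None: ...

    class EventHandlerSketch:
        """Hypothetical handler: a put() sink plus a context manager that
        runs a background consumer while the app lifespan is active."""

        def __init__(self) -> None:
            self._queue: "asyncio.Queue[Any]" = asyncio.Queue()
            self._task: Optional["asyncio.Task[None]"] = None

        def put(self, event: Any) -> None:
            self._queue.put_nowait(event)

        async def _consume(self) -> None:
            while True:
                event = await self._queue.get()
                print("handled", event)  # the real handler refreshes caches/timestamps

        def __enter__(self) -> "EventHandlerSketch":
            self._task = asyncio.get_running_loop().create_task(self._consume())
            return self

        def __exit__(self, *exc: Any) -> None:
            if self._task is not None:
                self._task.cancel()

    async def main() -> None:
        with EventHandlerSketch() as handler:
            handler.put("SpanAnnotationInsertEvent((1,))")
            await asyncio.sleep(0)  # yield so the consumer drains the queue

    asyncio.run(main())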
phoenix/server/dml_event.py ADDED
@@ -0,0 +1,136 @@
+from __future__ import annotations
+
+from abc import ABC
+from dataclasses import dataclass, field
+from typing import ClassVar, Tuple, Type
+
+from phoenix.db import models
+
+
+@dataclass(frozen=True)
+class DmlEvent(ABC):
+    """
+    Event corresponding to a Data Manipulation Language (DML)
+    operation, e.g. insertion, update, or deletion.
+    """
+
+    table: ClassVar[Type[models.Base]]
+    ids: Tuple[int, ...] = field(default_factory=tuple)
+
+    def __bool__(self) -> bool:
+        return bool(self.ids)
+
+    def __hash__(self) -> int:
+        return id(self)
+
+
+@dataclass(frozen=True)
+class ProjectDmlEvent(DmlEvent):
+    table = models.Project
+
+
+@dataclass(frozen=True)
+class ProjectDeleteEvent(ProjectDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class SpanDmlEvent(ProjectDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class SpanInsertEvent(SpanDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class SpanDeleteEvent(SpanDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class DatasetDmlEvent(DmlEvent):
+    table = models.Dataset
+
+
+@dataclass(frozen=True)
+class DatasetInsertEvent(DatasetDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class DatasetDeleteEvent(DatasetDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentDmlEvent(DmlEvent):
+    table = models.Experiment
+
+
+@dataclass(frozen=True)
+class ExperimentInsertEvent(ExperimentDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentDeleteEvent(ExperimentDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentRunDmlEvent(DmlEvent):
+    table = models.ExperimentRun
+
+
+@dataclass(frozen=True)
+class ExperimentRunInsertEvent(ExperimentRunDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentRunDeleteEvent(ExperimentRunDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentRunAnnotationDmlEvent(DmlEvent):
+    table = models.ExperimentRunAnnotation
+
+
+@dataclass(frozen=True)
+class ExperimentRunAnnotationInsertEvent(ExperimentRunAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentRunAnnotationDeleteEvent(ExperimentRunAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class SpanAnnotationDmlEvent(DmlEvent):
+    table = models.SpanAnnotation
+
+
+@dataclass(frozen=True)
+class SpanAnnotationInsertEvent(SpanAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class SpanAnnotationDeleteEvent(SpanAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class TraceAnnotationDmlEvent(DmlEvent):
+    table = models.TraceAnnotation
+
+
+@dataclass(frozen=True)
+class TraceAnnotationInsertEvent(TraceAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class TraceAnnotationDeleteEvent(TraceAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class DocumentAnnotationDmlEvent(DmlEvent):
+    table = models.DocumentAnnotation
+
+
+@dataclass(frozen=True)
+class DocumentAnnotationInsertEvent(DocumentAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class DocumentAnnotationDeleteEvent(DocumentAnnotationDmlEvent): ...
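
Two details of the DmlEvent base class are worth calling out: __bool__ makes an event with no ids falsy, so handlers can skip empty events cheaply, and the identity-based __hash__ (which @dataclass preserves because the class defines it explicitly) means two events carrying the same ids still count as distinct occurrences. A condensed, runnable restatement with stand-in model classes:

    from abc import ABC
    from dataclasses import dataclass, field
    from typing import ClassVar, Tuple, Type

    class Base: ...                  # stand-in for models.Base
    class SpanAnnotation(Base): ...  # stand-in for models.SpanAnnotation

    @dataclass(frozen=True)
    class DmlEvent(ABC):
        table: ClassVar[Type[Base]]
        ids: Tuple[int, ...] = field(default_factory=tuple)

        def __bool__(self) -> bool:
            return bool(self.ids)

        def __hash__(self) -> int:
            return id(self)

    @dataclass(frozen=True)
    class SpanAnnotationInsertEvent(DmlEvent):
        table = SpanAnnotation

    empty = SpanAnnotationInsertEvent()
    event = SpanAnnotationInsertEvent((1, 2))
    print(bool(empty), bool(event))  # False True: empty events are cheap to skip
    twin = SpanAnnotationInsertEvent((1, 2))
    print(event == twin, len({event, twin}))  # True 2: equal, yet distinct in a set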