arize-phoenix 4.19.0__py3-none-any.whl → 4.20.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {arize_phoenix-4.19.0.dist-info → arize_phoenix-4.20.1.dist-info}/METADATA +2 -1
- {arize_phoenix-4.19.0.dist-info → arize_phoenix-4.20.1.dist-info}/RECORD +35 -33
- phoenix/db/bulk_inserter.py +24 -98
- phoenix/db/insertion/document_annotation.py +13 -0
- phoenix/db/insertion/span_annotation.py +13 -0
- phoenix/db/insertion/trace_annotation.py +13 -0
- phoenix/db/insertion/types.py +34 -28
- phoenix/server/api/context.py +9 -7
- phoenix/server/api/dataloaders/__init__.py +0 -47
- phoenix/server/api/dataloaders/span_annotations.py +6 -9
- phoenix/server/api/mutations/dataset_mutations.py +44 -4
- phoenix/server/api/mutations/experiment_mutations.py +2 -0
- phoenix/server/api/mutations/project_mutations.py +5 -5
- phoenix/server/api/mutations/span_annotations_mutations.py +10 -2
- phoenix/server/api/mutations/trace_annotations_mutations.py +10 -2
- phoenix/server/api/queries.py +9 -0
- phoenix/server/api/routers/v1/datasets.py +2 -0
- phoenix/server/api/routers/v1/experiment_evaluations.py +2 -0
- phoenix/server/api/routers/v1/experiment_runs.py +2 -0
- phoenix/server/api/routers/v1/experiments.py +2 -0
- phoenix/server/api/routers/v1/spans.py +15 -9
- phoenix/server/api/routers/v1/traces.py +15 -11
- phoenix/server/api/types/Dataset.py +6 -1
- phoenix/server/api/types/Experiment.py +6 -1
- phoenix/server/api/types/Project.py +4 -1
- phoenix/server/api/types/Span.py +14 -13
- phoenix/server/app.py +25 -8
- phoenix/server/dml_event.py +136 -0
- phoenix/server/dml_event_handler.py +272 -0
- phoenix/server/types.py +106 -1
- phoenix/session/client.py +2 -2
- phoenix/version.py +1 -1
- {arize_phoenix-4.19.0.dist-info → arize_phoenix-4.20.1.dist-info}/WHEEL +0 -0
- {arize_phoenix-4.19.0.dist-info → arize_phoenix-4.20.1.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-4.19.0.dist-info → arize_phoenix-4.20.1.dist-info}/licenses/LICENSE +0 -0
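
Taken together, the hunks below introduce a DML event system: REST routes and GraphQL mutations publish typed events (the new phoenix/server/dml_event.py) to a DmlEventHandler (the new phoenix/server/dml_event_handler.py), which the app wires in both as a lifespan context manager and as the request-state event_queue. The Dataset, Experiment, and Project GraphQL types gain (or re-route) a last_updated_at field backed by a LastUpdatedAt store in phoenix/server/types.py, several v1 routes toggle include_in_schema, and the span and trace annotation endpoints now short-circuit empty payloads.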
phoenix/server/api/routers/v1/spans.py
CHANGED

@@ -16,6 +16,7 @@ from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
 from phoenix.db.insertion.types import Precursors
 from phoenix.server.api.routers.utils import df_to_bytes
+from phoenix.server.dml_event import SpanAnnotationInsertEvent
 from phoenix.trace.dsl import SpanQuery as SpanQuery_

 from .pydantic_compat import V1RoutesBaseModel
@@ -23,7 +24,7 @@ from .utils import RequestBody, ResponseBody, add_errors_to_responses

 DEFAULT_SPAN_LIMIT = 1000

-router = APIRouter(tags=["spans"], include_in_schema=False)
+router = APIRouter(tags=["spans"])


 class SpanQuery(V1RoutesBaseModel):
@@ -65,6 +66,7 @@ class QuerySpansRequestBody(V1RoutesBaseModel):
     operation_id="querySpans",
     summary="Query spans with query DSL",
     responses=add_errors_to_responses([HTTP_404_NOT_FOUND, HTTP_422_UNPROCESSABLE_ENTITY]),
+    include_in_schema=False,
 )
 async def query_spans_handler(
     request: Request,
@@ -189,12 +191,15 @@ class AnnotateSpansResponseBody(ResponseBody[List[InsertedSpanAnnotation]]):
         [{"status_code": HTTP_404_NOT_FOUND, "description": "Span not found"}]
     ),
     response_description="Span annotations inserted successfully",
+    include_in_schema=True,
 )
 async def annotate_spans(
     request: Request,
     request_body: AnnotateSpansRequestBody,
     sync: bool = Query(default=True, description="If true, fulfill request synchronously."),
 ) -> AnnotateSpansResponseBody:
+    if not request_body.data:
+        return AnnotateSpansResponseBody(data=[])
     precursors = [d.as_precursor() for d in request_body.data]
     if not sync:
         await request.state.enqueue(*precursors)
@@ -215,9 +220,7 @@ async def annotate_spans(
                 detail=f"Spans with IDs {', '.join(missing_span_ids)} do not exist.",
                 status_code=HTTP_404_NOT_FOUND,
             )
-
-        inserted_annotations = []
-
+        inserted_ids = []
         dialect = SupportedSQLDialect(session.bind.dialect.name)
         for p in precursors:
             values = dict(as_kv(p.as_insertable(existing_spans[p.span_id]).row))
@@ -229,8 +232,11 @@ async def annotate_spans(
                     unique_by=("name", "span_rowid"),
                 ).returning(models.SpanAnnotation.id)
             )
-            inserted_annotations.append(
-                InsertedSpanAnnotation(id=str(GlobalID("SpanAnnotation", str(span_annotation_id))))
-            )
-
-    return AnnotateSpansResponseBody(data=inserted_annotations)
+            inserted_ids.append(span_annotation_id)
+    request.state.event_queue.put(SpanAnnotationInsertEvent(tuple(inserted_ids)))
+    return AnnotateSpansResponseBody(
+        data=[
+            InsertedSpanAnnotation(id=str(GlobalID("SpanAnnotation", str(id_))))
+            for id_ in inserted_ids
+        ]
+    )
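
Two behavioral changes in annotate_spans stand out: an empty data payload now returns early before any database work, and the inserted row ids are batched into a single SpanAnnotationInsertEvent put on request.state.event_queue. The response wraps each row id in a Relay global ID; a small illustration of that last step (the ids here are made up):

from strawberry.relay import GlobalID

inserted_ids = [101, 102]  # hypothetical SpanAnnotation row ids
# Mirrors the list comprehension in the new return statement:
data = [str(GlobalID("SpanAnnotation", str(id_))) for id_ in inserted_ids]
# Each element is an opaque Relay ID string encoding the type name and row id.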
phoenix/server/api/routers/v1/traces.py
CHANGED

@@ -24,13 +24,14 @@ from phoenix.db import models
 from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
 from phoenix.db.insertion.types import Precursors
+from phoenix.server.dml_event import TraceAnnotationInsertEvent
 from phoenix.trace.otel import decode_otlp_span
 from phoenix.utilities.project import get_project_name

 from .pydantic_compat import V1RoutesBaseModel
 from .utils import RequestBody, ResponseBody, add_errors_to_responses

-router = APIRouter(tags=["traces"], include_in_schema=False)
+router = APIRouter(tags=["traces"])


 @router.post(
@@ -57,6 +58,7 @@ router = APIRouter(tags=["traces"], include_in_schema=False)
             },
         }
     },
+    include_in_schema=False,
 )
 async def post_traces(
     request: Request,
@@ -145,12 +147,15 @@ class AnnotateTracesResponseBody(ResponseBody[List[InsertedTraceAnnotation]]):
     responses=add_errors_to_responses(
         [{"status_code": HTTP_404_NOT_FOUND, "description": "Trace not found"}]
     ),
+    include_in_schema=False,
 )
 async def annotate_traces(
     request: Request,
     request_body: AnnotateTracesRequestBody,
     sync: bool = Query(default=True, description="If true, fulfill request synchronously."),
 ) -> AnnotateTracesResponseBody:
+    if not request_body.data:
+        return AnnotateTracesResponseBody(data=[])
     precursors = [d.as_precursor() for d in request_body.data]
     if not sync:
         await request.state.enqueue(*precursors)
@@ -171,9 +176,7 @@ async def annotate_traces(
                 detail=f"Traces with IDs {', '.join(missing_trace_ids)} do not exist.",
                 status_code=HTTP_404_NOT_FOUND,
             )
-
-        inserted_annotations = []
-
+        inserted_ids = []
         dialect = SupportedSQLDialect(session.bind.dialect.name)
         for p in precursors:
             values = dict(as_kv(p.as_insertable(existing_traces[p.trace_id]).row))
@@ -185,13 +188,14 @@ async def annotate_traces(
                     unique_by=("name", "trace_rowid"),
                 ).returning(models.TraceAnnotation.id)
             )
-            inserted_annotations.append(
-                InsertedTraceAnnotation(
-                    id=str(GlobalID("TraceAnnotation", str(trace_annotation_id)))
-                )
-            )
-
-    return AnnotateTracesResponseBody(data=inserted_annotations)
+            inserted_ids.append(trace_annotation_id)
+    request.state.event_queue.put(TraceAnnotationInsertEvent(tuple(inserted_ids)))
+    return AnnotateTracesResponseBody(
+        data=[
+            InsertedTraceAnnotation(id=str(GlobalID("TraceAnnotation", str(id_))))
+            for id_ in inserted_ids
+        ]
+    )


 async def _add_spans(req: ExportTraceServiceRequest, state: State) -> None:
phoenix/server/api/types/Dataset.py
CHANGED

@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import AsyncIterable, List, Optional, Tuple, cast
+from typing import AsyncIterable, ClassVar, List, Optional, Tuple, Type, cast

 import strawberry
 from sqlalchemy import and_, func, select
@@ -27,6 +27,7 @@ from phoenix.server.api.types.SortDir import SortDir

 @strawberry.type
 class Dataset(Node):
+    _table: ClassVar[Type[models.Base]] = models.Experiment
     id_attr: NodeID[int]
     name: str
     description: Optional[str]
@@ -284,6 +285,10 @@
             ) in await session.stream(query)
         ]

+    @strawberry.field
+    def last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(self._table, self.id_attr)
+

 def to_gql_dataset(dataset: models.Dataset) -> Dataset:
     """
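
The new last_updated_at resolvers read from info.context.last_updated_at, a LastUpdatedAt store defined in phoenix/server/types.py, which this diff does not show. A hypothetical sketch of the interface the resolvers rely on, assuming timestamps are kept per (table, row id):

from datetime import datetime, timezone
from typing import Dict, Optional, Tuple

class LastUpdatedAtSketch:
    """Hypothetical stand-in for phoenix.server.types.LastUpdatedAt."""

    def __init__(self) -> None:
        self._stamps: Dict[Tuple[type, int], datetime] = {}

    def update(self, table: type, ids: Tuple[int, ...]) -> None:
        # Presumably called by the DmlEventHandler as events arrive.
        now = datetime.now(timezone.utc)
        for id_ in ids:
            self._stamps[(table, id_)] = now

    def get(self, table: type, id_: int) -> Optional[datetime]:
        # Matches the resolver call: info.context.last_updated_at.get(self._table, self.id_attr)
        return self._stamps.get((table, id_))

One detail worth flagging: the Dataset type above sets _table to models.Experiment, so a dataset's timestamp apparently tracks activity on its experiments rather than on the datasets table itself.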
phoenix/server/api/types/Experiment.py
CHANGED

@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import List, Optional
+from typing import ClassVar, List, Optional, Type

 import strawberry
 from sqlalchemy import select
@@ -23,6 +23,7 @@ from phoenix.server.api.types.Project import Project

 @strawberry.type
 class Experiment(Node):
+    _table: ClassVar[Type[models.Base]] = models.Experiment
     cached_sequence_number: Private[Optional[int]] = None
     id_attr: NodeID[int]
     name: str
@@ -127,6 +128,10 @@
             gradient_end_color=db_project.gradient_end_color,
         )

+    @strawberry.field
+    def last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(self._table, self.id_attr)
+

 def to_gql_experiment(
     experiment: models.Experiment,
phoenix/server/api/types/Project.py
CHANGED

@@ -2,8 +2,10 @@ import operator
 from datetime import datetime
 from typing import (
     Any,
+    ClassVar,
     List,
     Optional,
+    Type,
 )

 import strawberry
@@ -38,6 +40,7 @@ from phoenix.trace.dsl import SpanFilter

 @strawberry.type
 class Project(Node):
+    _table: ClassVar[Type[models.Base]] = models.Project
     id_attr: NodeID[int]
     name: str
     gradient_start_color: str
@@ -397,7 +400,7 @@
         self,
         info: Info[Context, None],
     ) -> Optional[datetime]:
-        return info.context.streaming_last_updated_at(self.id_attr)
+        return info.context.last_updated_at.get(self._table, self.id_attr)

     @strawberry.field
     async def validate_span_filter_condition(self, condition: str) -> ValidationResult:
phoenix/server/api/types/Span.py
CHANGED

@@ -7,7 +7,6 @@ from typing import TYPE_CHECKING, Any, List, Mapping, Optional, Sized, cast
 import numpy as np
 import strawberry
 from openinference.semconv.trace import EmbeddingAttributes, SpanAttributes
-from sqlalchemy import select
 from strawberry import ID, UNSET
 from strawberry.relay import Node, NodeID
 from strawberry.types import Info
@@ -20,7 +19,10 @@ from phoenix.server.api.helpers.dataset_helpers import (
     get_dataset_example_input,
     get_dataset_example_output,
 )
-from phoenix.server.api.input_types.SpanAnnotationSort import SpanAnnotationSort
+from phoenix.server.api.input_types.SpanAnnotationSort import (
+    SpanAnnotationColumn,
+    SpanAnnotationSort,
+)
 from phoenix.server.api.types.SortDir import SortDir
 from phoenix.server.api.types.SpanAnnotation import to_gql_span_annotation
 from phoenix.trace.attributes import get_attribute_value
@@ -190,17 +192,16 @@ class Span(Node):
         info: Info[Context, None],
         sort: Optional[SpanAnnotationSort] = UNSET,
     ) -> List[SpanAnnotation]:
-
-
-
-
-
-
-
-
-
-
-            annotations = await session.scalars(stmt)
+        span_id = self.id_attr
+        annotations = await info.context.data_loaders.span_annotations.load(span_id)
+        sort_key = SpanAnnotationColumn.name.value
+        sort_descending = False
+        if sort:
+            sort_key = sort.col.value
+            sort_descending = sort.dir is SortDir.desc
+        annotations.sort(
+            key=lambda annotation: getattr(annotation, sort_key), reverse=sort_descending
+        )
         return [to_gql_span_annotation(annotation) for annotation in annotations]

 @strawberry.field(
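span_annotations no longer issues its own SQL (the sqlalchemy select import is dropped, and the removed query-building lines above did not survive this diff view); each span's annotations now come from the batched span_annotations dataloader and are ordered in Python with a plain attribute sort. A self-contained illustration of that sort, using a stand-in class for the loaded rows:

from dataclasses import dataclass
from typing import List

@dataclass
class FakeAnnotation:  # stand-in for the rows the dataloader returns
    name: str
    score: float

annotations: List[FakeAnnotation] = [
    FakeAnnotation("relevance", 0.2),
    FakeAnnotation("accuracy", 0.9),
]
sort_key, sort_descending = "name", False  # defaults when no sort argument is given
annotations.sort(key=lambda a: getattr(a, sort_key), reverse=sort_descending)
assert [a.name for a in annotations] == ["accuracy", "relevance"]
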
phoenix/server/app.py
CHANGED

@@ -2,7 +2,6 @@ import asyncio
 import contextlib
 import json
 import logging
-from datetime import datetime
 from functools import cached_property
 from pathlib import Path
 from typing import (
@@ -87,9 +86,16 @@ from phoenix.server.api.dataloaders import (
 from phoenix.server.api.routers.v1 import REST_API_VERSION
 from phoenix.server.api.routers.v1 import router as v1_router
 from phoenix.server.api.schema import schema
+from phoenix.server.dml_event import DmlEvent
+from phoenix.server.dml_event_handler import DmlEventHandler
 from phoenix.server.grpc_server import GrpcServer
 from phoenix.server.telemetry import initialize_opentelemetry_tracer_provider
-from phoenix.server.types import DbSessionFactory
+from phoenix.server.types import (
+    CanGetLastUpdatedAt,
+    CanPutItem,
+    DbSessionFactory,
+    LastUpdatedAt,
+)
 from phoenix.trace.schemas import Span
 from phoenix.utilities.client import PHOENIX_SERVER_VERSION_HEADER

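CanGetLastUpdatedAt and CanPutItem also live in phoenix/server/types.py, which this diff doesn't show; judging by how create_graphql_router uses them below, they are presumably structural types that let the router depend on narrow capabilities of DmlEventHandler rather than on the concrete class. A plausible sketch with typing.Protocol:

from datetime import datetime
from typing import Optional, Protocol, TypeVar

_ItemT = TypeVar("_ItemT", contravariant=True)

class CanPutItem(Protocol[_ItemT]):
    """Anything with put(), e.g. the DmlEventHandler used as event_queue."""

    def put(self, item: _ItemT) -> None: ...

class CanGetLastUpdatedAt(Protocol):
    """Anything that answers "when was this row last touched?"."""

    def get(self, table: type, id_: int) -> Optional[datetime]: ...
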
@@ -220,6 +226,7 @@ def _lifespan(
     *,
     dialect: SupportedSQLDialect,
     bulk_inserter: BulkInserter,
+    dml_event_handler: DmlEventHandler,
     tracer_provider: Optional["TracerProvider"] = None,
     enable_prometheus: bool = False,
     clean_ups: Iterable[Callable[[], None]] = (),
@@ -239,8 +246,9 @@
         disabled=read_only,
         tracer_provider=tracer_provider,
         enable_prometheus=enable_prometheus,
-    ):
+    ), dml_event_handler:
         yield {
+            "event_queue": dml_event_handler,
            "enqueue": enqueue,
            "queue_span_for_bulk_insert": queue_span,
            "queue_evaluation_for_bulk_insert": queue_evaluation,
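The lifespan now enters dml_event_handler as a second async context manager and hands the same object to request handlers as state "event_queue"; routes only ever call .put() on it. The real implementation is the new 272-line phoenix/server/dml_event_handler.py, which this diff doesn't display; a rough sketch of the shape implied here, assuming a background task drains the queue:

import asyncio
from typing import Any, Optional

class DmlEventHandlerSketch:
    """Hypothetical minimal shape implied by app.py: an async context
    manager that accepts events via put() and processes them off-line."""

    def __init__(self) -> None:
        self._queue: "asyncio.Queue[Any]" = asyncio.Queue()
        self._task: Optional["asyncio.Task[None]"] = None

    def put(self, event: Any) -> None:
        self._queue.put_nowait(event)

    async def __aenter__(self) -> "DmlEventHandlerSketch":
        self._task = asyncio.create_task(self._drain())
        return self

    async def __aexit__(self, *exc: Any) -> None:
        if self._task is not None:
            self._task.cancel()

    async def _drain(self) -> None:
        while True:
            event = await self._queue.get()
            # e.g. bump LastUpdatedAt stamps and invalidate dataloader caches
            _ = event
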
@@ -263,9 +271,10 @@ def create_graphql_router(
     db: DbSessionFactory,
     model: Model,
     export_path: Path,
+    last_updated_at: CanGetLastUpdatedAt,
     corpus: Optional[Model] = None,
-    streaming_last_updated_at: Callable[[ProjectRowId], Optional[datetime]] = lambda _: None,
     cache_for_dataloaders: Optional[CacheForDataLoaders] = None,
+    event_queue: CanPutItem[DmlEvent],
     read_only: bool = False,
 ) -> GraphQLRouter:  # type: ignore[type-arg]
     def get_context() -> Context:
@@ -274,7 +283,8 @@
             model=model,
             corpus=corpus,
             export_path=export_path,
-            streaming_last_updated_at=streaming_last_updated_at,
+            last_updated_at=last_updated_at,
+            event_queue=event_queue,
             data_loaders=DataLoaders(
                 average_experiment_run_latency=AverageExperimentRunLatencyDataLoader(db),
                 dataset_example_revisions=DatasetExampleRevisionsDataLoader(db),
@@ -420,11 +430,16 @@ def create_app(
     cache_for_dataloaders = (
         CacheForDataLoaders() if db.dialect is SupportedSQLDialect.SQLITE else None
     )
-
+    last_updated_at = LastUpdatedAt()
+    dml_event_handler = DmlEventHandler(
+        db=db,
+        cache_for_dataloaders=cache_for_dataloaders,
+        last_updated_at=last_updated_at,
+    )
     bulk_inserter = BulkInserter(
         db,
         enable_prometheus=enable_prometheus,
-
+        event_queue=dml_event_handler,
         initial_batch_of_spans=initial_batch_of_spans,
         initial_batch_of_evaluations=initial_batch_of_evaluations,
     )
@@ -460,7 +475,8 @@
         model=model,
         corpus=corpus,
         export_path=export_path,
-
+        last_updated_at=last_updated_at,
+        event_queue=dml_event_handler,
         cache_for_dataloaders=cache_for_dataloaders,
         read_only=read_only,
     )
@@ -477,6 +493,7 @@
         dialect=db.dialect,
         read_only=read_only,
         bulk_inserter=bulk_inserter,
+        dml_event_handler=dml_event_handler,
         tracer_provider=tracer_provider,
         enable_prometheus=enable_prometheus,
         clean_ups=clean_ups,
phoenix/server/dml_event.py
ADDED

@@ -0,0 +1,136 @@
+from __future__ import annotations
+
+from abc import ABC
+from dataclasses import dataclass, field
+from typing import ClassVar, Tuple, Type
+
+from phoenix.db import models
+
+
+@dataclass(frozen=True)
+class DmlEvent(ABC):
+    """
+    Event corresponding to a Data Manipulation Language (DML)
+    operation, e.g. insertion, update, or deletion.
+    """
+
+    table: ClassVar[Type[models.Base]]
+    ids: Tuple[int, ...] = field(default_factory=tuple)
+
+    def __bool__(self) -> bool:
+        return bool(self.ids)
+
+    def __hash__(self) -> int:
+        return id(self)
+
+
+@dataclass(frozen=True)
+class ProjectDmlEvent(DmlEvent):
+    table = models.Project
+
+
+@dataclass(frozen=True)
+class ProjectDeleteEvent(ProjectDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class SpanDmlEvent(ProjectDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class SpanInsertEvent(SpanDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class SpanDeleteEvent(SpanDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class DatasetDmlEvent(DmlEvent):
+    table = models.Dataset
+
+
+@dataclass(frozen=True)
+class DatasetInsertEvent(DatasetDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class DatasetDeleteEvent(DatasetDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentDmlEvent(DmlEvent):
+    table = models.Experiment
+
+
+@dataclass(frozen=True)
+class ExperimentInsertEvent(ExperimentDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentDeleteEvent(ExperimentDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentRunDmlEvent(DmlEvent):
+    table = models.ExperimentRun
+
+
+@dataclass(frozen=True)
+class ExperimentRunInsertEvent(ExperimentRunDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentRunDeleteEvent(ExperimentRunDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentRunAnnotationDmlEvent(DmlEvent):
+    table = models.ExperimentRunAnnotation
+
+
+@dataclass(frozen=True)
+class ExperimentRunAnnotationInsertEvent(ExperimentRunAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class ExperimentRunAnnotationDeleteEvent(ExperimentRunAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class SpanAnnotationDmlEvent(DmlEvent):
+    table = models.SpanAnnotation
+
+
+@dataclass(frozen=True)
+class SpanAnnotationInsertEvent(SpanAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class SpanAnnotationDeleteEvent(SpanAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class TraceAnnotationDmlEvent(DmlEvent):
+    table = models.TraceAnnotation
+
+
+@dataclass(frozen=True)
+class TraceAnnotationInsertEvent(TraceAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class TraceAnnotationDeleteEvent(TraceAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class DocumentAnnotationDmlEvent(DmlEvent):
+    table = models.DocumentAnnotation
+
+
+@dataclass(frozen=True)
+class DocumentAnnotationInsertEvent(DocumentAnnotationDmlEvent): ...
+
+
+@dataclass(frozen=True)
+class DocumentAnnotationDeleteEvent(DocumentAnnotationDmlEvent): ...