arize-phoenix 4.19.0__py3-none-any.whl → 4.20.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arize-phoenix might be problematic.
- {arize_phoenix-4.19.0.dist-info → arize_phoenix-4.20.1.dist-info}/METADATA +2 -1
- {arize_phoenix-4.19.0.dist-info → arize_phoenix-4.20.1.dist-info}/RECORD +35 -33
- phoenix/db/bulk_inserter.py +24 -98
- phoenix/db/insertion/document_annotation.py +13 -0
- phoenix/db/insertion/span_annotation.py +13 -0
- phoenix/db/insertion/trace_annotation.py +13 -0
- phoenix/db/insertion/types.py +34 -28
- phoenix/server/api/context.py +9 -7
- phoenix/server/api/dataloaders/__init__.py +0 -47
- phoenix/server/api/dataloaders/span_annotations.py +6 -9
- phoenix/server/api/mutations/dataset_mutations.py +44 -4
- phoenix/server/api/mutations/experiment_mutations.py +2 -0
- phoenix/server/api/mutations/project_mutations.py +5 -5
- phoenix/server/api/mutations/span_annotations_mutations.py +10 -2
- phoenix/server/api/mutations/trace_annotations_mutations.py +10 -2
- phoenix/server/api/queries.py +9 -0
- phoenix/server/api/routers/v1/datasets.py +2 -0
- phoenix/server/api/routers/v1/experiment_evaluations.py +2 -0
- phoenix/server/api/routers/v1/experiment_runs.py +2 -0
- phoenix/server/api/routers/v1/experiments.py +2 -0
- phoenix/server/api/routers/v1/spans.py +15 -9
- phoenix/server/api/routers/v1/traces.py +15 -11
- phoenix/server/api/types/Dataset.py +6 -1
- phoenix/server/api/types/Experiment.py +6 -1
- phoenix/server/api/types/Project.py +4 -1
- phoenix/server/api/types/Span.py +14 -13
- phoenix/server/app.py +25 -8
- phoenix/server/dml_event.py +136 -0
- phoenix/server/dml_event_handler.py +272 -0
- phoenix/server/types.py +106 -1
- phoenix/session/client.py +2 -2
- phoenix/version.py +1 -1
- {arize_phoenix-4.19.0.dist-info → arize_phoenix-4.20.1.dist-info}/WHEEL +0 -0
- {arize_phoenix-4.19.0.dist-info → arize_phoenix-4.20.1.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-4.19.0.dist-info → arize_phoenix-4.20.1.dist-info}/licenses/LICENSE +0 -0
phoenix/db/insertion/types.py
CHANGED
@@ -21,11 +21,12 @@ from typing import (
 )
 
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.sql.dml import
+from sqlalchemy.sql.dml import Insert
 
 from phoenix.db import models
 from phoenix.db.insertion.constants import DEFAULT_RETRY_ALLOWANCE, DEFAULT_RETRY_DELAY_SEC
-from phoenix.db.insertion.helpers import
+from phoenix.db.insertion.helpers import insert_on_conflict
+from phoenix.server.dml_event import DmlEvent
 from phoenix.server.types import DbSessionFactory
 
 logger = logging.getLogger("__name__")
@@ -40,6 +41,7 @@ _AnyT = TypeVar("_AnyT")
 _PrecursorT = TypeVar("_PrecursorT")
 _InsertableT = TypeVar("_InsertableT", bound=Insertable)
 _RowT = TypeVar("_RowT", bound=models.Base)
+_DmlEventT = TypeVar("_DmlEventT", bound=DmlEvent)
 
 
 @dataclass(frozen=True)
@@ -56,7 +58,7 @@ class Postponed(Received[_AnyT]):
     retries_left: int = field(default=DEFAULT_RETRY_ALLOWANCE)
 
 
-class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT]):
+class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT, _DmlEventT]):
     table: Type[_RowT]
     unique_by: Sequence[str]
 
@@ -97,59 +99,63 @@ class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT]):
         List[Received[_PrecursorT]],
     ]: ...
 
-    async def insert(self) ->
+    async def insert(self) -> Optional[List[_DmlEventT]]:
         if not self._queue:
-            return
-        parcels = self._queue
-
-        inserted_ids: List[int] = []
+            return None
+        self._queue, parcels = [], self._queue
+        events: List[_DmlEventT] = []
         async with self._db() as session:
             to_insert, to_postpone, _ = await self._partition(session, *parcels)
             if to_insert:
-
-
+                events, to_retry, _ = await self._insert(session, *to_insert)
+                if to_retry:
+                    to_postpone.extend(to_retry)
             if to_postpone:
                 loop = asyncio.get_running_loop()
                 loop.call_later(self._retry_delay_sec, self._queue.extend, to_postpone)
-        return
+        return events
 
-    def
-        pk = next(c for c in self.table.__table__.c if c.primary_key)
+    def _insert_on_conflict(self, *records: Mapping[str, Any]) -> Insert:
         return insert_on_conflict(
             *records,
             table=self.table,
             unique_by=self.unique_by,
             dialect=self._db.dialect,
-        )
+        )
+
+    @abstractmethod
+    async def _events(
+        self,
+        session: AsyncSession,
+        *insertions: _InsertableT,
+    ) -> List[_DmlEventT]: ...
 
     async def _insert(
         self,
         session: AsyncSession,
-        *
-    ) -> Tuple[
-
-
+        *parcels: Received[_InsertableT],
+    ) -> Tuple[
+        List[_DmlEventT],
+        List[Postponed[_PrecursorT]],
+        List[Received[_InsertableT]],
+    ]:
        to_retry: List[Postponed[_PrecursorT]] = []
        failures: List[Received[_InsertableT]] = []
-
+        events: List[_DmlEventT] = []
        try:
            async with session.begin_nested():
-
-                inserted_ids.extend(ids)
+                events.extend(await self._events(session, *(p.item for p in parcels)))
        except BaseException:
            logger.exception(
                f"Failed to bulk insert for {self.table.__name__}. "
-                f"Will try to insert ({len(
+                f"Will try to insert ({len(parcels)} records) individually instead."
            )
-            for
-                stmt = self._stmt(record)
+            for p in parcels:
                try:
                    async with session.begin_nested():
-
-                        inserted_ids.extend(ids)
+                        events.extend(await self._events(session, p.item))
                except BaseException:
                    logger.exception(f"Failed to insert for {self.table.__name__}.")
-                    p = insertions[i]
                    if isinstance(p, Postponed) and p.retries_left == 1:
                        failures.append(p)
                    else:
@@ -162,7 +168,7 @@ class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT]):
                         else self._retry_allowance,
                     )
                 )
-        return
+        return events, to_retry, failures
 
 
 class Precursors(ABC):
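Note: QueueInserter now takes a fourth type parameter and returns typed DmlEvents instead of raw row ids. A minimal sketch (not the actual phoenix source) of how a concrete subclass such as the one in phoenix/db/insertion/span_annotation.py (+13 lines) might satisfy the new abstract `_events` hook; the precursor/insertable types, uniqueness key, and record shape are assumptions:

from typing import Any, List

from sqlalchemy.ext.asyncio import AsyncSession

from phoenix.db import models
from phoenix.server.dml_event import SpanAnnotationInsertEvent


class SpanAnnotationQueueInserter(
    QueueInserter[
        Any,                        # _PrecursorT (actual precursor type assumed)
        Any,                        # _InsertableT (actual insertable type assumed)
        models.SpanAnnotation,      # _RowT
        SpanAnnotationInsertEvent,  # _DmlEventT
    ]
):
    table = models.SpanAnnotation
    unique_by = ("name", "span_rowid")  # assumed uniqueness key

    async def _events(
        self,
        session: AsyncSession,
        *insertions: Any,
    ) -> List[SpanAnnotationInsertEvent]:
        # Insert rows via the ON CONFLICT statement builder from the diff
        # above, then wrap the returned primary keys in one event so the
        # server's DML event handler can invalidate the affected caches.
        records = [insertion.row for insertion in insertions]  # assumed shape
        stmt = self._insert_on_conflict(*records).returning(self.table.id)
        ids = (await session.scalars(stmt)).all()
        return [SpanAnnotationInsertEvent(tuple(ids))] if ids else []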
phoenix/server/api/context.py
CHANGED
@@ -1,10 +1,8 @@
 from dataclasses import dataclass
-from datetime import datetime
 from pathlib import Path
-from typing import
+from typing import Any, Optional
 
 from strawberry.fastapi import BaseContext
-from typing_extensions import TypeAlias
 
 from phoenix.core.model_schema import Model
 from phoenix.server.api.dataloaders import (
@@ -34,7 +32,8 @@ from phoenix.server.api.dataloaders import (
     TraceEvaluationsDataLoader,
     TraceRowIdsDataLoader,
 )
-from phoenix.server.
+from phoenix.server.dml_event import DmlEvent
+from phoenix.server.types import CanGetLastUpdatedAt, CanPutItem, DbSessionFactory
 
 
 @dataclass
@@ -54,6 +53,7 @@ class DataLoaders:
     latency_ms_quantile: LatencyMsQuantileDataLoader
     min_start_or_max_end_times: MinStartOrMaxEndTimeDataLoader
     record_counts: RecordCountDataLoader
+    span_annotations: SpanAnnotationsDataLoader
     span_dataset_examples: SpanDatasetExamplesDataLoader
     span_descendants: SpanDescendantsDataLoader
     span_evaluations: SpanEvaluationsDataLoader
@@ -62,10 +62,11 @@ class DataLoaders:
     trace_evaluations: TraceEvaluationsDataLoader
     trace_row_ids: TraceRowIdsDataLoader
     project_by_name: ProjectByNameDataLoader
-    span_annotations: SpanAnnotationsDataLoader
 
 
-
+class _NoOp:
+    def get(self, *args: Any, **kwargs: Any) -> Any: ...
+    def put(self, *args: Any, **kwargs: Any) -> Any: ...
 
 
 @dataclass
@@ -75,6 +76,7 @@ class Context(BaseContext):
     cache_for_dataloaders: Optional[CacheForDataLoaders]
     model: Model
     export_path: Path
+    last_updated_at: CanGetLastUpdatedAt = _NoOp()
+    event_queue: CanPutItem[DmlEvent] = _NoOp()
     corpus: Optional[Model] = None
-    streaming_last_updated_at: Callable[[ProjectRowId], Optional[datetime]] = lambda _: None
     read_only: bool = False
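A hedged sketch (assumed, not the actual phoenix source) of the two protocols imported above from phoenix/server/types.py (+106 lines in this release). The `_NoOp` default satisfies both structurally, so a Context constructed without a running server still type-checks and simply does nothing:

from datetime import datetime
from typing import Optional, Protocol, Type, TypeVar

from phoenix.db import models

_ItemT_contra = TypeVar("_ItemT_contra", contravariant=True)


class CanGetLastUpdatedAt(Protocol):
    # Returns the last time rows of the given table were modified, if known.
    def get(self, table: Type[models.Base]) -> Optional[datetime]: ...


class CanPutItem(Protocol[_ItemT_contra]):
    # Accepts an item (here a DmlEvent) for asynchronous processing.
    def put(self, item: _ItemT_contra) -> None: ...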
phoenix/server/api/dataloaders/__init__.py
CHANGED
@@ -1,12 +1,4 @@
 from dataclasses import dataclass, field
-from functools import singledispatchmethod
-
-from phoenix.db.insertion.evaluation import (
-    DocumentEvaluationInsertionEvent,
-    SpanEvaluationInsertionEvent,
-    TraceEvaluationInsertionEvent,
-)
-from phoenix.db.insertion.span import ClearProjectSpansEvent, SpanInsertionEvent
 
 from .annotation_summaries import AnnotationSummaryCache, AnnotationSummaryDataLoader
 from .average_experiment_run_latency import AverageExperimentRunLatencyDataLoader
@@ -88,42 +80,3 @@ class CacheForDataLoaders:
     token_count: TokenCountCache = field(
         default_factory=TokenCountCache,
     )
-
-    def _update_spans(self, project_rowid: int) -> None:
-        self.latency_ms_quantile.invalidate(project_rowid)
-        self.token_count.invalidate(project_rowid)
-        self.record_count.invalidate(project_rowid)
-        self.min_start_or_max_end_time.invalidate(project_rowid)
-
-    def _clear_spans(self, project_rowid: int) -> None:
-        self._update_spans(project_rowid)
-        self.annotation_summary.invalidate_project(project_rowid)
-        self.evaluation_summary.invalidate_project(project_rowid)
-        self.document_evaluation_summary.invalidate_project(project_rowid)
-
-    @singledispatchmethod
-    def invalidate(self, event: SpanInsertionEvent) -> None:
-        project_rowid, *_ = event
-        self._update_spans(project_rowid)
-
-    @invalidate.register
-    def _(self, event: ClearProjectSpansEvent) -> None:
-        project_rowid, *_ = event
-        self._clear_spans(project_rowid)
-
-    @invalidate.register
-    def _(self, event: DocumentEvaluationInsertionEvent) -> None:
-        project_rowid, evaluation_name = event
-        self.document_evaluation_summary.invalidate((project_rowid, evaluation_name))
-
-    @invalidate.register
-    def _(self, event: SpanEvaluationInsertionEvent) -> None:
-        project_rowid, evaluation_name = event
-        self.annotation_summary.invalidate((project_rowid, evaluation_name, "span"))
-        self.evaluation_summary.invalidate((project_rowid, evaluation_name, "span"))
-
-    @invalidate.register
-    def _(self, event: TraceEvaluationInsertionEvent) -> None:
-        project_rowid, evaluation_name = event
-        self.annotation_summary.invalidate((project_rowid, evaluation_name, "trace"))
-        self.evaluation_summary.invalidate((project_rowid, evaluation_name, "trace"))
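The cache-invalidation logic removed here is superseded by the new phoenix/server/dml_event_handler.py (+272 lines), which consumes DmlEvents from the server's event queue. A rough sketch of that division of labor under stated assumptions; apart from CacheForDataLoaders and the event classes, the names here are guesses, not the shipped API:

from functools import singledispatchmethod

from phoenix.server.dml_event import DmlEvent, SpanDeleteEvent


class DmlEventHandler:  # assumed class name
    def __init__(self, cache: CacheForDataLoaders) -> None:
        self._cache = cache

    @singledispatchmethod
    def handle(self, event: DmlEvent) -> None:
        pass  # events with no cache impact are ignored

    @handle.register
    def _(self, event: SpanDeleteEvent) -> None:
        # SpanDeleteEvent carries project rowids (see project_mutations.py
        # below); invalidate the per-project caches, mirroring the removed
        # _update_spans/_clear_spans methods.
        for project_rowid in event.ids:  # assumed attribute
            self._cache.latency_ms_quantile.invalidate(project_rowid)
            self._cache.token_count.invalidate(project_rowid)
            self._cache.record_count.invalidate(project_rowid)
            self._cache.min_start_or_max_end_time.invalidate(project_rowid)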
phoenix/server/api/dataloaders/span_annotations.py
CHANGED
@@ -8,12 +8,11 @@ from sqlalchemy import select
 from strawberry.dataloader import DataLoader
 from typing_extensions import TypeAlias
 
-from phoenix.db import
-from phoenix.server.api.types.SpanAnnotation import SpanAnnotation, to_gql_span_annotation
+from phoenix.db.models import SpanAnnotation as ORMSpanAnnotation
 from phoenix.server.types import DbSessionFactory
 
 Key: TypeAlias = int
-Result: TypeAlias = List[
+Result: TypeAlias = List[ORMSpanAnnotation]
 
 
 class SpanAnnotationsDataLoader(DataLoader[Key, Result]):
@@ -23,11 +22,9 @@ class SpanAnnotationsDataLoader(DataLoader[Key, Result]):
 
     async def _load_fn(self, keys: List[Key]) -> List[Result]:
         span_annotations_by_id: DefaultDict[Key, Result] = defaultdict(list)
-        msa = models.SpanAnnotation
         async with self._db() as session:
-
-
-
-
-            )
+            async for span_annotation in await session.stream_scalars(
+                select(ORMSpanAnnotation).where(ORMSpanAnnotation.span_rowid.in_(keys))
+            ):
+                span_annotations_by_id[span_annotation.span_rowid].append(span_annotation)
         return [span_annotations_by_id[key] for key in keys]
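The rewritten loader returns ORM rows directly and streams them rather than materializing the full result. A self-contained illustration of the same batching pattern (session setup omitted; assumes an AsyncSession wired to the phoenix schema):

from collections import defaultdict
from typing import DefaultDict, List

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from phoenix.db.models import SpanAnnotation as ORMSpanAnnotation


async def load_annotations(
    session: AsyncSession, keys: List[int]
) -> List[List[ORMSpanAnnotation]]:
    by_span: DefaultDict[int, List[ORMSpanAnnotation]] = defaultdict(list)
    # stream_scalars yields ORM objects incrementally instead of
    # buffering the entire result set in memory.
    async for anno in await session.stream_scalars(
        select(ORMSpanAnnotation).where(ORMSpanAnnotation.span_rowid.in_(keys))
    ):
        by_span[anno.span_rowid].append(anno)
    # DataLoader contract: one result per requested key, in key order.
    return [by_span[key] for key in keys]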
phoenix/server/api/mutations/dataset_mutations.py
CHANGED
@@ -33,6 +33,7 @@ from phoenix.server.api.types.DatasetExample import DatasetExample
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.types.Span import Span
 from phoenix.server.api.utils import delete_projects, delete_traces
+from phoenix.server.dml_event import DatasetDeleteEvent, DatasetInsertEvent
 
 
 @strawberry.type
@@ -62,6 +63,7 @@ class DatasetMutationMixin:
             .returning(models.Dataset)
         )
         assert dataset is not None
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -90,6 +92,7 @@ class DatasetMutationMixin:
             .values(**patch)
         )
         assert dataset is not None
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -154,6 +157,36 @@ class DatasetMutationMixin:
                 raise ValueError(
                     f"Could not find spans with rowids: {', '.join(map(str, missing_span_rowids))}"
                 )  # todo: implement error handling types https://github.com/Arize-ai/phoenix/issues/3221
+
+            span_annotations = (
+                await session.execute(
+                    select(
+                        models.SpanAnnotation.span_rowid,
+                        models.SpanAnnotation.name,
+                        models.SpanAnnotation.label,
+                        models.SpanAnnotation.score,
+                        models.SpanAnnotation.explanation,
+                        models.SpanAnnotation.metadata_,
+                        models.SpanAnnotation.annotator_kind,
+                    )
+                    .select_from(models.SpanAnnotation)
+                    .where(models.SpanAnnotation.span_rowid.in_(span_rowids))
+                )
+            ).all()
+
+            span_annotations_by_span: Dict[int, Dict[Any, Any]] = {span.id: {} for span in spans}
+            for annotation in span_annotations:
+                span_id = annotation.span_rowid
+                if span_id not in span_annotations_by_span:
+                    span_annotations_by_span[span_id] = dict()
+                span_annotations_by_span[span_id][annotation.name] = {
+                    "label": annotation.label,
+                    "score": annotation.score,
+                    "explanation": annotation.explanation,
+                    "metadata": annotation.metadata_,
+                    "annotator_kind": annotation.annotator_kind,
+                }
+
             DatasetExample = models.DatasetExample
             dataset_example_rowids = (
                 await session.scalars(
@@ -170,6 +203,7 @@ class DatasetMutationMixin:
             assert len(dataset_example_rowids) == len(spans)
             assert all(map(lambda id: isinstance(id, int), dataset_example_rowids))
             DatasetExampleRevision = models.DatasetExampleRevision
+
             await session.execute(
                 insert(DatasetExampleRevision),
                 [
@@ -178,12 +212,16 @@ class DatasetMutationMixin:
                         DatasetExampleRevision.dataset_version_id.key: dataset_version_rowid,
                         DatasetExampleRevision.input.key: get_dataset_example_input(span),
                         DatasetExampleRevision.output.key: get_dataset_example_output(span),
-                        DatasetExampleRevision.metadata_.key:
+                        DatasetExampleRevision.metadata_.key: {
+                            **span.attributes,
+                            "annotations": span_annotations_by_span[span.id],
+                        },
                         DatasetExampleRevision.revision_kind.key: "CREATE",
                     }
                     for dataset_example_rowid, span in zip(dataset_example_rowids, spans)
                 ],
             )
+            info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -269,6 +307,7 @@ class DatasetMutationMixin:
                 )
             ],
         )
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -299,6 +338,7 @@ class DatasetMutationMixin:
             delete_traces(info.context.db, *eval_trace_ids),
             return_exceptions=True,
         )
+        info.context.event_queue.put(DatasetDeleteEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -390,7 +430,7 @@ class DatasetMutationMixin:
                 for revision, patch, example_id in zip(revisions, patches, example_ids)
             ],
         )
-
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -473,8 +513,8 @@ class DatasetMutationMixin:
                 for dataset_example_rowid in example_db_ids
             ],
         )
-
-
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
+        return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
 
 def _span_attribute(semconv: str) -> Any:
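For reference, each revision's metadata after this change merges the span's attributes with an "annotations" map keyed by annotation name. An illustrative, made-up value (the keys spread in from span.attributes vary per span):

# Illustrative only; not produced by the library verbatim.
revision_metadata = {
    "llm": {"model_name": "gpt-4"},  # example span attribute
    "annotations": {
        "correctness": {
            "label": "correct",
            "score": 1.0,
            "explanation": "matches the reference answer",
            "metadata": {},
            "annotator_kind": "LLM",
        },
    },
}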
phoenix/server/api/mutations/experiment_mutations.py
CHANGED
@@ -14,6 +14,7 @@ from phoenix.server.api.mutations.auth import IsAuthenticated
 from phoenix.server.api.types.Experiment import Experiment, to_gql_experiment
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.utils import delete_projects, delete_traces
+from phoenix.server.dml_event import ExperimentDeleteEvent
 
 
 @strawberry.type
@@ -66,6 +67,7 @@ class ExperimentMutationMixin:
             delete_traces(info.context.db, *eval_trace_ids),
             return_exceptions=True,
         )
+        info.context.event_queue.put(ExperimentDeleteEvent(tuple(experiments.keys())))
         return ExperimentMutationPayload(
             experiments=[
                 to_gql_experiment(experiments[experiment_id]) for experiment_id in experiment_ids
phoenix/server/api/mutations/project_mutations.py
CHANGED
@@ -6,23 +6,23 @@ from strawberry.types import Info
 
 from phoenix.config import DEFAULT_PROJECT_NAME
 from phoenix.db import models
-from phoenix.db.insertion.span import ClearProjectSpansEvent
 from phoenix.server.api.context import Context
 from phoenix.server.api.input_types.ClearProjectInput import ClearProjectInput
 from phoenix.server.api.mutations.auth import IsAuthenticated
 from phoenix.server.api.queries import Query
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ProjectDeleteEvent, SpanDeleteEvent
 
 
 @strawberry.type
 class ProjectMutationMixin:
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
     async def delete_project(self, info: Info[Context, None], id: GlobalID) -> Query:
-
+        project_id = from_global_id_with_expected_type(global_id=id, expected_type_name="Project")
         async with info.context.db() as session:
             project = await session.scalar(
                 select(models.Project)
-                .where(models.Project.id ==
+                .where(models.Project.id == project_id)
                 .options(load_only(models.Project.name))
             )
             if project is None:
@@ -30,6 +30,7 @@ class ProjectMutationMixin:
             if project.name == DEFAULT_PROJECT_NAME:
                 raise ValueError(f"Cannot delete the {DEFAULT_PROJECT_NAME} project")
             await session.delete(project)
+        info.context.event_queue.put(ProjectDeleteEvent((project_id,)))
         return Query()
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -42,6 +43,5 @@ class ProjectMutationMixin:
             delete_statement = delete_statement.where(models.Trace.start_time < input.end_time)
         async with info.context.db() as session:
             await session.execute(delete_statement)
-
-        cache.invalidate(ClearProjectSpansEvent(project_rowid=project_id))
+        info.context.event_queue.put(SpanDeleteEvent((project_id,)))
         return Query()
phoenix/server/api/mutations/span_annotations_mutations.py
CHANGED
@@ -14,6 +14,7 @@ from phoenix.server.api.mutations.auth import IsAuthenticated
 from phoenix.server.api.queries import Query
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.types.SpanAnnotation import SpanAnnotation, to_gql_span_annotation
+from phoenix.server.dml_event import SpanAnnotationDeleteEvent, SpanAnnotationInsertEvent
 
 
 @strawberry.type
@@ -47,7 +48,10 @@ class SpanAnnotationMutationMixin:
         )
         result = await session.scalars(stmt)
         inserted_annotations = result.all()
-
+        if inserted_annotations:
+            info.context.event_queue.put(
+                SpanAnnotationInsertEvent(tuple(anno.id for anno in inserted_annotations))
+            )
         return SpanAnnotationMutationPayload(
             span_annotations=[
                 to_gql_span_annotation(annotation) for annotation in inserted_annotations
@@ -92,7 +96,7 @@ class SpanAnnotationMutationMixin:
             )
             if span_annotation is not None:
                 patched_annotations.append(to_gql_span_annotation(span_annotation))
-
+                info.context.event_queue.put(SpanAnnotationInsertEvent((span_annotation.id,)))
         return SpanAnnotationMutationPayload(span_annotations=patched_annotations, query=Query())
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -115,6 +119,10 @@ class SpanAnnotationMutationMixin:
         deleted_annotations_gql = [
             to_gql_span_annotation(annotation) for annotation in deleted_annotations
         ]
+        if deleted_annotations:
+            info.context.event_queue.put(
+                SpanAnnotationDeleteEvent(tuple(anno.id for anno in deleted_annotations))
+            )
         return SpanAnnotationMutationPayload(
             span_annotations=deleted_annotations_gql, query=Query()
         )
phoenix/server/api/mutations/trace_annotations_mutations.py
CHANGED
@@ -14,6 +14,7 @@ from phoenix.server.api.mutations.auth import IsAuthenticated
 from phoenix.server.api.queries import Query
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.types.TraceAnnotation import TraceAnnotation, to_gql_trace_annotation
+from phoenix.server.dml_event import TraceAnnotationDeleteEvent, TraceAnnotationInsertEvent
 
 
 @strawberry.type
@@ -47,7 +48,10 @@ class TraceAnnotationMutationMixin:
         )
         result = await session.scalars(stmt)
         inserted_annotations = result.all()
-
+        if inserted_annotations:
+            info.context.event_queue.put(
+                TraceAnnotationInsertEvent(tuple(anno.id for anno in inserted_annotations))
+            )
         return TraceAnnotationMutationPayload(
             trace_annotations=[
                 to_gql_trace_annotation(annotation) for annotation in inserted_annotations
@@ -91,7 +95,7 @@ class TraceAnnotationMutationMixin:
             )
             if trace_annotation:
                 patched_annotations.append(to_gql_trace_annotation(trace_annotation))
-
+                info.context.event_queue.put(TraceAnnotationInsertEvent((trace_annotation.id,)))
         return TraceAnnotationMutationPayload(trace_annotations=patched_annotations, query=Query())
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -114,6 +118,10 @@ class TraceAnnotationMutationMixin:
         deleted_annotations_gql = [
             to_gql_trace_annotation(annotation) for annotation in deleted_annotations
         ]
+        if deleted_annotations:
+            info.context.event_queue.put(
+                TraceAnnotationDeleteEvent(tuple(anno.id for anno in deleted_annotations))
+            )
         return TraceAnnotationMutationPayload(
             trace_annotations=deleted_annotations_gql, query=Query()
         )
phoenix/server/api/queries.py
CHANGED
@@ -1,4 +1,5 @@
 from collections import defaultdict
+from datetime import datetime
 from typing import DefaultDict, Dict, List, Optional, Set, Union
 
 import numpy as np
@@ -107,6 +108,10 @@ class Query:
         ]
         return connection_from_list(data=data, args=args)
 
+    @strawberry.field
+    def projects_last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(models.Project)
+
     @strawberry.field
     async def datasets(
         self,
@@ -133,6 +138,10 @@ class Query:
             data=[to_gql_dataset(dataset) for dataset in datasets], args=args
         )
 
+    @strawberry.field
+    def datasets_last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(models.Dataset)
+
     @strawberry.field
     async def compare_experiments(
         self,
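Strawberry exposes these resolvers in camelCase, so clients can cheaply poll for staleness before refetching full lists. A hypothetical polling call, assuming a local server on Phoenix's default port:

import httpx

query = "{ projectsLastUpdatedAt datasetsLastUpdatedAt }"
resp = httpx.post("http://localhost:6006/graphql", json={"query": query})
# Refetch project/dataset lists only if these timestamps have advanced.
print(resp.json()["data"])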
phoenix/server/api/routers/v1/datasets.py
CHANGED
@@ -60,6 +60,7 @@ from phoenix.server.api.types.DatasetExample import DatasetExample as DatasetExa
 from phoenix.server.api.types.DatasetVersion import DatasetVersion as DatasetVersionNodeType
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.utils import delete_projects, delete_traces
+from phoenix.server.dml_event import DatasetInsertEvent
 
 from .pydantic_compat import V1RoutesBaseModel
 from .utils import (
@@ -481,6 +482,7 @@ async def upload_dataset(
     if sync:
         async with request.app.state.db() as session:
             dataset_id = (await operation(session)).dataset_id
+            request.state.event_queue.put(DatasetInsertEvent((dataset_id,)))
         return UploadDatasetResponseBody(
             data=UploadDatasetData(dataset_id=str(GlobalID(Dataset.__name__, str(dataset_id))))
         )
phoenix/server/api/routers/v1/experiment_evaluations.py
CHANGED
@@ -11,6 +11,7 @@ from phoenix.db import models
 from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import insert_on_conflict
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ExperimentRunAnnotationInsertEvent
 
 from .pydantic_compat import V1RoutesBaseModel
 from .utils import ResponseBody, add_errors_to_responses
@@ -108,6 +109,7 @@ async def upsert_experiment_evaluation(
         ).returning(models.ExperimentRunAnnotation)
     )
     evaluation_gid = GlobalID("ExperimentEvaluation", str(exp_eval_run.id))
+    request.state.event_queue.put(ExperimentRunAnnotationInsertEvent((exp_eval_run.id,)))
     return UpsertExperimentEvaluationResponseBody(
         data=UpsertExperimentEvaluationResponseBodyData(id=str(evaluation_gid))
     )
phoenix/server/api/routers/v1/experiment_runs.py
CHANGED
@@ -11,6 +11,7 @@ from strawberry.relay import GlobalID
 from phoenix.db import models
 from phoenix.db.models import ExperimentRunOutput
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ExperimentRunInsertEvent
 
 from .pydantic_compat import V1RoutesBaseModel
 from .utils import ResponseBody, add_errors_to_responses
@@ -102,6 +103,7 @@ async def create_experiment_run(
     )
     session.add(exp_run)
     await session.flush()
+    request.state.event_queue.put(ExperimentRunInsertEvent((exp_run.id,)))
     run_gid = GlobalID("ExperimentRun", str(exp_run.id))
     return CreateExperimentResponseBody(data=CreateExperimentRunResponseBodyData(id=str(run_gid)))
 
phoenix/server/api/routers/v1/experiments.py
CHANGED
@@ -13,6 +13,7 @@ from phoenix.db import models
 from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import insert_on_conflict
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ExperimentInsertEvent
 
 from .pydantic_compat import V1RoutesBaseModel
 from .utils import ResponseBody, add_errors_to_responses
@@ -188,6 +189,7 @@ async def create_experiment(
     dataset_version_globalid = GlobalID(
         "DatasetVersion", str(experiment.dataset_version_id)
     )
+    request.state.event_queue.put(ExperimentInsertEvent((experiment.id,)))
     return CreateExperimentResponseBody(
         data=Experiment(
             id=str(experiment_globalid),