arize-phoenix 4.20.0__py3-none-any.whl → 4.20.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arize-phoenix might be problematic.
- {arize_phoenix-4.20.0.dist-info → arize_phoenix-4.20.1.dist-info}/METADATA +2 -1
- {arize_phoenix-4.20.0.dist-info → arize_phoenix-4.20.1.dist-info}/RECORD +33 -31
- phoenix/db/bulk_inserter.py +24 -98
- phoenix/db/insertion/document_annotation.py +13 -0
- phoenix/db/insertion/span_annotation.py +13 -0
- phoenix/db/insertion/trace_annotation.py +13 -0
- phoenix/db/insertion/types.py +34 -28
- phoenix/server/api/context.py +8 -6
- phoenix/server/api/dataloaders/__init__.py +0 -47
- phoenix/server/api/mutations/dataset_mutations.py +9 -3
- phoenix/server/api/mutations/experiment_mutations.py +2 -0
- phoenix/server/api/mutations/project_mutations.py +5 -5
- phoenix/server/api/mutations/span_annotations_mutations.py +10 -2
- phoenix/server/api/mutations/trace_annotations_mutations.py +10 -2
- phoenix/server/api/queries.py +9 -0
- phoenix/server/api/routers/v1/datasets.py +2 -0
- phoenix/server/api/routers/v1/experiment_evaluations.py +2 -0
- phoenix/server/api/routers/v1/experiment_runs.py +2 -0
- phoenix/server/api/routers/v1/experiments.py +2 -0
- phoenix/server/api/routers/v1/spans.py +12 -8
- phoenix/server/api/routers/v1/traces.py +12 -10
- phoenix/server/api/types/Dataset.py +6 -1
- phoenix/server/api/types/Experiment.py +6 -1
- phoenix/server/api/types/Project.py +4 -1
- phoenix/server/api/types/Span.py +2 -2
- phoenix/server/app.py +25 -8
- phoenix/server/dml_event.py +136 -0
- phoenix/server/dml_event_handler.py +272 -0
- phoenix/server/types.py +106 -1
- phoenix/version.py +1 -1
- {arize_phoenix-4.20.0.dist-info → arize_phoenix-4.20.1.dist-info}/WHEEL +0 -0
- {arize_phoenix-4.20.0.dist-info → arize_phoenix-4.20.1.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-4.20.0.dist-info → arize_phoenix-4.20.1.dist-info}/licenses/LICENSE +0 -0
phoenix/db/insertion/types.py
CHANGED

@@ -21,11 +21,12 @@ from typing import (
 )
 
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.sql.dml import
+from sqlalchemy.sql.dml import Insert
 
 from phoenix.db import models
 from phoenix.db.insertion.constants import DEFAULT_RETRY_ALLOWANCE, DEFAULT_RETRY_DELAY_SEC
-from phoenix.db.insertion.helpers import
+from phoenix.db.insertion.helpers import insert_on_conflict
+from phoenix.server.dml_event import DmlEvent
 from phoenix.server.types import DbSessionFactory
 
 logger = logging.getLogger("__name__")
@@ -40,6 +41,7 @@ _AnyT = TypeVar("_AnyT")
 _PrecursorT = TypeVar("_PrecursorT")
 _InsertableT = TypeVar("_InsertableT", bound=Insertable)
 _RowT = TypeVar("_RowT", bound=models.Base)
+_DmlEventT = TypeVar("_DmlEventT", bound=DmlEvent)
 
 
 @dataclass(frozen=True)
@@ -56,7 +58,7 @@ class Postponed(Received[_AnyT]):
     retries_left: int = field(default=DEFAULT_RETRY_ALLOWANCE)
 
 
-class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT]):
+class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT, _DmlEventT]):
     table: Type[_RowT]
     unique_by: Sequence[str]
 
@@ -97,59 +99,63 @@ class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT]):
         List[Received[_PrecursorT]],
     ]: ...
 
-    async def insert(self) ->
+    async def insert(self) -> Optional[List[_DmlEventT]]:
         if not self._queue:
-            return
-        parcels = self._queue
-
-        inserted_ids: List[int] = []
+            return None
+        self._queue, parcels = [], self._queue
+        events: List[_DmlEventT] = []
         async with self._db() as session:
             to_insert, to_postpone, _ = await self._partition(session, *parcels)
             if to_insert:
-
-
+                events, to_retry, _ = await self._insert(session, *to_insert)
+                if to_retry:
+                    to_postpone.extend(to_retry)
             if to_postpone:
                 loop = asyncio.get_running_loop()
                 loop.call_later(self._retry_delay_sec, self._queue.extend, to_postpone)
-        return
+        return events
 
-    def
-        pk = next(c for c in self.table.__table__.c if c.primary_key)
+    def _insert_on_conflict(self, *records: Mapping[str, Any]) -> Insert:
         return insert_on_conflict(
             *records,
             table=self.table,
             unique_by=self.unique_by,
             dialect=self._db.dialect,
-        )
+        )
+
+    @abstractmethod
+    async def _events(
+        self,
+        session: AsyncSession,
+        *insertions: _InsertableT,
+    ) -> List[_DmlEventT]: ...
 
     async def _insert(
         self,
         session: AsyncSession,
-        *
-    ) -> Tuple[
-
-
+        *parcels: Received[_InsertableT],
+    ) -> Tuple[
+        List[_DmlEventT],
+        List[Postponed[_PrecursorT]],
+        List[Received[_InsertableT]],
+    ]:
         to_retry: List[Postponed[_PrecursorT]] = []
         failures: List[Received[_InsertableT]] = []
-
+        events: List[_DmlEventT] = []
         try:
             async with session.begin_nested():
-
-                inserted_ids.extend(ids)
+                events.extend(await self._events(session, *(p.item for p in parcels)))
         except BaseException:
             logger.exception(
                 f"Failed to bulk insert for {self.table.__name__}. "
-                f"Will try to insert ({len(
+                f"Will try to insert ({len(parcels)} records) individually instead."
             )
-            for
-                stmt = self._stmt(record)
+            for p in parcels:
                 try:
                     async with session.begin_nested():
-
-                        inserted_ids.extend(ids)
+                        events.extend(await self._events(session, p.item))
                 except BaseException:
                     logger.exception(f"Failed to insert for {self.table.__name__}.")
-                    p = insertions[i]
                     if isinstance(p, Postponed) and p.retries_left == 1:
                         failures.append(p)
                     else:
@@ -162,7 +168,7 @@ class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT]):
                         else self._retry_allowance,
                     )
                 )
-        return
+        return events, to_retry, failures
 
 
 class Precursors(ABC):
phoenix/server/api/context.py
CHANGED

@@ -1,10 +1,8 @@
 from dataclasses import dataclass
-from datetime import datetime
 from pathlib import Path
-from typing import
+from typing import Any, Optional
 
 from strawberry.fastapi import BaseContext
-from typing_extensions import TypeAlias
 
 from phoenix.core.model_schema import Model
 from phoenix.server.api.dataloaders import (
@@ -34,7 +32,8 @@ from phoenix.server.api.dataloaders import (
     TraceEvaluationsDataLoader,
     TraceRowIdsDataLoader,
 )
-from phoenix.server.
+from phoenix.server.dml_event import DmlEvent
+from phoenix.server.types import CanGetLastUpdatedAt, CanPutItem, DbSessionFactory
 
 
 @dataclass
@@ -65,7 +64,9 @@ class DataLoaders:
     project_by_name: ProjectByNameDataLoader
 
 
-
+class _NoOp:
+    def get(self, *args: Any, **kwargs: Any) -> Any: ...
+    def put(self, *args: Any, **kwargs: Any) -> Any: ...
 
 
 @dataclass
@@ -75,6 +76,7 @@ class Context(BaseContext):
     cache_for_dataloaders: Optional[CacheForDataLoaders]
     model: Model
     export_path: Path
+    last_updated_at: CanGetLastUpdatedAt = _NoOp()
+    event_queue: CanPutItem[DmlEvent] = _NoOp()
     corpus: Optional[Model] = None
-    streaming_last_updated_at: Callable[[ProjectRowId], Optional[datetime]] = lambda _: None
     read_only: bool = False
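
Context swaps the old streaming_last_updated_at callable for two structurally typed hooks, with _NoOp as a safe default so a bare Context (e.g. in tests) still works. A sketch of what the two protocols plausibly look like, inferred purely from the call sites in this diff; the real definitions live in phoenix/server/types.py (+106 lines, per the file list), so treat the exact signatures as assumptions.

# Assumed shapes, inferred from the call sites in this diff:
# event_queue.put(event), last_updated_at.get(table), and
# last_updated_at.get(table, id). The real Protocols are in
# phoenix/server/types.py, which this diff does not display.
from datetime import datetime
from typing import Any, Optional, Protocol, TypeVar

_ItemT_contra = TypeVar("_ItemT_contra", contravariant=True)


class CanPutItem(Protocol[_ItemT_contra]):
    def put(self, item: _ItemT_contra) -> None: ...


class CanGetLastUpdatedAt(Protocol):
    def get(self, table: Any, id_: Optional[int] = None) -> Optional[datetime]: ...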
phoenix/server/api/dataloaders/__init__.py
CHANGED

@@ -1,12 +1,4 @@
 from dataclasses import dataclass, field
-from functools import singledispatchmethod
-
-from phoenix.db.insertion.evaluation import (
-    DocumentEvaluationInsertionEvent,
-    SpanEvaluationInsertionEvent,
-    TraceEvaluationInsertionEvent,
-)
-from phoenix.db.insertion.span import ClearProjectSpansEvent, SpanInsertionEvent
 
 from .annotation_summaries import AnnotationSummaryCache, AnnotationSummaryDataLoader
 from .average_experiment_run_latency import AverageExperimentRunLatencyDataLoader
@@ -88,42 +80,3 @@ class CacheForDataLoaders:
     token_count: TokenCountCache = field(
         default_factory=TokenCountCache,
     )
-
-    def _update_spans(self, project_rowid: int) -> None:
-        self.latency_ms_quantile.invalidate(project_rowid)
-        self.token_count.invalidate(project_rowid)
-        self.record_count.invalidate(project_rowid)
-        self.min_start_or_max_end_time.invalidate(project_rowid)
-
-    def _clear_spans(self, project_rowid: int) -> None:
-        self._update_spans(project_rowid)
-        self.annotation_summary.invalidate_project(project_rowid)
-        self.evaluation_summary.invalidate_project(project_rowid)
-        self.document_evaluation_summary.invalidate_project(project_rowid)
-
-    @singledispatchmethod
-    def invalidate(self, event: SpanInsertionEvent) -> None:
-        project_rowid, *_ = event
-        self._update_spans(project_rowid)
-
-    @invalidate.register
-    def _(self, event: ClearProjectSpansEvent) -> None:
-        project_rowid, *_ = event
-        self._clear_spans(project_rowid)
-
-    @invalidate.register
-    def _(self, event: DocumentEvaluationInsertionEvent) -> None:
-        project_rowid, evaluation_name = event
-        self.document_evaluation_summary.invalidate((project_rowid, evaluation_name))
-
-    @invalidate.register
-    def _(self, event: SpanEvaluationInsertionEvent) -> None:
-        project_rowid, evaluation_name = event
-        self.annotation_summary.invalidate((project_rowid, evaluation_name, "span"))
-        self.evaluation_summary.invalidate((project_rowid, evaluation_name, "span"))
-
-    @invalidate.register
-    def _(self, event: TraceEvaluationInsertionEvent) -> None:
-        project_rowid, evaluation_name = event
-        self.annotation_summary.invalidate((project_rowid, evaluation_name, "trace"))
-        self.evaluation_summary.invalidate((project_rowid, evaluation_name, "trace"))
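
The singledispatchmethod-based invalidation is removed from CacheForDataLoaders rather than rewritten in place; per the file list, that responsibility moves to the new phoenix/server/dml_event_handler.py (+272 lines), which this diff does not display. A hypothetical sketch of the replacement pattern follows; the handler API, and the assumption that events expose their row ids as an ids tuple, are guesses.

# Hypothetical dispatch sketch; the real handler lives in the new
# phoenix/server/dml_event_handler.py, not shown in this diff.
from functools import singledispatchmethod

from phoenix.server.dml_event import DmlEvent, SpanDeleteEvent


class DmlEventHandler:
    def __init__(self, cache: "CacheForDataLoaders") -> None:
        self._cache = cache

    @singledispatchmethod
    def handle(self, event: DmlEvent) -> None:
        ...  # unknown event types are ignored

    @handle.register
    def _(self, event: SpanDeleteEvent) -> None:
        # SpanDeleteEvent carries project rowids (see project_mutations.py
        # below); invalidate the per-project caches, assuming the event
        # exposes them as an `ids` tuple.
        for project_rowid in event.ids:
            self._cache.latency_ms_quantile.invalidate(project_rowid)
            self._cache.token_count.invalidate(project_rowid)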
phoenix/server/api/mutations/dataset_mutations.py
CHANGED

@@ -33,6 +33,7 @@ from phoenix.server.api.types.DatasetExample import DatasetExample
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.types.Span import Span
 from phoenix.server.api.utils import delete_projects, delete_traces
+from phoenix.server.dml_event import DatasetDeleteEvent, DatasetInsertEvent
 
 
 @strawberry.type
@@ -62,6 +63,7 @@ class DatasetMutationMixin:
             .returning(models.Dataset)
         )
         assert dataset is not None
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -90,6 +92,7 @@
             .values(**patch)
         )
         assert dataset is not None
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -218,6 +221,7 @@
                 for dataset_example_rowid, span in zip(dataset_example_rowids, spans)
             ],
         )
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -303,6 +307,7 @@
                 )
             ],
         )
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
        return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -333,6 +338,7 @@
             delete_traces(info.context.db, *eval_trace_ids),
             return_exceptions=True,
         )
+        info.context.event_queue.put(DatasetDeleteEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -424,7 +430,7 @@
                 for revision, patch, example_id in zip(revisions, patches, example_ids)
             ],
         )
-
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
         return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -507,8 +513,8 @@
                 for dataset_example_rowid in example_db_ids
             ],
         )
-
-
+        info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
+        return DatasetMutationPayload(dataset=to_gql_dataset(dataset))
 
 
 def _span_attribute(semconv: str) -> Any:
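
Every call site in these mutations builds its event from a single positional tuple of affected row ids, e.g. DatasetInsertEvent((dataset.id,)). A sketch of the event shape that convention implies; the actual classes are in the new phoenix/server/dml_event.py (+136 lines), so the field name is an assumption.

# Assumed event shape, inferred only from the constructor calls in this
# diff; the real classes are in the new phoenix/server/dml_event.py.
from dataclasses import dataclass, field
from typing import Tuple


@dataclass(frozen=True)
class DmlEvent:
    ids: Tuple[int, ...] = field(default_factory=tuple)


@dataclass(frozen=True)
class DatasetInsertEvent(DmlEvent): ...


@dataclass(frozen=True)
class DatasetDeleteEvent(DmlEvent): ...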
phoenix/server/api/mutations/experiment_mutations.py
CHANGED

@@ -14,6 +14,7 @@ from phoenix.server.api.mutations.auth import IsAuthenticated
 from phoenix.server.api.types.Experiment import Experiment, to_gql_experiment
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.utils import delete_projects, delete_traces
+from phoenix.server.dml_event import ExperimentDeleteEvent
 
 
 @strawberry.type
@@ -66,6 +67,7 @@ class ExperimentMutationMixin:
             delete_traces(info.context.db, *eval_trace_ids),
             return_exceptions=True,
         )
+        info.context.event_queue.put(ExperimentDeleteEvent(tuple(experiments.keys())))
         return ExperimentMutationPayload(
             experiments=[
                 to_gql_experiment(experiments[experiment_id]) for experiment_id in experiment_ids
phoenix/server/api/mutations/project_mutations.py
CHANGED

@@ -6,23 +6,23 @@ from strawberry.types import Info
 
 from phoenix.config import DEFAULT_PROJECT_NAME
 from phoenix.db import models
-from phoenix.db.insertion.span import ClearProjectSpansEvent
 from phoenix.server.api.context import Context
 from phoenix.server.api.input_types.ClearProjectInput import ClearProjectInput
 from phoenix.server.api.mutations.auth import IsAuthenticated
 from phoenix.server.api.queries import Query
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ProjectDeleteEvent, SpanDeleteEvent
 
 
 @strawberry.type
 class ProjectMutationMixin:
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
     async def delete_project(self, info: Info[Context, None], id: GlobalID) -> Query:
-
+        project_id = from_global_id_with_expected_type(global_id=id, expected_type_name="Project")
         async with info.context.db() as session:
             project = await session.scalar(
                 select(models.Project)
-                .where(models.Project.id ==
+                .where(models.Project.id == project_id)
                 .options(load_only(models.Project.name))
             )
             if project is None:
@@ -30,6 +30,7 @@ class ProjectMutationMixin:
             if project.name == DEFAULT_PROJECT_NAME:
                 raise ValueError(f"Cannot delete the {DEFAULT_PROJECT_NAME} project")
             await session.delete(project)
+        info.context.event_queue.put(ProjectDeleteEvent((project_id,)))
         return Query()
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -42,6 +43,5 @@
         delete_statement = delete_statement.where(models.Trace.start_time < input.end_time)
         async with info.context.db() as session:
             await session.execute(delete_statement)
-
-            cache.invalidate(ClearProjectSpansEvent(project_rowid=project_id))
+        info.context.event_queue.put(SpanDeleteEvent((project_id,)))
         return Query()
phoenix/server/api/mutations/span_annotations_mutations.py
CHANGED

@@ -14,6 +14,7 @@ from phoenix.server.api.mutations.auth import IsAuthenticated
 from phoenix.server.api.queries import Query
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.types.SpanAnnotation import SpanAnnotation, to_gql_span_annotation
+from phoenix.server.dml_event import SpanAnnotationDeleteEvent, SpanAnnotationInsertEvent
 
 
 @strawberry.type
@@ -47,7 +48,10 @@ class SpanAnnotationMutationMixin:
             )
             result = await session.scalars(stmt)
             inserted_annotations = result.all()
-
+        if inserted_annotations:
+            info.context.event_queue.put(
+                SpanAnnotationInsertEvent(tuple(anno.id for anno in inserted_annotations))
+            )
         return SpanAnnotationMutationPayload(
             span_annotations=[
                 to_gql_span_annotation(annotation) for annotation in inserted_annotations
@@ -92,7 +96,7 @@
             )
             if span_annotation is not None:
                 patched_annotations.append(to_gql_span_annotation(span_annotation))
-
+                info.context.event_queue.put(SpanAnnotationInsertEvent((span_annotation.id,)))
         return SpanAnnotationMutationPayload(span_annotations=patched_annotations, query=Query())
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -115,6 +119,10 @@
         deleted_annotations_gql = [
             to_gql_span_annotation(annotation) for annotation in deleted_annotations
         ]
+        if deleted_annotations:
+            info.context.event_queue.put(
+                SpanAnnotationDeleteEvent(tuple(anno.id for anno in deleted_annotations))
+            )
         return SpanAnnotationMutationPayload(
             span_annotations=deleted_annotations_gql, query=Query()
         )
phoenix/server/api/mutations/trace_annotations_mutations.py
CHANGED

@@ -14,6 +14,7 @@ from phoenix.server.api.mutations.auth import IsAuthenticated
 from phoenix.server.api.queries import Query
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.types.TraceAnnotation import TraceAnnotation, to_gql_trace_annotation
+from phoenix.server.dml_event import TraceAnnotationDeleteEvent, TraceAnnotationInsertEvent
 
 
 @strawberry.type
@@ -47,7 +48,10 @@ class TraceAnnotationMutationMixin:
             )
             result = await session.scalars(stmt)
             inserted_annotations = result.all()
-
+        if inserted_annotations:
+            info.context.event_queue.put(
+                TraceAnnotationInsertEvent(tuple(anno.id for anno in inserted_annotations))
+            )
         return TraceAnnotationMutationPayload(
             trace_annotations=[
                 to_gql_trace_annotation(annotation) for annotation in inserted_annotations
@@ -91,7 +95,7 @@
             )
             if trace_annotation:
                 patched_annotations.append(to_gql_trace_annotation(trace_annotation))
-
+                info.context.event_queue.put(TraceAnnotationInsertEvent((trace_annotation.id,)))
         return TraceAnnotationMutationPayload(trace_annotations=patched_annotations, query=Query())
 
     @strawberry.mutation(permission_classes=[IsAuthenticated])  # type: ignore
@@ -114,6 +118,10 @@
         deleted_annotations_gql = [
             to_gql_trace_annotation(annotation) for annotation in deleted_annotations
         ]
+        if deleted_annotations:
+            info.context.event_queue.put(
+                TraceAnnotationDeleteEvent(tuple(anno.id for anno in deleted_annotations))
+            )
         return TraceAnnotationMutationPayload(
             trace_annotations=deleted_annotations_gql, query=Query()
         )
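
Both annotation mixins now guard their put() calls so that no event is emitted when nothing changed, and they batch all affected ids into one event. On the consumer side, app.py (+25/-8 per the file list) and the new server types presumably wire a queue that drains into the event handler; a purely hypothetical sketch of that loop, under the assumption that the queue is asyncio-compatible:

# Purely hypothetical consumer loop; the real wiring lives in
# phoenix/server/app.py and phoenix/server/dml_event_handler.py,
# neither of which is shown in this diff.
import asyncio

from phoenix.server.dml_event import DmlEvent


async def drain(queue: "asyncio.Queue[DmlEvent]", handler) -> None:
    while True:
        event = await queue.get()
        try:
            handler.handle(event)  # e.g. the DmlEventHandler sketched earlier
        finally:
            queue.task_done()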
phoenix/server/api/queries.py
CHANGED

@@ -1,4 +1,5 @@
 from collections import defaultdict
+from datetime import datetime
 from typing import DefaultDict, Dict, List, Optional, Set, Union
 
 import numpy as np
@@ -107,6 +108,10 @@
         ]
         return connection_from_list(data=data, args=args)
 
+    @strawberry.field
+    def projects_last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(models.Project)
+
     @strawberry.field
     async def datasets(
         self,
@@ -133,6 +138,10 @@
             data=[to_gql_dataset(dataset) for dataset in datasets], args=args
         )
 
+    @strawberry.field
+    def datasets_last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(models.Dataset)
+
     @strawberry.field
     async def compare_experiments(
         self,
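
The two new root fields give clients a cheap staleness probe: poll the watermark and refetch the heavy project or dataset connections only when it moves (a per-node variant, Dataset.lastUpdatedAt, appears in types/Dataset.py further below). Strawberry exposes the fields in camelCase. A usage sketch, assuming a local server at phoenix's default GraphQL endpoint:

# Polling sketch: ask only for the last-updated-at watermarks, and refetch
# the full connections when a timestamp changes. Assumes phoenix serving
# GraphQL at the default http://localhost:6006/graphql.
import httpx

QUERY = "query { projectsLastUpdatedAt datasetsLastUpdatedAt }"

resp = httpx.post("http://localhost:6006/graphql", json={"query": QUERY})
resp.raise_for_status()
print(resp.json()["data"])  # e.g. {'projectsLastUpdatedAt': '...', 'datasetsLastUpdatedAt': None}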
phoenix/server/api/routers/v1/datasets.py
CHANGED

@@ -60,6 +60,7 @@ from phoenix.server.api.types.DatasetExample import DatasetExample as DatasetExa
 from phoenix.server.api.types.DatasetVersion import DatasetVersion as DatasetVersionNodeType
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.utils import delete_projects, delete_traces
+from phoenix.server.dml_event import DatasetInsertEvent
 
 from .pydantic_compat import V1RoutesBaseModel
 from .utils import (
@@ -481,6 +482,7 @@ async def upload_dataset(
     if sync:
         async with request.app.state.db() as session:
             dataset_id = (await operation(session)).dataset_id
+            request.state.event_queue.put(DatasetInsertEvent((dataset_id,)))
         return UploadDatasetResponseBody(
             data=UploadDatasetData(dataset_id=str(GlobalID(Dataset.__name__, str(dataset_id))))
         )
phoenix/server/api/routers/v1/experiment_evaluations.py
CHANGED

@@ -11,6 +11,7 @@ from phoenix.db import models
 from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import insert_on_conflict
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ExperimentRunAnnotationInsertEvent
 
 from .pydantic_compat import V1RoutesBaseModel
 from .utils import ResponseBody, add_errors_to_responses
@@ -108,6 +109,7 @@ async def upsert_experiment_evaluation(
             ).returning(models.ExperimentRunAnnotation)
         )
     evaluation_gid = GlobalID("ExperimentEvaluation", str(exp_eval_run.id))
+    request.state.event_queue.put(ExperimentRunAnnotationInsertEvent((exp_eval_run.id,)))
     return UpsertExperimentEvaluationResponseBody(
         data=UpsertExperimentEvaluationResponseBodyData(id=str(evaluation_gid))
     )
phoenix/server/api/routers/v1/experiment_runs.py
CHANGED

@@ -11,6 +11,7 @@ from strawberry.relay import GlobalID
 from phoenix.db import models
 from phoenix.db.models import ExperimentRunOutput
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ExperimentRunInsertEvent
 
 from .pydantic_compat import V1RoutesBaseModel
 from .utils import ResponseBody, add_errors_to_responses
@@ -102,6 +103,7 @@ async def create_experiment_run(
         )
         session.add(exp_run)
         await session.flush()
+        request.state.event_queue.put(ExperimentRunInsertEvent((exp_run.id,)))
     run_gid = GlobalID("ExperimentRun", str(exp_run.id))
     return CreateExperimentResponseBody(data=CreateExperimentRunResponseBodyData(id=str(run_gid)))
 
phoenix/server/api/routers/v1/experiments.py
CHANGED

@@ -13,6 +13,7 @@ from phoenix.db import models
 from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import insert_on_conflict
 from phoenix.server.api.types.node import from_global_id_with_expected_type
+from phoenix.server.dml_event import ExperimentInsertEvent
 
 from .pydantic_compat import V1RoutesBaseModel
 from .utils import ResponseBody, add_errors_to_responses
@@ -188,6 +189,7 @@ async def create_experiment(
         dataset_version_globalid = GlobalID(
             "DatasetVersion", str(experiment.dataset_version_id)
         )
+        request.state.event_queue.put(ExperimentInsertEvent((experiment.id,)))
         return CreateExperimentResponseBody(
             data=Experiment(
                 id=str(experiment_globalid),
phoenix/server/api/routers/v1/spans.py
CHANGED

@@ -16,6 +16,7 @@ from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
 from phoenix.db.insertion.types import Precursors
 from phoenix.server.api.routers.utils import df_to_bytes
+from phoenix.server.dml_event import SpanAnnotationInsertEvent
 from phoenix.trace.dsl import SpanQuery as SpanQuery_
 
 from .pydantic_compat import V1RoutesBaseModel
@@ -197,6 +198,8 @@ async def annotate_spans(
     request_body: AnnotateSpansRequestBody,
     sync: bool = Query(default=True, description="If true, fulfill request synchronously."),
 ) -> AnnotateSpansResponseBody:
+    if not request_body.data:
+        return AnnotateSpansResponseBody(data=[])
     precursors = [d.as_precursor() for d in request_body.data]
     if not sync:
         await request.state.enqueue(*precursors)
@@ -217,9 +220,7 @@ async def annotate_spans(
                 detail=f"Spans with IDs {', '.join(missing_span_ids)} do not exist.",
                 status_code=HTTP_404_NOT_FOUND,
             )
-
-        inserted_annotations = []
-
+        inserted_ids = []
         dialect = SupportedSQLDialect(session.bind.dialect.name)
         for p in precursors:
             values = dict(as_kv(p.as_insertable(existing_spans[p.span_id]).row))
@@ -231,8 +232,11 @@
                     unique_by=("name", "span_rowid"),
                 ).returning(models.SpanAnnotation.id)
             )
-
-
-
-
-
+            inserted_ids.append(span_annotation_id)
+    request.state.event_queue.put(SpanAnnotationInsertEvent(tuple(inserted_ids)))
+    return AnnotateSpansResponseBody(
+        data=[
+            InsertedSpanAnnotation(id=str(GlobalID("SpanAnnotation", str(id_))))
+            for id_ in inserted_ids
+        ]
+    )
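
Besides emitting SpanAnnotationInsertEvent with the batch of inserted ids, the handler now short-circuits an empty payload before opening a database session. A client-side sketch of that behavior; the route and port are assumed phoenix defaults:

# Sketch: an empty annotation list should now return {"data": []}
# immediately, with no DB session opened. Route and port are assumed
# phoenix defaults, not confirmed by this diff.
import httpx

resp = httpx.post(
    "http://localhost:6006/v1/span_annotations",
    json={"data": []},
    params={"sync": "true"},
)
print(resp.status_code, resp.json())  # expected per the early return: {'data': []}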
phoenix/server/api/routers/v1/traces.py
CHANGED

@@ -24,6 +24,7 @@ from phoenix.db import models
 from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
 from phoenix.db.insertion.types import Precursors
+from phoenix.server.dml_event import TraceAnnotationInsertEvent
 from phoenix.trace.otel import decode_otlp_span
 from phoenix.utilities.project import get_project_name
 
@@ -153,6 +154,8 @@ async def annotate_traces(
     request_body: AnnotateTracesRequestBody,
     sync: bool = Query(default=True, description="If true, fulfill request synchronously."),
 ) -> AnnotateTracesResponseBody:
+    if not request_body.data:
+        return AnnotateTracesResponseBody(data=[])
     precursors = [d.as_precursor() for d in request_body.data]
     if not sync:
         await request.state.enqueue(*precursors)
@@ -173,9 +176,7 @@
                 detail=f"Traces with IDs {', '.join(missing_trace_ids)} do not exist.",
                 status_code=HTTP_404_NOT_FOUND,
             )
-
-        inserted_annotations = []
-
+        inserted_ids = []
         dialect = SupportedSQLDialect(session.bind.dialect.name)
         for p in precursors:
             values = dict(as_kv(p.as_insertable(existing_traces[p.trace_id]).row))
@@ -187,13 +188,14 @@
                 unique_by=("name", "trace_rowid"),
             ).returning(models.TraceAnnotation.id)
         )
-
-
-
-
-        )
-
-
+            inserted_ids.append(trace_annotation_id)
+    request.state.event_queue.put(TraceAnnotationInsertEvent(tuple(inserted_ids)))
+    return AnnotateTracesResponseBody(
+        data=[
+            InsertedTraceAnnotation(id=str(GlobalID("TraceAnnotation", str(id_))))
+            for id_ in inserted_ids
+        ]
+    )
 
 
 async def _add_spans(req: ExportTraceServiceRequest, state: State) -> None:
phoenix/server/api/types/Dataset.py
CHANGED

@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import AsyncIterable, List, Optional, Tuple, cast
+from typing import AsyncIterable, ClassVar, List, Optional, Tuple, Type, cast
 
 import strawberry
 from sqlalchemy import and_, func, select
@@ -27,6 +27,7 @@ from phoenix.server.api.types.SortDir import SortDir
 
 @strawberry.type
 class Dataset(Node):
+    _table: ClassVar[Type[models.Base]] = models.Experiment
     id_attr: NodeID[int]
     name: str
     description: Optional[str]
@@ -284,6 +285,10 @@
             ) in await session.stream(query)
         ]
 
+    @strawberry.field
+    def last_updated_at(self, info: Info[Context, None]) -> Optional[datetime]:
+        return info.context.last_updated_at.get(self._table, self.id_attr)
+
 
 def to_gql_dataset(dataset: models.Dataset) -> Dataset:
     """