arize-phoenix 8.32.1__py3-none-any.whl → 9.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {arize_phoenix-8.32.1.dist-info → arize_phoenix-9.0.1.dist-info}/METADATA +5 -5
- {arize_phoenix-8.32.1.dist-info → arize_phoenix-9.0.1.dist-info}/RECORD +76 -56
- phoenix/db/constants.py +1 -0
- phoenix/db/facilitator.py +55 -0
- phoenix/db/insertion/document_annotation.py +31 -13
- phoenix/db/insertion/evaluation.py +15 -3
- phoenix/db/insertion/helpers.py +2 -1
- phoenix/db/insertion/span_annotation.py +26 -9
- phoenix/db/insertion/trace_annotation.py +25 -9
- phoenix/db/insertion/types.py +7 -0
- phoenix/db/migrations/versions/2f9d1a65945f_annotation_config_migration.py +322 -0
- phoenix/db/migrations/versions/8a3764fe7f1a_change_jsonb_to_json_for_prompts.py +76 -0
- phoenix/db/migrations/versions/bb8139330879_create_project_trace_retention_policies_table.py +77 -0
- phoenix/db/models.py +151 -10
- phoenix/db/types/annotation_configs.py +97 -0
- phoenix/db/types/db_models.py +41 -0
- phoenix/db/types/trace_retention.py +267 -0
- phoenix/experiments/functions.py +5 -1
- phoenix/server/api/auth.py +9 -0
- phoenix/server/api/context.py +5 -0
- phoenix/server/api/dataloaders/__init__.py +4 -0
- phoenix/server/api/dataloaders/annotation_summaries.py +203 -24
- phoenix/server/api/dataloaders/project_ids_by_trace_retention_policy_id.py +42 -0
- phoenix/server/api/dataloaders/trace_retention_policy_id_by_project_id.py +34 -0
- phoenix/server/api/helpers/annotations.py +9 -0
- phoenix/server/api/helpers/prompts/models.py +34 -67
- phoenix/server/api/input_types/CreateSpanAnnotationInput.py +9 -0
- phoenix/server/api/input_types/CreateTraceAnnotationInput.py +3 -0
- phoenix/server/api/input_types/PatchAnnotationInput.py +3 -0
- phoenix/server/api/input_types/SpanAnnotationFilter.py +67 -0
- phoenix/server/api/mutations/__init__.py +6 -0
- phoenix/server/api/mutations/annotation_config_mutations.py +413 -0
- phoenix/server/api/mutations/dataset_mutations.py +62 -39
- phoenix/server/api/mutations/project_trace_retention_policy_mutations.py +245 -0
- phoenix/server/api/mutations/span_annotations_mutations.py +272 -70
- phoenix/server/api/mutations/trace_annotations_mutations.py +203 -74
- phoenix/server/api/queries.py +86 -0
- phoenix/server/api/routers/v1/__init__.py +4 -0
- phoenix/server/api/routers/v1/annotation_configs.py +449 -0
- phoenix/server/api/routers/v1/annotations.py +161 -0
- phoenix/server/api/routers/v1/evaluations.py +6 -0
- phoenix/server/api/routers/v1/projects.py +1 -50
- phoenix/server/api/routers/v1/spans.py +35 -8
- phoenix/server/api/routers/v1/traces.py +22 -13
- phoenix/server/api/routers/v1/utils.py +60 -0
- phoenix/server/api/types/Annotation.py +7 -0
- phoenix/server/api/types/AnnotationConfig.py +124 -0
- phoenix/server/api/types/AnnotationSource.py +9 -0
- phoenix/server/api/types/AnnotationSummary.py +28 -14
- phoenix/server/api/types/AnnotatorKind.py +1 -0
- phoenix/server/api/types/CronExpression.py +15 -0
- phoenix/server/api/types/Evaluation.py +4 -30
- phoenix/server/api/types/Project.py +50 -2
- phoenix/server/api/types/ProjectTraceRetentionPolicy.py +110 -0
- phoenix/server/api/types/Span.py +78 -0
- phoenix/server/api/types/SpanAnnotation.py +24 -0
- phoenix/server/api/types/Trace.py +2 -2
- phoenix/server/api/types/TraceAnnotation.py +23 -0
- phoenix/server/app.py +20 -0
- phoenix/server/retention.py +76 -0
- phoenix/server/static/.vite/manifest.json +36 -36
- phoenix/server/static/assets/components-B2MWTXnm.js +4326 -0
- phoenix/server/static/assets/{index-B0CbpsxD.js → index-Bfvpea_-.js} +10 -10
- phoenix/server/static/assets/pages-CZ2vKu8H.js +7268 -0
- phoenix/server/static/assets/vendor-BRDkBC5J.js +903 -0
- phoenix/server/static/assets/{vendor-arizeai-CxXYQNUl.js → vendor-arizeai-BvTqp_W8.js} +3 -3
- phoenix/server/static/assets/{vendor-codemirror-B0NIFPOL.js → vendor-codemirror-COt9UfW7.js} +1 -1
- phoenix/server/static/assets/{vendor-recharts-CrrDFWK1.js → vendor-recharts-BoHX9Hvs.js} +2 -2
- phoenix/server/static/assets/{vendor-shiki-C5bJ-RPf.js → vendor-shiki-Cw1dsDAz.js} +1 -1
- phoenix/trace/dsl/filter.py +25 -5
- phoenix/utilities/__init__.py +18 -0
- phoenix/version.py +1 -1
- phoenix/server/static/assets/components-x-gKFJ8C.js +0 -3414
- phoenix/server/static/assets/pages-BU4VdyeH.js +0 -5867
- phoenix/server/static/assets/vendor-BfhM_F1u.js +0 -902
- {arize_phoenix-8.32.1.dist-info → arize_phoenix-9.0.1.dist-info}/WHEEL +0 -0
- {arize_phoenix-8.32.1.dist-info → arize_phoenix-9.0.1.dist-info}/entry_points.txt +0 -0
- {arize_phoenix-8.32.1.dist-info → arize_phoenix-9.0.1.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-8.32.1.dist-info → arize_phoenix-9.0.1.dist-info}/licenses/LICENSE +0 -0
phoenix/db/insertion/helpers.py
CHANGED
@@ -36,6 +36,7 @@ def insert_on_conflict(
     unique_by: Sequence[str],
     on_conflict: OnConflict = OnConflict.DO_UPDATE,
     set_: Optional[Mapping[str, Any]] = None,
+    constraint_name: Optional[str] = None,
 ) -> Insert:
     """
     Dialect specific insertion statement using ON CONFLICT DO syntax.
@@ -50,7 +51,7 @@ def insert_on_conflict(
             unique_records.append(v)
             seen.add(k)
     records = tuple(reversed(unique_records))
-    constraint = "_".join(("uq", table.__tablename__, *unique_by))
+    constraint = constraint_name or "_".join(("uq", table.__tablename__, *unique_by))
     if dialect is SupportedSQLDialect.POSTGRESQL:
         stmt_postgresql = insert_postgresql(table).values(records)
         if on_conflict is OnConflict.DO_NOTHING:
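For orientation, a minimal usage sketch of the new `constraint_name` argument. Only `insert_on_conflict`, `OnConflict`, `SupportedSQLDialect`, and the model classes come from the diffs on this page; the sample records, the choice of `DO_NOTHING`, and the exact import locations are assumptions.

```python
# Illustrative sketch only: builds two insert statements, once with the
# constraint name derived from unique_by and once with an explicit override.
# Import paths are assumed to match the names referenced in helpers.py.
from phoenix.db import models
from phoenix.db.insertion.helpers import (
    OnConflict,
    SupportedSQLDialect,
    insert_on_conflict,
)

# Without constraint_name, the statement targets the constraint named
# "uq_" + table name + "_" + the unique_by columns, i.e.
# "uq_span_annotations_name_span_rowid_identifier" here.
span_record = {"name": "correctness", "span_rowid": 1, "identifier": "", "label": "good"}
stmt_derived = insert_on_conflict(
    span_record,
    table=models.SpanAnnotation,
    unique_by=("name", "span_rowid", "identifier"),
    on_conflict=OnConflict.DO_NOTHING,
    dialect=SupportedSQLDialect.POSTGRESQL,
)

# constraint_name overrides the derived name. The document-annotation
# constraint created by the migration further below was abbreviated to fit
# Postgres's 63-character identifier limit, so the derived name would not
# match and the explicit name is needed.
doc_record = {"name": "relevance", "span_rowid": 1, "document_position": 0, "identifier": ""}
stmt_explicit = insert_on_conflict(
    doc_record,
    table=models.DocumentAnnotation,
    unique_by=("name", "span_rowid", "document_position", "identifier"),
    constraint_name="uq_document_annotations_name_span_rowid_document_pos_identifier",
    on_conflict=OnConflict.DO_NOTHING,
    dialect=SupportedSQLDialect.POSTGRESQL,
)
```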
phoenix/db/insertion/span_annotation.py
CHANGED
@@ -21,15 +21,23 @@ from phoenix.server.dml_event import SpanAnnotationDmlEvent
 _Name: TypeAlias = str
 _SpanId: TypeAlias = str
 _SpanRowId: TypeAlias = int
+_Identifier: TypeAlias = str
 _AnnoRowId: TypeAlias = int
 
-
-
+
+class _Key(NamedTuple):
+    annotation_name: _Name
+    annotation_identifier: _Identifier
+    span_id: _SpanId
+
+
+_UniqueBy: TypeAlias = tuple[_Name, _SpanRowId, _Identifier]
 _Existing: TypeAlias = tuple[
     _SpanRowId,
     _SpanId,
     Optional[_AnnoRowId],
     Optional[_Name],
+    Optional[_Identifier],
     Optional[datetime],
 ]
 
@@ -42,7 +50,7 @@ class SpanAnnotationQueueInserter(
         SpanAnnotationDmlEvent,
     ],
     table=models.SpanAnnotation,
-    unique_by=("name", "span_rowid"),
+    unique_by=("name", "span_rowid", "identifier"),
 ):
     async def _events(
         self,
@@ -73,7 +81,11 @@ class SpanAnnotationQueueInserter(
             e.span_id: _SpanAttr(e.span_rowid) for e in existing
         }
         existing_annos: Mapping[_Key, _AnnoAttr] = {
-            (
+            _Key(
+                annotation_name=e.name,
+                annotation_identifier=e.identifier,
+                span_id=e.span_id,
+            ): _AnnoAttr(e.span_rowid, e.id, e.updated_at)
             for e in existing
             if e.id is not None and e.name is not None and e.updated_at is not None
         }
@@ -119,19 +131,20 @@ class SpanAnnotationQueueInserter(
         anno = self.table
         span = (
             select(models.Span.id, models.Span.span_id)
-            .where(models.Span.span_id.in_({span_id for
+            .where(models.Span.span_id.in_({k.span_id for k in keys}))
             .cte()
         )
         onclause = and_(
             span.c.id == anno.span_rowid,
-            anno.name.in_({
-            tuple_(anno.name, span.c.span_id).in_(keys),
+            anno.name.in_({k.annotation_name for k in keys}),
+            tuple_(anno.name, anno.identifier, span.c.span_id).in_(keys),
         )
         return select(
             span.c.id.label("span_rowid"),
             span.c.span_id,
             anno.id,
             anno.name,
+            anno.identifier,
             anno.updated_at,
         ).outerjoin_from(span, anno, onclause)
 
@@ -147,11 +160,15 @@ class _AnnoAttr(NamedTuple):
 
 
 def _key(p: Received[Precursors.SpanAnnotation]) -> _Key:
-    return
+    return _Key(
+        annotation_name=p.item.obj.name,
+        annotation_identifier=p.item.obj.identifier,
+        span_id=p.item.span_id,
+    )
 
 
 def _unique_by(p: Received[Insertables.SpanAnnotation]) -> _UniqueBy:
-    return p.item.obj.name, p.item.span_rowid
+    return p.item.obj.name, p.item.span_rowid, p.item.identifier
 
 
 def _time(p: Received[Any]) -> datetime:
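The net effect of the span-annotation changes above (and of the parallel trace-annotation changes below) is that the upsert key now includes the annotation's identifier. A self-contained toy, not Phoenix code, that mimics the new collision rule:

```python
# Standalone illustration of the new identity rule for span annotations:
# two annotations collide only when name, span_rowid, and identifier all
# match, so the same annotation name can now appear on a span more than once
# as long as the identifiers differ.
from typing import NamedTuple


class SpanAnnotationKey(NamedTuple):  # analogous to the _Key/_UniqueBy change above
    name: str
    span_rowid: int
    identifier: str


existing: dict[SpanAnnotationKey, str] = {}


def upsert(name: str, span_rowid: int, identifier: str, label: str) -> None:
    # ON CONFLICT DO UPDATE semantics in miniature: the same key overwrites,
    # a different identifier creates a separate row.
    existing[SpanAnnotationKey(name, span_rowid, identifier)] = label


upsert("correctness", 1, "", "good")
upsert("correctness", 1, "", "bad")              # updates the existing annotation
upsert("correctness", 1, "reviewer-2", "good")   # second annotation with the same name
assert len(existing) == 2
```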
phoenix/db/insertion/trace_annotation.py
CHANGED
@@ -22,9 +22,16 @@ _Name: TypeAlias = str
 _TraceId: TypeAlias = str
 _TraceRowId: TypeAlias = int
 _AnnoRowId: TypeAlias = int
+_Identifier: TypeAlias = str
 
-
-
+
+class _Key(NamedTuple):
+    annotation_name: _Name
+    annotation_identifier: _Identifier
+    trace_id: _TraceId
+
+
+_UniqueBy: TypeAlias = tuple[_Name, _TraceRowId, _Identifier]
 _Existing: TypeAlias = tuple[
     _TraceRowId,
     _TraceId,
@@ -42,7 +49,7 @@ class TraceAnnotationQueueInserter(
         TraceAnnotationDmlEvent,
     ],
     table=models.TraceAnnotation,
-    unique_by=("name", "trace_rowid"),
+    unique_by=("name", "trace_rowid", "identifier"),
 ):
     async def _events(
         self,
@@ -73,7 +80,11 @@ class TraceAnnotationQueueInserter(
             e.trace_id: _TraceAttr(e.trace_rowid) for e in existing
         }
         existing_annos: Mapping[_Key, _AnnoAttr] = {
-            (
+            _Key(
+                annotation_name=e.name,
+                annotation_identifier=e.identifier,
+                trace_id=e.trace_id,
+            ): _AnnoAttr(e.trace_rowid, e.id, e.updated_at)
             for e in existing
             if e.id is not None and e.name is not None and e.updated_at is not None
         }
@@ -119,19 +130,20 @@ class TraceAnnotationQueueInserter(
         anno = self.table
         trace = (
             select(models.Trace.id, models.Trace.trace_id)
-            .where(models.Trace.trace_id.in_({trace_id for
+            .where(models.Trace.trace_id.in_({k.trace_id for k in keys}))
             .cte()
         )
         onclause = and_(
             trace.c.id == anno.trace_rowid,
-            anno.name.in_({
-            tuple_(anno.name, trace.c.trace_id).in_(keys),
+            anno.name.in_({k.annotation_name for k in keys}),
+            tuple_(anno.name, anno.identifier, trace.c.trace_id).in_(keys),
         )
         return select(
             trace.c.id.label("trace_rowid"),
             trace.c.trace_id,
             anno.id,
             anno.name,
+            anno.identifier,
             anno.updated_at,
         ).outerjoin_from(trace, anno, onclause)
 
@@ -147,11 +159,15 @@ class _AnnoAttr(NamedTuple):
 
 
 def _key(p: Received[Precursors.TraceAnnotation]) -> _Key:
-    return
+    return _Key(
+        annotation_name=p.item.obj.name,
+        annotation_identifier=p.item.obj.identifier,
+        trace_id=p.item.trace_id,
+    )
 
 
 def _unique_by(p: Received[Insertables.TraceAnnotation]) -> _UniqueBy:
-    return p.item.obj.name, p.item.trace_rowid
+    return p.item.obj.name, p.item.trace_rowid, p.item.identifier
 
 
 def _time(p: Received[Any]) -> datetime:
phoenix/db/insertion/types.py
CHANGED
@@ -50,14 +50,17 @@ class Postponed(Received[_AnyT]):
 class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT, _DmlEventT]):
     table: type[_RowT]
     unique_by: Sequence[str]
+    constraint_name: Optional[str] = None
 
     def __init_subclass__(
         cls,
         table: type[_RowT],
         unique_by: Sequence[str],
+        constraint_name: Optional[str] = None,
     ) -> None:
         cls.table = table
         cls.unique_by = unique_by
+        cls.constraint_name = constraint_name
 
     def __init__(
         self,
@@ -109,6 +112,7 @@ class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT, _DmlEventT]):
             *records,
             table=self.table,
             unique_by=self.unique_by,
+            constraint_name=self.constraint_name,
             dialect=self._db.dialect,
         )
 
@@ -219,6 +223,7 @@ class Insertables(ABC):
     @dataclass(frozen=True)
     class SpanAnnotation(Precursors.SpanAnnotation):
         span_rowid: int
+        identifier: str = ""
         id_: Optional[int] = None
 
         @property
@@ -232,6 +237,7 @@ class Insertables(ABC):
     @dataclass(frozen=True)
     class TraceAnnotation(Precursors.TraceAnnotation):
         trace_rowid: int
+        identifier: str = ""
         id_: Optional[int] = None
 
         @property
@@ -245,6 +251,7 @@ class Insertables(ABC):
     @dataclass(frozen=True)
     class DocumentAnnotation(Precursors.DocumentAnnotation):
         span_rowid: int
+        identifier: str = ""
         id_: Optional[int] = None
 
         @property
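A self-contained toy, not Phoenix code, showing the `__init_subclass__` plumbing added here: a `constraint_name` supplied at class-definition time is stored on the class and later forwarded to `insert_on_conflict` together with `table` and `unique_by`. The document-annotation subclass name and the motivation in the comment are assumptions based on the migration below.

```python
# Toy reconstruction of the class-keyword mechanism in QueueInserter.
from typing import Optional, Sequence


class QueueInserterSketch:
    table: str
    unique_by: Sequence[str]
    constraint_name: Optional[str] = None

    def __init_subclass__(
        cls,
        table: str,
        unique_by: Sequence[str],
        constraint_name: Optional[str] = None,
    ) -> None:
        # Class keywords become class attributes, exactly as in the diff above.
        cls.table = table
        cls.unique_by = unique_by
        cls.constraint_name = constraint_name


class DocumentAnnotationInserterSketch(
    QueueInserterSketch,
    table="document_annotations",
    unique_by=("name", "span_rowid", "document_position", "identifier"),
    # Presumably needed because the real constraint was abbreviated to stay
    # under Postgres's 63-character identifier limit (see the migration below).
    constraint_name="uq_document_annotations_name_span_rowid_document_pos_identifier",
):
    pass


assert DocumentAnnotationInserterSketch.constraint_name is not None
```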
phoenix/db/migrations/versions/2f9d1a65945f_annotation_config_migration.py
ADDED
@@ -0,0 +1,322 @@
+"""Annotation config migrations
+
+Revision ID: 2f9d1a65945f
+Revises: bc8fea3c2bc8
+Create Date: 2025-02-06 10:17:15.726197
+
+"""
+
+from typing import Any, Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy import JSON, text
+from sqlalchemy.dialects import postgresql
+from sqlalchemy.ext.compiler import compiles
+
+# revision identifiers, used by Alembic.
+revision: str = "2f9d1a65945f"
+down_revision: Union[str, None] = "bc8fea3c2bc8"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+class JSONB(JSON):
+    # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+    __visit_name__ = "JSONB"
+
+
+@compiles(JSONB, "sqlite")
+def _(*args: Any, **kwargs: Any) -> str:
+    # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+    return "JSONB"
+
+
+JSON_ = (
+    JSON()
+    .with_variant(
+        postgresql.JSONB(),
+        "postgresql",
+    )
+    .with_variant(
+        JSONB(),
+        "sqlite",
+    )
+)
+
+
+def upgrade() -> None:
+    with op.batch_alter_table("span_annotations") as batch_op:
+        batch_op.drop_index("ix_span_annotations_score")
+        batch_op.drop_index("ix_span_annotations_label")
+        batch_op.add_column(
+            sa.Column(
+                "user_id",
+                sa.Integer,
+                sa.ForeignKey("users.id", ondelete="SET NULL"),
+                nullable=True,
+            ),
+        )
+        batch_op.add_column(
+            sa.Column(
+                "identifier",
+                sa.String,
+                server_default="",
+                nullable=False,
+            ),
+        )
+        batch_op.add_column(
+            sa.Column(
+                "source",
+                sa.String,
+                nullable=True,
+            ),
+        )
+        batch_op.drop_constraint(
+            constraint_name="valid_annotator_kind",
+            type_="check",
+        )
+        batch_op.create_check_constraint(
+            constraint_name="valid_annotator_kind",
+            condition="annotator_kind IN ('LLM', 'CODE', 'HUMAN')",
+        )
+        batch_op.drop_constraint("uq_span_annotations_name_span_rowid", type_="unique")
+        batch_op.create_unique_constraint(
+            "uq_span_annotations_name_span_rowid_identifier",
+            ["name", "span_rowid", "identifier"],
+        )
+    with op.batch_alter_table("span_annotations") as batch_op:
+        batch_op.execute(
+            text(
+                """
+                UPDATE span_annotations
+                SET source = CASE
+                    WHEN annotator_kind = 'HUMAN' THEN 'APP'
+                    ELSE 'API'
+                END
+                """
+            )
+        )
+        batch_op.alter_column(
+            "source",
+            nullable=False,
+            existing_nullable=True,
+        )
+        batch_op.create_check_constraint(
+            constraint_name="valid_source",
+            condition="source IN ('API', 'APP')",
+        )
+
+    with op.batch_alter_table("trace_annotations") as batch_op:
+        batch_op.drop_index("ix_trace_annotations_score")
+        batch_op.drop_index("ix_trace_annotations_label")
+        batch_op.add_column(
+            sa.Column(
+                "user_id",
+                sa.Integer,
+                sa.ForeignKey("users.id", ondelete="SET NULL"),
+                nullable=True,
+            ),
+        )
+        batch_op.add_column(
+            sa.Column(
+                "identifier",
+                sa.String,
+                server_default="",
+                nullable=False,
+            ),
+        )
+        batch_op.add_column(
+            sa.Column(
+                "source",
+                sa.String,
+                nullable=True,  # must initially be nullable before backfill
+            ),
+        )
+        batch_op.drop_constraint(
+            constraint_name="valid_annotator_kind",
+            type_="check",
+        )
+        batch_op.create_check_constraint(
+            constraint_name="valid_annotator_kind",
+            condition="annotator_kind IN ('LLM', 'CODE', 'HUMAN')",
+        )
+        batch_op.drop_constraint("uq_trace_annotations_name_trace_rowid", type_="unique")
+        batch_op.create_unique_constraint(
+            "uq_trace_annotations_name_trace_rowid_identifier",
+            ["name", "trace_rowid", "identifier"],
+        )
+    with op.batch_alter_table("trace_annotations") as batch_op:
+        batch_op.execute(
+            text(
+                """
+                UPDATE trace_annotations
+                SET source = CASE
+                    WHEN annotator_kind = 'HUMAN' THEN 'APP'
+                    ELSE 'API'
+                END
+                """
+            )
+        )
+        batch_op.alter_column(
+            "source",
+            nullable=False,
+            existing_nullable=True,
+        )
+        batch_op.create_check_constraint(
+            constraint_name="valid_source",
+            condition="source IN ('API', 'APP')",
+        )
+
+    with op.batch_alter_table("document_annotations") as batch_op:
+        batch_op.drop_index("ix_document_annotations_score")
+        batch_op.drop_index("ix_document_annotations_label")
+        batch_op.add_column(
+            sa.Column(
+                "user_id",
+                sa.Integer,
+                sa.ForeignKey("users.id", ondelete="SET NULL"),
+                nullable=True,
+            ),
+        )
+        batch_op.add_column(
+            sa.Column(
+                "identifier",
+                sa.String,
+                server_default="",
+                nullable=False,
+            ),
+        )
+        batch_op.add_column(
+            sa.Column(
+                "source",
+                sa.String,
+                nullable=True,
+            ),
+        )
+        batch_op.drop_constraint(
+            constraint_name="valid_annotator_kind",
+            type_="check",
+        )
+        batch_op.create_check_constraint(
+            constraint_name="valid_annotator_kind",
+            condition="annotator_kind IN ('LLM', 'CODE', 'HUMAN')",
+        )
+        batch_op.drop_constraint(
+            "uq_document_annotations_name_span_rowid_document_position",
+            type_="unique",
+        )
+        batch_op.create_unique_constraint(
+            "uq_document_annotations_name_span_rowid_document_pos_identifier",  # this name does not conform to the auto-generated pattern, which results in a name longer than the Postgres limit of 63 characters  # noqa: E501
+            ["name", "span_rowid", "document_position", "identifier"],
+        )
+    with op.batch_alter_table("document_annotations") as batch_op:
+        batch_op.execute(
+            text(
+                """
+                UPDATE document_annotations
+                SET source = CASE
+                    WHEN annotator_kind = 'HUMAN' THEN 'APP'
+                    ELSE 'API'
+                END
+                """
+            )
+        )
+        batch_op.alter_column(
+            "source",
+            nullable=False,
+            existing_nullable=True,
+        )
+        batch_op.create_check_constraint(
+            constraint_name="valid_source",
+            condition="source IN ('API', 'APP')",
+        )
+
+    op.create_table(
+        "annotation_configs",
+        sa.Column("id", sa.Integer, primary_key=True),
+        sa.Column("name", sa.String, nullable=False, unique=True),
+        sa.Column("config", JSON_, nullable=False),
+    )
+
+    op.create_table(
+        "project_annotation_configs",
+        sa.Column("id", sa.Integer, primary_key=True),
+        sa.Column(
+            "project_id",
+            sa.Integer,
+            sa.ForeignKey("projects.id", ondelete="CASCADE"),
+            nullable=False,
+            index=True,
+        ),
+        sa.Column(
+            "annotation_config_id",
+            sa.Integer,
+            sa.ForeignKey("annotation_configs.id", ondelete="CASCADE"),
+            nullable=False,
+            index=True,
+        ),
+        sa.UniqueConstraint(
+            "project_id",
+            "annotation_config_id",
+        ),
+    )
+
+
+def downgrade() -> None:
+    op.drop_table("project_annotation_configs")
+    op.drop_table("annotation_configs")
+
+    with op.batch_alter_table("document_annotations") as batch_op:
+        batch_op.create_index("ix_document_annotations_score", ["score"])
+        batch_op.create_index("ix_document_annotations_label", ["label"])
+        batch_op.drop_constraint(
+            "uq_document_annotations_name_span_rowid_document_pos_identifier", type_="unique"
+        )
+        batch_op.create_unique_constraint(
+            "uq_document_annotations_name_span_rowid_document_position",
+            ["name", "span_rowid", "document_position"],
+        )
+        batch_op.drop_constraint("valid_annotator_kind", type_="check")
+        batch_op.create_check_constraint(
+            "valid_annotator_kind",
+            condition="annotator_kind IN ('LLM', 'HUMAN')",
+        )
+        batch_op.drop_constraint("valid_source", type_="check")
+        batch_op.drop_column("source")
+        batch_op.drop_column("identifier")
+        batch_op.drop_column("user_id")
+
+    with op.batch_alter_table("trace_annotations") as batch_op:
+        batch_op.create_index("ix_trace_annotations_score", ["score"])
+        batch_op.create_index("ix_trace_annotations_label", ["label"])
+        batch_op.drop_constraint("uq_trace_annotations_name_trace_rowid_identifier", type_="unique")
+        batch_op.create_unique_constraint(
+            "uq_trace_annotations_name_trace_rowid", ["name", "trace_rowid"]
+        )
+        batch_op.drop_constraint("valid_annotator_kind", type_="check")
+        batch_op.create_check_constraint(
+            "valid_annotator_kind",
+            condition="annotator_kind IN ('LLM', 'HUMAN')",
+        )
+        batch_op.drop_constraint("valid_source", type_="check")
+        batch_op.drop_column("source")
+        batch_op.drop_column("identifier")
+        batch_op.drop_column("user_id")
+
+    with op.batch_alter_table("span_annotations") as batch_op:
+        batch_op.create_index("ix_span_annotations_score", ["score"])
+        batch_op.create_index("ix_span_annotations_label", ["label"])
+        batch_op.drop_constraint("uq_span_annotations_name_span_rowid_identifier", type_="unique")
+        batch_op.create_unique_constraint(
+            "uq_span_annotations_name_span_rowid", ["name", "span_rowid"]
+        )
+        batch_op.drop_constraint("valid_annotator_kind", type_="check")
+        batch_op.create_check_constraint(
+            "valid_annotator_kind",
+            condition="annotator_kind IN ('LLM', 'HUMAN')",
+        )
+        batch_op.drop_constraint("valid_source", type_="check")
+        batch_op.drop_column("source")
+        batch_op.drop_column("identifier")
+        batch_op.drop_column("user_id")
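The backfill in all three annotation tables derives the new non-null `source` column from `annotator_kind`. A one-function restatement of that SQL CASE, for reference:

```python
# Restates the UPDATE ... SET source = CASE ... used for span, trace, and
# document annotations in the migration above.
def backfilled_source(annotator_kind: str) -> str:
    # 'HUMAN' annotations are treated as app-entered; everything else
    # ('LLM', 'CODE', or any legacy value) is treated as API-submitted.
    return "APP" if annotator_kind == "HUMAN" else "API"


assert backfilled_source("HUMAN") == "APP"
assert backfilled_source("LLM") == "API"
assert backfilled_source("CODE") == "API"
```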
phoenix/db/migrations/versions/8a3764fe7f1a_change_jsonb_to_json_for_prompts.py
ADDED
@@ -0,0 +1,76 @@
+"""change jsonb to json for prompts
+
+Revision ID: 8a3764fe7f1a
+Revises: bb8139330879
+Create Date: 2025-04-25 07:04:26.102957
+
+"""
+
+from typing import Any, Sequence, Union
+
+from alembic import op
+from sqlalchemy import JSON
+from sqlalchemy.dialects import postgresql
+from sqlalchemy.ext.compiler import compiles
+
+# revision identifiers, used by Alembic.
+revision: str = "8a3764fe7f1a"
+down_revision: Union[str, None] = "bb8139330879"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+class JSONB(JSON):
+    # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+    __visit_name__ = "JSONB"
+
+
+@compiles(JSONB, "sqlite")
+def _(*args: Any, **kwargs: Any) -> str:
+    # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
+    return "JSONB"
+
+
+JSON_ = (
+    JSON()
+    .with_variant(
+        postgresql.JSONB(),
+        "postgresql",
+    )
+    .with_variant(
+        JSONB(),
+        "sqlite",
+    )
+)
+
+
+def upgrade() -> None:
+    with op.batch_alter_table("prompt_versions") as batch_op:
+        batch_op.alter_column(
+            "tools",
+            type_=JSON,
+            existing_type=JSON_,
+            postgresql_using="tools::json",
+        )
+        batch_op.alter_column(
+            "response_format",
+            type_=JSON,
+            existing_type=JSON_,
+            postgresql_using="response_format::json",
+        )
+
+
+def downgrade() -> None:
+    with op.batch_alter_table("prompt_versions") as batch_op:
+        batch_op.alter_column(
+            "tools",
+            type_=JSON_,
+            existing_type=JSON,
+            postgresql_using="tools::jsonb",
+        )
+        batch_op.alter_column(
+            "response_format",
+            type_=JSON_,
+            existing_type=JSON,
+            postgresql_using="response_format::jsonb",
+        )
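Both new migration files repeat the same `JSON_`/`JSONB` variant pattern. A small standalone sketch of that pattern, with an illustrative table name, that renders the resulting DDL per dialect:

```python
# Sketch of the variant type used by both migrations: the column compiles to
# JSONB on PostgreSQL and to the custom "JSONB" type name on SQLite; the
# @compiles hook is what lets SQLite's type compiler render a JSONB keyword it
# does not know natively. The "demo" table and "config" column are made up.
from typing import Any

import sqlalchemy as sa
from sqlalchemy import JSON
from sqlalchemy.dialects import postgresql, sqlite
from sqlalchemy.ext.compiler import compiles


class JSONB(JSON):
    __visit_name__ = "JSONB"


@compiles(JSONB, "sqlite")
def _(*args: Any, **kwargs: Any) -> str:
    return "JSONB"


JSON_ = (
    JSON()
    .with_variant(postgresql.JSONB(), "postgresql")
    .with_variant(JSONB(), "sqlite")
)

metadata = sa.MetaData()
demo = sa.Table("demo", metadata, sa.Column("config", JSON_))

# Rendering the CREATE TABLE statement against each dialect shows the
# per-dialect type choice.
print(sa.schema.CreateTable(demo).compile(dialect=postgresql.dialect()))
print(sa.schema.CreateTable(demo).compile(dialect=sqlite.dialect()))
```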