arize-phoenix 4.14.1__py3-none-any.whl → 4.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arize-phoenix might be problematic; consult the registry's release advisory for more details.

Files changed (85) hide show
  1. {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.16.0.dist-info}/METADATA +5 -3
  2. {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.16.0.dist-info}/RECORD +81 -71
  3. phoenix/db/bulk_inserter.py +131 -5
  4. phoenix/db/engines.py +2 -1
  5. phoenix/db/helpers.py +23 -1
  6. phoenix/db/insertion/constants.py +2 -0
  7. phoenix/db/insertion/document_annotation.py +157 -0
  8. phoenix/db/insertion/helpers.py +13 -0
  9. phoenix/db/insertion/span_annotation.py +144 -0
  10. phoenix/db/insertion/trace_annotation.py +144 -0
  11. phoenix/db/insertion/types.py +261 -0
  12. phoenix/experiments/functions.py +3 -2
  13. phoenix/experiments/types.py +3 -3
  14. phoenix/server/api/context.py +7 -9
  15. phoenix/server/api/dataloaders/__init__.py +2 -0
  16. phoenix/server/api/dataloaders/average_experiment_run_latency.py +3 -3
  17. phoenix/server/api/dataloaders/dataset_example_revisions.py +2 -4
  18. phoenix/server/api/dataloaders/dataset_example_spans.py +2 -4
  19. phoenix/server/api/dataloaders/document_evaluation_summaries.py +2 -4
  20. phoenix/server/api/dataloaders/document_evaluations.py +2 -4
  21. phoenix/server/api/dataloaders/document_retrieval_metrics.py +2 -4
  22. phoenix/server/api/dataloaders/evaluation_summaries.py +2 -4
  23. phoenix/server/api/dataloaders/experiment_annotation_summaries.py +2 -4
  24. phoenix/server/api/dataloaders/experiment_error_rates.py +2 -4
  25. phoenix/server/api/dataloaders/experiment_run_counts.py +2 -4
  26. phoenix/server/api/dataloaders/experiment_sequence_number.py +2 -4
  27. phoenix/server/api/dataloaders/latency_ms_quantile.py +2 -3
  28. phoenix/server/api/dataloaders/min_start_or_max_end_times.py +2 -4
  29. phoenix/server/api/dataloaders/project_by_name.py +3 -3
  30. phoenix/server/api/dataloaders/record_counts.py +2 -4
  31. phoenix/server/api/dataloaders/span_annotations.py +2 -4
  32. phoenix/server/api/dataloaders/span_dataset_examples.py +36 -0
  33. phoenix/server/api/dataloaders/span_descendants.py +2 -4
  34. phoenix/server/api/dataloaders/span_evaluations.py +2 -4
  35. phoenix/server/api/dataloaders/span_projects.py +3 -3
  36. phoenix/server/api/dataloaders/token_counts.py +2 -4
  37. phoenix/server/api/dataloaders/trace_evaluations.py +2 -4
  38. phoenix/server/api/dataloaders/trace_row_ids.py +2 -4
  39. phoenix/server/api/input_types/SpanAnnotationSort.py +17 -0
  40. phoenix/server/api/input_types/TraceAnnotationSort.py +17 -0
  41. phoenix/server/api/mutations/span_annotations_mutations.py +8 -3
  42. phoenix/server/api/mutations/trace_annotations_mutations.py +8 -3
  43. phoenix/server/api/openapi/main.py +18 -2
  44. phoenix/server/api/openapi/schema.py +12 -12
  45. phoenix/server/api/routers/v1/__init__.py +36 -83
  46. phoenix/server/api/routers/v1/datasets.py +515 -509
  47. phoenix/server/api/routers/v1/evaluations.py +164 -73
  48. phoenix/server/api/routers/v1/experiment_evaluations.py +68 -91
  49. phoenix/server/api/routers/v1/experiment_runs.py +98 -155
  50. phoenix/server/api/routers/v1/experiments.py +132 -181
  51. phoenix/server/api/routers/v1/pydantic_compat.py +78 -0
  52. phoenix/server/api/routers/v1/spans.py +164 -203
  53. phoenix/server/api/routers/v1/traces.py +134 -159
  54. phoenix/server/api/routers/v1/utils.py +95 -0
  55. phoenix/server/api/types/Span.py +27 -3
  56. phoenix/server/api/types/Trace.py +21 -4
  57. phoenix/server/api/utils.py +4 -4
  58. phoenix/server/app.py +172 -192
  59. phoenix/server/grpc_server.py +2 -2
  60. phoenix/server/main.py +5 -9
  61. phoenix/server/static/.vite/manifest.json +31 -31
  62. phoenix/server/static/assets/components-Ci5kMOk5.js +1175 -0
  63. phoenix/server/static/assets/{index-CQgXRwU0.js → index-BQG5WVX7.js} +2 -2
  64. phoenix/server/static/assets/{pages-hdjlFZhO.js → pages-BrevprVW.js} +451 -275
  65. phoenix/server/static/assets/{vendor-DPvSDRn3.js → vendor-CP0b0YG0.js} +2 -2
  66. phoenix/server/static/assets/{vendor-arizeai-CkvPT67c.js → vendor-arizeai-DTbiPGp6.js} +27 -27
  67. phoenix/server/static/assets/vendor-codemirror-DtdPDzrv.js +15 -0
  68. phoenix/server/static/assets/{vendor-recharts-5jlNaZuF.js → vendor-recharts-A0DA1O99.js} +1 -1
  69. phoenix/server/thread_server.py +2 -2
  70. phoenix/server/types.py +18 -0
  71. phoenix/session/client.py +5 -3
  72. phoenix/session/session.py +2 -2
  73. phoenix/trace/dsl/filter.py +2 -6
  74. phoenix/trace/fixtures.py +17 -23
  75. phoenix/trace/utils.py +23 -0
  76. phoenix/utilities/client.py +116 -0
  77. phoenix/utilities/project.py +1 -1
  78. phoenix/version.py +1 -1
  79. phoenix/server/api/routers/v1/dataset_examples.py +0 -178
  80. phoenix/server/openapi/docs.py +0 -221
  81. phoenix/server/static/assets/components-DeS0YEmv.js +0 -1142
  82. phoenix/server/static/assets/vendor-codemirror-Cqwpwlua.js +0 -12
  83. {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.16.0.dist-info}/WHEEL +0 -0
  84. {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.16.0.dist-info}/licenses/IP_NOTICE +0 -0
  85. {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.16.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,157 @@
1
+ from datetime import datetime
2
+ from typing import Any, List, Mapping, NamedTuple, Optional, Tuple
3
+
4
+ from sqlalchemy import Row, Select, and_, select, tuple_
5
+ from sqlalchemy.ext.asyncio import AsyncSession
6
+ from typing_extensions import TypeAlias
7
+
8
+ from phoenix.db import models
9
+ from phoenix.db.helpers import dedup, num_docs_col
10
+ from phoenix.db.insertion.types import (
11
+ Insertables,
12
+ Postponed,
13
+ Precursors,
14
+ QueueInserter,
15
+ Received,
16
+ )
17
+
18
# Scalar type aliases naming the components of the keys below.
_Name: TypeAlias = str  # annotation name
_SpanId: TypeAlias = str  # span id string supplied by the client
_SpanRowId: TypeAlias = int  # spans table primary key
_DocumentPosition: TypeAlias = int  # zero-based document index on the span
_AnnoRowId: TypeAlias = int  # document_annotations table primary key
_NumDocs: TypeAlias = int  # number of documents recorded on the span

# Identity of an annotation as received (span referenced by its span id).
_Key: TypeAlias = Tuple[_Name, _SpanId, _DocumentPosition]
# Identity once the span row id has been resolved (matches `unique_by`).
_UniqueBy: TypeAlias = Tuple[_Name, _SpanRowId, _DocumentPosition]
# Row shape returned by _select_existing; span columns are always present,
# annotation columns are None when the span has no matching annotation
# (outer join).
_Existing: TypeAlias = Tuple[
    _SpanRowId,
    _SpanId,
    _NumDocs,
    Optional[_AnnoRowId],
    Optional[_Name],
    Optional[_DocumentPosition],
    Optional[datetime],
]
36
+
37
+
38
class DocumentAnnotationQueueInserter(
    QueueInserter[
        Precursors.DocumentAnnotation,
        Insertables.DocumentAnnotation,
        models.DocumentAnnotation,
    ],
    table=models.DocumentAnnotation,
    unique_by=("name", "span_rowid", "document_position"),
):
    """Queue-based bulk inserter for document annotations.

    Parcels reference spans by their string span id; this class resolves each
    parcel against the database and decides whether it should be inserted
    (possibly as an update of an existing row), retried later, or dropped.
    """

    async def _partition(
        self,
        session: AsyncSession,
        *parcels: Received[Precursors.DocumentAnnotation],
    ) -> Tuple[
        List[Received[Insertables.DocumentAnnotation]],
        List[Postponed[Precursors.DocumentAnnotation]],
        List[Received[Precursors.DocumentAnnotation]],
    ]:
        """Split *parcels* into (to_insert, to_postpone, to_discard).

        - insert: the target span exists and the parcel is newer than any
          existing annotation with the same (name, span, position) — existing
          rows are updated by carrying their primary key along;
        - postpone: the span is not in the database yet and retries remain;
        - discard: stale parcels, out-of-range document positions, or
          parcels whose retries are exhausted.
        """
        to_insert: List[Received[Insertables.DocumentAnnotation]] = []
        to_postpone: List[Postponed[Precursors.DocumentAnnotation]] = []
        to_discard: List[Received[Precursors.DocumentAnnotation]] = []

        # Single round trip: candidate spans outer-joined with any existing
        # annotations sharing (name, span_id, document_position).
        stmt = self._select_existing(*map(_key, parcels))
        existing: List[Row[_Existing]] = [_ async for _ in await session.stream(stmt)]
        existing_spans: Mapping[str, _SpanAttr] = {
            e.span_id: _SpanAttr(e.span_rowid, e.num_docs) for e in existing
        }
        # Annotation columns come from the outer join, so they are None for
        # spans with no matching annotation row yet — filter those out.
        existing_annos: Mapping[_Key, _AnnoAttr] = {
            (e.name, e.span_id, e.document_position): _AnnoAttr(e.span_rowid, e.id, e.updated_at)
            for e in existing
            if e.id is not None
            and e.name is not None
            and e.document_position is not None
            and e.updated_at is not None
        }

        for p in parcels:
            if (anno := existing_annos.get(_key(p))) is not None:
                if p.received_at <= anno.updated_at:
                    # Database already holds a newer (or equally new) version.
                    to_discard.append(p)
                else:
                    # Update the existing row: pass its primary key (id_).
                    to_insert.append(
                        Received(
                            received_at=p.received_at,
                            item=p.item.as_insertable(
                                span_rowid=anno.span_rowid,
                                id_=anno.id_,
                            ),
                        )
                    )
            elif (span := existing_spans.get(p.item.span_id)) is not None:
                if 0 <= p.item.document_position < span.num_docs:
                    to_insert.append(
                        Received(
                            received_at=p.received_at,
                            item=p.item.as_insertable(
                                span_rowid=span.span_rowid,
                            ),
                        )
                    )
                else:
                    # Position points past the span's documents: unusable.
                    to_discard.append(p)
            elif isinstance(p, Postponed):
                if p.retries_left > 1:
                    to_postpone.append(p.postpone(p.retries_left - 1))
                else:
                    # Retries exhausted; give up on this parcel.
                    to_discard.append(p)
            elif isinstance(p, Received):
                # Span not ingested yet; retry with the full allowance.
                to_postpone.append(p.postpone(self._retry_allowance))
            else:
                to_discard.append(p)

        assert len(to_insert) + len(to_postpone) + len(to_discard) == len(parcels)
        # Keep only the newest parcel per (name, span_rowid, position) while
        # restoring oldest-first order for insertion.
        to_insert = dedup(sorted(to_insert, key=_time, reverse=True), _unique_by)[::-1]
        return to_insert, to_postpone, to_discard

    def _select_existing(self, *keys: _Key) -> Select[_Existing]:
        """Build a SELECT of candidate spans outer-joined with existing
        annotations, restricted to the given (name, span_id, position) keys."""
        anno = self.table
        span = (
            select(models.Span.id, models.Span.span_id, num_docs_col(self._db.dialect))
            .where(models.Span.span_id.in_({span_id for _, span_id, *_ in keys}))
            .cte()
        )
        onclause = and_(
            span.c.id == anno.span_rowid,
            # Cheap pre-filter on name before the tuple comparison.
            anno.name.in_({name for name, *_ in keys}),
            tuple_(anno.name, span.c.span_id, anno.document_position).in_(keys),
        )
        return select(
            span.c.id.label("span_rowid"),
            span.c.span_id,
            span.c.num_docs,
            anno.id,
            anno.name,
            anno.document_position,
            anno.updated_at,
        ).outerjoin_from(span, anno, onclause)
135
+
136
+
137
class _SpanAttr(NamedTuple):
    """Span columns needed to anchor a document annotation."""

    # Primary key of the span row.
    span_rowid: _SpanRowId
    # Document count on the span; bounds valid document positions.
    num_docs: _NumDocs
140
+
141
+
142
class _AnnoAttr(NamedTuple):
    """Existing annotation row: enough to decide stale-vs-update."""

    # Span row the existing annotation is attached to.
    span_rowid: _SpanRowId
    # Primary key of the existing annotation row (used to update it).
    id_: _AnnoRowId
    # Last modification time; parcels no newer than this are discarded.
    updated_at: datetime
146
+
147
+
148
def _key(p: Received[Precursors.DocumentAnnotation]) -> _Key:
    """Identity of the parcel: (annotation name, span id, document position)."""
    item = p.item
    return (item.obj.name, item.span_id, item.document_position)
150
+
151
+
152
def _unique_by(p: Received[Insertables.DocumentAnnotation]) -> _UniqueBy:
    """Deduplication key once the span row id has been resolved."""
    item = p.item
    return (item.obj.name, item.span_rowid, item.document_position)
154
+
155
+
156
def _time(p: Received[Any]) -> datetime:
    """Sort key: the time at which the parcel was received."""
    received = p.received_at
    return received
@@ -20,6 +20,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
20
20
  from sqlalchemy.sql.elements import KeyedColumnElement
21
21
  from typing_extensions import TypeAlias, assert_never
22
22
 
23
+ from phoenix.db import models
23
24
  from phoenix.db.helpers import SupportedSQLDialect
24
25
  from phoenix.db.models import Base
25
26
 
@@ -93,3 +94,15 @@ def _clean(
93
94
  yield "metadata", v
94
95
  else:
95
96
  yield k, v
97
+
98
+
99
def as_kv(obj: models.Base) -> Iterator[Tuple[str, Any]]:
    """Yield (attribute name, value) pairs for an ORM object's columns.

    Skips the server-maintained timestamp columns, maps the ``metadata``
    column to the ``metadata_`` ORM attribute, and omits primary-key
    values that are None.
    """
    for column_key, column in obj.__table__.c.items():
        # created_at/updated_at are maintained by the database, not callers.
        if column_key in ("created_at", "updated_at"):
            continue
        # The ORM attribute for the "metadata" column is named "metadata_".
        attr_name = "metadata_" if column_key == "metadata" else column_key
        value = getattr(obj, attr_name, None)
        if value is None and column.primary_key:
            # postgresql disallows None for primary key
            continue
        yield attr_name, value
@@ -0,0 +1,144 @@
1
+ from datetime import datetime
2
+ from typing import Any, List, Mapping, NamedTuple, Optional, Tuple
3
+
4
+ from sqlalchemy import Row, Select, and_, select, tuple_
5
+ from sqlalchemy.ext.asyncio import AsyncSession
6
+ from typing_extensions import TypeAlias
7
+
8
+ from phoenix.db import models
9
+ from phoenix.db.helpers import dedup
10
+ from phoenix.db.insertion.types import (
11
+ Insertables,
12
+ Postponed,
13
+ Precursors,
14
+ QueueInserter,
15
+ Received,
16
+ )
17
+
18
# Scalar type aliases naming the components of the keys below.
_Name: TypeAlias = str  # annotation name
_SpanId: TypeAlias = str  # span id string supplied by the client
_SpanRowId: TypeAlias = int  # spans table primary key
_AnnoRowId: TypeAlias = int  # span_annotations table primary key

# Identity of an annotation as received (span referenced by its span id).
_Key: TypeAlias = Tuple[_Name, _SpanId]
# Identity once the span row id has been resolved (matches `unique_by`).
_UniqueBy: TypeAlias = Tuple[_Name, _SpanRowId]
# Row shape returned by _select_existing; annotation columns are None when
# the span has no matching annotation row (outer join).
_Existing: TypeAlias = Tuple[
    _SpanRowId,
    _SpanId,
    Optional[_AnnoRowId],
    Optional[_Name],
    Optional[datetime],
]
32
+
33
+
34
class SpanAnnotationQueueInserter(
    QueueInserter[
        Precursors.SpanAnnotation,
        Insertables.SpanAnnotation,
        models.SpanAnnotation,
    ],
    table=models.SpanAnnotation,
    unique_by=("name", "span_rowid"),
):
    """Queue-based bulk inserter for span annotations.

    Parcels reference spans by their string span id; this class resolves each
    parcel against the database and decides whether it should be inserted
    (possibly as an update of an existing row), retried later, or dropped.
    """

    async def _partition(
        self,
        session: AsyncSession,
        *parcels: Received[Precursors.SpanAnnotation],
    ) -> Tuple[
        List[Received[Insertables.SpanAnnotation]],
        List[Postponed[Precursors.SpanAnnotation]],
        List[Received[Precursors.SpanAnnotation]],
    ]:
        """Split *parcels* into (to_insert, to_postpone, to_discard).

        - insert: the target span exists and the parcel is newer than any
          existing annotation with the same (name, span) — existing rows are
          updated by carrying their primary key along;
        - postpone: the span is not in the database yet and retries remain;
        - discard: stale parcels or parcels whose retries are exhausted.
        """
        to_insert: List[Received[Insertables.SpanAnnotation]] = []
        to_postpone: List[Postponed[Precursors.SpanAnnotation]] = []
        to_discard: List[Received[Precursors.SpanAnnotation]] = []

        # Single round trip: candidate spans outer-joined with any existing
        # annotations sharing (name, span_id).
        stmt = self._select_existing(*map(_key, parcels))
        existing: List[Row[_Existing]] = [_ async for _ in await session.stream(stmt)]
        existing_spans: Mapping[str, _SpanAttr] = {
            e.span_id: _SpanAttr(e.span_rowid) for e in existing
        }
        # Annotation columns come from the outer join, so they are None for
        # spans with no matching annotation row yet — filter those out.
        existing_annos: Mapping[_Key, _AnnoAttr] = {
            (e.name, e.span_id): _AnnoAttr(e.span_rowid, e.id, e.updated_at)
            for e in existing
            if e.id is not None and e.name is not None and e.updated_at is not None
        }

        for p in parcels:
            if (anno := existing_annos.get(_key(p))) is not None:
                if p.received_at <= anno.updated_at:
                    # Database already holds a newer (or equally new) version.
                    to_discard.append(p)
                else:
                    # Update the existing row: pass its primary key (id_).
                    to_insert.append(
                        Received(
                            received_at=p.received_at,
                            item=p.item.as_insertable(
                                span_rowid=anno.span_rowid,
                                id_=anno.id_,
                            ),
                        )
                    )
            elif (span := existing_spans.get(p.item.span_id)) is not None:
                to_insert.append(
                    Received(
                        received_at=p.received_at,
                        item=p.item.as_insertable(
                            span_rowid=span.span_rowid,
                        ),
                    )
                )
            elif isinstance(p, Postponed):
                if p.retries_left > 1:
                    to_postpone.append(p.postpone(p.retries_left - 1))
                else:
                    # Retries exhausted; give up on this parcel.
                    to_discard.append(p)
            elif isinstance(p, Received):
                # Span not ingested yet; retry with the full allowance.
                to_postpone.append(p.postpone(self._retry_allowance))
            else:
                to_discard.append(p)

        assert len(to_insert) + len(to_postpone) + len(to_discard) == len(parcels)
        # Keep only the newest parcel per (name, span_rowid) while restoring
        # oldest-first order for insertion.
        to_insert = dedup(sorted(to_insert, key=_time, reverse=True), _unique_by)[::-1]
        return to_insert, to_postpone, to_discard

    def _select_existing(self, *keys: _Key) -> Select[_Existing]:
        """Build a SELECT of candidate spans outer-joined with existing
        annotations, restricted to the given (name, span_id) keys."""
        anno = self.table
        span = (
            select(models.Span.id, models.Span.span_id)
            .where(models.Span.span_id.in_({span_id for _, span_id in keys}))
            .cte()
        )
        onclause = and_(
            span.c.id == anno.span_rowid,
            # Cheap pre-filter on name before the tuple comparison.
            anno.name.in_({name for name, _ in keys}),
            tuple_(anno.name, span.c.span_id).in_(keys),
        )
        return select(
            span.c.id.label("span_rowid"),
            span.c.span_id,
            anno.id,
            anno.name,
            anno.updated_at,
        ).outerjoin_from(span, anno, onclause)
123
+
124
+
125
class _SpanAttr(NamedTuple):
    """Span column needed to anchor a span annotation."""

    # Primary key of the span row.
    span_rowid: _SpanRowId
127
+
128
+
129
class _AnnoAttr(NamedTuple):
    """Existing annotation row: enough to decide stale-vs-update."""

    # Span row the existing annotation is attached to.
    span_rowid: _SpanRowId
    # Primary key of the existing annotation row (used to update it).
    id_: _AnnoRowId
    # Last modification time; parcels no newer than this are discarded.
    updated_at: datetime
133
+
134
+
135
def _key(p: Received[Precursors.SpanAnnotation]) -> _Key:
    """Identity of the parcel: (annotation name, span id)."""
    item = p.item
    return (item.obj.name, item.span_id)
137
+
138
+
139
def _unique_by(p: Received[Insertables.SpanAnnotation]) -> _UniqueBy:
    """Deduplication key once the span row id has been resolved."""
    item = p.item
    return (item.obj.name, item.span_rowid)
141
+
142
+
143
def _time(p: Received[Any]) -> datetime:
    """Sort key: the time at which the parcel was received."""
    received = p.received_at
    return received
@@ -0,0 +1,144 @@
1
+ from datetime import datetime
2
+ from typing import Any, List, Mapping, NamedTuple, Optional, Tuple
3
+
4
+ from sqlalchemy import Row, Select, and_, select, tuple_
5
+ from sqlalchemy.ext.asyncio import AsyncSession
6
+ from typing_extensions import TypeAlias
7
+
8
+ from phoenix.db import models
9
+ from phoenix.db.helpers import dedup
10
+ from phoenix.db.insertion.types import (
11
+ Insertables,
12
+ Postponed,
13
+ Precursors,
14
+ QueueInserter,
15
+ Received,
16
+ )
17
+
18
# Scalar type aliases naming the components of the keys below.
_Name: TypeAlias = str  # annotation name
_TraceId: TypeAlias = str  # trace id string supplied by the client
_TraceRowId: TypeAlias = int  # traces table primary key
_AnnoRowId: TypeAlias = int  # trace_annotations table primary key

# Identity of an annotation as received (trace referenced by its trace id).
_Key: TypeAlias = Tuple[_Name, _TraceId]
# Identity once the trace row id has been resolved (matches `unique_by`).
_UniqueBy: TypeAlias = Tuple[_Name, _TraceRowId]
# Row shape returned by _select_existing; annotation columns are None when
# the trace has no matching annotation row (outer join).
_Existing: TypeAlias = Tuple[
    _TraceRowId,
    _TraceId,
    Optional[_AnnoRowId],
    Optional[_Name],
    Optional[datetime],
]
32
+
33
+
34
class TraceAnnotationQueueInserter(
    QueueInserter[
        Precursors.TraceAnnotation,
        Insertables.TraceAnnotation,
        models.TraceAnnotation,
    ],
    table=models.TraceAnnotation,
    unique_by=("name", "trace_rowid"),
):
    """Queue-based bulk inserter for trace annotations.

    Parcels reference traces by their string trace id; this class resolves
    each parcel against the database and decides whether it should be inserted
    (possibly as an update of an existing row), retried later, or dropped.
    """

    async def _partition(
        self,
        session: AsyncSession,
        *parcels: Received[Precursors.TraceAnnotation],
    ) -> Tuple[
        List[Received[Insertables.TraceAnnotation]],
        List[Postponed[Precursors.TraceAnnotation]],
        List[Received[Precursors.TraceAnnotation]],
    ]:
        """Split *parcels* into (to_insert, to_postpone, to_discard).

        - insert: the target trace exists and the parcel is newer than any
          existing annotation with the same (name, trace) — existing rows are
          updated by carrying their primary key along;
        - postpone: the trace is not in the database yet and retries remain;
        - discard: stale parcels or parcels whose retries are exhausted.
        """
        to_insert: List[Received[Insertables.TraceAnnotation]] = []
        to_postpone: List[Postponed[Precursors.TraceAnnotation]] = []
        to_discard: List[Received[Precursors.TraceAnnotation]] = []

        # Single round trip: candidate traces outer-joined with any existing
        # annotations sharing (name, trace_id).
        stmt = self._select_existing(*map(_key, parcels))
        existing: List[Row[_Existing]] = [_ async for _ in await session.stream(stmt)]
        existing_traces: Mapping[str, _TraceAttr] = {
            e.trace_id: _TraceAttr(e.trace_rowid) for e in existing
        }
        # Annotation columns come from the outer join, so they are None for
        # traces with no matching annotation row yet — filter those out.
        existing_annos: Mapping[_Key, _AnnoAttr] = {
            (e.name, e.trace_id): _AnnoAttr(e.trace_rowid, e.id, e.updated_at)
            for e in existing
            if e.id is not None and e.name is not None and e.updated_at is not None
        }

        for p in parcels:
            if (anno := existing_annos.get(_key(p))) is not None:
                if p.received_at <= anno.updated_at:
                    # Database already holds a newer (or equally new) version.
                    to_discard.append(p)
                else:
                    # Update the existing row: pass its primary key (id_).
                    to_insert.append(
                        Received(
                            received_at=p.received_at,
                            item=p.item.as_insertable(
                                trace_rowid=anno.trace_rowid,
                                id_=anno.id_,
                            ),
                        )
                    )
            elif (trace := existing_traces.get(p.item.trace_id)) is not None:
                to_insert.append(
                    Received(
                        received_at=p.received_at,
                        item=p.item.as_insertable(
                            trace_rowid=trace.trace_rowid,
                        ),
                    )
                )
            elif isinstance(p, Postponed):
                if p.retries_left > 1:
                    to_postpone.append(p.postpone(p.retries_left - 1))
                else:
                    # Retries exhausted; give up on this parcel.
                    to_discard.append(p)
            elif isinstance(p, Received):
                # Trace not ingested yet; retry with the full allowance.
                to_postpone.append(p.postpone(self._retry_allowance))
            else:
                to_discard.append(p)

        assert len(to_insert) + len(to_postpone) + len(to_discard) == len(parcels)
        # Keep only the newest parcel per (name, trace_rowid) while restoring
        # oldest-first order for insertion.
        to_insert = dedup(sorted(to_insert, key=_time, reverse=True), _unique_by)[::-1]
        return to_insert, to_postpone, to_discard

    def _select_existing(self, *keys: _Key) -> Select[_Existing]:
        """Build a SELECT of candidate traces outer-joined with existing
        annotations, restricted to the given (name, trace_id) keys."""
        anno = self.table
        trace = (
            select(models.Trace.id, models.Trace.trace_id)
            .where(models.Trace.trace_id.in_({trace_id for _, trace_id in keys}))
            .cte()
        )
        onclause = and_(
            trace.c.id == anno.trace_rowid,
            # Cheap pre-filter on name before the tuple comparison.
            anno.name.in_({name for name, _ in keys}),
            tuple_(anno.name, trace.c.trace_id).in_(keys),
        )
        return select(
            trace.c.id.label("trace_rowid"),
            trace.c.trace_id,
            anno.id,
            anno.name,
            anno.updated_at,
        ).outerjoin_from(trace, anno, onclause)
123
+
124
+
125
class _TraceAttr(NamedTuple):
    """Trace column needed to anchor a trace annotation."""

    # Primary key of the trace row.
    trace_rowid: _TraceRowId
127
+
128
+
129
class _AnnoAttr(NamedTuple):
    """Existing annotation row: enough to decide stale-vs-update."""

    # Trace row the existing annotation is attached to.
    trace_rowid: _TraceRowId
    # Primary key of the existing annotation row (used to update it).
    id_: _AnnoRowId
    # Last modification time; parcels no newer than this are discarded.
    updated_at: datetime
133
+
134
+
135
def _key(p: Received[Precursors.TraceAnnotation]) -> _Key:
    """Identity of the parcel: (annotation name, trace id)."""
    item = p.item
    return (item.obj.name, item.trace_id)
137
+
138
+
139
def _unique_by(p: Received[Insertables.TraceAnnotation]) -> _UniqueBy:
    """Deduplication key once the trace row id has been resolved."""
    item = p.item
    return (item.obj.name, item.trace_rowid)
141
+
142
+
143
def _time(p: Received[Any]) -> datetime:
    """Sort key: the time at which the parcel was received."""
    received = p.received_at
    return received