arize-phoenix 11.23.1__py3-none-any.whl → 12.28.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only.
Files changed (221)
  1. {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/METADATA +61 -36
  2. {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/RECORD +212 -162
  3. {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/WHEEL +1 -1
  4. {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/licenses/IP_NOTICE +1 -1
  5. phoenix/__generated__/__init__.py +0 -0
  6. phoenix/__generated__/classification_evaluator_configs/__init__.py +20 -0
  7. phoenix/__generated__/classification_evaluator_configs/_document_relevance_classification_evaluator_config.py +17 -0
  8. phoenix/__generated__/classification_evaluator_configs/_hallucination_classification_evaluator_config.py +17 -0
  9. phoenix/__generated__/classification_evaluator_configs/_models.py +18 -0
  10. phoenix/__generated__/classification_evaluator_configs/_tool_selection_classification_evaluator_config.py +17 -0
  11. phoenix/__init__.py +2 -1
  12. phoenix/auth.py +27 -2
  13. phoenix/config.py +1594 -81
  14. phoenix/db/README.md +546 -28
  15. phoenix/db/bulk_inserter.py +119 -116
  16. phoenix/db/engines.py +140 -33
  17. phoenix/db/facilitator.py +22 -1
  18. phoenix/db/helpers.py +818 -65
  19. phoenix/db/iam_auth.py +64 -0
  20. phoenix/db/insertion/dataset.py +133 -1
  21. phoenix/db/insertion/document_annotation.py +9 -6
  22. phoenix/db/insertion/evaluation.py +2 -3
  23. phoenix/db/insertion/helpers.py +2 -2
  24. phoenix/db/insertion/session_annotation.py +176 -0
  25. phoenix/db/insertion/span_annotation.py +3 -4
  26. phoenix/db/insertion/trace_annotation.py +3 -4
  27. phoenix/db/insertion/types.py +41 -18
  28. phoenix/db/migrations/versions/01a8342c9cdf_add_user_id_on_datasets.py +40 -0
  29. phoenix/db/migrations/versions/0df286449799_add_session_annotations_table.py +105 -0
  30. phoenix/db/migrations/versions/272b66ff50f8_drop_single_indices.py +119 -0
  31. phoenix/db/migrations/versions/58228d933c91_dataset_labels.py +67 -0
  32. phoenix/db/migrations/versions/699f655af132_experiment_tags.py +57 -0
  33. phoenix/db/migrations/versions/735d3d93c33e_add_composite_indices.py +41 -0
  34. phoenix/db/migrations/versions/ab513d89518b_add_user_id_on_dataset_versions.py +40 -0
  35. phoenix/db/migrations/versions/d0690a79ea51_users_on_experiments.py +40 -0
  36. phoenix/db/migrations/versions/deb2c81c0bb2_dataset_splits.py +139 -0
  37. phoenix/db/migrations/versions/e76cbd66ffc3_add_experiments_dataset_examples.py +87 -0
  38. phoenix/db/models.py +364 -56
  39. phoenix/db/pg_config.py +10 -0
  40. phoenix/db/types/trace_retention.py +7 -6
  41. phoenix/experiments/functions.py +69 -19
  42. phoenix/inferences/inferences.py +1 -2
  43. phoenix/server/api/auth.py +9 -0
  44. phoenix/server/api/auth_messages.py +46 -0
  45. phoenix/server/api/context.py +60 -0
  46. phoenix/server/api/dataloaders/__init__.py +36 -0
  47. phoenix/server/api/dataloaders/annotation_summaries.py +60 -8
  48. phoenix/server/api/dataloaders/average_experiment_repeated_run_group_latency.py +50 -0
  49. phoenix/server/api/dataloaders/average_experiment_run_latency.py +17 -24
  50. phoenix/server/api/dataloaders/cache/two_tier_cache.py +1 -2
  51. phoenix/server/api/dataloaders/dataset_dataset_splits.py +52 -0
  52. phoenix/server/api/dataloaders/dataset_example_revisions.py +0 -1
  53. phoenix/server/api/dataloaders/dataset_example_splits.py +40 -0
  54. phoenix/server/api/dataloaders/dataset_examples_and_versions_by_experiment_run.py +47 -0
  55. phoenix/server/api/dataloaders/dataset_labels.py +36 -0
  56. phoenix/server/api/dataloaders/document_evaluation_summaries.py +2 -2
  57. phoenix/server/api/dataloaders/document_evaluations.py +6 -9
  58. phoenix/server/api/dataloaders/experiment_annotation_summaries.py +88 -34
  59. phoenix/server/api/dataloaders/experiment_dataset_splits.py +43 -0
  60. phoenix/server/api/dataloaders/experiment_error_rates.py +21 -28
  61. phoenix/server/api/dataloaders/experiment_repeated_run_group_annotation_summaries.py +77 -0
  62. phoenix/server/api/dataloaders/experiment_repeated_run_groups.py +57 -0
  63. phoenix/server/api/dataloaders/experiment_runs_by_experiment_and_example.py +44 -0
  64. phoenix/server/api/dataloaders/latency_ms_quantile.py +40 -8
  65. phoenix/server/api/dataloaders/record_counts.py +37 -10
  66. phoenix/server/api/dataloaders/session_annotations_by_session.py +29 -0
  67. phoenix/server/api/dataloaders/span_cost_summary_by_experiment_repeated_run_group.py +64 -0
  68. phoenix/server/api/dataloaders/span_cost_summary_by_project.py +28 -14
  69. phoenix/server/api/dataloaders/span_costs.py +3 -9
  70. phoenix/server/api/dataloaders/table_fields.py +2 -2
  71. phoenix/server/api/dataloaders/token_prices_by_model.py +30 -0
  72. phoenix/server/api/dataloaders/trace_annotations_by_trace.py +27 -0
  73. phoenix/server/api/exceptions.py +5 -1
  74. phoenix/server/api/helpers/playground_clients.py +263 -83
  75. phoenix/server/api/helpers/playground_spans.py +2 -1
  76. phoenix/server/api/helpers/playground_users.py +26 -0
  77. phoenix/server/api/helpers/prompts/conversions/google.py +103 -0
  78. phoenix/server/api/helpers/prompts/models.py +61 -19
  79. phoenix/server/api/input_types/{SpanAnnotationFilter.py → AnnotationFilter.py} +22 -14
  80. phoenix/server/api/input_types/ChatCompletionInput.py +3 -0
  81. phoenix/server/api/input_types/CreateProjectSessionAnnotationInput.py +37 -0
  82. phoenix/server/api/input_types/DatasetFilter.py +5 -2
  83. phoenix/server/api/input_types/ExperimentRunSort.py +237 -0
  84. phoenix/server/api/input_types/GenerativeModelInput.py +3 -0
  85. phoenix/server/api/input_types/ProjectSessionSort.py +158 -1
  86. phoenix/server/api/input_types/PromptVersionInput.py +47 -1
  87. phoenix/server/api/input_types/SpanSort.py +3 -2
  88. phoenix/server/api/input_types/UpdateAnnotationInput.py +34 -0
  89. phoenix/server/api/input_types/UserRoleInput.py +1 -0
  90. phoenix/server/api/mutations/__init__.py +8 -0
  91. phoenix/server/api/mutations/annotation_config_mutations.py +8 -8
  92. phoenix/server/api/mutations/api_key_mutations.py +15 -20
  93. phoenix/server/api/mutations/chat_mutations.py +106 -37
  94. phoenix/server/api/mutations/dataset_label_mutations.py +243 -0
  95. phoenix/server/api/mutations/dataset_mutations.py +21 -16
  96. phoenix/server/api/mutations/dataset_split_mutations.py +351 -0
  97. phoenix/server/api/mutations/experiment_mutations.py +2 -2
  98. phoenix/server/api/mutations/export_events_mutations.py +3 -3
  99. phoenix/server/api/mutations/model_mutations.py +11 -9
  100. phoenix/server/api/mutations/project_mutations.py +4 -4
  101. phoenix/server/api/mutations/project_session_annotations_mutations.py +158 -0
  102. phoenix/server/api/mutations/project_trace_retention_policy_mutations.py +8 -4
  103. phoenix/server/api/mutations/prompt_label_mutations.py +74 -65
  104. phoenix/server/api/mutations/prompt_mutations.py +65 -129
  105. phoenix/server/api/mutations/prompt_version_tag_mutations.py +11 -8
  106. phoenix/server/api/mutations/span_annotations_mutations.py +15 -10
  107. phoenix/server/api/mutations/trace_annotations_mutations.py +13 -8
  108. phoenix/server/api/mutations/trace_mutations.py +3 -3
  109. phoenix/server/api/mutations/user_mutations.py +55 -26
  110. phoenix/server/api/queries.py +501 -617
  111. phoenix/server/api/routers/__init__.py +2 -2
  112. phoenix/server/api/routers/auth.py +141 -87
  113. phoenix/server/api/routers/ldap.py +229 -0
  114. phoenix/server/api/routers/oauth2.py +349 -101
  115. phoenix/server/api/routers/v1/__init__.py +22 -4
  116. phoenix/server/api/routers/v1/annotation_configs.py +19 -30
  117. phoenix/server/api/routers/v1/annotations.py +455 -13
  118. phoenix/server/api/routers/v1/datasets.py +355 -68
  119. phoenix/server/api/routers/v1/documents.py +142 -0
  120. phoenix/server/api/routers/v1/evaluations.py +20 -28
  121. phoenix/server/api/routers/v1/experiment_evaluations.py +16 -6
  122. phoenix/server/api/routers/v1/experiment_runs.py +335 -59
  123. phoenix/server/api/routers/v1/experiments.py +475 -47
  124. phoenix/server/api/routers/v1/projects.py +16 -50
  125. phoenix/server/api/routers/v1/prompts.py +50 -39
  126. phoenix/server/api/routers/v1/sessions.py +108 -0
  127. phoenix/server/api/routers/v1/spans.py +156 -96
  128. phoenix/server/api/routers/v1/traces.py +51 -77
  129. phoenix/server/api/routers/v1/users.py +64 -24
  130. phoenix/server/api/routers/v1/utils.py +3 -7
  131. phoenix/server/api/subscriptions.py +257 -93
  132. phoenix/server/api/types/Annotation.py +90 -23
  133. phoenix/server/api/types/ApiKey.py +13 -17
  134. phoenix/server/api/types/AuthMethod.py +1 -0
  135. phoenix/server/api/types/ChatCompletionSubscriptionPayload.py +1 -0
  136. phoenix/server/api/types/Dataset.py +199 -72
  137. phoenix/server/api/types/DatasetExample.py +88 -18
  138. phoenix/server/api/types/DatasetExperimentAnnotationSummary.py +10 -0
  139. phoenix/server/api/types/DatasetLabel.py +57 -0
  140. phoenix/server/api/types/DatasetSplit.py +98 -0
  141. phoenix/server/api/types/DatasetVersion.py +49 -4
  142. phoenix/server/api/types/DocumentAnnotation.py +212 -0
  143. phoenix/server/api/types/Experiment.py +215 -68
  144. phoenix/server/api/types/ExperimentComparison.py +3 -9
  145. phoenix/server/api/types/ExperimentRepeatedRunGroup.py +155 -0
  146. phoenix/server/api/types/ExperimentRepeatedRunGroupAnnotationSummary.py +9 -0
  147. phoenix/server/api/types/ExperimentRun.py +120 -70
  148. phoenix/server/api/types/ExperimentRunAnnotation.py +158 -39
  149. phoenix/server/api/types/GenerativeModel.py +95 -42
  150. phoenix/server/api/types/GenerativeProvider.py +1 -1
  151. phoenix/server/api/types/ModelInterface.py +7 -2
  152. phoenix/server/api/types/PlaygroundModel.py +12 -2
  153. phoenix/server/api/types/Project.py +218 -185
  154. phoenix/server/api/types/ProjectSession.py +146 -29
  155. phoenix/server/api/types/ProjectSessionAnnotation.py +187 -0
  156. phoenix/server/api/types/ProjectTraceRetentionPolicy.py +1 -1
  157. phoenix/server/api/types/Prompt.py +119 -39
  158. phoenix/server/api/types/PromptLabel.py +42 -25
  159. phoenix/server/api/types/PromptVersion.py +11 -8
  160. phoenix/server/api/types/PromptVersionTag.py +65 -25
  161. phoenix/server/api/types/Span.py +130 -123
  162. phoenix/server/api/types/SpanAnnotation.py +189 -42
  163. phoenix/server/api/types/SystemApiKey.py +65 -1
  164. phoenix/server/api/types/Trace.py +184 -53
  165. phoenix/server/api/types/TraceAnnotation.py +149 -50
  166. phoenix/server/api/types/User.py +128 -33
  167. phoenix/server/api/types/UserApiKey.py +73 -26
  168. phoenix/server/api/types/node.py +10 -0
  169. phoenix/server/api/types/pagination.py +11 -2
  170. phoenix/server/app.py +154 -36
  171. phoenix/server/authorization.py +5 -4
  172. phoenix/server/bearer_auth.py +13 -5
  173. phoenix/server/cost_tracking/cost_model_lookup.py +42 -14
  174. phoenix/server/cost_tracking/model_cost_manifest.json +1085 -194
  175. phoenix/server/daemons/generative_model_store.py +61 -9
  176. phoenix/server/daemons/span_cost_calculator.py +10 -8
  177. phoenix/server/dml_event.py +13 -0
  178. phoenix/server/email/sender.py +29 -2
  179. phoenix/server/grpc_server.py +9 -9
  180. phoenix/server/jwt_store.py +8 -6
  181. phoenix/server/ldap.py +1449 -0
  182. phoenix/server/main.py +9 -3
  183. phoenix/server/oauth2.py +330 -12
  184. phoenix/server/prometheus.py +43 -6
  185. phoenix/server/rate_limiters.py +4 -9
  186. phoenix/server/retention.py +33 -20
  187. phoenix/server/session_filters.py +49 -0
  188. phoenix/server/static/.vite/manifest.json +51 -53
  189. phoenix/server/static/assets/components-BreFUQQa.js +6702 -0
  190. phoenix/server/static/assets/{index-BPCwGQr8.js → index-CTQoemZv.js} +42 -35
  191. phoenix/server/static/assets/pages-DBE5iYM3.js +9524 -0
  192. phoenix/server/static/assets/vendor-BGzfc4EU.css +1 -0
  193. phoenix/server/static/assets/vendor-DCE4v-Ot.js +920 -0
  194. phoenix/server/static/assets/vendor-codemirror-D5f205eT.js +25 -0
  195. phoenix/server/static/assets/{vendor-recharts-Bw30oz1A.js → vendor-recharts-V9cwpXsm.js} +7 -7
  196. phoenix/server/static/assets/{vendor-shiki-DZajAPeq.js → vendor-shiki-Do--csgv.js} +1 -1
  197. phoenix/server/static/assets/vendor-three-CmB8bl_y.js +3840 -0
  198. phoenix/server/templates/index.html +7 -1
  199. phoenix/server/thread_server.py +1 -2
  200. phoenix/server/utils.py +74 -0
  201. phoenix/session/client.py +55 -1
  202. phoenix/session/data_extractor.py +5 -0
  203. phoenix/session/evaluation.py +8 -4
  204. phoenix/session/session.py +44 -8
  205. phoenix/settings.py +2 -0
  206. phoenix/trace/attributes.py +80 -13
  207. phoenix/trace/dsl/query.py +2 -0
  208. phoenix/trace/projects.py +5 -0
  209. phoenix/utilities/template_formatters.py +1 -1
  210. phoenix/version.py +1 -1
  211. phoenix/server/api/types/Evaluation.py +0 -39
  212. phoenix/server/static/assets/components-D0DWAf0l.js +0 -5650
  213. phoenix/server/static/assets/pages-Creyamao.js +0 -8612
  214. phoenix/server/static/assets/vendor-CU36oj8y.js +0 -905
  215. phoenix/server/static/assets/vendor-CqDb5u4o.css +0 -1
  216. phoenix/server/static/assets/vendor-arizeai-Ctgw0e1G.js +0 -168
  217. phoenix/server/static/assets/vendor-codemirror-Cojjzqb9.js +0 -25
  218. phoenix/server/static/assets/vendor-three-BLWp5bic.js +0 -2998
  219. phoenix/utilities/deprecation.py +0 -31
  220. {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/entry_points.txt +0 -0
  221. {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/licenses/LICENSE +0 -0
phoenix/server/api/types/Project.py

@@ -1,18 +1,15 @@
-from __future__ import annotations
-
 import operator
 from datetime import datetime, timezone
-from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Literal, Optional, cast
+from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, cast
 
 import strawberry
 from aioitertools.itertools import groupby, islice
 from openinference.semconv.trace import SpanAttributes
 from sqlalchemy import and_, case, desc, distinct, exists, func, or_, select
 from sqlalchemy.dialects import postgresql, sqlite
-from sqlalchemy.sql.elements import ColumnElement
 from sqlalchemy.sql.expression import tuple_
 from sqlalchemy.sql.functions import percentile_cont
-from strawberry import ID, UNSET, Private, lazy
+from strawberry import ID, UNSET, lazy
 from strawberry.relay import Connection, Edge, Node, NodeID, PageInfo
 from strawberry.types import Info
 from typing_extensions import assert_never
@@ -23,8 +20,8 @@ from phoenix.db.helpers import SupportedSQLDialect, date_trunc
 from phoenix.server.api.context import Context
 from phoenix.server.api.exceptions import BadRequest
 from phoenix.server.api.input_types.ProjectSessionSort import (
-    ProjectSessionColumn,
     ProjectSessionSort,
+    ProjectSessionSortConfig,
 )
 from phoenix.server.api.input_types.SpanSort import SpanColumn, SpanSort, SpanSortConfig
 from phoenix.server.api.input_types.TimeBinConfig import TimeBinConfig, TimeBinScale
@@ -33,7 +30,7 @@ from phoenix.server.api.types.AnnotationConfig import AnnotationConfig, to_gql_annotation_config
 from phoenix.server.api.types.AnnotationSummary import AnnotationSummary
 from phoenix.server.api.types.CostBreakdown import CostBreakdown
 from phoenix.server.api.types.DocumentEvaluationSummary import DocumentEvaluationSummary
-from phoenix.server.api.types.GenerativeModel import GenerativeModel, to_gql_generative_model
+from phoenix.server.api.types.GenerativeModel import GenerativeModel
 from phoenix.server.api.types.pagination import (
     ConnectionArgs,
     Cursor,
@@ -43,13 +40,14 @@ from phoenix.server.api.types.pagination import (
     connection_from_cursors_and_nodes,
     connection_from_list,
 )
-from phoenix.server.api.types.ProjectSession import ProjectSession, to_gql_project_session
+from phoenix.server.api.types.ProjectSession import ProjectSession
 from phoenix.server.api.types.SortDir import SortDir
 from phoenix.server.api.types.Span import Span
 from phoenix.server.api.types.SpanCostSummary import SpanCostSummary
 from phoenix.server.api.types.TimeSeries import TimeSeries, TimeSeriesDataPoint
 from phoenix.server.api.types.Trace import Trace
 from phoenix.server.api.types.ValidationResult import ValidationResult
+from phoenix.server.session_filters import get_filtered_session_rowids_subquery
 from phoenix.server.types import DbSessionFactory
 from phoenix.trace.dsl import SpanFilter
 
@@ -60,12 +58,11 @@ if TYPE_CHECKING:
 
 @strawberry.type
 class Project(Node):
-    _table: ClassVar[type[models.Base]] = models.Project
-    project_rowid: NodeID[int]
-    db_project: Private[models.Project] = UNSET
+    id: NodeID[int]
+    db_record: strawberry.Private[Optional[models.Project]] = None
 
     def __post_init__(self) -> None:
-        if self.db_project and self.project_rowid != self.db_project.id:
+        if self.db_record and self.id != self.db_record.id:
             raise ValueError("Project ID mismatch")
 
     @strawberry.field
@@ -73,11 +70,11 @@ class Project(Node):
         self,
         info: Info[Context, None],
     ) -> str:
-        if self.db_project:
-            name = self.db_project.name
+        if self.db_record:
+            name = self.db_record.name
         else:
             name = await info.context.data_loaders.project_fields.load(
-                (self.project_rowid, models.Project.name),
+                (self.id, models.Project.name),
             )
         return name
 
@@ -86,11 +83,11 @@ class Project(Node):
         self,
         info: Info[Context, None],
     ) -> str:
-        if self.db_project:
-            gradient_start_color = self.db_project.gradient_start_color
+        if self.db_record:
+            gradient_start_color = self.db_record.gradient_start_color
         else:
             gradient_start_color = await info.context.data_loaders.project_fields.load(
-                (self.project_rowid, models.Project.gradient_start_color),
+                (self.id, models.Project.gradient_start_color),
             )
         return gradient_start_color
 
@@ -99,11 +96,11 @@ class Project(Node):
        self,
         info: Info[Context, None],
     ) -> str:
-        if self.db_project:
-            gradient_end_color = self.db_project.gradient_end_color
+        if self.db_record:
+            gradient_end_color = self.db_record.gradient_end_color
         else:
             gradient_end_color = await info.context.data_loaders.project_fields.load(
-                (self.project_rowid, models.Project.gradient_end_color),
+                (self.id, models.Project.gradient_end_color),
             )
         return gradient_end_color
 
@@ -113,7 +110,7 @@ class Project(Node):
         info: Info[Context, None],
     ) -> Optional[datetime]:
         start_time = await info.context.data_loaders.min_start_or_max_end_times.load(
-            (self.project_rowid, "start"),
+            (self.id, "start"),
         )
         start_time, _ = right_open_time_range(start_time, None)
         return start_time
@@ -124,7 +121,7 @@ class Project(Node):
         info: Info[Context, None],
     ) -> Optional[datetime]:
         end_time = await info.context.data_loaders.min_start_or_max_end_times.load(
-            (self.project_rowid, "end"),
+            (self.id, "end"),
         )
         _, end_time = right_open_time_range(None, end_time)
         return end_time
@@ -135,9 +132,21 @@ class Project(Node):
         info: Info[Context, None],
         time_range: Optional[TimeRange] = UNSET,
         filter_condition: Optional[str] = UNSET,
+        session_filter_condition: Optional[str] = UNSET,
     ) -> int:
+        if filter_condition and session_filter_condition:
+            raise BadRequest(
+                "Both a filter condition and session filter condition "
+                "cannot be applied at the same time"
+            )
         return await info.context.data_loaders.record_counts.load(
-            ("span", self.project_rowid, time_range, filter_condition),
+            (
+                "span",
+                self.id,
+                time_range or None,
+                filter_condition or None,
+                session_filter_condition or None,
+            ),
         )
 
     @strawberry.field
@@ -145,9 +154,22 @@ class Project(Node):
         self,
         info: Info[Context, None],
         time_range: Optional[TimeRange] = UNSET,
+        filter_condition: Optional[str] = UNSET,
+        session_filter_condition: Optional[str] = UNSET,
     ) -> int:
+        if filter_condition and session_filter_condition:
+            raise BadRequest(
+                "Both a filter condition and session filter condition "
+                "cannot be applied at the same time"
+            )
         return await info.context.data_loaders.record_counts.load(
-            ("trace", self.project_rowid, time_range, None),
+            (
+                "trace",
+                self.id,
+                time_range or None,
+                filter_condition or None,
+                session_filter_condition or None,
+            ),
         )
 
     @strawberry.field
@@ -158,7 +180,7 @@ class Project(Node):
         filter_condition: Optional[str] = UNSET,
     ) -> float:
         return await info.context.data_loaders.token_counts.load(
-            ("total", self.project_rowid, time_range, filter_condition),
+            ("total", self.id, time_range, filter_condition),
        )
 
     @strawberry.field
@@ -169,7 +191,7 @@ class Project(Node):
         filter_condition: Optional[str] = UNSET,
     ) -> float:
         return await info.context.data_loaders.token_counts.load(
-            ("prompt", self.project_rowid, time_range, filter_condition),
+            ("prompt", self.id, time_range, filter_condition),
         )
 
     @strawberry.field
@@ -180,7 +202,7 @@ class Project(Node):
         filter_condition: Optional[str] = UNSET,
     ) -> float:
         return await info.context.data_loaders.token_counts.load(
-            ("completion", self.project_rowid, time_range, filter_condition),
+            ("completion", self.id, time_range, filter_condition),
         )
 
     @strawberry.field
@@ -189,9 +211,21 @@ class Project(Node):
         info: Info[Context, None],
         time_range: Optional[TimeRange] = UNSET,
         filter_condition: Optional[str] = UNSET,
+        session_filter_condition: Optional[str] = UNSET,
     ) -> SpanCostSummary:
-        loader = info.context.data_loaders.span_cost_summary_by_project
-        summary = await loader.load((self.project_rowid, time_range, filter_condition))
+        if filter_condition and session_filter_condition:
+            raise BadRequest(
+                "Both a filter condition and session filter condition "
+                "cannot be applied at the same time"
+            )
+        summary = await info.context.data_loaders.span_cost_summary_by_project.load(
+            (
+                self.id,
+                time_range or None,
+                filter_condition or None,
+                session_filter_condition or None,
+            )
+        )
         return SpanCostSummary(
             prompt=CostBreakdown(
                 tokens=summary.prompt.tokens,
@@ -213,13 +247,21 @@ class Project(Node):
         info: Info[Context, None],
         probability: float,
         time_range: Optional[TimeRange] = UNSET,
+        filter_condition: Optional[str] = UNSET,
+        session_filter_condition: Optional[str] = UNSET,
     ) -> Optional[float]:
+        if filter_condition and session_filter_condition:
+            raise BadRequest(
+                "Both a filter condition and session filter condition "
+                "cannot be applied at the same time"
+            )
         return await info.context.data_loaders.latency_ms_quantile.load(
             (
                 "trace",
-                self.project_rowid,
-                time_range,
-                None,
+                self.id,
+                time_range or None,
+                filter_condition or None,
+                session_filter_condition or None,
                 probability,
             ),
         )
@@ -231,13 +273,20 @@ class Project(Node):
         probability: float,
         time_range: Optional[TimeRange] = UNSET,
         filter_condition: Optional[str] = UNSET,
+        session_filter_condition: Optional[str] = UNSET,
     ) -> Optional[float]:
+        if filter_condition and session_filter_condition:
+            raise BadRequest(
+                "Both a filter condition and session filter condition "
+                "cannot be applied at the same time"
+            )
         return await info.context.data_loaders.latency_ms_quantile.load(
             (
                 "span",
-                self.project_rowid,
-                time_range,
-                filter_condition,
+                self.id,
+                time_range or None,
+                filter_condition or None,
+                session_filter_condition or None,
                 probability,
             ),
         )
@@ -247,12 +296,12 @@ class Project(Node):
         stmt = (
             select(models.Trace)
             .where(models.Trace.trace_id == str(trace_id))
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
         )
         async with info.context.db() as session:
             if (trace := await session.scalar(stmt)) is None:
                 return None
-            return Trace(trace_rowid=trace.id, db_trace=trace)
+            return Trace(id=trace.id, db_record=trace)
 
     @strawberry.field
     async def spans(
@@ -271,7 +320,7 @@ class Project(Node):
         if root_spans_only and not filter_condition and sort and sort.col is SpanColumn.startTime:
             return await _paginate_span_by_trace_start_time(
                 db=info.context.db,
-                project_rowid=self.project_rowid,
+                project_rowid=self.id,
                 time_range=time_range,
                 first=first,
                 after=after,
@@ -282,7 +331,7 @@ class Project(Node):
             select(models.Span.id)
             .select_from(models.Span)
             .join(models.Trace)
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
         )
         if time_range:
             if time_range.start:
@@ -304,12 +353,16 @@ class Project(Node):
             if sort_config and cursor.sort_column:
                 sort_column = cursor.sort_column
                 compare = operator.lt if sort_config.dir is SortDir.desc else operator.gt
-                stmt = stmt.where(
-                    compare(
-                        tuple_(sort_config.orm_expression, models.Span.id),
-                        (sort_column.value, cursor.rowid),
+                if sort_column.type is CursorSortColumnDataType.NULL:
+                    stmt = stmt.where(sort_config.orm_expression.is_(None))
+                    stmt = stmt.where(compare(models.Span.id, cursor.rowid))
+                else:
+                    stmt = stmt.where(
+                        compare(
+                            tuple_(sort_config.orm_expression, models.Span.id),
+                            (sort_column.value, cursor.rowid),
+                        )
                     )
-                )
             else:
                 stmt = stmt.where(models.Span.id > cursor.rowid)
         stmt = stmt.order_by(cursor_rowid_column)
@@ -347,7 +400,7 @@ class Project(Node):
                         type=sort_config.column_data_type,
                         value=span_record[1],
                     )
-                cursors_and_nodes.append((cursor, Span(span_rowid=span_rowid)))
+                cursors_and_nodes.append((cursor, Span(id=span_rowid)))
             has_next_page = True
             try:
                 await span_records.__anext__()
@@ -377,12 +430,12 @@ class Project(Node):
                 ans = await session.scalar(
                     select(table).filter_by(
                         session_id=session_id,
-                        project_id=self.project_rowid,
+                        project_id=self.id,
                     )
                )
             if ans:
                 return connection_from_list(
-                    data=[to_gql_project_session(ans)],
+                    data=[ProjectSession(id=ans.id, db_record=ans)],
                     args=ConnectionArgs(),
                 )
             elif not filter_io_substring:
@@ -390,106 +443,45 @@ class Project(Node):
                     data=[],
                     args=ConnectionArgs(),
                 )
-        stmt = select(table).filter_by(project_id=self.project_rowid)
+        stmt = select(table).filter_by(project_id=self.id)
         if time_range:
             if time_range.start:
                 stmt = stmt.where(time_range.start <= table.start_time)
             if time_range.end:
                 stmt = stmt.where(table.start_time < time_range.end)
         if filter_io_substring:
-            filter_stmt = (
-                select(distinct(models.Trace.project_session_rowid).label("id"))
-                .filter_by(project_rowid=self.project_rowid)
-                .join_from(models.Trace, models.Span)
-                .where(models.Span.parent_id.is_(None))
-                .where(
-                    or_(
-                        models.CaseInsensitiveContains(
-                            models.Span.attributes[INPUT_VALUE].as_string(),
-                            filter_io_substring,
-                        ),
-                        models.CaseInsensitiveContains(
-                            models.Span.attributes[OUTPUT_VALUE].as_string(),
-                            filter_io_substring,
-                        ),
-                    )
-                )
+            filtered_session_rowids = get_filtered_session_rowids_subquery(
+                session_filter_condition=filter_io_substring,
+                project_rowids=[self.id],
+                start_time=time_range.start if time_range else None,
+                end_time=time_range.end if time_range else None,
             )
-            if time_range:
-                if time_range.start:
-                    filter_stmt = filter_stmt.where(time_range.start <= models.Trace.start_time)
-                if time_range.end:
-                    filter_stmt = filter_stmt.where(models.Trace.start_time < time_range.end)
-            filter_subq = filter_stmt.subquery()
-            stmt = stmt.join(filter_subq, table.id == filter_subq.c.id)
+            stmt = stmt.where(table.id.in_(filtered_session_rowids))
+        sort_config: Optional[ProjectSessionSortConfig] = None
+        cursor_rowid_column: Any = table.id
         if sort:
-            key: ColumnElement[Any]
-            if sort.col is ProjectSessionColumn.startTime:
-                key = table.start_time.label("key")
-            elif sort.col is ProjectSessionColumn.endTime:
-                key = table.end_time.label("key")
-            elif (
-                sort.col is ProjectSessionColumn.tokenCountTotal
-                or sort.col is ProjectSessionColumn.numTraces
-            ):
-                if sort.col is ProjectSessionColumn.tokenCountTotal:
-                    sort_subq = (
-                        select(
-                            models.Trace.project_session_rowid.label("id"),
-                            func.sum(models.Span.cumulative_llm_token_count_total).label("key"),
-                        )
-                        .join_from(models.Trace, models.Span)
-                        .where(models.Span.parent_id.is_(None))
-                        .group_by(models.Trace.project_session_rowid)
-                    ).subquery()
-                elif sort.col is ProjectSessionColumn.numTraces:
-                    sort_subq = (
-                        select(
-                            models.Trace.project_session_rowid.label("id"),
-                            func.count(models.Trace.id).label("key"),
-                        ).group_by(models.Trace.project_session_rowid)
-                    ).subquery()
+            sort_config = sort.update_orm_expr(stmt)
+            stmt = sort_config.stmt
+            if sort_config.dir is SortDir.desc:
+                cursor_rowid_column = desc(cursor_rowid_column)
+        if after:
+            cursor = Cursor.from_string(after)
+            if sort_config and cursor.sort_column:
+                sort_column = cursor.sort_column
+                compare = operator.lt if sort_config.dir is SortDir.desc else operator.gt
+                if sort_column.type is CursorSortColumnDataType.NULL:
+                    stmt = stmt.where(sort_config.orm_expression.is_(None))
+                    stmt = stmt.where(compare(table.id, cursor.rowid))
                 else:
-                    assert_never(sort.col)
-                key = sort_subq.c.key
-                stmt = stmt.join(sort_subq, table.id == sort_subq.c.id)
-            elif sort.col is ProjectSessionColumn.costTotal:
-                sort_subq = (
-                    select(
-                        models.Trace.project_session_rowid.label("id"),
-                        func.sum(models.SpanCost.total_cost).label("key"),
-                    )
-                    .join_from(
-                        models.Trace,
-                        models.SpanCost,
-                        models.Trace.id == models.SpanCost.trace_rowid,
+                    stmt = stmt.where(
+                        compare(
+                            tuple_(sort_config.orm_expression, table.id),
+                            (sort_column.value, cursor.rowid),
+                        )
                     )
-                    .group_by(models.Trace.project_session_rowid)
-                ).subquery()
-                key = sort_subq.c.key
-                stmt = stmt.join(sort_subq, table.id == sort_subq.c.id)
             else:
-                assert_never(sort.col)
-            stmt = stmt.add_columns(key)
-            if sort.dir is SortDir.asc:
-                stmt = stmt.order_by(key.asc(), table.id.asc())
-            else:
-                stmt = stmt.order_by(key.desc(), table.id.desc())
-            if after:
-                cursor = Cursor.from_string(after)
-                assert cursor.sort_column is not None
-                compare = operator.lt if sort.dir is SortDir.desc else operator.gt
-                stmt = stmt.where(
-                    compare(
-                        tuple_(key, table.id),
-                        (cursor.sort_column.value, cursor.rowid),
-                    )
-                )
-        else:
-            stmt = stmt.order_by(table.id.desc())
-            if after:
-                cursor = Cursor.from_string(after)
                 stmt = stmt.where(table.id < cursor.rowid)
+        stmt = stmt.order_by(cursor_rowid_column)
         if first:
             stmt = stmt.limit(
                 first + 1  # over-fetch by one to determine whether there's a next page
@@ -500,13 +492,15 @@ class Project(Node):
             async for record in islice(records, first):
                 project_session = record[0]
                 cursor = Cursor(rowid=project_session.id)
-                if sort:
+                if sort_config:
                    assert len(record) > 1
                     cursor.sort_column = CursorSortColumn(
-                        type=sort.col.data_type,
+                        type=sort_config.column_data_type,
                         value=record[1],
                     )
-                cursors_and_nodes.append((cursor, to_gql_project_session(project_session)))
+                cursors_and_nodes.append(
+                    (cursor, ProjectSession(id=project_session.id, db_record=project_session))
+                )
             has_next_page = True
             try:
                 await records.__anext__()
@@ -529,7 +523,7 @@ class Project(Node):
         stmt = (
             select(distinct(models.TraceAnnotation.name))
             .join(models.Trace)
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
         )
         async with info.context.db() as session:
             return list(await session.scalars(stmt))
@@ -546,7 +540,23 @@ class Project(Node):
             select(distinct(models.SpanAnnotation.name))
             .join(models.Span)
             .join(models.Trace, models.Span.trace_rowid == models.Trace.id)
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
+        )
+        async with info.context.db() as session:
+            return list(await session.scalars(stmt))
+
+    @strawberry.field(
+        description="Names of all available annotations for sessions. "
+        "(The list contains no duplicates.)"
+    )  # type: ignore
+    async def session_annotation_names(
+        self,
+        info: Info[Context, None],
+    ) -> list[str]:
+        stmt = (
+            select(distinct(models.ProjectSessionAnnotation.name))
+            .join(models.ProjectSession)
+            .where(models.ProjectSession.project_id == self.id)
         )
         async with info.context.db() as session:
             return list(await session.scalars(stmt))
@@ -563,7 +573,7 @@ class Project(Node):
             select(distinct(models.DocumentAnnotation.name))
             .join(models.Span)
             .join(models.Trace, models.Span.trace_rowid == models.Trace.id)
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
             .where(models.DocumentAnnotation.annotator_kind == "LLM")
         )
         if span_id:
@@ -576,10 +586,24 @@ class Project(Node):
         self,
         info: Info[Context, None],
         annotation_name: str,
+        filter_condition: Optional[str] = UNSET,
+        session_filter_condition: Optional[str] = UNSET,
         time_range: Optional[TimeRange] = UNSET,
     ) -> Optional[AnnotationSummary]:
+        if filter_condition and session_filter_condition:
+            raise BadRequest(
+                "Both a filter condition and session filter condition "
+                "cannot be applied at the same time"
+            )
         return await info.context.data_loaders.annotation_summaries.load(
-            ("trace", self.project_rowid, time_range, None, annotation_name),
+            (
+                "trace",
+                self.id,
+                time_range or None,
+                filter_condition or None,
+                session_filter_condition or None,
+                annotation_name,
+            ),
         )
 
     @strawberry.field
@@ -589,9 +613,22 @@ class Project(Node):
         annotation_name: str,
         time_range: Optional[TimeRange] = UNSET,
         filter_condition: Optional[str] = UNSET,
+        session_filter_condition: Optional[str] = UNSET,
     ) -> Optional[AnnotationSummary]:
+        if filter_condition and session_filter_condition:
+            raise BadRequest(
+                "Both a filter condition and session filter condition "
+                "cannot be applied at the same time"
+            )
         return await info.context.data_loaders.annotation_summaries.load(
-            ("span", self.project_rowid, time_range, filter_condition, annotation_name),
+            (
+                "span",
+                self.id,
+                time_range or None,
+                filter_condition or None,
+                session_filter_condition or None,
+                annotation_name,
+            ),
         )
 
     @strawberry.field
@@ -603,7 +640,7 @@ class Project(Node):
         filter_condition: Optional[str] = UNSET,
     ) -> Optional[DocumentEvaluationSummary]:
         return await info.context.data_loaders.document_evaluation_summaries.load(
-            (self.project_rowid, time_range, filter_condition, evaluation_name),
+            (self.id, time_range, filter_condition, evaluation_name),
         )
 
     @strawberry.field
@@ -611,7 +648,7 @@ class Project(Node):
         self,
         info: Info[Context, None],
     ) -> Optional[datetime]:
-        return info.context.last_updated_at.get(self._table, self.project_rowid)
+        return info.context.last_updated_at.get(models.Project, self.id)
 
     @strawberry.field
     async def validate_span_filter_condition(
@@ -644,7 +681,7 @@ class Project(Node):
             stmt = span_filter(select(models.Span))
             dialect = info.context.db.dialect
             if dialect is SupportedSQLDialect.POSTGRESQL:
-                str(stmt.compile(dialect=sqlite.dialect()))  # type: ignore[no-untyped-call]
+                str(stmt.compile(dialect=sqlite.dialect()))
             elif dialect is SupportedSQLDialect.SQLITE:
                 str(stmt.compile(dialect=postgresql.dialect()))  # type: ignore[no-untyped-call]
             else:
@@ -672,7 +709,7 @@ class Project(Node):
             before=before if isinstance(before, CursorString) else None,
         )
         loader = info.context.data_loaders.annotation_configs_by_project
-        configs = await loader.load(self.project_rowid)
+        configs = await loader.load(self.id)
         data = [to_gql_annotation_config(config) for config in configs]
         return connection_from_list(data=data, args=args)

@@ -680,12 +717,10 @@ class Project(Node):
     async def trace_retention_policy(
         self,
         info: Info[Context, None],
-    ) -> Annotated[ProjectTraceRetentionPolicy, lazy(".ProjectTraceRetentionPolicy")]:
+    ) -> Annotated["ProjectTraceRetentionPolicy", lazy(".ProjectTraceRetentionPolicy")]:
         from .ProjectTraceRetentionPolicy import ProjectTraceRetentionPolicy

-        id_ = await info.context.data_loaders.trace_retention_policy_id_by_project_id.load(
-            self.project_rowid
-        )
+        id_ = await info.context.data_loaders.trace_retention_policy_id_by_project_id.load(self.id)
         return ProjectTraceRetentionPolicy(id=id_)

     @strawberry.field
@@ -693,11 +728,11 @@ class Project(Node):
         self,
         info: Info[Context, None],
     ) -> datetime:
-        if self.db_project:
-            created_at = self.db_project.created_at
+        if self.db_record:
+            created_at = self.db_record.created_at
         else:
             created_at = await info.context.data_loaders.project_fields.load(
-                (self.project_rowid, models.Project.created_at),
+                (self.id, models.Project.created_at),
             )
         return created_at

@@ -706,11 +741,11 @@ class Project(Node):
         self,
         info: Info[Context, None],
     ) -> datetime:
-        if self.db_project:
-            updated_at = self.db_project.updated_at
+        if self.db_record:
+            updated_at = self.db_record.updated_at
         else:
             updated_at = await info.context.data_loaders.project_fields.load(
-                (self.project_rowid, models.Project.updated_at),
+                (self.id, models.Project.updated_at),
             )
         return updated_at

@@ -721,7 +756,7 @@ class Project(Node):
         time_range: TimeRange,
         time_bin_config: Optional[TimeBinConfig] = UNSET,
         filter_condition: Optional[str] = UNSET,
-    ) -> SpanCountTimeSeries:
+    ) -> "SpanCountTimeSeries":
         if time_range.start is None:
             raise BadRequest("Start time is required")

@@ -756,7 +791,7 @@ class Project(Node):
                 ),
             )
             .join_from(models.Span, models.Trace)
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
             .group_by(bucket)
             .order_by(bucket)
         )
@@ -806,7 +841,7 @@ class Project(Node):
         info: Info[Context, None],
         time_range: TimeRange,
         time_bin_config: Optional[TimeBinConfig] = UNSET,
-    ) -> TraceCountTimeSeries:
+    ) -> "TraceCountTimeSeries":
         if time_range.start is None:
             raise BadRequest("Start time is required")

@@ -830,7 +865,7 @@ class Project(Node):
         bucket = date_trunc(dialect, field, models.Trace.start_time, utc_offset_minutes)
         stmt = (
             select(bucket, func.count(models.Trace.id))
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
             .group_by(bucket)
             .order_by(bucket)
         )
@@ -869,7 +904,7 @@ class Project(Node):
         info: Info[Context, None],
         time_range: TimeRange,
         time_bin_config: Optional[TimeBinConfig] = UNSET,
-    ) -> TraceCountByStatusTimeSeries:
+    ) -> "TraceCountByStatusTimeSeries":
         if time_range.start is None:
             raise BadRequest("Start time is required")

@@ -913,7 +948,7 @@ class Project(Node):
                 onclause=trace_error_status_counts.c.trace_rowid == models.Trace.id,
                 isouter=True,
             )
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
             .group_by(bucket)
             .order_by(bucket)
         )
@@ -962,7 +997,7 @@ class Project(Node):
         info: Info[Context, None],
         time_range: TimeRange,
         time_bin_config: Optional[TimeBinConfig] = UNSET,
-    ) -> TraceLatencyPercentileTimeSeries:
+    ) -> "TraceLatencyPercentileTimeSeries":
         if time_range.start is None:
             raise BadRequest("Start time is required")

@@ -985,7 +1020,7 @@ class Project(Node):
             field = "year"
         bucket = date_trunc(dialect, field, models.Trace.start_time, utc_offset_minutes)

-        stmt = select(bucket).where(models.Trace.project_rowid == self.project_rowid)
+        stmt = select(bucket).where(models.Trace.project_rowid == self.id)
         if time_range.start:
             stmt = stmt.where(time_range.start <= models.Trace.start_time)
         if time_range.end:
@@ -1066,7 +1101,7 @@ class Project(Node):
         info: Info[Context, None],
         time_range: TimeRange,
         time_bin_config: Optional[TimeBinConfig] = UNSET,
-    ) -> TraceTokenCountTimeSeries:
+    ) -> "TraceTokenCountTimeSeries":
         if time_range.start is None:
             raise BadRequest("Start time is required")

@@ -1100,7 +1135,7 @@ class Project(Node):
                 models.SpanCost,
                 onclause=models.SpanCost.trace_rowid == models.Trace.id,
             )
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
             .group_by(bucket)
             .order_by(bucket)
         )
@@ -1149,7 +1184,7 @@ class Project(Node):
         info: Info[Context, None],
         time_range: TimeRange,
         time_bin_config: Optional[TimeBinConfig] = UNSET,
-    ) -> TraceTokenCostTimeSeries:
+    ) -> "TraceTokenCostTimeSeries":
         if time_range.start is None:
             raise BadRequest("Start time is required")

@@ -1183,7 +1218,7 @@ class Project(Node):
                 models.SpanCost,
                 onclause=models.SpanCost.trace_rowid == models.Trace.id,
             )
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
             .group_by(bucket)
             .order_by(bucket)
         )
@@ -1232,7 +1267,7 @@ class Project(Node):
         info: Info[Context, None],
         time_range: TimeRange,
         time_bin_config: Optional[TimeBinConfig] = UNSET,
-    ) -> SpanAnnotationScoreTimeSeries:
+    ) -> "SpanAnnotationScoreTimeSeries":
         if time_range.start is None:
             raise BadRequest("Start time is required")

@@ -1270,7 +1305,7 @@ class Project(Node):
                 models.Trace,
                 onclause=models.Span.trace_rowid == models.Trace.id,
             )
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
             .group_by(bucket, models.SpanAnnotation.name)
             .order_by(bucket)
         )
@@ -1287,6 +1322,8 @@ class Project(Node):
                 name,
                 average_score,
             ) in await session.stream(stmt):
+                if average_score is None:
+                    continue
                 timestamp = _as_datetime(t)
                 if timestamp not in scores:
                     scores[timestamp] = {}
@@ -1355,7 +1392,7 @@ class Project(Node):
                 models.Trace,
                 models.SpanCost.trace_rowid == models.Trace.id,
             )
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
             .where(models.SpanCost.model_id.isnot(None))
             .where(models.SpanCost.span_start_time >= time_range.start)
             .group_by(models.GenerativeModel.id)
@@ -1382,10 +1419,8 @@ class Project(Node):
                 start=time_range.start,
                 end=time_range.end,
             )
-            gql_model = to_gql_generative_model(model)
-            gql_model.add_cached_cost_summary(
-                self.project_rowid, cache_time_range, cost_summary
-            )
+            gql_model = GenerativeModel(id=model.id, db_record=model)
+            gql_model.add_cached_cost_summary(self.id, cache_time_range, cost_summary)
             results.append(gql_model)
         return results

@@ -1417,7 +1452,7 @@ class Project(Node):
                 models.Trace,
                 models.SpanCost.trace_rowid == models.Trace.id,
             )
-            .where(models.Trace.project_rowid == self.project_rowid)
+            .where(models.Trace.project_rowid == self.id)
             .where(models.SpanCost.model_id.isnot(None))
             .where(models.SpanCost.span_start_time >= time_range.start)
             .group_by(models.GenerativeModel.id)
@@ -1444,10 +1479,8 @@ class Project(Node):
                 start=time_range.start,
                 end=time_range.end,
             )
-            gql_model = to_gql_generative_model(model)
-            gql_model.add_cached_cost_summary(
-                self.project_rowid, cache_time_range, cost_summary
-            )
+            gql_model = GenerativeModel(id=model.id, db_record=model)
+            gql_model.add_cached_cost_summary(self.id, cache_time_range, cost_summary)
             results.append(gql_model)
         return results

@@ -1717,7 +1750,7 @@ async def _paginate_span_by_trace_start_time(
             first_record = group[0]
             # Only create edge if trace has a root span
             if (span_rowid := first_record[2]) is not None:
-                edges.append(Edge(node=Span(span_rowid=span_rowid), cursor=str(cursor)))
+                edges.append(Edge(node=Span(id=span_rowid), cursor=str(cursor)))
         has_next_page = True
         try:
             await records.__anext__()
@@ -1760,6 +1793,6 @@ def to_gql_project(project: models.Project) -> Project:
     Converts an ORM project to a GraphQL project.
     """
     return Project(
-        project_rowid=project.id,
-        db_project=project,
+        id=project.id,
+        db_record=project,
     )
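
The hunks above add a session_annotation_names resolver and a session_filter_condition argument to several Project fields. A rough, hypothetical sketch of how a client might exercise the new field, assuming Phoenix's default /graphql endpoint on port 6006, a relay-style node root field, and strawberry's default camelCase name conversion (none of which are confirmed by this diff; the global ID below is a placeholder):

    # Hypothetical client sketch, not part of the diff above.
    import httpx

    # GraphQL query for the new sessionAnnotationNames field on a Project node.
    query = """
    {
      node(id: "UHJvamVjdDox") {
        ... on Project {
          sessionAnnotationNames
        }
      }
    }
    """

    # POST the query to the assumed GraphQL endpoint and print the distinct
    # session annotation names returned by the server.
    response = httpx.post("http://localhost:6006/graphql", json={"query": query})
    response.raise_for_status()
    print(response.json()["data"]["node"]["sessionAnnotationNames"])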