arize-phoenix 4.14.1__py3-none-any.whl → 4.16.0__py3-none-any.whl
This diff shows the contents of two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release: this version of arize-phoenix might be problematic.
- {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.16.0.dist-info}/METADATA +5 -3
- {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.16.0.dist-info}/RECORD +81 -71
- phoenix/db/bulk_inserter.py +131 -5
- phoenix/db/engines.py +2 -1
- phoenix/db/helpers.py +23 -1
- phoenix/db/insertion/constants.py +2 -0
- phoenix/db/insertion/document_annotation.py +157 -0
- phoenix/db/insertion/helpers.py +13 -0
- phoenix/db/insertion/span_annotation.py +144 -0
- phoenix/db/insertion/trace_annotation.py +144 -0
- phoenix/db/insertion/types.py +261 -0
- phoenix/experiments/functions.py +3 -2
- phoenix/experiments/types.py +3 -3
- phoenix/server/api/context.py +7 -9
- phoenix/server/api/dataloaders/__init__.py +2 -0
- phoenix/server/api/dataloaders/average_experiment_run_latency.py +3 -3
- phoenix/server/api/dataloaders/dataset_example_revisions.py +2 -4
- phoenix/server/api/dataloaders/dataset_example_spans.py +2 -4
- phoenix/server/api/dataloaders/document_evaluation_summaries.py +2 -4
- phoenix/server/api/dataloaders/document_evaluations.py +2 -4
- phoenix/server/api/dataloaders/document_retrieval_metrics.py +2 -4
- phoenix/server/api/dataloaders/evaluation_summaries.py +2 -4
- phoenix/server/api/dataloaders/experiment_annotation_summaries.py +2 -4
- phoenix/server/api/dataloaders/experiment_error_rates.py +2 -4
- phoenix/server/api/dataloaders/experiment_run_counts.py +2 -4
- phoenix/server/api/dataloaders/experiment_sequence_number.py +2 -4
- phoenix/server/api/dataloaders/latency_ms_quantile.py +2 -3
- phoenix/server/api/dataloaders/min_start_or_max_end_times.py +2 -4
- phoenix/server/api/dataloaders/project_by_name.py +3 -3
- phoenix/server/api/dataloaders/record_counts.py +2 -4
- phoenix/server/api/dataloaders/span_annotations.py +2 -4
- phoenix/server/api/dataloaders/span_dataset_examples.py +36 -0
- phoenix/server/api/dataloaders/span_descendants.py +2 -4
- phoenix/server/api/dataloaders/span_evaluations.py +2 -4
- phoenix/server/api/dataloaders/span_projects.py +3 -3
- phoenix/server/api/dataloaders/token_counts.py +2 -4
- phoenix/server/api/dataloaders/trace_evaluations.py +2 -4
- phoenix/server/api/dataloaders/trace_row_ids.py +2 -4
- phoenix/server/api/input_types/SpanAnnotationSort.py +17 -0
- phoenix/server/api/input_types/TraceAnnotationSort.py +17 -0
- phoenix/server/api/mutations/span_annotations_mutations.py +8 -3
- phoenix/server/api/mutations/trace_annotations_mutations.py +8 -3
- phoenix/server/api/openapi/main.py +18 -2
- phoenix/server/api/openapi/schema.py +12 -12
- phoenix/server/api/routers/v1/__init__.py +36 -83
- phoenix/server/api/routers/v1/datasets.py +515 -509
- phoenix/server/api/routers/v1/evaluations.py +164 -73
- phoenix/server/api/routers/v1/experiment_evaluations.py +68 -91
- phoenix/server/api/routers/v1/experiment_runs.py +98 -155
- phoenix/server/api/routers/v1/experiments.py +132 -181
- phoenix/server/api/routers/v1/pydantic_compat.py +78 -0
- phoenix/server/api/routers/v1/spans.py +164 -203
- phoenix/server/api/routers/v1/traces.py +134 -159
- phoenix/server/api/routers/v1/utils.py +95 -0
- phoenix/server/api/types/Span.py +27 -3
- phoenix/server/api/types/Trace.py +21 -4
- phoenix/server/api/utils.py +4 -4
- phoenix/server/app.py +172 -192
- phoenix/server/grpc_server.py +2 -2
- phoenix/server/main.py +5 -9
- phoenix/server/static/.vite/manifest.json +31 -31
- phoenix/server/static/assets/components-Ci5kMOk5.js +1175 -0
- phoenix/server/static/assets/{index-CQgXRwU0.js → index-BQG5WVX7.js} +2 -2
- phoenix/server/static/assets/{pages-hdjlFZhO.js → pages-BrevprVW.js} +451 -275
- phoenix/server/static/assets/{vendor-DPvSDRn3.js → vendor-CP0b0YG0.js} +2 -2
- phoenix/server/static/assets/{vendor-arizeai-CkvPT67c.js → vendor-arizeai-DTbiPGp6.js} +27 -27
- phoenix/server/static/assets/vendor-codemirror-DtdPDzrv.js +15 -0
- phoenix/server/static/assets/{vendor-recharts-5jlNaZuF.js → vendor-recharts-A0DA1O99.js} +1 -1
- phoenix/server/thread_server.py +2 -2
- phoenix/server/types.py +18 -0
- phoenix/session/client.py +5 -3
- phoenix/session/session.py +2 -2
- phoenix/trace/dsl/filter.py +2 -6
- phoenix/trace/fixtures.py +17 -23
- phoenix/trace/utils.py +23 -0
- phoenix/utilities/client.py +116 -0
- phoenix/utilities/project.py +1 -1
- phoenix/version.py +1 -1
- phoenix/server/api/routers/v1/dataset_examples.py +0 -178
- phoenix/server/openapi/docs.py +0 -221
- phoenix/server/static/assets/components-DeS0YEmv.js +0 -1142
- phoenix/server/static/assets/vendor-codemirror-Cqwpwlua.js +0 -12
- {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.16.0.dist-info}/WHEEL +0 -0
- {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.16.0.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.16.0.dist-info}/licenses/LICENSE +0 -0
phoenix/server/api/routers/v1/experiments.py
@@ -1,8 +1,11 @@
+from datetime import datetime
 from random import getrandbits
+from typing import Any, Dict, Optional
 
+from fastapi import APIRouter, HTTPException
+from pydantic import Field
 from sqlalchemy import select
 from starlette.requests import Request
-from starlette.responses import JSONResponse, Response
 from starlette.status import HTTP_404_NOT_FOUND
 from strawberry.relay import GlobalID
 
@@ -11,6 +14,11 @@ from phoenix.db.helpers import SupportedSQLDialect
 from phoenix.db.insertion.helpers import insert_on_conflict
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 
+from .pydantic_compat import V1RoutesBaseModel
+from .utils import ResponseBody, add_errors_to_responses
+
+router = APIRouter(tags=["experiments"], include_in_schema=False)
+
 
 def _short_uuid() -> str:
     return str(getrandbits(32).to_bytes(4, "big").hex())
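Each rewritten v1 module now carries its own FastAPI `APIRouter`, like the one added above, instead of registering raw Starlette routes. This section does not show how the per-module routers are combined (that happens in the reworked `phoenix/server/api/routers/v1/__init__.py` and `phoenix/server/app.py`); the snippet below is only a minimal sketch of the pattern, with the `/v1` prefix and all aggregation names chosen for illustration:

# Illustrative sketch only -- the aggregation names and "/v1" prefix are
# assumptions, not code from this release.
from fastapi import APIRouter, FastAPI

# Stand-in for the module-level router defined in experiments.py above.
experiments_router = APIRouter(tags=["experiments"], include_in_schema=False)

v1_router = APIRouter(prefix="/v1")           # hypothetical aggregate router
v1_router.include_router(experiments_router)  # one include_router call per v1 module

app = FastAPI()
app.include_router(v1_router)  # endpoints surface as e.g. POST /v1/datasets/{dataset_id}/experiments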
@@ -24,94 +32,76 @@ def _generate_experiment_name(dataset_name: str) -> str:
     return f"{short_ds_name}-{_short_uuid()}"
 
 
-
+class Experiment(V1RoutesBaseModel):
+    id: str = Field(description="The ID of the experiment")
+    dataset_id: str = Field(description="The ID of the dataset associated with the experiment")
+    dataset_version_id: str = Field(
+        description="The ID of the dataset version associated with the experiment"
+    )
+    repetitions: int = Field(description="Number of times the experiment is repeated")
+    metadata: Dict[str, Any] = Field(description="Metadata of the experiment")
+    project_name: Optional[str] = Field(
+        description="The name of the project associated with the experiment"
+    )
+    created_at: datetime = Field(description="The creation timestamp of the experiment")
+    updated_at: datetime = Field(description="The last update timestamp of the experiment")
+
+
+class CreateExperimentRequestBody(V1RoutesBaseModel):
     """
-
-    operationId: createExperiment
-    tags:
-      - private
-    parameters:
-      - in: path
-        name: dataset_id
-        required: true
-        description: The ID of the dataset to create an experiment for
-        schema:
-          type: string
-    requestBody:
-      description: Details of the experiment to be created
-      required: true
-      content:
-        application/json:
-          schema:
-            type: object
-            properties:
-              repetitions:
-                type: integer
-                description: Number of times the experiment should be repeated for each example
-                default: 1
-              metadata:
-                type: object
-                description: Metadata for the experiment
-                additionalProperties:
-                  type: string
-              version_id:
-                type: string
-                description: ID of the dataset version to use
-    responses:
-      200:
-        description: Experiment retrieved successfully
-        content:
-          application/json:
-            schema:
-              type: object
-              properties:
-                data:
-                  type: object
-                  properties:
-                    id:
-                      type: string
-                      description: The ID of the experiment
-                    dataset_id:
-                      type: string
-                      description: The ID of the dataset associated with the experiment
-                    dataset_version_id:
-                      type: string
-                      description: The ID of the dataset version associated with the experiment
-                    repetitions:
-                      type: integer
-                      description: Number of times the experiment is repeated
-                    metadata:
-                      type: object
-                      description: Metadata of the experiment
-                      additionalProperties:
-                        type: string
-                    project_name:
-                      type: string
-                      description: The name of the project associated with the experiment
-                    created_at:
-                      type: string
-                      format: date-time
-                      description: The creation timestamp of the experiment
-                    updated_at:
-                      type: string
-                      format: date-time
-                      description: The last update timestamp of the experiment
-      404:
-        description: Dataset or DatasetVersion not found
+    Details of the experiment to be created
     """
-
+
+    name: Optional[str] = Field(
+        default=None,
+        description=("Name of the experiment (if omitted, a random name will be generated)"),
+    )
+    description: Optional[str] = Field(
+        default=None, description="An optional description of the experiment"
+    )
+    metadata: Optional[Dict[str, Any]] = Field(
+        default=None, description="Metadata for the experiment"
+    )
+    version_id: Optional[str] = Field(
+        default=None,
+        description=(
+            "ID of the dataset version over which the experiment will be run "
+            "(if omitted, the latest version will be used)"
+        ),
+    )
+    repetitions: int = Field(
+        default=1, description="Number of times the experiment should be repeated for each example"
+    )
+
+
+class CreateExperimentResponseBody(ResponseBody[Experiment]):
+    pass
+
+
+@router.post(
+    "/datasets/{dataset_id}/experiments",
+    operation_id="createExperiment",
+    summary="Create experiment on a dataset",
+    responses=add_errors_to_responses(
+        [{"status_code": HTTP_404_NOT_FOUND, "description": "Dataset or DatasetVersion not found"}]
+    ),
+    response_description="Experiment retrieved successfully",
+)
+async def create_experiment(
+    request: Request,
+    dataset_id: str,
+    request_body: CreateExperimentRequestBody,
+) -> CreateExperimentResponseBody:
+    dataset_globalid = GlobalID.from_id(dataset_id)
     try:
-
+        dataset_rowid = from_global_id_with_expected_type(dataset_globalid, "Dataset")
     except ValueError:
-
-
+        raise HTTPException(
+            detail=f"Dataset with ID {dataset_globalid} does not exist",
             status_code=HTTP_404_NOT_FOUND,
         )
 
-
-    repetitions = payload.get("repetitions", 1)
-    metadata = payload.get("metadata") or {}
-    dataset_version_globalid_str = payload.get("version_id")
+    dataset_version_globalid_str = request_body.version_id
     if dataset_version_globalid_str is not None:
         try:
             dataset_version_globalid = GlobalID.from_id(dataset_version_globalid_str)
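With `CreateExperimentRequestBody` in place, FastAPI now validates and documents the payload that the old handler pulled out of `await request.json()` by hand. A request along the following lines should exercise the new endpoint; the base URL, the `/v1` prefix, and the example dataset ID are assumptions for illustration:

# Hypothetical client call; URL, prefix, and IDs are illustrative assumptions.
import httpx

response = httpx.post(
    "http://localhost:6006/v1/datasets/RGF0YXNldDox/experiments",
    json={
        "name": "my-experiment",      # optional: a random name is generated if omitted
        "metadata": {"owner": "me"},  # optional
        "repetitions": 1,             # optional: defaults to 1
        # "version_id": ...           # optional: latest dataset version if omitted
    },
)
experiment = response.json()["data"]  # responses wrap the Experiment under a "data" key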
@@ -119,31 +109,31 @@ async def create_experiment(request: Request) -> Response:
                 dataset_version_globalid, "DatasetVersion"
             )
         except ValueError:
-
-
+            raise HTTPException(
+                detail=f"DatasetVersion with ID {dataset_version_globalid_str} does not exist",
                 status_code=HTTP_404_NOT_FOUND,
             )
 
     async with request.app.state.db() as session:
         result = (
-            await session.execute(select(models.Dataset).where(models.Dataset.id ==
+            await session.execute(select(models.Dataset).where(models.Dataset.id == dataset_rowid))
         ).scalar()
         if result is None:
-
-
+            raise HTTPException(
+                detail=f"Dataset with ID {dataset_globalid} does not exist",
                 status_code=HTTP_404_NOT_FOUND,
             )
         dataset_name = result.name
         if dataset_version_globalid_str is None:
             dataset_version_result = await session.execute(
                 select(models.DatasetVersion)
-                .where(models.DatasetVersion.dataset_id ==
+                .where(models.DatasetVersion.dataset_id == dataset_rowid)
                 .order_by(models.DatasetVersion.id.desc())
             )
             dataset_version = dataset_version_result.scalar()
             if not dataset_version:
-
-
+                raise HTTPException(
+                    detail=f"Dataset {dataset_globalid} does not have any versions",
                     status_code=HTTP_404_NOT_FOUND,
                 )
             dataset_version_id = dataset_version.id
@@ -154,24 +144,24 @@ async def create_experiment(request: Request) -> Response:
             )
             dataset_version = dataset_version.scalar()
             if not dataset_version:
-
-
+                raise HTTPException(
+                    detail=f"DatasetVersion with ID {dataset_version_globalid} does not exist",
                     status_code=HTTP_404_NOT_FOUND,
                 )
 
         # generate a semi-unique name for the experiment
-        experiment_name =
+        experiment_name = request_body.name or _generate_experiment_name(dataset_name)
         project_name = f"Experiment-{getrandbits(96).to_bytes(12, 'big').hex()}"
         project_description = (
             f"dataset_id: {dataset_globalid}\ndataset_version_id: {dataset_version_globalid}"
         )
         experiment = models.Experiment(
-            dataset_id=int(
+            dataset_id=int(dataset_rowid),
             dataset_version_id=int(dataset_version_id),
             name=experiment_name,
-            description=
-            repetitions=repetitions,
-            metadata_=metadata,
+            description=request_body.description,
+            repetitions=request_body.repetitions,
+            metadata_=request_body.metadata or {},
             project_name=project_name,
         )
         session.add(experiment)
@@ -198,104 +188,65 @@ async def create_experiment(request: Request) -> Response:
         dataset_version_globalid = GlobalID(
             "DatasetVersion", str(experiment.dataset_version_id)
         )
-
-
-
-
-
-
-
-
-
-
-
+    return CreateExperimentResponseBody(
+        data=Experiment(
+            id=str(experiment_globalid),
+            dataset_id=str(dataset_globalid),
+            dataset_version_id=str(dataset_version_globalid),
+            repetitions=experiment.repetitions,
+            metadata=experiment.metadata_,
+            project_name=experiment.project_name,
+            created_at=experiment.created_at,
+            updated_at=experiment.updated_at,
+        )
+    )
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        description: Experiment retrieved successfully
-        content:
-          application/json:
-            schema:
-              type: object
-              properties:
-                data:
-                  type: object
-                  properties:
-                    id:
-                      type: string
-                      description: The ID of the experiment
-                    dataset_id:
-                      type: string
-                      description: The ID of the dataset associated with the experiment
-                    dataset_version_id:
-                      type: string
-                      description: The ID of the dataset version associated with the experiment
-                    repetitions:
-                      type: integer
-                      description: Number of times the experiment is repeated
-                    metadata:
-                      type: object
-                      description: Metadata of the experiment
-                      additionalProperties:
-                        type: string
-                    project_name:
-                      type: string
-                      description: The name of the project associated with the experiment
-                    created_at:
-                      type: string
-                      format: date-time
-                      description: The creation timestamp of the experiment
-                    updated_at:
-                      type: string
-                      format: date-time
-                      description: The last update timestamp of the experiment
-      404:
-        description: Experiment not found
-    """
-    experiment_globalid = GlobalID.from_id(request.path_params["experiment_id"])
+class GetExperimentResponseBody(ResponseBody[Experiment]):
+    pass
+
+
+@router.get(
+    "/experiments/{experiment_id}",
+    operation_id="getExperiment",
+    summary="Get experiment by ID",
+    responses=add_errors_to_responses(
+        [{"status_code": HTTP_404_NOT_FOUND, "description": "Experiment not found"}]
+    ),
+    response_description="Experiment retrieved successfully",
+)
+async def get_experiment(request: Request, experiment_id: str) -> GetExperimentResponseBody:
+    experiment_globalid = GlobalID.from_id(experiment_id)
     try:
-
+        experiment_rowid = from_global_id_with_expected_type(experiment_globalid, "Experiment")
     except ValueError:
-
-
+        raise HTTPException(
+            detail=f"Experiment with ID {experiment_globalid} does not exist",
             status_code=HTTP_404_NOT_FOUND,
         )
 
     async with request.app.state.db() as session:
         experiment = await session.execute(
-            select(models.Experiment).where(models.Experiment.id ==
+            select(models.Experiment).where(models.Experiment.id == experiment_rowid)
         )
         experiment = experiment.scalar()
         if not experiment:
-
-
+            raise HTTPException(
+                detail=f"Experiment with ID {experiment_globalid} does not exist",
                 status_code=HTTP_404_NOT_FOUND,
             )
 
     dataset_globalid = GlobalID("Dataset", str(experiment.dataset_id))
     dataset_version_globalid = GlobalID("DatasetVersion", str(experiment.dataset_version_id))
-
-
-
-
-
-
-
-
-
-
-
+    return GetExperimentResponseBody(
+        data=Experiment(
+            id=str(experiment_globalid),
+            dataset_id=str(dataset_globalid),
+            dataset_version_id=str(dataset_version_globalid),
+            repetitions=experiment.repetitions,
+            metadata=experiment.metadata_,
+            project_name=experiment.project_name,
+            created_at=experiment.created_at,
+            updated_at=experiment.updated_at,
+        )
+    )
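The endpoints above rely on `ResponseBody` and `add_errors_to_responses` from the new `phoenix/server/api/routers/v1/utils.py` (+95 lines), whose body is not included in this section. Judging only from the call sites (`ResponseBody[Experiment]` exposing a `data` field, and a list of status-code/description pairs passed to FastAPI's `responses=`), a minimal compatible sketch could look like this; it is an inference, not the shipped implementation:

# Sketch inferred from call sites in experiments.py; not the actual utils.py.
from typing import Any, Dict, Generic, List, TypeVar, Union

from pydantic import BaseModel

T = TypeVar("T")


class ResponseBody(BaseModel, Generic[T]):
    # Every v1 endpoint wraps its payload under a top-level "data" key.
    # (Pydantic v2 spelling; v1 would subclass pydantic.generics.GenericModel.)
    data: T


def add_errors_to_responses(
    errors: List[Dict[str, Any]],
) -> Dict[Union[int, str], Dict[str, Any]]:
    # Turn [{"status_code": 404, "description": "..."}] into the mapping
    # FastAPI expects for the `responses=` argument of a route decorator.
    return {error["status_code"]: {"description": error["description"]} for error in errors}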
phoenix/server/api/routers/v1/pydantic_compat.py (new file)
@@ -0,0 +1,78 @@
+from datetime import datetime
+from enum import Enum
+from importlib.metadata import PackageNotFoundError, version
+
+from pydantic import BaseModel
+from typing_extensions import assert_never
+
+
+def datetime_encoder(dt: datetime) -> str:
+    """
+    Encodes a `datetime` object to an ISO-formatted timestamp string.
+
+    By default, Pydantic v2 serializes `datetime` objects in a format that
+    cannot be parsed by `datetime.fromisoformat`. Adding this encoder to the
+    `json_encoders` config for a Pydantic model ensures that the serialized
+    `datetime` objects are parseable.
+    """
+    return dt.isoformat()
+
+
+class PydanticMajorVersion(Enum):
+    """
+    The major version of `pydantic`.
+    """
+
+    V1 = "v1"
+    V2 = "v2"
+
+
+def get_pydantic_major_version() -> PydanticMajorVersion:
+    """
+    Returns the major version of `pydantic` or raises an error if `pydantic` is
+    not installed.
+    """
+    try:
+        pydantic_version = version("pydantic")
+    except PackageNotFoundError:
+        raise RuntimeError("Please install pydantic with `pip install pydantic`.")
+    if pydantic_version.startswith("1"):
+        return PydanticMajorVersion.V1
+    elif pydantic_version.startswith("2"):
+        return PydanticMajorVersion.V2
+    raise ValueError(f"Unsupported Pydantic version: {pydantic_version}")
+
+
+if (pydantic_major_version := get_pydantic_major_version()) is PydanticMajorVersion.V1:
+
+    class V1RoutesBaseModel(BaseModel):
+        class Config:
+            json_encoders = {datetime: datetime_encoder}
+
+elif pydantic_major_version is PydanticMajorVersion.V2:
+    from pydantic import ConfigDict
+
+    # `json_encoders` is a configuration setting from Pydantic v1 that was
+    # removed in Pydantic v2.0.* but restored in Pydantic v2.1.0 with a
+    # deprecation warning. At this time, it remains the simplest way to
+    # configure custom JSON serialization for specific data types in a manner
+    # that is consistent between Pydantic v1 and v2.
+    #
+    # For details, see:
+    # - https://github.com/pydantic/pydantic/pull/6811
+    # - https://github.com/pydantic/pydantic/releases/tag/v2.1.0
+    #
+    # The assertion below is added in case a future release of Pydantic v2 fully
+    # removes the `json_encoders` parameter.
+    assert "json_encoders" in ConfigDict.__annotations__, (
+        "If you encounter this error with `pydantic==2.0.*`, "
+        "please upgrade `pydantic` with `pip install -U pydantic>=2.1.0`. "
+        "If you encounter this error with `pydantic>=2.1.0`, "
+        "please upgrade `arize-phoenix` with `pip install -U arize-phoenix`, "
+        "or downgrade `pydantic` to a version that supports the `json_encoders` config setting."
+    )
+
+    class V1RoutesBaseModel(BaseModel):  # type: ignore[no-redef]
+        model_config = ConfigDict({"json_encoders": {datetime: datetime_encoder}})
+else:
+    assert_never(pydantic_major_version)