arize-phoenix 11.23.1__py3-none-any.whl → 12.28.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/METADATA +61 -36
- {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/RECORD +212 -162
- {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/WHEEL +1 -1
- {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/licenses/IP_NOTICE +1 -1
- phoenix/__generated__/__init__.py +0 -0
- phoenix/__generated__/classification_evaluator_configs/__init__.py +20 -0
- phoenix/__generated__/classification_evaluator_configs/_document_relevance_classification_evaluator_config.py +17 -0
- phoenix/__generated__/classification_evaluator_configs/_hallucination_classification_evaluator_config.py +17 -0
- phoenix/__generated__/classification_evaluator_configs/_models.py +18 -0
- phoenix/__generated__/classification_evaluator_configs/_tool_selection_classification_evaluator_config.py +17 -0
- phoenix/__init__.py +2 -1
- phoenix/auth.py +27 -2
- phoenix/config.py +1594 -81
- phoenix/db/README.md +546 -28
- phoenix/db/bulk_inserter.py +119 -116
- phoenix/db/engines.py +140 -33
- phoenix/db/facilitator.py +22 -1
- phoenix/db/helpers.py +818 -65
- phoenix/db/iam_auth.py +64 -0
- phoenix/db/insertion/dataset.py +133 -1
- phoenix/db/insertion/document_annotation.py +9 -6
- phoenix/db/insertion/evaluation.py +2 -3
- phoenix/db/insertion/helpers.py +2 -2
- phoenix/db/insertion/session_annotation.py +176 -0
- phoenix/db/insertion/span_annotation.py +3 -4
- phoenix/db/insertion/trace_annotation.py +3 -4
- phoenix/db/insertion/types.py +41 -18
- phoenix/db/migrations/versions/01a8342c9cdf_add_user_id_on_datasets.py +40 -0
- phoenix/db/migrations/versions/0df286449799_add_session_annotations_table.py +105 -0
- phoenix/db/migrations/versions/272b66ff50f8_drop_single_indices.py +119 -0
- phoenix/db/migrations/versions/58228d933c91_dataset_labels.py +67 -0
- phoenix/db/migrations/versions/699f655af132_experiment_tags.py +57 -0
- phoenix/db/migrations/versions/735d3d93c33e_add_composite_indices.py +41 -0
- phoenix/db/migrations/versions/ab513d89518b_add_user_id_on_dataset_versions.py +40 -0
- phoenix/db/migrations/versions/d0690a79ea51_users_on_experiments.py +40 -0
- phoenix/db/migrations/versions/deb2c81c0bb2_dataset_splits.py +139 -0
- phoenix/db/migrations/versions/e76cbd66ffc3_add_experiments_dataset_examples.py +87 -0
- phoenix/db/models.py +364 -56
- phoenix/db/pg_config.py +10 -0
- phoenix/db/types/trace_retention.py +7 -6
- phoenix/experiments/functions.py +69 -19
- phoenix/inferences/inferences.py +1 -2
- phoenix/server/api/auth.py +9 -0
- phoenix/server/api/auth_messages.py +46 -0
- phoenix/server/api/context.py +60 -0
- phoenix/server/api/dataloaders/__init__.py +36 -0
- phoenix/server/api/dataloaders/annotation_summaries.py +60 -8
- phoenix/server/api/dataloaders/average_experiment_repeated_run_group_latency.py +50 -0
- phoenix/server/api/dataloaders/average_experiment_run_latency.py +17 -24
- phoenix/server/api/dataloaders/cache/two_tier_cache.py +1 -2
- phoenix/server/api/dataloaders/dataset_dataset_splits.py +52 -0
- phoenix/server/api/dataloaders/dataset_example_revisions.py +0 -1
- phoenix/server/api/dataloaders/dataset_example_splits.py +40 -0
- phoenix/server/api/dataloaders/dataset_examples_and_versions_by_experiment_run.py +47 -0
- phoenix/server/api/dataloaders/dataset_labels.py +36 -0
- phoenix/server/api/dataloaders/document_evaluation_summaries.py +2 -2
- phoenix/server/api/dataloaders/document_evaluations.py +6 -9
- phoenix/server/api/dataloaders/experiment_annotation_summaries.py +88 -34
- phoenix/server/api/dataloaders/experiment_dataset_splits.py +43 -0
- phoenix/server/api/dataloaders/experiment_error_rates.py +21 -28
- phoenix/server/api/dataloaders/experiment_repeated_run_group_annotation_summaries.py +77 -0
- phoenix/server/api/dataloaders/experiment_repeated_run_groups.py +57 -0
- phoenix/server/api/dataloaders/experiment_runs_by_experiment_and_example.py +44 -0
- phoenix/server/api/dataloaders/latency_ms_quantile.py +40 -8
- phoenix/server/api/dataloaders/record_counts.py +37 -10
- phoenix/server/api/dataloaders/session_annotations_by_session.py +29 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_experiment_repeated_run_group.py +64 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_project.py +28 -14
- phoenix/server/api/dataloaders/span_costs.py +3 -9
- phoenix/server/api/dataloaders/table_fields.py +2 -2
- phoenix/server/api/dataloaders/token_prices_by_model.py +30 -0
- phoenix/server/api/dataloaders/trace_annotations_by_trace.py +27 -0
- phoenix/server/api/exceptions.py +5 -1
- phoenix/server/api/helpers/playground_clients.py +263 -83
- phoenix/server/api/helpers/playground_spans.py +2 -1
- phoenix/server/api/helpers/playground_users.py +26 -0
- phoenix/server/api/helpers/prompts/conversions/google.py +103 -0
- phoenix/server/api/helpers/prompts/models.py +61 -19
- phoenix/server/api/input_types/{SpanAnnotationFilter.py → AnnotationFilter.py} +22 -14
- phoenix/server/api/input_types/ChatCompletionInput.py +3 -0
- phoenix/server/api/input_types/CreateProjectSessionAnnotationInput.py +37 -0
- phoenix/server/api/input_types/DatasetFilter.py +5 -2
- phoenix/server/api/input_types/ExperimentRunSort.py +237 -0
- phoenix/server/api/input_types/GenerativeModelInput.py +3 -0
- phoenix/server/api/input_types/ProjectSessionSort.py +158 -1
- phoenix/server/api/input_types/PromptVersionInput.py +47 -1
- phoenix/server/api/input_types/SpanSort.py +3 -2
- phoenix/server/api/input_types/UpdateAnnotationInput.py +34 -0
- phoenix/server/api/input_types/UserRoleInput.py +1 -0
- phoenix/server/api/mutations/__init__.py +8 -0
- phoenix/server/api/mutations/annotation_config_mutations.py +8 -8
- phoenix/server/api/mutations/api_key_mutations.py +15 -20
- phoenix/server/api/mutations/chat_mutations.py +106 -37
- phoenix/server/api/mutations/dataset_label_mutations.py +243 -0
- phoenix/server/api/mutations/dataset_mutations.py +21 -16
- phoenix/server/api/mutations/dataset_split_mutations.py +351 -0
- phoenix/server/api/mutations/experiment_mutations.py +2 -2
- phoenix/server/api/mutations/export_events_mutations.py +3 -3
- phoenix/server/api/mutations/model_mutations.py +11 -9
- phoenix/server/api/mutations/project_mutations.py +4 -4
- phoenix/server/api/mutations/project_session_annotations_mutations.py +158 -0
- phoenix/server/api/mutations/project_trace_retention_policy_mutations.py +8 -4
- phoenix/server/api/mutations/prompt_label_mutations.py +74 -65
- phoenix/server/api/mutations/prompt_mutations.py +65 -129
- phoenix/server/api/mutations/prompt_version_tag_mutations.py +11 -8
- phoenix/server/api/mutations/span_annotations_mutations.py +15 -10
- phoenix/server/api/mutations/trace_annotations_mutations.py +13 -8
- phoenix/server/api/mutations/trace_mutations.py +3 -3
- phoenix/server/api/mutations/user_mutations.py +55 -26
- phoenix/server/api/queries.py +501 -617
- phoenix/server/api/routers/__init__.py +2 -2
- phoenix/server/api/routers/auth.py +141 -87
- phoenix/server/api/routers/ldap.py +229 -0
- phoenix/server/api/routers/oauth2.py +349 -101
- phoenix/server/api/routers/v1/__init__.py +22 -4
- phoenix/server/api/routers/v1/annotation_configs.py +19 -30
- phoenix/server/api/routers/v1/annotations.py +455 -13
- phoenix/server/api/routers/v1/datasets.py +355 -68
- phoenix/server/api/routers/v1/documents.py +142 -0
- phoenix/server/api/routers/v1/evaluations.py +20 -28
- phoenix/server/api/routers/v1/experiment_evaluations.py +16 -6
- phoenix/server/api/routers/v1/experiment_runs.py +335 -59
- phoenix/server/api/routers/v1/experiments.py +475 -47
- phoenix/server/api/routers/v1/projects.py +16 -50
- phoenix/server/api/routers/v1/prompts.py +50 -39
- phoenix/server/api/routers/v1/sessions.py +108 -0
- phoenix/server/api/routers/v1/spans.py +156 -96
- phoenix/server/api/routers/v1/traces.py +51 -77
- phoenix/server/api/routers/v1/users.py +64 -24
- phoenix/server/api/routers/v1/utils.py +3 -7
- phoenix/server/api/subscriptions.py +257 -93
- phoenix/server/api/types/Annotation.py +90 -23
- phoenix/server/api/types/ApiKey.py +13 -17
- phoenix/server/api/types/AuthMethod.py +1 -0
- phoenix/server/api/types/ChatCompletionSubscriptionPayload.py +1 -0
- phoenix/server/api/types/Dataset.py +199 -72
- phoenix/server/api/types/DatasetExample.py +88 -18
- phoenix/server/api/types/DatasetExperimentAnnotationSummary.py +10 -0
- phoenix/server/api/types/DatasetLabel.py +57 -0
- phoenix/server/api/types/DatasetSplit.py +98 -0
- phoenix/server/api/types/DatasetVersion.py +49 -4
- phoenix/server/api/types/DocumentAnnotation.py +212 -0
- phoenix/server/api/types/Experiment.py +215 -68
- phoenix/server/api/types/ExperimentComparison.py +3 -9
- phoenix/server/api/types/ExperimentRepeatedRunGroup.py +155 -0
- phoenix/server/api/types/ExperimentRepeatedRunGroupAnnotationSummary.py +9 -0
- phoenix/server/api/types/ExperimentRun.py +120 -70
- phoenix/server/api/types/ExperimentRunAnnotation.py +158 -39
- phoenix/server/api/types/GenerativeModel.py +95 -42
- phoenix/server/api/types/GenerativeProvider.py +1 -1
- phoenix/server/api/types/ModelInterface.py +7 -2
- phoenix/server/api/types/PlaygroundModel.py +12 -2
- phoenix/server/api/types/Project.py +218 -185
- phoenix/server/api/types/ProjectSession.py +146 -29
- phoenix/server/api/types/ProjectSessionAnnotation.py +187 -0
- phoenix/server/api/types/ProjectTraceRetentionPolicy.py +1 -1
- phoenix/server/api/types/Prompt.py +119 -39
- phoenix/server/api/types/PromptLabel.py +42 -25
- phoenix/server/api/types/PromptVersion.py +11 -8
- phoenix/server/api/types/PromptVersionTag.py +65 -25
- phoenix/server/api/types/Span.py +130 -123
- phoenix/server/api/types/SpanAnnotation.py +189 -42
- phoenix/server/api/types/SystemApiKey.py +65 -1
- phoenix/server/api/types/Trace.py +184 -53
- phoenix/server/api/types/TraceAnnotation.py +149 -50
- phoenix/server/api/types/User.py +128 -33
- phoenix/server/api/types/UserApiKey.py +73 -26
- phoenix/server/api/types/node.py +10 -0
- phoenix/server/api/types/pagination.py +11 -2
- phoenix/server/app.py +154 -36
- phoenix/server/authorization.py +5 -4
- phoenix/server/bearer_auth.py +13 -5
- phoenix/server/cost_tracking/cost_model_lookup.py +42 -14
- phoenix/server/cost_tracking/model_cost_manifest.json +1085 -194
- phoenix/server/daemons/generative_model_store.py +61 -9
- phoenix/server/daemons/span_cost_calculator.py +10 -8
- phoenix/server/dml_event.py +13 -0
- phoenix/server/email/sender.py +29 -2
- phoenix/server/grpc_server.py +9 -9
- phoenix/server/jwt_store.py +8 -6
- phoenix/server/ldap.py +1449 -0
- phoenix/server/main.py +9 -3
- phoenix/server/oauth2.py +330 -12
- phoenix/server/prometheus.py +43 -6
- phoenix/server/rate_limiters.py +4 -9
- phoenix/server/retention.py +33 -20
- phoenix/server/session_filters.py +49 -0
- phoenix/server/static/.vite/manifest.json +51 -53
- phoenix/server/static/assets/components-BreFUQQa.js +6702 -0
- phoenix/server/static/assets/{index-BPCwGQr8.js → index-CTQoemZv.js} +42 -35
- phoenix/server/static/assets/pages-DBE5iYM3.js +9524 -0
- phoenix/server/static/assets/vendor-BGzfc4EU.css +1 -0
- phoenix/server/static/assets/vendor-DCE4v-Ot.js +920 -0
- phoenix/server/static/assets/vendor-codemirror-D5f205eT.js +25 -0
- phoenix/server/static/assets/{vendor-recharts-Bw30oz1A.js → vendor-recharts-V9cwpXsm.js} +7 -7
- phoenix/server/static/assets/{vendor-shiki-DZajAPeq.js → vendor-shiki-Do--csgv.js} +1 -1
- phoenix/server/static/assets/vendor-three-CmB8bl_y.js +3840 -0
- phoenix/server/templates/index.html +7 -1
- phoenix/server/thread_server.py +1 -2
- phoenix/server/utils.py +74 -0
- phoenix/session/client.py +55 -1
- phoenix/session/data_extractor.py +5 -0
- phoenix/session/evaluation.py +8 -4
- phoenix/session/session.py +44 -8
- phoenix/settings.py +2 -0
- phoenix/trace/attributes.py +80 -13
- phoenix/trace/dsl/query.py +2 -0
- phoenix/trace/projects.py +5 -0
- phoenix/utilities/template_formatters.py +1 -1
- phoenix/version.py +1 -1
- phoenix/server/api/types/Evaluation.py +0 -39
- phoenix/server/static/assets/components-D0DWAf0l.js +0 -5650
- phoenix/server/static/assets/pages-Creyamao.js +0 -8612
- phoenix/server/static/assets/vendor-CU36oj8y.js +0 -905
- phoenix/server/static/assets/vendor-CqDb5u4o.css +0 -1
- phoenix/server/static/assets/vendor-arizeai-Ctgw0e1G.js +0 -168
- phoenix/server/static/assets/vendor-codemirror-Cojjzqb9.js +0 -25
- phoenix/server/static/assets/vendor-three-BLWp5bic.js +0 -2998
- phoenix/utilities/deprecation.py +0 -31
- {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/entry_points.txt +0 -0
- {arize_phoenix-11.23.1.dist-info → arize_phoenix-12.28.1.dist-info}/licenses/LICENSE +0 -0
|
@@ -18,7 +18,7 @@ from strawberry.types import Info
|
|
|
18
18
|
|
|
19
19
|
from phoenix.db import models
|
|
20
20
|
from phoenix.db.helpers import get_eval_trace_ids_for_datasets, get_project_names_for_datasets
|
|
21
|
-
from phoenix.server.api.auth import IsLocked, IsNotReadOnly
|
|
21
|
+
from phoenix.server.api.auth import IsLocked, IsNotReadOnly, IsNotViewer
|
|
22
22
|
from phoenix.server.api.context import Context
|
|
23
23
|
from phoenix.server.api.exceptions import BadRequest, NotFound
|
|
24
24
|
from phoenix.server.api.helpers.dataset_helpers import (
|
|
@@ -35,7 +35,7 @@ from phoenix.server.api.input_types.PatchDatasetExamplesInput import (
|
|
|
35
35
|
PatchDatasetExamplesInput,
|
|
36
36
|
)
|
|
37
37
|
from phoenix.server.api.input_types.PatchDatasetInput import PatchDatasetInput
|
|
38
|
-
from phoenix.server.api.types.Dataset import Dataset
|
|
38
|
+
from phoenix.server.api.types.Dataset import Dataset
|
|
39
39
|
from phoenix.server.api.types.DatasetExample import DatasetExample
|
|
40
40
|
from phoenix.server.api.types.node import from_global_id_with_expected_type
|
|
41
41
|
from phoenix.server.api.types.Span import Span
|
|
@@ -50,7 +50,7 @@ class DatasetMutationPayload:
|
|
|
50
50
|
|
|
51
51
|
@strawberry.type
|
|
52
52
|
class DatasetMutationMixin:
|
|
53
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsLocked]) # type: ignore
|
|
53
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer, IsLocked]) # type: ignore
|
|
54
54
|
async def create_dataset(
|
|
55
55
|
self,
|
|
56
56
|
info: Info[Context, None],
|
|
@@ -66,14 +66,15 @@ class DatasetMutationMixin:
|
|
|
66
66
|
name=name,
|
|
67
67
|
description=description,
|
|
68
68
|
metadata_=metadata,
|
|
69
|
+
user_id=info.context.user_id,
|
|
69
70
|
)
|
|
70
71
|
.returning(models.Dataset)
|
|
71
72
|
)
|
|
72
73
|
assert dataset is not None
|
|
73
74
|
info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
|
|
74
|
-
return DatasetMutationPayload(dataset=
|
|
75
|
+
return DatasetMutationPayload(dataset=Dataset(id=dataset.id, db_record=dataset))
|
|
75
76
|
|
|
76
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsLocked]) # type: ignore
|
|
77
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer, IsLocked]) # type: ignore
|
|
77
78
|
async def patch_dataset(
|
|
78
79
|
self,
|
|
79
80
|
info: Info[Context, None],
|
|
@@ -100,9 +101,9 @@ class DatasetMutationMixin:
|
|
|
100
101
|
)
|
|
101
102
|
assert dataset is not None
|
|
102
103
|
info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
|
|
103
|
-
return DatasetMutationPayload(dataset=
|
|
104
|
+
return DatasetMutationPayload(dataset=Dataset(id=dataset.id, db_record=dataset))
|
|
104
105
|
|
|
105
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsLocked]) # type: ignore
|
|
106
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer, IsLocked]) # type: ignore
|
|
106
107
|
async def add_spans_to_dataset(
|
|
107
108
|
self,
|
|
108
109
|
info: Info[Context, None],
|
|
@@ -136,6 +137,7 @@ class DatasetMutationMixin:
|
|
|
136
137
|
dataset_id=dataset_rowid,
|
|
137
138
|
description=dataset_version_description,
|
|
138
139
|
metadata_=dataset_version_metadata or {},
|
|
140
|
+
user_id=info.context.user_id,
|
|
139
141
|
)
|
|
140
142
|
session.add(dataset_version)
|
|
141
143
|
await session.flush()
|
|
@@ -219,9 +221,9 @@ class DatasetMutationMixin:
|
|
|
219
221
|
],
|
|
220
222
|
)
|
|
221
223
|
info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
|
|
222
|
-
return DatasetMutationPayload(dataset=
|
|
224
|
+
return DatasetMutationPayload(dataset=Dataset(id=dataset.id, db_record=dataset))
|
|
223
225
|
|
|
224
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsLocked]) # type: ignore
|
|
226
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer, IsLocked]) # type: ignore
|
|
225
227
|
async def add_examples_to_dataset(
|
|
226
228
|
self, info: Info[Context, None], input: AddExamplesToDatasetInput
|
|
227
229
|
) -> DatasetMutationPayload:
|
|
@@ -254,6 +256,7 @@ class DatasetMutationMixin:
|
|
|
254
256
|
dataset_id=dataset_rowid,
|
|
255
257
|
description=dataset_version_description,
|
|
256
258
|
metadata_=dataset_version_metadata,
|
|
259
|
+
user_id=info.context.user_id,
|
|
257
260
|
)
|
|
258
261
|
.returning(models.DatasetVersion.id)
|
|
259
262
|
)
|
|
@@ -345,9 +348,9 @@ class DatasetMutationMixin:
|
|
|
345
348
|
dataset_example_revisions,
|
|
346
349
|
)
|
|
347
350
|
info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
|
|
348
|
-
return DatasetMutationPayload(dataset=
|
|
351
|
+
return DatasetMutationPayload(dataset=Dataset(id=dataset.id, db_record=dataset))
|
|
349
352
|
|
|
350
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore
|
|
353
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer]) # type: ignore
|
|
351
354
|
async def delete_dataset(
|
|
352
355
|
self,
|
|
353
356
|
info: Info[Context, None],
|
|
@@ -376,9 +379,9 @@ class DatasetMutationMixin:
|
|
|
376
379
|
return_exceptions=True,
|
|
377
380
|
)
|
|
378
381
|
info.context.event_queue.put(DatasetDeleteEvent((dataset.id,)))
|
|
379
|
-
return DatasetMutationPayload(dataset=
|
|
382
|
+
return DatasetMutationPayload(dataset=Dataset(id=dataset.id, db_record=dataset))
|
|
380
383
|
|
|
381
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsLocked]) # type: ignore
|
|
384
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer, IsLocked]) # type: ignore
|
|
382
385
|
async def patch_dataset_examples(
|
|
383
386
|
self,
|
|
384
387
|
info: Info[Context, None],
|
|
@@ -451,6 +454,7 @@ class DatasetMutationMixin:
|
|
|
451
454
|
dataset_id=dataset.id,
|
|
452
455
|
description=version_description,
|
|
453
456
|
metadata_=version_metadata,
|
|
457
|
+
user_id=info.context.user_id,
|
|
454
458
|
)
|
|
455
459
|
)
|
|
456
460
|
assert version_id is not None
|
|
@@ -468,9 +472,9 @@ class DatasetMutationMixin:
|
|
|
468
472
|
],
|
|
469
473
|
)
|
|
470
474
|
info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
|
|
471
|
-
return DatasetMutationPayload(dataset=
|
|
475
|
+
return DatasetMutationPayload(dataset=Dataset(id=dataset.id, db_record=dataset))
|
|
472
476
|
|
|
473
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsLocked]) # type: ignore
|
|
477
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer, IsLocked]) # type: ignore
|
|
474
478
|
async def delete_dataset_examples(
|
|
475
479
|
self, info: Info[Context, None], input: DeleteDatasetExamplesInput
|
|
476
480
|
) -> DatasetMutationPayload:
|
|
@@ -514,6 +518,7 @@ class DatasetMutationMixin:
|
|
|
514
518
|
dataset_id=dataset.id,
|
|
515
519
|
description=dataset_version_description,
|
|
516
520
|
metadata_=dataset_version_metadata,
|
|
521
|
+
user_id=info.context.user_id,
|
|
517
522
|
created_at=timestamp,
|
|
518
523
|
)
|
|
519
524
|
.returning(models.DatasetVersion.id)
|
|
@@ -551,7 +556,7 @@ class DatasetMutationMixin:
|
|
|
551
556
|
],
|
|
552
557
|
)
|
|
553
558
|
info.context.event_queue.put(DatasetInsertEvent((dataset.id,)))
|
|
554
|
-
return DatasetMutationPayload(dataset=
|
|
559
|
+
return DatasetMutationPayload(dataset=Dataset(id=dataset.id, db_record=dataset))
|
|
555
560
|
|
|
556
561
|
|
|
557
562
|
def _span_attribute(semconv: str) -> Any:
|
|
@@ -0,0 +1,351 @@
|
|
|
1
|
+
from typing import Optional
|
|
2
|
+
|
|
3
|
+
import strawberry
|
|
4
|
+
from sqlalchemy import delete, func, insert, select, tuple_
|
|
5
|
+
from sqlalchemy.exc import IntegrityError as PostgreSQLIntegrityError
|
|
6
|
+
from sqlalchemy.orm import joinedload
|
|
7
|
+
from sqlean.dbapi2 import IntegrityError as SQLiteIntegrityError # type: ignore[import-untyped]
|
|
8
|
+
from strawberry import UNSET
|
|
9
|
+
from strawberry.relay import GlobalID
|
|
10
|
+
from strawberry.scalars import JSON
|
|
11
|
+
from strawberry.types import Info
|
|
12
|
+
|
|
13
|
+
from phoenix.db import models
|
|
14
|
+
from phoenix.server.api.auth import IsLocked, IsNotReadOnly, IsNotViewer
|
|
15
|
+
from phoenix.server.api.context import Context
|
|
16
|
+
from phoenix.server.api.exceptions import BadRequest, Conflict, NotFound
|
|
17
|
+
from phoenix.server.api.helpers.playground_users import get_user
|
|
18
|
+
from phoenix.server.api.queries import Query
|
|
19
|
+
from phoenix.server.api.types.DatasetExample import DatasetExample
|
|
20
|
+
from phoenix.server.api.types.DatasetSplit import DatasetSplit
|
|
21
|
+
from phoenix.server.api.types.node import from_global_id_with_expected_type
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@strawberry.input
|
|
25
|
+
class CreateDatasetSplitInput:
|
|
26
|
+
name: str
|
|
27
|
+
description: Optional[str] = UNSET
|
|
28
|
+
color: str
|
|
29
|
+
metadata: Optional[JSON] = UNSET
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@strawberry.input
|
|
33
|
+
class PatchDatasetSplitInput:
|
|
34
|
+
dataset_split_id: GlobalID
|
|
35
|
+
name: Optional[str] = UNSET
|
|
36
|
+
description: Optional[str] = UNSET
|
|
37
|
+
color: Optional[str] = UNSET
|
|
38
|
+
metadata: Optional[JSON] = UNSET
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@strawberry.input
|
|
42
|
+
class DeleteDatasetSplitInput:
|
|
43
|
+
dataset_split_ids: list[GlobalID]
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
@strawberry.input
|
|
47
|
+
class SetDatasetExampleSplitsInput:
|
|
48
|
+
example_id: GlobalID
|
|
49
|
+
dataset_split_ids: list[GlobalID]
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
@strawberry.input
|
|
53
|
+
class CreateDatasetSplitWithExamplesInput:
|
|
54
|
+
name: str
|
|
55
|
+
description: Optional[str] = UNSET
|
|
56
|
+
color: str
|
|
57
|
+
metadata: Optional[JSON] = UNSET
|
|
58
|
+
example_ids: list[GlobalID]
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@strawberry.type
|
|
62
|
+
class DatasetSplitMutationPayload:
|
|
63
|
+
dataset_split: DatasetSplit
|
|
64
|
+
query: "Query"
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
@strawberry.type
|
|
68
|
+
class DatasetSplitMutationPayloadWithExamples:
|
|
69
|
+
dataset_split: DatasetSplit
|
|
70
|
+
query: "Query"
|
|
71
|
+
examples: list[DatasetExample]
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@strawberry.type
|
|
75
|
+
class DeleteDatasetSplitsMutationPayload:
|
|
76
|
+
dataset_splits: list[DatasetSplit]
|
|
77
|
+
query: "Query"
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
@strawberry.type
|
|
81
|
+
class SetDatasetExampleSplitsMutationPayload:
|
|
82
|
+
query: "Query"
|
|
83
|
+
example: DatasetExample
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
@strawberry.type
|
|
87
|
+
class DatasetSplitMutationMixin:
|
|
88
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer, IsLocked]) # type: ignore
|
|
89
|
+
async def create_dataset_split(
|
|
90
|
+
self, info: Info[Context, None], input: CreateDatasetSplitInput
|
|
91
|
+
) -> DatasetSplitMutationPayload:
|
|
92
|
+
user_id = get_user(info)
|
|
93
|
+
validated_name = _validated_name(input.name)
|
|
94
|
+
async with info.context.db() as session:
|
|
95
|
+
dataset_split_orm = models.DatasetSplit(
|
|
96
|
+
name=validated_name,
|
|
97
|
+
description=input.description,
|
|
98
|
+
color=input.color,
|
|
99
|
+
metadata_=input.metadata or {},
|
|
100
|
+
user_id=user_id,
|
|
101
|
+
)
|
|
102
|
+
session.add(dataset_split_orm)
|
|
103
|
+
try:
|
|
104
|
+
await session.commit()
|
|
105
|
+
except (PostgreSQLIntegrityError, SQLiteIntegrityError):
|
|
106
|
+
raise Conflict(f"A dataset split named '{input.name}' already exists.")
|
|
107
|
+
return DatasetSplitMutationPayload(
|
|
108
|
+
dataset_split=DatasetSplit(id=dataset_split_orm.id, db_record=dataset_split_orm),
|
|
109
|
+
query=Query(),
|
|
110
|
+
)
|
|
111
|
+
|
|
112
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer, IsLocked]) # type: ignore
|
|
113
|
+
async def patch_dataset_split(
|
|
114
|
+
self, info: Info[Context, None], input: PatchDatasetSplitInput
|
|
115
|
+
) -> DatasetSplitMutationPayload:
|
|
116
|
+
validated_name = _validated_name(input.name) if input.name else None
|
|
117
|
+
async with info.context.db() as session:
|
|
118
|
+
dataset_split_id = from_global_id_with_expected_type(
|
|
119
|
+
input.dataset_split_id, DatasetSplit.__name__
|
|
120
|
+
)
|
|
121
|
+
dataset_split_orm = await session.get(models.DatasetSplit, dataset_split_id)
|
|
122
|
+
if not dataset_split_orm:
|
|
123
|
+
raise NotFound(f"Dataset split with ID {input.dataset_split_id} not found")
|
|
124
|
+
|
|
125
|
+
if validated_name:
|
|
126
|
+
dataset_split_orm.name = validated_name
|
|
127
|
+
if input.description:
|
|
128
|
+
dataset_split_orm.description = input.description
|
|
129
|
+
if input.color:
|
|
130
|
+
dataset_split_orm.color = input.color
|
|
131
|
+
if isinstance(input.metadata, dict):
|
|
132
|
+
dataset_split_orm.metadata_ = input.metadata
|
|
133
|
+
|
|
134
|
+
gql_dataset_split = DatasetSplit(id=dataset_split_orm.id, db_record=dataset_split_orm)
|
|
135
|
+
try:
|
|
136
|
+
await session.commit()
|
|
137
|
+
except (PostgreSQLIntegrityError, SQLiteIntegrityError):
|
|
138
|
+
raise Conflict("A dataset split with this name already exists")
|
|
139
|
+
|
|
140
|
+
return DatasetSplitMutationPayload(
|
|
141
|
+
dataset_split=gql_dataset_split,
|
|
142
|
+
query=Query(),
|
|
143
|
+
)
|
|
144
|
+
|
|
145
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer]) # type: ignore
|
|
146
|
+
async def delete_dataset_splits(
|
|
147
|
+
self, info: Info[Context, None], input: DeleteDatasetSplitInput
|
|
148
|
+
) -> DeleteDatasetSplitsMutationPayload:
|
|
149
|
+
unique_dataset_split_rowids: dict[int, None] = {} # use a dict to preserve ordering
|
|
150
|
+
for dataset_split_gid in input.dataset_split_ids:
|
|
151
|
+
try:
|
|
152
|
+
dataset_split_rowid = from_global_id_with_expected_type(
|
|
153
|
+
dataset_split_gid, DatasetSplit.__name__
|
|
154
|
+
)
|
|
155
|
+
except ValueError:
|
|
156
|
+
raise BadRequest(f"Invalid dataset split ID: {dataset_split_gid}")
|
|
157
|
+
unique_dataset_split_rowids[dataset_split_rowid] = None
|
|
158
|
+
dataset_split_rowids = list(unique_dataset_split_rowids.keys())
|
|
159
|
+
|
|
160
|
+
async with info.context.db() as session:
|
|
161
|
+
deleted_splits_by_id = {
|
|
162
|
+
split.id: split
|
|
163
|
+
for split in (
|
|
164
|
+
await session.scalars(
|
|
165
|
+
delete(models.DatasetSplit)
|
|
166
|
+
.where(models.DatasetSplit.id.in_(dataset_split_rowids))
|
|
167
|
+
.returning(models.DatasetSplit)
|
|
168
|
+
)
|
|
169
|
+
).all()
|
|
170
|
+
}
|
|
171
|
+
if len(deleted_splits_by_id) < len(dataset_split_rowids):
|
|
172
|
+
await session.rollback()
|
|
173
|
+
raise NotFound("One or more dataset splits not found")
|
|
174
|
+
await session.commit()
|
|
175
|
+
|
|
176
|
+
return DeleteDatasetSplitsMutationPayload(
|
|
177
|
+
dataset_splits=[
|
|
178
|
+
DatasetSplit(
|
|
179
|
+
id=deleted_splits_by_id[dataset_split_rowid].id,
|
|
180
|
+
db_record=deleted_splits_by_id[dataset_split_rowid],
|
|
181
|
+
)
|
|
182
|
+
for dataset_split_rowid in dataset_split_rowids
|
|
183
|
+
],
|
|
184
|
+
query=Query(),
|
|
185
|
+
)
|
|
186
|
+
|
|
187
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer, IsLocked]) # type: ignore
|
|
188
|
+
async def set_dataset_example_splits(
|
|
189
|
+
self, info: Info[Context, None], input: SetDatasetExampleSplitsInput
|
|
190
|
+
) -> SetDatasetExampleSplitsMutationPayload:
|
|
191
|
+
try:
|
|
192
|
+
example_id = from_global_id_with_expected_type(
|
|
193
|
+
input.example_id, models.DatasetExample.__name__
|
|
194
|
+
)
|
|
195
|
+
except ValueError:
|
|
196
|
+
raise BadRequest(f"Invalid example ID: {input.example_id}")
|
|
197
|
+
|
|
198
|
+
dataset_split_ids: dict[
|
|
199
|
+
int, None
|
|
200
|
+
] = {} # use dictionary to de-duplicate while preserving order
|
|
201
|
+
for dataset_split_gid in input.dataset_split_ids:
|
|
202
|
+
try:
|
|
203
|
+
dataset_split_id = from_global_id_with_expected_type(
|
|
204
|
+
dataset_split_gid, DatasetSplit.__name__
|
|
205
|
+
)
|
|
206
|
+
except ValueError:
|
|
207
|
+
raise BadRequest(f"Invalid dataset split ID: {dataset_split_gid}")
|
|
208
|
+
dataset_split_ids[dataset_split_id] = None
|
|
209
|
+
|
|
210
|
+
async with info.context.db() as session:
|
|
211
|
+
example = await session.scalar(
|
|
212
|
+
select(models.DatasetExample)
|
|
213
|
+
.where(models.DatasetExample.id == example_id)
|
|
214
|
+
.options(joinedload(models.DatasetExample.dataset_splits_dataset_examples))
|
|
215
|
+
)
|
|
216
|
+
|
|
217
|
+
if not example:
|
|
218
|
+
raise NotFound(f"Example with ID {input.example_id} not found")
|
|
219
|
+
|
|
220
|
+
existing_split_ids = (
|
|
221
|
+
await session.scalars(
|
|
222
|
+
select(models.DatasetSplit.id).where(
|
|
223
|
+
models.DatasetSplit.id.in_(dataset_split_ids.keys())
|
|
224
|
+
)
|
|
225
|
+
)
|
|
226
|
+
).all()
|
|
227
|
+
if len(existing_split_ids) != len(dataset_split_ids):
|
|
228
|
+
raise NotFound("One or more dataset splits not found")
|
|
229
|
+
|
|
230
|
+
previously_applied_dataset_split_ids = {
|
|
231
|
+
dataset_split_dataset_example.dataset_split_id
|
|
232
|
+
for dataset_split_dataset_example in example.dataset_splits_dataset_examples
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
# Do deletes first, then adds to prevent duplicate key errors
|
|
236
|
+
dataset_splits_dataset_examples_to_delete = [
|
|
237
|
+
dataset_split_dataset_example
|
|
238
|
+
for dataset_split_dataset_example in example.dataset_splits_dataset_examples
|
|
239
|
+
if dataset_split_dataset_example.dataset_split_id not in dataset_split_ids
|
|
240
|
+
]
|
|
241
|
+
if dataset_splits_dataset_examples_to_delete:
|
|
242
|
+
delete_pairs = [
|
|
243
|
+
(
|
|
244
|
+
dataset_split_dataset_example.dataset_split_id,
|
|
245
|
+
dataset_split_dataset_example.dataset_example_id,
|
|
246
|
+
)
|
|
247
|
+
for dataset_split_dataset_example in dataset_splits_dataset_examples_to_delete
|
|
248
|
+
]
|
|
249
|
+
await session.execute(
|
|
250
|
+
delete(models.DatasetSplitDatasetExample).where(
|
|
251
|
+
tuple_(
|
|
252
|
+
models.DatasetSplitDatasetExample.dataset_split_id,
|
|
253
|
+
models.DatasetSplitDatasetExample.dataset_example_id,
|
|
254
|
+
).in_(delete_pairs)
|
|
255
|
+
)
|
|
256
|
+
)
|
|
257
|
+
await session.flush()
|
|
258
|
+
|
|
259
|
+
dataset_splits_dataset_examples_to_add = [
|
|
260
|
+
models.DatasetSplitDatasetExample(
|
|
261
|
+
dataset_example_id=example_id,
|
|
262
|
+
dataset_split_id=dataset_split_id,
|
|
263
|
+
)
|
|
264
|
+
for dataset_split_id in dataset_split_ids
|
|
265
|
+
if dataset_split_id not in previously_applied_dataset_split_ids
|
|
266
|
+
]
|
|
267
|
+
if dataset_splits_dataset_examples_to_add:
|
|
268
|
+
session.add_all(dataset_splits_dataset_examples_to_add)
|
|
269
|
+
await session.flush()
|
|
270
|
+
|
|
271
|
+
return SetDatasetExampleSplitsMutationPayload(
|
|
272
|
+
example=DatasetExample(id=example.id, db_record=example),
|
|
273
|
+
query=Query(),
|
|
274
|
+
)
|
|
275
|
+
|
|
276
|
+
    @strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer, IsLocked])  # type: ignore
    async def create_dataset_split_with_examples(
        self, info: Info[Context, None], input: CreateDatasetSplitWithExamplesInput
    ) -> DatasetSplitMutationPayloadWithExamples:
        """Create a new dataset split and associate it with the given dataset examples.

        Validates the split name and every example global ID up front, verifies all
        referenced examples exist, then inserts the split and the split/example
        association rows in a single transaction.

        Raises:
            BadRequest: if the name is empty/blank or any example ID is malformed.
            NotFound: if any referenced dataset example does not exist.
            Conflict: if a split with the same name already exists, or the
                example associations violate a database constraint.
        """
        user_id = get_user(info)
        validated_name = _validated_name(input.name)
        # Deduplicate example IDs so the bulk insert below cannot collide with itself.
        unique_example_rowids: set[int] = set()
        for example_gid in input.example_ids:
            try:
                example_rowid = from_global_id_with_expected_type(
                    example_gid, models.DatasetExample.__name__
                )
                unique_example_rowids.add(example_rowid)
            except ValueError:
                raise BadRequest(f"Invalid example ID: {example_gid}")
        example_rowids = list(unique_example_rowids)
        async with info.context.db() as session:
            if example_rowids:
                # A single COUNT(*) suffices to detect missing examples because the
                # IDs were deduplicated above: count < len(ids) implies at least one
                # referenced example row does not exist.
                found_count = await session.scalar(
                    select(func.count(models.DatasetExample.id)).where(
                        models.DatasetExample.id.in_(example_rowids)
                    )
                )
                if found_count is None or found_count < len(example_rowids):
                    raise NotFound("One or more dataset examples were not found.")

            dataset_split_orm = models.DatasetSplit(
                name=validated_name,
                # Normalize empty-string descriptions to NULL.
                description=input.description or None,
                color=input.color,
                metadata_=input.metadata or {},
                user_id=user_id,
            )
            session.add(dataset_split_orm)
            try:
                # Flush eagerly so a unique-name violation surfaces here (as a
                # backend-specific IntegrityError) rather than at commit time.
                await session.flush()
            except (PostgreSQLIntegrityError, SQLiteIntegrityError):
                raise Conflict(f"A dataset split named '{validated_name}' already exists.")

            if example_rowids:
                # Bulk-insert the association rows; .key yields the ORM attribute's
                # mapped column key for use in executemany-style parameter dicts.
                values = [
                    {
                        models.DatasetSplitDatasetExample.dataset_split_id.key: dataset_split_orm.id,  # noqa: E501
                        models.DatasetSplitDatasetExample.dataset_example_id.key: example_id,
                    }
                    for example_id in example_rowids
                ]
                try:
                    await session.execute(insert(models.DatasetSplitDatasetExample), values)
                except (PostgreSQLIntegrityError, SQLiteIntegrityError) as e:
                    # Roll back the transaction on association failure so the
                    # newly created split is not persisted without its examples.
                    await session.rollback()
                    raise Conflict(
                        "Failed to associate examples with the new dataset split."
                    ) from e

            # Re-fetch the ORM example rows for the payload. Returns an empty list
            # when no example IDs were supplied (IN () matches nothing).
            examples = (
                await session.scalars(
                    select(models.DatasetExample).where(
                        models.DatasetExample.id.in_(example_rowids)
                    )
                )
            ).all()

            # NOTE(review): diff rendering loses indentation — the return appears to
            # sit inside the session context so ORM attribute access stays bound to
            # the live session; confirm against the original file.
            return DatasetSplitMutationPayloadWithExamples(
                dataset_split=DatasetSplit(id=dataset_split_orm.id, db_record=dataset_split_orm),
                query=Query(),
                examples=[DatasetExample(id=example.id, db_record=example) for example in examples],
            )
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
def _validated_name(name: str) -> str:
|
|
348
|
+
validated_name = name.strip()
|
|
349
|
+
if not validated_name:
|
|
350
|
+
raise BadRequest("Name cannot be empty")
|
|
351
|
+
return validated_name
|
|
@@ -7,7 +7,7 @@ from strawberry.types import Info
|
|
|
7
7
|
|
|
8
8
|
from phoenix.db import models
|
|
9
9
|
from phoenix.db.helpers import get_eval_trace_ids_for_experiments, get_project_names_for_experiments
|
|
10
|
-
from phoenix.server.api.auth import IsNotReadOnly
|
|
10
|
+
from phoenix.server.api.auth import IsNotReadOnly, IsNotViewer
|
|
11
11
|
from phoenix.server.api.context import Context
|
|
12
12
|
from phoenix.server.api.exceptions import CustomGraphQLError
|
|
13
13
|
from phoenix.server.api.input_types.DeleteExperimentsInput import DeleteExperimentsInput
|
|
@@ -24,7 +24,7 @@ class ExperimentMutationPayload:
|
|
|
24
24
|
|
|
25
25
|
@strawberry.type
|
|
26
26
|
class ExperimentMutationMixin:
|
|
27
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore
|
|
27
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer]) # type: ignore
|
|
28
28
|
async def delete_experiments(
|
|
29
29
|
self,
|
|
30
30
|
info: Info[Context, None],
|
|
@@ -8,7 +8,7 @@ from strawberry import ID, UNSET
|
|
|
8
8
|
from strawberry.types import Info
|
|
9
9
|
|
|
10
10
|
import phoenix.core.model_schema as ms
|
|
11
|
-
from phoenix.server.api.auth import IsNotReadOnly
|
|
11
|
+
from phoenix.server.api.auth import IsNotReadOnly, IsNotViewer
|
|
12
12
|
from phoenix.server.api.context import Context
|
|
13
13
|
from phoenix.server.api.input_types.ClusterInput import ClusterInput
|
|
14
14
|
from phoenix.server.api.types.Event import parse_event_ids_by_inferences_role, unpack_event_id
|
|
@@ -19,7 +19,7 @@ from phoenix.server.api.types.InferencesRole import AncillaryInferencesRole, Inf
|
|
|
19
19
|
@strawberry.type
|
|
20
20
|
class ExportEventsMutationMixin:
|
|
21
21
|
@strawberry.mutation(
|
|
22
|
-
permission_classes=[IsNotReadOnly],
|
|
22
|
+
permission_classes=[IsNotReadOnly, IsNotViewer],
|
|
23
23
|
description=(
|
|
24
24
|
"Given a list of event ids, export the corresponding data subset in Parquet format."
|
|
25
25
|
" File name is optional, but if specified, should be without file extension. By default"
|
|
@@ -51,7 +51,7 @@ class ExportEventsMutationMixin:
|
|
|
51
51
|
return ExportedFile(file_name=file_name)
|
|
52
52
|
|
|
53
53
|
@strawberry.mutation(
|
|
54
|
-
permission_classes=[IsNotReadOnly],
|
|
54
|
+
permission_classes=[IsNotReadOnly, IsNotViewer],
|
|
55
55
|
description=(
|
|
56
56
|
"Given a list of clusters, export the corresponding data subset in Parquet format."
|
|
57
57
|
" File name is optional, but if specified, should be without file extension. By default"
|
|
@@ -12,11 +12,11 @@ from strawberry.relay import GlobalID
|
|
|
12
12
|
from strawberry.types import Info
|
|
13
13
|
|
|
14
14
|
from phoenix.db import models
|
|
15
|
-
from phoenix.server.api.auth import IsNotReadOnly
|
|
15
|
+
from phoenix.server.api.auth import IsNotReadOnly, IsNotViewer
|
|
16
16
|
from phoenix.server.api.context import Context
|
|
17
17
|
from phoenix.server.api.exceptions import BadRequest, Conflict, NotFound
|
|
18
18
|
from phoenix.server.api.queries import Query
|
|
19
|
-
from phoenix.server.api.types.GenerativeModel import GenerativeModel
|
|
19
|
+
from phoenix.server.api.types.GenerativeModel import GenerativeModel
|
|
20
20
|
from phoenix.server.api.types.node import from_global_id_with_expected_type
|
|
21
21
|
from phoenix.server.api.types.TokenPrice import TokenKind
|
|
22
22
|
|
|
@@ -81,7 +81,7 @@ class DeleteModelMutationPayload:
|
|
|
81
81
|
|
|
82
82
|
@strawberry.type
|
|
83
83
|
class ModelMutationMixin:
|
|
84
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore
|
|
84
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer]) # type: ignore
|
|
85
85
|
async def create_model(
|
|
86
86
|
self,
|
|
87
87
|
info: Info[Context, None],
|
|
@@ -110,11 +110,11 @@ class ModelMutationMixin:
|
|
|
110
110
|
raise Conflict(f"Model with name '{input.name}' already exists")
|
|
111
111
|
|
|
112
112
|
return CreateModelMutationPayload(
|
|
113
|
-
model=
|
|
113
|
+
model=GenerativeModel(id=model.id, db_record=model),
|
|
114
114
|
query=Query(),
|
|
115
115
|
)
|
|
116
116
|
|
|
117
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore
|
|
117
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer]) # type: ignore
|
|
118
118
|
async def update_model(
|
|
119
119
|
self,
|
|
120
120
|
info: Info[Context, None],
|
|
@@ -155,19 +155,21 @@ class ModelMutationMixin:
|
|
|
155
155
|
model.name_pattern = name_pattern
|
|
156
156
|
model.token_prices = token_prices
|
|
157
157
|
model.start_time = input.start_time
|
|
158
|
+
# Explicitly set updated_at so the GenerativeModelStore daemon picks up this
|
|
159
|
+
# change (SQLAlchemy's onupdate may not trigger for relationship-only changes).
|
|
160
|
+
model.updated_at = datetime.now(timezone.utc)
|
|
158
161
|
session.add(model)
|
|
159
162
|
try:
|
|
160
163
|
await session.flush()
|
|
161
164
|
except (PostgreSQLIntegrityError, SQLiteIntegrityError):
|
|
162
165
|
raise Conflict(f"Model with name '{input.name}' already exists")
|
|
163
|
-
await session.refresh(model)
|
|
164
166
|
|
|
165
167
|
return UpdateModelMutationPayload(
|
|
166
|
-
model=
|
|
168
|
+
model=GenerativeModel(id=model.id, db_record=model),
|
|
167
169
|
query=Query(),
|
|
168
170
|
)
|
|
169
171
|
|
|
170
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore
|
|
172
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer]) # type: ignore
|
|
171
173
|
async def delete_model(
|
|
172
174
|
self,
|
|
173
175
|
info: Info[Context, None],
|
|
@@ -192,7 +194,7 @@ class ModelMutationMixin:
|
|
|
192
194
|
await session.rollback()
|
|
193
195
|
raise BadRequest("Cannot delete built-in model")
|
|
194
196
|
return DeleteModelMutationPayload(
|
|
195
|
-
model=
|
|
197
|
+
model=GenerativeModel(id=model.id, db_record=model),
|
|
196
198
|
query=Query(),
|
|
197
199
|
)
|
|
198
200
|
|
|
@@ -8,7 +8,7 @@ from strawberry.types import Info
|
|
|
8
8
|
|
|
9
9
|
from phoenix.config import DEFAULT_PROJECT_NAME
|
|
10
10
|
from phoenix.db import models
|
|
11
|
-
from phoenix.server.api.auth import IsNotReadOnly
|
|
11
|
+
from phoenix.server.api.auth import IsNotReadOnly, IsNotViewer
|
|
12
12
|
from phoenix.server.api.context import Context
|
|
13
13
|
from phoenix.server.api.exceptions import BadRequest, Conflict
|
|
14
14
|
from phoenix.server.api.input_types.ClearProjectInput import ClearProjectInput
|
|
@@ -27,7 +27,7 @@ class ProjectMutationPayload:
|
|
|
27
27
|
|
|
28
28
|
@strawberry.type
|
|
29
29
|
class ProjectMutationMixin:
|
|
30
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore
|
|
30
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer]) # type: ignore
|
|
31
31
|
async def create_project(
|
|
32
32
|
self,
|
|
33
33
|
info: Info[Context, None],
|
|
@@ -52,7 +52,7 @@ class ProjectMutationMixin:
|
|
|
52
52
|
info.context.event_queue.put(ProjectInsertEvent((project.id,)))
|
|
53
53
|
return ProjectMutationPayload(project=to_gql_project(project), query=Query())
|
|
54
54
|
|
|
55
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore
|
|
55
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer]) # type: ignore
|
|
56
56
|
async def delete_project(self, info: Info[Context, None], id: GlobalID) -> Query:
|
|
57
57
|
project_id = from_global_id_with_expected_type(global_id=id, expected_type_name="Project")
|
|
58
58
|
async with info.context.db() as session:
|
|
@@ -69,7 +69,7 @@ class ProjectMutationMixin:
|
|
|
69
69
|
info.context.event_queue.put(ProjectDeleteEvent((project_id,)))
|
|
70
70
|
return Query()
|
|
71
71
|
|
|
72
|
-
@strawberry.mutation(permission_classes=[IsNotReadOnly]) # type: ignore
|
|
72
|
+
@strawberry.mutation(permission_classes=[IsNotReadOnly, IsNotViewer]) # type: ignore
|
|
73
73
|
async def clear_project(self, info: Info[Context, None], input: ClearProjectInput) -> Query:
|
|
74
74
|
project_id = from_global_id_with_expected_type(
|
|
75
75
|
global_id=input.id, expected_type_name="Project"
|