arize-phoenix 10.0.4__py3-none-any.whl → 12.28.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/METADATA +124 -72
- arize_phoenix-12.28.1.dist-info/RECORD +499 -0
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/WHEEL +1 -1
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/licenses/IP_NOTICE +1 -1
- phoenix/__generated__/__init__.py +0 -0
- phoenix/__generated__/classification_evaluator_configs/__init__.py +20 -0
- phoenix/__generated__/classification_evaluator_configs/_document_relevance_classification_evaluator_config.py +17 -0
- phoenix/__generated__/classification_evaluator_configs/_hallucination_classification_evaluator_config.py +17 -0
- phoenix/__generated__/classification_evaluator_configs/_models.py +18 -0
- phoenix/__generated__/classification_evaluator_configs/_tool_selection_classification_evaluator_config.py +17 -0
- phoenix/__init__.py +5 -4
- phoenix/auth.py +39 -2
- phoenix/config.py +1763 -91
- phoenix/datetime_utils.py +120 -2
- phoenix/db/README.md +595 -25
- phoenix/db/bulk_inserter.py +145 -103
- phoenix/db/engines.py +140 -33
- phoenix/db/enums.py +3 -12
- phoenix/db/facilitator.py +302 -35
- phoenix/db/helpers.py +1000 -65
- phoenix/db/iam_auth.py +64 -0
- phoenix/db/insertion/dataset.py +135 -2
- phoenix/db/insertion/document_annotation.py +9 -6
- phoenix/db/insertion/evaluation.py +2 -3
- phoenix/db/insertion/helpers.py +17 -2
- phoenix/db/insertion/session_annotation.py +176 -0
- phoenix/db/insertion/span.py +15 -11
- phoenix/db/insertion/span_annotation.py +3 -4
- phoenix/db/insertion/trace_annotation.py +3 -4
- phoenix/db/insertion/types.py +50 -20
- phoenix/db/migrations/versions/01a8342c9cdf_add_user_id_on_datasets.py +40 -0
- phoenix/db/migrations/versions/0df286449799_add_session_annotations_table.py +105 -0
- phoenix/db/migrations/versions/272b66ff50f8_drop_single_indices.py +119 -0
- phoenix/db/migrations/versions/58228d933c91_dataset_labels.py +67 -0
- phoenix/db/migrations/versions/699f655af132_experiment_tags.py +57 -0
- phoenix/db/migrations/versions/735d3d93c33e_add_composite_indices.py +41 -0
- phoenix/db/migrations/versions/a20694b15f82_cost.py +196 -0
- phoenix/db/migrations/versions/ab513d89518b_add_user_id_on_dataset_versions.py +40 -0
- phoenix/db/migrations/versions/d0690a79ea51_users_on_experiments.py +40 -0
- phoenix/db/migrations/versions/deb2c81c0bb2_dataset_splits.py +139 -0
- phoenix/db/migrations/versions/e76cbd66ffc3_add_experiments_dataset_examples.py +87 -0
- phoenix/db/models.py +669 -56
- phoenix/db/pg_config.py +10 -0
- phoenix/db/types/model_provider.py +4 -0
- phoenix/db/types/token_price_customization.py +29 -0
- phoenix/db/types/trace_retention.py +23 -15
- phoenix/experiments/evaluators/utils.py +3 -3
- phoenix/experiments/functions.py +160 -52
- phoenix/experiments/tracing.py +2 -2
- phoenix/experiments/types.py +1 -1
- phoenix/inferences/inferences.py +1 -2
- phoenix/server/api/auth.py +38 -7
- phoenix/server/api/auth_messages.py +46 -0
- phoenix/server/api/context.py +100 -4
- phoenix/server/api/dataloaders/__init__.py +79 -5
- phoenix/server/api/dataloaders/annotation_configs_by_project.py +31 -0
- phoenix/server/api/dataloaders/annotation_summaries.py +60 -8
- phoenix/server/api/dataloaders/average_experiment_repeated_run_group_latency.py +50 -0
- phoenix/server/api/dataloaders/average_experiment_run_latency.py +17 -24
- phoenix/server/api/dataloaders/cache/two_tier_cache.py +1 -2
- phoenix/server/api/dataloaders/dataset_dataset_splits.py +52 -0
- phoenix/server/api/dataloaders/dataset_example_revisions.py +0 -1
- phoenix/server/api/dataloaders/dataset_example_splits.py +40 -0
- phoenix/server/api/dataloaders/dataset_examples_and_versions_by_experiment_run.py +47 -0
- phoenix/server/api/dataloaders/dataset_labels.py +36 -0
- phoenix/server/api/dataloaders/document_evaluation_summaries.py +2 -2
- phoenix/server/api/dataloaders/document_evaluations.py +6 -9
- phoenix/server/api/dataloaders/experiment_annotation_summaries.py +88 -34
- phoenix/server/api/dataloaders/experiment_dataset_splits.py +43 -0
- phoenix/server/api/dataloaders/experiment_error_rates.py +21 -28
- phoenix/server/api/dataloaders/experiment_repeated_run_group_annotation_summaries.py +77 -0
- phoenix/server/api/dataloaders/experiment_repeated_run_groups.py +57 -0
- phoenix/server/api/dataloaders/experiment_runs_by_experiment_and_example.py +44 -0
- phoenix/server/api/dataloaders/last_used_times_by_generative_model_id.py +35 -0
- phoenix/server/api/dataloaders/latency_ms_quantile.py +40 -8
- phoenix/server/api/dataloaders/record_counts.py +37 -10
- phoenix/server/api/dataloaders/session_annotations_by_session.py +29 -0
- phoenix/server/api/dataloaders/span_cost_by_span.py +24 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_generative_model.py +56 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_project_session.py +57 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_span.py +43 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_trace.py +56 -0
- phoenix/server/api/dataloaders/span_cost_details_by_span_cost.py +27 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_experiment.py +57 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_experiment_repeated_run_group.py +64 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_experiment_run.py +58 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_generative_model.py +55 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_project.py +152 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_project_session.py +56 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_trace.py +55 -0
- phoenix/server/api/dataloaders/span_costs.py +29 -0
- phoenix/server/api/dataloaders/table_fields.py +2 -2
- phoenix/server/api/dataloaders/token_prices_by_model.py +30 -0
- phoenix/server/api/dataloaders/trace_annotations_by_trace.py +27 -0
- phoenix/server/api/dataloaders/types.py +29 -0
- phoenix/server/api/exceptions.py +11 -1
- phoenix/server/api/helpers/dataset_helpers.py +5 -1
- phoenix/server/api/helpers/playground_clients.py +1243 -292
- phoenix/server/api/helpers/playground_registry.py +2 -2
- phoenix/server/api/helpers/playground_spans.py +8 -4
- phoenix/server/api/helpers/playground_users.py +26 -0
- phoenix/server/api/helpers/prompts/conversions/aws.py +83 -0
- phoenix/server/api/helpers/prompts/conversions/google.py +103 -0
- phoenix/server/api/helpers/prompts/models.py +205 -22
- phoenix/server/api/input_types/{SpanAnnotationFilter.py → AnnotationFilter.py} +22 -14
- phoenix/server/api/input_types/ChatCompletionInput.py +6 -2
- phoenix/server/api/input_types/CreateProjectInput.py +27 -0
- phoenix/server/api/input_types/CreateProjectSessionAnnotationInput.py +37 -0
- phoenix/server/api/input_types/DatasetFilter.py +17 -0
- phoenix/server/api/input_types/ExperimentRunSort.py +237 -0
- phoenix/server/api/input_types/GenerativeCredentialInput.py +9 -0
- phoenix/server/api/input_types/GenerativeModelInput.py +5 -0
- phoenix/server/api/input_types/ProjectSessionSort.py +161 -1
- phoenix/server/api/input_types/PromptFilter.py +14 -0
- phoenix/server/api/input_types/PromptVersionInput.py +52 -1
- phoenix/server/api/input_types/SpanSort.py +44 -7
- phoenix/server/api/input_types/TimeBinConfig.py +23 -0
- phoenix/server/api/input_types/UpdateAnnotationInput.py +34 -0
- phoenix/server/api/input_types/UserRoleInput.py +1 -0
- phoenix/server/api/mutations/__init__.py +10 -0
- phoenix/server/api/mutations/annotation_config_mutations.py +8 -8
- phoenix/server/api/mutations/api_key_mutations.py +19 -23
- phoenix/server/api/mutations/chat_mutations.py +154 -47
- phoenix/server/api/mutations/dataset_label_mutations.py +243 -0
- phoenix/server/api/mutations/dataset_mutations.py +21 -16
- phoenix/server/api/mutations/dataset_split_mutations.py +351 -0
- phoenix/server/api/mutations/experiment_mutations.py +2 -2
- phoenix/server/api/mutations/export_events_mutations.py +3 -3
- phoenix/server/api/mutations/model_mutations.py +210 -0
- phoenix/server/api/mutations/project_mutations.py +49 -10
- phoenix/server/api/mutations/project_session_annotations_mutations.py +158 -0
- phoenix/server/api/mutations/project_trace_retention_policy_mutations.py +8 -4
- phoenix/server/api/mutations/prompt_label_mutations.py +74 -65
- phoenix/server/api/mutations/prompt_mutations.py +65 -129
- phoenix/server/api/mutations/prompt_version_tag_mutations.py +11 -8
- phoenix/server/api/mutations/span_annotations_mutations.py +15 -10
- phoenix/server/api/mutations/trace_annotations_mutations.py +14 -10
- phoenix/server/api/mutations/trace_mutations.py +47 -3
- phoenix/server/api/mutations/user_mutations.py +66 -41
- phoenix/server/api/queries.py +768 -293
- phoenix/server/api/routers/__init__.py +2 -2
- phoenix/server/api/routers/auth.py +154 -88
- phoenix/server/api/routers/ldap.py +229 -0
- phoenix/server/api/routers/oauth2.py +369 -106
- phoenix/server/api/routers/v1/__init__.py +24 -4
- phoenix/server/api/routers/v1/annotation_configs.py +23 -31
- phoenix/server/api/routers/v1/annotations.py +481 -17
- phoenix/server/api/routers/v1/datasets.py +395 -81
- phoenix/server/api/routers/v1/documents.py +142 -0
- phoenix/server/api/routers/v1/evaluations.py +24 -31
- phoenix/server/api/routers/v1/experiment_evaluations.py +19 -8
- phoenix/server/api/routers/v1/experiment_runs.py +337 -59
- phoenix/server/api/routers/v1/experiments.py +479 -48
- phoenix/server/api/routers/v1/models.py +7 -0
- phoenix/server/api/routers/v1/projects.py +18 -49
- phoenix/server/api/routers/v1/prompts.py +54 -40
- phoenix/server/api/routers/v1/sessions.py +108 -0
- phoenix/server/api/routers/v1/spans.py +1091 -81
- phoenix/server/api/routers/v1/traces.py +132 -78
- phoenix/server/api/routers/v1/users.py +389 -0
- phoenix/server/api/routers/v1/utils.py +3 -7
- phoenix/server/api/subscriptions.py +305 -88
- phoenix/server/api/types/Annotation.py +90 -23
- phoenix/server/api/types/ApiKey.py +13 -17
- phoenix/server/api/types/AuthMethod.py +1 -0
- phoenix/server/api/types/ChatCompletionSubscriptionPayload.py +1 -0
- phoenix/server/api/types/CostBreakdown.py +12 -0
- phoenix/server/api/types/Dataset.py +226 -72
- phoenix/server/api/types/DatasetExample.py +88 -18
- phoenix/server/api/types/DatasetExperimentAnnotationSummary.py +10 -0
- phoenix/server/api/types/DatasetLabel.py +57 -0
- phoenix/server/api/types/DatasetSplit.py +98 -0
- phoenix/server/api/types/DatasetVersion.py +49 -4
- phoenix/server/api/types/DocumentAnnotation.py +212 -0
- phoenix/server/api/types/Experiment.py +264 -59
- phoenix/server/api/types/ExperimentComparison.py +5 -10
- phoenix/server/api/types/ExperimentRepeatedRunGroup.py +155 -0
- phoenix/server/api/types/ExperimentRepeatedRunGroupAnnotationSummary.py +9 -0
- phoenix/server/api/types/ExperimentRun.py +169 -65
- phoenix/server/api/types/ExperimentRunAnnotation.py +158 -39
- phoenix/server/api/types/GenerativeModel.py +245 -3
- phoenix/server/api/types/GenerativeProvider.py +70 -11
- phoenix/server/api/types/{Model.py → InferenceModel.py} +1 -1
- phoenix/server/api/types/ModelInterface.py +16 -0
- phoenix/server/api/types/PlaygroundModel.py +20 -0
- phoenix/server/api/types/Project.py +1278 -216
- phoenix/server/api/types/ProjectSession.py +188 -28
- phoenix/server/api/types/ProjectSessionAnnotation.py +187 -0
- phoenix/server/api/types/ProjectTraceRetentionPolicy.py +1 -1
- phoenix/server/api/types/Prompt.py +119 -39
- phoenix/server/api/types/PromptLabel.py +42 -25
- phoenix/server/api/types/PromptVersion.py +11 -8
- phoenix/server/api/types/PromptVersionTag.py +65 -25
- phoenix/server/api/types/ServerStatus.py +6 -0
- phoenix/server/api/types/Span.py +167 -123
- phoenix/server/api/types/SpanAnnotation.py +189 -42
- phoenix/server/api/types/SpanCostDetailSummaryEntry.py +10 -0
- phoenix/server/api/types/SpanCostSummary.py +10 -0
- phoenix/server/api/types/SystemApiKey.py +65 -1
- phoenix/server/api/types/TokenPrice.py +16 -0
- phoenix/server/api/types/TokenUsage.py +3 -3
- phoenix/server/api/types/Trace.py +223 -51
- phoenix/server/api/types/TraceAnnotation.py +149 -50
- phoenix/server/api/types/User.py +137 -32
- phoenix/server/api/types/UserApiKey.py +73 -26
- phoenix/server/api/types/node.py +10 -0
- phoenix/server/api/types/pagination.py +11 -2
- phoenix/server/app.py +290 -45
- phoenix/server/authorization.py +38 -3
- phoenix/server/bearer_auth.py +34 -24
- phoenix/server/cost_tracking/cost_details_calculator.py +196 -0
- phoenix/server/cost_tracking/cost_model_lookup.py +179 -0
- phoenix/server/cost_tracking/helpers.py +68 -0
- phoenix/server/cost_tracking/model_cost_manifest.json +3657 -830
- phoenix/server/cost_tracking/regex_specificity.py +397 -0
- phoenix/server/cost_tracking/token_cost_calculator.py +57 -0
- phoenix/server/daemons/__init__.py +0 -0
- phoenix/server/daemons/db_disk_usage_monitor.py +214 -0
- phoenix/server/daemons/generative_model_store.py +103 -0
- phoenix/server/daemons/span_cost_calculator.py +99 -0
- phoenix/server/dml_event.py +17 -0
- phoenix/server/dml_event_handler.py +5 -0
- phoenix/server/email/sender.py +56 -3
- phoenix/server/email/templates/db_disk_usage_notification.html +19 -0
- phoenix/server/email/types.py +11 -0
- phoenix/server/experiments/__init__.py +0 -0
- phoenix/server/experiments/utils.py +14 -0
- phoenix/server/grpc_server.py +11 -11
- phoenix/server/jwt_store.py +17 -15
- phoenix/server/ldap.py +1449 -0
- phoenix/server/main.py +26 -10
- phoenix/server/oauth2.py +330 -12
- phoenix/server/prometheus.py +66 -6
- phoenix/server/rate_limiters.py +4 -9
- phoenix/server/retention.py +33 -20
- phoenix/server/session_filters.py +49 -0
- phoenix/server/static/.vite/manifest.json +55 -51
- phoenix/server/static/assets/components-BreFUQQa.js +6702 -0
- phoenix/server/static/assets/{index-E0M82BdE.js → index-CTQoemZv.js} +140 -56
- phoenix/server/static/assets/pages-DBE5iYM3.js +9524 -0
- phoenix/server/static/assets/vendor-BGzfc4EU.css +1 -0
- phoenix/server/static/assets/vendor-DCE4v-Ot.js +920 -0
- phoenix/server/static/assets/vendor-codemirror-D5f205eT.js +25 -0
- phoenix/server/static/assets/vendor-recharts-V9cwpXsm.js +37 -0
- phoenix/server/static/assets/vendor-shiki-Do--csgv.js +5 -0
- phoenix/server/static/assets/vendor-three-CmB8bl_y.js +3840 -0
- phoenix/server/templates/index.html +40 -6
- phoenix/server/thread_server.py +1 -2
- phoenix/server/types.py +14 -4
- phoenix/server/utils.py +74 -0
- phoenix/session/client.py +56 -3
- phoenix/session/data_extractor.py +5 -0
- phoenix/session/evaluation.py +14 -5
- phoenix/session/session.py +45 -9
- phoenix/settings.py +5 -0
- phoenix/trace/attributes.py +80 -13
- phoenix/trace/dsl/helpers.py +90 -1
- phoenix/trace/dsl/query.py +8 -6
- phoenix/trace/projects.py +5 -0
- phoenix/utilities/template_formatters.py +1 -1
- phoenix/version.py +1 -1
- arize_phoenix-10.0.4.dist-info/RECORD +0 -405
- phoenix/server/api/types/Evaluation.py +0 -39
- phoenix/server/cost_tracking/cost_lookup.py +0 -255
- phoenix/server/static/assets/components-DULKeDfL.js +0 -4365
- phoenix/server/static/assets/pages-Cl0A-0U2.js +0 -7430
- phoenix/server/static/assets/vendor-WIZid84E.css +0 -1
- phoenix/server/static/assets/vendor-arizeai-Dy-0mSNw.js +0 -649
- phoenix/server/static/assets/vendor-codemirror-DBtifKNr.js +0 -33
- phoenix/server/static/assets/vendor-oB4u9zuV.js +0 -905
- phoenix/server/static/assets/vendor-recharts-D-T4KPz2.js +0 -59
- phoenix/server/static/assets/vendor-shiki-BMn4O_9F.js +0 -5
- phoenix/server/static/assets/vendor-three-C5WAXd5r.js +0 -2998
- phoenix/utilities/deprecation.py +0 -31
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/entry_points.txt +0 -0
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/licenses/LICENSE +0 -0
phoenix/server/api/types/DatasetExample.py

@@ -1,40 +1,59 @@
 from datetime import datetime
-from typing import Optional
+from typing import TYPE_CHECKING, Annotated, Optional

 import strawberry
 from sqlalchemy import select
-from sqlalchemy.orm import joinedload
 from strawberry import UNSET
 from strawberry.relay.types import Connection, GlobalID, Node, NodeID
 from strawberry.types import Info

 from phoenix.db import models
 from phoenix.server.api.context import Context
+from phoenix.server.api.exceptions import BadRequest
 from phoenix.server.api.types.DatasetExampleRevision import DatasetExampleRevision
+from phoenix.server.api.types.DatasetSplit import DatasetSplit
 from phoenix.server.api.types.DatasetVersion import DatasetVersion
-from phoenix.server.api.types.
+from phoenix.server.api.types.ExperimentRepeatedRunGroup import (
+    ExperimentRepeatedRunGroup,
+)
+from phoenix.server.api.types.ExperimentRun import ExperimentRun
 from phoenix.server.api.types.node import from_global_id_with_expected_type
 from phoenix.server.api.types.pagination import (
     ConnectionArgs,
     CursorString,
     connection_from_list,
 )
-
+
+if TYPE_CHECKING:
+    from .Span import Span


 @strawberry.type
 class DatasetExample(Node):
-
-
+    id: NodeID[int]
+    db_record: strawberry.Private[Optional[models.DatasetExample]] = None
     version_id: strawberry.Private[Optional[int]] = None

+    def __post_init__(self) -> None:
+        if self.db_record and self.id != self.db_record.id:
+            raise ValueError("DatasetExample ID mismatch")
+
+    @strawberry.field
+    async def created_at(self, info: Info[Context, None]) -> datetime:
+        if self.db_record:
+            val = self.db_record.created_at
+        else:
+            val = await info.context.data_loaders.dataset_example_fields.load(
+                (self.id, models.DatasetExample.created_at),
+            )
+        return val
+
     @strawberry.field
     async def revision(
         self,
         info: Info[Context, None],
         dataset_version_id: Optional[GlobalID] = UNSET,
     ) -> DatasetExampleRevision:
-        example_id = self.id_attr
         version_id: Optional[int] = None
         if dataset_version_id:
             version_id = from_global_id_with_expected_type(
@@ -42,18 +61,18 @@ class DatasetExample(Node):
             )
         elif self.version_id is not None:
             version_id = self.version_id
-        return await info.context.data_loaders.dataset_example_revisions.load(
-            (example_id, version_id)
-        )
+        return await info.context.data_loaders.dataset_example_revisions.load((self.id, version_id))

     @strawberry.field
     async def span(
         self,
         info: Info[Context, None],
-    ) -> Optional[Span]:
+    ) -> Optional[Annotated["Span", strawberry.lazy(".Span")]]:
+        from .Span import Span
+
         return (
-            Span(
-            if (span := await info.context.data_loaders.dataset_example_spans.load(self.
+            Span(id=span.id, db_record=span)
+            if (span := await info.context.data_loaders.dataset_example_spans.load(self.id))
             else None
         )

@@ -65,6 +84,7 @@ class DatasetExample(Node):
         last: Optional[int] = UNSET,
         after: Optional[CursorString] = UNSET,
         before: Optional[CursorString] = UNSET,
+        experiment_ids: Optional[list[GlobalID]] = UNSET,
     ) -> Connection[ExperimentRun]:
         args = ConnectionArgs(
             first=first,
@@ -72,14 +92,64 @@ class DatasetExample(Node):
             last=last,
             before=before if isinstance(before, CursorString) else None,
         )
-        example_id = self.id_attr
         query = (
             select(models.ExperimentRun)
-            .options(joinedload(models.ExperimentRun.trace).load_only(models.Trace.trace_id))
             .join(models.Experiment, models.Experiment.id == models.ExperimentRun.experiment_id)
-            .where(models.ExperimentRun.dataset_example_id ==
-            .order_by(
+            .where(models.ExperimentRun.dataset_example_id == self.id)
+            .order_by(
+                models.ExperimentRun.experiment_id.asc(),
+                models.ExperimentRun.repetition_number.asc(),
+            )
         )
+        if experiment_ids:
+            experiment_db_ids = [
+                from_global_id_with_expected_type(
+                    global_id=experiment_id,
+                    expected_type_name=models.Experiment.__name__,
+                )
+                for experiment_id in experiment_ids or []
+            ]
+            query = query.where(models.ExperimentRun.experiment_id.in_(experiment_db_ids))
         async with info.context.db() as session:
             runs = (await session.scalars(query)).all()
-        return connection_from_list([
+        return connection_from_list([ExperimentRun(id=run.id, db_record=run) for run in runs], args)
+
+    @strawberry.field
+    async def experiment_repeated_run_groups(
+        self,
+        info: Info[Context, None],
+        experiment_ids: list[GlobalID],
+    ) -> list[ExperimentRepeatedRunGroup]:
+        experiment_rowids = []
+        for experiment_id in experiment_ids:
+            try:
+                experiment_rowid = from_global_id_with_expected_type(
+                    global_id=experiment_id,
+                    expected_type_name=models.Experiment.__name__,
+                )
+            except Exception:
+                raise BadRequest(f"Invalid experiment ID: {experiment_id}")
+            experiment_rowids.append(experiment_rowid)
+        repeated_run_groups = (
+            await info.context.data_loaders.experiment_repeated_run_groups.load_many(
+                [(experiment_rowid, self.id) for experiment_rowid in experiment_rowids]
+            )
+        )
+        return [
+            ExperimentRepeatedRunGroup(
+                experiment_rowid=group.experiment_rowid,
+                dataset_example_rowid=group.dataset_example_rowid,
+                cached_runs=[ExperimentRun(id=run.id, db_record=run) for run in group.runs],
+            )
+            for group in repeated_run_groups
+        ]
+
+    @strawberry.field
+    async def dataset_splits(
+        self,
+        info: Info[Context, None],
+    ) -> list[DatasetSplit]:
+        return [
+            DatasetSplit(id=split.id, db_record=split)
+            for split in await info.context.data_loaders.dataset_example_splits.load(self.id)
+        ]
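Every resolver in the hunk above follows the same two-path lookup, and the new DatasetLabel, DatasetSplit, DatasetVersion, and DocumentAnnotation types below repeat it: when a node is constructed with a prefetched `db_record`, the field is read straight off that row; otherwise the value is fetched through a batched dataloader keyed by `(id, column)`. A minimal sketch of the pattern in plain Python; the `FieldLoader` class, `SplitNode`, and the in-memory `ROWS` table are illustrative stand-ins, not Phoenix's actual dataloader or model APIs:

```python
import asyncio
from dataclasses import dataclass, field
from typing import Any, Optional

# Illustrative in-memory rows; Phoenix's real resolvers read SQLAlchemy models instead.
ROWS: dict[int, dict[str, Any]] = {1: {"id": 1, "name": "train", "color": "#22aa55"}}


class FieldLoader:
    """Stand-in for a batched dataloader keyed by (row id, column name)."""

    async def load(self, key: tuple[int, str]) -> Any:
        row_id, column = key
        await asyncio.sleep(0)  # in Phoenix this would be a batched SQL round trip
        return ROWS[row_id][column]


@dataclass
class SplitNode:
    id: int
    db_record: Optional[dict[str, Any]] = None  # prefetched row, if the caller had one
    loader: FieldLoader = field(default_factory=FieldLoader)

    def __post_init__(self) -> None:
        # Same guard as the __post_init__ checks in the diff: a preloaded
        # record must belong to the node it is attached to.
        if self.db_record is not None and self.db_record["id"] != self.id:
            raise ValueError("ID mismatch")

    async def name(self) -> str:
        if self.db_record is not None:  # fast path: the row is already in hand
            return self.db_record["name"]
        # Slow path: fetch only this column through the batched loader.
        return await self.loader.load((self.id, "name"))


async def main() -> None:
    print(await SplitNode(id=1).name())                     # resolved via the loader
    print(await SplitNode(id=1, db_record=ROWS[1]).name())  # resolved from the record


asyncio.run(main())
```

Passing `db_record` avoids an extra query when the parent resolver has already fetched the row, while the dataloader path keeps per-field lookups batched when it has not.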
phoenix/server/api/types/DatasetLabel.py

@@ -0,0 +1,57 @@
+from typing import Optional
+
+import strawberry
+from strawberry.relay import Node, NodeID
+from strawberry.types import Info
+
+from phoenix.db import models
+from phoenix.server.api.context import Context
+
+
+@strawberry.type
+class DatasetLabel(Node):
+    id: NodeID[int]
+    db_record: strawberry.Private[Optional[models.DatasetLabel]] = None
+
+    def __post_init__(self) -> None:
+        if self.db_record and self.id != self.db_record.id:
+            raise ValueError("DatasetLabel ID mismatch")
+
+    @strawberry.field
+    async def name(
+        self,
+        info: Info[Context, None],
+    ) -> str:
+        if self.db_record:
+            val = self.db_record.name
+        else:
+            val = await info.context.data_loaders.dataset_label_fields.load(
+                (self.id, models.DatasetLabel.name),
+            )
+        return val
+
+    @strawberry.field
+    async def description(
+        self,
+        info: Info[Context, None],
+    ) -> Optional[str]:
+        if self.db_record:
+            val = self.db_record.description
+        else:
+            val = await info.context.data_loaders.dataset_label_fields.load(
+                (self.id, models.DatasetLabel.description),
+            )
+        return val
+
+    @strawberry.field
+    async def color(
+        self,
+        info: Info[Context, None],
+    ) -> str:
+        if self.db_record:
+            val = self.db_record.color
+        else:
+            val = await info.context.data_loaders.dataset_label_fields.load(
+                (self.id, models.DatasetLabel.color),
+            )
+        return val
phoenix/server/api/types/DatasetSplit.py

@@ -0,0 +1,98 @@
+from datetime import datetime
+from typing import Optional
+
+import strawberry
+from strawberry.relay import Node, NodeID
+from strawberry.scalars import JSON
+from strawberry.types import Info
+
+from phoenix.db import models
+from phoenix.server.api.context import Context
+
+
+@strawberry.type
+class DatasetSplit(Node):
+    id: NodeID[int]
+    db_record: strawberry.Private[Optional[models.DatasetSplit]] = None
+
+    def __post_init__(self) -> None:
+        if self.db_record and self.id != self.db_record.id:
+            raise ValueError("DatasetSplit ID mismatch")
+
+    @strawberry.field
+    async def name(
+        self,
+        info: Info[Context, None],
+    ) -> str:
+        if self.db_record:
+            val = self.db_record.name
+        else:
+            val = await info.context.data_loaders.dataset_split_fields.load(
+                (self.id, models.DatasetSplit.name),
+            )
+        return val
+
+    @strawberry.field
+    async def description(
+        self,
+        info: Info[Context, None],
+    ) -> Optional[str]:
+        if self.db_record:
+            val = self.db_record.description
+        else:
+            val = await info.context.data_loaders.dataset_split_fields.load(
+                (self.id, models.DatasetSplit.description),
+            )
+        return val
+
+    @strawberry.field
+    async def metadata(
+        self,
+        info: Info[Context, None],
+    ) -> JSON:
+        if self.db_record:
+            val = self.db_record.metadata_
+        else:
+            val = await info.context.data_loaders.dataset_split_fields.load(
+                (self.id, models.DatasetSplit.metadata_),
+            )
+        return val
+
+    @strawberry.field
+    async def color(
+        self,
+        info: Info[Context, None],
+    ) -> str:
+        if self.db_record:
+            val = self.db_record.color
+        else:
+            val = await info.context.data_loaders.dataset_split_fields.load(
+                (self.id, models.DatasetSplit.color),
+            )
+        return val
+
+    @strawberry.field
+    async def created_at(
+        self,
+        info: Info[Context, None],
+    ) -> datetime:
+        if self.db_record:
+            val = self.db_record.created_at
+        else:
+            val = await info.context.data_loaders.dataset_split_fields.load(
+                (self.id, models.DatasetSplit.created_at),
+            )
+        return val
+
+    @strawberry.field
+    async def updated_at(
+        self,
+        info: Info[Context, None],
+    ) -> datetime:
+        if self.db_record:
+            val = self.db_record.updated_at
+        else:
+            val = await info.context.data_loaders.dataset_split_fields.load(
+                (self.id, models.DatasetSplit.updated_at),
+            )
+        return val
phoenix/server/api/types/DatasetVersion.py

@@ -4,11 +4,56 @@ from typing import Optional
 import strawberry
 from strawberry.relay import Node, NodeID
 from strawberry.scalars import JSON
+from strawberry.types import Info
+
+from phoenix.db import models
+from phoenix.server.api.context import Context


 @strawberry.type
 class DatasetVersion(Node):
-
-
-
-
+    id: NodeID[int]
+    db_record: strawberry.Private[Optional[models.DatasetVersion]] = None
+
+    def __post_init__(self) -> None:
+        if self.db_record and self.id != self.db_record.id:
+            raise ValueError("DatasetVersion ID mismatch")
+
+    @strawberry.field
+    async def description(
+        self,
+        info: Info[Context, None],
+    ) -> Optional[str]:
+        if self.db_record:
+            val = self.db_record.description
+        else:
+            val = await info.context.data_loaders.dataset_version_fields.load(
+                (self.id, models.DatasetVersion.description),
+            )
+        return val
+
+    @strawberry.field
+    async def metadata(
+        self,
+        info: Info[Context, None],
+    ) -> JSON:
+        if self.db_record:
+            val = self.db_record.metadata_
+        else:
+            val = await info.context.data_loaders.dataset_version_fields.load(
+                (self.id, models.DatasetVersion.metadata_),
+            )
+        return val
+
+    @strawberry.field
+    async def created_at(
+        self,
+        info: Info[Context, None],
+    ) -> datetime:
+        if self.db_record:
+            val = self.db_record.created_at
+        else:
+            val = await info.context.data_loaders.dataset_version_fields.load(
+                (self.id, models.DatasetVersion.created_at),
+            )
+        return val
phoenix/server/api/types/DocumentAnnotation.py

@@ -0,0 +1,212 @@
+from datetime import datetime
+from math import isfinite
+from typing import TYPE_CHECKING, Annotated, Optional
+
+import strawberry
+from strawberry.relay import Node, NodeID
+from strawberry.scalars import JSON
+from strawberry.types import Info
+
+from phoenix.db import models
+from phoenix.server.api.context import Context
+
+from .Annotation import Annotation
+from .AnnotationSource import AnnotationSource
+from .AnnotatorKind import AnnotatorKind
+
+if TYPE_CHECKING:
+    from .Span import Span
+    from .User import User
+
+
+@strawberry.type
+class DocumentAnnotation(Node, Annotation):
+    id: NodeID[int]
+    db_record: strawberry.Private[Optional[models.DocumentAnnotation]] = None
+
+    def __post_init__(self) -> None:
+        if self.db_record and self.id != self.db_record.id:
+            raise ValueError("DocumentAnnotation ID mismatch")
+
+    @strawberry.field(description="Name of the annotation, e.g. 'helpfulness' or 'relevance'.")  # type: ignore
+    async def name(
+        self,
+        info: Info[Context, None],
+    ) -> str:
+        if self.db_record:
+            val = self.db_record.name
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.name),
+            )
+        return val
+
+    @strawberry.field(description="The kind of annotator that produced the annotation.")  # type: ignore
+    async def annotator_kind(
+        self,
+        info: Info[Context, None],
+    ) -> AnnotatorKind:
+        if self.db_record:
+            val = self.db_record.annotator_kind
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.annotator_kind),
+            )
+        return AnnotatorKind(val)
+
+    @strawberry.field(
+        description="Value of the annotation in the form of a string, e.g. "
+        "'helpful' or 'not helpful'. Note that the label is not necessarily binary."
+    )  # type: ignore
+    async def label(
+        self,
+        info: Info[Context, None],
+    ) -> Optional[str]:
+        if self.db_record:
+            val = self.db_record.label
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.label),
+            )
+        return val
+
+    @strawberry.field(
+        description="Value of the annotation in the form of a numeric score.",
+    )  # type: ignore
+    async def score(
+        self,
+        info: Info[Context, None],
+    ) -> Optional[float]:
+        if self.db_record:
+            val = self.db_record.score
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.score),
+            )
+        return val if val is not None and isfinite(val) else None
+
+    @strawberry.field(
+        description="The annotator's explanation for the annotation result (i.e. "
+        "score or label, or both) given to the subject."
+    )  # type: ignore
+    async def explanation(
+        self,
+        info: Info[Context, None],
+    ) -> Optional[str]:
+        if self.db_record:
+            val = self.db_record.explanation
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.explanation),
+            )
+        return val
+
+    @strawberry.field(description="The metadata associated with the annotation.")  # type: ignore
+    async def metadata(
+        self,
+        info: Info[Context, None],
+    ) -> JSON:
+        if self.db_record:
+            val = self.db_record.metadata_
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.metadata_),
+            )
+        return val
+
+    @strawberry.field(description="The position of the annotation in the document.")  # type: ignore
+    async def document_position(
+        self,
+        info: Info[Context, None],
+    ) -> int:
+        if self.db_record:
+            val = self.db_record.document_position
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.document_position),
+            )
+        return val
+
+    @strawberry.field(description="The identifier of the annotation.")  # type: ignore
+    async def identifier(
+        self,
+        info: Info[Context, None],
+    ) -> str:
+        if self.db_record:
+            val = self.db_record.identifier
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.identifier),
+            )
+        return val
+
+    @strawberry.field(description="The source of the annotation.")  # type: ignore
+    async def source(
+        self,
+        info: Info[Context, None],
+    ) -> AnnotationSource:
+        if self.db_record:
+            val = self.db_record.source
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.source),
+            )
+        return AnnotationSource(val)
+
+    @strawberry.field(description="The date and time when the annotation was created.")  # type: ignore
+    async def created_at(
+        self,
+        info: Info[Context, None],
+    ) -> datetime:
+        if self.db_record:
+            val = self.db_record.created_at
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.created_at),
+            )
+        return val
+
+    @strawberry.field(description="The date and time when the annotation was last updated.")  # type: ignore
+    async def updated_at(
+        self,
+        info: Info[Context, None],
+    ) -> datetime:
+        if self.db_record:
+            val = self.db_record.updated_at
+        else:
+            val = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.updated_at),
+            )
+        return val
+
+    @strawberry.field(description="The span associated with the annotation.")  # type: ignore
+    async def span(
+        self,
+        info: Info[Context, None],
+    ) -> Annotated["Span", strawberry.lazy(".Span")]:
+        if self.db_record:
+            span_rowid = self.db_record.span_rowid
+        else:
+            span_rowid = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.span_rowid),
+            )
+        from .Span import Span
+
+        return Span(id=span_rowid)
+
+    @strawberry.field(description="The user that produced the annotation.")  # type: ignore
+    async def user(
+        self,
+        info: Info[Context, None],
+    ) -> Optional[Annotated["User", strawberry.lazy(".User")]]:
+        if self.db_record:
+            user_id = self.db_record.user_id
+        else:
+            user_id = await info.context.data_loaders.document_annotation_fields.load(
+                (self.id, models.DocumentAnnotation.user_id),
+            )
+        if user_id is None:
+            return None
+        from .User import User
+
+        return User(id=user_id)
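One detail worth noting in the DocumentAnnotation resolvers above: `score` passes its value through `isfinite` and returns `None` for NaN or ±inf, presumably because GraphQL's `Float` scalar is limited to finite values. A tiny standalone check of that guard; the helper name below is illustrative, not part of Phoenix:

```python
from math import inf, isfinite, nan
from typing import Optional


def as_graphql_float(score: Optional[float]) -> Optional[float]:
    # Mirrors the guard in DocumentAnnotation.score: drop NaN/inf rather than
    # letting them fail JSON/GraphQL Float serialization.
    return score if score is not None and isfinite(score) else None


assert as_graphql_float(0.75) == 0.75
assert as_graphql_float(nan) is None
assert as_graphql_float(inf) is None
assert as_graphql_float(None) is None
```

The `span` and `user` fields likewise annotate their return types with `strawberry.lazy(".Span")` / `strawberry.lazy(".User")` and defer the actual imports into the resolver bodies, so the mutually referencing GraphQL type modules are not imported at module load time.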