arize-phoenix 3.16.1__py3-none-any.whl → 7.7.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arize-phoenix has been flagged as potentially problematic; see the advisory on the registry's page for this release for more details.
- arize_phoenix-7.7.1.dist-info/METADATA +261 -0
- arize_phoenix-7.7.1.dist-info/RECORD +345 -0
- {arize_phoenix-3.16.1.dist-info → arize_phoenix-7.7.1.dist-info}/WHEEL +1 -1
- arize_phoenix-7.7.1.dist-info/entry_points.txt +3 -0
- phoenix/__init__.py +86 -14
- phoenix/auth.py +309 -0
- phoenix/config.py +675 -45
- phoenix/core/model.py +32 -30
- phoenix/core/model_schema.py +102 -109
- phoenix/core/model_schema_adapter.py +48 -45
- phoenix/datetime_utils.py +24 -3
- phoenix/db/README.md +54 -0
- phoenix/db/__init__.py +4 -0
- phoenix/db/alembic.ini +85 -0
- phoenix/db/bulk_inserter.py +294 -0
- phoenix/db/engines.py +208 -0
- phoenix/db/enums.py +20 -0
- phoenix/db/facilitator.py +113 -0
- phoenix/db/helpers.py +159 -0
- phoenix/db/insertion/constants.py +2 -0
- phoenix/db/insertion/dataset.py +227 -0
- phoenix/db/insertion/document_annotation.py +171 -0
- phoenix/db/insertion/evaluation.py +191 -0
- phoenix/db/insertion/helpers.py +98 -0
- phoenix/db/insertion/span.py +193 -0
- phoenix/db/insertion/span_annotation.py +158 -0
- phoenix/db/insertion/trace_annotation.py +158 -0
- phoenix/db/insertion/types.py +256 -0
- phoenix/db/migrate.py +86 -0
- phoenix/db/migrations/data_migration_scripts/populate_project_sessions.py +199 -0
- phoenix/db/migrations/env.py +114 -0
- phoenix/db/migrations/script.py.mako +26 -0
- phoenix/db/migrations/versions/10460e46d750_datasets.py +317 -0
- phoenix/db/migrations/versions/3be8647b87d8_add_token_columns_to_spans_table.py +126 -0
- phoenix/db/migrations/versions/4ded9e43755f_create_project_sessions_table.py +66 -0
- phoenix/db/migrations/versions/cd164e83824f_users_and_tokens.py +157 -0
- phoenix/db/migrations/versions/cf03bd6bae1d_init.py +280 -0
- phoenix/db/models.py +807 -0
- phoenix/exceptions.py +5 -1
- phoenix/experiments/__init__.py +6 -0
- phoenix/experiments/evaluators/__init__.py +29 -0
- phoenix/experiments/evaluators/base.py +158 -0
- phoenix/experiments/evaluators/code_evaluators.py +184 -0
- phoenix/experiments/evaluators/llm_evaluators.py +473 -0
- phoenix/experiments/evaluators/utils.py +236 -0
- phoenix/experiments/functions.py +772 -0
- phoenix/experiments/tracing.py +86 -0
- phoenix/experiments/types.py +726 -0
- phoenix/experiments/utils.py +25 -0
- phoenix/inferences/__init__.py +0 -0
- phoenix/{datasets → inferences}/errors.py +6 -5
- phoenix/{datasets → inferences}/fixtures.py +49 -42
- phoenix/{datasets/dataset.py → inferences/inferences.py} +121 -105
- phoenix/{datasets → inferences}/schema.py +11 -11
- phoenix/{datasets → inferences}/validation.py +13 -14
- phoenix/logging/__init__.py +3 -0
- phoenix/logging/_config.py +90 -0
- phoenix/logging/_filter.py +6 -0
- phoenix/logging/_formatter.py +69 -0
- phoenix/metrics/__init__.py +5 -4
- phoenix/metrics/binning.py +4 -3
- phoenix/metrics/metrics.py +2 -1
- phoenix/metrics/mixins.py +7 -6
- phoenix/metrics/retrieval_metrics.py +2 -1
- phoenix/metrics/timeseries.py +5 -4
- phoenix/metrics/wrappers.py +9 -3
- phoenix/pointcloud/clustering.py +5 -5
- phoenix/pointcloud/pointcloud.py +7 -5
- phoenix/pointcloud/projectors.py +5 -6
- phoenix/pointcloud/umap_parameters.py +53 -52
- phoenix/server/api/README.md +28 -0
- phoenix/server/api/auth.py +44 -0
- phoenix/server/api/context.py +152 -9
- phoenix/server/api/dataloaders/__init__.py +91 -0
- phoenix/server/api/dataloaders/annotation_summaries.py +139 -0
- phoenix/server/api/dataloaders/average_experiment_run_latency.py +54 -0
- phoenix/server/api/dataloaders/cache/__init__.py +3 -0
- phoenix/server/api/dataloaders/cache/two_tier_cache.py +68 -0
- phoenix/server/api/dataloaders/dataset_example_revisions.py +131 -0
- phoenix/server/api/dataloaders/dataset_example_spans.py +38 -0
- phoenix/server/api/dataloaders/document_evaluation_summaries.py +144 -0
- phoenix/server/api/dataloaders/document_evaluations.py +31 -0
- phoenix/server/api/dataloaders/document_retrieval_metrics.py +89 -0
- phoenix/server/api/dataloaders/experiment_annotation_summaries.py +79 -0
- phoenix/server/api/dataloaders/experiment_error_rates.py +58 -0
- phoenix/server/api/dataloaders/experiment_run_annotations.py +36 -0
- phoenix/server/api/dataloaders/experiment_run_counts.py +49 -0
- phoenix/server/api/dataloaders/experiment_sequence_number.py +44 -0
- phoenix/server/api/dataloaders/latency_ms_quantile.py +188 -0
- phoenix/server/api/dataloaders/min_start_or_max_end_times.py +85 -0
- phoenix/server/api/dataloaders/project_by_name.py +31 -0
- phoenix/server/api/dataloaders/record_counts.py +116 -0
- phoenix/server/api/dataloaders/session_io.py +79 -0
- phoenix/server/api/dataloaders/session_num_traces.py +30 -0
- phoenix/server/api/dataloaders/session_num_traces_with_error.py +32 -0
- phoenix/server/api/dataloaders/session_token_usages.py +41 -0
- phoenix/server/api/dataloaders/session_trace_latency_ms_quantile.py +55 -0
- phoenix/server/api/dataloaders/span_annotations.py +26 -0
- phoenix/server/api/dataloaders/span_dataset_examples.py +31 -0
- phoenix/server/api/dataloaders/span_descendants.py +57 -0
- phoenix/server/api/dataloaders/span_projects.py +33 -0
- phoenix/server/api/dataloaders/token_counts.py +124 -0
- phoenix/server/api/dataloaders/trace_by_trace_ids.py +25 -0
- phoenix/server/api/dataloaders/trace_root_spans.py +32 -0
- phoenix/server/api/dataloaders/user_roles.py +30 -0
- phoenix/server/api/dataloaders/users.py +33 -0
- phoenix/server/api/exceptions.py +48 -0
- phoenix/server/api/helpers/__init__.py +12 -0
- phoenix/server/api/helpers/dataset_helpers.py +217 -0
- phoenix/server/api/helpers/experiment_run_filters.py +763 -0
- phoenix/server/api/helpers/playground_clients.py +948 -0
- phoenix/server/api/helpers/playground_registry.py +70 -0
- phoenix/server/api/helpers/playground_spans.py +455 -0
- phoenix/server/api/input_types/AddExamplesToDatasetInput.py +16 -0
- phoenix/server/api/input_types/AddSpansToDatasetInput.py +14 -0
- phoenix/server/api/input_types/ChatCompletionInput.py +38 -0
- phoenix/server/api/input_types/ChatCompletionMessageInput.py +24 -0
- phoenix/server/api/input_types/ClearProjectInput.py +15 -0
- phoenix/server/api/input_types/ClusterInput.py +2 -2
- phoenix/server/api/input_types/CreateDatasetInput.py +12 -0
- phoenix/server/api/input_types/CreateSpanAnnotationInput.py +18 -0
- phoenix/server/api/input_types/CreateTraceAnnotationInput.py +18 -0
- phoenix/server/api/input_types/DataQualityMetricInput.py +5 -2
- phoenix/server/api/input_types/DatasetExampleInput.py +14 -0
- phoenix/server/api/input_types/DatasetSort.py +17 -0
- phoenix/server/api/input_types/DatasetVersionSort.py +16 -0
- phoenix/server/api/input_types/DeleteAnnotationsInput.py +7 -0
- phoenix/server/api/input_types/DeleteDatasetExamplesInput.py +13 -0
- phoenix/server/api/input_types/DeleteDatasetInput.py +7 -0
- phoenix/server/api/input_types/DeleteExperimentsInput.py +7 -0
- phoenix/server/api/input_types/DimensionFilter.py +4 -4
- phoenix/server/api/input_types/GenerativeModelInput.py +17 -0
- phoenix/server/api/input_types/Granularity.py +1 -1
- phoenix/server/api/input_types/InvocationParameters.py +162 -0
- phoenix/server/api/input_types/PatchAnnotationInput.py +19 -0
- phoenix/server/api/input_types/PatchDatasetExamplesInput.py +35 -0
- phoenix/server/api/input_types/PatchDatasetInput.py +14 -0
- phoenix/server/api/input_types/PerformanceMetricInput.py +5 -2
- phoenix/server/api/input_types/ProjectSessionSort.py +29 -0
- phoenix/server/api/input_types/SpanAnnotationSort.py +17 -0
- phoenix/server/api/input_types/SpanSort.py +134 -69
- phoenix/server/api/input_types/TemplateOptions.py +10 -0
- phoenix/server/api/input_types/TraceAnnotationSort.py +17 -0
- phoenix/server/api/input_types/UserRoleInput.py +9 -0
- phoenix/server/api/mutations/__init__.py +28 -0
- phoenix/server/api/mutations/api_key_mutations.py +167 -0
- phoenix/server/api/mutations/chat_mutations.py +593 -0
- phoenix/server/api/mutations/dataset_mutations.py +591 -0
- phoenix/server/api/mutations/experiment_mutations.py +75 -0
- phoenix/server/api/{types/ExportEventsMutation.py → mutations/export_events_mutations.py} +21 -18
- phoenix/server/api/mutations/project_mutations.py +57 -0
- phoenix/server/api/mutations/span_annotations_mutations.py +128 -0
- phoenix/server/api/mutations/trace_annotations_mutations.py +127 -0
- phoenix/server/api/mutations/user_mutations.py +329 -0
- phoenix/server/api/openapi/__init__.py +0 -0
- phoenix/server/api/openapi/main.py +17 -0
- phoenix/server/api/openapi/schema.py +16 -0
- phoenix/server/api/queries.py +738 -0
- phoenix/server/api/routers/__init__.py +11 -0
- phoenix/server/api/routers/auth.py +284 -0
- phoenix/server/api/routers/embeddings.py +26 -0
- phoenix/server/api/routers/oauth2.py +488 -0
- phoenix/server/api/routers/v1/__init__.py +64 -0
- phoenix/server/api/routers/v1/datasets.py +1017 -0
- phoenix/server/api/routers/v1/evaluations.py +362 -0
- phoenix/server/api/routers/v1/experiment_evaluations.py +115 -0
- phoenix/server/api/routers/v1/experiment_runs.py +167 -0
- phoenix/server/api/routers/v1/experiments.py +308 -0
- phoenix/server/api/routers/v1/pydantic_compat.py +78 -0
- phoenix/server/api/routers/v1/spans.py +267 -0
- phoenix/server/api/routers/v1/traces.py +208 -0
- phoenix/server/api/routers/v1/utils.py +95 -0
- phoenix/server/api/schema.py +44 -241
- phoenix/server/api/subscriptions.py +597 -0
- phoenix/server/api/types/Annotation.py +21 -0
- phoenix/server/api/types/AnnotationSummary.py +55 -0
- phoenix/server/api/types/AnnotatorKind.py +16 -0
- phoenix/server/api/types/ApiKey.py +27 -0
- phoenix/server/api/types/AuthMethod.py +9 -0
- phoenix/server/api/types/ChatCompletionMessageRole.py +11 -0
- phoenix/server/api/types/ChatCompletionSubscriptionPayload.py +46 -0
- phoenix/server/api/types/Cluster.py +25 -24
- phoenix/server/api/types/CreateDatasetPayload.py +8 -0
- phoenix/server/api/types/DataQualityMetric.py +31 -13
- phoenix/server/api/types/Dataset.py +288 -63
- phoenix/server/api/types/DatasetExample.py +85 -0
- phoenix/server/api/types/DatasetExampleRevision.py +34 -0
- phoenix/server/api/types/DatasetVersion.py +14 -0
- phoenix/server/api/types/Dimension.py +32 -31
- phoenix/server/api/types/DocumentEvaluationSummary.py +9 -8
- phoenix/server/api/types/EmbeddingDimension.py +56 -49
- phoenix/server/api/types/Evaluation.py +25 -31
- phoenix/server/api/types/EvaluationSummary.py +30 -50
- phoenix/server/api/types/Event.py +20 -20
- phoenix/server/api/types/ExampleRevisionInterface.py +14 -0
- phoenix/server/api/types/Experiment.py +152 -0
- phoenix/server/api/types/ExperimentAnnotationSummary.py +13 -0
- phoenix/server/api/types/ExperimentComparison.py +17 -0
- phoenix/server/api/types/ExperimentRun.py +119 -0
- phoenix/server/api/types/ExperimentRunAnnotation.py +56 -0
- phoenix/server/api/types/GenerativeModel.py +9 -0
- phoenix/server/api/types/GenerativeProvider.py +85 -0
- phoenix/server/api/types/Inferences.py +80 -0
- phoenix/server/api/types/InferencesRole.py +23 -0
- phoenix/server/api/types/LabelFraction.py +7 -0
- phoenix/server/api/types/MimeType.py +2 -2
- phoenix/server/api/types/Model.py +54 -54
- phoenix/server/api/types/PerformanceMetric.py +8 -5
- phoenix/server/api/types/Project.py +407 -142
- phoenix/server/api/types/ProjectSession.py +139 -0
- phoenix/server/api/types/Segments.py +4 -4
- phoenix/server/api/types/Span.py +221 -176
- phoenix/server/api/types/SpanAnnotation.py +43 -0
- phoenix/server/api/types/SpanIOValue.py +15 -0
- phoenix/server/api/types/SystemApiKey.py +9 -0
- phoenix/server/api/types/TemplateLanguage.py +10 -0
- phoenix/server/api/types/TimeSeries.py +19 -15
- phoenix/server/api/types/TokenUsage.py +11 -0
- phoenix/server/api/types/Trace.py +154 -0
- phoenix/server/api/types/TraceAnnotation.py +45 -0
- phoenix/server/api/types/UMAPPoints.py +7 -7
- phoenix/server/api/types/User.py +60 -0
- phoenix/server/api/types/UserApiKey.py +45 -0
- phoenix/server/api/types/UserRole.py +15 -0
- phoenix/server/api/types/node.py +4 -112
- phoenix/server/api/types/pagination.py +156 -57
- phoenix/server/api/utils.py +34 -0
- phoenix/server/app.py +864 -115
- phoenix/server/bearer_auth.py +163 -0
- phoenix/server/dml_event.py +136 -0
- phoenix/server/dml_event_handler.py +256 -0
- phoenix/server/email/__init__.py +0 -0
- phoenix/server/email/sender.py +97 -0
- phoenix/server/email/templates/__init__.py +0 -0
- phoenix/server/email/templates/password_reset.html +19 -0
- phoenix/server/email/types.py +11 -0
- phoenix/server/grpc_server.py +102 -0
- phoenix/server/jwt_store.py +505 -0
- phoenix/server/main.py +305 -116
- phoenix/server/oauth2.py +52 -0
- phoenix/server/openapi/__init__.py +0 -0
- phoenix/server/prometheus.py +111 -0
- phoenix/server/rate_limiters.py +188 -0
- phoenix/server/static/.vite/manifest.json +87 -0
- phoenix/server/static/assets/components-Cy9nwIvF.js +2125 -0
- phoenix/server/static/assets/index-BKvHIxkk.js +113 -0
- phoenix/server/static/assets/pages-CUi2xCVQ.js +4449 -0
- phoenix/server/static/assets/vendor-DvC8cT4X.js +894 -0
- phoenix/server/static/assets/vendor-DxkFTwjz.css +1 -0
- phoenix/server/static/assets/vendor-arizeai-Do1793cv.js +662 -0
- phoenix/server/static/assets/vendor-codemirror-BzwZPyJM.js +24 -0
- phoenix/server/static/assets/vendor-recharts-_Jb7JjhG.js +59 -0
- phoenix/server/static/assets/vendor-shiki-Cl9QBraO.js +5 -0
- phoenix/server/static/assets/vendor-three-DwGkEfCM.js +2998 -0
- phoenix/server/telemetry.py +68 -0
- phoenix/server/templates/index.html +82 -23
- phoenix/server/thread_server.py +3 -3
- phoenix/server/types.py +275 -0
- phoenix/services.py +27 -18
- phoenix/session/client.py +743 -68
- phoenix/session/data_extractor.py +31 -7
- phoenix/session/evaluation.py +3 -9
- phoenix/session/session.py +263 -219
- phoenix/settings.py +22 -0
- phoenix/trace/__init__.py +2 -22
- phoenix/trace/attributes.py +338 -0
- phoenix/trace/dsl/README.md +116 -0
- phoenix/trace/dsl/filter.py +663 -213
- phoenix/trace/dsl/helpers.py +73 -21
- phoenix/trace/dsl/query.py +574 -201
- phoenix/trace/exporter.py +24 -19
- phoenix/trace/fixtures.py +368 -32
- phoenix/trace/otel.py +71 -219
- phoenix/trace/projects.py +3 -2
- phoenix/trace/schemas.py +33 -11
- phoenix/trace/span_evaluations.py +21 -16
- phoenix/trace/span_json_decoder.py +6 -4
- phoenix/trace/span_json_encoder.py +2 -2
- phoenix/trace/trace_dataset.py +47 -32
- phoenix/trace/utils.py +21 -4
- phoenix/utilities/__init__.py +0 -26
- phoenix/utilities/client.py +132 -0
- phoenix/utilities/deprecation.py +31 -0
- phoenix/utilities/error_handling.py +3 -2
- phoenix/utilities/json.py +109 -0
- phoenix/utilities/logging.py +8 -0
- phoenix/utilities/project.py +2 -2
- phoenix/utilities/re.py +49 -0
- phoenix/utilities/span_store.py +0 -23
- phoenix/utilities/template_formatters.py +99 -0
- phoenix/version.py +1 -1
- arize_phoenix-3.16.1.dist-info/METADATA +0 -495
- arize_phoenix-3.16.1.dist-info/RECORD +0 -178
- phoenix/core/project.py +0 -619
- phoenix/core/traces.py +0 -96
- phoenix/experimental/evals/__init__.py +0 -73
- phoenix/experimental/evals/evaluators.py +0 -413
- phoenix/experimental/evals/functions/__init__.py +0 -4
- phoenix/experimental/evals/functions/classify.py +0 -453
- phoenix/experimental/evals/functions/executor.py +0 -353
- phoenix/experimental/evals/functions/generate.py +0 -138
- phoenix/experimental/evals/functions/processing.py +0 -76
- phoenix/experimental/evals/models/__init__.py +0 -14
- phoenix/experimental/evals/models/anthropic.py +0 -175
- phoenix/experimental/evals/models/base.py +0 -170
- phoenix/experimental/evals/models/bedrock.py +0 -221
- phoenix/experimental/evals/models/litellm.py +0 -134
- phoenix/experimental/evals/models/openai.py +0 -448
- phoenix/experimental/evals/models/rate_limiters.py +0 -246
- phoenix/experimental/evals/models/vertex.py +0 -173
- phoenix/experimental/evals/models/vertexai.py +0 -186
- phoenix/experimental/evals/retrievals.py +0 -96
- phoenix/experimental/evals/templates/__init__.py +0 -50
- phoenix/experimental/evals/templates/default_templates.py +0 -472
- phoenix/experimental/evals/templates/template.py +0 -195
- phoenix/experimental/evals/utils/__init__.py +0 -172
- phoenix/experimental/evals/utils/threads.py +0 -27
- phoenix/server/api/helpers.py +0 -11
- phoenix/server/api/routers/evaluation_handler.py +0 -109
- phoenix/server/api/routers/span_handler.py +0 -70
- phoenix/server/api/routers/trace_handler.py +0 -60
- phoenix/server/api/types/DatasetRole.py +0 -23
- phoenix/server/static/index.css +0 -6
- phoenix/server/static/index.js +0 -7447
- phoenix/storage/span_store/__init__.py +0 -23
- phoenix/storage/span_store/text_file.py +0 -85
- phoenix/trace/dsl/missing.py +0 -60
- phoenix/trace/langchain/__init__.py +0 -3
- phoenix/trace/langchain/instrumentor.py +0 -35
- phoenix/trace/llama_index/__init__.py +0 -3
- phoenix/trace/llama_index/callback.py +0 -102
- phoenix/trace/openai/__init__.py +0 -3
- phoenix/trace/openai/instrumentor.py +0 -30
- {arize_phoenix-3.16.1.dist-info → arize_phoenix-7.7.1.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-3.16.1.dist-info → arize_phoenix-7.7.1.dist-info}/licenses/LICENSE +0 -0
- /phoenix/{datasets → db/insertion}/__init__.py +0 -0
- /phoenix/{experimental → db/migrations}/__init__.py +0 -0
- /phoenix/{storage → db/migrations/data_migration_scripts}/__init__.py +0 -0
|
@@ -0,0 +1,208 @@
|
|
|
1
|
+
import gzip
|
|
2
|
+
import zlib
|
|
3
|
+
from typing import Any, Literal, Optional
|
|
4
|
+
|
|
5
|
+
from fastapi import APIRouter, BackgroundTasks, Header, HTTPException, Query
|
|
6
|
+
from google.protobuf.json_format import MessageToJson
|
|
7
|
+
from google.protobuf.message import DecodeError
|
|
8
|
+
from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import (
|
|
9
|
+
ExportTraceServiceRequest,
|
|
10
|
+
ExportTraceServiceResponse,
|
|
11
|
+
)
|
|
12
|
+
from pydantic import Field
|
|
13
|
+
from sqlalchemy import select
|
|
14
|
+
from starlette.concurrency import run_in_threadpool
|
|
15
|
+
from starlette.datastructures import State
|
|
16
|
+
from starlette.requests import Request
|
|
17
|
+
from starlette.responses import JSONResponse
|
|
18
|
+
from starlette.status import (
|
|
19
|
+
HTTP_404_NOT_FOUND,
|
|
20
|
+
HTTP_415_UNSUPPORTED_MEDIA_TYPE,
|
|
21
|
+
HTTP_422_UNPROCESSABLE_ENTITY,
|
|
22
|
+
)
|
|
23
|
+
from strawberry.relay import GlobalID
|
|
24
|
+
|
|
25
|
+
from phoenix.db import models
|
|
26
|
+
from phoenix.db.helpers import SupportedSQLDialect
|
|
27
|
+
from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
|
|
28
|
+
from phoenix.db.insertion.types import Precursors
|
|
29
|
+
from phoenix.server.dml_event import TraceAnnotationInsertEvent
|
|
30
|
+
from phoenix.trace.otel import decode_otlp_span
|
|
31
|
+
from phoenix.utilities.project import get_project_name
|
|
32
|
+
|
|
33
|
+
from .pydantic_compat import V1RoutesBaseModel
|
|
34
|
+
from .utils import RequestBody, ResponseBody, add_errors_to_responses
|
|
35
|
+
|
|
36
|
+
# Shared router for the OTLP trace-ingestion and trace-annotation endpoints;
# every route registered below is grouped under the "traces" tag in the
# generated OpenAPI schema.
router = APIRouter(tags=["traces"])
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@router.post(
    "/traces",
    operation_id="addTraces",
    summary="Send traces",
    responses=add_errors_to_responses(
        [
            {
                "status_code": HTTP_415_UNSUPPORTED_MEDIA_TYPE,
                "description": (
                    "Unsupported content type (only `application/x-protobuf` is supported)"
                ),
            },
            {"status_code": HTTP_422_UNPROCESSABLE_ENTITY, "description": "Invalid request body"},
        ]
    ),
    openapi_extra={
        "requestBody": {
            "required": True,
            "content": {
                "application/x-protobuf": {"schema": {"type": "string", "format": "binary"}}
            },
        }
    },
    include_in_schema=False,
)
async def post_traces(
    request: Request,
    background_tasks: BackgroundTasks,
    content_type: Optional[str] = Header(default=None),
    content_encoding: Optional[str] = Header(default=None),
) -> JSONResponse:
    """
    Accept an OTLP `ExportTraceServiceRequest` protobuf payload and schedule
    its spans for insertion via a background task.

    Raises:
        HTTPException: 415 when the content type is not
            `application/x-protobuf` or the content encoding is neither gzip
            nor deflate; 422 when the body cannot be parsed as an
            `ExportTraceServiceRequest`.
    """
    if content_type != "application/x-protobuf":
        raise HTTPException(
            detail=f"Unsupported content type: {content_type}",
            status_code=HTTP_415_UNSUPPORTED_MEDIA_TYPE,
        )
    # Map each supported Content-Encoding to its (blocking) decompressor.
    decompressors = {"gzip": gzip.decompress, "deflate": zlib.decompress}
    if content_encoding and content_encoding not in decompressors:
        raise HTTPException(
            detail=f"Unsupported content encoding: {content_encoding}",
            status_code=HTTP_415_UNSUPPORTED_MEDIA_TYPE,
        )
    payload = await request.body()
    if content_encoding:
        # Decompression is CPU-bound, so run it off the event loop.
        payload = await run_in_threadpool(decompressors[content_encoding], payload)
    otlp_request = ExportTraceServiceRequest()
    try:
        # Protobuf parsing is also blocking work; keep the event loop free.
        await run_in_threadpool(otlp_request.ParseFromString, payload)
    except DecodeError:
        raise HTTPException(
            detail="Request body is invalid ExportTraceServiceRequest",
            status_code=HTTP_422_UNPROCESSABLE_ENTITY,
        )
    # Respond immediately; span decoding/insertion happens after the response.
    background_tasks.add_task(_add_spans, otlp_request, request.state)
    return JSONResponse(MessageToJson(ExportTraceServiceResponse()))
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
# Optional result payload of a trace annotation (label and/or score, with an
# optional free-text explanation).
# NOTE: deliberately no class docstring -- per the convention in this package's
# utils module, a docstring would surface as the model's description in the
# generated OpenAPI schema.
class TraceAnnotationResult(V1RoutesBaseModel):
    label: Optional[str] = Field(default=None, description="The label assigned by the annotation")
    score: Optional[float] = Field(default=None, description="The score assigned by the annotation")
    explanation: Optional[str] = Field(
        default=None, description="Explanation of the annotation result"
    )
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
# Request-side model for a single trace annotation to upsert.  The field
# declarations below are part of the wire format and must not change.
class TraceAnnotation(V1RoutesBaseModel):
    trace_id: str = Field(description="OpenTelemetry Trace ID (hex format w/o 0x prefix)")
    name: str = Field(description="The name of the annotation")
    annotator_kind: Literal["LLM", "HUMAN"] = Field(
        description="The kind of annotator used for the annotation"
    )
    result: Optional[TraceAnnotationResult] = Field(
        default=None, description="The result of the annotation"
    )
    metadata: Optional[dict[str, Any]] = Field(
        default=None, description="Metadata for the annotation"
    )

    def as_precursor(self) -> Precursors.TraceAnnotation:
        """Convert this API model into a database-insertion precursor."""
        result = self.result
        # Flatten the optional result payload into individual columns.
        score = result.score if result else None
        label = result.label if result else None
        explanation = result.explanation if result else None
        row = models.TraceAnnotation(
            name=self.name,
            annotator_kind=self.annotator_kind,
            score=score,
            label=label,
            explanation=explanation,
            metadata_=self.metadata or {},
        )
        return Precursors.TraceAnnotation(self.trace_id, row)
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
# Request body wrapper for the POST /trace_annotations endpoint.
class AnnotateTracesRequestBody(RequestBody[list[TraceAnnotation]]):
    data: list[TraceAnnotation] = Field(description="The trace annotations to be upserted")
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
# Response item: the relay GlobalID of one upserted trace annotation.
class InsertedTraceAnnotation(V1RoutesBaseModel):
    id: str = Field(description="The ID of the inserted trace annotation")
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
# Response body wrapper for the POST /trace_annotations endpoint.
class AnnotateTracesResponseBody(ResponseBody[list[InsertedTraceAnnotation]]):
    pass
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
@router.post(
    "/trace_annotations",
    operation_id="annotateTraces",
    summary="Create or update trace annotations",
    responses=add_errors_to_responses(
        [{"status_code": HTTP_404_NOT_FOUND, "description": "Trace not found"}]
    ),
    include_in_schema=False,
)
async def annotate_traces(
    request: Request,
    request_body: AnnotateTracesRequestBody,
    sync: bool = Query(default=True, description="If true, fulfill request synchronously."),
) -> AnnotateTracesResponseBody:
    """
    Upsert trace annotations.

    When ``sync`` is false, the annotations are enqueued for background
    insertion and an empty payload is returned immediately.  Otherwise they
    are inserted within this request and the inserted IDs are returned.

    Raises:
        HTTPException: 404 (synchronous mode only) when any referenced
            trace ID does not exist.
    """
    if not request_body.data:
        # Nothing to upsert.
        return AnnotateTracesResponseBody(data=[])
    precursors = [d.as_precursor() for d in request_body.data]
    if not sync:
        # Fire-and-forget: enqueue for background insertion; inserted IDs
        # are not reported in this mode.
        await request.state.enqueue(*precursors)
        return AnnotateTracesResponseBody(data=[])

    trace_ids = {p.trace_id for p in precursors}
    async with request.app.state.db() as session:
        # Map the requested OTLP trace IDs to their database row IDs.
        existing_traces = {
            trace.trace_id: trace.id
            async for trace in await session.stream_scalars(
                select(models.Trace).filter(models.Trace.trace_id.in_(trace_ids))
            )
        }

        missing_trace_ids = trace_ids - set(existing_traces.keys())
        if missing_trace_ids:
            raise HTTPException(
                detail=f"Traces with IDs {', '.join(missing_trace_ids)} do not exist.",
                status_code=HTTP_404_NOT_FOUND,
            )
        inserted_ids = []
        dialect = SupportedSQLDialect(session.bind.dialect.name)
        for p in precursors:
            values = dict(as_kv(p.as_insertable(existing_traces[p.trace_id]).row))
            # Upsert: an annotation row is unique per (name, trace_rowid).
            trace_annotation_id = await session.scalar(
                insert_on_conflict(
                    values,
                    dialect=dialect,
                    table=models.TraceAnnotation,
                    unique_by=("name", "trace_rowid"),
                ).returning(models.TraceAnnotation.id)
            )
            inserted_ids.append(trace_annotation_id)
    # Publish an insert event so other server components can react to the
    # new annotations (see TraceAnnotationInsertEvent).
    request.state.event_queue.put(TraceAnnotationInsertEvent(tuple(inserted_ids)))
    return AnnotateTracesResponseBody(
        data=[
            InsertedTraceAnnotation(id=str(GlobalID("TraceAnnotation", str(id_))))
            for id_ in inserted_ids
        ]
    )
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
async def _add_spans(req: ExportTraceServiceRequest, state: State) -> None:
    """
    Decode every OTLP span in ``req`` and queue it for bulk insertion.

    Spans are attributed to the project name carried in each resource's
    attributes; protobuf decoding runs in a worker thread to keep the event
    loop responsive.
    """
    for resource_spans in req.resource_spans:
        project_name = get_project_name(resource_spans.resource.attributes)
        flattened = (
            otlp_span
            for scope_span in resource_spans.scope_spans
            for otlp_span in scope_span.spans
        )
        for otlp_span in flattened:
            decoded = await run_in_threadpool(decode_otlp_span, otlp_span)
            await state.queue_span_for_bulk_insert(decoded, project_name)
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
from typing import Any, Generic, Optional, TypedDict, TypeVar, Union
|
|
2
|
+
|
|
3
|
+
from typing_extensions import TypeAlias, assert_never
|
|
4
|
+
|
|
5
|
+
from .pydantic_compat import V1RoutesBaseModel
|
|
6
|
+
|
|
7
|
+
# An HTTP status code, e.g. 404.
StatusCode: TypeAlias = int
# Parameterizes the payload type of the generic request/response wrappers below.
DataType = TypeVar("DataType")
Responses: TypeAlias = dict[
    Union[int, str], dict[str, Any]
]  # input type for the `responses` parameter of a fastapi route
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class StatusCodeWithDescription(TypedDict):
    """
    A duck type for a status code with a description detailing under what
    conditions the status code is raised.
    """

    status_code: StatusCode  # the HTTP status code, e.g. 404
    description: str  # when/why this status code is returned
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class RequestBody(V1RoutesBaseModel, Generic[DataType]):
    # A generic request type accepted by V1 routes.
    #
    # Don't use """ for this docstring or it will be included as a description
    # in the generated OpenAPI schema.

    # The wrapped request payload; concrete routes pin DataType via subclassing.
    data: DataType
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class ResponseBody(V1RoutesBaseModel, Generic[DataType]):
    # A generic response type returned by V1 routes.
    #
    # Don't use """ for this docstring or it will be included as a description
    # in the generated OpenAPI schema.

    # The wrapped response payload; concrete routes pin DataType via subclassing.
    data: DataType
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class PaginatedResponseBody(V1RoutesBaseModel, Generic[DataType]):
    # A generic paginated response type returned by V1 routes.
    #
    # Don't use """ for this docstring or it will be included as a description
    # in the generated OpenAPI schema.

    # One page of results.
    data: list[DataType]
    # Cursor for requesting the next page; presumably None marks the final
    # page -- confirm against the paginated route implementations.
    next_cursor: Optional[str]
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def add_errors_to_responses(
    errors: list[Union[StatusCode, StatusCodeWithDescription]],
    /,
    *,
    responses: Optional[Responses] = None,
) -> Responses:
    """
    Creates or updates a patch for an OpenAPI schema's `responses` section to
    include status codes in the generated OpenAPI schema.
    """
    patched: Responses = responses or {}
    for error in errors:
        description: Optional[str] = None
        if isinstance(error, dict):
            status_code: int = error["status_code"]
            description = error["description"]
        elif isinstance(error, StatusCode):
            status_code = error
        else:
            assert_never(error)
        # Construct a fresh default dict per status code -- sharing a single
        # instance would leak description updates across entries.
        entry = patched.setdefault(
            status_code, {"content": {"text/plain": {"schema": {"type": "string"}}}}
        )
        if description:
            entry["description"] = description
    return patched
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def add_text_csv_content_to_responses(
    status_code: StatusCode, /, *, responses: Optional[Responses] = None
) -> Responses:
    """
    Creates or updates a patch for an OpenAPI schema's `responses` section to
    ensure that the response for the given status code is marked as text/csv in
    the generated OpenAPI schema.
    """
    patched: Responses = responses or {}
    # Ensure an entry exists, then (re)declare its content as text/csv.
    entry = patched.setdefault(status_code, {})
    entry["content"] = {
        "text/csv": {"schema": {"type": "string", "contentMediaType": "text/csv"}}
    }
    return patched
|
phoenix/server/api/schema.py
CHANGED
|
@@ -1,246 +1,49 @@
|
|
|
1
|
-
from
|
|
2
|
-
from typing import
|
|
1
|
+
from itertools import chain
|
|
2
|
+
from typing import Any, Iterable, Iterator, Optional, Union
|
|
3
3
|
|
|
4
|
-
import numpy as np
|
|
5
|
-
import numpy.typing as npt
|
|
6
4
|
import strawberry
|
|
7
|
-
from strawberry import
|
|
8
|
-
from strawberry.types import
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
from phoenix.
|
|
12
|
-
from phoenix.server.api.
|
|
13
|
-
from phoenix.server.api.
|
|
14
|
-
from phoenix.server.api.
|
|
15
|
-
|
|
16
|
-
InputCoordinate3D,
|
|
17
|
-
)
|
|
18
|
-
from phoenix.server.api.types.Cluster import Cluster, to_gql_clusters
|
|
19
|
-
from phoenix.server.api.types.Project import Project
|
|
20
|
-
|
|
21
|
-
from .context import Context
|
|
22
|
-
from .types.DatasetRole import AncillaryDatasetRole, DatasetRole
|
|
23
|
-
from .types.Dimension import to_gql_dimension
|
|
24
|
-
from .types.EmbeddingDimension import (
|
|
25
|
-
DEFAULT_CLUSTER_SELECTION_EPSILON,
|
|
26
|
-
DEFAULT_MIN_CLUSTER_SIZE,
|
|
27
|
-
DEFAULT_MIN_SAMPLES,
|
|
28
|
-
to_gql_embedding_dimension,
|
|
5
|
+
from strawberry.extensions import SchemaExtension
|
|
6
|
+
from strawberry.types.base import StrawberryObjectDefinition, StrawberryType
|
|
7
|
+
|
|
8
|
+
from phoenix.server.api.exceptions import get_mask_errors_extension
|
|
9
|
+
from phoenix.server.api.mutations import Mutation
|
|
10
|
+
from phoenix.server.api.queries import Query
|
|
11
|
+
from phoenix.server.api.subscriptions import Subscription
|
|
12
|
+
from phoenix.server.api.types.ChatCompletionSubscriptionPayload import (
|
|
13
|
+
ChatCompletionSubscriptionPayload,
|
|
29
14
|
)
|
|
30
|
-
from .types.Event import create_event_id, unpack_event_id
|
|
31
|
-
from .types.ExportEventsMutation import ExportEventsMutation
|
|
32
|
-
from .types.Functionality import Functionality
|
|
33
|
-
from .types.Model import Model
|
|
34
|
-
from .types.node import GlobalID, Node, from_global_id, from_global_id_with_expected_type
|
|
35
|
-
from .types.pagination import Connection, ConnectionArgs, Cursor, connection_from_list
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
@strawberry.type
|
|
39
|
-
class Query:
|
|
40
|
-
@strawberry.field
|
|
41
|
-
def projects(
|
|
42
|
-
self,
|
|
43
|
-
info: Info[Context, None],
|
|
44
|
-
first: Optional[int] = 50,
|
|
45
|
-
last: Optional[int] = UNSET,
|
|
46
|
-
after: Optional[Cursor] = UNSET,
|
|
47
|
-
before: Optional[Cursor] = UNSET,
|
|
48
|
-
) -> Connection[Project]:
|
|
49
|
-
args = ConnectionArgs(
|
|
50
|
-
first=first,
|
|
51
|
-
after=after if isinstance(after, Cursor) else None,
|
|
52
|
-
last=last,
|
|
53
|
-
before=before if isinstance(before, Cursor) else None,
|
|
54
|
-
)
|
|
55
|
-
data = (
|
|
56
|
-
[]
|
|
57
|
-
if (traces := info.context.traces) is None
|
|
58
|
-
else [
|
|
59
|
-
Project(id_attr=project_id, name=project_name, project=project)
|
|
60
|
-
for project_id, project_name, project in traces.get_projects()
|
|
61
|
-
]
|
|
62
|
-
)
|
|
63
|
-
return connection_from_list(data=data, args=args)
|
|
64
|
-
|
|
65
|
-
@strawberry.field
|
|
66
|
-
def functionality(self, info: Info[Context, None]) -> "Functionality":
|
|
67
|
-
has_model_inferences = not info.context.model.is_empty
|
|
68
|
-
has_traces = info.context.traces is not None
|
|
69
|
-
return Functionality(
|
|
70
|
-
model_inferences=has_model_inferences,
|
|
71
|
-
tracing=has_traces,
|
|
72
|
-
)
|
|
73
|
-
|
|
74
|
-
@strawberry.field
|
|
75
|
-
def model(self) -> Model:
|
|
76
|
-
return Model()
|
|
77
|
-
|
|
78
|
-
@strawberry.field
|
|
79
|
-
def node(self, id: GlobalID, info: Info[Context, None]) -> Node:
|
|
80
|
-
type_name, node_id = from_global_id(str(id))
|
|
81
|
-
if type_name == "Dimension":
|
|
82
|
-
dimension = info.context.model.scalar_dimensions[node_id]
|
|
83
|
-
return to_gql_dimension(node_id, dimension)
|
|
84
|
-
elif type_name == "EmbeddingDimension":
|
|
85
|
-
embedding_dimension = info.context.model.embedding_dimensions[node_id]
|
|
86
|
-
return to_gql_embedding_dimension(node_id, embedding_dimension)
|
|
87
|
-
elif type_name == "Project":
|
|
88
|
-
if (traces := info.context.traces) is not None:
|
|
89
|
-
projects = {
|
|
90
|
-
project_id: (project_name, project)
|
|
91
|
-
for project_id, project_name, project in traces.get_projects()
|
|
92
|
-
}
|
|
93
|
-
if node_id in projects:
|
|
94
|
-
name, project = projects[node_id]
|
|
95
|
-
return Project(id_attr=node_id, name=name, project=project)
|
|
96
|
-
raise Exception(f"Unknown project: {id}")
|
|
97
|
-
|
|
98
|
-
raise Exception(f"Unknown node type: {type}")
|
|
99
|
-
|
|
100
|
-
@strawberry.field
|
|
101
|
-
def clusters(
|
|
102
|
-
self,
|
|
103
|
-
clusters: List[ClusterInput],
|
|
104
|
-
) -> List[Cluster]:
|
|
105
|
-
clustered_events: Dict[str, Set[ID]] = defaultdict(set)
|
|
106
|
-
for i, cluster in enumerate(clusters):
|
|
107
|
-
clustered_events[cluster.id or str(i)].update(cluster.event_ids)
|
|
108
|
-
return to_gql_clusters(
|
|
109
|
-
clustered_events=clustered_events,
|
|
110
|
-
)
|
|
111
|
-
|
|
112
|
-
@strawberry.field
|
|
113
|
-
def hdbscan_clustering(
|
|
114
|
-
self,
|
|
115
|
-
info: Info[Context, None],
|
|
116
|
-
event_ids: Annotated[
|
|
117
|
-
List[ID],
|
|
118
|
-
strawberry.argument(
|
|
119
|
-
description="Event ID of the coordinates",
|
|
120
|
-
),
|
|
121
|
-
],
|
|
122
|
-
coordinates_2d: Annotated[
|
|
123
|
-
Optional[List[InputCoordinate2D]],
|
|
124
|
-
strawberry.argument(
|
|
125
|
-
description="Point coordinates. Must be either 2D or 3D.",
|
|
126
|
-
),
|
|
127
|
-
] = UNSET,
|
|
128
|
-
coordinates_3d: Annotated[
|
|
129
|
-
Optional[List[InputCoordinate3D]],
|
|
130
|
-
strawberry.argument(
|
|
131
|
-
description="Point coordinates. Must be either 2D or 3D.",
|
|
132
|
-
),
|
|
133
|
-
] = UNSET,
|
|
134
|
-
min_cluster_size: Annotated[
|
|
135
|
-
int,
|
|
136
|
-
strawberry.argument(
|
|
137
|
-
description="HDBSCAN minimum cluster size",
|
|
138
|
-
),
|
|
139
|
-
] = DEFAULT_MIN_CLUSTER_SIZE,
|
|
140
|
-
cluster_min_samples: Annotated[
|
|
141
|
-
int,
|
|
142
|
-
strawberry.argument(
|
|
143
|
-
description="HDBSCAN minimum samples",
|
|
144
|
-
),
|
|
145
|
-
] = DEFAULT_MIN_SAMPLES,
|
|
146
|
-
cluster_selection_epsilon: Annotated[
|
|
147
|
-
float,
|
|
148
|
-
strawberry.argument(
|
|
149
|
-
description="HDBSCAN cluster selection epsilon",
|
|
150
|
-
),
|
|
151
|
-
] = DEFAULT_CLUSTER_SELECTION_EPSILON,
|
|
152
|
-
) -> List[Cluster]:
|
|
153
|
-
coordinates_3d = ensure_list(coordinates_3d)
|
|
154
|
-
coordinates_2d = ensure_list(coordinates_2d)
|
|
155
|
-
|
|
156
|
-
if len(coordinates_3d) > 0 and len(coordinates_2d) > 0:
|
|
157
|
-
raise ValueError("must specify only one of 2D or 3D coordinates")
|
|
158
|
-
|
|
159
|
-
if len(coordinates_3d) > 0:
|
|
160
|
-
coordinates = list(
|
|
161
|
-
map(
|
|
162
|
-
lambda coord: np.array(
|
|
163
|
-
[coord.x, coord.y, coord.z],
|
|
164
|
-
),
|
|
165
|
-
coordinates_3d,
|
|
166
|
-
)
|
|
167
|
-
)
|
|
168
|
-
else:
|
|
169
|
-
coordinates = list(
|
|
170
|
-
map(
|
|
171
|
-
lambda coord: np.array(
|
|
172
|
-
[coord.x, coord.y],
|
|
173
|
-
),
|
|
174
|
-
coordinates_2d,
|
|
175
|
-
)
|
|
176
|
-
)
|
|
177
|
-
|
|
178
|
-
if len(event_ids) != len(coordinates):
|
|
179
|
-
raise ValueError(
|
|
180
|
-
f"length mismatch between "
|
|
181
|
-
f"event_ids ({len(event_ids)}) "
|
|
182
|
-
f"and coordinates ({len(coordinates)})"
|
|
183
|
-
)
|
|
184
|
-
|
|
185
|
-
if len(event_ids) == 0:
|
|
186
|
-
return []
|
|
187
|
-
|
|
188
|
-
grouped_event_ids: Dict[
|
|
189
|
-
Union[DatasetRole, AncillaryDatasetRole],
|
|
190
|
-
List[ID],
|
|
191
|
-
] = defaultdict(list)
|
|
192
|
-
grouped_coordinates: Dict[
|
|
193
|
-
Union[DatasetRole, AncillaryDatasetRole],
|
|
194
|
-
List[npt.NDArray[np.float64]],
|
|
195
|
-
] = defaultdict(list)
|
|
196
|
-
|
|
197
|
-
for event_id, coordinate in zip(event_ids, coordinates):
|
|
198
|
-
row_id, dataset_role = unpack_event_id(event_id)
|
|
199
|
-
grouped_coordinates[dataset_role].append(coordinate)
|
|
200
|
-
grouped_event_ids[dataset_role].append(create_event_id(row_id, dataset_role))
|
|
201
|
-
|
|
202
|
-
stacked_event_ids = (
|
|
203
|
-
grouped_event_ids[DatasetRole.primary]
|
|
204
|
-
+ grouped_event_ids[DatasetRole.reference]
|
|
205
|
-
+ grouped_event_ids[AncillaryDatasetRole.corpus]
|
|
206
|
-
)
|
|
207
|
-
stacked_coordinates = np.stack(
|
|
208
|
-
grouped_coordinates[DatasetRole.primary]
|
|
209
|
-
+ grouped_coordinates[DatasetRole.reference]
|
|
210
|
-
+ grouped_coordinates[AncillaryDatasetRole.corpus]
|
|
211
|
-
)
|
|
212
|
-
|
|
213
|
-
clusters = Hdbscan(
|
|
214
|
-
min_cluster_size=min_cluster_size,
|
|
215
|
-
min_samples=cluster_min_samples,
|
|
216
|
-
cluster_selection_epsilon=cluster_selection_epsilon,
|
|
217
|
-
).find_clusters(stacked_coordinates)
|
|
218
|
-
|
|
219
|
-
clustered_events = {
|
|
220
|
-
str(i): {stacked_event_ids[row_idx] for row_idx in cluster}
|
|
221
|
-
for i, cluster in enumerate(clusters)
|
|
222
|
-
}
|
|
223
|
-
|
|
224
|
-
return to_gql_clusters(
|
|
225
|
-
clustered_events=clustered_events,
|
|
226
|
-
)
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
@strawberry.type
|
|
230
|
-
class Mutation(ExportEventsMutation):
|
|
231
|
-
@strawberry.mutation
|
|
232
|
-
def delete_project(self, info: Info[Context, None], id: GlobalID) -> Query:
|
|
233
|
-
if (traces := info.context.traces) is not None:
|
|
234
|
-
node_id = from_global_id_with_expected_type(str(id), "Project")
|
|
235
|
-
traces.archive_project(node_id)
|
|
236
|
-
return Query()
|
|
237
|
-
|
|
238
|
-
@strawberry.mutation
|
|
239
|
-
def archive_project(self, info: Info[Context, None], id: GlobalID) -> Query:
|
|
240
|
-
if (traces := info.context.traces) is not None:
|
|
241
|
-
node_id = from_global_id_with_expected_type(str(id), "Project")
|
|
242
|
-
traces.archive_project(node_id)
|
|
243
|
-
return Query()
|
|
244
15
|
|
|
245
16
|
|
|
246
|
-
|
|
17
|
+
def build_graphql_schema(
|
|
18
|
+
extensions: Optional[Iterable[Union[type[SchemaExtension], SchemaExtension]]] = None,
|
|
19
|
+
) -> strawberry.Schema:
|
|
20
|
+
"""
|
|
21
|
+
Builds a strawberry schema.
|
|
22
|
+
"""
|
|
23
|
+
return strawberry.Schema(
|
|
24
|
+
query=Query,
|
|
25
|
+
mutation=Mutation,
|
|
26
|
+
extensions=list(chain(extensions or [], [get_mask_errors_extension()])),
|
|
27
|
+
subscription=Subscription,
|
|
28
|
+
types=_implementing_types(ChatCompletionSubscriptionPayload),
|
|
29
|
+
)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _implementing_types(interface: Any) -> Iterator[StrawberryType]:
|
|
33
|
+
"""
|
|
34
|
+
Iterates over strawberry types implementing the given strawberry interface.
|
|
35
|
+
"""
|
|
36
|
+
assert isinstance(
|
|
37
|
+
strawberry_definition := getattr(interface, "__strawberry_definition__", None),
|
|
38
|
+
StrawberryObjectDefinition,
|
|
39
|
+
)
|
|
40
|
+
assert strawberry_definition.is_interface
|
|
41
|
+
for subcls in interface.__subclasses__():
|
|
42
|
+
if isinstance(
|
|
43
|
+
getattr(subcls, "__strawberry_definition__", None),
|
|
44
|
+
StrawberryObjectDefinition,
|
|
45
|
+
):
|
|
46
|
+
yield subcls
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
_EXPORTED_GRAPHQL_SCHEMA = build_graphql_schema() # used to export the GraphQL schema to file
|