arize-phoenix 10.0.4__py3-none-any.whl → 12.28.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/METADATA +124 -72
- arize_phoenix-12.28.1.dist-info/RECORD +499 -0
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/WHEEL +1 -1
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/licenses/IP_NOTICE +1 -1
- phoenix/__generated__/__init__.py +0 -0
- phoenix/__generated__/classification_evaluator_configs/__init__.py +20 -0
- phoenix/__generated__/classification_evaluator_configs/_document_relevance_classification_evaluator_config.py +17 -0
- phoenix/__generated__/classification_evaluator_configs/_hallucination_classification_evaluator_config.py +17 -0
- phoenix/__generated__/classification_evaluator_configs/_models.py +18 -0
- phoenix/__generated__/classification_evaluator_configs/_tool_selection_classification_evaluator_config.py +17 -0
- phoenix/__init__.py +5 -4
- phoenix/auth.py +39 -2
- phoenix/config.py +1763 -91
- phoenix/datetime_utils.py +120 -2
- phoenix/db/README.md +595 -25
- phoenix/db/bulk_inserter.py +145 -103
- phoenix/db/engines.py +140 -33
- phoenix/db/enums.py +3 -12
- phoenix/db/facilitator.py +302 -35
- phoenix/db/helpers.py +1000 -65
- phoenix/db/iam_auth.py +64 -0
- phoenix/db/insertion/dataset.py +135 -2
- phoenix/db/insertion/document_annotation.py +9 -6
- phoenix/db/insertion/evaluation.py +2 -3
- phoenix/db/insertion/helpers.py +17 -2
- phoenix/db/insertion/session_annotation.py +176 -0
- phoenix/db/insertion/span.py +15 -11
- phoenix/db/insertion/span_annotation.py +3 -4
- phoenix/db/insertion/trace_annotation.py +3 -4
- phoenix/db/insertion/types.py +50 -20
- phoenix/db/migrations/versions/01a8342c9cdf_add_user_id_on_datasets.py +40 -0
- phoenix/db/migrations/versions/0df286449799_add_session_annotations_table.py +105 -0
- phoenix/db/migrations/versions/272b66ff50f8_drop_single_indices.py +119 -0
- phoenix/db/migrations/versions/58228d933c91_dataset_labels.py +67 -0
- phoenix/db/migrations/versions/699f655af132_experiment_tags.py +57 -0
- phoenix/db/migrations/versions/735d3d93c33e_add_composite_indices.py +41 -0
- phoenix/db/migrations/versions/a20694b15f82_cost.py +196 -0
- phoenix/db/migrations/versions/ab513d89518b_add_user_id_on_dataset_versions.py +40 -0
- phoenix/db/migrations/versions/d0690a79ea51_users_on_experiments.py +40 -0
- phoenix/db/migrations/versions/deb2c81c0bb2_dataset_splits.py +139 -0
- phoenix/db/migrations/versions/e76cbd66ffc3_add_experiments_dataset_examples.py +87 -0
- phoenix/db/models.py +669 -56
- phoenix/db/pg_config.py +10 -0
- phoenix/db/types/model_provider.py +4 -0
- phoenix/db/types/token_price_customization.py +29 -0
- phoenix/db/types/trace_retention.py +23 -15
- phoenix/experiments/evaluators/utils.py +3 -3
- phoenix/experiments/functions.py +160 -52
- phoenix/experiments/tracing.py +2 -2
- phoenix/experiments/types.py +1 -1
- phoenix/inferences/inferences.py +1 -2
- phoenix/server/api/auth.py +38 -7
- phoenix/server/api/auth_messages.py +46 -0
- phoenix/server/api/context.py +100 -4
- phoenix/server/api/dataloaders/__init__.py +79 -5
- phoenix/server/api/dataloaders/annotation_configs_by_project.py +31 -0
- phoenix/server/api/dataloaders/annotation_summaries.py +60 -8
- phoenix/server/api/dataloaders/average_experiment_repeated_run_group_latency.py +50 -0
- phoenix/server/api/dataloaders/average_experiment_run_latency.py +17 -24
- phoenix/server/api/dataloaders/cache/two_tier_cache.py +1 -2
- phoenix/server/api/dataloaders/dataset_dataset_splits.py +52 -0
- phoenix/server/api/dataloaders/dataset_example_revisions.py +0 -1
- phoenix/server/api/dataloaders/dataset_example_splits.py +40 -0
- phoenix/server/api/dataloaders/dataset_examples_and_versions_by_experiment_run.py +47 -0
- phoenix/server/api/dataloaders/dataset_labels.py +36 -0
- phoenix/server/api/dataloaders/document_evaluation_summaries.py +2 -2
- phoenix/server/api/dataloaders/document_evaluations.py +6 -9
- phoenix/server/api/dataloaders/experiment_annotation_summaries.py +88 -34
- phoenix/server/api/dataloaders/experiment_dataset_splits.py +43 -0
- phoenix/server/api/dataloaders/experiment_error_rates.py +21 -28
- phoenix/server/api/dataloaders/experiment_repeated_run_group_annotation_summaries.py +77 -0
- phoenix/server/api/dataloaders/experiment_repeated_run_groups.py +57 -0
- phoenix/server/api/dataloaders/experiment_runs_by_experiment_and_example.py +44 -0
- phoenix/server/api/dataloaders/last_used_times_by_generative_model_id.py +35 -0
- phoenix/server/api/dataloaders/latency_ms_quantile.py +40 -8
- phoenix/server/api/dataloaders/record_counts.py +37 -10
- phoenix/server/api/dataloaders/session_annotations_by_session.py +29 -0
- phoenix/server/api/dataloaders/span_cost_by_span.py +24 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_generative_model.py +56 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_project_session.py +57 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_span.py +43 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_trace.py +56 -0
- phoenix/server/api/dataloaders/span_cost_details_by_span_cost.py +27 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_experiment.py +57 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_experiment_repeated_run_group.py +64 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_experiment_run.py +58 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_generative_model.py +55 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_project.py +152 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_project_session.py +56 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_trace.py +55 -0
- phoenix/server/api/dataloaders/span_costs.py +29 -0
- phoenix/server/api/dataloaders/table_fields.py +2 -2
- phoenix/server/api/dataloaders/token_prices_by_model.py +30 -0
- phoenix/server/api/dataloaders/trace_annotations_by_trace.py +27 -0
- phoenix/server/api/dataloaders/types.py +29 -0
- phoenix/server/api/exceptions.py +11 -1
- phoenix/server/api/helpers/dataset_helpers.py +5 -1
- phoenix/server/api/helpers/playground_clients.py +1243 -292
- phoenix/server/api/helpers/playground_registry.py +2 -2
- phoenix/server/api/helpers/playground_spans.py +8 -4
- phoenix/server/api/helpers/playground_users.py +26 -0
- phoenix/server/api/helpers/prompts/conversions/aws.py +83 -0
- phoenix/server/api/helpers/prompts/conversions/google.py +103 -0
- phoenix/server/api/helpers/prompts/models.py +205 -22
- phoenix/server/api/input_types/{SpanAnnotationFilter.py → AnnotationFilter.py} +22 -14
- phoenix/server/api/input_types/ChatCompletionInput.py +6 -2
- phoenix/server/api/input_types/CreateProjectInput.py +27 -0
- phoenix/server/api/input_types/CreateProjectSessionAnnotationInput.py +37 -0
- phoenix/server/api/input_types/DatasetFilter.py +17 -0
- phoenix/server/api/input_types/ExperimentRunSort.py +237 -0
- phoenix/server/api/input_types/GenerativeCredentialInput.py +9 -0
- phoenix/server/api/input_types/GenerativeModelInput.py +5 -0
- phoenix/server/api/input_types/ProjectSessionSort.py +161 -1
- phoenix/server/api/input_types/PromptFilter.py +14 -0
- phoenix/server/api/input_types/PromptVersionInput.py +52 -1
- phoenix/server/api/input_types/SpanSort.py +44 -7
- phoenix/server/api/input_types/TimeBinConfig.py +23 -0
- phoenix/server/api/input_types/UpdateAnnotationInput.py +34 -0
- phoenix/server/api/input_types/UserRoleInput.py +1 -0
- phoenix/server/api/mutations/__init__.py +10 -0
- phoenix/server/api/mutations/annotation_config_mutations.py +8 -8
- phoenix/server/api/mutations/api_key_mutations.py +19 -23
- phoenix/server/api/mutations/chat_mutations.py +154 -47
- phoenix/server/api/mutations/dataset_label_mutations.py +243 -0
- phoenix/server/api/mutations/dataset_mutations.py +21 -16
- phoenix/server/api/mutations/dataset_split_mutations.py +351 -0
- phoenix/server/api/mutations/experiment_mutations.py +2 -2
- phoenix/server/api/mutations/export_events_mutations.py +3 -3
- phoenix/server/api/mutations/model_mutations.py +210 -0
- phoenix/server/api/mutations/project_mutations.py +49 -10
- phoenix/server/api/mutations/project_session_annotations_mutations.py +158 -0
- phoenix/server/api/mutations/project_trace_retention_policy_mutations.py +8 -4
- phoenix/server/api/mutations/prompt_label_mutations.py +74 -65
- phoenix/server/api/mutations/prompt_mutations.py +65 -129
- phoenix/server/api/mutations/prompt_version_tag_mutations.py +11 -8
- phoenix/server/api/mutations/span_annotations_mutations.py +15 -10
- phoenix/server/api/mutations/trace_annotations_mutations.py +14 -10
- phoenix/server/api/mutations/trace_mutations.py +47 -3
- phoenix/server/api/mutations/user_mutations.py +66 -41
- phoenix/server/api/queries.py +768 -293
- phoenix/server/api/routers/__init__.py +2 -2
- phoenix/server/api/routers/auth.py +154 -88
- phoenix/server/api/routers/ldap.py +229 -0
- phoenix/server/api/routers/oauth2.py +369 -106
- phoenix/server/api/routers/v1/__init__.py +24 -4
- phoenix/server/api/routers/v1/annotation_configs.py +23 -31
- phoenix/server/api/routers/v1/annotations.py +481 -17
- phoenix/server/api/routers/v1/datasets.py +395 -81
- phoenix/server/api/routers/v1/documents.py +142 -0
- phoenix/server/api/routers/v1/evaluations.py +24 -31
- phoenix/server/api/routers/v1/experiment_evaluations.py +19 -8
- phoenix/server/api/routers/v1/experiment_runs.py +337 -59
- phoenix/server/api/routers/v1/experiments.py +479 -48
- phoenix/server/api/routers/v1/models.py +7 -0
- phoenix/server/api/routers/v1/projects.py +18 -49
- phoenix/server/api/routers/v1/prompts.py +54 -40
- phoenix/server/api/routers/v1/sessions.py +108 -0
- phoenix/server/api/routers/v1/spans.py +1091 -81
- phoenix/server/api/routers/v1/traces.py +132 -78
- phoenix/server/api/routers/v1/users.py +389 -0
- phoenix/server/api/routers/v1/utils.py +3 -7
- phoenix/server/api/subscriptions.py +305 -88
- phoenix/server/api/types/Annotation.py +90 -23
- phoenix/server/api/types/ApiKey.py +13 -17
- phoenix/server/api/types/AuthMethod.py +1 -0
- phoenix/server/api/types/ChatCompletionSubscriptionPayload.py +1 -0
- phoenix/server/api/types/CostBreakdown.py +12 -0
- phoenix/server/api/types/Dataset.py +226 -72
- phoenix/server/api/types/DatasetExample.py +88 -18
- phoenix/server/api/types/DatasetExperimentAnnotationSummary.py +10 -0
- phoenix/server/api/types/DatasetLabel.py +57 -0
- phoenix/server/api/types/DatasetSplit.py +98 -0
- phoenix/server/api/types/DatasetVersion.py +49 -4
- phoenix/server/api/types/DocumentAnnotation.py +212 -0
- phoenix/server/api/types/Experiment.py +264 -59
- phoenix/server/api/types/ExperimentComparison.py +5 -10
- phoenix/server/api/types/ExperimentRepeatedRunGroup.py +155 -0
- phoenix/server/api/types/ExperimentRepeatedRunGroupAnnotationSummary.py +9 -0
- phoenix/server/api/types/ExperimentRun.py +169 -65
- phoenix/server/api/types/ExperimentRunAnnotation.py +158 -39
- phoenix/server/api/types/GenerativeModel.py +245 -3
- phoenix/server/api/types/GenerativeProvider.py +70 -11
- phoenix/server/api/types/{Model.py → InferenceModel.py} +1 -1
- phoenix/server/api/types/ModelInterface.py +16 -0
- phoenix/server/api/types/PlaygroundModel.py +20 -0
- phoenix/server/api/types/Project.py +1278 -216
- phoenix/server/api/types/ProjectSession.py +188 -28
- phoenix/server/api/types/ProjectSessionAnnotation.py +187 -0
- phoenix/server/api/types/ProjectTraceRetentionPolicy.py +1 -1
- phoenix/server/api/types/Prompt.py +119 -39
- phoenix/server/api/types/PromptLabel.py +42 -25
- phoenix/server/api/types/PromptVersion.py +11 -8
- phoenix/server/api/types/PromptVersionTag.py +65 -25
- phoenix/server/api/types/ServerStatus.py +6 -0
- phoenix/server/api/types/Span.py +167 -123
- phoenix/server/api/types/SpanAnnotation.py +189 -42
- phoenix/server/api/types/SpanCostDetailSummaryEntry.py +10 -0
- phoenix/server/api/types/SpanCostSummary.py +10 -0
- phoenix/server/api/types/SystemApiKey.py +65 -1
- phoenix/server/api/types/TokenPrice.py +16 -0
- phoenix/server/api/types/TokenUsage.py +3 -3
- phoenix/server/api/types/Trace.py +223 -51
- phoenix/server/api/types/TraceAnnotation.py +149 -50
- phoenix/server/api/types/User.py +137 -32
- phoenix/server/api/types/UserApiKey.py +73 -26
- phoenix/server/api/types/node.py +10 -0
- phoenix/server/api/types/pagination.py +11 -2
- phoenix/server/app.py +290 -45
- phoenix/server/authorization.py +38 -3
- phoenix/server/bearer_auth.py +34 -24
- phoenix/server/cost_tracking/cost_details_calculator.py +196 -0
- phoenix/server/cost_tracking/cost_model_lookup.py +179 -0
- phoenix/server/cost_tracking/helpers.py +68 -0
- phoenix/server/cost_tracking/model_cost_manifest.json +3657 -830
- phoenix/server/cost_tracking/regex_specificity.py +397 -0
- phoenix/server/cost_tracking/token_cost_calculator.py +57 -0
- phoenix/server/daemons/__init__.py +0 -0
- phoenix/server/daemons/db_disk_usage_monitor.py +214 -0
- phoenix/server/daemons/generative_model_store.py +103 -0
- phoenix/server/daemons/span_cost_calculator.py +99 -0
- phoenix/server/dml_event.py +17 -0
- phoenix/server/dml_event_handler.py +5 -0
- phoenix/server/email/sender.py +56 -3
- phoenix/server/email/templates/db_disk_usage_notification.html +19 -0
- phoenix/server/email/types.py +11 -0
- phoenix/server/experiments/__init__.py +0 -0
- phoenix/server/experiments/utils.py +14 -0
- phoenix/server/grpc_server.py +11 -11
- phoenix/server/jwt_store.py +17 -15
- phoenix/server/ldap.py +1449 -0
- phoenix/server/main.py +26 -10
- phoenix/server/oauth2.py +330 -12
- phoenix/server/prometheus.py +66 -6
- phoenix/server/rate_limiters.py +4 -9
- phoenix/server/retention.py +33 -20
- phoenix/server/session_filters.py +49 -0
- phoenix/server/static/.vite/manifest.json +55 -51
- phoenix/server/static/assets/components-BreFUQQa.js +6702 -0
- phoenix/server/static/assets/{index-E0M82BdE.js → index-CTQoemZv.js} +140 -56
- phoenix/server/static/assets/pages-DBE5iYM3.js +9524 -0
- phoenix/server/static/assets/vendor-BGzfc4EU.css +1 -0
- phoenix/server/static/assets/vendor-DCE4v-Ot.js +920 -0
- phoenix/server/static/assets/vendor-codemirror-D5f205eT.js +25 -0
- phoenix/server/static/assets/vendor-recharts-V9cwpXsm.js +37 -0
- phoenix/server/static/assets/vendor-shiki-Do--csgv.js +5 -0
- phoenix/server/static/assets/vendor-three-CmB8bl_y.js +3840 -0
- phoenix/server/templates/index.html +40 -6
- phoenix/server/thread_server.py +1 -2
- phoenix/server/types.py +14 -4
- phoenix/server/utils.py +74 -0
- phoenix/session/client.py +56 -3
- phoenix/session/data_extractor.py +5 -0
- phoenix/session/evaluation.py +14 -5
- phoenix/session/session.py +45 -9
- phoenix/settings.py +5 -0
- phoenix/trace/attributes.py +80 -13
- phoenix/trace/dsl/helpers.py +90 -1
- phoenix/trace/dsl/query.py +8 -6
- phoenix/trace/projects.py +5 -0
- phoenix/utilities/template_formatters.py +1 -1
- phoenix/version.py +1 -1
- arize_phoenix-10.0.4.dist-info/RECORD +0 -405
- phoenix/server/api/types/Evaluation.py +0 -39
- phoenix/server/cost_tracking/cost_lookup.py +0 -255
- phoenix/server/static/assets/components-DULKeDfL.js +0 -4365
- phoenix/server/static/assets/pages-Cl0A-0U2.js +0 -7430
- phoenix/server/static/assets/vendor-WIZid84E.css +0 -1
- phoenix/server/static/assets/vendor-arizeai-Dy-0mSNw.js +0 -649
- phoenix/server/static/assets/vendor-codemirror-DBtifKNr.js +0 -33
- phoenix/server/static/assets/vendor-oB4u9zuV.js +0 -905
- phoenix/server/static/assets/vendor-recharts-D-T4KPz2.js +0 -59
- phoenix/server/static/assets/vendor-shiki-BMn4O_9F.js +0 -5
- phoenix/server/static/assets/vendor-three-C5WAXd5r.js +0 -2998
- phoenix/utilities/deprecation.py +0 -31
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/entry_points.txt +0 -0
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/licenses/LICENSE +0 -0
|
@@ -1,33 +1,58 @@
|
|
|
1
|
+
import json
|
|
1
2
|
import warnings
|
|
2
3
|
from asyncio import get_running_loop
|
|
3
4
|
from collections.abc import AsyncIterator
|
|
4
5
|
from datetime import datetime, timezone
|
|
6
|
+
from enum import Enum
|
|
5
7
|
from secrets import token_urlsafe
|
|
6
|
-
from typing import Any,
|
|
8
|
+
from typing import Annotated, Any, Optional, Union
|
|
7
9
|
|
|
8
10
|
import pandas as pd
|
|
9
|
-
|
|
10
|
-
from
|
|
11
|
-
from
|
|
11
|
+
import sqlalchemy as sa
|
|
12
|
+
from fastapi import APIRouter, Depends, Header, HTTPException, Path, Query
|
|
13
|
+
from pydantic import BaseModel, BeforeValidator, Field
|
|
14
|
+
from sqlalchemy import exists, select, update
|
|
12
15
|
from starlette.requests import Request
|
|
13
16
|
from starlette.responses import Response, StreamingResponse
|
|
14
|
-
from starlette.status import HTTP_404_NOT_FOUND
|
|
17
|
+
from starlette.status import HTTP_404_NOT_FOUND
|
|
15
18
|
from strawberry.relay import GlobalID
|
|
16
19
|
|
|
17
20
|
from phoenix.config import DEFAULT_PROJECT_NAME
|
|
18
21
|
from phoenix.datetime_utils import normalize_datetime
|
|
19
22
|
from phoenix.db import models
|
|
20
|
-
from phoenix.db.helpers import SupportedSQLDialect
|
|
23
|
+
from phoenix.db.helpers import SupportedSQLDialect, get_ancestor_span_rowids
|
|
21
24
|
from phoenix.db.insertion.helpers import as_kv, insert_on_conflict
|
|
22
|
-
from phoenix.db.insertion.types import Precursors
|
|
23
25
|
from phoenix.server.api.routers.utils import df_to_bytes
|
|
26
|
+
from phoenix.server.api.routers.v1.annotations import SpanAnnotationData
|
|
27
|
+
from phoenix.server.api.types.node import from_global_id_with_expected_type
|
|
28
|
+
from phoenix.server.authorization import is_not_locked
|
|
24
29
|
from phoenix.server.bearer_auth import PhoenixUser
|
|
25
|
-
from phoenix.server.dml_event import SpanAnnotationInsertEvent
|
|
30
|
+
from phoenix.server.dml_event import SpanAnnotationInsertEvent, SpanDeleteEvent
|
|
31
|
+
from phoenix.trace.attributes import flatten, unflatten
|
|
26
32
|
from phoenix.trace.dsl import SpanQuery as SpanQuery_
|
|
33
|
+
from phoenix.trace.schemas import (
|
|
34
|
+
Span as SpanForInsertion,
|
|
35
|
+
)
|
|
36
|
+
from phoenix.trace.schemas import (
|
|
37
|
+
SpanContext as InsertionSpanContext,
|
|
38
|
+
)
|
|
39
|
+
from phoenix.trace.schemas import (
|
|
40
|
+
SpanEvent as InternalSpanEvent,
|
|
41
|
+
)
|
|
42
|
+
from phoenix.trace.schemas import (
|
|
43
|
+
SpanKind,
|
|
44
|
+
SpanStatusCode,
|
|
45
|
+
)
|
|
27
46
|
from phoenix.utilities.json import encode_df_as_json_string
|
|
28
47
|
|
|
29
48
|
from .models import V1RoutesBaseModel
|
|
30
|
-
from .utils import
|
|
49
|
+
from .utils import (
|
|
50
|
+
PaginatedResponseBody,
|
|
51
|
+
RequestBody,
|
|
52
|
+
ResponseBody,
|
|
53
|
+
_get_project_by_identifier,
|
|
54
|
+
add_errors_to_responses,
|
|
55
|
+
)
|
|
31
56
|
|
|
32
57
|
DEFAULT_SPAN_LIMIT = 1000
|
|
33
58
|
|
|
@@ -68,12 +93,349 @@ class QuerySpansRequestBody(V1RoutesBaseModel):
|
|
|
68
93
|
)
|
|
69
94
|
|
|
70
95
|
|
|
96
|
+
################################################################################
|
|
97
|
+
# Autogenerated OTLP models
|
|
98
|
+
|
|
99
|
+
# These models are autogenerated from the OTLP v1 protobuf schemas
|
|
100
|
+
# Source: https://github.com/open-telemetry/opentelemetry-proto/
|
|
101
|
+
# ...blob/main/opentelemetry/proto/trace/v1/trace.proto
|
|
102
|
+
# ...blob/main/opentelemetry/proto/common/v1/common.proto
|
|
103
|
+
# ...blob/main/opentelemetry/proto/resource/v1/resource.proto
|
|
104
|
+
|
|
105
|
+
# The autogeneration is done using the `protoc` tool and the `protoc-gen-jsonschema` go plugin
|
|
106
|
+
# The generated JSON schemas are converted to Pydantic using `datamodel-codegen`
|
|
107
|
+
################################################################################
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
class OtlpDoubleValue(Enum):
    # String encodings of the IEEE-754 non-finite double values. The protobuf
    # JSON mapping (used by OTLP/JSON) serializes these doubles as strings.
    Infinity = "Infinity"
    # Autogenerated name: "-Infinity" starts with '-', which is not a valid
    # Python identifier, so the generator prefixed it with "field_".
    field_Infinity = "-Infinity"
    NaN = "NaN"
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
class OtlpArrayValue(BaseModel):
    """ArrayValue from OTLP common.proto: an ordered list of AnyValue items."""

    # Reject unknown keys so malformed OTLP JSON fails validation loudly.
    model_config = {"extra": "forbid"}

    # Forward reference: OtlpAnyValue is defined below and itself nests
    # OtlpArrayValue, so the two types are mutually recursive.
    values: Optional[list["OtlpAnyValue"]] = Field(
        None,
        description="Array of values. The array may be empty (contain 0 elements).",
    )
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
class OtlpAnyValue(BaseModel):
    """AnyValue from OTLP common.proto.

    In the protobuf schema this is a ``oneof``; in the JSON mapping each
    variant appears as an optional field, so at most one of the ``*_value``
    fields is expected to be set. No such exclusivity is enforced here —
    presumably validation of the oneof constraint happens elsewhere (TODO:
    confirm).
    """

    model_config = {"extra": "forbid"}

    array_value: Optional[OtlpArrayValue] = None
    bool_value: Optional[bool] = None
    # Pattern matches standard base64 (protobuf JSON encodes bytes as base64).
    bytes_value: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = None
    # Non-finite doubles arrive as the OtlpDoubleValue sentinel strings.
    double_value: Optional[Union[float, OtlpDoubleValue, str]] = None
    # int64 range; protobuf JSON also permits 64-bit ints as decimal strings.
    int_value: Optional[
        Union[
            Annotated[int, Field(ge=-9223372036854775808, lt=9223372036854775808)],
            Annotated[str, Field(pattern=r"^-?[0-9]+$")],
        ]
    ] = None
    kvlist_value: None = None  # TODO: Add KeyValueList model
    string_value: Optional[str] = None
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
class OtlpKeyValue(BaseModel):
    """KeyValue from OTLP common.proto: a single attribute key/value pair."""

    model_config = {"extra": "forbid"}

    key: Optional[str] = None
    value: Optional[OtlpAnyValue] = None
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
class StatusCode(str, Enum):
    """Span status code as stored in our DB, convertible to the OTLP integer code."""

    # This is not autogenerated, but used to convert the status code in our DB
    # to an OTLP status code integer.
    UNSET = "UNSET"
    OK = "OK"
    ERROR = "ERROR"

    def to_int(self) -> int:
        """Return the OTLP protobuf integer for this status (UNSET=0, OK=1, ERROR=2)."""
        # Member order mirrors the OTLP enum numbering, so the positional
        # index is exactly the protobuf value.
        ordered = ("UNSET", "OK", "ERROR")
        return ordered.index(self.value)
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
class OtlpStatus(BaseModel):
    """Status from OTLP trace.proto: the final outcome recorded on a span."""

    model_config = {"extra": "forbid"}

    # Bounds are the int32 range, matching the protobuf field type.
    code: Optional[Annotated[int, Field(ge=-2147483648, le=2147483647)]] = Field(
        None, description="The status code."
    )
    message: Optional[str] = Field(
        None, description="A developer-facing human readable error message."
    )
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
class OtlpKind(Enum):
    # SpanKind from OTLP trace.proto, in its JSON (string-name) encoding.
    SPAN_KIND_UNSPECIFIED = "SPAN_KIND_UNSPECIFIED"
    SPAN_KIND_INTERNAL = "SPAN_KIND_INTERNAL"
    SPAN_KIND_SERVER = "SPAN_KIND_SERVER"
    SPAN_KIND_CLIENT = "SPAN_KIND_CLIENT"
    SPAN_KIND_PRODUCER = "SPAN_KIND_PRODUCER"
    SPAN_KIND_CONSUMER = "SPAN_KIND_CONSUMER"
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
class OtlpEvent(BaseModel):
    """Span.Event from OTLP trace.proto: a named, timestamped annotation on a span."""

    model_config = {"extra": "forbid"}

    attributes: Optional[list[OtlpKeyValue]] = Field(
        None,
        description=(
            "attributes is a collection of attribute key/value pairs on the event. "
            "Attribute keys MUST be unique (it is not allowed to have more than one "
            "attribute with the same key)."
        ),
    )
    # uint32 range, matching the protobuf field type.
    dropped_attributes_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
        None,
        description=(
            "dropped_attributes_count is the number of dropped attributes. If the value is 0, "
            "then no attributes were dropped."
        ),
    )
    name: Optional[str] = Field(
        None,
        description=(
            "name of the event. This field is semantically required to be set to non-empty string."
        ),
    )
    # uint64 range; protobuf JSON also permits 64-bit ints as decimal strings.
    time_unix_nano: Optional[
        Union[
            Annotated[int, Field(ge=0, lt=18446744073709551616)],
            Annotated[str, Field(pattern=r"^[0-9]+$")],
        ]
    ] = Field(
        None,
        description=(
            "time_unix_nano is the time the event occurred. "
            "Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970."
        ),
    )
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
class OtlpSpan(BaseModel):
    """Span from OTLP trace.proto, in its OTLP/JSON encoding.

    Field descriptions are carried over verbatim from the upstream protobuf
    comments. Integer bounds mirror the protobuf wire types (uint32/uint64/
    int32), and 64-bit integers may alternatively arrive as decimal strings
    per the protobuf JSON mapping.
    """

    model_config = {"extra": "forbid"}

    attributes: Optional[list[OtlpKeyValue]] = Field(
        None,
        description=(
            "attributes is a collection of key/value pairs. Note, global attributes like server "
            "name can be set using the resource API. Examples of attributes:\n\n"
            '    "/http/user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) '
            'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"\n'
            '    "/http/server_latency": 300\n'
            '    "example.com/myattribute": true\n'
            '    "example.com/score": 10.239\n\n'
            "The OpenTelemetry API specification further restricts the allowed value types:\n"
            "https://github.com/open-telemetry/opentelemetry-specification/blob/main/"
            "specification/common/README.md#attribute\n"
            "Attribute keys MUST be unique (it is not allowed to have more than one attribute "
            "with the same key)."
        ),
    )
    dropped_attributes_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
        None,
        description=(
            "dropped_attributes_count is the number of attributes that were discarded. Attributes "
            "can be discarded because their keys are too long or because there are too many "
            "attributes. If this value is 0, then no attributes were dropped."
        ),
    )
    dropped_events_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
        None,
        description=(
            "dropped_events_count is the number of dropped events. If the value is 0, then no "
            "events were dropped."
        ),
    )
    dropped_links_count: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
        None,
        description=(
            "dropped_links_count is the number of dropped links after the maximum size was "
            "enforced. If this value is 0, then no links were dropped."
        ),
    )
    end_time_unix_nano: Optional[
        Union[
            Annotated[int, Field(ge=0, lt=18446744073709551616)],
            Annotated[str, Field(pattern=r"^[0-9]+$")],
        ]
    ] = Field(
        None,
        description=(
            "end_time_unix_nano is the end time of the span. On the client side, this is the time "
            "kept by the local machine where the span execution ends. On the server side, this is "
            "the time when the server application handler stops running.\n"
            "Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.\n\n"
            "This field is semantically required and it is expected that end_time >= start_time."
        ),
    )
    events: Optional[list[OtlpEvent]] = Field(
        None,
        description=("events is a collection of Event items. A span with no events is valid."),
    )
    flags: Optional[Annotated[int, Field(ge=0, le=4294967295)]] = Field(
        None,
        description=(
            "Flags, a bit field.\n\n"
            "Bits 0-7 (8 least significant bits) are the trace flags as defined in W3C Trace "
            "Context specification. To read the 8-bit W3C trace flag, use "
            "`flags & SPAN_FLAGS_TRACE_FLAGS_MASK`.\n\n"
            "See https://www.w3.org/TR/trace-context-2/#trace-flags for the flag definitions.\n\n"
            "Bits 8 and 9 represent the 3 states of whether a span's parent is remote. The states "
            "are (unknown, is not remote, is remote).\n"
            "To read whether the value is known, use "
            "`(flags & SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK) != 0`.\n"
            "To read whether the span is remote, use "
            "`(flags & SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK) != 0`.\n\n"
            "When creating span messages, if the message is logically forwarded from another "
            "source with an equivalent flags fields (i.e., usually another OTLP span message), the "
            "field SHOULD be copied as-is. If creating from a source that does not have an "
            "equivalent flags field (such as a runtime representation of an OpenTelemetry span), "
            "the high 22 bits MUST be set to zero.\n"
            "Readers MUST NOT assume that bits 10-31 (22 most significant bits) will be zero.\n\n"
            "[Optional]."
        ),
    )
    # Unlike the other fields, kind defaults to a concrete value rather than None.
    kind: Optional[Union[OtlpKind, Annotated[int, Field(ge=-2147483648, le=2147483647)]]] = Field(
        OtlpKind.SPAN_KIND_INTERNAL,  # INTERNAL because OpenInference uses its own SpanKind
        description=(
            "Distinguishes between spans generated in a particular context. For example, two spans "
            "with the same name may be distinguished using `CLIENT` (caller) and `SERVER` (callee) "
            "to identify queueing latency associated with the span."
        ),
    )
    links: None = None  # TODO: Add Link model
    name: Optional[str] = Field(
        None,
        description=(
            "A description of the span's operation.\n\n"
            "For example, the name can be a qualified method name or a file name and a line number "
            "where the operation is called. A best practice is to use the same display name at the "
            "same call point in an application. This makes it easier to correlate spans in "
            "different traces.\n\n"
            "This field is semantically required to be set to non-empty string. Empty value is "
            "equivalent to an unknown span name.\n\n"
            "This field is required."
        ),
    )
    # IDs are base64-encoded byte arrays in OTLP/JSON, hence the base64 pattern.
    parent_span_id: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = Field(
        None,
        description=(
            "The `span_id` of this span's parent span. If this is a root span, then this field "
            "must be empty. The ID is an 8-byte array."
        ),
    )
    span_id: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = Field(
        None,
        description=(
            "A unique identifier for a span within a trace, assigned when the span is created. The "
            "ID is an 8-byte array. An ID with all zeroes OR of length other than 8 bytes is "
            "considered invalid (empty string in OTLP/JSON is zero-length and thus is also "
            "invalid).\n\n"
            "This field is required."
        ),
    )
    start_time_unix_nano: Optional[
        Union[
            Annotated[int, Field(ge=0, lt=18446744073709551616)],
            Annotated[str, Field(pattern=r"^[0-9]+$")],
        ]
    ] = Field(
        None,
        description=(
            "start_time_unix_nano is the start time of the span. On the client side, this is the "
            "time kept by the local machine where the span execution starts. On the server side, "
            "this is the time when the server's application handler starts running.\n"
            "Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.\n\n"
            "This field is semantically required and it is expected that end_time >= start_time."
        ),
    )
    status: Optional[OtlpStatus] = Field(
        None,
        description=(
            "An optional final status for this span. Semantically when Status isn't set, it means "
            "span's status code is unset, i.e. assume STATUS_CODE_UNSET (code = 0)."
        ),
    )
    trace_id: Optional[Annotated[str, Field(pattern=r"^[A-Za-z0-9+/]*={0,2}$")]] = Field(
        None,
        description=(
            "A unique identifier for a trace. All spans from the same trace share the same "
            "`trace_id`. The ID is a 16-byte array. An ID with all zeroes OR of length other than "
            "16 bytes is considered invalid (empty string in OTLP/JSON is zero-length and thus is "
            "also invalid).\n\n"
            "This field is required."
        ),
    )
    trace_state: Optional[str] = Field(
        None,
        description=(
            "trace_state conveys information about request position in multiple distributed "
            "tracing graphs. It is a trace_state in w3c-trace-context format: "
            "https://www.w3.org/TR/trace-context/#tracestate-header\n"
            "See also https://github.com/w3c/distributed-tracing for more details about this "
            "field."
        ),
    )
|
|
387
|
+
|
|
388
|
+
|
|
389
|
+
class OtlpSpansResponseBody(PaginatedResponseBody[OtlpSpan]):
|
|
390
|
+
"""Paginated response where each span follows OTLP JSON structure."""
|
|
391
|
+
|
|
392
|
+
pass
|
|
393
|
+
|
|
394
|
+
|
|
395
|
+
################################################################################
|
|
396
|
+
# Phoenix Span Models
|
|
397
|
+
################################################################################
|
|
398
|
+
|
|
399
|
+
|
|
400
|
+
class SpanContext(V1RoutesBaseModel):
|
|
401
|
+
trace_id: str = Field(description="OpenTelemetry trace ID")
|
|
402
|
+
span_id: str = Field(description="OpenTelemetry span ID")
|
|
403
|
+
|
|
404
|
+
|
|
405
|
+
class SpanEvent(V1RoutesBaseModel):
|
|
406
|
+
name: str = Field(description="Name of the event")
|
|
407
|
+
timestamp: datetime = Field(description="When the event occurred")
|
|
408
|
+
attributes: dict[str, Any] = Field(default_factory=dict, description="Event attributes")
|
|
409
|
+
|
|
410
|
+
|
|
411
|
+
class Span(V1RoutesBaseModel):
|
|
412
|
+
id: str = Field(
|
|
413
|
+
default="", description="Span Global ID, distinct from the OpenTelemetry span ID"
|
|
414
|
+
)
|
|
415
|
+
name: str = Field(description="Name of the span operation")
|
|
416
|
+
context: SpanContext = Field(description="Span context containing trace_id and span_id")
|
|
417
|
+
span_kind: str = Field(description="Type of work that the span encapsulates")
|
|
418
|
+
parent_id: Optional[str] = Field(
|
|
419
|
+
default=None, description="OpenTelemetry span ID of the parent span"
|
|
420
|
+
)
|
|
421
|
+
start_time: datetime = Field(description="Start time of the span")
|
|
422
|
+
end_time: datetime = Field(description="End time of the span")
|
|
423
|
+
status_code: str = Field(description="Status code of the span")
|
|
424
|
+
status_message: str = Field(default="", description="Status message")
|
|
425
|
+
attributes: dict[str, Any] = Field(default_factory=dict, description="Span attributes")
|
|
426
|
+
events: list[SpanEvent] = Field(default_factory=list, description="Span events")
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
class SpansResponseBody(PaginatedResponseBody[Span]):
|
|
430
|
+
pass
|
|
431
|
+
|
|
432
|
+
|
|
71
433
|
# TODO: Add property details to SpanQuery schema
|
|
72
434
|
@router.post(
|
|
73
435
|
"/spans",
|
|
74
436
|
operation_id="querySpans",
|
|
75
437
|
summary="Query spans with query DSL",
|
|
76
|
-
responses=add_errors_to_responses([
|
|
438
|
+
responses=add_errors_to_responses([404, 422]),
|
|
77
439
|
include_in_schema=False,
|
|
78
440
|
)
|
|
79
441
|
async def query_spans_handler(
|
|
@@ -96,34 +458,34 @@ async def query_spans_handler(
|
|
|
96
458
|
)
|
|
97
459
|
end_time = request_body.end_time or request_body.stop_time
|
|
98
460
|
try:
|
|
99
|
-
span_queries = [SpanQuery_.from_dict(query.
|
|
461
|
+
span_queries = [SpanQuery_.from_dict(query.model_dump()) for query in queries]
|
|
100
462
|
except Exception as e:
|
|
101
463
|
raise HTTPException(
|
|
102
464
|
detail=f"Invalid query: {e}",
|
|
103
|
-
status_code=
|
|
465
|
+
status_code=422,
|
|
104
466
|
)
|
|
467
|
+
|
|
105
468
|
async with request.app.state.db() as session:
|
|
106
|
-
results = []
|
|
469
|
+
results: list[pd.DataFrame] = []
|
|
107
470
|
for query in span_queries:
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
start_time
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
end_time
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
orphan_span_as_root_span=request_body.orphan_span_as_root_span,
|
|
123
|
-
)
|
|
471
|
+
df = await session.run_sync(
|
|
472
|
+
query,
|
|
473
|
+
project_name=project_name,
|
|
474
|
+
start_time=normalize_datetime(
|
|
475
|
+
request_body.start_time,
|
|
476
|
+
timezone.utc,
|
|
477
|
+
),
|
|
478
|
+
end_time=normalize_datetime(
|
|
479
|
+
end_time,
|
|
480
|
+
timezone.utc,
|
|
481
|
+
),
|
|
482
|
+
limit=request_body.limit,
|
|
483
|
+
root_spans_only=request_body.root_spans_only,
|
|
484
|
+
orphan_span_as_root_span=request_body.orphan_span_as_root_span,
|
|
124
485
|
)
|
|
486
|
+
results.append(df)
|
|
125
487
|
if not results:
|
|
126
|
-
raise HTTPException(status_code=
|
|
488
|
+
raise HTTPException(status_code=404)
|
|
127
489
|
|
|
128
490
|
if accept == "application/json":
|
|
129
491
|
boundary_token = token_urlsafe(64)
|
|
@@ -154,61 +516,334 @@ async def _json_multipart(
|
|
|
154
516
|
yield f"--{boundary_token}--\r\n"
|
|
155
517
|
|
|
156
518
|
|
|
157
|
-
|
|
158
|
-
|
|
519
|
+
def _to_array_value(values: list[Any]) -> OtlpArrayValue:
|
|
520
|
+
"""Convert a list of values to an OtlpArrayValue.
|
|
521
|
+
|
|
522
|
+
If the values are not all of the same type, they will be coerced to strings.
|
|
523
|
+
Nested lists/tuples are not allowed and will be stringified.
|
|
524
|
+
"""
|
|
525
|
+
if not values:
|
|
526
|
+
return OtlpArrayValue(values=[])
|
|
527
|
+
|
|
528
|
+
# Convert any list/tuple values to strings to prevent nesting
|
|
529
|
+
processed_values = [str(v) if isinstance(v, (list, tuple)) else v for v in values]
|
|
530
|
+
|
|
531
|
+
# Check if all values are of the same type
|
|
532
|
+
first_type = type(processed_values[0])
|
|
533
|
+
if all(isinstance(v, first_type) for v in processed_values):
|
|
534
|
+
# All values are of the same type, convert normally
|
|
535
|
+
return OtlpArrayValue(values=[_to_any_value(v) for v in processed_values])
|
|
536
|
+
|
|
537
|
+
# Values are not homogeneous, convert everything to strings
|
|
538
|
+
return OtlpArrayValue(values=[OtlpAnyValue(string_value=str(v)) for v in processed_values])
|
|
539
|
+
|
|
540
|
+
|
|
541
|
+
def _to_any_value(value: Any) -> OtlpAnyValue:
|
|
542
|
+
if value is None:
|
|
543
|
+
return OtlpAnyValue()
|
|
544
|
+
elif isinstance(value, bool):
|
|
545
|
+
return OtlpAnyValue(bool_value=value)
|
|
546
|
+
elif isinstance(value, int):
|
|
547
|
+
return OtlpAnyValue(int_value=value)
|
|
548
|
+
elif isinstance(value, float):
|
|
549
|
+
if value in (float("inf"), float("-inf"), float("nan")):
|
|
550
|
+
return OtlpAnyValue(double_value=str(value))
|
|
551
|
+
return OtlpAnyValue(double_value=value)
|
|
552
|
+
elif isinstance(value, str):
|
|
553
|
+
return OtlpAnyValue(string_value=value)
|
|
554
|
+
elif isinstance(value, bytes):
|
|
555
|
+
return OtlpAnyValue(bytes_value=value.hex())
|
|
556
|
+
elif isinstance(value, (list, tuple)):
|
|
557
|
+
return OtlpAnyValue(array_value=_to_array_value(list(value)))
|
|
558
|
+
elif isinstance(value, dict):
|
|
559
|
+
# TODO: Implement kvlist_value when KeyValueList model is added
|
|
560
|
+
return OtlpAnyValue()
|
|
561
|
+
else:
|
|
562
|
+
# For any other type, convert to string
|
|
563
|
+
return OtlpAnyValue(string_value=str(value))
|
|
564
|
+
|
|
565
|
+
|
|
566
|
+
@router.get(
|
|
567
|
+
"/projects/{project_identifier}/spans/otlpv1",
|
|
568
|
+
operation_id="spanSearch",
|
|
569
|
+
summary="Search spans with simple filters (no DSL)",
|
|
570
|
+
description="Return spans within a project filtered by time range. "
|
|
571
|
+
"Supports cursor-based pagination.",
|
|
572
|
+
responses=add_errors_to_responses([404, 422]),
|
|
573
|
+
)
|
|
574
|
+
async def span_search_otlpv1(
|
|
159
575
|
request: Request,
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
576
|
+
project_identifier: str = Path(
|
|
577
|
+
description=(
|
|
578
|
+
"The project identifier: either project ID or project name. If using a project name, "
|
|
579
|
+
"it cannot contain slash (/), question mark (?), or pound sign (#) characters."
|
|
580
|
+
)
|
|
163
581
|
),
|
|
164
|
-
|
|
165
|
-
|
|
582
|
+
cursor: Optional[str] = Query(default=None, description="Pagination cursor (Span Global ID)"),
|
|
583
|
+
limit: int = Query(default=100, gt=0, le=1000, description="Maximum number of spans to return"),
|
|
584
|
+
start_time: Optional[datetime] = Query(default=None, description="Inclusive lower bound time"),
|
|
585
|
+
end_time: Optional[datetime] = Query(default=None, description="Exclusive upper bound time"),
|
|
586
|
+
) -> OtlpSpansResponseBody:
|
|
587
|
+
"""Search spans with minimal filters instead of the old SpanQuery DSL."""
|
|
166
588
|
|
|
589
|
+
async with request.app.state.db() as session:
|
|
590
|
+
project = await _get_project_by_identifier(session, project_identifier)
|
|
591
|
+
|
|
592
|
+
project_id: int = project.id
|
|
593
|
+
order_by = [models.Span.id.desc()]
|
|
167
594
|
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
595
|
+
stmt = (
|
|
596
|
+
select(
|
|
597
|
+
models.Span,
|
|
598
|
+
models.Trace.trace_id,
|
|
599
|
+
)
|
|
600
|
+
.join(models.Trace, onclause=models.Trace.id == models.Span.trace_rowid)
|
|
601
|
+
.where(models.Trace.project_rowid == project_id)
|
|
602
|
+
.order_by(*order_by)
|
|
173
603
|
)
|
|
174
604
|
|
|
605
|
+
if start_time:
|
|
606
|
+
stmt = stmt.where(models.Span.start_time >= normalize_datetime(start_time, timezone.utc))
|
|
607
|
+
if end_time:
|
|
608
|
+
stmt = stmt.where(models.Span.start_time < normalize_datetime(end_time, timezone.utc))
|
|
175
609
|
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
610
|
+
if cursor:
|
|
611
|
+
try:
|
|
612
|
+
cursor_rowid = int(GlobalID.from_id(cursor).node_id)
|
|
613
|
+
stmt = stmt.where(models.Span.id <= cursor_rowid)
|
|
614
|
+
except Exception:
|
|
615
|
+
raise HTTPException(status_code=422, detail="Invalid cursor")
|
|
616
|
+
|
|
617
|
+
stmt = stmt.limit(limit + 1)
|
|
618
|
+
|
|
619
|
+
async with request.app.state.db() as session:
|
|
620
|
+
rows: list[tuple[models.Span, str]] = [r async for r in await session.stream(stmt)]
|
|
621
|
+
|
|
622
|
+
if not rows:
|
|
623
|
+
return OtlpSpansResponseBody(next_cursor=None, data=[])
|
|
624
|
+
|
|
625
|
+
next_cursor: Optional[str] = None
|
|
626
|
+
if len(rows) == limit + 1:
|
|
627
|
+
*rows, extra = rows # extra is first item of next page
|
|
628
|
+
span_extra, _ = extra
|
|
629
|
+
next_cursor = str(GlobalID("Span", str(span_extra.id)))
|
|
630
|
+
|
|
631
|
+
# Convert ORM rows -> OTLP-style spans
|
|
632
|
+
result_spans: list[OtlpSpan] = []
|
|
633
|
+
for span_orm, trace_id in rows:
|
|
634
|
+
try:
|
|
635
|
+
status_code_enum = StatusCode(span_orm.status_code or "UNSET")
|
|
636
|
+
except ValueError:
|
|
637
|
+
status_code_enum = StatusCode.UNSET
|
|
638
|
+
|
|
639
|
+
# Convert attributes to KeyValue list
|
|
640
|
+
attributes_kv: list[OtlpKeyValue] = []
|
|
641
|
+
if span_orm.attributes:
|
|
642
|
+
for k, v in flatten(span_orm.attributes or {}, recurse_on_sequence=True):
|
|
643
|
+
attributes_kv.append(OtlpKeyValue(key=k, value=_to_any_value(v)))
|
|
644
|
+
|
|
645
|
+
# Convert events to OTLP Event list
|
|
646
|
+
events: Optional[list[OtlpEvent]] = None
|
|
647
|
+
if span_orm.events:
|
|
648
|
+
events = []
|
|
649
|
+
for event in span_orm.events:
|
|
650
|
+
event_attributes: list[OtlpKeyValue] = []
|
|
651
|
+
if event.get("attributes"):
|
|
652
|
+
for k, v in flatten(event["attributes"], recurse_on_sequence=True):
|
|
653
|
+
event_attributes.append(OtlpKeyValue(key=k, value=_to_any_value(v)))
|
|
654
|
+
|
|
655
|
+
# Convert event timestamp to nanoseconds
|
|
656
|
+
event_time = event.get("timestamp")
|
|
657
|
+
time_unix_nano = None
|
|
658
|
+
if event_time:
|
|
659
|
+
if isinstance(event_time, datetime):
|
|
660
|
+
time_unix_nano = int(event_time.timestamp() * 1_000_000_000)
|
|
661
|
+
elif isinstance(event_time, str):
|
|
662
|
+
try:
|
|
663
|
+
dt = datetime.fromisoformat(event_time)
|
|
664
|
+
time_unix_nano = int(dt.timestamp() * 1_000_000_000)
|
|
665
|
+
except ValueError:
|
|
666
|
+
pass
|
|
667
|
+
elif isinstance(event_time, (int, float)):
|
|
668
|
+
time_unix_nano = int(event_time)
|
|
669
|
+
|
|
670
|
+
events.append(
|
|
671
|
+
OtlpEvent(
|
|
672
|
+
name=event.get("name"),
|
|
673
|
+
attributes=event_attributes,
|
|
674
|
+
time_unix_nano=time_unix_nano,
|
|
675
|
+
dropped_attributes_count=event.get("dropped_attributes_count"),
|
|
676
|
+
)
|
|
677
|
+
)
|
|
678
|
+
|
|
679
|
+
start_ns = (
|
|
680
|
+
int(span_orm.start_time.timestamp() * 1_000_000_000) if span_orm.start_time else None
|
|
681
|
+
)
|
|
682
|
+
end_ns = int(span_orm.end_time.timestamp() * 1_000_000_000) if span_orm.end_time else None
|
|
683
|
+
|
|
684
|
+
result_spans.append(
|
|
685
|
+
OtlpSpan(
|
|
686
|
+
trace_id=trace_id,
|
|
687
|
+
span_id=span_orm.span_id,
|
|
688
|
+
parent_span_id=span_orm.parent_id,
|
|
689
|
+
name=span_orm.name,
|
|
690
|
+
start_time_unix_nano=start_ns,
|
|
691
|
+
end_time_unix_nano=end_ns,
|
|
692
|
+
attributes=attributes_kv,
|
|
693
|
+
events=events,
|
|
694
|
+
status=OtlpStatus(
|
|
695
|
+
code=status_code_enum.to_int(), message=span_orm.status_message or None
|
|
696
|
+
),
|
|
697
|
+
)
|
|
698
|
+
)
|
|
699
|
+
|
|
700
|
+
return OtlpSpansResponseBody(next_cursor=next_cursor, data=result_spans)
|
|
701
|
+
|
|
702
|
+
|
|
703
|
+
@router.get(
|
|
704
|
+
"/projects/{project_identifier}/spans",
|
|
705
|
+
operation_id="getSpans",
|
|
706
|
+
summary="List spans with simple filters (no DSL)",
|
|
707
|
+
description="Return spans within a project filtered by time range. "
|
|
708
|
+
"Supports cursor-based pagination.",
|
|
709
|
+
responses=add_errors_to_responses([404, 422]),
|
|
710
|
+
)
|
|
711
|
+
async def span_search(
|
|
712
|
+
request: Request,
|
|
713
|
+
project_identifier: str = Path(
|
|
190
714
|
description=(
|
|
191
|
-
"The identifier
|
|
192
|
-
"
|
|
193
|
-
)
|
|
715
|
+
"The project identifier: either project ID or project name. If using a project name, "
|
|
716
|
+
"it cannot contain slash (/), question mark (?), or pound sign (#) characters."
|
|
717
|
+
)
|
|
718
|
+
),
|
|
719
|
+
cursor: Optional[str] = Query(default=None, description="Pagination cursor (Span Global ID)"),
|
|
720
|
+
limit: int = Query(default=100, gt=0, le=1000, description="Maximum number of spans to return"),
|
|
721
|
+
start_time: Optional[datetime] = Query(default=None, description="Inclusive lower bound time"),
|
|
722
|
+
end_time: Optional[datetime] = Query(default=None, description="Exclusive upper bound time"),
|
|
723
|
+
) -> SpansResponseBody:
|
|
724
|
+
async with request.app.state.db() as session:
|
|
725
|
+
project = await _get_project_by_identifier(session, project_identifier)
|
|
726
|
+
|
|
727
|
+
project_id: int = project.id
|
|
728
|
+
order_by = [models.Span.id.desc()]
|
|
729
|
+
|
|
730
|
+
stmt = (
|
|
731
|
+
select(
|
|
732
|
+
models.Span,
|
|
733
|
+
models.Trace.trace_id,
|
|
734
|
+
)
|
|
735
|
+
.join(models.Trace, onclause=models.Trace.id == models.Span.trace_rowid)
|
|
736
|
+
.where(models.Trace.project_rowid == project_id)
|
|
737
|
+
.order_by(*order_by)
|
|
194
738
|
)
|
|
195
739
|
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
740
|
+
if start_time:
|
|
741
|
+
stmt = stmt.where(models.Span.start_time >= normalize_datetime(start_time, timezone.utc))
|
|
742
|
+
if end_time:
|
|
743
|
+
stmt = stmt.where(models.Span.start_time < normalize_datetime(end_time, timezone.utc))
|
|
744
|
+
|
|
745
|
+
if cursor:
|
|
746
|
+
try:
|
|
747
|
+
cursor_rowid = int(GlobalID.from_id(cursor).node_id)
|
|
748
|
+
except Exception:
|
|
749
|
+
raise HTTPException(status_code=422, detail="Invalid cursor")
|
|
750
|
+
stmt = stmt.where(models.Span.id <= cursor_rowid)
|
|
751
|
+
|
|
752
|
+
stmt = stmt.limit(limit + 1)
|
|
753
|
+
|
|
754
|
+
async with request.app.state.db() as session:
|
|
755
|
+
rows: list[tuple[models.Span, str]] = [r async for r in await session.stream(stmt)]
|
|
756
|
+
|
|
757
|
+
if not rows:
|
|
758
|
+
return SpansResponseBody(next_cursor=None, data=[])
|
|
759
|
+
|
|
760
|
+
next_cursor: Optional[str] = None
|
|
761
|
+
if len(rows) == limit + 1:
|
|
762
|
+
*rows, extra = rows # extra is first item of next page
|
|
763
|
+
span_extra, _ = extra
|
|
764
|
+
next_cursor = str(GlobalID("Span", str(span_extra.id)))
|
|
765
|
+
|
|
766
|
+
# Convert ORM rows -> Phoenix spans
|
|
767
|
+
result_spans: list[Span] = []
|
|
768
|
+
for span_orm, trace_id in rows:
|
|
769
|
+
# Convert events to Phoenix Event list
|
|
770
|
+
events: list[SpanEvent] = []
|
|
771
|
+
for event in span_orm.events:
|
|
772
|
+
event_time = event.get("timestamp")
|
|
773
|
+
parsed_time = None
|
|
774
|
+
|
|
775
|
+
if event_time:
|
|
776
|
+
if isinstance(event_time, datetime):
|
|
777
|
+
parsed_time = normalize_datetime(event_time, timezone.utc)
|
|
778
|
+
elif isinstance(event_time, str):
|
|
779
|
+
try:
|
|
780
|
+
naive_time = datetime.fromisoformat(event_time)
|
|
781
|
+
parsed_time = normalize_datetime(naive_time, timezone.utc)
|
|
782
|
+
except ValueError:
|
|
783
|
+
# If ISO format fails, try to parse as timestamp
|
|
784
|
+
try:
|
|
785
|
+
parsed_time = datetime.fromtimestamp(float(event_time), tz=timezone.utc)
|
|
786
|
+
except (ValueError, TypeError):
|
|
787
|
+
parsed_time = datetime.now(timezone.utc) # fallback
|
|
788
|
+
elif isinstance(event_time, (int, float)):
|
|
789
|
+
try:
|
|
790
|
+
# Assume nanoseconds if very large, otherwise seconds
|
|
791
|
+
if event_time > 1e12: # nanoseconds
|
|
792
|
+
parsed_time = datetime.fromtimestamp(
|
|
793
|
+
event_time / 1_000_000_000, tz=timezone.utc
|
|
794
|
+
)
|
|
795
|
+
else: # seconds
|
|
796
|
+
parsed_time = datetime.fromtimestamp(event_time, tz=timezone.utc)
|
|
797
|
+
except (ValueError, OSError):
|
|
798
|
+
parsed_time = datetime.now(timezone.utc) # fallback
|
|
799
|
+
else:
|
|
800
|
+
parsed_time = datetime.now(timezone.utc) # fallback
|
|
801
|
+
|
|
802
|
+
events.append(
|
|
803
|
+
SpanEvent(
|
|
804
|
+
name=event.get("name", ""),
|
|
805
|
+
timestamp=parsed_time,
|
|
806
|
+
attributes=event.get("attributes", {}),
|
|
807
|
+
)
|
|
808
|
+
)
|
|
809
|
+
|
|
810
|
+
attributes = {
|
|
811
|
+
k: v for k, v in flatten(span_orm.attributes or dict(), recurse_on_sequence=True)
|
|
812
|
+
}
|
|
813
|
+
openinference_span_kind = attributes.pop("openinference.span.kind", "UNKNOWN")
|
|
814
|
+
|
|
815
|
+
result_spans.append(
|
|
816
|
+
Span(
|
|
817
|
+
id=str(GlobalID("Span", str(span_orm.id))),
|
|
818
|
+
name=span_orm.name or "",
|
|
819
|
+
context=SpanContext(
|
|
820
|
+
trace_id=trace_id,
|
|
821
|
+
span_id=span_orm.span_id or "",
|
|
822
|
+
),
|
|
823
|
+
span_kind=openinference_span_kind,
|
|
824
|
+
parent_id=span_orm.parent_id,
|
|
825
|
+
start_time=span_orm.start_time,
|
|
826
|
+
end_time=span_orm.end_time,
|
|
827
|
+
status_code=span_orm.status_code,
|
|
828
|
+
status_message=span_orm.status_message or "",
|
|
829
|
+
attributes=attributes,
|
|
830
|
+
events=events,
|
|
831
|
+
)
|
|
210
832
|
)
|
|
211
833
|
|
|
834
|
+
return SpansResponseBody(next_cursor=next_cursor, data=result_spans)
|
|
835
|
+
|
|
836
|
+
|
|
837
|
+
@router.get("/spans", include_in_schema=False, deprecated=True)
|
|
838
|
+
async def get_spans_handler(
|
|
839
|
+
request: Request,
|
|
840
|
+
request_body: QuerySpansRequestBody,
|
|
841
|
+
project_name: Optional[str] = Query(
|
|
842
|
+
default=None, description="The project name to get evaluations from"
|
|
843
|
+
),
|
|
844
|
+
) -> Response:
|
|
845
|
+
return await query_spans_handler(request, request_body, project_name)
|
|
846
|
+
|
|
212
847
|
|
|
213
848
|
class AnnotateSpansRequestBody(RequestBody[list[SpanAnnotationData]]):
|
|
214
849
|
data: list[SpanAnnotationData]
|
|
@@ -224,11 +859,10 @@ class AnnotateSpansResponseBody(ResponseBody[list[InsertedSpanAnnotation]]):
|
|
|
224
859
|
|
|
225
860
|
@router.post(
|
|
226
861
|
"/span_annotations",
|
|
862
|
+
dependencies=[Depends(is_not_locked)],
|
|
227
863
|
operation_id="annotateSpans",
|
|
228
864
|
summary="Create span annotations",
|
|
229
|
-
responses=add_errors_to_responses(
|
|
230
|
-
[{"status_code": HTTP_404_NOT_FOUND, "description": "Span not found"}]
|
|
231
|
-
),
|
|
865
|
+
responses=add_errors_to_responses([{"status_code": 404, "description": "Span not found"}]),
|
|
232
866
|
response_description="Span annotations inserted successfully",
|
|
233
867
|
include_in_schema=True,
|
|
234
868
|
)
|
|
@@ -256,15 +890,17 @@ async def annotate_spans(
|
|
|
256
890
|
)
|
|
257
891
|
precursors = [d.as_precursor(user_id=user_id) for d in filtered_span_annotations]
|
|
258
892
|
if not sync:
|
|
259
|
-
await request.state.
|
|
893
|
+
await request.state.enqueue_annotations(*precursors)
|
|
260
894
|
return AnnotateSpansResponseBody(data=[])
|
|
261
895
|
|
|
262
896
|
span_ids = {p.span_id for p in precursors}
|
|
263
897
|
async with request.app.state.db() as session:
|
|
264
898
|
existing_spans = {
|
|
265
|
-
|
|
266
|
-
async for
|
|
267
|
-
select(models.Span
|
|
899
|
+
span_id: id_
|
|
900
|
+
async for span_id, id_ in await session.stream(
|
|
901
|
+
select(models.Span.span_id, models.Span.id).filter(
|
|
902
|
+
models.Span.span_id.in_(span_ids)
|
|
903
|
+
)
|
|
268
904
|
)
|
|
269
905
|
}
|
|
270
906
|
|
|
@@ -272,7 +908,7 @@ async def annotate_spans(
|
|
|
272
908
|
if missing_span_ids:
|
|
273
909
|
raise HTTPException(
|
|
274
910
|
detail=f"Spans with IDs {', '.join(missing_span_ids)} do not exist.",
|
|
275
|
-
status_code=
|
|
911
|
+
status_code=404,
|
|
276
912
|
)
|
|
277
913
|
inserted_ids = []
|
|
278
914
|
dialect = SupportedSQLDialect(session.bind.dialect.name)
|
|
@@ -294,3 +930,377 @@ async def annotate_spans(
|
|
|
294
930
|
for id_ in inserted_ids
|
|
295
931
|
]
|
|
296
932
|
)
|
|
933
|
+
|
|
934
|
+
|
|
935
|
+
class SpanNoteData(V1RoutesBaseModel):
|
|
936
|
+
span_id: Annotated[str, BeforeValidator(lambda v: v.strip() if isinstance(v, str) else v)] = (
|
|
937
|
+
Field(min_length=1, description="OpenTelemetry Span ID (hex format w/o 0x prefix)")
|
|
938
|
+
)
|
|
939
|
+
note: Annotated[str, BeforeValidator(lambda v: v.strip() if isinstance(v, str) else v)] = Field(
|
|
940
|
+
min_length=1, description="The note text to add to the span"
|
|
941
|
+
)
|
|
942
|
+
|
|
943
|
+
|
|
944
|
+
class CreateSpanNoteRequestBody(RequestBody[SpanNoteData]):
|
|
945
|
+
data: SpanNoteData
|
|
946
|
+
|
|
947
|
+
|
|
948
|
+
class CreateSpanNoteResponseBody(ResponseBody[InsertedSpanAnnotation]):
|
|
949
|
+
pass
|
|
950
|
+
|
|
951
|
+
|
|
952
|
+
@router.post(
|
|
953
|
+
"/span_notes",
|
|
954
|
+
dependencies=[Depends(is_not_locked)],
|
|
955
|
+
operation_id="createSpanNote",
|
|
956
|
+
summary="Create a span note",
|
|
957
|
+
description=(
|
|
958
|
+
"Add a note annotation to a span. Notes are special annotations that allow "
|
|
959
|
+
"multiple entries per span (unlike regular annotations which are unique by name "
|
|
960
|
+
"and identifier). Each note gets a unique timestamp-based identifier."
|
|
961
|
+
),
|
|
962
|
+
responses=add_errors_to_responses([{"status_code": 404, "description": "Span not found"}]),
|
|
963
|
+
response_description="Span note created successfully",
|
|
964
|
+
status_code=200,
|
|
965
|
+
)
|
|
966
|
+
async def create_span_note(
|
|
967
|
+
request: Request,
|
|
968
|
+
request_body: CreateSpanNoteRequestBody,
|
|
969
|
+
) -> CreateSpanNoteResponseBody:
|
|
970
|
+
"""
|
|
971
|
+
Create a note annotation for a span.
|
|
972
|
+
|
|
973
|
+
Notes are a special type of annotation that:
|
|
974
|
+
- Have the fixed name "note"
|
|
975
|
+
- Use a timestamp-based identifier to allow multiple notes per span
|
|
976
|
+
- Are always created with annotator_kind="HUMAN" and source="API"
|
|
977
|
+
- Store the note text in the explanation field
|
|
978
|
+
"""
|
|
979
|
+
note_data = request_body.data
|
|
980
|
+
|
|
981
|
+
user_id: Optional[int] = None
|
|
982
|
+
if request.app.state.authentication_enabled and isinstance(request.user, PhoenixUser):
|
|
983
|
+
user_id = int(request.user.identity)
|
|
984
|
+
|
|
985
|
+
async with request.app.state.db() as session:
|
|
986
|
+
# Find the span by OpenTelemetry span_id
|
|
987
|
+
span_rowid = await session.scalar(
|
|
988
|
+
select(models.Span.id).where(models.Span.span_id == note_data.span_id)
|
|
989
|
+
)
|
|
990
|
+
|
|
991
|
+
if span_rowid is None:
|
|
992
|
+
raise HTTPException(
|
|
993
|
+
status_code=404,
|
|
994
|
+
detail=f"Span with ID {note_data.span_id} not found",
|
|
995
|
+
)
|
|
996
|
+
|
|
997
|
+
# Generate a unique identifier for the note using timestamp
|
|
998
|
+
timestamp = datetime.now(timezone.utc).isoformat()
|
|
999
|
+
note_identifier = f"px-span-note:{timestamp}"
|
|
1000
|
+
|
|
1001
|
+
# Create the annotation values
|
|
1002
|
+
values = {
|
|
1003
|
+
"span_rowid": span_rowid,
|
|
1004
|
+
"name": "note",
|
|
1005
|
+
"label": None,
|
|
1006
|
+
"score": None,
|
|
1007
|
+
"explanation": note_data.note,
|
|
1008
|
+
"annotator_kind": "HUMAN",
|
|
1009
|
+
"metadata_": {},
|
|
1010
|
+
"identifier": note_identifier,
|
|
1011
|
+
"source": "API",
|
|
1012
|
+
"user_id": user_id,
|
|
1013
|
+
}
|
|
1014
|
+
|
|
1015
|
+
# Insert the annotation
|
|
1016
|
+
result = await session.execute(
|
|
1017
|
+
sa.insert(models.SpanAnnotation).values(**values).returning(models.SpanAnnotation.id)
|
|
1018
|
+
)
|
|
1019
|
+
annotation_id = result.scalar_one()
|
|
1020
|
+
|
|
1021
|
+
# Put event on queue after successful insert
|
|
1022
|
+
request.state.event_queue.put(SpanAnnotationInsertEvent((annotation_id,)))
|
|
1023
|
+
|
|
1024
|
+
return CreateSpanNoteResponseBody(
|
|
1025
|
+
data=InsertedSpanAnnotation(id=str(GlobalID("SpanAnnotation", str(annotation_id))))
|
|
1026
|
+
)
|
|
1027
|
+
|
|
1028
|
+
|
|
1029
|
+
class CreateSpansRequestBody(RequestBody[list[Span]]):
|
|
1030
|
+
data: list[Span]
|
|
1031
|
+
|
|
1032
|
+
|
|
1033
|
+
class CreateSpansResponseBody(V1RoutesBaseModel):
|
|
1034
|
+
total_received: int = Field(description="Total number of spans received")
|
|
1035
|
+
total_queued: int = Field(description="Number of spans successfully queued for insertion")
|
|
1036
|
+
|
|
1037
|
+
|
|
1038
|
+
@router.post(
|
|
1039
|
+
"/projects/{project_identifier}/spans",
|
|
1040
|
+
dependencies=[Depends(is_not_locked)],
|
|
1041
|
+
operation_id="createSpans",
|
|
1042
|
+
summary="Create spans",
|
|
1043
|
+
description=(
|
|
1044
|
+
"Submit spans to be inserted into a project. If any spans are invalid or "
|
|
1045
|
+
"duplicates, no spans will be inserted."
|
|
1046
|
+
),
|
|
1047
|
+
responses=add_errors_to_responses([404, 400]),
|
|
1048
|
+
status_code=202,
|
|
1049
|
+
)
|
|
1050
|
+
async def create_spans(
|
|
1051
|
+
request: Request,
|
|
1052
|
+
request_body: CreateSpansRequestBody,
|
|
1053
|
+
project_identifier: str = Path(
|
|
1054
|
+
description=(
|
|
1055
|
+
"The project identifier: either project ID or project name. If using a project name, "
|
|
1056
|
+
"it cannot contain slash (/), question mark (?), or pound sign (#) characters."
|
|
1057
|
+
)
|
|
1058
|
+
),
|
|
1059
|
+
) -> CreateSpansResponseBody:
|
|
1060
|
+
def convert_api_span_for_insertion(api_span: Span) -> SpanForInsertion:
|
|
1061
|
+
"""
|
|
1062
|
+
Convert from API Span to phoenix.trace.schemas.Span
|
|
1063
|
+
Note: The 'id' field has a default empty string and is ignored during insertion.
|
|
1064
|
+
"""
|
|
1065
|
+
try:
|
|
1066
|
+
span_kind = SpanKind(api_span.span_kind.upper())
|
|
1067
|
+
except ValueError:
|
|
1068
|
+
span_kind = SpanKind.UNKNOWN
|
|
1069
|
+
|
|
1070
|
+
try:
|
|
1071
|
+
status_code = SpanStatusCode(api_span.status_code.upper())
|
|
1072
|
+
except ValueError:
|
|
1073
|
+
status_code = SpanStatusCode.UNSET
|
|
1074
|
+
|
|
1075
|
+
internal_events: list[InternalSpanEvent] = []
|
|
1076
|
+
for event in api_span.events:
|
|
1077
|
+
if event.timestamp:
|
|
1078
|
+
internal_events.append(
|
|
1079
|
+
InternalSpanEvent(
|
|
1080
|
+
name=event.name, timestamp=event.timestamp, attributes=event.attributes
|
|
1081
|
+
)
|
|
1082
|
+
)
|
|
1083
|
+
|
|
1084
|
+
# Add back the openinference.span.kind attribute since it's stored separately in the API
|
|
1085
|
+
attributes = dict(api_span.attributes)
|
|
1086
|
+
attributes["openinference.span.kind"] = api_span.span_kind
|
|
1087
|
+
attributes = unflatten(attributes.items())
|
|
1088
|
+
|
|
1089
|
+
# Create span for insertion - note we ignore the 'id' field as it's server-generated
|
|
1090
|
+
return SpanForInsertion(
|
|
1091
|
+
name=api_span.name,
|
|
1092
|
+
context=InsertionSpanContext(
|
|
1093
|
+
trace_id=api_span.context.trace_id, span_id=api_span.context.span_id
|
|
1094
|
+
),
|
|
1095
|
+
span_kind=span_kind,
|
|
1096
|
+
parent_id=api_span.parent_id,
|
|
1097
|
+
start_time=api_span.start_time,
|
|
1098
|
+
end_time=api_span.end_time,
|
|
1099
|
+
status_code=status_code,
|
|
1100
|
+
status_message=api_span.status_message,
|
|
1101
|
+
attributes=attributes,
|
|
1102
|
+
events=internal_events,
|
|
1103
|
+
conversation=None, # Unused
|
|
1104
|
+
)
|
|
1105
|
+
|
|
1106
|
+
try:
|
|
1107
|
+
id_ = from_global_id_with_expected_type(
|
|
1108
|
+
GlobalID.from_id(project_identifier),
|
|
1109
|
+
"Project",
|
|
1110
|
+
)
|
|
1111
|
+
except Exception:
|
|
1112
|
+
project_name = project_identifier
|
|
1113
|
+
else:
|
|
1114
|
+
stmt = select(models.Project).filter_by(id=id_)
|
|
1115
|
+
async with request.app.state.db() as session:
|
|
1116
|
+
project = await session.scalar(stmt)
|
|
1117
|
+
if project is None:
|
|
1118
|
+
raise HTTPException(
|
|
1119
|
+
status_code=HTTP_404_NOT_FOUND,
|
|
1120
|
+
detail=f"Project with ID {project_identifier} not found",
|
|
1121
|
+
)
|
|
1122
|
+
project_name = project.name
|
|
1123
|
+
|
|
1124
|
+
total_received = len(request_body.data)
|
|
1125
|
+
duplicate_spans: list[dict[str, str]] = []
|
|
1126
|
+
invalid_spans: list[dict[str, str]] = []
|
|
1127
|
+
spans_to_queue: list[tuple[SpanForInsertion, str]] = []
|
|
1128
|
+
|
|
1129
|
+
existing_span_ids: set[str] = set()
|
|
1130
|
+
span_ids = [span.context.span_id for span in request_body.data]
|
|
1131
|
+
async with request.app.state.db() as session:
|
|
1132
|
+
existing_result = await session.execute(
|
|
1133
|
+
select(models.Span.span_id).where(models.Span.span_id.in_(span_ids))
|
|
1134
|
+
)
|
|
1135
|
+
existing_span_ids = {row[0] for row in existing_result}
|
|
1136
|
+
|
|
1137
|
+
for api_span in request_body.data:
|
|
1138
|
+
# Check if it's a duplicate
|
|
1139
|
+
if api_span.context.span_id in existing_span_ids:
|
|
1140
|
+
duplicate_spans.append(
|
|
1141
|
+
{
|
|
1142
|
+
"span_id": api_span.context.span_id,
|
|
1143
|
+
"trace_id": api_span.context.trace_id,
|
|
1144
|
+
}
|
|
1145
|
+
)
|
|
1146
|
+
continue
|
|
1147
|
+
|
|
1148
|
+
try:
|
|
1149
|
+
span_for_insertion = convert_api_span_for_insertion(api_span)
|
|
1150
|
+
spans_to_queue.append((span_for_insertion, project_name))
|
|
1151
|
+
except Exception as e:
|
|
1152
|
+
invalid_spans.append(
|
|
1153
|
+
{
|
|
1154
|
+
"span_id": api_span.context.span_id,
|
|
1155
|
+
"trace_id": api_span.context.trace_id,
|
|
1156
|
+
"error": str(e),
|
|
1157
|
+
}
|
|
1158
|
+
)
|
|
1159
|
+
|
|
1160
|
+
# If there are any duplicates or invalid spans, reject the entire request
|
|
1161
|
+
if duplicate_spans or invalid_spans:
|
|
1162
|
+
error_detail = {
|
|
1163
|
+
"error": "Request contains invalid or duplicate spans",
|
|
1164
|
+
"total_received": total_received,
|
|
1165
|
+
"total_queued": 0, # No spans are queued when there are validation errors
|
|
1166
|
+
"total_duplicates": len(duplicate_spans),
|
|
1167
|
+
"total_invalid": len(invalid_spans),
|
|
1168
|
+
"duplicate_spans": duplicate_spans,
|
|
1169
|
+
"invalid_spans": invalid_spans,
|
|
1170
|
+
}
|
|
1171
|
+
raise HTTPException(
|
|
1172
|
+
status_code=400,
|
|
1173
|
+
detail=json.dumps(error_detail),
|
|
1174
|
+
)
|
|
1175
|
+
|
|
1176
|
+
# All spans are valid, queue them all
|
|
1177
|
+
for span_for_insertion, project_name in spans_to_queue:
|
|
1178
|
+
await request.state.enqueue_span(span_for_insertion, project_name)
|
|
1179
|
+
|
|
1180
|
+
return CreateSpansResponseBody(
|
|
1181
|
+
total_received=total_received,
|
|
1182
|
+
total_queued=len(spans_to_queue),
|
|
1183
|
+
)
|
|
1184
|
+
|
|
1185
|
+
|
|
1186
|
+
@router.delete(
    "/spans/{span_identifier}",
    dependencies=[Depends(is_not_locked)],
    operation_id="deleteSpan",
    summary="Delete a span by span_identifier",
    description=(
        """
Delete a single span by identifier.

**Important**: This operation deletes ONLY the specified span itself and does NOT
delete its descendants/children. All child spans will remain in the trace and
become orphaned (their parent_id will point to a non-existent span).

Behavior:
- Deletes only the target span (preserves all descendant spans)
- If this was the last span in the trace, the trace record is also deleted
- If the deleted span had a parent, its cumulative metrics (error count, token counts)
  are subtracted from all ancestor spans in the chain

**Note**: This operation is irreversible and may create orphaned spans.
"""
    ),
    responses=add_errors_to_responses([404]),
    status_code=204,  # No Content for successful deletion
)
async def delete_span(
    request: Request,
    span_identifier: str = Path(
        description="The span identifier: either a relay GlobalID or OpenTelemetry span_id"
    ),
) -> None:
    """Delete exactly one span, leaving all of its descendants in place.

    The identifier is first interpreted as a relay ``GlobalID``; if that
    parsing fails for any reason, it is treated as a raw OpenTelemetry
    ``span_id``. Descendants of the deleted span are NOT removed and may
    become orphaned (their ``parent_id`` will reference a missing span).

    Sequence of operations:
        1. Delete the matching span row in one round trip
           (``DELETE ... RETURNING``).
        2. Raise 404 if no row matched the identifier.
        3. Drop the parent trace record when no spans remain in it.
        4. Otherwise, if the deleted span had a parent, subtract its
           cumulative metrics (error count, prompt/completion token counts)
           from every ancestor span so their aggregates stay consistent.

    Args:
        request: FastAPI request carrying app state (DB factory, event queue).
        span_identifier: Either a relay GlobalID or an OpenTelemetry span_id.

    Raises:
        HTTPException: 404 when no span matches the identifier.

    Returns:
        None (the route responds with 204 No Content).
    """
    async with request.app.state.db() as session:
        # Resolve the identifier EAFP-style: try the relay GlobalID form
        # first, and fall back to a raw OpenTelemetry span_id lookup.
        try:
            span_rowid = from_global_id_with_expected_type(
                GlobalID.from_id(span_identifier), "Span"
            )
        except Exception:
            where_clause = models.Span.span_id == span_identifier
            not_found_detail = f"Span with span_id '{span_identifier}' not found"
        else:
            where_clause = models.Span.id == span_rowid
            not_found_detail = f"Span with relay ID '{span_identifier}' not found"

        # Delete the row and retrieve its former contents in one statement.
        deleted_span = await session.scalar(
            sa.delete(models.Span).where(where_clause).returning(models.Span)
        )
        if deleted_span is None:
            raise HTTPException(status_code=404, detail=not_found_detail)

        # Capture everything needed after the deleted row is gone.
        trace_rowid = deleted_span.trace_rowid
        parent_id = deleted_span.parent_id
        error_count_delta = deleted_span.cumulative_error_count
        prompt_tokens_delta = deleted_span.cumulative_llm_token_count_prompt
        completion_tokens_delta = deleted_span.cumulative_llm_token_count_completion

        # If that was the trace's last span, remove the trace record too.
        no_spans_remain = await session.scalar(
            select(~exists().where(models.Span.trace_rowid == trace_rowid))
        )
        if no_spans_remain:
            await session.execute(
                sa.delete(models.Trace).where(models.Trace.id == trace_rowid)
            )

        # Otherwise roll the deleted span's cumulative metrics back out of
        # the entire ancestor chain (helper yields all ancestor row ids).
        if not no_spans_remain and parent_id is not None:
            ancestor_rowids = get_ancestor_span_rowids(parent_id)
            await session.execute(
                update(models.Span)
                .where(models.Span.id.in_(ancestor_rowids))
                .values(
                    cumulative_error_count=(
                        models.Span.cumulative_error_count - error_count_delta
                    ),
                    cumulative_llm_token_count_prompt=(
                        models.Span.cumulative_llm_token_count_prompt
                        - prompt_tokens_delta
                    ),
                    cumulative_llm_token_count_completion=(
                        models.Span.cumulative_llm_token_count_completion
                        - completion_tokens_delta
                    ),
                )
            )

    # Trigger cache invalidation after the session commits.
    # NOTE(review): diff rendering strips indentation — placement outside the
    # session block (post-commit) inferred from sibling delete routes; confirm.
    request.state.event_queue.put(SpanDeleteEvent((trace_rowid,)))

    return None