arize-phoenix 3.16.1__py3-none-any.whl → 7.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arize-phoenix might be problematic.
- arize_phoenix-7.7.0.dist-info/METADATA +261 -0
- arize_phoenix-7.7.0.dist-info/RECORD +345 -0
- {arize_phoenix-3.16.1.dist-info → arize_phoenix-7.7.0.dist-info}/WHEEL +1 -1
- arize_phoenix-7.7.0.dist-info/entry_points.txt +3 -0
- phoenix/__init__.py +86 -14
- phoenix/auth.py +309 -0
- phoenix/config.py +675 -45
- phoenix/core/model.py +32 -30
- phoenix/core/model_schema.py +102 -109
- phoenix/core/model_schema_adapter.py +48 -45
- phoenix/datetime_utils.py +24 -3
- phoenix/db/README.md +54 -0
- phoenix/db/__init__.py +4 -0
- phoenix/db/alembic.ini +85 -0
- phoenix/db/bulk_inserter.py +294 -0
- phoenix/db/engines.py +208 -0
- phoenix/db/enums.py +20 -0
- phoenix/db/facilitator.py +113 -0
- phoenix/db/helpers.py +159 -0
- phoenix/db/insertion/constants.py +2 -0
- phoenix/db/insertion/dataset.py +227 -0
- phoenix/db/insertion/document_annotation.py +171 -0
- phoenix/db/insertion/evaluation.py +191 -0
- phoenix/db/insertion/helpers.py +98 -0
- phoenix/db/insertion/span.py +193 -0
- phoenix/db/insertion/span_annotation.py +158 -0
- phoenix/db/insertion/trace_annotation.py +158 -0
- phoenix/db/insertion/types.py +256 -0
- phoenix/db/migrate.py +86 -0
- phoenix/db/migrations/data_migration_scripts/populate_project_sessions.py +199 -0
- phoenix/db/migrations/env.py +114 -0
- phoenix/db/migrations/script.py.mako +26 -0
- phoenix/db/migrations/versions/10460e46d750_datasets.py +317 -0
- phoenix/db/migrations/versions/3be8647b87d8_add_token_columns_to_spans_table.py +126 -0
- phoenix/db/migrations/versions/4ded9e43755f_create_project_sessions_table.py +66 -0
- phoenix/db/migrations/versions/cd164e83824f_users_and_tokens.py +157 -0
- phoenix/db/migrations/versions/cf03bd6bae1d_init.py +280 -0
- phoenix/db/models.py +807 -0
- phoenix/exceptions.py +5 -1
- phoenix/experiments/__init__.py +6 -0
- phoenix/experiments/evaluators/__init__.py +29 -0
- phoenix/experiments/evaluators/base.py +158 -0
- phoenix/experiments/evaluators/code_evaluators.py +184 -0
- phoenix/experiments/evaluators/llm_evaluators.py +473 -0
- phoenix/experiments/evaluators/utils.py +236 -0
- phoenix/experiments/functions.py +772 -0
- phoenix/experiments/tracing.py +86 -0
- phoenix/experiments/types.py +726 -0
- phoenix/experiments/utils.py +25 -0
- phoenix/inferences/__init__.py +0 -0
- phoenix/{datasets → inferences}/errors.py +6 -5
- phoenix/{datasets → inferences}/fixtures.py +49 -42
- phoenix/{datasets/dataset.py → inferences/inferences.py} +121 -105
- phoenix/{datasets → inferences}/schema.py +11 -11
- phoenix/{datasets → inferences}/validation.py +13 -14
- phoenix/logging/__init__.py +3 -0
- phoenix/logging/_config.py +90 -0
- phoenix/logging/_filter.py +6 -0
- phoenix/logging/_formatter.py +69 -0
- phoenix/metrics/__init__.py +5 -4
- phoenix/metrics/binning.py +4 -3
- phoenix/metrics/metrics.py +2 -1
- phoenix/metrics/mixins.py +7 -6
- phoenix/metrics/retrieval_metrics.py +2 -1
- phoenix/metrics/timeseries.py +5 -4
- phoenix/metrics/wrappers.py +9 -3
- phoenix/pointcloud/clustering.py +5 -5
- phoenix/pointcloud/pointcloud.py +7 -5
- phoenix/pointcloud/projectors.py +5 -6
- phoenix/pointcloud/umap_parameters.py +53 -52
- phoenix/server/api/README.md +28 -0
- phoenix/server/api/auth.py +44 -0
- phoenix/server/api/context.py +152 -9
- phoenix/server/api/dataloaders/__init__.py +91 -0
- phoenix/server/api/dataloaders/annotation_summaries.py +139 -0
- phoenix/server/api/dataloaders/average_experiment_run_latency.py +54 -0
- phoenix/server/api/dataloaders/cache/__init__.py +3 -0
- phoenix/server/api/dataloaders/cache/two_tier_cache.py +68 -0
- phoenix/server/api/dataloaders/dataset_example_revisions.py +131 -0
- phoenix/server/api/dataloaders/dataset_example_spans.py +38 -0
- phoenix/server/api/dataloaders/document_evaluation_summaries.py +144 -0
- phoenix/server/api/dataloaders/document_evaluations.py +31 -0
- phoenix/server/api/dataloaders/document_retrieval_metrics.py +89 -0
- phoenix/server/api/dataloaders/experiment_annotation_summaries.py +79 -0
- phoenix/server/api/dataloaders/experiment_error_rates.py +58 -0
- phoenix/server/api/dataloaders/experiment_run_annotations.py +36 -0
- phoenix/server/api/dataloaders/experiment_run_counts.py +49 -0
- phoenix/server/api/dataloaders/experiment_sequence_number.py +44 -0
- phoenix/server/api/dataloaders/latency_ms_quantile.py +188 -0
- phoenix/server/api/dataloaders/min_start_or_max_end_times.py +85 -0
- phoenix/server/api/dataloaders/project_by_name.py +31 -0
- phoenix/server/api/dataloaders/record_counts.py +116 -0
- phoenix/server/api/dataloaders/session_io.py +79 -0
- phoenix/server/api/dataloaders/session_num_traces.py +30 -0
- phoenix/server/api/dataloaders/session_num_traces_with_error.py +32 -0
- phoenix/server/api/dataloaders/session_token_usages.py +41 -0
- phoenix/server/api/dataloaders/session_trace_latency_ms_quantile.py +55 -0
- phoenix/server/api/dataloaders/span_annotations.py +26 -0
- phoenix/server/api/dataloaders/span_dataset_examples.py +31 -0
- phoenix/server/api/dataloaders/span_descendants.py +57 -0
- phoenix/server/api/dataloaders/span_projects.py +33 -0
- phoenix/server/api/dataloaders/token_counts.py +124 -0
- phoenix/server/api/dataloaders/trace_by_trace_ids.py +25 -0
- phoenix/server/api/dataloaders/trace_root_spans.py +32 -0
- phoenix/server/api/dataloaders/user_roles.py +30 -0
- phoenix/server/api/dataloaders/users.py +33 -0
- phoenix/server/api/exceptions.py +48 -0
- phoenix/server/api/helpers/__init__.py +12 -0
- phoenix/server/api/helpers/dataset_helpers.py +217 -0
- phoenix/server/api/helpers/experiment_run_filters.py +763 -0
- phoenix/server/api/helpers/playground_clients.py +948 -0
- phoenix/server/api/helpers/playground_registry.py +70 -0
- phoenix/server/api/helpers/playground_spans.py +455 -0
- phoenix/server/api/input_types/AddExamplesToDatasetInput.py +16 -0
- phoenix/server/api/input_types/AddSpansToDatasetInput.py +14 -0
- phoenix/server/api/input_types/ChatCompletionInput.py +38 -0
- phoenix/server/api/input_types/ChatCompletionMessageInput.py +24 -0
- phoenix/server/api/input_types/ClearProjectInput.py +15 -0
- phoenix/server/api/input_types/ClusterInput.py +2 -2
- phoenix/server/api/input_types/CreateDatasetInput.py +12 -0
- phoenix/server/api/input_types/CreateSpanAnnotationInput.py +18 -0
- phoenix/server/api/input_types/CreateTraceAnnotationInput.py +18 -0
- phoenix/server/api/input_types/DataQualityMetricInput.py +5 -2
- phoenix/server/api/input_types/DatasetExampleInput.py +14 -0
- phoenix/server/api/input_types/DatasetSort.py +17 -0
- phoenix/server/api/input_types/DatasetVersionSort.py +16 -0
- phoenix/server/api/input_types/DeleteAnnotationsInput.py +7 -0
- phoenix/server/api/input_types/DeleteDatasetExamplesInput.py +13 -0
- phoenix/server/api/input_types/DeleteDatasetInput.py +7 -0
- phoenix/server/api/input_types/DeleteExperimentsInput.py +7 -0
- phoenix/server/api/input_types/DimensionFilter.py +4 -4
- phoenix/server/api/input_types/GenerativeModelInput.py +17 -0
- phoenix/server/api/input_types/Granularity.py +1 -1
- phoenix/server/api/input_types/InvocationParameters.py +162 -0
- phoenix/server/api/input_types/PatchAnnotationInput.py +19 -0
- phoenix/server/api/input_types/PatchDatasetExamplesInput.py +35 -0
- phoenix/server/api/input_types/PatchDatasetInput.py +14 -0
- phoenix/server/api/input_types/PerformanceMetricInput.py +5 -2
- phoenix/server/api/input_types/ProjectSessionSort.py +29 -0
- phoenix/server/api/input_types/SpanAnnotationSort.py +17 -0
- phoenix/server/api/input_types/SpanSort.py +134 -69
- phoenix/server/api/input_types/TemplateOptions.py +10 -0
- phoenix/server/api/input_types/TraceAnnotationSort.py +17 -0
- phoenix/server/api/input_types/UserRoleInput.py +9 -0
- phoenix/server/api/mutations/__init__.py +28 -0
- phoenix/server/api/mutations/api_key_mutations.py +167 -0
- phoenix/server/api/mutations/chat_mutations.py +593 -0
- phoenix/server/api/mutations/dataset_mutations.py +591 -0
- phoenix/server/api/mutations/experiment_mutations.py +75 -0
- phoenix/server/api/{types/ExportEventsMutation.py → mutations/export_events_mutations.py} +21 -18
- phoenix/server/api/mutations/project_mutations.py +57 -0
- phoenix/server/api/mutations/span_annotations_mutations.py +128 -0
- phoenix/server/api/mutations/trace_annotations_mutations.py +127 -0
- phoenix/server/api/mutations/user_mutations.py +329 -0
- phoenix/server/api/openapi/__init__.py +0 -0
- phoenix/server/api/openapi/main.py +17 -0
- phoenix/server/api/openapi/schema.py +16 -0
- phoenix/server/api/queries.py +738 -0
- phoenix/server/api/routers/__init__.py +11 -0
- phoenix/server/api/routers/auth.py +284 -0
- phoenix/server/api/routers/embeddings.py +26 -0
- phoenix/server/api/routers/oauth2.py +488 -0
- phoenix/server/api/routers/v1/__init__.py +64 -0
- phoenix/server/api/routers/v1/datasets.py +1017 -0
- phoenix/server/api/routers/v1/evaluations.py +362 -0
- phoenix/server/api/routers/v1/experiment_evaluations.py +115 -0
- phoenix/server/api/routers/v1/experiment_runs.py +167 -0
- phoenix/server/api/routers/v1/experiments.py +308 -0
- phoenix/server/api/routers/v1/pydantic_compat.py +78 -0
- phoenix/server/api/routers/v1/spans.py +267 -0
- phoenix/server/api/routers/v1/traces.py +208 -0
- phoenix/server/api/routers/v1/utils.py +95 -0
- phoenix/server/api/schema.py +44 -241
- phoenix/server/api/subscriptions.py +597 -0
- phoenix/server/api/types/Annotation.py +21 -0
- phoenix/server/api/types/AnnotationSummary.py +55 -0
- phoenix/server/api/types/AnnotatorKind.py +16 -0
- phoenix/server/api/types/ApiKey.py +27 -0
- phoenix/server/api/types/AuthMethod.py +9 -0
- phoenix/server/api/types/ChatCompletionMessageRole.py +11 -0
- phoenix/server/api/types/ChatCompletionSubscriptionPayload.py +46 -0
- phoenix/server/api/types/Cluster.py +25 -24
- phoenix/server/api/types/CreateDatasetPayload.py +8 -0
- phoenix/server/api/types/DataQualityMetric.py +31 -13
- phoenix/server/api/types/Dataset.py +288 -63
- phoenix/server/api/types/DatasetExample.py +85 -0
- phoenix/server/api/types/DatasetExampleRevision.py +34 -0
- phoenix/server/api/types/DatasetVersion.py +14 -0
- phoenix/server/api/types/Dimension.py +32 -31
- phoenix/server/api/types/DocumentEvaluationSummary.py +9 -8
- phoenix/server/api/types/EmbeddingDimension.py +56 -49
- phoenix/server/api/types/Evaluation.py +25 -31
- phoenix/server/api/types/EvaluationSummary.py +30 -50
- phoenix/server/api/types/Event.py +20 -20
- phoenix/server/api/types/ExampleRevisionInterface.py +14 -0
- phoenix/server/api/types/Experiment.py +152 -0
- phoenix/server/api/types/ExperimentAnnotationSummary.py +13 -0
- phoenix/server/api/types/ExperimentComparison.py +17 -0
- phoenix/server/api/types/ExperimentRun.py +119 -0
- phoenix/server/api/types/ExperimentRunAnnotation.py +56 -0
- phoenix/server/api/types/GenerativeModel.py +9 -0
- phoenix/server/api/types/GenerativeProvider.py +85 -0
- phoenix/server/api/types/Inferences.py +80 -0
- phoenix/server/api/types/InferencesRole.py +23 -0
- phoenix/server/api/types/LabelFraction.py +7 -0
- phoenix/server/api/types/MimeType.py +2 -2
- phoenix/server/api/types/Model.py +54 -54
- phoenix/server/api/types/PerformanceMetric.py +8 -5
- phoenix/server/api/types/Project.py +407 -142
- phoenix/server/api/types/ProjectSession.py +139 -0
- phoenix/server/api/types/Segments.py +4 -4
- phoenix/server/api/types/Span.py +221 -176
- phoenix/server/api/types/SpanAnnotation.py +43 -0
- phoenix/server/api/types/SpanIOValue.py +15 -0
- phoenix/server/api/types/SystemApiKey.py +9 -0
- phoenix/server/api/types/TemplateLanguage.py +10 -0
- phoenix/server/api/types/TimeSeries.py +19 -15
- phoenix/server/api/types/TokenUsage.py +11 -0
- phoenix/server/api/types/Trace.py +154 -0
- phoenix/server/api/types/TraceAnnotation.py +45 -0
- phoenix/server/api/types/UMAPPoints.py +7 -7
- phoenix/server/api/types/User.py +60 -0
- phoenix/server/api/types/UserApiKey.py +45 -0
- phoenix/server/api/types/UserRole.py +15 -0
- phoenix/server/api/types/node.py +4 -112
- phoenix/server/api/types/pagination.py +156 -57
- phoenix/server/api/utils.py +34 -0
- phoenix/server/app.py +864 -115
- phoenix/server/bearer_auth.py +163 -0
- phoenix/server/dml_event.py +136 -0
- phoenix/server/dml_event_handler.py +256 -0
- phoenix/server/email/__init__.py +0 -0
- phoenix/server/email/sender.py +97 -0
- phoenix/server/email/templates/__init__.py +0 -0
- phoenix/server/email/templates/password_reset.html +19 -0
- phoenix/server/email/types.py +11 -0
- phoenix/server/grpc_server.py +102 -0
- phoenix/server/jwt_store.py +505 -0
- phoenix/server/main.py +305 -116
- phoenix/server/oauth2.py +52 -0
- phoenix/server/openapi/__init__.py +0 -0
- phoenix/server/prometheus.py +111 -0
- phoenix/server/rate_limiters.py +188 -0
- phoenix/server/static/.vite/manifest.json +87 -0
- phoenix/server/static/assets/components-Cy9nwIvF.js +2125 -0
- phoenix/server/static/assets/index-BKvHIxkk.js +113 -0
- phoenix/server/static/assets/pages-CUi2xCVQ.js +4449 -0
- phoenix/server/static/assets/vendor-DvC8cT4X.js +894 -0
- phoenix/server/static/assets/vendor-DxkFTwjz.css +1 -0
- phoenix/server/static/assets/vendor-arizeai-Do1793cv.js +662 -0
- phoenix/server/static/assets/vendor-codemirror-BzwZPyJM.js +24 -0
- phoenix/server/static/assets/vendor-recharts-_Jb7JjhG.js +59 -0
- phoenix/server/static/assets/vendor-shiki-Cl9QBraO.js +5 -0
- phoenix/server/static/assets/vendor-three-DwGkEfCM.js +2998 -0
- phoenix/server/telemetry.py +68 -0
- phoenix/server/templates/index.html +82 -23
- phoenix/server/thread_server.py +3 -3
- phoenix/server/types.py +275 -0
- phoenix/services.py +27 -18
- phoenix/session/client.py +743 -68
- phoenix/session/data_extractor.py +31 -7
- phoenix/session/evaluation.py +3 -9
- phoenix/session/session.py +263 -219
- phoenix/settings.py +22 -0
- phoenix/trace/__init__.py +2 -22
- phoenix/trace/attributes.py +338 -0
- phoenix/trace/dsl/README.md +116 -0
- phoenix/trace/dsl/filter.py +663 -213
- phoenix/trace/dsl/helpers.py +73 -21
- phoenix/trace/dsl/query.py +574 -201
- phoenix/trace/exporter.py +24 -19
- phoenix/trace/fixtures.py +368 -32
- phoenix/trace/otel.py +71 -219
- phoenix/trace/projects.py +3 -2
- phoenix/trace/schemas.py +33 -11
- phoenix/trace/span_evaluations.py +21 -16
- phoenix/trace/span_json_decoder.py +6 -4
- phoenix/trace/span_json_encoder.py +2 -2
- phoenix/trace/trace_dataset.py +47 -32
- phoenix/trace/utils.py +21 -4
- phoenix/utilities/__init__.py +0 -26
- phoenix/utilities/client.py +132 -0
- phoenix/utilities/deprecation.py +31 -0
- phoenix/utilities/error_handling.py +3 -2
- phoenix/utilities/json.py +109 -0
- phoenix/utilities/logging.py +8 -0
- phoenix/utilities/project.py +2 -2
- phoenix/utilities/re.py +49 -0
- phoenix/utilities/span_store.py +0 -23
- phoenix/utilities/template_formatters.py +99 -0
- phoenix/version.py +1 -1
- arize_phoenix-3.16.1.dist-info/METADATA +0 -495
- arize_phoenix-3.16.1.dist-info/RECORD +0 -178
- phoenix/core/project.py +0 -619
- phoenix/core/traces.py +0 -96
- phoenix/experimental/evals/__init__.py +0 -73
- phoenix/experimental/evals/evaluators.py +0 -413
- phoenix/experimental/evals/functions/__init__.py +0 -4
- phoenix/experimental/evals/functions/classify.py +0 -453
- phoenix/experimental/evals/functions/executor.py +0 -353
- phoenix/experimental/evals/functions/generate.py +0 -138
- phoenix/experimental/evals/functions/processing.py +0 -76
- phoenix/experimental/evals/models/__init__.py +0 -14
- phoenix/experimental/evals/models/anthropic.py +0 -175
- phoenix/experimental/evals/models/base.py +0 -170
- phoenix/experimental/evals/models/bedrock.py +0 -221
- phoenix/experimental/evals/models/litellm.py +0 -134
- phoenix/experimental/evals/models/openai.py +0 -448
- phoenix/experimental/evals/models/rate_limiters.py +0 -246
- phoenix/experimental/evals/models/vertex.py +0 -173
- phoenix/experimental/evals/models/vertexai.py +0 -186
- phoenix/experimental/evals/retrievals.py +0 -96
- phoenix/experimental/evals/templates/__init__.py +0 -50
- phoenix/experimental/evals/templates/default_templates.py +0 -472
- phoenix/experimental/evals/templates/template.py +0 -195
- phoenix/experimental/evals/utils/__init__.py +0 -172
- phoenix/experimental/evals/utils/threads.py +0 -27
- phoenix/server/api/helpers.py +0 -11
- phoenix/server/api/routers/evaluation_handler.py +0 -109
- phoenix/server/api/routers/span_handler.py +0 -70
- phoenix/server/api/routers/trace_handler.py +0 -60
- phoenix/server/api/types/DatasetRole.py +0 -23
- phoenix/server/static/index.css +0 -6
- phoenix/server/static/index.js +0 -7447
- phoenix/storage/span_store/__init__.py +0 -23
- phoenix/storage/span_store/text_file.py +0 -85
- phoenix/trace/dsl/missing.py +0 -60
- phoenix/trace/langchain/__init__.py +0 -3
- phoenix/trace/langchain/instrumentor.py +0 -35
- phoenix/trace/llama_index/__init__.py +0 -3
- phoenix/trace/llama_index/callback.py +0 -102
- phoenix/trace/openai/__init__.py +0 -3
- phoenix/trace/openai/instrumentor.py +0 -30
- {arize_phoenix-3.16.1.dist-info → arize_phoenix-7.7.0.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-3.16.1.dist-info → arize_phoenix-7.7.0.dist-info}/licenses/LICENSE +0 -0
- /phoenix/{datasets → db/insertion}/__init__.py +0 -0
- /phoenix/{experimental → db/migrations}/__init__.py +0 -0
- /phoenix/{storage → db/migrations/data_migration_scripts}/__init__.py +0 -0
phoenix/db/insertion/span.py
@@ -0,0 +1,193 @@
+from dataclasses import asdict
+from typing import NamedTuple, Optional, cast
+
+from openinference.semconv.trace import SpanAttributes
+from sqlalchemy import func, insert, select, update
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from phoenix.db import models
+from phoenix.db.helpers import SupportedSQLDialect
+from phoenix.db.insertion.helpers import OnConflict, insert_on_conflict
+from phoenix.trace.attributes import get_attribute_value
+from phoenix.trace.schemas import Span, SpanStatusCode
+
+
+class SpanInsertionEvent(NamedTuple):
+    project_rowid: int
+
+
+class ClearProjectSpansEvent(NamedTuple):
+    project_rowid: int
+
+
+async def insert_span(
+    session: AsyncSession,
+    span: Span,
+    project_name: str,
+) -> Optional[SpanInsertionEvent]:
+    dialect = SupportedSQLDialect(session.bind.dialect.name)
+    if (
+        project_rowid := await session.scalar(
+            select(models.Project.id).filter_by(name=project_name)
+        )
+    ) is None:
+        project_rowid = await session.scalar(
+            insert(models.Project).values(name=project_name).returning(models.Project.id)
+        )
+        assert project_rowid is not None
+
+    trace_id = span.context.trace_id
+    trace: models.Trace = await session.scalar(
+        select(models.Trace).filter_by(trace_id=trace_id)
+    ) or models.Trace(trace_id=trace_id)
+
+    if trace.id is not None:
+        # Trace record may need to be updated.
+        if trace.end_time < span.end_time:
+            trace.end_time = span.end_time
+            trace.project_rowid = project_rowid
+        if span.start_time < trace.start_time:
+            trace.start_time = span.start_time
+    else:
+        # Trace record needs to be persisted for the first time.
+        trace.start_time = span.start_time
+        trace.end_time = span.end_time
+        trace.project_rowid = project_rowid
+        session.add(trace)
+
+    session_id = get_attribute_value(span.attributes, SpanAttributes.SESSION_ID)
+    session_id = str(session_id).strip() if session_id is not None else ""
+    assert isinstance(session_id, str)
+
+    project_session: Optional[models.ProjectSession] = None
+    if trace.project_session_rowid is not None:
+        # ProjectSession record already exists in database for this Trace record, so we fetch
+        # it because it may need to be updated. However, the session_id on the span, if exists,
+        # will be ignored at this point. Otherwise, if session_id is different, we will need
+        # to create a new ProjectSession record, as well as to determine whether the old record
+        # needs to be deleted if this is the last Trace associated with it.
+        project_session = await session.scalar(
+            select(models.ProjectSession).filter_by(id=trace.project_session_rowid)
+        )
+    elif session_id:
+        project_session = await session.scalar(
+            select(models.ProjectSession).filter_by(session_id=session_id)
+        ) or models.ProjectSession(session_id=session_id)
+
+    if project_session is not None:
+        if project_session.id is None:
+            # ProjectSession record needs to be persisted for the first time.
+            project_session.start_time = trace.start_time
+            project_session.end_time = trace.end_time
+            project_session.project_id = project_rowid
+            session.add(project_session)
+            await session.flush()
+            assert project_session.id is not None
+            trace.project_session_rowid = project_session.id
+        else:
+            # ProjectSession record may need to be updated.
+            if trace.project_session_rowid is None:
+                trace.project_session_rowid = project_session.id
+            if trace.start_time < project_session.start_time:
+                project_session.start_time = trace.start_time
+            if project_session.end_time < trace.end_time:
+                project_session.end_time = trace.end_time
+
+    await session.flush()
+    assert trace.id is not None
+    assert project_session is None or (
+        project_session.id is not None and project_session.id == trace.project_session_rowid
+    )
+
+    cumulative_error_count = int(span.status_code is SpanStatusCode.ERROR)
+    try:
+        cumulative_llm_token_count_prompt = int(
+            get_attribute_value(span.attributes, SpanAttributes.LLM_TOKEN_COUNT_PROMPT) or 0
+        )
+    except BaseException:
+        cumulative_llm_token_count_prompt = 0
+    try:
+        cumulative_llm_token_count_completion = int(
+            get_attribute_value(span.attributes, SpanAttributes.LLM_TOKEN_COUNT_COMPLETION) or 0
+        )
+    except BaseException:
+        cumulative_llm_token_count_completion = 0
+    try:
+        llm_token_count_prompt = int(
+            get_attribute_value(span.attributes, SpanAttributes.LLM_TOKEN_COUNT_PROMPT) or 0
+        )
+    except BaseException:
+        llm_token_count_prompt = 0
+    try:
+        llm_token_count_completion = int(
+            get_attribute_value(span.attributes, SpanAttributes.LLM_TOKEN_COUNT_COMPLETION) or 0
+        )
+    except BaseException:
+        llm_token_count_completion = 0
+    if accumulation := (
+        await session.execute(
+            select(
+                func.sum(models.Span.cumulative_error_count),
+                func.sum(models.Span.cumulative_llm_token_count_prompt),
+                func.sum(models.Span.cumulative_llm_token_count_completion),
+            ).where(models.Span.parent_id == span.context.span_id)
+        )
+    ).first():
+        cumulative_error_count += cast(int, accumulation[0] or 0)
+        cumulative_llm_token_count_prompt += cast(int, accumulation[1] or 0)
+        cumulative_llm_token_count_completion += cast(int, accumulation[2] or 0)
+    span_rowid = await session.scalar(
+        insert_on_conflict(
+            dict(
+                span_id=span.context.span_id,
+                trace_rowid=trace.id,
+                parent_id=span.parent_id,
+                span_kind=span.span_kind.value,
+                name=span.name,
+                start_time=span.start_time,
+                end_time=span.end_time,
+                attributes=span.attributes,
+                events=[asdict(event) for event in span.events],
+                status_code=span.status_code.value,
+                status_message=span.status_message,
+                cumulative_error_count=cumulative_error_count,
+                cumulative_llm_token_count_prompt=cumulative_llm_token_count_prompt,
+                cumulative_llm_token_count_completion=cumulative_llm_token_count_completion,
+                llm_token_count_prompt=llm_token_count_prompt,
+                llm_token_count_completion=llm_token_count_completion,
+            ),
+            dialect=dialect,
+            table=models.Span,
+            unique_by=("span_id",),
+            on_conflict=OnConflict.DO_NOTHING,
+        ).returning(models.Span.id)
+    )
+    if span_rowid is None:
+        return None
+    # Propagate cumulative values to ancestors. This is usually a no-op, since
+    # the parent usually arrives after the child. But in the event that a
+    # child arrives after its parent, we need to make sure that all the
+    # ancestors' cumulative values are updated.
+    ancestors = (
+        select(models.Span.id, models.Span.parent_id)
+        .where(models.Span.span_id == span.parent_id)
+        .cte(recursive=True)
+    )
+    child = ancestors.alias()
+    ancestors = ancestors.union_all(
+        select(models.Span.id, models.Span.parent_id).join(
+            child, models.Span.span_id == child.c.parent_id
+        )
+    )
+    await session.execute(
+        update(models.Span)
+        .where(models.Span.id.in_(select(ancestors.c.id)))
+        .values(
+            cumulative_error_count=models.Span.cumulative_error_count + cumulative_error_count,
+            cumulative_llm_token_count_prompt=models.Span.cumulative_llm_token_count_prompt
+            + cumulative_llm_token_count_prompt,
+            cumulative_llm_token_count_completion=models.Span.cumulative_llm_token_count_completion
+            + cumulative_llm_token_count_completion,
+        )
+    )
+    return SpanInsertionEvent(project_rowid)
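The recursive CTE at the end of insert_span above back-fills cumulative error and token counts onto ancestor spans when a child span happens to arrive before its parent. Below is a minimal standalone sketch of the same pattern, assuming only SQLAlchemy and an in-memory SQLite database with a toy spans table; the table and column names are illustrative, not Phoenix's actual models.

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, insert, select, update

metadata = MetaData()
spans = Table(
    "spans",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("span_id", String, unique=True),
    Column("parent_id", String),
    Column("cumulative_error_count", Integer, nullable=False, default=0),
)

engine = create_engine("sqlite://")
metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(
        insert(spans),
        [
            {"span_id": "root", "parent_id": None, "cumulative_error_count": 0},
            {"span_id": "mid", "parent_id": "root", "cumulative_error_count": 0},
            {"span_id": "leaf", "parent_id": "mid", "cumulative_error_count": 1},
        ],
    )

    # Anchor the recursive CTE at the new span's parent, then keep joining each
    # row to its own parent to collect every ancestor.
    ancestors = (
        select(spans.c.id, spans.c.parent_id)
        .where(spans.c.span_id == "mid")  # "leaf" just arrived; "mid" is its parent
        .cte(recursive=True)
    )
    child = ancestors.alias()
    ancestors = ancestors.union_all(
        select(spans.c.id, spans.c.parent_id).join(child, spans.c.span_id == child.c.parent_id)
    )

    # Add the new child's count to every ancestor in a single UPDATE.
    conn.execute(
        update(spans)
        .where(spans.c.id.in_(select(ancestors.c.id)))
        .values(cumulative_error_count=spans.c.cumulative_error_count + 1)
    )

    print(conn.execute(select(spans.c.span_id, spans.c.cumulative_error_count)).all())
    # e.g. [('root', 1), ('mid', 1), ('leaf', 1)]

After the update, "mid" and "root" both reflect the leaf's error count, which is how the rollups stay consistent regardless of span arrival order.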
phoenix/db/insertion/span_annotation.py
@@ -0,0 +1,158 @@
+from collections.abc import Mapping
+from datetime import datetime
+from typing import Any, NamedTuple, Optional
+
+from sqlalchemy import Row, Select, and_, select, tuple_
+from sqlalchemy.ext.asyncio import AsyncSession
+from typing_extensions import TypeAlias
+
+from phoenix.db import models
+from phoenix.db.helpers import dedup
+from phoenix.db.insertion.helpers import as_kv
+from phoenix.db.insertion.types import (
+    Insertables,
+    Postponed,
+    Precursors,
+    QueueInserter,
+    Received,
+)
+from phoenix.server.dml_event import SpanAnnotationDmlEvent
+
+_Name: TypeAlias = str
+_SpanId: TypeAlias = str
+_SpanRowId: TypeAlias = int
+_AnnoRowId: TypeAlias = int
+
+_Key: TypeAlias = tuple[_Name, _SpanId]
+_UniqueBy: TypeAlias = tuple[_Name, _SpanRowId]
+_Existing: TypeAlias = tuple[
+    _SpanRowId,
+    _SpanId,
+    Optional[_AnnoRowId],
+    Optional[_Name],
+    Optional[datetime],
+]
+
+
+class SpanAnnotationQueueInserter(
+    QueueInserter[
+        Precursors.SpanAnnotation,
+        Insertables.SpanAnnotation,
+        models.SpanAnnotation,
+        SpanAnnotationDmlEvent,
+    ],
+    table=models.SpanAnnotation,
+    unique_by=("name", "span_rowid"),
+):
+    async def _events(
+        self,
+        session: AsyncSession,
+        *insertions: Insertables.SpanAnnotation,
+    ) -> list[SpanAnnotationDmlEvent]:
+        records = [dict(as_kv(ins.row)) for ins in insertions]
+        stmt = self._insert_on_conflict(*records).returning(self.table.id)
+        ids = tuple([_ async for _ in await session.stream_scalars(stmt)])
+        return [SpanAnnotationDmlEvent(ids)]
+
+    async def _partition(
+        self,
+        session: AsyncSession,
+        *parcels: Received[Precursors.SpanAnnotation],
+    ) -> tuple[
+        list[Received[Insertables.SpanAnnotation]],
+        list[Postponed[Precursors.SpanAnnotation]],
+        list[Received[Precursors.SpanAnnotation]],
+    ]:
+        to_insert: list[Received[Insertables.SpanAnnotation]] = []
+        to_postpone: list[Postponed[Precursors.SpanAnnotation]] = []
+        to_discard: list[Received[Precursors.SpanAnnotation]] = []
+
+        stmt = self._select_existing(*map(_key, parcels))
+        existing: list[Row[_Existing]] = [_ async for _ in await session.stream(stmt)]
+        existing_spans: Mapping[str, _SpanAttr] = {
+            e.span_id: _SpanAttr(e.span_rowid) for e in existing
+        }
+        existing_annos: Mapping[_Key, _AnnoAttr] = {
+            (e.name, e.span_id): _AnnoAttr(e.span_rowid, e.id, e.updated_at)
+            for e in existing
+            if e.id is not None and e.name is not None and e.updated_at is not None
+        }
+
+        for p in parcels:
+            if (anno := existing_annos.get(_key(p))) is not None:
+                if p.received_at <= anno.updated_at:
+                    to_discard.append(p)
+                else:
+                    to_insert.append(
+                        Received(
+                            received_at=p.received_at,
+                            item=p.item.as_insertable(
+                                span_rowid=anno.span_rowid,
+                                id_=anno.id_,
+                            ),
+                        )
+                    )
+            elif (span := existing_spans.get(p.item.span_id)) is not None:
+                to_insert.append(
+                    Received(
+                        received_at=p.received_at,
+                        item=p.item.as_insertable(
+                            span_rowid=span.span_rowid,
+                        ),
+                    )
+                )
+            elif isinstance(p, Postponed):
+                if p.retries_left > 1:
+                    to_postpone.append(p.postpone(p.retries_left - 1))
+                else:
+                    to_discard.append(p)
+            elif isinstance(p, Received):
+                to_postpone.append(p.postpone(self._retry_allowance))
+            else:
+                to_discard.append(p)
+
+        assert len(to_insert) + len(to_postpone) + len(to_discard) == len(parcels)
+        to_insert = dedup(sorted(to_insert, key=_time, reverse=True), _unique_by)[::-1]
+        return to_insert, to_postpone, to_discard
+
+    def _select_existing(self, *keys: _Key) -> Select[_Existing]:
+        anno = self.table
+        span = (
+            select(models.Span.id, models.Span.span_id)
+            .where(models.Span.span_id.in_({span_id for _, span_id in keys}))
+            .cte()
+        )
+        onclause = and_(
+            span.c.id == anno.span_rowid,
+            anno.name.in_({name for name, _ in keys}),
+            tuple_(anno.name, span.c.span_id).in_(keys),
+        )
+        return select(
+            span.c.id.label("span_rowid"),
+            span.c.span_id,
+            anno.id,
+            anno.name,
+            anno.updated_at,
+        ).outerjoin_from(span, anno, onclause)
+
+
+class _SpanAttr(NamedTuple):
+    span_rowid: _SpanRowId
+
+
+class _AnnoAttr(NamedTuple):
+    span_rowid: _SpanRowId
+    id_: _AnnoRowId
+    updated_at: datetime
+
+
+def _key(p: Received[Precursors.SpanAnnotation]) -> _Key:
+    return p.item.obj.name, p.item.span_id
+
+
+def _unique_by(p: Received[Insertables.SpanAnnotation]) -> _UniqueBy:
+    return p.item.obj.name, p.item.span_rowid
+
+
+def _time(p: Received[Any]) -> datetime:
+    return p.received_at
phoenix/db/insertion/trace_annotation.py
@@ -0,0 +1,158 @@
+from collections.abc import Mapping
+from datetime import datetime
+from typing import Any, NamedTuple, Optional
+
+from sqlalchemy import Row, Select, and_, select, tuple_
+from sqlalchemy.ext.asyncio import AsyncSession
+from typing_extensions import TypeAlias
+
+from phoenix.db import models
+from phoenix.db.helpers import dedup
+from phoenix.db.insertion.helpers import as_kv
+from phoenix.db.insertion.types import (
+    Insertables,
+    Postponed,
+    Precursors,
+    QueueInserter,
+    Received,
+)
+from phoenix.server.dml_event import TraceAnnotationDmlEvent
+
+_Name: TypeAlias = str
+_TraceId: TypeAlias = str
+_TraceRowId: TypeAlias = int
+_AnnoRowId: TypeAlias = int
+
+_Key: TypeAlias = tuple[_Name, _TraceId]
+_UniqueBy: TypeAlias = tuple[_Name, _TraceRowId]
+_Existing: TypeAlias = tuple[
+    _TraceRowId,
+    _TraceId,
+    Optional[_AnnoRowId],
+    Optional[_Name],
+    Optional[datetime],
+]
+
+
+class TraceAnnotationQueueInserter(
+    QueueInserter[
+        Precursors.TraceAnnotation,
+        Insertables.TraceAnnotation,
+        models.TraceAnnotation,
+        TraceAnnotationDmlEvent,
+    ],
+    table=models.TraceAnnotation,
+    unique_by=("name", "trace_rowid"),
+):
+    async def _events(
+        self,
+        session: AsyncSession,
+        *insertions: Insertables.TraceAnnotation,
+    ) -> list[TraceAnnotationDmlEvent]:
+        records = [dict(as_kv(ins.row)) for ins in insertions]
+        stmt = self._insert_on_conflict(*records).returning(self.table.id)
+        ids = tuple([_ async for _ in await session.stream_scalars(stmt)])
+        return [TraceAnnotationDmlEvent(ids)]
+
+    async def _partition(
+        self,
+        session: AsyncSession,
+        *parcels: Received[Precursors.TraceAnnotation],
+    ) -> tuple[
+        list[Received[Insertables.TraceAnnotation]],
+        list[Postponed[Precursors.TraceAnnotation]],
+        list[Received[Precursors.TraceAnnotation]],
+    ]:
+        to_insert: list[Received[Insertables.TraceAnnotation]] = []
+        to_postpone: list[Postponed[Precursors.TraceAnnotation]] = []
+        to_discard: list[Received[Precursors.TraceAnnotation]] = []
+
+        stmt = self._select_existing(*map(_key, parcels))
+        existing: list[Row[_Existing]] = [_ async for _ in await session.stream(stmt)]
+        existing_traces: Mapping[str, _TraceAttr] = {
+            e.trace_id: _TraceAttr(e.trace_rowid) for e in existing
+        }
+        existing_annos: Mapping[_Key, _AnnoAttr] = {
+            (e.name, e.trace_id): _AnnoAttr(e.trace_rowid, e.id, e.updated_at)
+            for e in existing
+            if e.id is not None and e.name is not None and e.updated_at is not None
+        }
+
+        for p in parcels:
+            if (anno := existing_annos.get(_key(p))) is not None:
+                if p.received_at <= anno.updated_at:
+                    to_discard.append(p)
+                else:
+                    to_insert.append(
+                        Received(
+                            received_at=p.received_at,
+                            item=p.item.as_insertable(
+                                trace_rowid=anno.trace_rowid,
+                                id_=anno.id_,
+                            ),
+                        )
+                    )
+            elif (trace := existing_traces.get(p.item.trace_id)) is not None:
+                to_insert.append(
+                    Received(
+                        received_at=p.received_at,
+                        item=p.item.as_insertable(
+                            trace_rowid=trace.trace_rowid,
+                        ),
+                    )
+                )
+            elif isinstance(p, Postponed):
+                if p.retries_left > 1:
+                    to_postpone.append(p.postpone(p.retries_left - 1))
+                else:
+                    to_discard.append(p)
+            elif isinstance(p, Received):
+                to_postpone.append(p.postpone(self._retry_allowance))
+            else:
+                to_discard.append(p)
+
+        assert len(to_insert) + len(to_postpone) + len(to_discard) == len(parcels)
+        to_insert = dedup(sorted(to_insert, key=_time, reverse=True), _unique_by)[::-1]
+        return to_insert, to_postpone, to_discard
+
+    def _select_existing(self, *keys: _Key) -> Select[_Existing]:
+        anno = self.table
+        trace = (
+            select(models.Trace.id, models.Trace.trace_id)
+            .where(models.Trace.trace_id.in_({trace_id for _, trace_id in keys}))
+            .cte()
+        )
+        onclause = and_(
+            trace.c.id == anno.trace_rowid,
+            anno.name.in_({name for name, _ in keys}),
+            tuple_(anno.name, trace.c.trace_id).in_(keys),
+        )
+        return select(
+            trace.c.id.label("trace_rowid"),
+            trace.c.trace_id,
+            anno.id,
+            anno.name,
+            anno.updated_at,
+        ).outerjoin_from(trace, anno, onclause)
+
+
+class _TraceAttr(NamedTuple):
+    trace_rowid: _TraceRowId
+
+
+class _AnnoAttr(NamedTuple):
+    trace_rowid: _TraceRowId
+    id_: _AnnoRowId
+    updated_at: datetime
+
+
+def _key(p: Received[Precursors.TraceAnnotation]) -> _Key:
+    return p.item.obj.name, p.item.trace_id
+
+
+def _unique_by(p: Received[Insertables.TraceAnnotation]) -> _UniqueBy:
+    return p.item.obj.name, p.item.trace_rowid
+
+
+def _time(p: Received[Any]) -> datetime:
+    return p.received_at
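Both annotation inserters above sort the pending parcels newest-first, drop older duplicates per unique key, and then restore ascending order before inserting. A rough standalone sketch of that dedup step follows; the dedup helper here is a stand-in, since the implementation of phoenix.db.helpers.dedup does not appear in this diff, and plain tuples stand in for the Received parcels.

from datetime import datetime, timedelta
from typing import Callable, Hashable, Iterable, TypeVar

T = TypeVar("T")


def dedup(items: Iterable[T], key: Callable[[T], Hashable]) -> list[T]:
    # Stand-in for phoenix.db.helpers.dedup: keep the first item seen per key.
    seen: set[Hashable] = set()
    out: list[T] = []
    for item in items:
        if (k := key(item)) not in seen:
            seen.add(k)
            out.append(item)
    return out


now = datetime.now()
# (name, rowid, received_at) triples standing in for Received parcels.
parcels = [
    ("correctness", 1, now),
    ("correctness", 1, now + timedelta(seconds=5)),  # newer duplicate should win
    ("toxicity", 1, now + timedelta(seconds=1)),
]
newest_first = sorted(parcels, key=lambda p: p[2], reverse=True)
to_insert = dedup(newest_first, key=lambda p: (p[0], p[1]))[::-1]
print(to_insert)  # one parcel per (name, rowid), back in ascending time order

Sorting descending before dedup means the most recently received annotation survives for each (name, rowid) pair, and the final reversal hands the rows to the insert in chronological order.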