arize-phoenix 3.16.0__py3-none-any.whl → 7.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arize-phoenix might be problematic. See the package registry page for more details.
- arize_phoenix-7.7.0.dist-info/METADATA +261 -0
- arize_phoenix-7.7.0.dist-info/RECORD +345 -0
- {arize_phoenix-3.16.0.dist-info → arize_phoenix-7.7.0.dist-info}/WHEEL +1 -1
- arize_phoenix-7.7.0.dist-info/entry_points.txt +3 -0
- phoenix/__init__.py +86 -14
- phoenix/auth.py +309 -0
- phoenix/config.py +675 -45
- phoenix/core/model.py +32 -30
- phoenix/core/model_schema.py +102 -109
- phoenix/core/model_schema_adapter.py +48 -45
- phoenix/datetime_utils.py +24 -3
- phoenix/db/README.md +54 -0
- phoenix/db/__init__.py +4 -0
- phoenix/db/alembic.ini +85 -0
- phoenix/db/bulk_inserter.py +294 -0
- phoenix/db/engines.py +208 -0
- phoenix/db/enums.py +20 -0
- phoenix/db/facilitator.py +113 -0
- phoenix/db/helpers.py +159 -0
- phoenix/db/insertion/constants.py +2 -0
- phoenix/db/insertion/dataset.py +227 -0
- phoenix/db/insertion/document_annotation.py +171 -0
- phoenix/db/insertion/evaluation.py +191 -0
- phoenix/db/insertion/helpers.py +98 -0
- phoenix/db/insertion/span.py +193 -0
- phoenix/db/insertion/span_annotation.py +158 -0
- phoenix/db/insertion/trace_annotation.py +158 -0
- phoenix/db/insertion/types.py +256 -0
- phoenix/db/migrate.py +86 -0
- phoenix/db/migrations/data_migration_scripts/populate_project_sessions.py +199 -0
- phoenix/db/migrations/env.py +114 -0
- phoenix/db/migrations/script.py.mako +26 -0
- phoenix/db/migrations/versions/10460e46d750_datasets.py +317 -0
- phoenix/db/migrations/versions/3be8647b87d8_add_token_columns_to_spans_table.py +126 -0
- phoenix/db/migrations/versions/4ded9e43755f_create_project_sessions_table.py +66 -0
- phoenix/db/migrations/versions/cd164e83824f_users_and_tokens.py +157 -0
- phoenix/db/migrations/versions/cf03bd6bae1d_init.py +280 -0
- phoenix/db/models.py +807 -0
- phoenix/exceptions.py +5 -1
- phoenix/experiments/__init__.py +6 -0
- phoenix/experiments/evaluators/__init__.py +29 -0
- phoenix/experiments/evaluators/base.py +158 -0
- phoenix/experiments/evaluators/code_evaluators.py +184 -0
- phoenix/experiments/evaluators/llm_evaluators.py +473 -0
- phoenix/experiments/evaluators/utils.py +236 -0
- phoenix/experiments/functions.py +772 -0
- phoenix/experiments/tracing.py +86 -0
- phoenix/experiments/types.py +726 -0
- phoenix/experiments/utils.py +25 -0
- phoenix/inferences/__init__.py +0 -0
- phoenix/{datasets → inferences}/errors.py +6 -5
- phoenix/{datasets → inferences}/fixtures.py +49 -42
- phoenix/{datasets/dataset.py → inferences/inferences.py} +121 -105
- phoenix/{datasets → inferences}/schema.py +11 -11
- phoenix/{datasets → inferences}/validation.py +13 -14
- phoenix/logging/__init__.py +3 -0
- phoenix/logging/_config.py +90 -0
- phoenix/logging/_filter.py +6 -0
- phoenix/logging/_formatter.py +69 -0
- phoenix/metrics/__init__.py +5 -4
- phoenix/metrics/binning.py +4 -3
- phoenix/metrics/metrics.py +2 -1
- phoenix/metrics/mixins.py +7 -6
- phoenix/metrics/retrieval_metrics.py +2 -1
- phoenix/metrics/timeseries.py +5 -4
- phoenix/metrics/wrappers.py +9 -3
- phoenix/pointcloud/clustering.py +5 -5
- phoenix/pointcloud/pointcloud.py +7 -5
- phoenix/pointcloud/projectors.py +5 -6
- phoenix/pointcloud/umap_parameters.py +53 -52
- phoenix/server/api/README.md +28 -0
- phoenix/server/api/auth.py +44 -0
- phoenix/server/api/context.py +152 -9
- phoenix/server/api/dataloaders/__init__.py +91 -0
- phoenix/server/api/dataloaders/annotation_summaries.py +139 -0
- phoenix/server/api/dataloaders/average_experiment_run_latency.py +54 -0
- phoenix/server/api/dataloaders/cache/__init__.py +3 -0
- phoenix/server/api/dataloaders/cache/two_tier_cache.py +68 -0
- phoenix/server/api/dataloaders/dataset_example_revisions.py +131 -0
- phoenix/server/api/dataloaders/dataset_example_spans.py +38 -0
- phoenix/server/api/dataloaders/document_evaluation_summaries.py +144 -0
- phoenix/server/api/dataloaders/document_evaluations.py +31 -0
- phoenix/server/api/dataloaders/document_retrieval_metrics.py +89 -0
- phoenix/server/api/dataloaders/experiment_annotation_summaries.py +79 -0
- phoenix/server/api/dataloaders/experiment_error_rates.py +58 -0
- phoenix/server/api/dataloaders/experiment_run_annotations.py +36 -0
- phoenix/server/api/dataloaders/experiment_run_counts.py +49 -0
- phoenix/server/api/dataloaders/experiment_sequence_number.py +44 -0
- phoenix/server/api/dataloaders/latency_ms_quantile.py +188 -0
- phoenix/server/api/dataloaders/min_start_or_max_end_times.py +85 -0
- phoenix/server/api/dataloaders/project_by_name.py +31 -0
- phoenix/server/api/dataloaders/record_counts.py +116 -0
- phoenix/server/api/dataloaders/session_io.py +79 -0
- phoenix/server/api/dataloaders/session_num_traces.py +30 -0
- phoenix/server/api/dataloaders/session_num_traces_with_error.py +32 -0
- phoenix/server/api/dataloaders/session_token_usages.py +41 -0
- phoenix/server/api/dataloaders/session_trace_latency_ms_quantile.py +55 -0
- phoenix/server/api/dataloaders/span_annotations.py +26 -0
- phoenix/server/api/dataloaders/span_dataset_examples.py +31 -0
- phoenix/server/api/dataloaders/span_descendants.py +57 -0
- phoenix/server/api/dataloaders/span_projects.py +33 -0
- phoenix/server/api/dataloaders/token_counts.py +124 -0
- phoenix/server/api/dataloaders/trace_by_trace_ids.py +25 -0
- phoenix/server/api/dataloaders/trace_root_spans.py +32 -0
- phoenix/server/api/dataloaders/user_roles.py +30 -0
- phoenix/server/api/dataloaders/users.py +33 -0
- phoenix/server/api/exceptions.py +48 -0
- phoenix/server/api/helpers/__init__.py +12 -0
- phoenix/server/api/helpers/dataset_helpers.py +217 -0
- phoenix/server/api/helpers/experiment_run_filters.py +763 -0
- phoenix/server/api/helpers/playground_clients.py +948 -0
- phoenix/server/api/helpers/playground_registry.py +70 -0
- phoenix/server/api/helpers/playground_spans.py +455 -0
- phoenix/server/api/input_types/AddExamplesToDatasetInput.py +16 -0
- phoenix/server/api/input_types/AddSpansToDatasetInput.py +14 -0
- phoenix/server/api/input_types/ChatCompletionInput.py +38 -0
- phoenix/server/api/input_types/ChatCompletionMessageInput.py +24 -0
- phoenix/server/api/input_types/ClearProjectInput.py +15 -0
- phoenix/server/api/input_types/ClusterInput.py +2 -2
- phoenix/server/api/input_types/CreateDatasetInput.py +12 -0
- phoenix/server/api/input_types/CreateSpanAnnotationInput.py +18 -0
- phoenix/server/api/input_types/CreateTraceAnnotationInput.py +18 -0
- phoenix/server/api/input_types/DataQualityMetricInput.py +5 -2
- phoenix/server/api/input_types/DatasetExampleInput.py +14 -0
- phoenix/server/api/input_types/DatasetSort.py +17 -0
- phoenix/server/api/input_types/DatasetVersionSort.py +16 -0
- phoenix/server/api/input_types/DeleteAnnotationsInput.py +7 -0
- phoenix/server/api/input_types/DeleteDatasetExamplesInput.py +13 -0
- phoenix/server/api/input_types/DeleteDatasetInput.py +7 -0
- phoenix/server/api/input_types/DeleteExperimentsInput.py +7 -0
- phoenix/server/api/input_types/DimensionFilter.py +4 -4
- phoenix/server/api/input_types/GenerativeModelInput.py +17 -0
- phoenix/server/api/input_types/Granularity.py +1 -1
- phoenix/server/api/input_types/InvocationParameters.py +162 -0
- phoenix/server/api/input_types/PatchAnnotationInput.py +19 -0
- phoenix/server/api/input_types/PatchDatasetExamplesInput.py +35 -0
- phoenix/server/api/input_types/PatchDatasetInput.py +14 -0
- phoenix/server/api/input_types/PerformanceMetricInput.py +5 -2
- phoenix/server/api/input_types/ProjectSessionSort.py +29 -0
- phoenix/server/api/input_types/SpanAnnotationSort.py +17 -0
- phoenix/server/api/input_types/SpanSort.py +134 -69
- phoenix/server/api/input_types/TemplateOptions.py +10 -0
- phoenix/server/api/input_types/TraceAnnotationSort.py +17 -0
- phoenix/server/api/input_types/UserRoleInput.py +9 -0
- phoenix/server/api/mutations/__init__.py +28 -0
- phoenix/server/api/mutations/api_key_mutations.py +167 -0
- phoenix/server/api/mutations/chat_mutations.py +593 -0
- phoenix/server/api/mutations/dataset_mutations.py +591 -0
- phoenix/server/api/mutations/experiment_mutations.py +75 -0
- phoenix/server/api/{types/ExportEventsMutation.py → mutations/export_events_mutations.py} +21 -18
- phoenix/server/api/mutations/project_mutations.py +57 -0
- phoenix/server/api/mutations/span_annotations_mutations.py +128 -0
- phoenix/server/api/mutations/trace_annotations_mutations.py +127 -0
- phoenix/server/api/mutations/user_mutations.py +329 -0
- phoenix/server/api/openapi/__init__.py +0 -0
- phoenix/server/api/openapi/main.py +17 -0
- phoenix/server/api/openapi/schema.py +16 -0
- phoenix/server/api/queries.py +738 -0
- phoenix/server/api/routers/__init__.py +11 -0
- phoenix/server/api/routers/auth.py +284 -0
- phoenix/server/api/routers/embeddings.py +26 -0
- phoenix/server/api/routers/oauth2.py +488 -0
- phoenix/server/api/routers/v1/__init__.py +64 -0
- phoenix/server/api/routers/v1/datasets.py +1017 -0
- phoenix/server/api/routers/v1/evaluations.py +362 -0
- phoenix/server/api/routers/v1/experiment_evaluations.py +115 -0
- phoenix/server/api/routers/v1/experiment_runs.py +167 -0
- phoenix/server/api/routers/v1/experiments.py +308 -0
- phoenix/server/api/routers/v1/pydantic_compat.py +78 -0
- phoenix/server/api/routers/v1/spans.py +267 -0
- phoenix/server/api/routers/v1/traces.py +208 -0
- phoenix/server/api/routers/v1/utils.py +95 -0
- phoenix/server/api/schema.py +44 -247
- phoenix/server/api/subscriptions.py +597 -0
- phoenix/server/api/types/Annotation.py +21 -0
- phoenix/server/api/types/AnnotationSummary.py +55 -0
- phoenix/server/api/types/AnnotatorKind.py +16 -0
- phoenix/server/api/types/ApiKey.py +27 -0
- phoenix/server/api/types/AuthMethod.py +9 -0
- phoenix/server/api/types/ChatCompletionMessageRole.py +11 -0
- phoenix/server/api/types/ChatCompletionSubscriptionPayload.py +46 -0
- phoenix/server/api/types/Cluster.py +25 -24
- phoenix/server/api/types/CreateDatasetPayload.py +8 -0
- phoenix/server/api/types/DataQualityMetric.py +31 -13
- phoenix/server/api/types/Dataset.py +288 -63
- phoenix/server/api/types/DatasetExample.py +85 -0
- phoenix/server/api/types/DatasetExampleRevision.py +34 -0
- phoenix/server/api/types/DatasetVersion.py +14 -0
- phoenix/server/api/types/Dimension.py +32 -31
- phoenix/server/api/types/DocumentEvaluationSummary.py +9 -8
- phoenix/server/api/types/EmbeddingDimension.py +56 -49
- phoenix/server/api/types/Evaluation.py +25 -31
- phoenix/server/api/types/EvaluationSummary.py +30 -50
- phoenix/server/api/types/Event.py +20 -20
- phoenix/server/api/types/ExampleRevisionInterface.py +14 -0
- phoenix/server/api/types/Experiment.py +152 -0
- phoenix/server/api/types/ExperimentAnnotationSummary.py +13 -0
- phoenix/server/api/types/ExperimentComparison.py +17 -0
- phoenix/server/api/types/ExperimentRun.py +119 -0
- phoenix/server/api/types/ExperimentRunAnnotation.py +56 -0
- phoenix/server/api/types/GenerativeModel.py +9 -0
- phoenix/server/api/types/GenerativeProvider.py +85 -0
- phoenix/server/api/types/Inferences.py +80 -0
- phoenix/server/api/types/InferencesRole.py +23 -0
- phoenix/server/api/types/LabelFraction.py +7 -0
- phoenix/server/api/types/MimeType.py +2 -2
- phoenix/server/api/types/Model.py +54 -54
- phoenix/server/api/types/PerformanceMetric.py +8 -5
- phoenix/server/api/types/Project.py +407 -142
- phoenix/server/api/types/ProjectSession.py +139 -0
- phoenix/server/api/types/Segments.py +4 -4
- phoenix/server/api/types/Span.py +221 -176
- phoenix/server/api/types/SpanAnnotation.py +43 -0
- phoenix/server/api/types/SpanIOValue.py +15 -0
- phoenix/server/api/types/SystemApiKey.py +9 -0
- phoenix/server/api/types/TemplateLanguage.py +10 -0
- phoenix/server/api/types/TimeSeries.py +19 -15
- phoenix/server/api/types/TokenUsage.py +11 -0
- phoenix/server/api/types/Trace.py +154 -0
- phoenix/server/api/types/TraceAnnotation.py +45 -0
- phoenix/server/api/types/UMAPPoints.py +7 -7
- phoenix/server/api/types/User.py +60 -0
- phoenix/server/api/types/UserApiKey.py +45 -0
- phoenix/server/api/types/UserRole.py +15 -0
- phoenix/server/api/types/node.py +13 -107
- phoenix/server/api/types/pagination.py +156 -57
- phoenix/server/api/utils.py +34 -0
- phoenix/server/app.py +864 -115
- phoenix/server/bearer_auth.py +163 -0
- phoenix/server/dml_event.py +136 -0
- phoenix/server/dml_event_handler.py +256 -0
- phoenix/server/email/__init__.py +0 -0
- phoenix/server/email/sender.py +97 -0
- phoenix/server/email/templates/__init__.py +0 -0
- phoenix/server/email/templates/password_reset.html +19 -0
- phoenix/server/email/types.py +11 -0
- phoenix/server/grpc_server.py +102 -0
- phoenix/server/jwt_store.py +505 -0
- phoenix/server/main.py +305 -116
- phoenix/server/oauth2.py +52 -0
- phoenix/server/openapi/__init__.py +0 -0
- phoenix/server/prometheus.py +111 -0
- phoenix/server/rate_limiters.py +188 -0
- phoenix/server/static/.vite/manifest.json +87 -0
- phoenix/server/static/assets/components-Cy9nwIvF.js +2125 -0
- phoenix/server/static/assets/index-BKvHIxkk.js +113 -0
- phoenix/server/static/assets/pages-CUi2xCVQ.js +4449 -0
- phoenix/server/static/assets/vendor-DvC8cT4X.js +894 -0
- phoenix/server/static/assets/vendor-DxkFTwjz.css +1 -0
- phoenix/server/static/assets/vendor-arizeai-Do1793cv.js +662 -0
- phoenix/server/static/assets/vendor-codemirror-BzwZPyJM.js +24 -0
- phoenix/server/static/assets/vendor-recharts-_Jb7JjhG.js +59 -0
- phoenix/server/static/assets/vendor-shiki-Cl9QBraO.js +5 -0
- phoenix/server/static/assets/vendor-three-DwGkEfCM.js +2998 -0
- phoenix/server/telemetry.py +68 -0
- phoenix/server/templates/index.html +82 -23
- phoenix/server/thread_server.py +3 -3
- phoenix/server/types.py +275 -0
- phoenix/services.py +27 -18
- phoenix/session/client.py +743 -68
- phoenix/session/data_extractor.py +31 -7
- phoenix/session/evaluation.py +3 -9
- phoenix/session/session.py +263 -219
- phoenix/settings.py +22 -0
- phoenix/trace/__init__.py +2 -22
- phoenix/trace/attributes.py +338 -0
- phoenix/trace/dsl/README.md +116 -0
- phoenix/trace/dsl/filter.py +663 -213
- phoenix/trace/dsl/helpers.py +73 -21
- phoenix/trace/dsl/query.py +574 -201
- phoenix/trace/exporter.py +24 -19
- phoenix/trace/fixtures.py +368 -32
- phoenix/trace/otel.py +71 -219
- phoenix/trace/projects.py +3 -2
- phoenix/trace/schemas.py +33 -11
- phoenix/trace/span_evaluations.py +21 -16
- phoenix/trace/span_json_decoder.py +6 -4
- phoenix/trace/span_json_encoder.py +2 -2
- phoenix/trace/trace_dataset.py +47 -32
- phoenix/trace/utils.py +21 -4
- phoenix/utilities/__init__.py +0 -26
- phoenix/utilities/client.py +132 -0
- phoenix/utilities/deprecation.py +31 -0
- phoenix/utilities/error_handling.py +3 -2
- phoenix/utilities/json.py +109 -0
- phoenix/utilities/logging.py +8 -0
- phoenix/utilities/project.py +2 -2
- phoenix/utilities/re.py +49 -0
- phoenix/utilities/span_store.py +0 -23
- phoenix/utilities/template_formatters.py +99 -0
- phoenix/version.py +1 -1
- arize_phoenix-3.16.0.dist-info/METADATA +0 -495
- arize_phoenix-3.16.0.dist-info/RECORD +0 -178
- phoenix/core/project.py +0 -617
- phoenix/core/traces.py +0 -100
- phoenix/experimental/evals/__init__.py +0 -73
- phoenix/experimental/evals/evaluators.py +0 -413
- phoenix/experimental/evals/functions/__init__.py +0 -4
- phoenix/experimental/evals/functions/classify.py +0 -453
- phoenix/experimental/evals/functions/executor.py +0 -353
- phoenix/experimental/evals/functions/generate.py +0 -138
- phoenix/experimental/evals/functions/processing.py +0 -76
- phoenix/experimental/evals/models/__init__.py +0 -14
- phoenix/experimental/evals/models/anthropic.py +0 -175
- phoenix/experimental/evals/models/base.py +0 -170
- phoenix/experimental/evals/models/bedrock.py +0 -221
- phoenix/experimental/evals/models/litellm.py +0 -134
- phoenix/experimental/evals/models/openai.py +0 -448
- phoenix/experimental/evals/models/rate_limiters.py +0 -246
- phoenix/experimental/evals/models/vertex.py +0 -173
- phoenix/experimental/evals/models/vertexai.py +0 -186
- phoenix/experimental/evals/retrievals.py +0 -96
- phoenix/experimental/evals/templates/__init__.py +0 -50
- phoenix/experimental/evals/templates/default_templates.py +0 -472
- phoenix/experimental/evals/templates/template.py +0 -195
- phoenix/experimental/evals/utils/__init__.py +0 -172
- phoenix/experimental/evals/utils/threads.py +0 -27
- phoenix/server/api/helpers.py +0 -11
- phoenix/server/api/routers/evaluation_handler.py +0 -109
- phoenix/server/api/routers/span_handler.py +0 -70
- phoenix/server/api/routers/trace_handler.py +0 -60
- phoenix/server/api/types/DatasetRole.py +0 -23
- phoenix/server/static/index.css +0 -6
- phoenix/server/static/index.js +0 -7447
- phoenix/storage/span_store/__init__.py +0 -23
- phoenix/storage/span_store/text_file.py +0 -85
- phoenix/trace/dsl/missing.py +0 -60
- phoenix/trace/langchain/__init__.py +0 -3
- phoenix/trace/langchain/instrumentor.py +0 -35
- phoenix/trace/llama_index/__init__.py +0 -3
- phoenix/trace/llama_index/callback.py +0 -102
- phoenix/trace/openai/__init__.py +0 -3
- phoenix/trace/openai/instrumentor.py +0 -30
- {arize_phoenix-3.16.0.dist-info → arize_phoenix-7.7.0.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-3.16.0.dist-info → arize_phoenix-7.7.0.dist-info}/licenses/LICENSE +0 -0
- /phoenix/{datasets → db/insertion}/__init__.py +0 -0
- /phoenix/{experimental → db/migrations}/__init__.py +0 -0
- /phoenix/{storage → db/migrations/data_migration_scripts}/__init__.py +0 -0
|
@@ -0,0 +1,256 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import logging
|
|
5
|
+
from abc import ABC, abstractmethod
|
|
6
|
+
from collections.abc import Mapping, Sequence
|
|
7
|
+
from copy import copy
|
|
8
|
+
from dataclasses import dataclass, field
|
|
9
|
+
from datetime import datetime, timezone
|
|
10
|
+
from typing import Any, Generic, Optional, Protocol, TypeVar, cast
|
|
11
|
+
|
|
12
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
13
|
+
from sqlalchemy.sql.dml import Insert
|
|
14
|
+
|
|
15
|
+
from phoenix.db import models
|
|
16
|
+
from phoenix.db.insertion.constants import DEFAULT_RETRY_ALLOWANCE, DEFAULT_RETRY_DELAY_SEC
|
|
17
|
+
from phoenix.db.insertion.helpers import insert_on_conflict
|
|
18
|
+
from phoenix.server.dml_event import DmlEvent
|
|
19
|
+
from phoenix.server.types import DbSessionFactory
|
|
20
|
+
|
|
21
|
+
logger = logging.getLogger(__name__)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class Insertable(Protocol):
    """Structural interface for any record that can yield an ORM row to insert."""

    @property
    def row(self) -> models.Base: ...
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
# Type variables parameterizing QueueInserter and its helper wrappers.
_AnyT = TypeVar("_AnyT")  # arbitrary wrapped payload (see Received)
_PrecursorT = TypeVar("_PrecursorT")  # raw record before row-ID resolution
_InsertableT = TypeVar("_InsertableT", bound=Insertable)  # resolved, insertable record
_RowT = TypeVar("_RowT", bound=models.Base)  # target ORM table row type
_DmlEventT = TypeVar("_DmlEventT", bound=DmlEvent)  # event emitted after insertion
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@dataclass(frozen=True)
class Received(Generic[_AnyT]):
    """Wrapper recording when an item entered the queue."""

    # The wrapped payload.
    item: _AnyT
    # UTC timestamp captured at construction time.
    received_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))

    def postpone(self, retries_left: int = DEFAULT_RETRY_ALLOWANCE) -> Postponed[_AnyT]:
        """Convert to a Postponed record, preserving the original receipt time."""
        return Postponed(item=self.item, received_at=self.received_at, retries_left=retries_left)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@dataclass(frozen=True)
class Postponed(Received[_AnyT]):
    """A Received item that failed insertion and is awaiting retry."""

    # Remaining retry attempts before the item is given up on as a failure.
    retries_left: int = field(default=DEFAULT_RETRY_ALLOWANCE)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class QueueInserter(ABC, Generic[_PrecursorT, _InsertableT, _RowT, _DmlEventT]):
    """Buffering inserter for one database table.

    Precursor records are enqueued as they arrive; ``insert`` flushes the
    buffer in bulk, falling back to per-record insertion on failure, and
    re-queues records that may succeed later (with a bounded retry allowance).
    Subclasses supply the target table and conflict key via class keyword
    arguments, plus the ``_partition`` and ``_events`` hooks.
    """

    # Populated per subclass by __init_subclass__ below.
    table: type[_RowT]
    unique_by: Sequence[str]

    def __init_subclass__(
        cls,
        table: type[_RowT],
        unique_by: Sequence[str],
    ) -> None:
        # Subclasses declare their target table and uniqueness columns, e.g.
        # `class X(QueueInserter[...], table=models.Y, unique_by=("a", "b")):`
        cls.table = table
        cls.unique_by = unique_by

    def __init__(
        self,
        db: DbSessionFactory,
        retry_delay_sec: float = DEFAULT_RETRY_DELAY_SEC,
        retry_allowance: int = DEFAULT_RETRY_ALLOWANCE,
    ) -> None:
        # Buffer of received-but-not-yet-inserted precursor records.
        self._queue: list[Received[_PrecursorT]] = []
        self._db = db
        self._retry_delay_sec = retry_delay_sec
        self._retry_allowance = retry_allowance

    @property
    def empty(self) -> bool:
        """True when there is nothing buffered for insertion."""
        return not bool(self._queue)

    async def enqueue(self, *items: _PrecursorT) -> None:
        """Wrap each item with a receipt timestamp and buffer it."""
        self._queue.extend([Received(item) for item in items])

    @abstractmethod
    async def _partition(
        self,
        session: AsyncSession,
        *parcels: Received[_PrecursorT],
    ) -> tuple[
        list[Received[_InsertableT]],  # resolved and ready to insert
        list[Postponed[_PrecursorT]],  # not yet resolvable; retry later
        list[Received[_PrecursorT]],  # cannot be inserted; discarded
    ]: ...

    async def insert(self) -> Optional[list[_DmlEventT]]:
        """Flush the buffer, returning emitted DML events.

        Returns None when the buffer is empty.  Records that could not be
        inserted but are retryable are put back on the queue after
        ``retry_delay_sec`` via the running event loop.
        """
        if not self._queue:
            return None
        # Swap the buffer out in one statement so records enqueued during the
        # flush are not lost or double-processed.
        self._queue, parcels = [], self._queue
        events: list[_DmlEventT] = []
        async with self._db() as session:
            to_insert, to_postpone, _ = await self._partition(session, *parcels)
            if to_insert:
                events, to_retry, _ = await self._insert(session, *to_insert)
                if to_retry:
                    to_postpone.extend(to_retry)
        if to_postpone:
            # Re-queue postponed records after a delay rather than immediately,
            # giving their prerequisites time to materialize.
            loop = asyncio.get_running_loop()
            loop.call_later(self._retry_delay_sec, self._queue.extend, to_postpone)
        return events

    def _insert_on_conflict(self, *records: Mapping[str, Any]) -> Insert:
        """Build a dialect-appropriate upsert statement for this table."""
        return insert_on_conflict(
            *records,
            table=self.table,
            unique_by=self.unique_by,
            dialect=self._db.dialect,
        )

    @abstractmethod
    async def _events(
        self,
        session: AsyncSession,
        *insertions: _InsertableT,
    ) -> list[_DmlEventT]: ...

    async def _insert(
        self,
        session: AsyncSession,
        *parcels: Received[_InsertableT],
    ) -> tuple[
        list[_DmlEventT],  # events for successful insertions
        list[Postponed[_PrecursorT]],  # failed but retryable
        list[Received[_InsertableT]],  # failed with no retries left
    ]:
        """Insert records in bulk; on failure, retry them one at a time.

        Each attempt runs in a nested transaction (savepoint) so one bad
        record cannot poison the enclosing session.
        """
        to_retry: list[Postponed[_PrecursorT]] = []
        failures: list[Received[_InsertableT]] = []
        events: list[_DmlEventT] = []
        try:
            # Fast path: insert everything in a single savepoint.
            async with session.begin_nested():
                events.extend(await self._events(session, *(p.item for p in parcels)))
        except BaseException:
            logger.exception(
                f"Failed to bulk insert for {self.table.__name__}. "
                f"Will try to insert ({len(parcels)} records) individually instead."
            )
            # Slow path: isolate each record in its own savepoint so the
            # failing one(s) can be identified and retried or dropped.
            for p in parcels:
                try:
                    async with session.begin_nested():
                        events.extend(await self._events(session, p.item))
                except BaseException:
                    logger.exception(f"Failed to insert for {self.table.__name__}.")
                    if isinstance(p, Postponed) and p.retries_left == 1:
                        # Last allowed attempt just failed; give up on it.
                        failures.append(p)
                    else:
                        to_retry.append(
                            Postponed(
                                item=cast(_PrecursorT, p.item),
                                received_at=p.received_at,
                                retries_left=(p.retries_left - 1)
                                if isinstance(p, Postponed)
                                else self._retry_allowance,
                            )
                        )
        return events, to_retry, failures
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
class Precursors(ABC):
    """Namespace for annotation records that still reference their span/trace
    by string ID, before that ID has been resolved to a database row ID."""

    @dataclass(frozen=True)
    class SpanAnnotation:
        # String span identifier the annotation refers to — presumably the
        # OTel span ID; confirm against the caller.
        span_id: str
        # ORM object to insert once the span row is resolved.
        obj: models.SpanAnnotation

        def as_insertable(
            self,
            span_rowid: int,
            id_: Optional[int] = None,
        ) -> Insertables.SpanAnnotation:
            """Attach the resolved span row ID (and optional existing primary
            key) to produce an insertable record."""
            return Insertables.SpanAnnotation(
                span_id=self.span_id,
                obj=self.obj,
                span_rowid=span_rowid,
                id_=id_,
            )

    @dataclass(frozen=True)
    class TraceAnnotation:
        # String trace identifier the annotation refers to.
        trace_id: str
        # ORM object to insert once the trace row is resolved.
        obj: models.TraceAnnotation

        def as_insertable(
            self,
            trace_rowid: int,
            id_: Optional[int] = None,
        ) -> Insertables.TraceAnnotation:
            """Attach the resolved trace row ID (and optional existing primary
            key) to produce an insertable record."""
            return Insertables.TraceAnnotation(
                trace_id=self.trace_id,
                obj=self.obj,
                trace_rowid=trace_rowid,
                id_=id_,
            )

    @dataclass(frozen=True)
    class DocumentAnnotation:
        # String span identifier the annotated document belongs to.
        span_id: str
        # Position of the document within the span's retrieval results.
        document_position: int
        # ORM object to insert once the span row is resolved.
        obj: models.DocumentAnnotation

        def as_insertable(
            self,
            span_rowid: int,
            id_: Optional[int] = None,
        ) -> Insertables.DocumentAnnotation:
            """Attach the resolved span row ID (and optional existing primary
            key) to produce an insertable record."""
            return Insertables.DocumentAnnotation(
                span_id=self.span_id,
                document_position=self.document_position,
                obj=self.obj,
                span_rowid=span_rowid,
                id_=id_,
            )
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
class Insertables(ABC):
    """Namespace for fully-resolved annotation records: each extends its
    precursor with the database row ID it resolved to, and satisfies the
    Insertable protocol via its ``row`` property."""

    @dataclass(frozen=True)
    class SpanAnnotation(Precursors.SpanAnnotation):
        # Primary key of the resolved span row.
        span_rowid: int
        # Existing primary key to target, if updating rather than inserting.
        id_: Optional[int] = None

        @property
        def row(self) -> models.SpanAnnotation:
            # Copy so the shared precursor's ORM object is never mutated.
            obj = copy(self.obj)
            obj.span_rowid = self.span_rowid
            if self.id_ is not None:
                obj.id = self.id_
            return obj

    @dataclass(frozen=True)
    class TraceAnnotation(Precursors.TraceAnnotation):
        # Primary key of the resolved trace row.
        trace_rowid: int
        # Existing primary key to target, if updating rather than inserting.
        id_: Optional[int] = None

        @property
        def row(self) -> models.TraceAnnotation:
            # Copy so the shared precursor's ORM object is never mutated.
            obj = copy(self.obj)
            obj.trace_rowid = self.trace_rowid
            if self.id_ is not None:
                obj.id = self.id_
            return obj

    @dataclass(frozen=True)
    class DocumentAnnotation(Precursors.DocumentAnnotation):
        # Primary key of the resolved span row.
        span_rowid: int
        # Existing primary key to target, if updating rather than inserting.
        id_: Optional[int] = None

        @property
        def row(self) -> models.DocumentAnnotation:
            # Copy so the shared precursor's ORM object is never mutated.
            obj = copy(self.obj)
            obj.span_rowid = self.span_rowid
            if self.id_ is not None:
                obj.id = self.id_
            return obj
phoenix/db/migrate.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import codecs
|
|
2
|
+
import logging
|
|
3
|
+
import sys
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from queue import Empty, SimpleQueue
|
|
6
|
+
from threading import Thread
|
|
7
|
+
from time import perf_counter
|
|
8
|
+
from typing import Optional
|
|
9
|
+
|
|
10
|
+
from alembic import command
|
|
11
|
+
from alembic.config import Config
|
|
12
|
+
from sqlalchemy import Engine
|
|
13
|
+
|
|
14
|
+
from phoenix.exceptions import PhoenixMigrationError
|
|
15
|
+
from phoenix.settings import Settings
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger(__name__)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def printif(condition: bool, text: str) -> None:
    """Print *text* only when *condition* is true.

    On Windows, where the console may not render non-ASCII glyphs (e.g. the
    emoji used in migration banners), the text is reduced to its ASCII subset
    and stripped before printing.
    """
    if condition:
        if sys.platform.startswith("win"):
            # Drop characters the legacy Windows console cannot encode.
            text = codecs.encode(text, "ascii", errors="ignore").decode("ascii").strip()
        print(text)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def migrate(
    engine: Engine,
    error_queue: Optional["SimpleQueue[BaseException]"] = None,
) -> None:
    """
    Runs migrations on the database.
    NB: Migrate only works on non-memory databases.

    Args:
        engine: Engine whose URL identifies the database to migrate.
        error_queue: If provided, any exception raised during migration is
            placed on this queue (in addition to being re-raised), so a
            caller running this function in a worker thread can observe it.
    """
    try:
        log_migrations = Settings.log_migrations
        printif(log_migrations, "🏃♀️➡️ Running migrations on the database.")
        printif(log_migrations, "---------------------------")
        config_path = str(Path(__file__).parent.resolve() / "alembic.ini")
        alembic_cfg = Config(config_path)

        # Explicitly set the migration directory
        scripts_location = str(Path(__file__).parent.resolve() / "migrations")
        alembic_cfg.set_main_option("script_location", scripts_location)
        # Escape "%" so configparser-style interpolation in the alembic
        # config does not mangle URLs containing percent-encoded characters.
        url = str(engine.url).replace("%", "%%")
        alembic_cfg.set_main_option("sqlalchemy.url", url)
        start_time = perf_counter()
        with engine.connect() as conn:
            # Hand alembic's env.py this connection instead of letting it
            # open its own.
            alembic_cfg.attributes["connection"] = conn
            command.upgrade(alembic_cfg, "head")
        elapsed_time = perf_counter() - start_time
        engine.dispose()
        printif(log_migrations, "---------------------------")
        printif(log_migrations, f"✅ Migrations completed in {elapsed_time:.3f} seconds.")
    except BaseException as e:
        if error_queue:
            error_queue.put(e)
        raise e
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def migrate_in_thread(engine: Engine) -> None:
    """Run database migrations on a worker thread and wait for completion.

    Some execution contexts (e.g. notebooks) cannot run the migration in the
    main thread, so it is delegated to a short-lived thread; any exception it
    raises is relayed through a queue and re-raised here as a
    PhoenixMigrationError.
    """
    errors: SimpleQueue[BaseException] = SimpleQueue()
    worker = Thread(target=migrate, args=(engine, errors))
    worker.start()
    worker.join()

    try:
        cause = errors.get_nowait()
    except Empty:
        # The worker finished without reporting an error.
        return

    if cause is not None:
        message = (
            "\n\nUnable to migrate configured Phoenix DB. Original error:\n"
            f"{type(cause).__name__}: {str(cause)}"
        )
        raise PhoenixMigrationError(message) from cause
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
# /// script
|
|
2
|
+
# dependencies = [
|
|
3
|
+
# "arize-phoenix[pg]",
|
|
4
|
+
# ]
|
|
5
|
+
# ///
|
|
6
|
+
"""
|
|
7
|
+
Populate the `project_sessions` table with data from the traces and spans tables.
|
|
8
|
+
|
|
9
|
+
Environment variables.
|
|
10
|
+
|
|
11
|
+
- `PHOENIX_SQL_DATABASE_URL` must be set to the database connection string.
|
|
12
|
+
- (optional) Postgresql schema can be set via `PHOENIX_SQL_DATABASE_SCHEMA`.
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
import os
|
|
16
|
+
from datetime import datetime
|
|
17
|
+
from time import perf_counter
|
|
18
|
+
from typing import Any, Optional, Union
|
|
19
|
+
|
|
20
|
+
import sqlean
|
|
21
|
+
from openinference.semconv.trace import SpanAttributes
|
|
22
|
+
from sqlalchemy import (
|
|
23
|
+
JSON,
|
|
24
|
+
Engine,
|
|
25
|
+
NullPool,
|
|
26
|
+
create_engine,
|
|
27
|
+
event,
|
|
28
|
+
func,
|
|
29
|
+
insert,
|
|
30
|
+
make_url,
|
|
31
|
+
select,
|
|
32
|
+
update,
|
|
33
|
+
)
|
|
34
|
+
from sqlalchemy.dialects import postgresql
|
|
35
|
+
from sqlalchemy.ext.compiler import compiles
|
|
36
|
+
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, sessionmaker
|
|
37
|
+
|
|
38
|
+
from phoenix.config import ENV_PHOENIX_SQL_DATABASE_SCHEMA, get_env_database_connection_str
|
|
39
|
+
from phoenix.db.engines import set_postgresql_search_path
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class JSONB(JSON):
    """JSON subtype whose visit name routes DDL compilation to the
    @compiles(JSONB, "sqlite") hook, so the column renders as JSONB."""

    # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
    __visit_name__ = "JSONB"
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
@compiles(JSONB, "sqlite")
def _(*args: Any, **kwargs: Any) -> str:
    # Render the custom JSONB type as a literal "JSONB" column type on SQLite.
    # See https://docs.sqlalchemy.org/en/20/core/custom_types.html
    return "JSONB"
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
# Dialect-aware JSON column type: native JSONB on PostgreSQL, the custom
# JSONB rendering on SQLite, and plain JSON everywhere else.
_json_base = JSON()
_json_with_pg = _json_base.with_variant(
    postgresql.JSONB(),  # type: ignore
    "postgresql",
)
JSON_ = _json_with_pg.with_variant(JSONB(), "sqlite")
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class Base(DeclarativeBase):
    """Declarative base shared by the lightweight table mappings below."""
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class ProjectSession(Base):
    """Minimal mapping of the ``project_sessions`` table.

    Only the columns this script reads/writes are declared; the full model
    presumably lives elsewhere in the package — this class exists so the
    script can run standalone against the migrated schema.
    """

    __tablename__ = "project_sessions"
    id: Mapped[int] = mapped_column(primary_key=True)
    # Session identifier taken from root-span attributes (see SESSION_ID below).
    session_id: Mapped[str]
    # Row id of the owning project (copied from Trace.project_rowid; no FK
    # constraint is declared in this lightweight mapping).
    project_id: Mapped[int]
    # Earliest trace start_time for the session (rank == 1 row in the insert).
    start_time: Mapped[datetime]
    # Latest trace end_time observed for the session (window MAX).
    end_time: Mapped[datetime]
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
class Trace(Base):
    """Minimal mapping of the ``traces`` table (only columns used here)."""

    __tablename__ = "traces"
    id: Mapped[int] = mapped_column(primary_key=True)
    # Nullable link to project_sessions.id — populated by this script's UPDATE.
    project_session_rowid: Mapped[Union[int, None]]
    project_rowid: Mapped[int]
    start_time: Mapped[datetime]
    end_time: Mapped[datetime]
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class Span(Base):
    """Minimal mapping of the ``spans`` table (only columns used here)."""

    __tablename__ = "spans"
    id: Mapped[int] = mapped_column(primary_key=True)
    trace_rowid: Mapped[int]
    # NULL parent_id marks a root span; the queries below only consider roots.
    parent_id: Mapped[Optional[str]]
    # OpenInference attributes document; the session id is extracted from it
    # via the SESSION_ID key path.
    attributes: Mapped[dict[str, Any]] = mapped_column(JSON_, nullable=False)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
# Dotted semantic-convention keys split into path segments for JSON-path
# indexing into Span.attributes (e.g. "session.id" -> ["session", "id"]).
SESSION_ID = SpanAttributes.SESSION_ID.split(".")
# NOTE(review): USER_ID is not referenced anywhere in this script — presumably
# kept for symmetry with SESSION_ID; confirm before removing.
USER_ID = SpanAttributes.USER_ID.split(".")
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def populate_project_sessions(
    engine: Engine,
) -> None:
    """Back-fill ``project_sessions`` and link traces to their sessions.

    Two steps inside a single transaction:

    1. Insert one ``project_sessions`` row per distinct non-empty session id
       found on root spans, taking project/start_time from the first trace of
       the session (window ``row_number`` rank 1, ordered by trace start_time,
       trace id, then span id) and end_time as the window ``MAX`` of trace
       end times over the session.
    2. Update ``traces.project_session_rowid`` by joining root spans' session
       id back to the freshly inserted ``project_sessions`` rows.

    Args:
        engine: Synchronous SQLAlchemy engine pointed at the target database.
    """
    # Subquery: one candidate row per root span carrying a session id, plus
    # window columns used to pick the first trace (rank) and the session's
    # overall end_time.
    sessions_from_span = (
        select(
            Span.attributes[SESSION_ID].as_string().label("session_id"),
            Trace.project_rowid.label("project_id"),
            Trace.start_time.label("start_time"),
            func.row_number()
            .over(
                partition_by=Span.attributes[SESSION_ID],
                order_by=[Trace.start_time, Trace.id, Span.id],
            )
            .label("rank"),
            func.max(Trace.end_time)
            .over(partition_by=Span.attributes[SESSION_ID])
            .label("end_time"),
        )
        .join_from(Span, Trace, Span.trace_rowid == Trace.id)
        .where(Span.parent_id.is_(None))
        # Skip spans whose session id is absent or empty.
        .where(Span.attributes[SESSION_ID].as_string() != "")
        .subquery()
    )
    # Subquery: (trace_rowid, project_session_rowid) pairs obtained by joining
    # root spans' session id to the inserted project_sessions rows.
    sessions_for_trace_id = (
        select(
            Span.trace_rowid,
            ProjectSession.id.label("project_session_rowid"),
        )
        .join_from(
            Span,
            ProjectSession,
            Span.attributes[SESSION_ID].as_string() == ProjectSession.session_id,
        )
        .where(Span.parent_id.is_(None))
        .where(Span.attributes[SESSION_ID].as_string() != "")
        .subquery()
    )
    start_time = perf_counter()
    # .begin() commits on success and rolls back on error, so both statements
    # succeed or fail together.
    with sessionmaker(engine).begin() as session:
        session.execute(
            insert(ProjectSession).from_select(
                [
                    "session_id",
                    "project_id",
                    "start_time",
                    "end_time",
                ],
                # rank == 1 keeps exactly one row (the earliest trace) per
                # session id.
                select(
                    sessions_from_span.c.session_id,
                    sessions_from_span.c.project_id,
                    sessions_from_span.c.start_time,
                    sessions_from_span.c.end_time,
                ).where(sessions_from_span.c.rank == 1),
            )
        )
        session.execute(
            (
                update(Trace)
                .values(project_session_rowid=sessions_for_trace_id.c.project_session_rowid)
                .where(Trace.id == sessions_for_trace_id.c.trace_rowid)
            )
        )
    elapsed_time = perf_counter() - start_time
    print(f"✅ Populated project_sessions in {elapsed_time:.3f} seconds.")
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
if __name__ == "__main__":
    # Resolve the target database from the environment, then let the operator
    # confirm or override it interactively before touching anything.
    sql_database_url = make_url(get_env_database_connection_str())
    print(f"Using database URL: {sql_database_url}")
    ans = input("Is that correct? [y]/n: ")
    if ans.lower().startswith("n"):
        url = input("Please enter the correct database URL: ")
        sql_database_url = make_url(url)
    backend = sql_database_url.get_backend_name()
    if backend == "sqlite":
        file = sql_database_url.database
        # NOTE(review): assumes a file-backed SQLite database — an in-memory
        # URL (no database path) would produce an invalid file URI; confirm.
        engine = create_engine(
            url=sql_database_url.set(drivername="sqlite"),
            # Use sqlean's drop-in sqlite3 connection via a file URI.
            creator=lambda: sqlean.connect(f"file:///{file}", uri=True),
            poolclass=NullPool,
            echo=True,
        )
    elif backend == "postgresql":
        # Optional schema, also confirmed/overridden interactively.
        schema = os.getenv(ENV_PHOENIX_SQL_DATABASE_SCHEMA)
        if schema:
            print(f"Using schema: {schema}")
        else:
            print("No PostgreSQL schema set. (This is the default.)")
        ans = input("Is that correct? [y]/n: ")
        if ans.lower().startswith("n"):
            schema = input("Please enter the correct schema: ")
        engine = create_engine(
            url=sql_database_url.set(drivername="postgresql+psycopg"),
            poolclass=NullPool,
            echo=True,
        )
        if schema:
            # Set search_path on every new connection so unqualified table
            # names resolve to the configured schema.
            event.listen(engine, "connect", set_postgresql_search_path(schema))
    else:
        raise ValueError(f"Unknown database backend: {backend}")
    populate_project_sessions(engine)
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
|
|
3
|
+
from alembic import context
|
|
4
|
+
from sqlalchemy import Connection, engine_from_config, pool
|
|
5
|
+
from sqlalchemy.ext.asyncio import AsyncEngine
|
|
6
|
+
|
|
7
|
+
from phoenix.config import get_env_database_connection_str
|
|
8
|
+
from phoenix.db.engines import get_async_db_url
|
|
9
|
+
from phoenix.db.models import Base
|
|
10
|
+
from phoenix.settings import Settings
|
|
11
|
+
|
|
12
|
+
# this is the Alembic Config object, which provides
|
|
13
|
+
# access to the values within the .ini file in use.
|
|
14
|
+
config = context.config
|
|
15
|
+
|
|
16
|
+
# add your model's MetaData object here
|
|
17
|
+
# for 'autogenerate' support
|
|
18
|
+
target_metadata = Base.metadata
|
|
19
|
+
|
|
20
|
+
# other values from the config, defined by the needs of env.py,
|
|
21
|
+
# can be acquired:
|
|
22
|
+
# my_important_option = config.get_main_option("my_important_option")
|
|
23
|
+
# ... etc.
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    The context is configured with just a URL rather than an Engine, so no
    DBAPI needs to be importable; calls to ``context.execute()`` emit the
    generated SQL to the script output instead of a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        transaction_per_migration=True,
    )

    with context.begin_transaction():
        context.run_migrations()
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context. A pre-opened connection may
    be injected via ``context.config.attributes["connection"]``; otherwise an
    AsyncEngine is built from the ini section (falling back to the Phoenix
    environment for the URL).
    """
    connectable = context.config.attributes.get("connection", None)
    if connectable is None:
        # NOTE: this local `config` (a plain dict of the ini section) shadows
        # the module-level Alembic Config object within this function.
        config = context.config.get_section(context.config.config_ini_section) or {}
        if "sqlalchemy.url" not in config:
            # No URL in the ini file: derive it from Phoenix's environment,
            # converted to an async driver URL. Password must stay visible so
            # the engine can actually authenticate.
            connection_str = get_env_database_connection_str()
            config["sqlalchemy.url"] = get_async_db_url(connection_str).render_as_string(
                hide_password=False
            )
        connectable = AsyncEngine(
            engine_from_config(
                config,
                prefix="sqlalchemy.",
                poolclass=pool.NullPool,
                future=True,
                echo=Settings.log_migrations,
            )
        )

    if isinstance(connectable, AsyncEngine):
        try:
            # Probe for a running event loop without creating one.
            asyncio.get_running_loop()
        except RuntimeError:
            # No loop running: drive the async migrations to completion here.
            asyncio.run(run_async_migrations(connectable))
        else:
            # Already inside a loop: schedule the migrations as a task.
            # NOTE(review): the task reference is neither retained nor
            # awaited, so this function can return before migrations finish
            # (and the task could be garbage-collected) — confirm callers
            # synchronize on completion elsewhere.
            asyncio.create_task(run_async_migrations(connectable))
    else:
        # A plain synchronous Connection was injected.
        run_migrations(connectable)
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
async def run_async_migrations(connectable: AsyncEngine) -> None:
    """Open an async connection and run the synchronous migration body on it
    via ``run_sync`` (Alembic's migration machinery is sync-only)."""
    async with connectable.connect() as connection:
        await connection.run_sync(run_migrations)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def run_migrations(connection: Connection) -> None:
    """Configure the Alembic context on ``connection`` and run all migrations.

    Wraps the run in an explicit outer transaction that is committed on
    success and rolled back on any failure; ``transaction_per_migration=True``
    additionally gives each individual migration its own transaction scope.

    Args:
        connection: An open synchronous SQLAlchemy connection.

    Raises:
        Exception: re-raises whatever the migration run raised, after rollback.
    """
    transaction = connection.begin()
    try:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,
            transactional_ddl=True,
            transaction_per_migration=True,
        )
        context.run_migrations()
        transaction.commit()
    except Exception:
        transaction.rollback()
        raise
    finally:
        # NOTE(review): this closes the caller-supplied connection even when
        # run_async_migrations also closes it via its context manager; the
        # double close appears harmless, but confirm no caller reuses the
        # connection afterwards.
        connection.close()
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
# Module entry point: Alembic imports env.py and we dispatch on the mode the
# context was invoked in (offline SQL emission vs. live database).
(run_migrations_offline if context.is_offline_mode() else run_migrations_online)()
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
## Mako template for newly generated Alembic revision files. Lines starting
## with `##` are Mako comments and are NOT rendered into the output; all
## rendered text below is unchanged.
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
|