arize-phoenix 10.0.4__py3-none-any.whl → 12.28.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those versions.
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/METADATA +124 -72
- arize_phoenix-12.28.1.dist-info/RECORD +499 -0
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/WHEEL +1 -1
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/licenses/IP_NOTICE +1 -1
- phoenix/__generated__/__init__.py +0 -0
- phoenix/__generated__/classification_evaluator_configs/__init__.py +20 -0
- phoenix/__generated__/classification_evaluator_configs/_document_relevance_classification_evaluator_config.py +17 -0
- phoenix/__generated__/classification_evaluator_configs/_hallucination_classification_evaluator_config.py +17 -0
- phoenix/__generated__/classification_evaluator_configs/_models.py +18 -0
- phoenix/__generated__/classification_evaluator_configs/_tool_selection_classification_evaluator_config.py +17 -0
- phoenix/__init__.py +5 -4
- phoenix/auth.py +39 -2
- phoenix/config.py +1763 -91
- phoenix/datetime_utils.py +120 -2
- phoenix/db/README.md +595 -25
- phoenix/db/bulk_inserter.py +145 -103
- phoenix/db/engines.py +140 -33
- phoenix/db/enums.py +3 -12
- phoenix/db/facilitator.py +302 -35
- phoenix/db/helpers.py +1000 -65
- phoenix/db/iam_auth.py +64 -0
- phoenix/db/insertion/dataset.py +135 -2
- phoenix/db/insertion/document_annotation.py +9 -6
- phoenix/db/insertion/evaluation.py +2 -3
- phoenix/db/insertion/helpers.py +17 -2
- phoenix/db/insertion/session_annotation.py +176 -0
- phoenix/db/insertion/span.py +15 -11
- phoenix/db/insertion/span_annotation.py +3 -4
- phoenix/db/insertion/trace_annotation.py +3 -4
- phoenix/db/insertion/types.py +50 -20
- phoenix/db/migrations/versions/01a8342c9cdf_add_user_id_on_datasets.py +40 -0
- phoenix/db/migrations/versions/0df286449799_add_session_annotations_table.py +105 -0
- phoenix/db/migrations/versions/272b66ff50f8_drop_single_indices.py +119 -0
- phoenix/db/migrations/versions/58228d933c91_dataset_labels.py +67 -0
- phoenix/db/migrations/versions/699f655af132_experiment_tags.py +57 -0
- phoenix/db/migrations/versions/735d3d93c33e_add_composite_indices.py +41 -0
- phoenix/db/migrations/versions/a20694b15f82_cost.py +196 -0
- phoenix/db/migrations/versions/ab513d89518b_add_user_id_on_dataset_versions.py +40 -0
- phoenix/db/migrations/versions/d0690a79ea51_users_on_experiments.py +40 -0
- phoenix/db/migrations/versions/deb2c81c0bb2_dataset_splits.py +139 -0
- phoenix/db/migrations/versions/e76cbd66ffc3_add_experiments_dataset_examples.py +87 -0
- phoenix/db/models.py +669 -56
- phoenix/db/pg_config.py +10 -0
- phoenix/db/types/model_provider.py +4 -0
- phoenix/db/types/token_price_customization.py +29 -0
- phoenix/db/types/trace_retention.py +23 -15
- phoenix/experiments/evaluators/utils.py +3 -3
- phoenix/experiments/functions.py +160 -52
- phoenix/experiments/tracing.py +2 -2
- phoenix/experiments/types.py +1 -1
- phoenix/inferences/inferences.py +1 -2
- phoenix/server/api/auth.py +38 -7
- phoenix/server/api/auth_messages.py +46 -0
- phoenix/server/api/context.py +100 -4
- phoenix/server/api/dataloaders/__init__.py +79 -5
- phoenix/server/api/dataloaders/annotation_configs_by_project.py +31 -0
- phoenix/server/api/dataloaders/annotation_summaries.py +60 -8
- phoenix/server/api/dataloaders/average_experiment_repeated_run_group_latency.py +50 -0
- phoenix/server/api/dataloaders/average_experiment_run_latency.py +17 -24
- phoenix/server/api/dataloaders/cache/two_tier_cache.py +1 -2
- phoenix/server/api/dataloaders/dataset_dataset_splits.py +52 -0
- phoenix/server/api/dataloaders/dataset_example_revisions.py +0 -1
- phoenix/server/api/dataloaders/dataset_example_splits.py +40 -0
- phoenix/server/api/dataloaders/dataset_examples_and_versions_by_experiment_run.py +47 -0
- phoenix/server/api/dataloaders/dataset_labels.py +36 -0
- phoenix/server/api/dataloaders/document_evaluation_summaries.py +2 -2
- phoenix/server/api/dataloaders/document_evaluations.py +6 -9
- phoenix/server/api/dataloaders/experiment_annotation_summaries.py +88 -34
- phoenix/server/api/dataloaders/experiment_dataset_splits.py +43 -0
- phoenix/server/api/dataloaders/experiment_error_rates.py +21 -28
- phoenix/server/api/dataloaders/experiment_repeated_run_group_annotation_summaries.py +77 -0
- phoenix/server/api/dataloaders/experiment_repeated_run_groups.py +57 -0
- phoenix/server/api/dataloaders/experiment_runs_by_experiment_and_example.py +44 -0
- phoenix/server/api/dataloaders/last_used_times_by_generative_model_id.py +35 -0
- phoenix/server/api/dataloaders/latency_ms_quantile.py +40 -8
- phoenix/server/api/dataloaders/record_counts.py +37 -10
- phoenix/server/api/dataloaders/session_annotations_by_session.py +29 -0
- phoenix/server/api/dataloaders/span_cost_by_span.py +24 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_generative_model.py +56 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_project_session.py +57 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_span.py +43 -0
- phoenix/server/api/dataloaders/span_cost_detail_summary_entries_by_trace.py +56 -0
- phoenix/server/api/dataloaders/span_cost_details_by_span_cost.py +27 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_experiment.py +57 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_experiment_repeated_run_group.py +64 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_experiment_run.py +58 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_generative_model.py +55 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_project.py +152 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_project_session.py +56 -0
- phoenix/server/api/dataloaders/span_cost_summary_by_trace.py +55 -0
- phoenix/server/api/dataloaders/span_costs.py +29 -0
- phoenix/server/api/dataloaders/table_fields.py +2 -2
- phoenix/server/api/dataloaders/token_prices_by_model.py +30 -0
- phoenix/server/api/dataloaders/trace_annotations_by_trace.py +27 -0
- phoenix/server/api/dataloaders/types.py +29 -0
- phoenix/server/api/exceptions.py +11 -1
- phoenix/server/api/helpers/dataset_helpers.py +5 -1
- phoenix/server/api/helpers/playground_clients.py +1243 -292
- phoenix/server/api/helpers/playground_registry.py +2 -2
- phoenix/server/api/helpers/playground_spans.py +8 -4
- phoenix/server/api/helpers/playground_users.py +26 -0
- phoenix/server/api/helpers/prompts/conversions/aws.py +83 -0
- phoenix/server/api/helpers/prompts/conversions/google.py +103 -0
- phoenix/server/api/helpers/prompts/models.py +205 -22
- phoenix/server/api/input_types/{SpanAnnotationFilter.py → AnnotationFilter.py} +22 -14
- phoenix/server/api/input_types/ChatCompletionInput.py +6 -2
- phoenix/server/api/input_types/CreateProjectInput.py +27 -0
- phoenix/server/api/input_types/CreateProjectSessionAnnotationInput.py +37 -0
- phoenix/server/api/input_types/DatasetFilter.py +17 -0
- phoenix/server/api/input_types/ExperimentRunSort.py +237 -0
- phoenix/server/api/input_types/GenerativeCredentialInput.py +9 -0
- phoenix/server/api/input_types/GenerativeModelInput.py +5 -0
- phoenix/server/api/input_types/ProjectSessionSort.py +161 -1
- phoenix/server/api/input_types/PromptFilter.py +14 -0
- phoenix/server/api/input_types/PromptVersionInput.py +52 -1
- phoenix/server/api/input_types/SpanSort.py +44 -7
- phoenix/server/api/input_types/TimeBinConfig.py +23 -0
- phoenix/server/api/input_types/UpdateAnnotationInput.py +34 -0
- phoenix/server/api/input_types/UserRoleInput.py +1 -0
- phoenix/server/api/mutations/__init__.py +10 -0
- phoenix/server/api/mutations/annotation_config_mutations.py +8 -8
- phoenix/server/api/mutations/api_key_mutations.py +19 -23
- phoenix/server/api/mutations/chat_mutations.py +154 -47
- phoenix/server/api/mutations/dataset_label_mutations.py +243 -0
- phoenix/server/api/mutations/dataset_mutations.py +21 -16
- phoenix/server/api/mutations/dataset_split_mutations.py +351 -0
- phoenix/server/api/mutations/experiment_mutations.py +2 -2
- phoenix/server/api/mutations/export_events_mutations.py +3 -3
- phoenix/server/api/mutations/model_mutations.py +210 -0
- phoenix/server/api/mutations/project_mutations.py +49 -10
- phoenix/server/api/mutations/project_session_annotations_mutations.py +158 -0
- phoenix/server/api/mutations/project_trace_retention_policy_mutations.py +8 -4
- phoenix/server/api/mutations/prompt_label_mutations.py +74 -65
- phoenix/server/api/mutations/prompt_mutations.py +65 -129
- phoenix/server/api/mutations/prompt_version_tag_mutations.py +11 -8
- phoenix/server/api/mutations/span_annotations_mutations.py +15 -10
- phoenix/server/api/mutations/trace_annotations_mutations.py +14 -10
- phoenix/server/api/mutations/trace_mutations.py +47 -3
- phoenix/server/api/mutations/user_mutations.py +66 -41
- phoenix/server/api/queries.py +768 -293
- phoenix/server/api/routers/__init__.py +2 -2
- phoenix/server/api/routers/auth.py +154 -88
- phoenix/server/api/routers/ldap.py +229 -0
- phoenix/server/api/routers/oauth2.py +369 -106
- phoenix/server/api/routers/v1/__init__.py +24 -4
- phoenix/server/api/routers/v1/annotation_configs.py +23 -31
- phoenix/server/api/routers/v1/annotations.py +481 -17
- phoenix/server/api/routers/v1/datasets.py +395 -81
- phoenix/server/api/routers/v1/documents.py +142 -0
- phoenix/server/api/routers/v1/evaluations.py +24 -31
- phoenix/server/api/routers/v1/experiment_evaluations.py +19 -8
- phoenix/server/api/routers/v1/experiment_runs.py +337 -59
- phoenix/server/api/routers/v1/experiments.py +479 -48
- phoenix/server/api/routers/v1/models.py +7 -0
- phoenix/server/api/routers/v1/projects.py +18 -49
- phoenix/server/api/routers/v1/prompts.py +54 -40
- phoenix/server/api/routers/v1/sessions.py +108 -0
- phoenix/server/api/routers/v1/spans.py +1091 -81
- phoenix/server/api/routers/v1/traces.py +132 -78
- phoenix/server/api/routers/v1/users.py +389 -0
- phoenix/server/api/routers/v1/utils.py +3 -7
- phoenix/server/api/subscriptions.py +305 -88
- phoenix/server/api/types/Annotation.py +90 -23
- phoenix/server/api/types/ApiKey.py +13 -17
- phoenix/server/api/types/AuthMethod.py +1 -0
- phoenix/server/api/types/ChatCompletionSubscriptionPayload.py +1 -0
- phoenix/server/api/types/CostBreakdown.py +12 -0
- phoenix/server/api/types/Dataset.py +226 -72
- phoenix/server/api/types/DatasetExample.py +88 -18
- phoenix/server/api/types/DatasetExperimentAnnotationSummary.py +10 -0
- phoenix/server/api/types/DatasetLabel.py +57 -0
- phoenix/server/api/types/DatasetSplit.py +98 -0
- phoenix/server/api/types/DatasetVersion.py +49 -4
- phoenix/server/api/types/DocumentAnnotation.py +212 -0
- phoenix/server/api/types/Experiment.py +264 -59
- phoenix/server/api/types/ExperimentComparison.py +5 -10
- phoenix/server/api/types/ExperimentRepeatedRunGroup.py +155 -0
- phoenix/server/api/types/ExperimentRepeatedRunGroupAnnotationSummary.py +9 -0
- phoenix/server/api/types/ExperimentRun.py +169 -65
- phoenix/server/api/types/ExperimentRunAnnotation.py +158 -39
- phoenix/server/api/types/GenerativeModel.py +245 -3
- phoenix/server/api/types/GenerativeProvider.py +70 -11
- phoenix/server/api/types/{Model.py → InferenceModel.py} +1 -1
- phoenix/server/api/types/ModelInterface.py +16 -0
- phoenix/server/api/types/PlaygroundModel.py +20 -0
- phoenix/server/api/types/Project.py +1278 -216
- phoenix/server/api/types/ProjectSession.py +188 -28
- phoenix/server/api/types/ProjectSessionAnnotation.py +187 -0
- phoenix/server/api/types/ProjectTraceRetentionPolicy.py +1 -1
- phoenix/server/api/types/Prompt.py +119 -39
- phoenix/server/api/types/PromptLabel.py +42 -25
- phoenix/server/api/types/PromptVersion.py +11 -8
- phoenix/server/api/types/PromptVersionTag.py +65 -25
- phoenix/server/api/types/ServerStatus.py +6 -0
- phoenix/server/api/types/Span.py +167 -123
- phoenix/server/api/types/SpanAnnotation.py +189 -42
- phoenix/server/api/types/SpanCostDetailSummaryEntry.py +10 -0
- phoenix/server/api/types/SpanCostSummary.py +10 -0
- phoenix/server/api/types/SystemApiKey.py +65 -1
- phoenix/server/api/types/TokenPrice.py +16 -0
- phoenix/server/api/types/TokenUsage.py +3 -3
- phoenix/server/api/types/Trace.py +223 -51
- phoenix/server/api/types/TraceAnnotation.py +149 -50
- phoenix/server/api/types/User.py +137 -32
- phoenix/server/api/types/UserApiKey.py +73 -26
- phoenix/server/api/types/node.py +10 -0
- phoenix/server/api/types/pagination.py +11 -2
- phoenix/server/app.py +290 -45
- phoenix/server/authorization.py +38 -3
- phoenix/server/bearer_auth.py +34 -24
- phoenix/server/cost_tracking/cost_details_calculator.py +196 -0
- phoenix/server/cost_tracking/cost_model_lookup.py +179 -0
- phoenix/server/cost_tracking/helpers.py +68 -0
- phoenix/server/cost_tracking/model_cost_manifest.json +3657 -830
- phoenix/server/cost_tracking/regex_specificity.py +397 -0
- phoenix/server/cost_tracking/token_cost_calculator.py +57 -0
- phoenix/server/daemons/__init__.py +0 -0
- phoenix/server/daemons/db_disk_usage_monitor.py +214 -0
- phoenix/server/daemons/generative_model_store.py +103 -0
- phoenix/server/daemons/span_cost_calculator.py +99 -0
- phoenix/server/dml_event.py +17 -0
- phoenix/server/dml_event_handler.py +5 -0
- phoenix/server/email/sender.py +56 -3
- phoenix/server/email/templates/db_disk_usage_notification.html +19 -0
- phoenix/server/email/types.py +11 -0
- phoenix/server/experiments/__init__.py +0 -0
- phoenix/server/experiments/utils.py +14 -0
- phoenix/server/grpc_server.py +11 -11
- phoenix/server/jwt_store.py +17 -15
- phoenix/server/ldap.py +1449 -0
- phoenix/server/main.py +26 -10
- phoenix/server/oauth2.py +330 -12
- phoenix/server/prometheus.py +66 -6
- phoenix/server/rate_limiters.py +4 -9
- phoenix/server/retention.py +33 -20
- phoenix/server/session_filters.py +49 -0
- phoenix/server/static/.vite/manifest.json +55 -51
- phoenix/server/static/assets/components-BreFUQQa.js +6702 -0
- phoenix/server/static/assets/{index-E0M82BdE.js → index-CTQoemZv.js} +140 -56
- phoenix/server/static/assets/pages-DBE5iYM3.js +9524 -0
- phoenix/server/static/assets/vendor-BGzfc4EU.css +1 -0
- phoenix/server/static/assets/vendor-DCE4v-Ot.js +920 -0
- phoenix/server/static/assets/vendor-codemirror-D5f205eT.js +25 -0
- phoenix/server/static/assets/vendor-recharts-V9cwpXsm.js +37 -0
- phoenix/server/static/assets/vendor-shiki-Do--csgv.js +5 -0
- phoenix/server/static/assets/vendor-three-CmB8bl_y.js +3840 -0
- phoenix/server/templates/index.html +40 -6
- phoenix/server/thread_server.py +1 -2
- phoenix/server/types.py +14 -4
- phoenix/server/utils.py +74 -0
- phoenix/session/client.py +56 -3
- phoenix/session/data_extractor.py +5 -0
- phoenix/session/evaluation.py +14 -5
- phoenix/session/session.py +45 -9
- phoenix/settings.py +5 -0
- phoenix/trace/attributes.py +80 -13
- phoenix/trace/dsl/helpers.py +90 -1
- phoenix/trace/dsl/query.py +8 -6
- phoenix/trace/projects.py +5 -0
- phoenix/utilities/template_formatters.py +1 -1
- phoenix/version.py +1 -1
- arize_phoenix-10.0.4.dist-info/RECORD +0 -405
- phoenix/server/api/types/Evaluation.py +0 -39
- phoenix/server/cost_tracking/cost_lookup.py +0 -255
- phoenix/server/static/assets/components-DULKeDfL.js +0 -4365
- phoenix/server/static/assets/pages-Cl0A-0U2.js +0 -7430
- phoenix/server/static/assets/vendor-WIZid84E.css +0 -1
- phoenix/server/static/assets/vendor-arizeai-Dy-0mSNw.js +0 -649
- phoenix/server/static/assets/vendor-codemirror-DBtifKNr.js +0 -33
- phoenix/server/static/assets/vendor-oB4u9zuV.js +0 -905
- phoenix/server/static/assets/vendor-recharts-D-T4KPz2.js +0 -59
- phoenix/server/static/assets/vendor-shiki-BMn4O_9F.js +0 -5
- phoenix/server/static/assets/vendor-three-C5WAXd5r.js +0 -2998
- phoenix/utilities/deprecation.py +0 -31
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/entry_points.txt +0 -0
- {arize_phoenix-10.0.4.dist-info → arize_phoenix-12.28.1.dist-info}/licenses/LICENSE +0 -0
phoenix/db/bulk_inserter.py
CHANGED
@@ -1,34 +1,52 @@
 import asyncio
 import logging
 from asyncio import Queue, as_completed
-from collections
+from collections import deque
 from dataclasses import dataclass, field
 from functools import singledispatchmethod
-from
-from
-from typing import Any, Optional, cast
+from time import perf_counter, time
+from typing import Any, AsyncIterator, Awaitable, Callable, Iterable, Optional, cast

+from openinference.semconv.trace import SpanAttributes
 from typing_extensions import TypeAlias

 import phoenix.trace.v1 as pb
+from phoenix.db import models
 from phoenix.db.insertion.constants import DEFAULT_RETRY_ALLOWANCE, DEFAULT_RETRY_DELAY_SEC
 from phoenix.db.insertion.document_annotation import DocumentAnnotationQueueInserter
 from phoenix.db.insertion.evaluation import (
     InsertEvaluationError,
     insert_evaluation,
 )
-from phoenix.db.insertion.helpers import
+from phoenix.db.insertion.helpers import (
+    DataManipulation,
+    DataManipulationEvent,
+    should_calculate_span_cost,
+)
+from phoenix.db.insertion.session_annotation import SessionAnnotationQueueInserter
 from phoenix.db.insertion.span import SpanInsertionEvent, insert_span
 from phoenix.db.insertion.span_annotation import SpanAnnotationQueueInserter
 from phoenix.db.insertion.trace_annotation import TraceAnnotationQueueInserter
 from phoenix.db.insertion.types import Insertables, Precursors
+from phoenix.server.daemons.span_cost_calculator import (
+    SpanCostCalculator,
+)
 from phoenix.server.dml_event import DmlEvent, SpanInsertEvent
+from phoenix.server.prometheus import (
+    BULK_LOADER_EVALUATION_INSERTIONS,
+    BULK_LOADER_EXCEPTIONS,
+    BULK_LOADER_LAST_ACTIVITY,
+    BULK_LOADER_SPAN_EXCEPTIONS,
+    BULK_LOADER_SPAN_INSERTION_TIME,
+    SPAN_QUEUE_SIZE,
+)
 from phoenix.server.types import CanPutItem, DbSessionFactory
 from phoenix.trace.schemas import Span

 logger = logging.getLogger(__name__)

 ProjectRowId: TypeAlias = int
+ProjectName: TypeAlias = str


 @dataclass(frozen=True)
@@ -42,12 +60,13 @@ class BulkInserter:
         db: DbSessionFactory,
         *,
         event_queue: CanPutItem[DmlEvent],
-
-
+        span_cost_calculator: SpanCostCalculator,
+        initial_batch_of_spans: Iterable[tuple[Span, ProjectName]] = (),
+        initial_batch_of_evaluations: Iterable[pb.Evaluation] = (),
         sleep: float = 0.1,
         max_ops_per_transaction: int = 1000,
         max_queue_size: int = 1000,
-
+        max_spans_queue_size: Optional[int] = None,
         retry_delay_sec: float = DEFAULT_RETRY_DELAY_SEC,
         retry_allowance: int = DEFAULT_RETRY_ALLOWANCE,
     ) -> None:
@@ -58,7 +77,6 @@ class BulkInserter:
         :param max_ops_per_transaction: The maximum number of operations to dequeue from
         the operations queue for each transaction.
         :param max_queue_size: The maximum length of the operations queue.
-        :param enable_prometheus: Whether Prometheus is enabled.
         """
         self._db = db
         self._running = False
@@ -66,18 +84,19 @@ class BulkInserter:
         self._max_ops_per_transaction = max_ops_per_transaction
         self._operations: Optional[Queue[DataManipulation]] = None
         self._max_queue_size = max_queue_size
-        self.
-
-        )
-        self._evaluations: list[pb.Evaluation] = (
-            [] if initial_batch_of_evaluations is None else list(initial_batch_of_evaluations)
-        )
+        self._max_spans_queue_size = max_spans_queue_size
+        self._spans: deque[tuple[Span, ProjectName]] = deque(initial_batch_of_spans)
+        self._evaluations: deque[pb.Evaluation] = deque(initial_batch_of_evaluations)
         self._task: Optional[asyncio.Task[None]] = None
         self._event_queue = event_queue
-        self._enable_prometheus = enable_prometheus
         self._retry_delay_sec = retry_delay_sec
         self._retry_allowance = retry_allowance
         self._queue_inserters = _QueueInserters(db, self._retry_delay_sec, self._retry_allowance)
+        self._span_cost_calculator = span_cost_calculator
+
+    @property
+    def is_full(self) -> bool:
+        return bool(self._max_spans_queue_size and self._max_spans_queue_size <= len(self._spans))

     async def __aenter__(
         self,
@@ -91,9 +110,9 @@ class BulkInserter:
         self._operations = Queue(maxsize=self._max_queue_size)
         self._task = asyncio.create_task(self._bulk_insert())
         return (
-            self.
-            self.
-            self.
+            self._enqueue_annotations,
+            self._enqueue_span,
+            self._enqueue_evaluation,
             self._enqueue_operation,
         )

@@ -103,23 +122,22 @@ class BulkInserter:
             self._task.cancel()
             self._task = None

-    async def
+    async def _enqueue_annotations(self, *items: Any) -> None:
         await self._queue_inserters.enqueue(*items)

     def _enqueue_operation(self, operation: DataManipulation) -> None:
         cast("Queue[DataManipulation]", self._operations).put_nowait(operation)

-    async def
+    async def _enqueue_span(self, span: Span, project_name: str) -> None:
         self._spans.append((span, project_name))

-    async def
+    async def _enqueue_evaluation(self, evaluation: pb.Evaluation) -> None:
         self._evaluations.append(evaluation)

     async def _process_events(self, events: Iterable[Optional[DataManipulationEvent]]) -> None: ...

     async def _bulk_insert(self) -> None:
         assert isinstance(self._operations, Queue)
-        spans_buffer, evaluations_buffer = None, None
         # start first insert immediately if the inserter has not run recently
         while (
             self._running
@@ -128,6 +146,8 @@ class BulkInserter:
             or self._spans
             or self._evaluations
         ):
+            BULK_LOADER_LAST_ACTIVITY.set(time())
+            SPAN_QUEUE_SIZE.set(len(self._spans))
             if (
                 self._queue_inserters.empty
                 and self._operations.empty()
@@ -145,100 +165,100 @@ class BulkInserter:
                         async with session.begin_nested():
                             await op(session)
                     except Exception as e:
-
-                        from phoenix.server.prometheus import BULK_LOADER_EXCEPTIONS
-
-                        BULK_LOADER_EXCEPTIONS.inc()
+                        BULK_LOADER_EXCEPTIONS.inc()
                         logger.exception(str(e))
             # It's important to grab the buffers at the same time so there's
             # no race condition, since an eval insertion will fail if the span
             # it references doesn't exist. Grabbing the eval buffer later may
             # include an eval whose span is in the queue but missed being
             # included in the span buffer that was grabbed previously.
-
-
-                self._spans = []
-            if self._evaluations:
-                evaluations_buffer = self._evaluations
-                self._evaluations = []
+            num_spans_to_insert = min(self._max_ops_per_transaction, len(self._spans))
+            num_evals_to_insert = min(self._max_ops_per_transaction, len(self._evaluations))
             # Spans should be inserted before the evaluations, since an evaluation
             # insertion will fail if the span it references doesn't exist.
-
-
-                spans_buffer = None
-            if evaluations_buffer:
-                await self._insert_evaluations(evaluations_buffer)
-                evaluations_buffer = None
+            await self._insert_spans(num_spans_to_insert)
+            await self._insert_evaluations(num_evals_to_insert)
             async for event in self._queue_inserters.insert():
                 self._event_queue.put(event)
             await asyncio.sleep(self._sleep)

-    async def _insert_spans(self,
+    async def _insert_spans(self, num_spans_to_insert: int) -> None:
+        if not num_spans_to_insert or not self._spans:
+            return
         project_ids = set()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    BULK_LOADER_EXCEPTIONS.inc()
-                    logger.exception(f"Failed to insert evaluation: {str(error)}")
-            if self._enable_prometheus:
-                from phoenix.server.prometheus import BULK_LOADER_INSERTION_TIME
-
-                BULK_LOADER_INSERTION_TIME.observe(perf_counter() - start)
-        except Exception:
-            if self._enable_prometheus:
-                from phoenix.server.prometheus import BULK_LOADER_EXCEPTIONS
+        span_costs: list[models.SpanCost] = []
+        try:
+            start = perf_counter()
+            async with self._db() as session:
+                while num_spans_to_insert > 0:
+                    num_spans_to_insert -= 1
+                    if not self._spans:
+                        break
+                    span, project_name = self._spans.popleft()
+                    result: Optional[SpanInsertionEvent] = None
+                    try:
+                        async with session.begin_nested():
+                            result = await insert_span(session, span, project_name)
+                    except Exception:
+                        BULK_LOADER_SPAN_EXCEPTIONS.inc()
+                        logger.exception(
+                            f"Failed to insert span with span_id={span.context.span_id}"
+                        )
+                    if result is None:
+                        continue
+                    project_ids.add(result.project_rowid)
+                    try:
+                        if not should_calculate_span_cost(span.attributes):
+                            continue
+                        span_cost = self._span_cost_calculator.calculate_cost(
+                            span.start_time,
+                            span.attributes,
+                        )
+                    except Exception:
+                        logger.exception(
+                            f"Failed to calculate span cost for span with "
+                            f"span_id={span.context.span_id}"
+                        )
+                    else:
+                        if span_cost is None:
+                            continue
+                        span_cost.span_rowid = result.span_rowid
+                        span_cost.trace_rowid = result.trace_rowid
+                        span_costs.append(span_cost)
+            BULK_LOADER_SPAN_INSERTION_TIME.observe(perf_counter() - start)
+        except Exception:
+            BULK_LOADER_SPAN_EXCEPTIONS.inc()
+            logger.exception("Failed to insert spans")
+        if project_ids:
+            self._event_queue.put(SpanInsertEvent(tuple(project_ids)))
+        if not span_costs:
+            return
+        try:
+            async with self._db() as session:
+                session.add_all(span_costs)
+        except Exception:
+            logger.exception("Failed to insert span costs")

-
-
+    async def _insert_evaluations(self, num_evals_to_insert: int) -> None:
+        if not num_evals_to_insert or not self._evaluations:
+            return
+        try:
+            async with self._db() as session:
+                while num_evals_to_insert > 0:
+                    num_evals_to_insert -= 1
+                    if not self._evaluations:
+                        break
+                    evaluation = self._evaluations.popleft()
+                    BULK_LOADER_EVALUATION_INSERTIONS.inc()
+                    try:
+                        async with session.begin_nested():
+                            await insert_evaluation(session, evaluation)
+                    except InsertEvaluationError as error:
+                        BULK_LOADER_EXCEPTIONS.inc()
+                        logger.exception(f"Failed to insert evaluation: {str(error)}")
+                    except Exception:
+                        BULK_LOADER_EXCEPTIONS.inc()
+                        logger.exception("Failed to insert evaluations")


 class _QueueInserters:
@@ -253,10 +273,12 @@ class _QueueInserters:
         self._span_annotations = SpanAnnotationQueueInserter(*args)
         self._trace_annotations = TraceAnnotationQueueInserter(*args)
         self._document_annotations = DocumentAnnotationQueueInserter(*args)
+        self._session_annotations = SessionAnnotationQueueInserter(*args)
         self._queues = (
             self._span_annotations,
             self._trace_annotations,
             self._document_annotations,
+            self._session_annotations,
         )

     async def insert(self) -> AsyncIterator[DmlEvent]:
@@ -292,3 +314,23 @@ class _QueueInserters:
     @_enqueue.register(Insertables.DocumentAnnotation)
     async def _(self, item: Precursors.DocumentAnnotation) -> None:
         await self._document_annotations.enqueue(item)
+
+    @_enqueue.register(Precursors.SessionAnnotation)
+    @_enqueue.register(Insertables.SessionAnnotation)
+    async def _(self, item: Precursors.SessionAnnotation) -> None:
+        await self._session_annotations.enqueue(item)
+
+
+LLM_MODEL_NAME = SpanAttributes.LLM_MODEL_NAME
+LLM_PROVIDER = SpanAttributes.LLM_PROVIDER
+LLM_TOKEN_COUNT_COMPLETION = SpanAttributes.LLM_TOKEN_COUNT_COMPLETION
+LLM_TOKEN_COUNT_COMPLETION_DETAILS_AUDIO = SpanAttributes.LLM_TOKEN_COUNT_COMPLETION_DETAILS_AUDIO
+LLM_TOKEN_COUNT_COMPLETION_DETAILS_REASONING = (
+    SpanAttributes.LLM_TOKEN_COUNT_COMPLETION_DETAILS_REASONING
+)
+LLM_TOKEN_COUNT_PROMPT = SpanAttributes.LLM_TOKEN_COUNT_PROMPT
+LLM_TOKEN_COUNT_PROMPT_DETAILS_AUDIO = SpanAttributes.LLM_TOKEN_COUNT_PROMPT_DETAILS_AUDIO
+LLM_TOKEN_COUNT_PROMPT_DETAILS_CACHE_READ = SpanAttributes.LLM_TOKEN_COUNT_PROMPT_DETAILS_CACHE_READ
+LLM_TOKEN_COUNT_PROMPT_DETAILS_CACHE_WRITE = (
+    SpanAttributes.LLM_TOKEN_COUNT_PROMPT_DETAILS_CACHE_WRITE
+)
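The rewritten _insert_spans and _insert_evaluations above wrap each row in its own nested transaction (SAVEPOINT) inside one outer transaction per batch, so a single failing insert is rolled back on its own while the rest of the batch still commits. The following is a minimal, self-contained sketch of that pattern only; it is not Phoenix code, and the in-memory aiosqlite database and the toy items table are assumptions made for illustration.

# Sketch of the savepoint-per-item batching pattern (assumes SQLAlchemy 2.x + aiosqlite).
import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine


async def main() -> None:
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")  # assumed toy database
    async with engine.begin() as conn:
        await conn.execute(text("CREATE TABLE items (id INTEGER PRIMARY KEY)"))

    rows = [1, 2, 2, 3]  # the duplicate "2" violates the primary key
    Session = async_sessionmaker(engine, expire_on_commit=False)
    async with Session.begin() as session:  # one outer transaction for the whole batch
        for row in rows:
            try:
                async with session.begin_nested():  # SAVEPOINT per item
                    await session.execute(
                        text("INSERT INTO items (id) VALUES (:id)"), {"id": row}
                    )
            except Exception:
                # only this item's savepoint is rolled back; the batch continues
                pass

    async with engine.connect() as conn:
        count = (await conn.execute(text("SELECT COUNT(*) FROM items"))).scalar_one()
    print(count)  # 3 — the failed duplicate did not abort the other inserts


asyncio.run(main())

The same trade-off drives the change above: one transaction per batch keeps throughput high, while the per-item savepoint prevents a single malformed span or evaluation from poisoning the whole flush.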
phoenix/db/engines.py
CHANGED
@@ -1,16 +1,15 @@
 from __future__ import annotations

 import asyncio
-import json
 import logging
 from collections.abc import Callable
-from datetime import datetime
 from enum import Enum
 from sqlite3 import Connection
-from typing import Any
+from typing import Any, Optional

 import aiosqlite
 import numpy as np
+import orjson
 import sqlalchemy
 import sqlean
 from sqlalchemy import URL, StaticPool, event, make_url
@@ -123,7 +122,7 @@ def aio_sqlite_engine(
             lambda: sqlean.connect(f"file:{database}", uri=True),
             iter_chunk_size=64,
         )
-        conn.daemon = True
+        conn.daemon = True  # type: ignore[attr-defined]
         return conn

     engine = create_async_engine(
@@ -169,43 +168,151 @@ def aio_postgresql_engine(
     log_to_stdout: bool = False,
     log_migrations_to_stdout: bool = True,
 ) -> AsyncEngine:
-
-
-
-        connect_args=asyncpg_args,
-        echo=log_to_stdout,
-        json_serializer=_dumps,
+    from phoenix.config import (
+        get_env_postgres_iam_token_lifetime,
+        get_env_postgres_use_iam_auth,
     )
+
+    use_iam_auth = get_env_postgres_use_iam_auth()
+
+    asyncpg_url, asyncpg_args = get_pg_config(url, "asyncpg", enforce_ssl=use_iam_auth)
+
+    iam_config: Optional[dict[str, Any]] = None
+    token_lifetime: int = 0
+    if use_iam_auth:
+        iam_config = _extract_iam_config_from_url(url)
+        token_lifetime = get_env_postgres_iam_token_lifetime()
+
+        async def iam_async_creator() -> Any:
+            import asyncpg  # type: ignore
+
+            from phoenix.db.iam_auth import generate_aws_rds_token
+
+            assert iam_config is not None
+            token = generate_aws_rds_token(
+                host=iam_config["host"],
+                port=iam_config["port"],
+                user=iam_config["user"],
+            )
+
+            conn_kwargs = {
+                "host": iam_config["host"],
+                "port": iam_config["port"],
+                "user": iam_config["user"],
+                "password": token,
+                "database": iam_config["database"],
+            }
+
+            if asyncpg_args:
+                conn_kwargs.update(asyncpg_args)
+
+            return await asyncpg.connect(**conn_kwargs)
+
+        engine = create_async_engine(
+            url=asyncpg_url,
+            async_creator=iam_async_creator,
+            echo=log_to_stdout,
+            json_serializer=_dumps,
+            pool_recycle=token_lifetime,
+        )
+    else:
+        engine = create_async_engine(
+            url=asyncpg_url,
+            connect_args=asyncpg_args,
+            echo=log_to_stdout,
+            json_serializer=_dumps,
+        )
+
     if not migrate:
         return engine

-    psycopg_url, psycopg_args = get_pg_config(url, "psycopg")
-
-
-
-
-
-
+    psycopg_url, psycopg_args = get_pg_config(url, "psycopg", enforce_ssl=use_iam_auth)
+
+    if use_iam_auth:
+        assert iam_config is not None
+
+        def iam_sync_creator() -> Any:
+            import psycopg
+
+            from phoenix.db.iam_auth import generate_aws_rds_token
+
+            token = generate_aws_rds_token(
+                host=iam_config["host"],
+                port=iam_config["port"],
+                user=iam_config["user"],
+            )
+
+            conn_kwargs = {
+                "host": iam_config["host"],
+                "port": iam_config["port"],
+                "user": iam_config["user"],
+                "password": token,
+                "dbname": iam_config["database"],
+            }
+
+            if psycopg_args:
+                conn_kwargs.update(psycopg_args)
+
+            return psycopg.connect(**conn_kwargs)
+
+        sync_engine = sqlalchemy.create_engine(
+            url=psycopg_url,
+            creator=iam_sync_creator,
+            echo=log_migrations_to_stdout,
+            json_serializer=_dumps,
+            pool_recycle=token_lifetime,
+        )
+    else:
+        sync_engine = sqlalchemy.create_engine(
+            url=psycopg_url,
+            connect_args=psycopg_args,
+            echo=log_migrations_to_stdout,
+            json_serializer=_dumps,
+        )
+
     if schema := get_env_database_schema():
         event.listen(sync_engine, "connect", set_postgresql_search_path(schema))
     migrate_in_thread(sync_engine)
     return engine


+def _extract_iam_config_from_url(url: URL) -> dict[str, Any]:
+    """Extract connection parameters needed for IAM authentication from a SQLAlchemy URL.
+
+    Args:
+        url: SQLAlchemy database URL
+
+    Returns:
+        Dictionary with host, port, user, and database
+    """
+    host = url.host
+    if not host:
+        raise ValueError("Database host is required for IAM authentication")
+
+    port = url.port or 5432
+    user = url.username
+    if not user:
+        raise ValueError("Database user is required for IAM authentication")
+
+    database = url.database or "postgres"
+
+    return {
+        "host": host,
+        "port": port,
+        "user": user,
+        "database": database,
+    }
+
+
 def _dumps(obj: Any) -> str:
-    return
-
-
-
-
-
-
-
-
-
-    elif isinstance(obj, np.integer):
-        return int(obj)
-    elif isinstance(obj, np.floating):
-        return float(obj)
-    return super().default(obj)
+    return orjson.dumps(obj, default=_default).decode()
+
+
+def _default(obj: Any) -> Any:
+    if isinstance(obj, np.ndarray):
+        return obj.tolist()
+    if isinstance(obj, (np.integer, np.floating, np.bool_)):
+        return obj.item()
+    if isinstance(obj, Enum):
+        return obj.value
+    raise TypeError(f"Object of type {type(obj).__name__} is not serializable")
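The IAM branch above swaps static connect_args for a custom connection creator plus pool_recycle set to the token lifetime, so every new pooled connection is opened with a freshly minted short-lived credential and recycled before that credential can expire. Below is a minimal sketch of that general pattern only, not the Phoenix implementation; fetch_iam_token and the host, user, and database names are placeholder assumptions (a real deployment would generate the token with something like boto3's RDS generate_db_auth_token).

# Sketch: short-lived-credential creator + pool_recycle (assumes SQLAlchemy 2.x and psycopg 3).
from typing import Any

import sqlalchemy

TOKEN_LIFETIME_SECONDS = 900  # assumed 15-minute token validity


def fetch_iam_token() -> str:
    # Placeholder: a real deployment would call a cloud SDK here to mint the token.
    return "short-lived-password"


def iam_creator() -> Any:
    import psycopg  # imported lazily, mirroring the diff above

    return psycopg.connect(
        host="db.example.internal",  # assumed host
        port=5432,
        user="phoenix",              # assumed user
        password=fetch_iam_token(),  # fresh token for every new pooled connection
        dbname="phoenix",            # assumed database
    )


engine = sqlalchemy.create_engine(
    "postgresql+psycopg://",              # driver only; the creator supplies connection args
    creator=iam_creator,
    pool_recycle=TOKEN_LIFETIME_SECONDS,  # recycle connections before the token expires
)

Passing only the driver portion of the URL works because the creator supplies every connection argument; SQLAlchemy needs the URL just to pick the dialect, which is the same division of labor the diff relies on for both the async and the sync engines.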
phoenix/db/enums.py
CHANGED
@@ -1,19 +1,10 @@
-from collections.abc import Mapping
-from enum import Enum
-
 from sqlalchemy.orm import InstrumentedAttribute

 from phoenix.db import models

-__all__ = ["
-
-
-class UserRole(Enum):
-    SYSTEM = "SYSTEM"
-    ADMIN = "ADMIN"
-    MEMBER = "MEMBER"
+__all__ = ["ENUM_COLUMNS"]


-
-    models.UserRole.name
+ENUM_COLUMNS: set[InstrumentedAttribute[str]] = {
+    models.UserRole.name,
 }