arize-phoenix 3.19.3rc1__tar.gz → 3.20.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arize-phoenix might be problematic.
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/PKG-INFO +2 -2
- {arize_phoenix-3.19.3rc1/examples/manually-instrumented-chatbot → arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/chat-service}/chat/app.py +2 -1
- arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend/Dockerfile +15 -0
- arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend/Makefile +20 -0
- arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend/pyproject.toml +24 -0
- arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend/requirements.txt +123 -0
- arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend/schema.json +149 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/pyproject.toml +1 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/__init__.py +7 -3
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/core/model.py +8 -6
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/core/model_schema_adapter.py +6 -6
- arize_phoenix-3.20.0/src/phoenix/datasets/dataset.py +214 -0
- arize_phoenix-3.20.0/src/phoenix/datasets/fixtures.py +24 -0
- arize_phoenix-3.20.0/src/phoenix/datasets/schema.py +31 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/datasets → arize_phoenix-3.20.0/src/phoenix/inferences}/fixtures.py +12 -12
- arize_phoenix-3.19.3rc1/src/phoenix/datasets/dataset.py → arize_phoenix-3.20.0/src/phoenix/inferences/inferences.py +8 -209
- {arize_phoenix-3.19.3rc1/src/phoenix/datasets → arize_phoenix-3.20.0/src/phoenix/inferences}/schema.py +0 -1
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/app.py +5 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/main.py +8 -8
- arize_phoenix-3.20.0/src/phoenix/server/static/index.css +6 -0
- arize_phoenix-3.20.0/src/phoenix/server/static/index.js +7479 -0
- arize_phoenix-3.20.0/src/phoenix/session/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/session/evaluation.py +1 -2
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/session/session.py +16 -16
- arize_phoenix-3.20.0/src/phoenix/storage/__init__.py +0 -0
- arize_phoenix-3.20.0/src/phoenix/utilities/deprecation.py +30 -0
- arize_phoenix-3.20.0/src/phoenix/version.py +1 -0
- arize_phoenix-3.19.3rc1/src/phoenix/server/static/index.css +0 -153
- arize_phoenix-3.19.3rc1/src/phoenix/server/static/index.css.map +0 -7
- arize_phoenix-3.19.3rc1/src/phoenix/server/static/index.js +0 -217610
- arize_phoenix-3.19.3rc1/src/phoenix/server/static/index.js.map +0 -7
- arize_phoenix-3.19.3rc1/src/phoenix/version.py +0 -1
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/.gitignore +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/IP_NOTICE +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/LICENSE +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/README.md +0 -0
- {arize_phoenix-3.19.3rc1/examples/manually-instrumented-chatbot → arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/chat-service}/chat/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1/examples/manually-instrumented-chatbot → arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/chat-service}/chat/types.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/core → arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/config.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/datasets → arize_phoenix-3.20.0/src/phoenix/core}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/core/embedding_dimension.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/core/model_schema.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/core/project.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/core/traces.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/experimental → arize_phoenix-3.20.0/src/phoenix/datasets}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/datetime_utils.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/exceptions.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/server → arize_phoenix-3.20.0/src/phoenix/experimental}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/evaluators.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/functions/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/functions/classify.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/functions/executor.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/functions/generate.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/functions/processing.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/models/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/models/anthropic.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/models/base.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/models/bedrock.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/models/litellm.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/models/openai.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/models/rate_limiters.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/models/vertex.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/models/vertexai.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/retrievals.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/templates/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/templates/default_templates.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/templates/template.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/utils/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/experimental/evals/utils/threads.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/server/api → arize_phoenix-3.20.0/src/phoenix/inferences}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/datasets → arize_phoenix-3.20.0/src/phoenix/inferences}/errors.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/datasets → arize_phoenix-3.20.0/src/phoenix/inferences}/validation.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/metrics/README.md +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/metrics/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/metrics/binning.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/metrics/metrics.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/metrics/mixins.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/metrics/retrieval_metrics.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/metrics/timeseries.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/metrics/wrappers.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/pointcloud/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/pointcloud/clustering.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/pointcloud/pointcloud.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/pointcloud/projectors.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/pointcloud/umap_parameters.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/py.typed +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/server/api/input_types → arize_phoenix-3.20.0/src/phoenix/server}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/server/api/routers → arize_phoenix-3.20.0/src/phoenix/server/api}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/context.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/helpers.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/input_types/ClusterInput.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/input_types/Coordinates.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/input_types/DataQualityMetricInput.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/input_types/DimensionFilter.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/input_types/DimensionInput.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/input_types/Granularity.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/input_types/PerformanceMetricInput.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/input_types/SpanSort.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/input_types/TimeRange.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/server/api/types → arize_phoenix-3.20.0/src/phoenix/server/api/input_types}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/interceptor.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/server/templates → arize_phoenix-3.20.0/src/phoenix/server/api/routers}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/routers/evaluation_handler.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/routers/span_handler.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/routers/trace_handler.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/routers/utils.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/schema.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Cluster.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/DataQualityMetric.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Dataset.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/DatasetRole.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/DatasetValues.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Dimension.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/DimensionDataType.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/DimensionShape.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/DimensionType.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/DimensionWithValue.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/DocumentEvaluationSummary.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/DocumentRetrievalMetrics.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/EmbeddingDimension.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/EmbeddingMetadata.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Evaluation.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/EvaluationSummary.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Event.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/EventMetadata.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/ExportEventsMutation.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/ExportedFile.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Functionality.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/MimeType.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Model.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/NumericRange.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/PerformanceMetric.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Project.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/PromptResponse.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Retrieval.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/ScalarDriftMetricEnum.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Segments.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/SortDir.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Span.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/TimeSeries.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/Trace.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/UMAPPoints.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/ValidationResult.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/VectorDriftMetricEnum.py +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/session → arize_phoenix-3.20.0/src/phoenix/server/api/types}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/node.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/api/types/pagination.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/prometheus.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/static/apple-touch-icon-114x114.png +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/static/apple-touch-icon-120x120.png +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/static/apple-touch-icon-144x144.png +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/static/apple-touch-icon-152x152.png +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/static/apple-touch-icon-180x180.png +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/static/apple-touch-icon-72x72.png +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/static/apple-touch-icon-76x76.png +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/static/apple-touch-icon.png +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/static/favicon.ico +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/static/modernizr.js +0 -0
- {arize_phoenix-3.19.3rc1/src/phoenix/storage → arize_phoenix-3.20.0/src/phoenix/server/templates}/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/templates/index.html +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/server/thread_server.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/services.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/session/client.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/session/data_extractor.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/storage/span_store/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/storage/span_store/text_file.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/dsl/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/dsl/filter.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/dsl/helpers.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/dsl/missing.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/dsl/query.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/errors.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/evaluation_conventions.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/exporter.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/fixtures.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/langchain/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/langchain/instrumentor.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/llama_index/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/llama_index/callback.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/openai/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/openai/instrumentor.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/otel.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/projects.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/schemas.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/span_evaluations.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/span_json_decoder.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/span_json_encoder.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/trace_dataset.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/utils.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/v1/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/v1/evaluation_pb2.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/trace/v1/evaluation_pb2.pyi +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/utilities/__init__.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/utilities/error_handling.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/utilities/logging.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/utilities/project.py +0 -0
- {arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/utilities/span_store.py +0 -0
{arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/PKG-INFO
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.3
 Name: arize-phoenix
-Version: 3.19.3rc1
+Version: 3.20.0
 Summary: AI Observability and Evaluation
 Project-URL: Documentation, https://docs.arize.com/phoenix/
 Project-URL: Issues, https://github.com/Arize-ai/phoenix/issues

{arize_phoenix-3.19.3rc1/examples/manually-instrumented-chatbot → arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/chat-service}/chat/app.py
@@ -19,7 +19,8 @@ from opentelemetry.sdk.trace.export import (
 
 from chat.types import Message, MessagesPayload, MessagesResponse
 
-
+COLLECTOR_HOST = os.getenv("COLLECTOR_HOST", "localhost")
+endpoint = f"http://{COLLECTOR_HOST}:6006/v1/traces"
 tracer_provider = trace_sdk.TracerProvider()
 tracer_provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter(endpoint)))
 trace_api.set_tracer_provider(tracer_provider)

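Note: the hunk above only adds the COLLECTOR_HOST lookup and the derived endpoint; the exporter wiring below it is unchanged. For context, a minimal sketch of the whole tracer setup after this change, assuming the standard OTLP/HTTP exporter import path (the import lines sit outside the hunk and are an assumption here):

# Sketch of the chat service's tracer setup after this change.
# Assumption: the exporter comes from the standard OTLP/HTTP package;
# only the COLLECTOR_HOST/endpoint lines are confirmed by the diff.
import os

from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

# Defaults to localhost for local runs; set COLLECTOR_HOST (for example to the
# Phoenix container's service name) when the chat service runs in a container.
COLLECTOR_HOST = os.getenv("COLLECTOR_HOST", "localhost")
endpoint = f"http://{COLLECTOR_HOST}:6006/v1/traces"

tracer_provider = trace_sdk.TracerProvider()
tracer_provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter(endpoint)))
trace_api.set_tracer_provider(tracer_provider)
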
arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend/Dockerfile
@@ -0,0 +1,15 @@
+FROM python:3.11-slim as builder
+WORKDIR /app
+COPY requirements.txt /app/
+RUN pip install uv
+RUN uv venv
+ENV PATH="/app/.venv/bin:$PATH"
+RUN uv pip sync requirements.txt
+
+FROM gcr.io/distroless/python3-debian12
+WORKDIR /app
+COPY --from=builder /app/.venv/lib/python3.11/site-packages/ /app/site-packages/
+ENV PYTHONPATH="/app/site-packages:$PYTHONPATH"
+COPY frontend/ /app/frontend/
+EXPOSE 8501
+CMD ["-m", "streamlit", "run", "frontend/app.py", "--server.port=8501", "--server.address=0.0.0.0"]

arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend/Makefile
@@ -0,0 +1,20 @@
+server:
+	streamlit run app.py
+
+format:
+	ruff check --fix . && ruff format .
+
+typecheck:
+	pyright .
+
+types:
+	datamodel-codegen --input schema.json --input-file-type openapi --output frontend/request_types.py --enum-field-as-literal all
+
+requirements:
+	uv pip compile pyproject.toml > requirements.txt
+
+docker-build:
+	docker build -t frontend .
+
+docker-run:
+	docker run -p 8501:8501 -it frontend

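The types target regenerates frontend/request_types.py from schema.json; the --enum-field-as-literal all flag makes the role enum a Literal type rather than an Enum class. The generated module itself is not part of this release, but a rough sketch of what datamodel-codegen would emit for this schema (class and field names follow the schema; the exact generated layout is an assumption):

# Hypothetical sketch of frontend/request_types.py as datamodel-codegen would
# generate it from schema.json; the generated file is not included in this diff,
# so the exact layout is an assumption.
from typing import List, Literal

from pydantic import BaseModel


class Message(BaseModel):
    role: Literal["system", "assistant", "user"]
    content: str


class MessagesPayload(BaseModel):
    messages: List[Message]


class MessagesResponse(BaseModel):
    message: Message
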
arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend/pyproject.toml
@@ -0,0 +1,24 @@
+[project]
+name = "frontend"
+version = "0.1.0"
+dependencies = [
+  "streamlit",
+  "httpx",
+  "pydantic",
+]
+requires-python = ">=3.8"
+
+[project.optional-dependencies]
+dev = [
+  "pyright",
+  "ruff",
+  "datamodel-code-generator",
+]
+
+[tool.ruff]
+exclude = [
+  "frontend/request_types.py",
+]
+
+[tool.ruff.lint]
+extend-select = ["I"]

arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend/requirements.txt
@@ -0,0 +1,123 @@
+# This file was autogenerated by uv via the following command:
+#    uv pip compile pyproject.toml
+altair==5.3.0
+    # via streamlit
+annotated-types==0.6.0
+    # via pydantic
+anyio==4.3.0
+    # via httpx
+attrs==23.2.0
+    # via
+    #   jsonschema
+    #   referencing
+blinker==1.7.0
+    # via streamlit
+cachetools==5.3.3
+    # via streamlit
+certifi==2024.2.2
+    # via
+    #   httpcore
+    #   httpx
+    #   requests
+charset-normalizer==3.3.2
+    # via requests
+click==8.1.7
+    # via streamlit
+gitdb==4.0.11
+    # via gitpython
+gitpython==3.1.43
+    # via streamlit
+h11==0.14.0
+    # via httpcore
+httpcore==1.0.5
+    # via httpx
+httpx==0.27.0
+idna==3.6
+    # via
+    #   anyio
+    #   httpx
+    #   requests
+jinja2==3.1.3
+    # via
+    #   altair
+    #   pydeck
+jsonschema==4.21.1
+    # via altair
+jsonschema-specifications==2023.12.1
+    # via jsonschema
+markdown-it-py==3.0.0
+    # via rich
+markupsafe==2.1.5
+    # via jinja2
+mdurl==0.1.2
+    # via markdown-it-py
+numpy==1.26.4
+    # via
+    #   altair
+    #   pandas
+    #   pyarrow
+    #   pydeck
+    #   streamlit
+packaging==24.0
+    # via
+    #   altair
+    #   streamlit
+pandas==2.2.1
+    # via
+    #   altair
+    #   streamlit
+pillow==10.3.0
+    # via streamlit
+protobuf==4.25.3
+    # via streamlit
+pyarrow==15.0.2
+    # via streamlit
+pydantic==2.6.4
+pydantic-core==2.16.3
+    # via pydantic
+pydeck==0.8.0
+    # via streamlit
+pygments==2.17.2
+    # via rich
+python-dateutil==2.9.0.post0
+    # via pandas
+pytz==2024.1
+    # via pandas
+referencing==0.34.0
+    # via
+    #   jsonschema
+    #   jsonschema-specifications
+requests==2.31.0
+    # via streamlit
+rich==13.7.1
+    # via streamlit
+rpds-py==0.18.0
+    # via
+    #   jsonschema
+    #   referencing
+six==1.16.0
+    # via python-dateutil
+smmap==5.0.1
+    # via gitdb
+sniffio==1.3.1
+    # via
+    #   anyio
+    #   httpx
+streamlit==1.33.0
+tenacity==8.2.3
+    # via streamlit
+toml==0.10.2
+    # via streamlit
+toolz==0.12.1
+    # via altair
+tornado==6.4
+    # via streamlit
+typing-extensions==4.11.0
+    # via
+    #   pydantic
+    #   pydantic-core
+    #   streamlit
+tzdata==2024.1
+    # via pandas
+urllib3==2.2.1
+    # via requests

arize_phoenix-3.20.0/examples/manually-instrumented-chatbot/frontend/schema.json
@@ -0,0 +1,149 @@
+{
+  "openapi": "3.1.0",
+  "info": {
+    "title": "Chat Service Schema",
+    "description": "API schema for chat-service",
+    "version": "1.0.0"
+  },
+  "paths": {
+    "/messages/": {
+      "post": {
+        "summary": "Messages",
+        "operationId": "messages_messages__post",
+        "requestBody": {
+          "content": {
+            "application/json": {
+              "schema": {
+                "$ref": "#/components/schemas/MessagesPayload"
+              }
+            }
+          },
+          "required": true
+        },
+        "responses": {
+          "200": {
+            "description": "Successful Response",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/MessagesResponse"
+                }
+              }
+            }
+          },
+          "422": {
+            "description": "Validation Error",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/HTTPValidationError"
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  },
+  "components": {
+    "schemas": {
+      "HTTPValidationError": {
+        "properties": {
+          "detail": {
+            "items": {
+              "$ref": "#/components/schemas/ValidationError"
+            },
+            "type": "array",
+            "title": "Detail"
+          }
+        },
+        "type": "object",
+        "title": "HTTPValidationError"
+      },
+      "Message": {
+        "properties": {
+          "role": {
+            "type": "string",
+            "enum": [
+              "system",
+              "assistant",
+              "user"
+            ],
+            "title": "Role"
+          },
+          "content": {
+            "type": "string",
+            "title": "Content"
+          }
+        },
+        "type": "object",
+        "required": [
+          "role",
+          "content"
+        ],
+        "title": "Message"
+      },
+      "MessagesPayload": {
+        "properties": {
+          "messages": {
+            "items": {
+              "$ref": "#/components/schemas/Message"
+            },
+            "type": "array",
+            "title": "Messages"
+          }
+        },
+        "type": "object",
+        "required": [
+          "messages"
+        ],
+        "title": "MessagesPayload"
+      },
+      "MessagesResponse": {
+        "properties": {
+          "message": {
+            "$ref": "#/components/schemas/Message"
+          }
+        },
+        "type": "object",
+        "required": [
+          "message"
+        ],
+        "title": "MessagesResponse"
+      },
+      "ValidationError": {
+        "properties": {
+          "loc": {
+            "items": {
+              "anyOf": [
+                {
+                  "type": "string"
+                },
+                {
+                  "type": "integer"
+                }
+              ]
+            },
+            "type": "array",
+            "title": "Location"
+          },
+          "msg": {
+            "type": "string",
+            "title": "Message"
+          },
+          "type": {
+            "type": "string",
+            "title": "Error Type"
+          }
+        },
+        "type": "object",
+        "required": [
+          "loc",
+          "msg",
+          "type"
+        ],
+        "title": "ValidationError"
+      }
+    }
+  }
+}

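The schema above describes a single POST /messages/ endpoint that accepts a MessagesPayload and returns a MessagesResponse. A minimal sketch of calling it with httpx (one of the new frontend dependencies); the chat-service base URL is an assumption:

# Minimal sketch of a request against the /messages/ endpoint described by
# schema.json. The chat-service base URL is an assumption; the payload and
# response shapes follow the MessagesPayload/MessagesResponse schemas above.
import httpx

CHAT_SERVICE_URL = "http://localhost:8000"  # assumed host/port for the chat service

payload = {
    "messages": [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What does Phoenix do?"},
    ]
}

response = httpx.post(f"{CHAT_SERVICE_URL}/messages/", json=payload)
response.raise_for_status()
reply = response.json()["message"]  # a Message object: {"role": ..., "content": ...}
print(reply["content"])
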
@@ -5,8 +5,10 @@ from types import ModuleType
|
|
|
5
5
|
from typing import Any, Optional
|
|
6
6
|
|
|
7
7
|
from .datasets.dataset import Dataset
|
|
8
|
-
from .datasets.fixtures import ExampleDatasets
|
|
9
|
-
from .
|
|
8
|
+
from .datasets.fixtures import ExampleDatasets
|
|
9
|
+
from .inferences.fixtures import ExampleInferences, load_example
|
|
10
|
+
from .inferences.inferences import Inferences
|
|
11
|
+
from .inferences.schema import EmbeddingColumnNames, RetrievalEmbeddingColumnNames, Schema
|
|
10
12
|
from .session.client import Client
|
|
11
13
|
from .session.evaluation import log_evaluations
|
|
12
14
|
from .session.session import NotebookEnvironment, Session, active_session, close_app, launch_app
|
|
@@ -33,11 +35,13 @@ Here are just a few of the things that phoenix does well:
|
|
|
33
35
|
__all__ = [
|
|
34
36
|
"__version__",
|
|
35
37
|
"Dataset",
|
|
38
|
+
"ExampleDatasets",
|
|
39
|
+
"Inferences",
|
|
36
40
|
"EmbeddingColumnNames",
|
|
37
41
|
"RetrievalEmbeddingColumnNames",
|
|
38
42
|
"Schema",
|
|
39
43
|
"load_example",
|
|
40
|
-
"
|
|
44
|
+
"ExampleInferences",
|
|
41
45
|
"active_session",
|
|
42
46
|
"close_app",
|
|
43
47
|
"launch_app",
|
|
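These import changes surface the datasets-to-inferences rename at the top level: phoenix.Inferences and phoenix.ExampleInferences are now exported alongside the old Dataset and ExampleDatasets names, which are kept as deprecated shims (see the new src/phoenix/datasets/dataset.py below). An illustrative sketch of the new spelling; the column names are made up, and Inferences(dataframe, schema) mirrors how the deprecated shim constructs itself in the diff further down:

# Sketch of the renamed API surface: phoenix.Inferences replaces phoenix.Dataset.
# The dataframe and column names below are illustrative only.
import pandas as pd

import phoenix as px

df = pd.DataFrame(
    {
        "prediction": ["fraud", "not_fraud"],
        "actual": ["fraud", "fraud"],
        "age": [34, 51],
    }
)
schema = px.Schema(
    prediction_label_column_name="prediction",
    actual_label_column_name="actual",
    feature_column_names=["age"],
)
primary = px.Inferences(df, schema)  # previously px.Dataset(df, schema)
session = px.launch_app(primary)
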
{arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/core/model.py
@@ -1,13 +1,13 @@
 from typing import List, Optional, Union
 
-from phoenix.
-from phoenix.
+from phoenix.inferences.inferences import Inferences
+from phoenix.inferences.schema import EmbeddingColumnNames, EmbeddingFeatures
 
 from .embedding_dimension import EmbeddingDimension
 
 
 def _get_embedding_dimensions(
-    primary_dataset:
+    primary_dataset: Inferences, reference_dataset: Optional[Inferences]
 ) -> List[EmbeddingDimension]:
     embedding_dimensions: List[EmbeddingDimension] = []
     embedding_features: EmbeddingFeatures = {}
@@ -58,8 +58,8 @@ def _get_embedding_dimensions(
 def _check_embedding_vector_lengths_match_across_datasets(
     embedding_feature_name: str,
     embedding_column_names: EmbeddingColumnNames,
-    primary_dataset:
-    reference_dataset:
+    primary_dataset: Inferences,
+    reference_dataset: Inferences,
 ) -> None:
     """
     Ensure that for each embedding feature, the vector lengths match across the primary
@@ -86,7 +86,9 @@ def _check_embedding_vector_lengths_match_across_datasets(
     )
 
 
-def _get_column_vector_length(
+def _get_column_vector_length(
+    dataset: Inferences, embedding_vector_column_name: str
+) -> Optional[int]:
     """
     Because a dataset has already been constructed, we can assume that the lengths
     of the vectors for any given embedding feature in the dataset are the same.

{arize_phoenix-3.19.3rc1 → arize_phoenix-3.20.0}/src/phoenix/core/model_schema_adapter.py
@@ -6,18 +6,18 @@ import pandas as pd
 from pandas.api.types import is_object_dtype
 from typing_extensions import TypeAlias, TypeGuard
 
-from phoenix import
+from phoenix import EmbeddingColumnNames, Inferences
 from phoenix.core.model import _get_embedding_dimensions
 from phoenix.core.model_schema import Embedding, Model, RetrievalEmbedding, Schema
-from phoenix.
-from phoenix.
+from phoenix.inferences.schema import RetrievalEmbeddingColumnNames
+from phoenix.inferences.schema import Schema as DatasetSchema
 
 DatasetName: TypeAlias = str
 ColumnName: TypeAlias = str
 DisplayName: TypeAlias = str
 
 
-def create_model_from_datasets(*datasets: Optional[
+def create_model_from_datasets(*datasets: Optional[Inferences]) -> Model:
     # TODO: move this validation into model_schema.Model.
     if len(datasets) > 1 and datasets[0] is not None:
         # Check that for each embedding dimension all vectors
@@ -132,8 +132,8 @@ def create_model_from_datasets(*datasets: Optional[Dataset]) -> Model:
     )
 
 
-def _is_dataset(obj: Optional[
-    return type(obj) is
+def _is_dataset(obj: Optional[Inferences]) -> TypeGuard[Inferences]:
+    return type(obj) is Inferences
 
 
 def _take_first_str(iterator: Iterable[str]) -> str:

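The adapter now narrows Optional[Inferences] with a typing_extensions.TypeGuard, so type checkers treat values that pass _is_dataset as Inferences. A small illustrative sketch of how such a guard is typically consumed (the non_empty helper is hypothetical and not part of the adapter):

# Illustrative sketch of consuming the _is_dataset TypeGuard: filtering out None
# entries while letting the type checker narrow Optional[Inferences] to Inferences.
from typing import Optional, Tuple

from typing_extensions import TypeGuard

from phoenix import Inferences


def _is_dataset(obj: Optional[Inferences]) -> TypeGuard[Inferences]:
    return type(obj) is Inferences


def non_empty(*datasets: Optional[Inferences]) -> Tuple[Inferences, ...]:
    # filter() with a TypeGuard predicate yields values typed as Inferences
    return tuple(filter(_is_dataset, datasets))
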
arize_phoenix-3.20.0/src/phoenix/datasets/dataset.py
@@ -0,0 +1,214 @@
+import re
+from dataclasses import dataclass, replace
+from enum import Enum
+from itertools import groupby
+from typing import Dict
+
+from pandas import DataFrame
+
+from phoenix.inferences.inferences import Inferences
+from phoenix.inferences.schema import EmbeddingColumnNames, RetrievalEmbeddingColumnNames, Schema
+from phoenix.utilities.deprecation import deprecated, deprecated_class
+
+
+@deprecated_class("phoenix.Dataset is deprecated, use phoenix.Inference instead.")
+class Dataset(Inferences):
+    @classmethod
+    @deprecated("Dataset.from_open_inference is deprecated and will be removed.")
+    def from_open_inference(cls, dataframe: DataFrame) -> "Dataset":
+        schema = Schema()
+        column_renaming: Dict[str, str] = {}
+        for group_name, group in groupby(
+            sorted(
+                map(_parse_open_inference_column_name, dataframe.columns),
+                key=lambda column: column.name,
+            ),
+            key=lambda column: column.name,
+        ):
+            open_inference_columns = list(group)
+            if group_name == "":
+                column_names_by_category = {
+                    column.category: column.full_name for column in open_inference_columns
+                }
+                schema = replace(
+                    schema,
+                    prediction_id_column_name=column_names_by_category.get(
+                        OpenInferenceCategory.id
+                    ),
+                    timestamp_column_name=column_names_by_category.get(
+                        OpenInferenceCategory.timestamp
+                    ),
+                )
+                continue
+            column_names_by_specifier = {
+                column.specifier: column.full_name for column in open_inference_columns
+            }
+            if group_name == "response":
+                response_vector_column_name = column_names_by_specifier.get(
+                    OpenInferenceSpecifier.embedding
+                )
+                if response_vector_column_name is not None:
+                    column_renaming[response_vector_column_name] = "response"
+                    schema = replace(
+                        schema,
+                        response_column_names=EmbeddingColumnNames(
+                            vector_column_name=column_renaming[response_vector_column_name],
+                            raw_data_column_name=column_names_by_specifier.get(
+                                OpenInferenceSpecifier.default
+                            ),
+                        ),
+                    )
+                else:
+                    response_text_column_name = column_names_by_specifier.get(
+                        OpenInferenceSpecifier.default
+                    )
+                    if response_text_column_name is None:
+                        raise ValueError(
+                            "invalid OpenInference format: missing text column for response"
+                        )
+                    column_renaming[response_text_column_name] = "response"
+                    schema = replace(
+                        schema,
+                        response_column_names=column_renaming[response_text_column_name],
+                    )
+            elif group_name == "prompt":
+                prompt_vector_column_name = column_names_by_specifier.get(
+                    OpenInferenceSpecifier.embedding
+                )
+                if prompt_vector_column_name is None:
+                    raise ValueError(
+                        "invalid OpenInference format: missing embedding vector column for prompt"
+                    )
+                column_renaming[prompt_vector_column_name] = "prompt"
+                schema = replace(
+                    schema,
+                    prompt_column_names=RetrievalEmbeddingColumnNames(
+                        vector_column_name=column_renaming[prompt_vector_column_name],
+                        raw_data_column_name=column_names_by_specifier.get(
+                            OpenInferenceSpecifier.default
+                        ),
+                        context_retrieval_ids_column_name=column_names_by_specifier.get(
+                            OpenInferenceSpecifier.retrieved_document_ids
+                        ),
+                        context_retrieval_scores_column_name=column_names_by_specifier.get(
+                            OpenInferenceSpecifier.retrieved_document_scores
+                        ),
+                    ),
+                )
+            elif OpenInferenceSpecifier.embedding in column_names_by_specifier:
+                vector_column_name = column_names_by_specifier[OpenInferenceSpecifier.embedding]
+                column_renaming[vector_column_name] = group_name
+                embedding_feature_column_names = schema.embedding_feature_column_names or {}
+                embedding_feature_column_names.update(
+                    {
+                        group_name: EmbeddingColumnNames(
+                            vector_column_name=column_renaming[vector_column_name],
+                            raw_data_column_name=column_names_by_specifier.get(
+                                OpenInferenceSpecifier.raw_data
+                            ),
+                            link_to_data_column_name=column_names_by_specifier.get(
+                                OpenInferenceSpecifier.link_to_data
+                            ),
+                        )
+                    }
+                )
+                schema = replace(
+                    schema,
+                    embedding_feature_column_names=embedding_feature_column_names,
+                )
+            elif len(open_inference_columns) == 1:
+                open_inference_column = open_inference_columns[0]
+                raw_column_name = open_inference_column.full_name
+                column_renaming[raw_column_name] = open_inference_column.name
+                if open_inference_column.category is OpenInferenceCategory.feature:
+                    schema = replace(
+                        schema,
+                        feature_column_names=(
+                            (schema.feature_column_names or []) + [column_renaming[raw_column_name]]
+                        ),
+                    )
+                elif open_inference_column.category is OpenInferenceCategory.tag:
+                    schema = replace(
+                        schema,
+                        tag_column_names=(
+                            (schema.tag_column_names or []) + [column_renaming[raw_column_name]]
+                        ),
+                    )
+                elif open_inference_column.category is OpenInferenceCategory.prediction:
+                    if open_inference_column.specifier is OpenInferenceSpecifier.score:
+                        schema = replace(
+                            schema,
+                            prediction_score_column_name=column_renaming[raw_column_name],
+                        )
+                    if open_inference_column.specifier is OpenInferenceSpecifier.label:
+                        schema = replace(
+                            schema,
+                            prediction_label_column_name=column_renaming[raw_column_name],
+                        )
+                elif open_inference_column.category is OpenInferenceCategory.actual:
+                    if open_inference_column.specifier is OpenInferenceSpecifier.score:
+                        schema = replace(
+                            schema,
+                            actual_score_column_name=column_renaming[raw_column_name],
+                        )
+                    if open_inference_column.specifier is OpenInferenceSpecifier.label:
+                        schema = replace(
+                            schema,
+                            actual_label_column_name=column_renaming[raw_column_name],
+                        )
+            else:
+                raise ValueError(f"invalid OpenInference format: duplicated name `{group_name}`")
+
+        return cls(
+            dataframe.rename(
+                column_renaming,
+                axis=1,
+                copy=False,
+            ),
+            schema,
+        )
+
+
+class OpenInferenceCategory(Enum):
+    id = "id"
+    timestamp = "timestamp"
+    feature = "feature"
+    tag = "tag"
+    prediction = "prediction"
+    actual = "actual"
+
+
+class OpenInferenceSpecifier(Enum):
+    default = ""
+    score = "score"
+    label = "label"
+    embedding = "embedding"
+    raw_data = "raw_data"
+    link_to_data = "link_to_data"
+    retrieved_document_ids = "retrieved_document_ids"
+    retrieved_document_scores = "retrieved_document_scores"
+
+
+@dataclass(frozen=True)
+class _OpenInferenceColumnName:
+    full_name: str
+    category: OpenInferenceCategory
+    data_type: str
+    specifier: OpenInferenceSpecifier = OpenInferenceSpecifier.default
+    name: str = ""
+
+
+def _parse_open_inference_column_name(column_name: str) -> _OpenInferenceColumnName:
+    pattern = (
+        r"^:(?P<category>\w+)\.(?P<data_type>\[\w+\]|\w+)(\.(?P<specifier>\w+))?:(?P<name>.*)?$"
+    )
+    if match := re.match(pattern, column_name):
+        extract = match.groupdict(default="")
+        return _OpenInferenceColumnName(
+            full_name=column_name,
+            category=OpenInferenceCategory(extract.get("category", "").lower()),
+            data_type=extract.get("data_type", "").lower(),
+            specifier=OpenInferenceSpecifier(extract.get("specifier", "").lower()),
+            name=extract.get("name", ""),
+        )
+    raise ValueError(f"Invalid format for column name: {column_name}")
