arize-phoenix 3.0.2__tar.gz → 3.0.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arize-phoenix might be problematic. Click here for more details.
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/PKG-INFO +3 -1
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/pyproject.toml +2 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/core/traces.py +14 -9
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/functions/classify.py +5 -1
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/utils/__init__.py +1 -1
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/input_types/SpanSort.py +4 -4
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/Span.py +13 -14
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/dsl/filter.py +7 -4
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/dsl/helpers.py +7 -7
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/dsl/query.py +3 -1
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/errors.py +4 -0
- arize_phoenix-3.0.3/src/phoenix/trace/llama_index/__init__.py +3 -0
- arize_phoenix-3.0.3/src/phoenix/trace/llama_index/callback.py +77 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/otel.py +52 -14
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/schemas.py +4 -6
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/span_json_decoder.py +6 -5
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/span_json_encoder.py +1 -6
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/trace_dataset.py +15 -12
- arize_phoenix-3.0.3/src/phoenix/version.py +1 -0
- arize_phoenix-3.0.2/src/phoenix/trace/llama_index/__init__.py +0 -4
- arize_phoenix-3.0.2/src/phoenix/trace/llama_index/callback.py +0 -42
- arize_phoenix-3.0.2/src/phoenix/trace/llama_index/debug_callback.py +0 -50
- arize_phoenix-3.0.2/src/phoenix/trace/semantic_conventions.py +0 -172
- arize_phoenix-3.0.2/src/phoenix/version.py +0 -1
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/.gitignore +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/IP_NOTICE +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/LICENSE +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/README.md +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/config.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/core/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/core/embedding_dimension.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/core/evals.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/core/model.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/core/model_schema.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/core/model_schema_adapter.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/datasets/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/datasets/dataset.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/datasets/errors.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/datasets/fixtures.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/datasets/schema.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/datasets/validation.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/datetime_utils.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/exceptions.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/evaluators.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/functions/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/functions/executor.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/functions/generate.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/functions/processing.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/models/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/models/anthropic.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/models/base.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/models/bedrock.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/models/litellm.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/models/openai.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/models/rate_limiters.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/models/vertex.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/models/vertexai.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/retrievals.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/templates/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/templates/default_templates.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/templates/template.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/utils/threads.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/metrics/README.md +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/metrics/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/metrics/binning.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/metrics/metrics.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/metrics/mixins.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/metrics/retrieval_metrics.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/metrics/timeseries.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/metrics/wrappers.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/pointcloud/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/pointcloud/clustering.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/pointcloud/pointcloud.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/pointcloud/projectors.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/pointcloud/umap_parameters.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/py.typed +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/context.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/helpers.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/input_types/ClusterInput.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/input_types/Coordinates.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/input_types/DataQualityMetricInput.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/input_types/DimensionFilter.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/input_types/DimensionInput.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/input_types/Granularity.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/input_types/PerformanceMetricInput.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/input_types/TimeRange.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/input_types/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/interceptor.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/routers/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/routers/evaluation_handler.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/routers/span_handler.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/routers/trace_handler.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/routers/utils.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/schema.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/Cluster.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/DataQualityMetric.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/Dataset.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/DatasetInfo.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/DatasetRole.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/DatasetValues.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/Dimension.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/DimensionDataType.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/DimensionShape.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/DimensionType.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/DimensionWithValue.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/DocumentEvaluationSummary.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/DocumentRetrievalMetrics.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/EmbeddingDimension.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/EmbeddingMetadata.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/Evaluation.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/EvaluationSummary.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/Event.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/EventMetadata.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/ExportEventsMutation.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/ExportedFile.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/Functionality.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/MimeType.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/Model.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/NumericRange.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/PerformanceMetric.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/PromptResponse.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/Retrieval.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/ScalarDriftMetricEnum.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/Segments.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/SortDir.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/TimeSeries.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/UMAPPoints.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/ValidationResult.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/VectorDriftMetricEnum.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/node.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/api/types/pagination.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/app.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/main.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/apple-touch-icon-114x114.png +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/apple-touch-icon-120x120.png +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/apple-touch-icon-144x144.png +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/apple-touch-icon-152x152.png +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/apple-touch-icon-180x180.png +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/apple-touch-icon-72x72.png +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/apple-touch-icon-76x76.png +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/apple-touch-icon.png +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/favicon.ico +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/index.css +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/index.js +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/static/modernizr.js +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/templates/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/templates/index.html +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/server/thread_server.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/services.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/session/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/session/client.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/session/data_extractor.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/session/evaluation.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/session/session.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/dsl/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/dsl/missing.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/evaluation_conventions.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/exporter.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/fixtures.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/langchain/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/langchain/instrumentor.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/langchain/tracer.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/openai/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/openai/instrumentor.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/span_evaluations.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/tracer.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/utils.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/v1/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/v1/evaluation_pb2.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/trace/v1/evaluation_pb2.pyi +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/utilities/__init__.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/utilities/error_handling.py +0 -0
- {arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/utilities/logging.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: arize-phoenix
|
|
3
|
-
Version: 3.0.
|
|
3
|
+
Version: 3.0.3
|
|
4
4
|
Summary: ML Observability in your notebook
|
|
5
5
|
Project-URL: Documentation, https://docs.arize.com/phoenix/
|
|
6
6
|
Project-URL: Issues, https://github.com/Arize-ai/phoenix/issues
|
|
@@ -23,6 +23,7 @@ Requires-Dist: numpy
|
|
|
23
23
|
Requires-Dist: openinference-instrumentation-langchain
|
|
24
24
|
Requires-Dist: openinference-instrumentation-llama-index
|
|
25
25
|
Requires-Dist: openinference-instrumentation-openai
|
|
26
|
+
Requires-Dist: openinference-semantic-conventions
|
|
26
27
|
Requires-Dist: opentelemetry-exporter-otlp
|
|
27
28
|
Requires-Dist: opentelemetry-proto
|
|
28
29
|
Requires-Dist: opentelemetry-sdk
|
|
@@ -64,6 +65,7 @@ Provides-Extra: experimental
|
|
|
64
65
|
Requires-Dist: tenacity; extra == 'experimental'
|
|
65
66
|
Provides-Extra: llama-index
|
|
66
67
|
Requires-Dist: llama-index==0.9.45; extra == 'llama-index'
|
|
68
|
+
Requires-Dist: openinference-instrumentation-llama-index==0.1.3; extra == 'llama-index'
|
|
67
69
|
Description-Content-Type: text/markdown
|
|
68
70
|
|
|
69
71
|
<p align="center">
|
|
@@ -43,6 +43,7 @@ dependencies = [
|
|
|
43
43
|
"opentelemetry-sdk",
|
|
44
44
|
"opentelemetry-proto",
|
|
45
45
|
"opentelemetry-exporter-otlp",
|
|
46
|
+
"openinference-semantic-conventions",
|
|
46
47
|
"openinference-instrumentation-langchain",
|
|
47
48
|
"openinference-instrumentation-llama-index",
|
|
48
49
|
"openinference-instrumentation-openai",
|
|
@@ -75,6 +76,7 @@ experimental = [
|
|
|
75
76
|
]
|
|
76
77
|
llama-index = [
|
|
77
78
|
"llama-index==0.9.45", # always pin to a version that keeps our notebooks working
|
|
79
|
+
"openinference-instrumentation-llama-index==0.1.3",
|
|
78
80
|
]
|
|
79
81
|
|
|
80
82
|
[project.urls]
|
|
@@ -20,12 +20,13 @@ from typing import (
|
|
|
20
20
|
|
|
21
21
|
import opentelemetry.proto.trace.v1.trace_pb2 as otlp
|
|
22
22
|
from ddsketch import DDSketch
|
|
23
|
+
from openinference.semconv.trace import SpanAttributes
|
|
23
24
|
from sortedcontainers import SortedKeyList
|
|
24
25
|
from typing_extensions import TypeAlias
|
|
25
26
|
from wrapt import ObjectProxy
|
|
26
27
|
|
|
28
|
+
import phoenix.trace.schemas
|
|
27
29
|
from phoenix.datetime_utils import right_open_time_range
|
|
28
|
-
from phoenix.trace import semantic_conventions
|
|
29
30
|
from phoenix.trace.otel import decode
|
|
30
31
|
from phoenix.trace.schemas import (
|
|
31
32
|
ATTRIBUTE_PREFIX,
|
|
@@ -33,12 +34,10 @@ from phoenix.trace.schemas import (
|
|
|
33
34
|
CONTEXT_PREFIX,
|
|
34
35
|
ComputedAttributes,
|
|
35
36
|
Span,
|
|
36
|
-
SpanAttributes,
|
|
37
37
|
SpanID,
|
|
38
38
|
SpanStatusCode,
|
|
39
39
|
TraceID,
|
|
40
40
|
)
|
|
41
|
-
from phoenix.trace.semantic_conventions import RETRIEVAL_DOCUMENTS
|
|
42
41
|
|
|
43
42
|
END_OF_QUEUE = None # sentinel value for queue termination
|
|
44
43
|
|
|
@@ -50,9 +49,9 @@ SPAN_ID = CONTEXT_PREFIX + "span_id"
|
|
|
50
49
|
PARENT_ID = "parent_id"
|
|
51
50
|
START_TIME = "start_time"
|
|
52
51
|
END_TIME = "end_time"
|
|
53
|
-
LLM_TOKEN_COUNT_TOTAL = ATTRIBUTE_PREFIX +
|
|
54
|
-
LLM_TOKEN_COUNT_PROMPT = ATTRIBUTE_PREFIX +
|
|
55
|
-
LLM_TOKEN_COUNT_COMPLETION = ATTRIBUTE_PREFIX +
|
|
52
|
+
LLM_TOKEN_COUNT_TOTAL = ATTRIBUTE_PREFIX + SpanAttributes.LLM_TOKEN_COUNT_TOTAL
|
|
53
|
+
LLM_TOKEN_COUNT_PROMPT = ATTRIBUTE_PREFIX + SpanAttributes.LLM_TOKEN_COUNT_PROMPT
|
|
54
|
+
LLM_TOKEN_COUNT_COMPLETION = ATTRIBUTE_PREFIX + SpanAttributes.LLM_TOKEN_COUNT_COMPLETION
|
|
56
55
|
|
|
57
56
|
|
|
58
57
|
class ReadableSpan(ObjectProxy): # type: ignore
|
|
@@ -73,7 +72,9 @@ class ReadableSpan(ObjectProxy): # type: ignore
|
|
|
73
72
|
@property
|
|
74
73
|
def span(self) -> Span:
|
|
75
74
|
span = decode(self._self_otlp_span)
|
|
76
|
-
span.attributes.update(
|
|
75
|
+
span.attributes.update(
|
|
76
|
+
cast(phoenix.trace.schemas.SpanAttributes, self._self_computed_values)
|
|
77
|
+
)
|
|
77
78
|
# TODO: compute latency rank percent (which can change depending on how
|
|
78
79
|
# many spans already ingested).
|
|
79
80
|
return span
|
|
@@ -333,9 +334,13 @@ class Traces:
|
|
|
333
334
|
self._token_count_total -= existing_span[LLM_TOKEN_COUNT_TOTAL] or 0
|
|
334
335
|
self._token_count_total += new_span[LLM_TOKEN_COUNT_TOTAL] or 0
|
|
335
336
|
# Update number of documents
|
|
336
|
-
num_documents_update = len(
|
|
337
|
+
num_documents_update = len(
|
|
338
|
+
new_span.attributes.get(SpanAttributes.RETRIEVAL_DOCUMENTS) or ()
|
|
339
|
+
)
|
|
337
340
|
if existing_span:
|
|
338
|
-
num_documents_update -= len(
|
|
341
|
+
num_documents_update -= len(
|
|
342
|
+
existing_span.attributes.get(SpanAttributes.RETRIEVAL_DOCUMENTS) or ()
|
|
343
|
+
)
|
|
339
344
|
if num_documents_update:
|
|
340
345
|
self._num_documents[span_id] += num_documents_update
|
|
341
346
|
# Process previously orphaned spans, if any.
|
{arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/functions/classify.py
RENAMED
|
@@ -19,6 +19,7 @@ from typing import (
|
|
|
19
19
|
)
|
|
20
20
|
|
|
21
21
|
import pandas as pd
|
|
22
|
+
from openinference.semconv.trace import DocumentAttributes, SpanAttributes
|
|
22
23
|
from pandas import DataFrame
|
|
23
24
|
from typing_extensions import TypeAlias
|
|
24
25
|
|
|
@@ -41,9 +42,12 @@ from phoenix.experimental.evals.utils import (
|
|
|
41
42
|
parse_openai_function_call,
|
|
42
43
|
snap_to_rail,
|
|
43
44
|
)
|
|
44
|
-
from phoenix.trace.semantic_conventions import DOCUMENT_CONTENT, INPUT_VALUE, RETRIEVAL_DOCUMENTS
|
|
45
45
|
from phoenix.utilities.logging import printif
|
|
46
46
|
|
|
47
|
+
DOCUMENT_CONTENT = DocumentAttributes.DOCUMENT_CONTENT
|
|
48
|
+
INPUT_VALUE = SpanAttributes.INPUT_VALUE
|
|
49
|
+
RETRIEVAL_DOCUMENTS = SpanAttributes.RETRIEVAL_DOCUMENTS
|
|
50
|
+
|
|
47
51
|
logger = logging.getLogger(__name__)
|
|
48
52
|
|
|
49
53
|
|
{arize_phoenix-3.0.2 → arize_phoenix-3.0.3}/src/phoenix/experimental/evals/utils/__init__.py
RENAMED
|
@@ -32,7 +32,7 @@ def download_benchmark_dataset(task: str, dataset_name: str) -> pd.DataFrame:
|
|
|
32
32
|
pandas.DataFrame: A pandas dataframe containing the data.
|
|
33
33
|
"""
|
|
34
34
|
jsonl_file_name = f"{dataset_name}.jsonl"
|
|
35
|
-
url = f"http://storage.googleapis.com/arize-assets/
|
|
35
|
+
url = f"http://storage.googleapis.com/arize-phoenix-assets/evals/{task}/{jsonl_file_name}.zip"
|
|
36
36
|
try:
|
|
37
37
|
with urlopen(url) as response:
|
|
38
38
|
zip_byte_stream = BytesIO(response.read())
|
|
@@ -4,6 +4,7 @@ from typing import Any, Iterable, Iterator, Optional, Protocol
|
|
|
4
4
|
|
|
5
5
|
import pandas as pd
|
|
6
6
|
import strawberry
|
|
7
|
+
from openinference.semconv.trace import SpanAttributes
|
|
7
8
|
from strawberry import UNSET
|
|
8
9
|
from typing_extensions import assert_never
|
|
9
10
|
|
|
@@ -13,7 +14,6 @@ from phoenix.core.traces import (
|
|
|
13
14
|
START_TIME,
|
|
14
15
|
)
|
|
15
16
|
from phoenix.server.api.types.SortDir import SortDir
|
|
16
|
-
from phoenix.trace import semantic_conventions
|
|
17
17
|
from phoenix.trace.schemas import ComputedAttributes, Span, SpanID
|
|
18
18
|
|
|
19
19
|
|
|
@@ -22,9 +22,9 @@ class SpanColumn(Enum):
|
|
|
22
22
|
startTime = START_TIME
|
|
23
23
|
endTime = END_TIME
|
|
24
24
|
latencyMs = ComputedAttributes.LATENCY_MS.value
|
|
25
|
-
tokenCountTotal =
|
|
26
|
-
tokenCountPrompt =
|
|
27
|
-
tokenCountCompletion =
|
|
25
|
+
tokenCountTotal = SpanAttributes.LLM_TOKEN_COUNT_TOTAL
|
|
26
|
+
tokenCountPrompt = SpanAttributes.LLM_TOKEN_COUNT_PROMPT
|
|
27
|
+
tokenCountCompletion = SpanAttributes.LLM_TOKEN_COUNT_COMPLETION
|
|
28
28
|
cumulativeTokenCountTotal = ComputedAttributes.CUMULATIVE_LLM_TOKEN_COUNT_TOTAL.value
|
|
29
29
|
cumulativeTokenCountPrompt = ComputedAttributes.CUMULATIVE_LLM_TOKEN_COUNT_PROMPT.value
|
|
30
30
|
cumulativeTokenCountCompletion = ComputedAttributes.CUMULATIVE_LLM_TOKEN_COUNT_COMPLETION.value
|
|
@@ -5,6 +5,7 @@ from enum import Enum
|
|
|
5
5
|
from typing import Any, DefaultDict, Dict, List, Mapping, Optional, Sized, cast
|
|
6
6
|
|
|
7
7
|
import strawberry
|
|
8
|
+
from openinference.semconv.trace import EmbeddingAttributes, SpanAttributes
|
|
8
9
|
from strawberry import ID, UNSET
|
|
9
10
|
from strawberry.types import Info
|
|
10
11
|
|
|
@@ -15,19 +16,17 @@ from phoenix.server.api.types.DocumentRetrievalMetrics import DocumentRetrievalM
|
|
|
15
16
|
from phoenix.server.api.types.Evaluation import DocumentEvaluation, SpanEvaluation
|
|
16
17
|
from phoenix.server.api.types.MimeType import MimeType
|
|
17
18
|
from phoenix.trace.schemas import ComputedAttributes, SpanID
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
RETRIEVAL_DOCUMENTS,
|
|
30
|
-
)
|
|
19
|
+
|
|
20
|
+
EMBEDDING_EMBEDDINGS = SpanAttributes.EMBEDDING_EMBEDDINGS
|
|
21
|
+
EMBEDDING_VECTOR = EmbeddingAttributes.EMBEDDING_VECTOR
|
|
22
|
+
INPUT_MIME_TYPE = SpanAttributes.INPUT_MIME_TYPE
|
|
23
|
+
INPUT_VALUE = SpanAttributes.INPUT_VALUE
|
|
24
|
+
LLM_TOKEN_COUNT_COMPLETION = SpanAttributes.LLM_TOKEN_COUNT_COMPLETION
|
|
25
|
+
LLM_TOKEN_COUNT_PROMPT = SpanAttributes.LLM_TOKEN_COUNT_PROMPT
|
|
26
|
+
LLM_TOKEN_COUNT_TOTAL = SpanAttributes.LLM_TOKEN_COUNT_TOTAL
|
|
27
|
+
OUTPUT_MIME_TYPE = SpanAttributes.OUTPUT_MIME_TYPE
|
|
28
|
+
OUTPUT_VALUE = SpanAttributes.OUTPUT_VALUE
|
|
29
|
+
RETRIEVAL_DOCUMENTS = SpanAttributes.RETRIEVAL_DOCUMENTS
|
|
31
30
|
|
|
32
31
|
|
|
33
32
|
@strawberry.enum
|
|
@@ -87,7 +86,7 @@ class SpanEvent:
|
|
|
87
86
|
) -> "SpanEvent":
|
|
88
87
|
return SpanEvent(
|
|
89
88
|
name=event.name,
|
|
90
|
-
message=cast(str, event.attributes.get(EXCEPTION_MESSAGE) or ""),
|
|
89
|
+
message=cast(str, event.attributes.get(trace_schema.EXCEPTION_MESSAGE) or ""),
|
|
91
90
|
timestamp=event.timestamp,
|
|
92
91
|
)
|
|
93
92
|
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import ast
|
|
2
|
+
import inspect
|
|
2
3
|
import sys
|
|
3
4
|
from dataclasses import dataclass, field
|
|
4
5
|
from difflib import SequenceMatcher
|
|
@@ -15,10 +16,10 @@ from typing import (
|
|
|
15
16
|
cast,
|
|
16
17
|
)
|
|
17
18
|
|
|
19
|
+
from openinference.semconv import trace
|
|
18
20
|
from typing_extensions import TypeGuard
|
|
19
21
|
|
|
20
22
|
import phoenix.trace.v1 as pb
|
|
21
|
-
from phoenix.trace import semantic_conventions
|
|
22
23
|
from phoenix.trace.dsl.missing import MISSING
|
|
23
24
|
from phoenix.trace.schemas import COMPUTED_PREFIX, ComputedAttributes, Span, SpanID
|
|
24
25
|
|
|
@@ -137,9 +138,11 @@ def _allowed_replacements() -> Iterator[Tuple[str, ast.expr]]:
|
|
|
137
138
|
yield "span.context." + source_segment, ast_replacement
|
|
138
139
|
|
|
139
140
|
for field_name in (
|
|
140
|
-
getattr(
|
|
141
|
-
for
|
|
142
|
-
if
|
|
141
|
+
getattr(klass, attr)
|
|
142
|
+
for name in dir(trace)
|
|
143
|
+
if name.endswith("Attributes") and inspect.isclass(klass := getattr(trace, name))
|
|
144
|
+
for attr in dir(klass)
|
|
145
|
+
if attr.isupper()
|
|
143
146
|
):
|
|
144
147
|
source_segment = field_name
|
|
145
148
|
ast_replacement = _ast_replacement(f"span.attributes.get('{field_name}')")
|
|
@@ -1,15 +1,15 @@
|
|
|
1
1
|
from typing import List, Optional, Protocol, Union, cast
|
|
2
2
|
|
|
3
3
|
import pandas as pd
|
|
4
|
+
from openinference.semconv.trace import DocumentAttributes, SpanAttributes
|
|
4
5
|
|
|
5
6
|
from phoenix.trace.dsl import SpanQuery
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
)
|
|
7
|
+
|
|
8
|
+
DOCUMENT_CONTENT = DocumentAttributes.DOCUMENT_CONTENT
|
|
9
|
+
DOCUMENT_SCORE = DocumentAttributes.DOCUMENT_SCORE
|
|
10
|
+
INPUT_VALUE = SpanAttributes.INPUT_VALUE
|
|
11
|
+
OUTPUT_VALUE = SpanAttributes.OUTPUT_VALUE
|
|
12
|
+
RETRIEVAL_DOCUMENTS = SpanAttributes.RETRIEVAL_DOCUMENTS
|
|
13
13
|
|
|
14
14
|
INPUT = {"input": INPUT_VALUE}
|
|
15
15
|
OUTPUT = {"output": OUTPUT_VALUE}
|
|
@@ -19,13 +19,15 @@ from typing import (
|
|
|
19
19
|
)
|
|
20
20
|
|
|
21
21
|
import pandas as pd
|
|
22
|
+
from openinference.semconv.trace import SpanAttributes
|
|
22
23
|
|
|
23
24
|
from phoenix.trace.dsl import SpanFilter
|
|
24
25
|
from phoenix.trace.dsl.filter import SupportsGetSpanEvaluation
|
|
25
26
|
from phoenix.trace.schemas import ATTRIBUTE_PREFIX, CONTEXT_PREFIX, Span
|
|
26
|
-
from phoenix.trace.semantic_conventions import RETRIEVAL_DOCUMENTS
|
|
27
27
|
from phoenix.trace.span_json_encoder import span_to_json
|
|
28
28
|
|
|
29
|
+
RETRIEVAL_DOCUMENTS = SpanAttributes.RETRIEVAL_DOCUMENTS
|
|
30
|
+
|
|
29
31
|
_SPAN_ID = "context.span_id"
|
|
30
32
|
_PRESCRIBED_POSITION_PREFIXES = {
|
|
31
33
|
RETRIEVAL_DOCUMENTS: "document_",
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from importlib.metadata import PackageNotFoundError, version
|
|
3
|
+
from importlib.util import find_spec
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
from opentelemetry import trace as trace_api
|
|
7
|
+
from opentelemetry.sdk import trace as trace_sdk
|
|
8
|
+
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
|
|
9
|
+
|
|
10
|
+
from phoenix.trace.errors import IncompatibleLibraryVersionError
|
|
11
|
+
from phoenix.trace.exporter import _OpenInferenceExporter
|
|
12
|
+
from phoenix.trace.tracer import _show_deprecation_warnings
|
|
13
|
+
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
LLAMA_INDEX_MODERN_VERSION = (0, 10, 0)
|
|
17
|
+
INSTRUMENTATION_MODERN_VERSION = (1, 0, 0)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _check_instrumentation_compatibility() -> bool:
|
|
21
|
+
if find_spec("llama_index") is None:
|
|
22
|
+
raise PackageNotFoundError("Missing `llama-index`. Install with `pip install llama-index`.")
|
|
23
|
+
# split the version string into a tuple of integers
|
|
24
|
+
llama_index_version_str = version("llama-index")
|
|
25
|
+
llama_index_version = tuple(map(int, llama_index_version_str.split(".")[:3]))
|
|
26
|
+
instrumentation_version_str = version("openinference-instrumentation-llama-index")
|
|
27
|
+
instrumentation_version = tuple(map(int, instrumentation_version_str.split(".")[:3]))
|
|
28
|
+
# check if the llama_index version is compatible with the instrumentation version
|
|
29
|
+
if (
|
|
30
|
+
llama_index_version < LLAMA_INDEX_MODERN_VERSION
|
|
31
|
+
and instrumentation_version >= INSTRUMENTATION_MODERN_VERSION
|
|
32
|
+
):
|
|
33
|
+
raise IncompatibleLibraryVersionError(
|
|
34
|
+
f"llama-index v{llama_index_version_str} is not compatible with "
|
|
35
|
+
f"openinference-instrumentation-llama-index v{instrumentation_version_str}."
|
|
36
|
+
"Please either migrate llama-index to at least 0.10.0 or downgrade "
|
|
37
|
+
"openinference-instrumentation-llama-index via "
|
|
38
|
+
"`pip install 'openinference-instrumentation-llama-index<1.0.0'`."
|
|
39
|
+
)
|
|
40
|
+
elif (
|
|
41
|
+
llama_index_version >= LLAMA_INDEX_MODERN_VERSION
|
|
42
|
+
and instrumentation_version < INSTRUMENTATION_MODERN_VERSION
|
|
43
|
+
):
|
|
44
|
+
raise IncompatibleLibraryVersionError(
|
|
45
|
+
f"llama-index v{llama_index_version_str} is not compatible with "
|
|
46
|
+
f"openinference-instrumentation-llama-index v{instrumentation_version_str}."
|
|
47
|
+
"Please upgrade openinference-instrumentation-llama-index to at least 1.0.0"
|
|
48
|
+
"`pip install 'openinference-instrumentation-llama-index>=1.0.0'`."
|
|
49
|
+
)
|
|
50
|
+
# if the versions are compatible, return True
|
|
51
|
+
return True
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
# Gate the instrumentation imports on the compatibility check so that an
# incompatible llama-index / instrumentation pairing fails loudly at import
# time (the check raises) instead of silently producing broken traces.
if _check_instrumentation_compatibility():
    from openinference.instrumentation.llama_index._callback import (
        OpenInferenceTraceCallbackHandler as _OpenInferenceTraceCallbackHandler,
    )
    from openinference.instrumentation.llama_index.version import (
        __version__,
    )
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class OpenInferenceTraceCallbackHandler(_OpenInferenceTraceCallbackHandler):
    """Callback handler for storing LLM application trace data in OpenInference format.

    OpenInference is an open standard for capturing and storing AI model
    inferences. It enables production LLMapp servers to seamlessly integrate
    with LLM observability solutions such as Arize and Phoenix.

    For more information on the specification, see
    https://github.com/Arize-ai/openinference
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Warn callers that still pass the deprecated constructor arguments.
        _show_deprecation_warnings(self, *args, **kwargs)
        # Build a dedicated tracer provider whose spans are exported to
        # Phoenix as soon as they finish (SimpleSpanProcessor exports
        # synchronously, span by span).
        provider = trace_sdk.TracerProvider()
        exporter = _OpenInferenceExporter()
        provider.add_span_processor(SimpleSpanProcessor(exporter))
        tracer = trace_api.get_tracer(__name__, __version__, provider)
        super().__init__(tracer)
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import inspect
|
|
1
2
|
import json
|
|
2
3
|
from binascii import hexlify, unhexlify
|
|
3
4
|
from datetime import datetime, timezone
|
|
@@ -21,12 +22,23 @@ from typing import (
|
|
|
21
22
|
|
|
22
23
|
import numpy as np
|
|
23
24
|
import opentelemetry.proto.trace.v1.trace_pb2 as otlp
|
|
25
|
+
from openinference.semconv import trace
|
|
26
|
+
from openinference.semconv.trace import (
|
|
27
|
+
DocumentAttributes,
|
|
28
|
+
EmbeddingAttributes,
|
|
29
|
+
MessageAttributes,
|
|
30
|
+
SpanAttributes,
|
|
31
|
+
ToolCallAttributes,
|
|
32
|
+
)
|
|
24
33
|
from opentelemetry.proto.common.v1.common_pb2 import AnyValue, ArrayValue, KeyValue
|
|
25
34
|
from opentelemetry.util.types import Attributes, AttributeValue
|
|
26
35
|
from typing_extensions import TypeAlias, assert_never
|
|
27
36
|
|
|
28
|
-
import phoenix.trace.semantic_conventions as sem_conv
|
|
29
37
|
from phoenix.trace.schemas import (
|
|
38
|
+
EXCEPTION_ESCAPED,
|
|
39
|
+
EXCEPTION_MESSAGE,
|
|
40
|
+
EXCEPTION_STACKTRACE,
|
|
41
|
+
EXCEPTION_TYPE,
|
|
30
42
|
MimeType,
|
|
31
43
|
Span,
|
|
32
44
|
SpanContext,
|
|
@@ -37,18 +49,38 @@ from phoenix.trace.schemas import (
|
|
|
37
49
|
SpanStatusCode,
|
|
38
50
|
TraceID,
|
|
39
51
|
)
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
+
|
|
53
|
+
DOCUMENT_CONTENT = DocumentAttributes.DOCUMENT_CONTENT
|
|
54
|
+
DOCUMENT_ID = DocumentAttributes.DOCUMENT_ID
|
|
55
|
+
DOCUMENT_METADATA = DocumentAttributes.DOCUMENT_METADATA
|
|
56
|
+
EMBEDDING_EMBEDDINGS = SpanAttributes.EMBEDDING_EMBEDDINGS
|
|
57
|
+
EMBEDDING_MODEL_NAME = SpanAttributes.EMBEDDING_MODEL_NAME
|
|
58
|
+
EMBEDDING_TEXT = EmbeddingAttributes.EMBEDDING_TEXT
|
|
59
|
+
EMBEDDING_VECTOR = EmbeddingAttributes.EMBEDDING_VECTOR
|
|
60
|
+
INPUT_MIME_TYPE = SpanAttributes.INPUT_MIME_TYPE
|
|
61
|
+
INPUT_VALUE = SpanAttributes.INPUT_VALUE
|
|
62
|
+
LLM_INPUT_MESSAGES = SpanAttributes.LLM_INPUT_MESSAGES
|
|
63
|
+
LLM_INVOCATION_PARAMETERS = SpanAttributes.LLM_INVOCATION_PARAMETERS
|
|
64
|
+
LLM_MODEL_NAME = SpanAttributes.LLM_MODEL_NAME
|
|
65
|
+
LLM_OUTPUT_MESSAGES = SpanAttributes.LLM_OUTPUT_MESSAGES
|
|
66
|
+
LLM_PROMPTS = SpanAttributes.LLM_PROMPTS
|
|
67
|
+
LLM_TOKEN_COUNT_COMPLETION = SpanAttributes.LLM_TOKEN_COUNT_COMPLETION
|
|
68
|
+
LLM_TOKEN_COUNT_PROMPT = SpanAttributes.LLM_TOKEN_COUNT_PROMPT
|
|
69
|
+
LLM_TOKEN_COUNT_TOTAL = SpanAttributes.LLM_TOKEN_COUNT_TOTAL
|
|
70
|
+
MESSAGE_CONTENT = MessageAttributes.MESSAGE_CONTENT
|
|
71
|
+
MESSAGE_FUNCTION_CALL_ARGUMENTS_JSON = MessageAttributes.MESSAGE_FUNCTION_CALL_ARGUMENTS_JSON
|
|
72
|
+
MESSAGE_FUNCTION_CALL_NAME = MessageAttributes.MESSAGE_FUNCTION_CALL_NAME
|
|
73
|
+
MESSAGE_ROLE = MessageAttributes.MESSAGE_ROLE
|
|
74
|
+
MESSAGE_TOOL_CALLS = MessageAttributes.MESSAGE_TOOL_CALLS
|
|
75
|
+
OPENINFERENCE_SPAN_KIND = SpanAttributes.OPENINFERENCE_SPAN_KIND
|
|
76
|
+
OUTPUT_MIME_TYPE = SpanAttributes.OUTPUT_MIME_TYPE
|
|
77
|
+
OUTPUT_VALUE = SpanAttributes.OUTPUT_VALUE
|
|
78
|
+
RETRIEVAL_DOCUMENTS = SpanAttributes.RETRIEVAL_DOCUMENTS
|
|
79
|
+
TOOL_CALL_FUNCTION_ARGUMENTS_JSON = ToolCallAttributes.TOOL_CALL_FUNCTION_ARGUMENTS_JSON
|
|
80
|
+
TOOL_CALL_FUNCTION_NAME = ToolCallAttributes.TOOL_CALL_FUNCTION_NAME
|
|
81
|
+
TOOL_PARAMETERS = SpanAttributes.TOOL_PARAMETERS
|
|
82
|
+
LLM_PROMPT_TEMPLATE = SpanAttributes.LLM_PROMPT_TEMPLATE
|
|
83
|
+
LLM_PROMPT_TEMPLATE_VARIABLES = SpanAttributes.LLM_PROMPT_TEMPLATE_VARIABLES
|
|
52
84
|
|
|
53
85
|
|
|
54
86
|
def decode(otlp_span: otlp.Span) -> Span:
|
|
@@ -186,7 +218,13 @@ def _decode_status(otlp_status: otlp.Status) -> Tuple[SpanStatusCode, StatusMess
|
|
|
186
218
|
|
|
187
219
|
|
|
188
220
|
_SEMANTIC_CONVENTIONS: List[str] = sorted(
|
|
189
|
-
(
|
|
221
|
+
(
|
|
222
|
+
getattr(klass, attr)
|
|
223
|
+
for name in dir(trace)
|
|
224
|
+
if name.endswith("Attributes") and inspect.isclass(klass := getattr(trace, name))
|
|
225
|
+
for attr in dir(klass)
|
|
226
|
+
if attr.isupper()
|
|
227
|
+
),
|
|
190
228
|
reverse=True,
|
|
191
229
|
) # sorted so the longer strings go first
|
|
192
230
|
|
|
@@ -4,12 +4,10 @@ from enum import Enum
|
|
|
4
4
|
from typing import Any, Dict, List, Optional, Union
|
|
5
5
|
from uuid import UUID
|
|
6
6
|
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
EXCEPTION_TYPE,
|
|
12
|
-
)
|
|
7
|
+
# OpenTelemetry semantic-convention attribute keys for exception events,
# defined locally (these are imported elsewhere in this package, e.g. by
# phoenix.trace.otel).
EXCEPTION_TYPE = "exception.type"
EXCEPTION_MESSAGE = "exception.message"
EXCEPTION_ESCAPED = "exception.escaped"
EXCEPTION_STACKTRACE = "exception.stacktrace"
|
|
13
11
|
|
|
14
12
|
|
|
15
13
|
class SpanStatusCode(Enum):
|
|
@@ -2,7 +2,10 @@ import json
|
|
|
2
2
|
from datetime import datetime
|
|
3
3
|
from typing import Any, Dict, Optional
|
|
4
4
|
|
|
5
|
+
from openinference.semconv.trace import SpanAttributes
|
|
6
|
+
|
|
5
7
|
from phoenix.trace.schemas import (
|
|
8
|
+
EXCEPTION_MESSAGE,
|
|
6
9
|
MimeType,
|
|
7
10
|
Span,
|
|
8
11
|
SpanContext,
|
|
@@ -14,11 +17,9 @@ from phoenix.trace.schemas import (
|
|
|
14
17
|
SpanStatusCode,
|
|
15
18
|
TraceID,
|
|
16
19
|
)
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
OUTPUT_MIME_TYPE,
|
|
21
|
-
)
|
|
20
|
+
|
|
21
|
+
INPUT_MIME_TYPE = SpanAttributes.INPUT_MIME_TYPE
|
|
22
|
+
OUTPUT_MIME_TYPE = SpanAttributes.OUTPUT_MIME_TYPE
|
|
22
23
|
|
|
23
24
|
|
|
24
25
|
def json_to_attributes(obj: Optional[Dict[str, Any]]) -> Dict[str, Any]:
|
|
@@ -5,12 +5,7 @@ from enum import Enum
|
|
|
5
5
|
from typing import Any, List
|
|
6
6
|
from uuid import UUID
|
|
7
7
|
|
|
8
|
-
from .schemas import
|
|
9
|
-
Span,
|
|
10
|
-
SpanContext,
|
|
11
|
-
SpanConversationAttributes,
|
|
12
|
-
SpanEvent,
|
|
13
|
-
)
|
|
8
|
+
from phoenix.trace.schemas import Span, SpanContext, SpanConversationAttributes, SpanEvent
|
|
14
9
|
|
|
15
10
|
|
|
16
11
|
class SpanJSONEncoder(json.JSONEncoder):
|
|
@@ -6,23 +6,26 @@ from uuid import UUID, uuid4
|
|
|
6
6
|
from warnings import warn
|
|
7
7
|
|
|
8
8
|
import pandas as pd
|
|
9
|
+
from openinference.semconv.trace import (
|
|
10
|
+
DocumentAttributes,
|
|
11
|
+
RerankerAttributes,
|
|
12
|
+
SpanAttributes,
|
|
13
|
+
)
|
|
9
14
|
from pandas import DataFrame, read_parquet
|
|
10
15
|
from pyarrow import Schema, Table, parquet
|
|
11
16
|
|
|
17
|
+
from phoenix.config import DATASET_DIR, GENERATED_DATASET_NAME_PREFIX, TRACE_DATASET_DIR
|
|
12
18
|
from phoenix.datetime_utils import normalize_timestamps
|
|
13
19
|
from phoenix.trace.errors import InvalidParquetMetadataError
|
|
14
|
-
|
|
15
|
-
from
|
|
16
|
-
from .
|
|
17
|
-
from .
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
from .span_evaluations import Evaluations, SpanEvaluations
|
|
24
|
-
from .span_json_decoder import json_to_span
|
|
25
|
-
from .span_json_encoder import span_to_json
|
|
20
|
+
from phoenix.trace.schemas import ATTRIBUTE_PREFIX, CONTEXT_PREFIX, Span
|
|
21
|
+
from phoenix.trace.span_evaluations import Evaluations, SpanEvaluations
|
|
22
|
+
from phoenix.trace.span_json_decoder import json_to_span
|
|
23
|
+
from phoenix.trace.span_json_encoder import span_to_json
|
|
24
|
+
|
|
25
|
+
DOCUMENT_METADATA = DocumentAttributes.DOCUMENT_METADATA
|
|
26
|
+
RERANKER_INPUT_DOCUMENTS = RerankerAttributes.RERANKER_INPUT_DOCUMENTS
|
|
27
|
+
RERANKER_OUTPUT_DOCUMENTS = RerankerAttributes.RERANKER_OUTPUT_DOCUMENTS
|
|
28
|
+
RETRIEVAL_DOCUMENTS = SpanAttributes.RETRIEVAL_DOCUMENTS
|
|
26
29
|
|
|
27
30
|
# A set of columns that is required
|
|
28
31
|
REQUIRED_COLUMNS = [
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = "3.0.3"
|
|
@@ -1,42 +0,0 @@
|
|
|
1
|
-
import logging
|
|
2
|
-
from importlib.metadata import PackageNotFoundError
|
|
3
|
-
from importlib.util import find_spec
|
|
4
|
-
from typing import (
|
|
5
|
-
Any,
|
|
6
|
-
)
|
|
7
|
-
|
|
8
|
-
from openinference.instrumentation.llama_index._callback import (
|
|
9
|
-
OpenInferenceTraceCallbackHandler as _OpenInferenceTraceCallbackHandler,
|
|
10
|
-
)
|
|
11
|
-
from openinference.instrumentation.llama_index.version import (
|
|
12
|
-
__version__,
|
|
13
|
-
)
|
|
14
|
-
from opentelemetry import trace as trace_api
|
|
15
|
-
from opentelemetry.sdk import trace as trace_sdk
|
|
16
|
-
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
|
|
17
|
-
|
|
18
|
-
from phoenix.trace.exporter import _OpenInferenceExporter
|
|
19
|
-
from phoenix.trace.tracer import _show_deprecation_warnings
|
|
20
|
-
|
|
21
|
-
logger = logging.getLogger(__name__)
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
class OpenInferenceTraceCallbackHandler(_OpenInferenceTraceCallbackHandler):
|
|
25
|
-
"""Callback handler for storing LLM application trace data in OpenInference format.
|
|
26
|
-
OpenInference is an open standard for capturing and storing AI model
|
|
27
|
-
inferences. It enables production LLMapp servers to seamlessly integrate
|
|
28
|
-
with LLM observability solutions such as Arize and Phoenix.
|
|
29
|
-
|
|
30
|
-
For more information on the specification, see
|
|
31
|
-
https://github.com/Arize-ai/openinference
|
|
32
|
-
"""
|
|
33
|
-
|
|
34
|
-
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
|
35
|
-
_show_deprecation_warnings(self, *args, **kwargs)
|
|
36
|
-
if find_spec("llama_index") is None:
|
|
37
|
-
raise PackageNotFoundError(
|
|
38
|
-
"Missing `llama-index`. Install with `pip install llama-index`."
|
|
39
|
-
)
|
|
40
|
-
tracer_provider = trace_sdk.TracerProvider()
|
|
41
|
-
tracer_provider.add_span_processor(SimpleSpanProcessor(_OpenInferenceExporter()))
|
|
42
|
-
super().__init__(trace_api.get_tracer(__name__, __version__, tracer_provider))
|