arize-phoenix 3.16.0__tar.gz → 3.16.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arize-phoenix might be problematic. Click here for more details.
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/PKG-INFO +7 -5
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/README.md +2 -2
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/pyproject.toml +5 -1
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/core/project.py +29 -14
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/core/traces.py +6 -10
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/schema.py +7 -13
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Span.py +3 -3
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/node.py +14 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/app.py +8 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/main.py +6 -0
- arize_phoenix-3.16.2/src/phoenix/server/prometheus.py +75 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/index.js +54 -54
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/dsl/helpers.py +22 -3
- arize_phoenix-3.16.2/src/phoenix/version.py +1 -0
- arize_phoenix-3.16.0/src/phoenix/version.py +0 -1
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/.gitignore +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/IP_NOTICE +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/LICENSE +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/config.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/core/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/core/embedding_dimension.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/core/model.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/core/model_schema.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/core/model_schema_adapter.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/datasets/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/datasets/dataset.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/datasets/errors.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/datasets/fixtures.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/datasets/schema.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/datasets/validation.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/datetime_utils.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/exceptions.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/evaluators.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/functions/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/functions/classify.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/functions/executor.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/functions/generate.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/functions/processing.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/models/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/models/anthropic.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/models/base.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/models/bedrock.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/models/litellm.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/models/openai.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/models/rate_limiters.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/models/vertex.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/models/vertexai.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/retrievals.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/templates/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/templates/default_templates.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/templates/template.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/utils/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/experimental/evals/utils/threads.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/metrics/README.md +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/metrics/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/metrics/binning.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/metrics/metrics.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/metrics/mixins.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/metrics/retrieval_metrics.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/metrics/timeseries.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/metrics/wrappers.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/pointcloud/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/pointcloud/clustering.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/pointcloud/pointcloud.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/pointcloud/projectors.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/pointcloud/umap_parameters.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/py.typed +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/context.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/helpers.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/input_types/ClusterInput.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/input_types/Coordinates.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/input_types/DataQualityMetricInput.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/input_types/DimensionFilter.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/input_types/DimensionInput.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/input_types/Granularity.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/input_types/PerformanceMetricInput.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/input_types/SpanSort.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/input_types/TimeRange.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/input_types/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/interceptor.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/routers/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/routers/evaluation_handler.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/routers/span_handler.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/routers/trace_handler.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/routers/utils.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Cluster.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/DataQualityMetric.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Dataset.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/DatasetRole.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/DatasetValues.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Dimension.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/DimensionDataType.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/DimensionShape.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/DimensionType.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/DimensionWithValue.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/DocumentEvaluationSummary.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/DocumentRetrievalMetrics.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/EmbeddingDimension.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/EmbeddingMetadata.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Evaluation.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/EvaluationSummary.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Event.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/EventMetadata.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/ExportEventsMutation.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/ExportedFile.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Functionality.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/MimeType.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Model.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/NumericRange.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/PerformanceMetric.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Project.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/PromptResponse.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Retrieval.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/ScalarDriftMetricEnum.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/Segments.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/SortDir.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/TimeSeries.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/UMAPPoints.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/ValidationResult.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/VectorDriftMetricEnum.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/api/types/pagination.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/apple-touch-icon-114x114.png +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/apple-touch-icon-120x120.png +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/apple-touch-icon-144x144.png +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/apple-touch-icon-152x152.png +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/apple-touch-icon-180x180.png +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/apple-touch-icon-72x72.png +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/apple-touch-icon-76x76.png +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/apple-touch-icon.png +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/favicon.ico +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/index.css +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/static/modernizr.js +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/templates/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/templates/index.html +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/server/thread_server.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/services.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/session/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/session/client.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/session/data_extractor.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/session/evaluation.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/session/session.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/storage/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/storage/span_store/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/storage/span_store/text_file.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/dsl/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/dsl/filter.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/dsl/missing.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/dsl/query.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/errors.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/evaluation_conventions.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/exporter.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/fixtures.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/langchain/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/langchain/instrumentor.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/llama_index/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/llama_index/callback.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/openai/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/openai/instrumentor.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/otel.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/projects.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/schemas.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/span_evaluations.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/span_json_decoder.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/span_json_encoder.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/trace_dataset.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/utils.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/v1/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/v1/evaluation_pb2.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/trace/v1/evaluation_pb2.pyi +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/utilities/__init__.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/utilities/error_handling.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/utilities/logging.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/utilities/project.py +0 -0
- {arize_phoenix-3.16.0 → arize_phoenix-3.16.2}/src/phoenix/utilities/span_store.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
|
-
Metadata-Version: 2.
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
2
|
Name: arize-phoenix
|
|
3
|
-
Version: 3.16.
|
|
3
|
+
Version: 3.16.2
|
|
4
4
|
Summary: AI Observability and Evaluation
|
|
5
5
|
Project-URL: Documentation, https://docs.arize.com/phoenix/
|
|
6
6
|
Project-URL: Issues, https://github.com/Arize-ai/phoenix/issues
|
|
@@ -17,7 +17,6 @@ Classifier: Programming Language :: Python :: 3.10
|
|
|
17
17
|
Classifier: Programming Language :: Python :: 3.11
|
|
18
18
|
Classifier: Programming Language :: Python :: 3.12
|
|
19
19
|
Requires-Python: <3.13,>=3.8
|
|
20
|
-
Requires-Dist: ddsketch
|
|
21
20
|
Requires-Dist: hdbscan>=0.8.33
|
|
22
21
|
Requires-Dist: jinja2
|
|
23
22
|
Requires-Dist: numpy
|
|
@@ -44,6 +43,8 @@ Requires-Dist: typing-extensions>=4.6; python_version >= '3.12'
|
|
|
44
43
|
Requires-Dist: umap-learn
|
|
45
44
|
Requires-Dist: uvicorn
|
|
46
45
|
Requires-Dist: wrapt
|
|
46
|
+
Provides-Extra: container
|
|
47
|
+
Requires-Dist: prometheus-client; extra == 'container'
|
|
47
48
|
Provides-Extra: dev
|
|
48
49
|
Requires-Dist: anthropic; extra == 'dev'
|
|
49
50
|
Requires-Dist: arize[autoembeddings,llm-evaluation]; extra == 'dev'
|
|
@@ -57,6 +58,7 @@ Requires-Dist: llama-index>=0.10.3; extra == 'dev'
|
|
|
57
58
|
Requires-Dist: nbqa; extra == 'dev'
|
|
58
59
|
Requires-Dist: pandas-stubs<=2.0.2.230605; extra == 'dev'
|
|
59
60
|
Requires-Dist: pre-commit; extra == 'dev'
|
|
61
|
+
Requires-Dist: prometheus-client; extra == 'dev'
|
|
60
62
|
Requires-Dist: pytest-asyncio; extra == 'dev'
|
|
61
63
|
Requires-Dist: pytest-cov; extra == 'dev'
|
|
62
64
|
Requires-Dist: pytest-lazy-fixture; extra == 'dev'
|
|
@@ -109,7 +111,7 @@ Phoenix provides MLOps and LLMOps insights at lightning speed with zero-config o
|
|
|
109
111
|
- **LLM Traces** - Trace through the execution of your LLM Application to understand the internals of your LLM Application and to troubleshoot problems related to things like retrieval and tool execution.
|
|
110
112
|
- **LLM Evals** - Leverage the power of large language models to evaluate your generative model or application's relevance, toxicity, and more.
|
|
111
113
|
- **Embedding Analysis** - Explore embedding point-clouds and identify clusters of high drift and performance degradation.
|
|
112
|
-
- **RAG Analysis** - Visualize your generative application's search and retrieval process to
|
|
114
|
+
- **RAG Analysis** - Visualize your generative application's search and retrieval process to identify problems and improve your RAG pipeline.
|
|
113
115
|
- **Structured Data Analysis** - Statistically analyze your structured data by performing A/B analysis, temporal drift analysis, and more.
|
|
114
116
|
|
|
115
117
|
**Table of Contents**
|
|
@@ -230,7 +232,7 @@ session = px.launch_app()
|
|
|
230
232
|
|
|
231
233
|
# Once you have started a Phoenix server, you can start your LangChain application with the OpenInferenceTracer as a callback. To do this, you will have to instrument your LangChain application with the tracer:
|
|
232
234
|
|
|
233
|
-
from phoenix.trace.langchain import
|
|
235
|
+
from phoenix.trace.langchain import LangChainInstrumentor
|
|
234
236
|
|
|
235
237
|
# By default, the traces will be exported to the locally running Phoenix server.
|
|
236
238
|
LangChainInstrumentor().instrument()
|
|
@@ -34,7 +34,7 @@ Phoenix provides MLOps and LLMOps insights at lightning speed with zero-config o
|
|
|
34
34
|
- **LLM Traces** - Trace through the execution of your LLM Application to understand the internals of your LLM Application and to troubleshoot problems related to things like retrieval and tool execution.
|
|
35
35
|
- **LLM Evals** - Leverage the power of large language models to evaluate your generative model or application's relevance, toxicity, and more.
|
|
36
36
|
- **Embedding Analysis** - Explore embedding point-clouds and identify clusters of high drift and performance degradation.
|
|
37
|
-
- **RAG Analysis** - Visualize your generative application's search and retrieval process to
|
|
37
|
+
- **RAG Analysis** - Visualize your generative application's search and retrieval process to identify problems and improve your RAG pipeline.
|
|
38
38
|
- **Structured Data Analysis** - Statistically analyze your structured data by performing A/B analysis, temporal drift analysis, and more.
|
|
39
39
|
|
|
40
40
|
**Table of Contents**
|
|
@@ -155,7 +155,7 @@ session = px.launch_app()
|
|
|
155
155
|
|
|
156
156
|
# Once you have started a Phoenix server, you can start your LangChain application with the OpenInferenceTracer as a callback. To do this, you will have to instrument your LangChain application with the tracer:
|
|
157
157
|
|
|
158
|
-
from phoenix.trace.langchain import
|
|
158
|
+
from phoenix.trace.langchain import LangChainInstrumentor
|
|
159
159
|
|
|
160
160
|
# By default, the traces will be exported to the locally running Phoenix server.
|
|
161
161
|
LangChainInstrumentor().instrument()
|
|
@@ -40,7 +40,6 @@ dependencies = [
|
|
|
40
40
|
"wrapt",
|
|
41
41
|
"sortedcontainers",
|
|
42
42
|
"protobuf>=3.20, <5.0",
|
|
43
|
-
"ddsketch",
|
|
44
43
|
"tqdm",
|
|
45
44
|
"requests",
|
|
46
45
|
"opentelemetry-sdk",
|
|
@@ -73,6 +72,7 @@ dev = [
|
|
|
73
72
|
"litellm>=1.0.3",
|
|
74
73
|
"google-cloud-aiplatform>=1.3",
|
|
75
74
|
"anthropic",
|
|
75
|
+
"prometheus_client",
|
|
76
76
|
]
|
|
77
77
|
evals = [
|
|
78
78
|
"arize-phoenix-evals>=0.3.0",
|
|
@@ -85,6 +85,9 @@ llama-index = [
|
|
|
85
85
|
"llama-index-callbacks-arize-phoenix>=0.1.2",
|
|
86
86
|
"openinference-instrumentation-llama-index>=1.2.0",
|
|
87
87
|
]
|
|
88
|
+
container = [
|
|
89
|
+
"prometheus-client",
|
|
90
|
+
]
|
|
88
91
|
|
|
89
92
|
[project.urls]
|
|
90
93
|
Documentation = "https://docs.arize.com/phoenix/"
|
|
@@ -145,6 +148,7 @@ dependencies = [
|
|
|
145
148
|
"types-setuptools",
|
|
146
149
|
"openai>=1.0.0",
|
|
147
150
|
"litellm>=1.0.3",
|
|
151
|
+
"prometheus_client",
|
|
148
152
|
]
|
|
149
153
|
|
|
150
154
|
[tool.hatch.envs.style]
|
|
@@ -13,13 +13,13 @@ from typing import (
|
|
|
13
13
|
Mapping,
|
|
14
14
|
Optional,
|
|
15
15
|
Set,
|
|
16
|
+
Sized,
|
|
16
17
|
Tuple,
|
|
17
18
|
Union,
|
|
18
19
|
cast,
|
|
19
20
|
)
|
|
20
21
|
|
|
21
22
|
import numpy as np
|
|
22
|
-
from ddsketch import DDSketch
|
|
23
23
|
from google.protobuf.json_format import MessageToDict
|
|
24
24
|
from openinference.semconv.trace import SpanAttributes
|
|
25
25
|
from pandas import DataFrame, Index, MultiIndex
|
|
@@ -215,10 +215,15 @@ class _Spans:
|
|
|
215
215
|
self._start_time_sorted_root_spans: SortedKeyList[WrappedSpan] = SortedKeyList(
|
|
216
216
|
key=lambda span: span.start_time,
|
|
217
217
|
)
|
|
218
|
+
"""
|
|
219
|
+
A root span is defined to be a span whose parent span is not in our collection.
|
|
220
|
+
This includes spans whose parent is None and spans whose parent has not arrived
|
|
221
|
+
(or will not arrive). For spans whose parent is not None, the root span status
|
|
222
|
+
is temporary and will be revoked when its parent span arrives.
|
|
223
|
+
"""
|
|
218
224
|
self._latency_sorted_root_spans: SortedKeyList[WrappedSpan] = SortedKeyList(
|
|
219
225
|
key=lambda span: span[ComputedAttributes.LATENCY_MS],
|
|
220
226
|
)
|
|
221
|
-
self._root_span_latency_ms_sketch = DDSketch()
|
|
222
227
|
self._token_count_total: int = 0
|
|
223
228
|
self._last_updated_at: Optional[datetime] = None
|
|
224
229
|
|
|
@@ -284,7 +289,15 @@ class _Spans:
|
|
|
284
289
|
def root_span_latency_ms_quantiles(self, probability: float) -> Optional[float]:
|
|
285
290
|
"""Root span latency quantiles in milliseconds"""
|
|
286
291
|
with self._lock:
|
|
287
|
-
|
|
292
|
+
spans = self._latency_sorted_root_spans
|
|
293
|
+
if not (n := len(spans)):
|
|
294
|
+
return None
|
|
295
|
+
if probability >= 1:
|
|
296
|
+
return cast(float, spans[-1][ComputedAttributes.LATENCY_MS])
|
|
297
|
+
if probability <= 0:
|
|
298
|
+
return cast(float, spans[0][ComputedAttributes.LATENCY_MS])
|
|
299
|
+
k = max(0, round(n * probability) - 1)
|
|
300
|
+
return cast(float, spans[k][ComputedAttributes.LATENCY_MS])
|
|
288
301
|
|
|
289
302
|
def get_descendant_spans(self, span_id: SpanID) -> Iterator[WrappedSpan]:
|
|
290
303
|
for span in self._get_descendant_spans(span_id):
|
|
@@ -352,26 +365,27 @@ class _Spans:
|
|
|
352
365
|
return
|
|
353
366
|
|
|
354
367
|
parent_span_id = span.parent_id
|
|
355
|
-
|
|
356
|
-
if not is_root_span:
|
|
368
|
+
if parent_span_id is not None:
|
|
357
369
|
self._child_spans[parent_span_id].add(span)
|
|
358
370
|
self._parent_span_ids[span_id] = parent_span_id
|
|
359
371
|
|
|
372
|
+
for child_span in self._child_spans.get(span_id, ()):
|
|
373
|
+
# A root span is a span whose parent span is not in our collection.
|
|
374
|
+
# Now that their parent span has arrived, they are no longer root spans.
|
|
375
|
+
self._start_time_sorted_root_spans.remove(child_span)
|
|
376
|
+
self._latency_sorted_root_spans.remove(child_span)
|
|
377
|
+
|
|
360
378
|
# Add computed attributes to span
|
|
361
379
|
start_time = span.start_time
|
|
362
380
|
end_time = span.end_time
|
|
363
|
-
span[ComputedAttributes.LATENCY_MS] =
|
|
364
|
-
end_time - start_time
|
|
365
|
-
).total_seconds() * 1000
|
|
366
|
-
if is_root_span:
|
|
367
|
-
self._root_span_latency_ms_sketch.add(latency)
|
|
381
|
+
span[ComputedAttributes.LATENCY_MS] = (end_time - start_time).total_seconds() * 1000
|
|
368
382
|
span[ComputedAttributes.ERROR_COUNT] = int(span.status_code is SpanStatusCode.ERROR)
|
|
369
383
|
|
|
370
384
|
# Store the new span (after adding computed attributes)
|
|
371
385
|
self._spans[span_id] = span
|
|
372
386
|
self._traces[span.context.trace_id].add(span)
|
|
373
387
|
self._start_time_sorted_spans.add(span)
|
|
374
|
-
if
|
|
388
|
+
if parent_span_id is None or parent_span_id not in self._spans:
|
|
375
389
|
self._start_time_sorted_root_spans.add(span)
|
|
376
390
|
self._latency_sorted_root_spans.add(span)
|
|
377
391
|
self._propagate_cumulative_values(span)
|
|
@@ -386,9 +400,10 @@ class _Spans:
|
|
|
386
400
|
span_id = span.context.span_id
|
|
387
401
|
if token_count_update := span.attributes.get(SpanAttributes.LLM_TOKEN_COUNT_TOTAL):
|
|
388
402
|
self._token_count_total += token_count_update
|
|
389
|
-
if
|
|
390
|
-
span.attributes.get(SpanAttributes.RETRIEVAL_DOCUMENTS)
|
|
391
|
-
|
|
403
|
+
if isinstance(
|
|
404
|
+
(retrieval_documents := span.attributes.get(SpanAttributes.RETRIEVAL_DOCUMENTS)),
|
|
405
|
+
Sized,
|
|
406
|
+
) and (num_documents_update := len(retrieval_documents)):
|
|
392
407
|
self._num_documents[span_id] += num_documents_update
|
|
393
408
|
|
|
394
409
|
def _propagate_cumulative_values(self, span: WrappedSpan) -> None:
|
|
@@ -46,17 +46,13 @@ class Traces:
|
|
|
46
46
|
yield project_id, project_name, project
|
|
47
47
|
|
|
48
48
|
def archive_project(self, id: int) -> Optional["Project"]:
|
|
49
|
+
if id == 0:
|
|
50
|
+
raise ValueError("Cannot archive the default project")
|
|
49
51
|
with self._lock:
|
|
50
|
-
|
|
51
|
-
project_id:
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
}
|
|
55
|
-
if len(active_projects) <= 1:
|
|
56
|
-
return None
|
|
57
|
-
if project := active_projects.get(id):
|
|
58
|
-
project.archive()
|
|
59
|
-
return project
|
|
52
|
+
for project_id, _, project in self.get_projects():
|
|
53
|
+
if id == project_id:
|
|
54
|
+
project.archive()
|
|
55
|
+
return project
|
|
60
56
|
return None
|
|
61
57
|
|
|
62
58
|
def put(
|
|
@@ -31,7 +31,7 @@ from .types.Event import create_event_id, unpack_event_id
|
|
|
31
31
|
from .types.ExportEventsMutation import ExportEventsMutation
|
|
32
32
|
from .types.Functionality import Functionality
|
|
33
33
|
from .types.Model import Model
|
|
34
|
-
from .types.node import GlobalID, Node, from_global_id
|
|
34
|
+
from .types.node import GlobalID, Node, from_global_id, from_global_id_with_expected_type
|
|
35
35
|
from .types.pagination import Connection, ConnectionArgs, Cursor, connection_from_list
|
|
36
36
|
|
|
37
37
|
|
|
@@ -230,22 +230,16 @@ class Query:
|
|
|
230
230
|
class Mutation(ExportEventsMutation):
|
|
231
231
|
@strawberry.mutation
|
|
232
232
|
def delete_project(self, info: Info[Context, None], id: GlobalID) -> Query:
|
|
233
|
-
if (traces := info.context.traces) is None:
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
if type_name != "Project":
|
|
237
|
-
return Query()
|
|
238
|
-
traces.archive_project(node_id)
|
|
233
|
+
if (traces := info.context.traces) is not None:
|
|
234
|
+
node_id = from_global_id_with_expected_type(str(id), "Project")
|
|
235
|
+
traces.archive_project(node_id)
|
|
239
236
|
return Query()
|
|
240
237
|
|
|
241
238
|
@strawberry.mutation
|
|
242
239
|
def archive_project(self, info: Info[Context, None], id: GlobalID) -> Query:
|
|
243
|
-
if (traces := info.context.traces) is None:
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
if type_name != "Project":
|
|
247
|
-
return Query()
|
|
248
|
-
traces.archive_project(node_id)
|
|
240
|
+
if (traces := info.context.traces) is not None:
|
|
241
|
+
node_id = from_global_id_with_expected_type(str(id), "Project")
|
|
242
|
+
traces.archive_project(node_id)
|
|
249
243
|
return Query()
|
|
250
244
|
|
|
251
245
|
|
|
@@ -2,7 +2,7 @@ import json
|
|
|
2
2
|
from collections import defaultdict
|
|
3
3
|
from datetime import datetime
|
|
4
4
|
from enum import Enum
|
|
5
|
-
from typing import Any, DefaultDict, Dict, List, Mapping, Optional, Sized, cast
|
|
5
|
+
from typing import Any, DefaultDict, Dict, Iterable, List, Mapping, Optional, Sized, cast
|
|
6
6
|
|
|
7
7
|
import numpy as np
|
|
8
8
|
import strawberry
|
|
@@ -339,12 +339,12 @@ def _hide_embedding_vectors(
|
|
|
339
339
|
attributes: Mapping[str, Any],
|
|
340
340
|
) -> Dict[str, Any]:
|
|
341
341
|
_attributes = dict(attributes)
|
|
342
|
-
if not (embeddings := _attributes.get(EMBEDDING_EMBEDDINGS)):
|
|
342
|
+
if not isinstance((embeddings := _attributes.get(EMBEDDING_EMBEDDINGS)), Iterable):
|
|
343
343
|
return _attributes
|
|
344
344
|
_embeddings = []
|
|
345
345
|
for embedding in embeddings:
|
|
346
346
|
_embedding = dict(embedding)
|
|
347
|
-
if vector := _embedding.get(EMBEDDING_VECTOR):
|
|
347
|
+
if isinstance((vector := _embedding.get(EMBEDDING_VECTOR)), Sized):
|
|
348
348
|
_embedding[EMBEDDING_VECTOR] = f"<{len(vector)} dimensional vector>"
|
|
349
349
|
_embeddings.append(_embedding)
|
|
350
350
|
_attributes[EMBEDDING_EMBEDDINGS] = _embeddings
|
|
@@ -30,6 +30,20 @@ def from_global_id(global_id: str) -> Tuple[str, int]:
|
|
|
30
30
|
return type_name, int(node_id)
|
|
31
31
|
|
|
32
32
|
|
|
33
|
+
def from_global_id_with_expected_type(global_id: str, expected_type_name: str) -> int:
    """
    Decode the given global id and return the numeric node id, verifying
    that the encoded type name matches the expected one.

    Raises:
        ValueError: if the decoded type name differs from
            ``expected_type_name``.
    """
    type_name, node_id = from_global_id(global_id)
    if type_name == expected_type_name:
        return node_id
    raise ValueError(
        f"The node id must correspond to a node of type {expected_type_name}, "
        f"but instead corresponds to a node of type: {type_name}"
    )
|
45
|
+
|
|
46
|
+
|
|
33
47
|
class GlobalIDValueError(ValueError):
|
|
34
48
|
"""GlobalID value error, usually related to parsing or serialization."""
|
|
35
49
|
|
|
@@ -151,6 +151,7 @@ def create_app(
|
|
|
151
151
|
span_store: Optional[SpanStore] = None,
|
|
152
152
|
debug: bool = False,
|
|
153
153
|
read_only: bool = False,
|
|
154
|
+
enable_prometheus: bool = False,
|
|
154
155
|
) -> Starlette:
|
|
155
156
|
graphql = GraphQLWithContext(
|
|
156
157
|
schema=schema,
|
|
@@ -160,9 +161,16 @@ def create_app(
|
|
|
160
161
|
export_path=export_path,
|
|
161
162
|
graphiql=True,
|
|
162
163
|
)
|
|
164
|
+
if enable_prometheus:
|
|
165
|
+
from phoenix.server.prometheus import PrometheusMiddleware
|
|
166
|
+
|
|
167
|
+
prometheus_middlewares = [Middleware(PrometheusMiddleware)]
|
|
168
|
+
else:
|
|
169
|
+
prometheus_middlewares = []
|
|
163
170
|
return Starlette(
|
|
164
171
|
middleware=[
|
|
165
172
|
Middleware(HeadersMiddleware),
|
|
173
|
+
*prometheus_middlewares,
|
|
166
174
|
],
|
|
167
175
|
debug=debug,
|
|
168
176
|
routes=(
|
|
@@ -129,6 +129,7 @@ if __name__ == "__main__":
|
|
|
129
129
|
parser.add_argument("--no-internet", action="store_true")
|
|
130
130
|
parser.add_argument("--umap_params", type=str, required=False, default=DEFAULT_UMAP_PARAMS_STR)
|
|
131
131
|
parser.add_argument("--debug", action="store_false")
|
|
132
|
+
parser.add_argument("--enable-prometheus", type=bool, default=False)
|
|
132
133
|
subparsers = parser.add_subparsers(dest="command", required=True)
|
|
133
134
|
serve_parser = subparsers.add_parser("serve")
|
|
134
135
|
datasets_parser = subparsers.add_parser("datasets")
|
|
@@ -223,6 +224,10 @@ if __name__ == "__main__":
|
|
|
223
224
|
)
|
|
224
225
|
read_only = args.read_only
|
|
225
226
|
logger.info(f"Server umap params: {umap_params}")
|
|
227
|
+
if enable_prometheus := args.enable_prometheus:
|
|
228
|
+
from phoenix.server.prometheus import start_prometheus
|
|
229
|
+
|
|
230
|
+
start_prometheus()
|
|
226
231
|
app = create_app(
|
|
227
232
|
export_path=export_path,
|
|
228
233
|
model=model,
|
|
@@ -232,6 +237,7 @@ if __name__ == "__main__":
|
|
|
232
237
|
debug=args.debug,
|
|
233
238
|
read_only=read_only,
|
|
234
239
|
span_store=span_store,
|
|
240
|
+
enable_prometheus=enable_prometheus,
|
|
235
241
|
)
|
|
236
242
|
host = args.host or get_env_host()
|
|
237
243
|
port = args.port or get_env_port()
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from threading import Thread
|
|
3
|
+
|
|
4
|
+
import psutil
|
|
5
|
+
from prometheus_client import (
|
|
6
|
+
Counter,
|
|
7
|
+
Gauge,
|
|
8
|
+
Histogram,
|
|
9
|
+
start_http_server,
|
|
10
|
+
)
|
|
11
|
+
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
|
|
12
|
+
from starlette.requests import Request
|
|
13
|
+
from starlette.responses import Response
|
|
14
|
+
from starlette.routing import Match
|
|
15
|
+
|
|
16
|
+
# Request-latency histogram, labeled by HTTP method and matched route path;
# observed by PrometheusMiddleware.dispatch on each instrumented request.
REQUESTS_PROCESSING_TIME = Histogram(
    name="starlette_requests_processing_time_seconds",
    documentation="Histogram of requests processing time by method and path (in seconds)",
    labelnames=["method", "path"],
)
# Counter of exceptions escaping request handlers, labeled by method,
# matched route path, and the exception's class name.
EXCEPTIONS = Counter(
    name="starlette_exceptions_total",
    documentation="Total count of exceptions raised by method, path and exception type",
    labelnames=["method", "path", "exception_type"],
)
# Memory-usage gauge in bytes; the "type" label distinguishes virtual from
# swap memory. Updated periodically by gather_system_data.
RAM_METRIC = Gauge(
    name="memory_usage_bytes",
    documentation="Memory usage in bytes",
    labelnames=["type"],
)
# Per-core CPU utilization gauge (percent), labeled by core index.
# Updated periodically by gather_system_data.
CPU_METRIC = Gauge(
    name="cpu_usage_percent",
    documentation="CPU usage percent",
    labelnames=["core"],
)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class PrometheusMiddleware(BaseHTTPMiddleware):
    """
    Starlette middleware that records per-request Prometheus metrics:
    a latency histogram for every matched route, and an exception counter
    for requests whose handler raises.
    """

    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        # Label metrics by the route template (e.g. "/foo/{id}") rather
        # than the raw URL, to keep label cardinality bounded.
        matched_path = None
        for route in request.app.routes:
            match, _ = route.matches(request.scope)
            if match is Match.FULL:
                matched_path = route.path
                break
        if matched_path is None:
            # Requests that match no route (e.g. 404s) are not instrumented.
            return await call_next(request)

        method = request.method
        started_at = time.perf_counter()
        try:
            response = await call_next(request)
        except BaseException as exc:
            EXCEPTIONS.labels(
                method=method, path=matched_path, exception_type=type(exc).__name__
            ).inc()
            raise
        # Latency is recorded only for requests that complete without raising.
        elapsed = time.perf_counter() - started_at
        REQUESTS_PROCESSING_TIME.labels(method=method, path=matched_path).observe(elapsed)
        return response
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def start_prometheus(port: int = 9090) -> None:
    """
    Start the Prometheus metrics exporter.

    Launches a daemon thread that continuously samples system RAM and CPU
    usage into the module-level gauges, then exposes all collected metrics
    over HTTP on the given port.

    Args:
        port: TCP port for the metrics HTTP endpoint (default 9090,
            matching the previous hard-coded value).
    """
    # Daemon thread so the sampler never blocks interpreter shutdown.
    Thread(target=gather_system_data, daemon=True).start()
    start_http_server(port)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def gather_system_data() -> None:
    """
    Endless sampling loop (run in a daemon thread by start_prometheus)
    that publishes system memory and CPU usage to the Prometheus gauges.
    """
    while True:
        time.sleep(1)

        RAM_METRIC.labels(type="virtual").set(psutil.virtual_memory().used)
        RAM_METRIC.labels(type="swap").set(psutil.swap_memory().used)

        # cpu_percent blocks for `interval` seconds while sampling, so each
        # loop iteration takes roughly two seconds in total.
        per_core_usage = psutil.cpu_percent(interval=1, percpu=True)
        for core_index, usage in enumerate(per_core_usage):
            CPU_METRIC.labels(core=core_index).set(usage)
|