arize-phoenix 4.28.1__py3-none-any.whl → 4.29.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arize-phoenix has been flagged by the registry; consult the registry's advisory page for details.
- {arize_phoenix-4.28.1.dist-info → arize_phoenix-4.29.0.dist-info}/METADATA +2 -1
- {arize_phoenix-4.28.1.dist-info → arize_phoenix-4.29.0.dist-info}/RECORD +18 -14
- phoenix/otel/__init__.py +22 -0
- phoenix/otel/otel.py +284 -0
- phoenix/otel/settings.py +82 -0
- phoenix/server/api/dataloaders/dataset_example_revisions.py +3 -2
- phoenix/server/api/exceptions.py +41 -0
- phoenix/server/api/mutations/api_key_mutations.py +29 -0
- phoenix/server/api/mutations/auth_mutations.py +4 -2
- phoenix/server/api/mutations/dataset_mutations.py +9 -8
- phoenix/server/api/mutations/experiment_mutations.py +2 -1
- phoenix/server/api/queries.py +9 -8
- phoenix/server/api/routers/v1/experiments.py +4 -4
- phoenix/server/api/schema.py +2 -0
- phoenix/version.py +1 -1
- {arize_phoenix-4.28.1.dist-info → arize_phoenix-4.29.0.dist-info}/WHEEL +0 -0
- {arize_phoenix-4.28.1.dist-info → arize_phoenix-4.29.0.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-4.28.1.dist-info → arize_phoenix-4.29.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.3
|
|
2
2
|
Name: arize-phoenix
|
|
3
|
-
Version: 4.
|
|
3
|
+
Version: 4.29.0
|
|
4
4
|
Summary: AI Observability and Evaluation
|
|
5
5
|
Project-URL: Documentation, https://docs.arize.com/phoenix/
|
|
6
6
|
Project-URL: Issues, https://github.com/Arize-ai/phoenix/issues
|
|
@@ -21,6 +21,7 @@ Requires-Dist: aioitertools
|
|
|
21
21
|
Requires-Dist: aiosqlite
|
|
22
22
|
Requires-Dist: alembic<2,>=1.3.0
|
|
23
23
|
Requires-Dist: arize-phoenix-evals>=0.13.1
|
|
24
|
+
Requires-Dist: arize-phoenix-otel>=0.4.1
|
|
24
25
|
Requires-Dist: cachetools
|
|
25
26
|
Requires-Dist: fastapi
|
|
26
27
|
Requires-Dist: grpcio
|
|
@@ -6,7 +6,7 @@ phoenix/exceptions.py,sha256=n2L2KKuecrdflB9MsCdAYCiSEvGJptIsfRkXMoJle7A,169
|
|
|
6
6
|
phoenix/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
|
7
7
|
phoenix/services.py,sha256=OyML4t2XGnlqF0JXA9_uccL8HslTABxep9Ci7MViKEU,5216
|
|
8
8
|
phoenix/settings.py,sha256=cO-qgis_S27nHirTobYI9hHPfZH18R--WMmxNdsVUwc,273
|
|
9
|
-
phoenix/version.py,sha256=
|
|
9
|
+
phoenix/version.py,sha256=i2svGkhQxPKAQeMjtRMY-YcR__KOdErWKzax-PEH1rY,23
|
|
10
10
|
phoenix/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
11
11
|
phoenix/core/embedding_dimension.py,sha256=zKGbcvwOXgLf-yrJBpQyKtd-LEOPRKHnUToyAU8Owis,87
|
|
12
12
|
phoenix/core/model.py,sha256=km_a--PBHOuA337ClRw9xqhOHhrUT6Rl9pz_zV0JYkQ,4843
|
|
@@ -63,6 +63,9 @@ phoenix/metrics/mixins.py,sha256=moZ5hENIKzUQt2IRhWOd5EFXnoqQkVrpqEqMH7KQzyA,744
|
|
|
63
63
|
phoenix/metrics/retrieval_metrics.py,sha256=XFQPo66h16w7-1AJ92M1VL_BUIXIWxXHGKF_QVOABZI,4384
|
|
64
64
|
phoenix/metrics/timeseries.py,sha256=Cib3E0njJzi0vZpmyADvbakFQA98rIkfDaYAOmsmBz8,6277
|
|
65
65
|
phoenix/metrics/wrappers.py,sha256=umZqa_5lf1wZSFe3FgzxF-qp1xbPdKD54W628GlGCUI,8392
|
|
66
|
+
phoenix/otel/__init__.py,sha256=YvEiD-3aGZs9agwLNCXU34ofV3G-Q-dolfsiinOJuT0,407
|
|
67
|
+
phoenix/otel/otel.py,sha256=6tlr7VHdVzykQ3Pu5VjvDcjUBKG6GV5fJVZCluXi_d0,11793
|
|
68
|
+
phoenix/otel/settings.py,sha256=Qr2-RkgLQRfLhJqtLpEkSpqns7qLjPoOvpEOTqeSohM,3026
|
|
66
69
|
phoenix/pointcloud/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
|
67
70
|
phoenix/pointcloud/clustering.py,sha256=IzcG67kJ2hPP7pcqVmKPSL_6gKRonKdOT3bCtbTOqnk,820
|
|
68
71
|
phoenix/pointcloud/pointcloud.py,sha256=4zAIkKs2xOUbchpj4XDAV-iPMXrfAJ15TG6rlIYGrao,2145
|
|
@@ -80,14 +83,15 @@ phoenix/server/thread_server.py,sha256=RwXQGP_QhGD7le6WB7xEygEEuwBl5Ck_Zo8xGIYGi
|
|
|
80
83
|
phoenix/server/types.py,sha256=S2dReLNboR2nzjRK5j3MUyUDqu6AQFD7KRwJkeKj1q4,3609
|
|
81
84
|
phoenix/server/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
82
85
|
phoenix/server/api/context.py,sha256=WuhGT2549C5Yc7pWj2S7NaPeT4a-N-_mmz-Vg5bUkI8,3637
|
|
86
|
+
phoenix/server/api/exceptions.py,sha256=KdAzgwNan-wQ7THDrSoeJU2k9zWQVcH6lRiB462VsRA,990
|
|
83
87
|
phoenix/server/api/interceptor.py,sha256=ykDnoC_apUd-llVli3m1CW18kNSIgjz2qZ6m5JmPDu8,1294
|
|
84
|
-
phoenix/server/api/queries.py,sha256=
|
|
85
|
-
phoenix/server/api/schema.py,sha256=
|
|
88
|
+
phoenix/server/api/queries.py,sha256=hUzeHOUWuBQ-kjXh13-d5LgJfkbB8XSpFaHJX4YXpC8,23875
|
|
89
|
+
phoenix/server/api/schema.py,sha256=4L2m6QXhaV13YPTZCEZ3hqCPQFHZOy3QnJVLRYQFzpg,548
|
|
86
90
|
phoenix/server/api/utils.py,sha256=Kl47G-1A7QKTDrc75BU2QK6HupsG6MWuXxy351FOfKQ,858
|
|
87
91
|
phoenix/server/api/dataloaders/__init__.py,sha256=TrOGnU_SD_vEIxOE_dm8HrD5C2ScLFQ4xQ7f8r-E76s,3064
|
|
88
92
|
phoenix/server/api/dataloaders/annotation_summaries.py,sha256=Wv8AORZoGd5TJ4Y-em8iqJu87AMpZP7lWOTr-SML-x8,5560
|
|
89
93
|
phoenix/server/api/dataloaders/average_experiment_run_latency.py,sha256=q091UmkXx37OBKh7L-GJ5LXHyRXfX2w4XTk1NMHtPpw,1827
|
|
90
|
-
phoenix/server/api/dataloaders/dataset_example_revisions.py,sha256=
|
|
94
|
+
phoenix/server/api/dataloaders/dataset_example_revisions.py,sha256=rZhJoIYUGgYhXwVBtq5u0bqtHmIQ2Sh6HNnJsSGIXis,3767
|
|
91
95
|
phoenix/server/api/dataloaders/dataset_example_spans.py,sha256=-TjdyyJv2c2JiN1OXu6MMmQ-BEKlHXucEDcuObeRVsU,1416
|
|
92
96
|
phoenix/server/api/dataloaders/document_evaluation_summaries.py,sha256=5XOom2KRAmCwPmtlraiZOSl3vhfaW-eiiYkmetAEalw,5616
|
|
93
97
|
phoenix/server/api/dataloaders/document_evaluations.py,sha256=V6sE34jON_qFxt7eArJbktykAsty-gnBZHlEkORcj0E,1296
|
|
@@ -140,11 +144,11 @@ phoenix/server/api/input_types/TraceAnnotationSort.py,sha256=BzwiUnMh2VsgQYnhDlb
|
|
|
140
144
|
phoenix/server/api/input_types/UserRoleInput.py,sha256=xxhFe0ITZOgRVEJbVem_W6F1Ip_H6xDENdQqMMx-kKE,129
|
|
141
145
|
phoenix/server/api/input_types/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
142
146
|
phoenix/server/api/mutations/__init__.py,sha256=JS3WRqYxNeoaLsKjODFvJnZb6CF19IFW-lfOsUq3rtM,1074
|
|
143
|
-
phoenix/server/api/mutations/api_key_mutations.py,sha256=
|
|
147
|
+
phoenix/server/api/mutations/api_key_mutations.py,sha256=5GDsP0gURgXokbajc3GJEor3AGt6BGd1IMWMg054E7U,4683
|
|
144
148
|
phoenix/server/api/mutations/auth.py,sha256=8o4tTfGCPkpUauuB9ijPH84Od77UX_UrQWfmUsnujI4,524
|
|
145
|
-
phoenix/server/api/mutations/auth_mutations.py,sha256=
|
|
146
|
-
phoenix/server/api/mutations/dataset_mutations.py,sha256=
|
|
147
|
-
phoenix/server/api/mutations/experiment_mutations.py,sha256=
|
|
149
|
+
phoenix/server/api/mutations/auth_mutations.py,sha256=XLCxmsjyVp1riGWxUhVUOiGhIFs_ZmOfZM77vs_RLDw,2182
|
|
150
|
+
phoenix/server/api/mutations/dataset_mutations.py,sha256=8S6qjmraSBxA7ioNogTQPp6q27ZdvdAn6yt0Z4fmOI0,27096
|
|
151
|
+
phoenix/server/api/mutations/experiment_mutations.py,sha256=Z2xPrK8J117l5XWN-IvdKpykWIiVXysaUhzuwbIiLtk,3226
|
|
148
152
|
phoenix/server/api/mutations/export_events_mutations.py,sha256=t_wYBxaqvBJYRoHslh3Bmoxmwlzoy0u8SsBKWIKN5hE,4028
|
|
149
153
|
phoenix/server/api/mutations/project_mutations.py,sha256=MLm7I97lJ85hTuc1tq8sdYA8Ps5WKMV-bGqeeN-Ey90,2279
|
|
150
154
|
phoenix/server/api/mutations/span_annotations_mutations.py,sha256=DM9gzxrMSAcxwXQ6jNaNGDVgl8oP50LZsBWRYQwLaSo,5955
|
|
@@ -160,7 +164,7 @@ phoenix/server/api/routers/v1/datasets.py,sha256=l3Hlc9AVyvX5GdT9iOXBsV-i4c_vtnC
|
|
|
160
164
|
phoenix/server/api/routers/v1/evaluations.py,sha256=FSfz9MTi8s65F07abDXlb9-y97fDZSYbqsCXpimwO7g,12628
|
|
161
165
|
phoenix/server/api/routers/v1/experiment_evaluations.py,sha256=RTQnjupjmh07xowjq77ajbuAZhzIEfYxA4ZtECvGwOU,4844
|
|
162
166
|
phoenix/server/api/routers/v1/experiment_runs.py,sha256=0G7GgGcZv9dzK47tsPp-p4k5O7W4F_aNRrsNuJN7mho,6393
|
|
163
|
-
phoenix/server/api/routers/v1/experiments.py,sha256=
|
|
167
|
+
phoenix/server/api/routers/v1/experiments.py,sha256=6Ouby3jQDZjeb_nJoD5eH8h2jxINBLrHzCPMsklzcJI,11820
|
|
164
168
|
phoenix/server/api/routers/v1/pydantic_compat.py,sha256=FeK8oe2brqu-djsoqRxiKL4tw5cHmi89OHVfCFxYsAo,2890
|
|
165
169
|
phoenix/server/api/routers/v1/spans.py,sha256=MAkMLrONFtItQxkHJde_Wpvz0jsgydegxVZOkZkRUsU,8781
|
|
166
170
|
phoenix/server/api/routers/v1/traces.py,sha256=HJDmYKMATL40dZEJro6uQ3imbCZBzk3nUun9d21jcDs,7799
|
|
@@ -291,8 +295,8 @@ phoenix/utilities/logging.py,sha256=lDXd6EGaamBNcQxL4vP1au9-i_SXe0OraUDiJOcszSw,
|
|
|
291
295
|
phoenix/utilities/project.py,sha256=8IJuMM4yUMoooPi37sictGj8Etu9rGmq6RFtc9848cQ,436
|
|
292
296
|
phoenix/utilities/re.py,sha256=PDve_OLjRTM8yQQJHC8-n3HdIONi7aNils3ZKRZ5uBM,2045
|
|
293
297
|
phoenix/utilities/span_store.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
294
|
-
arize_phoenix-4.
|
|
295
|
-
arize_phoenix-4.
|
|
296
|
-
arize_phoenix-4.
|
|
297
|
-
arize_phoenix-4.
|
|
298
|
-
arize_phoenix-4.
|
|
298
|
+
arize_phoenix-4.29.0.dist-info/METADATA,sha256=UJiskTOjTLRjXDFO41I1ioPp3KZDU2nmgqMucGUyuAI,11977
|
|
299
|
+
arize_phoenix-4.29.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
|
|
300
|
+
arize_phoenix-4.29.0.dist-info/licenses/IP_NOTICE,sha256=JBqyyCYYxGDfzQ0TtsQgjts41IJoa-hiwDrBjCb9gHM,469
|
|
301
|
+
arize_phoenix-4.29.0.dist-info/licenses/LICENSE,sha256=HFkW9REuMOkvKRACuwLPT0hRydHb3zNg-fdFt94td18,3794
|
|
302
|
+
arize_phoenix-4.29.0.dist-info/RECORD,,
|
phoenix/otel/__init__.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
from opentelemetry.sdk.resources import Resource
|
|
2
|
+
|
|
3
|
+
from .otel import (
|
|
4
|
+
PROJECT_NAME,
|
|
5
|
+
BatchSpanProcessor,
|
|
6
|
+
GRPCSpanExporter,
|
|
7
|
+
HTTPSpanExporter,
|
|
8
|
+
SimpleSpanProcessor,
|
|
9
|
+
TracerProvider,
|
|
10
|
+
register,
|
|
11
|
+
)
|
|
12
|
+
|
|
13
|
+
__all__ = [
|
|
14
|
+
"TracerProvider",
|
|
15
|
+
"SimpleSpanProcessor",
|
|
16
|
+
"BatchSpanProcessor",
|
|
17
|
+
"HTTPSpanExporter",
|
|
18
|
+
"GRPCSpanExporter",
|
|
19
|
+
"Resource",
|
|
20
|
+
"PROJECT_NAME",
|
|
21
|
+
"register",
|
|
22
|
+
]
|
phoenix/otel/otel.py
ADDED
|
@@ -0,0 +1,284 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
import os
|
|
3
|
+
import warnings
|
|
4
|
+
from typing import Any, Dict, List, Optional, Tuple, Union, cast
|
|
5
|
+
from urllib.parse import ParseResult, urlparse
|
|
6
|
+
|
|
7
|
+
from openinference.semconv.resource import ResourceAttributes as _ResourceAttributes
|
|
8
|
+
from opentelemetry import trace as trace_api
|
|
9
|
+
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
|
|
10
|
+
OTLPSpanExporter as _GRPCSpanExporter,
|
|
11
|
+
)
|
|
12
|
+
from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
|
|
13
|
+
OTLPSpanExporter as _HTTPSpanExporter,
|
|
14
|
+
)
|
|
15
|
+
from opentelemetry.sdk.resources import Resource
|
|
16
|
+
from opentelemetry.sdk.trace import SpanProcessor
|
|
17
|
+
from opentelemetry.sdk.trace import TracerProvider as _TracerProvider
|
|
18
|
+
from opentelemetry.sdk.trace.export import BatchSpanProcessor as _BatchSpanProcessor
|
|
19
|
+
from opentelemetry.sdk.trace.export import SimpleSpanProcessor as _SimpleSpanProcessor
|
|
20
|
+
from opentelemetry.sdk.trace.export import SpanExporter
|
|
21
|
+
|
|
22
|
+
from .settings import get_env_client_headers, get_env_collector_endpoint, get_env_project_name
|
|
23
|
+
|
|
24
|
+
PROJECT_NAME = _ResourceAttributes.PROJECT_NAME
|
|
25
|
+
|
|
26
|
+
_DEFAULT_GRPC_PORT = 4317
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def register(
    *,
    endpoint: Optional[str] = None,
    project_name: Optional[str] = None,
    batch: bool = False,
    set_global_tracer: bool = True,
    headers: Optional[Dict[str, str]] = None,
    verbose: bool = True,
) -> _TracerProvider:
    """
    Create a Phoenix-configured OpenTelemetry TracerProvider for OpenInference tracing.

    The `phoenix.otel` module also exposes drop-in replacements for the
    OpenTelemetry TracerProvider, SimpleSpanProcessor, BatchSpanProcessor,
    HTTPSpanExporter, and GRPCSpanExporter with Phoenix-aware defaults, for
    further configuration. See https://opentelemetry.io/docs/specs/otel/trace/sdk/.

    Args:
        endpoint (str, optional): Collector endpoint spans are exported to. When
            omitted, it is derived from the environment; the export protocol is
            inferred from the endpoint.
        project_name (str, optional): Project to associate spans with. When
            omitted, the `PHOENIX_PROJECT_NAME` environment variable is used.
        batch (bool): Process spans with a BatchSpanProcessor when True,
            otherwise one at a time with a SimpleSpanProcessor.
        set_global_tracer (bool): When True (default), install the provider as
            the global OpenTelemetry tracer provider.
        headers (dict, optional): Extra headers for requests to the collector.
        verbose (bool): Print configuration details to stdout when True.
    """
    project_name = project_name or get_env_project_name()
    resource = Resource.create({PROJECT_NAME: project_name})
    tracer_provider = TracerProvider(resource=resource, verbose=False)

    # Select the processing strategy up front, then attach a single processor.
    processor_cls = BatchSpanProcessor if batch else SimpleSpanProcessor
    span_processor: SpanProcessor = processor_cls(endpoint=endpoint, headers=headers)
    tracer_provider.add_span_processor(span_processor)
    # Mark the processor as a replaceable default so a later explicit
    # `add_span_processor` call swaps it out instead of stacking on top of it.
    tracer_provider._default_processor = True

    if set_global_tracer:
        trace_api.set_tracer_provider(tracer_provider)
        global_provider_msg = (
            "| \n"
            "| `register` has set this TracerProvider as the global OpenTelemetry default.\n"
            "| To disable this behavior, call `register` with `set_global_tracer=False`.\n"
        )
    else:
        global_provider_msg = ""

    details = tracer_provider._tracing_details()
    if verbose:
        print(f"{details}{global_provider_msg}")
    return tracer_provider
|
|
87
|
+
class TracerProvider(_TracerProvider):
    """
    An OpenTelemetry TracerProvider with Phoenix-aware defaults.

    If no resource is supplied, one is created from the environment-configured
    project name. If the collector endpoint can be inferred as OTLP/HTTP or
    OTLP/gRPC, a default SimpleSpanProcessor is installed; that default is
    removed (not stacked) the first time `add_span_processor` is called.
    """

    def __init__(
        self, *args: Any, endpoint: Optional[str] = None, verbose: bool = True, **kwargs: Any
    ):
        # Bind against the parent signature so defaults can be injected by
        # name regardless of whether callers passed positionally or by keyword.
        sig = inspect.signature(_TracerProvider)
        bound_args = sig.bind_partial(*args, **kwargs)
        bound_args.apply_defaults()
        if bound_args.arguments.get("resource") is None:
            # Default the resource's project name from the environment.
            bound_args.arguments["resource"] = Resource.create(
                {PROJECT_NAME: get_env_project_name()}
            )
        super().__init__(**bound_args.arguments)

        parsed_url, endpoint = _normalized_endpoint(endpoint)
        self._default_processor = False

        # Auto-install a default exporter only when the endpoint's protocol
        # can be confidently inferred; otherwise leave the provider bare.
        if _maybe_http_endpoint(parsed_url):
            http_exporter: SpanExporter = HTTPSpanExporter(endpoint=endpoint)
            self.add_span_processor(SimpleSpanProcessor(span_exporter=http_exporter))
            self._default_processor = True
        elif _maybe_grpc_endpoint(parsed_url):
            grpc_exporter: SpanExporter = GRPCSpanExporter(endpoint=endpoint)
            self.add_span_processor(SimpleSpanProcessor(span_exporter=grpc_exporter))
            self._default_processor = True
        if verbose:
            print(self._tracing_details())

    def add_span_processor(self, *args: Any, **kwargs: Any) -> None:
        """
        Add a span processor. If a default processor was auto-installed by
        `__init__` (or by `register`), shut it down and remove it first so the
        user-supplied processor replaces it rather than duplicating exports.
        """
        if self._default_processor:
            self._active_span_processor.shutdown()
            self._active_span_processor._span_processors = tuple()  # remove default processors
            self._default_processor = False
        return super().add_span_processor(*args, **kwargs)

    def _tracing_details(self) -> str:
        """Build a printable, human-readable summary of the tracing configuration."""
        project = self.resource.attributes.get(PROJECT_NAME)
        processor_name: Optional[str] = None
        endpoint: Optional[str] = None
        transport: Optional[str] = None
        headers: Optional[Union[Dict[str, str], str]] = None

        if self._active_span_processor:
            if processors := self._active_span_processor._span_processors:
                if len(processors) == 1:
                    # Single processor: report its exporter's details, with
                    # header values redacted.
                    span_processor = self._active_span_processor._span_processors[0]
                    if exporter := getattr(span_processor, "span_exporter"):
                        processor_name = span_processor.__class__.__name__
                        endpoint = exporter._endpoint
                        transport = _exporter_transport(exporter)
                        headers = _printable_headers(exporter._headers)
                else:
                    # Multiple processors: per-exporter details are ambiguous.
                    processor_name = "Multiple Span Processors"
                    endpoint = "Multiple Span Exporters"
                    transport = "Multiple Span Exporters"
                    headers = "Multiple Span Exporters"

        # Skip the emoji on Windows consoles, which may not render it.
        if os.name == "nt":
            details_header = "OpenTelemetry Tracing Details"
        else:
            details_header = "🔭 OpenTelemetry Tracing Details 🔭"

        configuration_msg = (
            "| Using a default SpanProcessor. `add_span_processor` will overwrite this default.\n"
        )

        details_msg = (
            f"{details_header}\n"
            f"| Phoenix Project: {project}\n"
            f"| Span Processor: {processor_name}\n"
            f"| Collector Endpoint: {endpoint}\n"
            f"| Transport: {transport}\n"
            f"| Transport Headers: {headers}\n"
            "| \n"
            f"{configuration_msg if self._default_processor else ''}"
        )
        return details_msg
165
|
+
class SimpleSpanProcessor(_SimpleSpanProcessor):
    """
    A SimpleSpanProcessor that, when no exporter is supplied, infers one from
    the (possibly environment-derived) collector endpoint: gRPC for a bare
    host on the default OTLP gRPC port, HTTP for an OTLP traces path, and
    HTTP as the warned fallback otherwise.
    """

    def __init__(
        self,
        span_exporter: Optional[SpanExporter] = None,
        endpoint: Optional[str] = None,
        headers: Optional[Dict[str, str]] = None,
    ):
        if span_exporter is None:
            parsed_url, endpoint = _normalized_endpoint(endpoint)
            # The two heuristics are mutually exclusive (path vs. no path),
            # so the check order does not affect the outcome.
            if _maybe_grpc_endpoint(parsed_url):
                span_exporter = GRPCSpanExporter(endpoint=endpoint, headers=headers)
            elif _maybe_http_endpoint(parsed_url):
                span_exporter = HTTPSpanExporter(endpoint=endpoint, headers=headers)
            else:
                warnings.warn("Could not infer collector endpoint protocol, defaulting to HTTP.")
                span_exporter = HTTPSpanExporter(endpoint=endpoint, headers=headers)
        super().__init__(span_exporter)
184
|
+
class BatchSpanProcessor(_BatchSpanProcessor):
    """
    A BatchSpanProcessor that, when no exporter is supplied, infers one from
    the (possibly environment-derived) collector endpoint, warning and
    falling back to HTTP when the protocol cannot be determined.
    """

    def __init__(
        self,
        span_exporter: Optional[SpanExporter] = None,
        endpoint: Optional[str] = None,
        headers: Optional[Dict[str, str]] = None,
    ):
        if span_exporter is None:
            parsed_url, endpoint = _normalized_endpoint(endpoint)
            looks_http = _maybe_http_endpoint(parsed_url)
            looks_grpc = _maybe_grpc_endpoint(parsed_url)
            if not (looks_http or looks_grpc):
                warnings.warn("Could not infer collector endpoint protocol, defaulting to HTTP.")
            if looks_grpc:
                span_exporter = GRPCSpanExporter(endpoint=endpoint, headers=headers)
            else:
                # HTTP both when inferred and as the fallback.
                span_exporter = HTTPSpanExporter(endpoint=endpoint, headers=headers)
        super().__init__(span_exporter)
203
|
+
class HTTPSpanExporter(_HTTPSpanExporter):
    """OTLP/HTTP span exporter with Phoenix-aware endpoint and header defaults."""

    def __init__(self, *args: Any, **kwargs: Any):
        # Bind against the parent signature so defaults can be injected by
        # name regardless of how the caller passed the arguments.
        signature = inspect.signature(_HTTPSpanExporter)
        bound = signature.bind_partial(*args, **kwargs)
        bound.apply_defaults()
        arguments = bound.arguments

        # Fall back to headers configured via environment variables.
        if not arguments.get("headers"):
            arguments["headers"] = get_env_client_headers()

        # Fall back to the normalized (environment-derived) default endpoint.
        if arguments.get("endpoint") is None:
            arguments["endpoint"] = _normalized_endpoint(None)[1]
        super().__init__(**arguments)
218
|
+
class GRPCSpanExporter(_GRPCSpanExporter):
    """OTLP/gRPC span exporter with Phoenix-aware endpoint and header defaults."""

    def __init__(self, *args: Any, **kwargs: Any):
        # Normalize positional/keyword arguments against the parent signature.
        bound = inspect.signature(_GRPCSpanExporter).bind_partial(*args, **kwargs)
        bound.apply_defaults()
        arguments = bound.arguments

        # Fall back to headers configured via environment variables.
        if not arguments.get("headers"):
            arguments["headers"] = get_env_client_headers()

        # Fall back to the normalized (environment-derived) default endpoint.
        if arguments.get("endpoint") is None:
            _, default_endpoint = _normalized_endpoint(None)
            arguments["endpoint"] = default_endpoint
        super().__init__(**arguments)
233
|
+
def _maybe_http_endpoint(parsed_endpoint: ParseResult) -> bool:
|
|
234
|
+
if parsed_endpoint.path == "/v1/traces":
|
|
235
|
+
return True
|
|
236
|
+
return False
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
def _maybe_grpc_endpoint(parsed_endpoint: ParseResult) -> bool:
    """
    Heuristic: a path-less endpoint on the default OTLP gRPC port is gRPC.

    Fix: compare against the module constant ``_DEFAULT_GRPC_PORT`` instead of
    a duplicated magic literal ``4317``, so the heuristic cannot drift from
    the port used by ``_construct_grpc_endpoint``.
    """
    return not parsed_endpoint.path and parsed_endpoint.port == _DEFAULT_GRPC_PORT
245
|
+
def _exporter_transport(exporter: SpanExporter) -> str:
    """Human-readable transport label for a span exporter."""
    if isinstance(exporter, _HTTPSpanExporter):
        return "HTTP"
    elif isinstance(exporter, _GRPCSpanExporter):
        return "gRPC"
    # Unknown exporter type: fall back to its class name.
    return exporter.__class__.__name__
254
|
+
def _printable_headers(headers: Union[List[Tuple[str, str]], Dict[str, str]]) -> Dict[str, str]:
|
|
255
|
+
if isinstance(headers, dict):
|
|
256
|
+
return {key.lower(): "****" for key, _ in headers.items()}
|
|
257
|
+
return {key.lower(): "****" for key, _ in headers}
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
def _construct_http_endpoint(parsed_endpoint: ParseResult) -> ParseResult:
|
|
261
|
+
return parsed_endpoint._replace(path="/v1/traces")
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
def _construct_grpc_endpoint(parsed_endpoint: ParseResult) -> ParseResult:
    """
    Rewrite an endpoint to target the default OTLP gRPC port, replacing any
    explicit port (and dropping any userinfo) in the netloc.
    """
    grpc_netloc = f"{parsed_endpoint.hostname}:{_DEFAULT_GRPC_PORT}"
    return parsed_endpoint._replace(netloc=grpc_netloc)
|
|
268
|
+
# Hosted providers whose default endpoints need a provider-specific rewrite.
_KNOWN_PROVIDERS = {
    "app.phoenix.arize.com": _construct_http_endpoint,
}


def _normalized_endpoint(endpoint: Optional[str]) -> Tuple[ParseResult, str]:
    """
    Normalize a collector endpoint into a parsed URL and its string form.

    An explicit ``endpoint`` is parsed as-is. When ``endpoint`` is None, the
    environment-configured endpoint (or ``http://localhost:6006``) is used:
    known hosted providers get their provider-specific rewrite, everything
    else is pointed at the default gRPC port.
    """
    if endpoint is not None:
        parsed = urlparse(endpoint)
    else:
        base_endpoint = get_env_collector_endpoint() or "http://localhost:6006"
        parsed = urlparse(base_endpoint)
        rewrite = _KNOWN_PROVIDERS.get(parsed.hostname, _construct_grpc_endpoint)
        parsed = rewrite(parsed)
    parsed = cast(ParseResult, parsed)
    return parsed, parsed.geturl()
|
phoenix/otel/settings.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
import os
import urllib
import urllib.parse
from logging import getLogger
from re import compile
from typing import Dict, List, Optional
+
|
|
7
|
+
_logger = getLogger(__name__)

# Environment variables specific to the subpackage
ENV_PHOENIX_COLLECTOR_ENDPOINT = "PHOENIX_COLLECTOR_ENDPOINT"
ENV_PHOENIX_PROJECT_NAME = "PHOENIX_PROJECT_NAME"
ENV_PHOENIX_CLIENT_HEADERS = "PHOENIX_CLIENT_HEADERS"


def get_env_collector_endpoint() -> Optional[str]:
    """Return the collector endpoint configured via the environment, if any."""
    return os.getenv(ENV_PHOENIX_COLLECTOR_ENDPOINT)


def get_env_project_name() -> str:
    """Return the configured project name, falling back to "default"."""
    return os.getenv(ENV_PHOENIX_PROJECT_NAME, "default")


def get_env_client_headers() -> Optional[Dict[str, str]]:
    """Return client headers parsed from the environment, or None when unset/empty."""
    if headers_str := os.getenv(ENV_PHOENIX_CLIENT_HEADERS):
        return parse_env_headers(headers_str)
    return None


# Optional whitespace
_OWS = r"[ \t]*"
# A key contains printable US-ASCII characters except: SP and "(),/:;<=>?@[\]{}
_KEY_FORMAT = r"[\x21\x23-\x27\x2a\x2b\x2d\x2e\x30-\x39\x41-\x5a\x5e-\x7a\x7c\x7e]+"
# A value contains a URL-encoded UTF-8 string. The encoded form can contain any
# printable US-ASCII characters (0x20-0x7f) other than SP, DEL, and ",;/
_VALUE_FORMAT = r"[\x21\x23-\x2b\x2d-\x3a\x3c-\x5b\x5d-\x7e]*"
# A key-value is key=value, with optional whitespace surrounding key and value
_KEY_VALUE_FORMAT = rf"{_OWS}{_KEY_FORMAT}{_OWS}={_OWS}{_VALUE_FORMAT}{_OWS}"

_HEADER_PATTERN = compile(_KEY_VALUE_FORMAT)
_DELIMITER_PATTERN = compile(r"[ \t]*,[ \t]*")


def parse_env_headers(s: str) -> Dict[str, str]:
    """
    Parse ``s``, which is a ``str`` instance containing HTTP headers encoded
    for use in ENV variables per the W3C Baggage HTTP header format at
    https://www.w3.org/TR/baggage/#baggage-http-header-format, except that
    additional semi-colon delimited metadata is not supported.

    If the headers are not urlencoded, a warning is logged and an attempt is
    made to urlencode them. Malformed entries (including entries without an
    ``=`` separator) are skipped with a warning; header values are never
    logged in full.
    """
    headers: Dict[str, str] = {}
    headers_list: List[str] = _DELIMITER_PATTERN.split(s)

    for header in headers_list:
        if not header:  # empty string, e.g. from a leading/trailing comma
            continue

        match = _HEADER_PATTERN.fullmatch(header.strip())
        if not match:
            parts = header.split("=", 1)
            # BUG FIX: an entry without "=" previously raised ValueError on
            # tuple unpacking; skip it with a warning instead.
            if len(parts) != 2:
                _logger.warning(
                    "Header format invalid! Header must be of the form key=value: %s",
                    f"{parts[0]}: ****",
                )
                continue
            name, value = parts
            encoded_header = f"{urllib.parse.quote(name)}={urllib.parse.quote(value)}"
            match = _HEADER_PATTERN.fullmatch(encoded_header.strip())
            if not match:
                _logger.warning(
                    "Header format invalid! Header values in environment variables must be "
                    "URL encoded: %s",
                    f"{name}: ****",
                )
                continue
            # BUG FIX: this message was a plain string containing a literal
            # "{name}" placeholder; use lazy %-formatting like the warnings above.
            _logger.warning(
                "Header values in environment variables should be URL encoded, attempting to "
                "URL encode header: %s",
                f"{name}: ****",
            )

        name, value = header.split("=", 1)
        name = urllib.parse.unquote(name).strip().lower()
        value = urllib.parse.unquote(value).strip()
        headers[name] = value

    return headers
|
|
@@ -10,6 +10,7 @@ from strawberry.dataloader import DataLoader
|
|
|
10
10
|
from typing_extensions import TypeAlias
|
|
11
11
|
|
|
12
12
|
from phoenix.db import models
|
|
13
|
+
from phoenix.server.api.exceptions import NotFound
|
|
13
14
|
from phoenix.server.api.types.DatasetExampleRevision import DatasetExampleRevision
|
|
14
15
|
from phoenix.server.types import DbSessionFactory
|
|
15
16
|
|
|
@@ -24,7 +25,7 @@ class DatasetExampleRevisionsDataLoader(DataLoader[Key, Result]):
|
|
|
24
25
|
super().__init__(load_fn=self._load_fn)
|
|
25
26
|
self._db = db
|
|
26
27
|
|
|
27
|
-
async def _load_fn(self, keys: List[Key]) -> List[Union[Result,
|
|
28
|
+
async def _load_fn(self, keys: List[Key]) -> List[Union[Result, NotFound]]:
|
|
28
29
|
# sqlalchemy has limited SQLite support for VALUES, so use UNION ALL instead.
|
|
29
30
|
# For details, see https://github.com/sqlalchemy/sqlalchemy/issues/7228
|
|
30
31
|
keys_subquery = union(
|
|
@@ -95,4 +96,4 @@ class DatasetExampleRevisionsDataLoader(DataLoader[Key, Result]):
|
|
|
95
96
|
) in await session.stream(query)
|
|
96
97
|
if is_valid_version
|
|
97
98
|
}
|
|
98
|
-
return [results.get(key,
|
|
99
|
+
return [results.get(key, NotFound("Could not find revision.")) for key in keys]
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
from graphql.error import GraphQLError
|
|
2
|
+
from strawberry.extensions import MaskErrors
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class CustomGraphQLError(Exception):
    """
    Base class for errors that represent an expected error scenario in a
    GraphQL resolver. Subclasses are exempt from error masking (see
    ``_should_mask_error``), so their messages reach API clients.
    """
|
+
|
|
11
|
+
class BadRequest(CustomGraphQLError):
    """
    An error raised due to a malformed or invalid request, e.g. input that
    fails validation before any work is attempted.
    """
+
|
|
17
|
+
class NotFound(CustomGraphQLError):
    """
    An error raised when the requested resource is not found, e.g. a lookup
    by global ID that matches no database row.
    """
+
|
|
23
|
+
class Unauthorized(CustomGraphQLError):
    """
    An error raised when login fails or a user or other entity is not
    authorized to access a resource.
    """
+
|
|
30
|
+
def get_mask_errors_extension() -> MaskErrors:
    """
    Build a strawberry ``MaskErrors`` extension that replaces unexpected
    resolver errors with a generic message, leaving deliberate
    ``CustomGraphQLError``s visible to clients.
    """
    extension = MaskErrors(
        should_mask_error=_should_mask_error,
        error_message="an unexpected error occurred",
    )
    return extension
|
37
|
+
def _should_mask_error(error: GraphQLError) -> bool:
    """
    Decide whether an error raised from a GraphQL resolver should be masked:
    anything other than a deliberate ``CustomGraphQLError`` is unexpected.
    """
    is_expected = isinstance(error.original_error, CustomGraphQLError)
    return not is_expected
|
|
@@ -5,12 +5,15 @@ import jwt
|
|
|
5
5
|
import strawberry
|
|
6
6
|
from sqlalchemy import insert, select
|
|
7
7
|
from strawberry import UNSET
|
|
8
|
+
from strawberry.relay import GlobalID
|
|
8
9
|
from strawberry.types import Info
|
|
9
10
|
|
|
10
11
|
from phoenix.db import models
|
|
11
12
|
from phoenix.server.api.context import Context
|
|
13
|
+
from phoenix.server.api.exceptions import NotFound
|
|
12
14
|
from phoenix.server.api.mutations.auth import HasSecret, IsAuthenticated
|
|
13
15
|
from phoenix.server.api.queries import Query
|
|
16
|
+
from phoenix.server.api.types.node import from_global_id_with_expected_type
|
|
14
17
|
from phoenix.server.api.types.SystemApiKey import SystemApiKey
|
|
15
18
|
|
|
16
19
|
|
|
@@ -28,6 +31,16 @@ class CreateApiKeyInput:
|
|
|
28
31
|
expires_at: Optional[datetime] = UNSET
|
|
29
32
|
|
|
30
33
|
|
|
34
|
+
@strawberry.input
|
|
35
|
+
class DeleteApiKeyInput:
|
|
36
|
+
id: GlobalID
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@strawberry.type
|
|
40
|
+
class DeleteSystemApiKeyMutationPayload:
|
|
41
|
+
id: GlobalID
|
|
42
|
+
|
|
43
|
+
|
|
31
44
|
@strawberry.type
|
|
32
45
|
class ApiKeyMutationMixin:
|
|
33
46
|
@strawberry.mutation(permission_classes=[HasSecret, IsAuthenticated]) # type: ignore
|
|
@@ -79,6 +92,22 @@ class ApiKeyMutationMixin:
|
|
|
79
92
|
query=Query(),
|
|
80
93
|
)
|
|
81
94
|
|
|
95
|
+
@strawberry.mutation(permission_classes=[HasSecret, IsAuthenticated]) # type: ignore
|
|
96
|
+
async def delete_system_api_key(
|
|
97
|
+
self, info: Info[Context, None], input: DeleteApiKeyInput
|
|
98
|
+
) -> DeleteSystemApiKeyMutationPayload:
|
|
99
|
+
api_key_id = from_global_id_with_expected_type(
|
|
100
|
+
input.id, expected_type_name=SystemApiKey.__name__
|
|
101
|
+
)
|
|
102
|
+
async with info.context.db() as session:
|
|
103
|
+
api_key = await session.get(models.APIKey, api_key_id)
|
|
104
|
+
if api_key is None:
|
|
105
|
+
raise NotFound(f"Unknown System API Key: {input.id}")
|
|
106
|
+
|
|
107
|
+
await session.delete(api_key)
|
|
108
|
+
|
|
109
|
+
return DeleteSystemApiKeyMutationPayload(id=input.id)
|
|
110
|
+
|
|
82
111
|
|
|
83
112
|
def create_jwt(
|
|
84
113
|
*,
|
|
@@ -8,10 +8,12 @@ from strawberry.types import Info
|
|
|
8
8
|
from phoenix.auth import is_valid_password
|
|
9
9
|
from phoenix.db import models
|
|
10
10
|
from phoenix.server.api.context import Context
|
|
11
|
+
from phoenix.server.api.exceptions import Unauthorized
|
|
11
12
|
from phoenix.server.api.mutations.auth import HasSecret
|
|
12
13
|
|
|
13
14
|
PHOENIX_ACCESS_TOKEN_COOKIE_NAME = "phoenix-access-token"
|
|
14
15
|
PHOENIX_ACCESS_TOKEN_COOKIE_MAX_AGE_IN_SECONDS = int(timedelta(days=31).total_seconds())
|
|
16
|
+
FAILED_LOGIN_MESSAGE = "login failed"
|
|
15
17
|
|
|
16
18
|
|
|
17
19
|
@strawberry.input
|
|
@@ -34,7 +36,7 @@ class AuthMutationMixin:
|
|
|
34
36
|
select(models.User).where(models.User.email == input.email)
|
|
35
37
|
)
|
|
36
38
|
) is None or (password_hash := user.password_hash) is None:
|
|
37
|
-
raise
|
|
39
|
+
raise Unauthorized(FAILED_LOGIN_MESSAGE)
|
|
38
40
|
secret = info.context.get_secret()
|
|
39
41
|
loop = asyncio.get_running_loop()
|
|
40
42
|
if not await loop.run_in_executor(
|
|
@@ -43,7 +45,7 @@ class AuthMutationMixin:
|
|
|
43
45
|
password=input.password, salt=secret, password_hash=password_hash
|
|
44
46
|
),
|
|
45
47
|
):
|
|
46
|
-
raise
|
|
48
|
+
raise Unauthorized(FAILED_LOGIN_MESSAGE)
|
|
47
49
|
response = info.context.get_response()
|
|
48
50
|
response.set_cookie(
|
|
49
51
|
key=PHOENIX_ACCESS_TOKEN_COOKIE_NAME,
|
|
@@ -13,6 +13,7 @@ from strawberry.types import Info
|
|
|
13
13
|
from phoenix.db import models
|
|
14
14
|
from phoenix.db.helpers import get_eval_trace_ids_for_datasets, get_project_names_for_datasets
|
|
15
15
|
from phoenix.server.api.context import Context
|
|
16
|
+
from phoenix.server.api.exceptions import BadRequest, NotFound
|
|
16
17
|
from phoenix.server.api.helpers.dataset_helpers import (
|
|
17
18
|
get_dataset_example_input,
|
|
18
19
|
get_dataset_example_output,
|
|
@@ -362,7 +363,7 @@ class DatasetMutationMixin:
|
|
|
362
363
|
expected_type_name=Dataset.__name__,
|
|
363
364
|
)
|
|
364
365
|
except ValueError:
|
|
365
|
-
raise
|
|
366
|
+
raise NotFound(f"Unknown dataset: {input.dataset_id}")
|
|
366
367
|
project_names_stmt = get_project_names_for_datasets(dataset_id)
|
|
367
368
|
eval_trace_ids_stmt = get_eval_trace_ids_for_datasets(dataset_id)
|
|
368
369
|
stmt = (
|
|
@@ -372,7 +373,7 @@ class DatasetMutationMixin:
|
|
|
372
373
|
project_names = await session.scalars(project_names_stmt)
|
|
373
374
|
eval_trace_ids = await session.scalars(eval_trace_ids_stmt)
|
|
374
375
|
if not (dataset := await session.scalar(stmt)):
|
|
375
|
-
raise
|
|
376
|
+
raise NotFound(f"Unknown dataset: {input.dataset_id}")
|
|
376
377
|
await asyncio.gather(
|
|
377
378
|
delete_projects(info.context.db, *project_names),
|
|
378
379
|
delete_traces(info.context.db, *eval_trace_ids),
|
|
@@ -388,7 +389,7 @@ class DatasetMutationMixin:
|
|
|
388
389
|
input: PatchDatasetExamplesInput,
|
|
389
390
|
) -> DatasetMutationPayload:
|
|
390
391
|
if not (patches := input.patches):
|
|
391
|
-
raise
|
|
392
|
+
raise BadRequest("Must provide examples to patch.")
|
|
392
393
|
by_numeric_id = [
|
|
393
394
|
(
|
|
394
395
|
from_global_id_with_expected_type(patch.example_id, DatasetExample.__name__),
|
|
@@ -399,9 +400,9 @@ class DatasetMutationMixin:
|
|
|
399
400
|
]
|
|
400
401
|
example_ids, _, patches = map(list, zip(*sorted(by_numeric_id)))
|
|
401
402
|
if len(set(example_ids)) < len(example_ids):
|
|
402
|
-
raise
|
|
403
|
+
raise BadRequest("Cannot patch the same example more than once per mutation.")
|
|
403
404
|
if any(patch.is_empty() for patch in patches):
|
|
404
|
-
raise
|
|
405
|
+
raise BadRequest("Received one or more empty patches that contain no fields to update.")
|
|
405
406
|
version_description = input.version_description or None
|
|
406
407
|
version_metadata = input.version_metadata
|
|
407
408
|
async with info.context.db() as session:
|
|
@@ -419,9 +420,9 @@ class DatasetMutationMixin:
|
|
|
419
420
|
)
|
|
420
421
|
).all()
|
|
421
422
|
if not datasets:
|
|
422
|
-
raise
|
|
423
|
+
raise NotFound("No examples found.")
|
|
423
424
|
if len(set(ds.id for ds in datasets)) > 1:
|
|
424
|
-
raise
|
|
425
|
+
raise BadRequest("Examples must come from the same dataset.")
|
|
425
426
|
dataset = datasets[0]
|
|
426
427
|
|
|
427
428
|
revision_ids = (
|
|
@@ -445,7 +446,7 @@ class DatasetMutationMixin:
|
|
|
445
446
|
)
|
|
446
447
|
).all()
|
|
447
448
|
if (num_missing_examples := len(example_ids) - len(revisions)) > 0:
|
|
448
|
-
raise
|
|
449
|
+
raise NotFound(f"{num_missing_examples} example(s) could not be found.")
|
|
449
450
|
|
|
450
451
|
version_id = await session.scalar(
|
|
451
452
|
insert(models.DatasetVersion)
|
|
@@ -9,6 +9,7 @@ from strawberry.types import Info
|
|
|
9
9
|
from phoenix.db import models
|
|
10
10
|
from phoenix.db.helpers import get_eval_trace_ids_for_experiments, get_project_names_for_experiments
|
|
11
11
|
from phoenix.server.api.context import Context
|
|
12
|
+
from phoenix.server.api.exceptions import CustomGraphQLError
|
|
12
13
|
from phoenix.server.api.input_types.DeleteExperimentsInput import DeleteExperimentsInput
|
|
13
14
|
from phoenix.server.api.mutations.auth import IsAuthenticated
|
|
14
15
|
from phoenix.server.api.types.Experiment import Experiment, to_gql_experiment
|
|
@@ -52,7 +53,7 @@ class ExperimentMutationMixin:
|
|
|
52
53
|
}
|
|
53
54
|
if unknown_experiment_ids := set(experiment_ids) - set(experiments.keys()):
|
|
54
55
|
await savepoint.rollback()
|
|
55
|
-
raise
|
|
56
|
+
raise CustomGraphQLError(
|
|
56
57
|
"Failed to delete experiment(s), "
|
|
57
58
|
"probably due to invalid input experiment ID(s): "
|
|
58
59
|
+ str(
|
phoenix/server/api/queries.py
CHANGED
|
@@ -33,6 +33,7 @@ from phoenix.db.models import (
|
|
|
33
33
|
)
|
|
34
34
|
from phoenix.pointcloud.clustering import Hdbscan
|
|
35
35
|
from phoenix.server.api.context import Context
|
|
36
|
+
from phoenix.server.api.exceptions import NotFound
|
|
36
37
|
from phoenix.server.api.helpers import ensure_list
|
|
37
38
|
from phoenix.server.api.input_types.ClusterInput import ClusterInput
|
|
38
39
|
from phoenix.server.api.input_types.Coordinates import (
|
|
@@ -391,7 +392,7 @@ class Query:
|
|
|
391
392
|
async with info.context.db() as session:
|
|
392
393
|
project = (await session.execute(project_stmt)).first()
|
|
393
394
|
if project is None:
|
|
394
|
-
raise
|
|
395
|
+
raise NotFound(f"Unknown project: {id}")
|
|
395
396
|
return Project(
|
|
396
397
|
id_attr=project.id,
|
|
397
398
|
name=project.name,
|
|
@@ -406,7 +407,7 @@ class Query:
|
|
|
406
407
|
async with info.context.db() as session:
|
|
407
408
|
trace = (await session.execute(trace_stmt)).first()
|
|
408
409
|
if trace is None:
|
|
409
|
-
raise
|
|
410
|
+
raise NotFound(f"Unknown trace: {id}")
|
|
410
411
|
return Trace(
|
|
411
412
|
id_attr=trace.id, trace_id=trace.trace_id, project_rowid=trace.project_rowid
|
|
412
413
|
)
|
|
@@ -421,13 +422,13 @@ class Query:
|
|
|
421
422
|
async with info.context.db() as session:
|
|
422
423
|
span = await session.scalar(span_stmt)
|
|
423
424
|
if span is None:
|
|
424
|
-
raise
|
|
425
|
+
raise NotFound(f"Unknown span: {id}")
|
|
425
426
|
return to_gql_span(span)
|
|
426
427
|
elif type_name == Dataset.__name__:
|
|
427
428
|
dataset_stmt = select(models.Dataset).where(models.Dataset.id == node_id)
|
|
428
429
|
async with info.context.db() as session:
|
|
429
430
|
if (dataset := await session.scalar(dataset_stmt)) is None:
|
|
430
|
-
raise
|
|
431
|
+
raise NotFound(f"Unknown dataset: {id}")
|
|
431
432
|
return to_gql_dataset(dataset)
|
|
432
433
|
elif type_name == DatasetExample.__name__:
|
|
433
434
|
example_id = node_id
|
|
@@ -453,7 +454,7 @@ class Query:
|
|
|
453
454
|
)
|
|
454
455
|
)
|
|
455
456
|
if not example:
|
|
456
|
-
raise
|
|
457
|
+
raise NotFound(f"Unknown dataset example: {id}")
|
|
457
458
|
return DatasetExample(
|
|
458
459
|
id_attr=example.id,
|
|
459
460
|
created_at=example.created_at,
|
|
@@ -464,7 +465,7 @@ class Query:
|
|
|
464
465
|
select(models.Experiment).where(models.Experiment.id == node_id)
|
|
465
466
|
)
|
|
466
467
|
if not experiment:
|
|
467
|
-
raise
|
|
468
|
+
raise NotFound(f"Unknown experiment: {id}")
|
|
468
469
|
return Experiment(
|
|
469
470
|
id_attr=experiment.id,
|
|
470
471
|
name=experiment.name,
|
|
@@ -485,9 +486,9 @@ class Query:
|
|
|
485
486
|
)
|
|
486
487
|
)
|
|
487
488
|
):
|
|
488
|
-
raise
|
|
489
|
+
raise NotFound(f"Unknown experiment run: {id}")
|
|
489
490
|
return to_gql_experiment_run(run)
|
|
490
|
-
raise
|
|
491
|
+
raise NotFound(f"Unknown node type: {type_name}")
|
|
491
492
|
|
|
492
493
|
@strawberry.field
|
|
493
494
|
def clusters(
|
|
@@ -2,7 +2,7 @@ from datetime import datetime
|
|
|
2
2
|
from random import getrandbits
|
|
3
3
|
from typing import Any, Dict, List, Optional
|
|
4
4
|
|
|
5
|
-
from fastapi import APIRouter, HTTPException
|
|
5
|
+
from fastapi import APIRouter, HTTPException, Path
|
|
6
6
|
from pydantic import Field
|
|
7
7
|
from sqlalchemy import select
|
|
8
8
|
from starlette.requests import Request
|
|
@@ -18,7 +18,7 @@ from phoenix.server.dml_event import ExperimentInsertEvent
|
|
|
18
18
|
from .pydantic_compat import V1RoutesBaseModel
|
|
19
19
|
from .utils import ResponseBody, add_errors_to_responses
|
|
20
20
|
|
|
21
|
-
router = APIRouter(tags=["experiments"], include_in_schema=
|
|
21
|
+
router = APIRouter(tags=["experiments"], include_in_schema=True)
|
|
22
22
|
|
|
23
23
|
|
|
24
24
|
def _short_uuid() -> str:
|
|
@@ -90,8 +90,8 @@ class CreateExperimentResponseBody(ResponseBody[Experiment]):
|
|
|
90
90
|
)
|
|
91
91
|
async def create_experiment(
|
|
92
92
|
request: Request,
|
|
93
|
-
dataset_id: str,
|
|
94
93
|
request_body: CreateExperimentRequestBody,
|
|
94
|
+
dataset_id: str = Path(..., title="Dataset ID"),
|
|
95
95
|
) -> CreateExperimentResponseBody:
|
|
96
96
|
dataset_globalid = GlobalID.from_id(dataset_id)
|
|
97
97
|
try:
|
|
@@ -266,7 +266,7 @@ class ListExperimentsResponseBody(ResponseBody[List[Experiment]]):
|
|
|
266
266
|
)
|
|
267
267
|
async def list_experiments(
|
|
268
268
|
request: Request,
|
|
269
|
-
dataset_id: str,
|
|
269
|
+
dataset_id: str = Path(..., title="Dataset ID"),
|
|
270
270
|
) -> ListExperimentsResponseBody:
|
|
271
271
|
dataset_gid = GlobalID.from_id(dataset_id)
|
|
272
272
|
try:
|
phoenix/server/api/schema.py
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import strawberry
|
|
2
2
|
|
|
3
|
+
from phoenix.server.api.exceptions import get_mask_errors_extension
|
|
3
4
|
from phoenix.server.api.mutations import Mutation
|
|
4
5
|
from phoenix.server.api.queries import Query
|
|
5
6
|
|
|
@@ -10,4 +11,5 @@ from phoenix.server.api.queries import Query
|
|
|
10
11
|
schema = strawberry.Schema(
|
|
11
12
|
query=Query,
|
|
12
13
|
mutation=Mutation,
|
|
14
|
+
extensions=[get_mask_errors_extension()],
|
|
13
15
|
)
|
phoenix/version.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
__version__ = "4.
|
|
1
|
+
__version__ = "4.29.0"
|
|
File without changes
|
|
File without changes
|
|
File without changes
|