arize-phoenix 4.10.2rc2__py3-none-any.whl → 4.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of arize-phoenix might be problematic.
- {arize_phoenix-4.10.2rc2.dist-info → arize_phoenix-4.12.0.dist-info}/METADATA +3 -4
- {arize_phoenix-4.10.2rc2.dist-info → arize_phoenix-4.12.0.dist-info}/RECORD +29 -29
- phoenix/server/api/context.py +7 -3
- phoenix/server/api/openapi/main.py +2 -18
- phoenix/server/api/openapi/schema.py +12 -12
- phoenix/server/api/routers/v1/__init__.py +83 -36
- phoenix/server/api/routers/v1/dataset_examples.py +123 -102
- phoenix/server/api/routers/v1/datasets.py +507 -389
- phoenix/server/api/routers/v1/evaluations.py +66 -73
- phoenix/server/api/routers/v1/experiment_evaluations.py +91 -67
- phoenix/server/api/routers/v1/experiment_runs.py +155 -97
- phoenix/server/api/routers/v1/experiments.py +181 -131
- phoenix/server/api/routers/v1/spans.py +173 -143
- phoenix/server/api/routers/v1/traces.py +128 -114
- phoenix/server/api/types/Span.py +1 -0
- phoenix/server/app.py +176 -148
- phoenix/server/openapi/docs.py +221 -0
- phoenix/server/static/index.js +574 -573
- phoenix/server/thread_server.py +2 -2
- phoenix/session/client.py +5 -0
- phoenix/session/data_extractor.py +20 -1
- phoenix/session/session.py +4 -0
- phoenix/trace/attributes.py +2 -1
- phoenix/trace/schemas.py +1 -0
- phoenix/trace/span_json_decoder.py +1 -1
- phoenix/version.py +1 -1
- phoenix/server/api/routers/v1/utils.py +0 -94
- {arize_phoenix-4.10.2rc2.dist-info → arize_phoenix-4.12.0.dist-info}/WHEEL +0 -0
- {arize_phoenix-4.10.2rc2.dist-info → arize_phoenix-4.12.0.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-4.10.2rc2.dist-info → arize_phoenix-4.12.0.dist-info}/licenses/LICENSE +0 -0
phoenix/server/thread_server.py
CHANGED
@@ -4,7 +4,7 @@ from threading import Thread
 from time import sleep, time
 from typing import Generator

-from
+from starlette.applications import Starlette
 from uvicorn import Config, Server
 from uvicorn.config import LoopSetupType

@@ -24,7 +24,7 @@ class ThreadServer(Server):

     def __init__(
         self,
-        app:
+        app: Starlette,
         host: str,
         port: int,
         root_path: str,
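For context, the `Starlette` annotation above is the ASGI application handed to uvicorn. Below is a minimal sketch of the serve-in-a-background-thread pattern this module implements, using only libraries visible in the hunks; the function name and defaults are illustrative, not Phoenix's exact `ThreadServer` API.

import time
from contextlib import contextmanager
from threading import Thread
from typing import Iterator

from starlette.applications import Starlette
from uvicorn import Config, Server


@contextmanager
def serve_in_thread(app: Starlette, host: str, port: int) -> Iterator[Server]:
    # Run uvicorn in a daemon thread and yield once startup has completed.
    server = Server(Config(app=app, host=host, port=port))
    thread = Thread(target=server.run, daemon=True)
    thread.start()
    try:
        while not server.started:  # uvicorn sets this flag after startup
            time.sleep(1e-3)
        yield server
    finally:
        server.should_exit = True  # ask uvicorn to shut down gracefully
        thread.join()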
phoenix/session/client.py
CHANGED
@@ -51,6 +51,9 @@ from phoenix.trace.otel import encode_span_to_otlp

 logger = logging.getLogger(__name__)

+
+DEFAULT_TIMEOUT_IN_SECONDS = 5
+
 DatasetAction: TypeAlias = Literal["create", "append"]


@@ -120,6 +123,7 @@ class Client(TraceDataExtractor):
         project_name: Optional[str] = None,
         # Deprecated
         stop_time: Optional[datetime] = None,
+        timeout: Optional[int] = DEFAULT_TIMEOUT_IN_SECONDS,
     ) -> Optional[Union[pd.DataFrame, List[pd.DataFrame]]]:
         """
         Queries spans from the Phoenix server or active session based on specified criteria.
@@ -159,6 +163,7 @@
                 "limit": limit,
                 "root_spans_only": root_spans_only,
             },
+            timeout=timeout,
         )
         if response.status_code == 404:
             logger.info("No spans found.")
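In practice, this release lets callers pass an optional per-request timeout (defaulting to DEFAULT_TIMEOUT_IN_SECONDS = 5) when querying spans through the public client. A minimal usage sketch; the project name and timeout value are illustrative:

import phoenix as px

client = px.Client()  # connects to a running Phoenix server, e.g. http://localhost:6006
# Raise the request timeout above the 5-second default for projects with many spans.
spans_df = client.get_spans_dataframe(project_name="default", timeout=30)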
phoenix/session/data_extractor.py
CHANGED
@@ -9,6 +9,7 @@ from phoenix.trace.dsl import SpanQuery
 from phoenix.trace.trace_dataset import TraceDataset

 DEFAULT_SPAN_LIMIT = 1000
+DEFAULT_TIMEOUT_IN_SECONDS = 5


 class TraceDataExtractor(ABC):
@@ -26,6 +27,7 @@ class TraceDataExtractor(ABC):
         limit: Optional[int] = DEFAULT_SPAN_LIMIT,
         root_spans_only: Optional[bool] = None,
         project_name: Optional[str] = None,
+        timeout: Optional[int] = DEFAULT_TIMEOUT_IN_SECONDS,
     ) -> Optional[Union[pd.DataFrame, List[pd.DataFrame]]]: ...

     def get_spans_dataframe(
@@ -37,6 +39,7 @@ class TraceDataExtractor(ABC):
         limit: Optional[int] = DEFAULT_SPAN_LIMIT,
         root_spans_only: Optional[bool] = None,
         project_name: Optional[str] = None,
+        timeout: Optional[int] = DEFAULT_TIMEOUT_IN_SECONDS,
     ) -> Optional[pd.DataFrame]:
         return cast(
             Optional[pd.DataFrame],
@@ -47,6 +50,7 @@ class TraceDataExtractor(ABC):
                 limit=limit,
                 root_spans_only=root_spans_only,
                 project_name=project_name,
+                timeout=timeout,
             ),
         )

@@ -59,8 +63,23 @@ class TraceDataExtractor(ABC):
     def get_trace_dataset(
         self,
         project_name: Optional[str] = None,
+        *,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: Optional[int] = DEFAULT_SPAN_LIMIT,
+        root_spans_only: Optional[bool] = None,
+        timeout: Optional[int] = DEFAULT_TIMEOUT_IN_SECONDS,
     ) -> Optional[TraceDataset]:
-        if (
+        if (
+            dataframe := self.get_spans_dataframe(
+                project_name=project_name,
+                start_time=start_time,
+                end_time=end_time,
+                limit=limit,
+                root_spans_only=root_spans_only,
+                timeout=timeout,
+            )
+        ) is None:
             return None
         evaluations = self.get_evaluations(project_name=project_name)
         return TraceDataset(dataframe=dataframe, evaluations=evaluations)
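With this change, `get_trace_dataset` exposes the same keyword-only filters as `get_spans_dataframe` and forwards them (plus `timeout`) instead of fetching spans with defaults only. A sketch of how a caller might use the new arguments; the filter values are illustrative:

from datetime import datetime, timedelta, timezone

import phoenix as px

client = px.Client()
trace_ds = client.get_trace_dataset(
    project_name="default",
    start_time=datetime.now(timezone.utc) - timedelta(hours=1),  # last hour only
    limit=500,              # cap the number of spans pulled
    root_spans_only=False,
    timeout=10,             # seconds, overrides DEFAULT_TIMEOUT_IN_SECONDS
)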
phoenix/session/session.py
CHANGED
@@ -75,6 +75,9 @@ global _session_working_dir
 _session_working_dir: Optional["TemporaryDirectory[str]"] = None


+DEFAULT_TIMEOUT_IN_SECONDS = 5
+
+
 class NotebookEnvironment(Enum):
     COLAB = "colab"
     LOCAL = "local"
@@ -152,6 +155,7 @@ class Session(TraceDataExtractor, ABC):
         project_name: Optional[str] = None,
         # Deprecated fields
         stop_time: Optional[datetime] = None,
+        timeout: Optional[int] = DEFAULT_TIMEOUT_IN_SECONDS,
     ) -> Optional[Union[pd.DataFrame, List[pd.DataFrame]]]:
         """
         Queries the spans in the project based on the provided parameters.
phoenix/trace/attributes.py
CHANGED
@@ -33,6 +33,7 @@ from typing import (
     cast,
 )

+import numpy as np
 from openinference.semconv import trace
 from openinference.semconv.trace import DocumentAttributes, SpanAttributes
 from typing_extensions import assert_never
@@ -307,7 +308,7 @@ def _flatten_mapping(
                 json_string_attributes=json_string_attributes,
                 separator=separator,
             )
-        elif isinstance(value, Sequence) and recurse_on_sequence:
+        elif (isinstance(value, Sequence) or isinstance(value, np.ndarray)) and recurse_on_sequence:
             yield from _flatten_sequence(
                 value,
                 prefix=prefixed_key,
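The reason for the extra check: a NumPy array behaves like a sequence but is not registered as `collections.abc.Sequence`, so the old `isinstance(value, Sequence)` test skipped it and its elements were never flattened. A quick illustration:

from collections.abc import Sequence

import numpy as np

scores = np.array([0.1, 0.2, 0.3])
print(isinstance(scores, Sequence))    # False -- the old check misses ndarrays
print(isinstance(scores, np.ndarray))  # True  -- the added check catches them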
phoenix/trace/span_json_decoder.py
CHANGED
@@ -81,7 +81,7 @@ def json_to_span(data: Dict[str, Any]) -> Any:
             timestamp=datetime.fromisoformat(event["timestamp"]),
         )
         for event in (
-            data["events"] if isinstance(data["events"],
+            json.loads(data["events"]) if isinstance(data["events"], str) else data["events"]
         )
     ]
     data["conversation"] = (
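The corrected branch accepts `events` either as an already-decoded list or as a JSON-encoded string, parsing only in the latter case. A standalone illustration of the expression (not Phoenix's decoder itself):

import json
from typing import Any, List, Union


def normalize_events(events: Union[str, List[Any]]) -> List[Any]:
    # Mirrors the fixed expression: parse only when the payload is still a JSON string.
    return json.loads(events) if isinstance(events, str) else events


print(normalize_events('[{"name": "exception"}]'))  # [{'name': 'exception'}]
print(normalize_events([{"name": "exception"}]))    # returned unchanged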
phoenix/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "4.10.2rc2"
+__version__ = "4.12.0"
phoenix/server/api/routers/v1/utils.py
REMOVED
@@ -1,94 +0,0 @@
-from typing import Any, Dict, Generic, List, Optional, TypedDict, Union
-
-from pydantic import BaseModel
-from typing_extensions import TypeAlias, TypeVar, assert_never
-
-StatusCode: TypeAlias = int
-DataType = TypeVar("DataType")
-Responses: TypeAlias = Dict[
-    Union[int, str], Dict[str, Any]
-]  # input type for the `responses` parameter of a fastapi route
-
-
-class StatusCodeWithDescription(TypedDict):
-    """
-    A duck type for a status code with a description detailing under what
-    conditions the status code is raised.
-    """
-
-    status_code: StatusCode
-    description: str
-
-
-class RequestBody(BaseModel, Generic[DataType]):
-    # A generic request type accepted by V1 routes.
-    #
-    # Don't use """ for this docstring or it will be included as a description
-    # in the generated OpenAPI schema.
-    data: DataType
-
-
-class ResponseBody(BaseModel, Generic[DataType]):
-    # A generic response type returned by V1 routes.
-    #
-    # Don't use """ for this docstring or it will be included as a description
-    # in the generated OpenAPI schema.
-
-    data: DataType
-
-
-class PaginatedResponseBody(BaseModel, Generic[DataType]):
-    # A generic paginated response type returned by V1 routes.
-    #
-    # Don't use """ for this docstring or it will be included as a description
-    # in the generated OpenAPI schema.
-
-    data: List[DataType]
-    next_cursor: Optional[str]
-
-
-def add_errors_to_responses(
-    errors: List[Union[StatusCode, StatusCodeWithDescription]],
-    /,
-    *,
-    responses: Optional[Responses] = None,
-) -> Responses:
-    """
-    Creates or updates a patch for an OpenAPI schema's `responses` section to
-    include status codes in the generated OpenAPI schema.
-    """
-    output_responses: Responses = responses or {}
-    for error in errors:
-        status_code: int
-        description: Optional[str] = None
-        if isinstance(error, StatusCode):
-            status_code = error
-        elif isinstance(error, dict):
-            status_code = error["status_code"]
-            description = error["description"]
-        else:
-            assert_never(error)
-        if status_code not in output_responses:
-            output_responses[status_code] = {
-                "content": {"text/plain": {"schema": {"type": "string"}}}
-            }
-        if description:
-            output_responses[status_code]["description"] = description
-    return output_responses
-
-
-def add_text_csv_content_to_responses(
-    status_code: StatusCode, /, *, responses: Optional[Responses] = None
-) -> Responses:
-    """
-    Creates or updates a patch for an OpenAPI schema's `responses` section to
-    ensure that the response for the given status code is marked as text/csv in
-    the generated OpenAPI schema.
-    """
-    output_responses: Responses = responses or {}
-    if status_code not in output_responses:
-        output_responses[status_code] = {}
-    output_responses[status_code]["content"] = {
-        "text/csv": {"schema": {"type": "string", "contentMediaType": "text/csv"}}
-    }
-    return output_responses
{arize_phoenix-4.10.2rc2.dist-info → arize_phoenix-4.12.0.dist-info}/WHEEL
File without changes
{arize_phoenix-4.10.2rc2.dist-info → arize_phoenix-4.12.0.dist-info}/licenses/IP_NOTICE
File without changes
{arize_phoenix-4.10.2rc2.dist-info → arize_phoenix-4.12.0.dist-info}/licenses/LICENSE
File without changes