arize-phoenix 3.19.4__py3-none-any.whl → 3.20.0__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.


phoenix/server/main.py CHANGED
@@ -19,8 +19,8 @@ from phoenix.config import (
 )
 from phoenix.core.model_schema_adapter import create_model_from_datasets
 from phoenix.core.traces import Traces
-from phoenix.datasets.dataset import EMPTY_DATASET, Dataset
-from phoenix.datasets.fixtures import FIXTURES, get_datasets
+from phoenix.inferences.fixtures import FIXTURES, get_datasets
+from phoenix.inferences.inferences import EMPTY_DATASET, Inferences
 from phoenix.pointcloud.umap_parameters import (
     DEFAULT_MIN_DIST,
     DEFAULT_N_NEIGHBORS,
@@ -114,9 +114,9 @@ if __name__ == "__main__":
     trace_dataset_name: Optional[str] = None
     simulate_streaming: Optional[bool] = None

-    primary_dataset: Dataset = EMPTY_DATASET
-    reference_dataset: Optional[Dataset] = None
-    corpus_dataset: Optional[Dataset] = None
+    primary_dataset: Inferences = EMPTY_DATASET
+    reference_dataset: Optional[Inferences] = None
+    corpus_dataset: Optional[Inferences] = None

     # automatically remove the pid file when the process is being gracefully terminated
     atexit.register(_remove_pid_file)
@@ -158,14 +158,14 @@ if __name__ == "__main__":
         primary_dataset_name = args.primary
         reference_dataset_name = args.reference
         corpus_dataset_name = args.corpus
-        primary_dataset = Dataset.from_name(primary_dataset_name)
+        primary_dataset = Inferences.from_name(primary_dataset_name)
         reference_dataset = (
-            Dataset.from_name(reference_dataset_name)
+            Inferences.from_name(reference_dataset_name)
             if reference_dataset_name is not None
             else None
         )
         corpus_dataset = (
-            None if corpus_dataset_name is None else Dataset.from_name(corpus_dataset_name)
+            None if corpus_dataset_name is None else Inferences.from_name(corpus_dataset_name)
         )
     elif args.command == "fixture":
         fixture_name = args.fixture
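
The hunks above rename Dataset to Inferences and move the fixtures module out of phoenix.datasets. For downstream code that needs to run against both versions, a minimal compatibility sketch (the try/except pattern is illustrative, not part of this release; the import paths come from the diff above):

    try:
        # arize-phoenix >= 3.20.0
        from phoenix.inferences.inferences import EMPTY_DATASET, Inferences
    except ImportError:
        # arize-phoenix <= 3.19.4, where Inferences was still named Dataset
        from phoenix.datasets.dataset import EMPTY_DATASET, Dataset as Inferences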
@@ -1,5 +1,5 @@
 """
-A set of **highly experimental** helper functions to
+A set of helper functions to
   - extract spans from Phoenix for evaluation
   - explode retrieved documents from (horizontal) lists to a (vertical) series
     indexed by `context.span_id` and `document_position`
@@ -73,7 +73,6 @@ def _extract_subject_id_from_index(
     value: Union[str, Sequence[Any]],
 ) -> pb.Evaluation.SubjectId:
     """
-    (**Highly Experimental**)
     Returns `SubjectId` given the format of `index_names`. Allowed formats are:
     - DocumentRetrievalId
         - index_names=["context.span_id", "document_position"]
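
The docstring above describes document-retrieval evaluations keyed by `context.span_id` and `document_position`. A minimal pandas sketch of such a vertically exploded series (the span IDs and scores are made up for illustration):

    import pandas as pd

    # One row per (span, retrieved document) pair, as described in the docstring.
    index = pd.MultiIndex.from_tuples(
        [("span-1", 0), ("span-1", 1), ("span-2", 0)],
        names=["context.span_id", "document_position"],
    )
    relevance = pd.Series([1.0, 0.0, 1.0], index=index, name="relevance")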
@@ -36,7 +36,7 @@ from phoenix.config import (
 )
 from phoenix.core.model_schema_adapter import create_model_from_datasets
 from phoenix.core.traces import Traces
-from phoenix.datasets.dataset import EMPTY_DATASET, Dataset
+from phoenix.inferences.inferences import EMPTY_DATASET, Inferences
 from phoenix.pointcloud.umap_parameters import get_umap_parameters
 from phoenix.server.app import create_app
 from phoenix.server.thread_server import ThreadServer
@@ -104,9 +104,9 @@ class Session(TraceDataExtractor, ABC):

     def __init__(
         self,
-        primary_dataset: Dataset,
-        reference_dataset: Optional[Dataset] = None,
-        corpus_dataset: Optional[Dataset] = None,
+        primary_dataset: Inferences,
+        reference_dataset: Optional[Inferences] = None,
+        corpus_dataset: Optional[Inferences] = None,
         trace_dataset: Optional[TraceDataset] = None,
         default_umap_parameters: Optional[Mapping[str, Any]] = None,
         host: Optional[str] = None,
@@ -194,9 +194,9 @@ _session: Optional[Session] = None
 class ProcessSession(Session):
     def __init__(
         self,
-        primary_dataset: Dataset,
-        reference_dataset: Optional[Dataset] = None,
-        corpus_dataset: Optional[Dataset] = None,
+        primary_dataset: Inferences,
+        reference_dataset: Optional[Inferences] = None,
+        corpus_dataset: Optional[Inferences] = None,
         trace_dataset: Optional[TraceDataset] = None,
         default_umap_parameters: Optional[Mapping[str, Any]] = None,
         host: Optional[str] = None,
@@ -215,9 +215,9 @@ class ProcessSession(Session):
             notebook_env=notebook_env,
         )
         primary_dataset.to_disc()
-        if isinstance(reference_dataset, Dataset):
+        if isinstance(reference_dataset, Inferences):
             reference_dataset.to_disc()
-        if isinstance(corpus_dataset, Dataset):
+        if isinstance(corpus_dataset, Inferences):
             corpus_dataset.to_disc()
         if isinstance(trace_dataset, TraceDataset):
             trace_dataset.to_disc()
@@ -284,9 +284,9 @@ class ProcessSession(Session):
 class ThreadSession(Session):
     def __init__(
         self,
-        primary_dataset: Dataset,
-        reference_dataset: Optional[Dataset] = None,
-        corpus_dataset: Optional[Dataset] = None,
+        primary_dataset: Inferences,
+        reference_dataset: Optional[Inferences] = None,
+        corpus_dataset: Optional[Inferences] = None,
         trace_dataset: Optional[TraceDataset] = None,
         default_umap_parameters: Optional[Mapping[str, Any]] = None,
         host: Optional[str] = None,
@@ -424,9 +424,9 @@ class ThreadSession(Session):


 def launch_app(
-    primary: Optional[Dataset] = None,
-    reference: Optional[Dataset] = None,
-    corpus: Optional[Dataset] = None,
+    primary: Optional[Inferences] = None,
+    reference: Optional[Inferences] = None,
+    corpus: Optional[Inferences] = None,
     trace: Optional[TraceDataset] = None,
     default_umap_parameters: Optional[Mapping[str, Any]] = None,
     host: Optional[str] = None,
@@ -447,7 +447,7 @@ def launch_app(
     corpus : Dataset, optional
         The dataset containing corpus for LLM context retrieval.
     trace: TraceDataset, optional
-        **Experimental** The trace dataset containing the trace data.
+        The trace dataset containing the trace data.
     host: str, optional
         The host on which the server runs. It can also be set using environment
         variable `PHOENIX_HOST`, otherwise it defaults to `127.0.0.1`.
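
Given the renamed launch_app signature above, a minimal launch sketch, assuming Inferences keeps Dataset's (dataframe, schema, name) constructor; the dataframe, column names, and schema fields below are illustrative, not taken from this diff:

    import pandas as pd
    import phoenix as px
    from phoenix.inferences.inferences import Inferences  # module path per the diff above

    # A toy dataframe; the column names are made up for this example.
    df = pd.DataFrame({"prediction_id": ["a", "b"], "prediction_label": ["cat", "dog"]})
    schema = px.Schema(
        prediction_id_column_name="prediction_id",
        prediction_label_column_name="prediction_label",
    )
    session = px.launch_app(primary=Inferences(dataframe=df, schema=schema, name="primary"))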
@@ -0,0 +1,30 @@
+import functools
+import warnings
+from typing import Any, Callable, Type, TypeVar
+
+GenericClass = TypeVar("GenericClass", bound=Type[Any])
+CallableType = TypeVar("CallableType", bound=Callable[..., Any])
+
+
+def deprecated_class(message: str) -> Callable[[GenericClass], GenericClass]:
+    def decorator(original_class: GenericClass) -> GenericClass:
+        @functools.wraps(original_class)
+        def new_class(*args: Any, **kwargs: Any) -> Any:
+            warnings.warn(message, DeprecationWarning, stacklevel=2)
+            return original_class(*args, **kwargs)
+
+        return new_class  # type: ignore
+
+    return decorator
+
+
+def deprecated(message: str) -> Callable[[CallableType], CallableType]:
+    def decorator(original_func: CallableType) -> CallableType:
+        @functools.wraps(original_func)
+        def new_func(*args: Any, **kwargs: Any) -> Any:
+            warnings.warn(message, DeprecationWarning, stacklevel=2)
+            return original_func(*args, **kwargs)
+
+        return new_func  # type: ignore
+
+    return decorator
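
A short usage sketch of the new deprecated_class decorator defined above; the class name and message are illustrative, and the module's import path is not shown in this diff:

    import warnings

    @deprecated_class("Dataset is deprecated; use Inferences instead.")
    class Dataset:
        pass

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        Dataset()  # constructing the wrapped class emits a DeprecationWarning
        assert caught[0].category is DeprecationWarning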
phoenix/version.py CHANGED
@@ -1 +1 @@
-__version__ = "3.19.4"
+__version__ = "3.20.0"