arize-phoenix 3.9.0__py3-none-any.whl → 3.10.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

This version of arize-phoenix might be problematic.

phoenix/session/client.py CHANGED
@@ -11,7 +11,12 @@ from pyarrow import ArrowInvalid
 from requests import Session
 
 import phoenix as px
-from phoenix.config import get_env_collector_endpoint, get_env_host, get_env_port
+from phoenix.config import (
+    get_env_collector_endpoint,
+    get_env_host,
+    get_env_port,
+    get_env_project_name,
+)
 from phoenix.session.data_extractor import TraceDataExtractor
 from phoenix.trace import Evaluations
 from phoenix.trace.dsl import SpanQuery
@@ -59,6 +64,7 @@ class Client(TraceDataExtractor):
         root_spans_only: Optional[bool] = None,
         project_name: Optional[str] = None,
     ) -> Optional[Union[pd.DataFrame, List[pd.DataFrame]]]:
+        project_name = project_name or get_env_project_name()
         if not queries:
             queries = (SpanQuery(),)
         if self._use_active_session_if_available and (session := px.active_session()):
@@ -102,6 +108,7 @@ class Client(TraceDataExtractor):
         self,
         project_name: Optional[str] = None,
     ) -> List[Evaluations]:
+        project_name = project_name or get_env_project_name()
         if self._use_active_session_if_available and (session := px.active_session()):
             return session.get_evaluations(project_name=project_name)
         response = self._session.get(
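
The net effect for `Client` callers: when `project_name` is omitted, it now resolves through `get_env_project_name()` rather than always targeting the default project. A minimal usage sketch (assuming, per the docstrings added below, that the helper reads the `PHOENIX_PROJECT_NAME` environment variable and that `Client` is exported at the package root as `px.Client`):

```python
import os

import phoenix as px

# Assumed for illustration: PHOENIX_PROJECT_NAME is the variable read by
# get_env_project_name(), falling back to "default" when unset.
os.environ["PHOENIX_PROJECT_NAME"] = "my-experiment"

client = px.Client()
# No project_name argument: the client now fills it in from the environment.
evaluations = client.get_evaluations()
```
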
@@ -1,6 +1,7 @@
 import json
 import logging
 import os
+import warnings
 from abc import ABC, abstractmethod
 from collections import UserList
 from datetime import datetime
@@ -24,12 +25,14 @@ import pandas as pd
 from phoenix.config import (
     ENV_NOTEBOOK_ENV,
     ENV_PHOENIX_COLLECTOR_ENDPOINT,
+    ENV_PHOENIX_HOST,
+    ENV_PHOENIX_PORT,
     get_env_host,
     get_env_port,
+    get_env_project_name,
     get_exported_files,
 )
 from phoenix.core.model_schema_adapter import create_model_from_datasets
-from phoenix.core.project import DEFAULT_PROJECT_NAME
 from phoenix.core.traces import Traces
 from phoenix.datasets.dataset import EMPTY_DATASET, Dataset
 from phoenix.pointcloud.umap_parameters import get_umap_parameters
@@ -331,8 +334,34 @@ class ThreadSession(Session):
         root_spans_only: Optional[bool] = None,
         project_name: Optional[str] = None,
     ) -> Optional[Union[pd.DataFrame, List[pd.DataFrame]]]:
+        """
+        Queries the spans in the project based on the provided parameters.
+
+        Parameters
+        ----------
+        queries : *SpanQuery
+            Variable-length argument list of SpanQuery objects representing
+            the queries to be executed.
+
+        start_time : datetime, optional
+            datetime representing the start time of the query.
+
+        stop_time : datetime, optional
+            datetime representing the stop time of the query.
+
+        root_spans_only : boolean, optional
+            whether to include only root spans in the results.
+
+        project_name : string, optional
+            name of the project to query. Defaults to the project name set
+            in the environment variable `PHOENIX_PROJECT_NAME` or 'default' if not set.
+
+        Returns:
+        results : DataFrame
+            DataFrame or list of DataFrames containing the query results.
+        """
         if not (traces := self.traces) or not (
-            project := traces.get_project(project_name or DEFAULT_PROJECT_NAME)
+            project := traces.get_project(project_name or get_env_project_name())
         ):
             return None
         if not queries:
@@ -362,9 +391,24 @@ class ThreadSession(Session):
         self,
         project_name: Optional[str] = None,
     ) -> List[Evaluations]:
-        if not (traces := self.traces) or not (
-            project := traces.get_project(project_name or DEFAULT_PROJECT_NAME)
-        ):
+        """
+        Get the evaluations for a project.
+
+        Parameters
+        ----------
+        project_name : str, optional
+            The name of the project. If not provided, the project name set
+            in the environment variable `PHOENIX_PROJECT_NAME` will be used.
+            Otherwise, 'default' will be used.
+
+        Returns
+        -------
+        evaluations : List[Evaluations]
+            A list of evaluations for the specified project.
+
+        """
+        project_name = project_name or get_env_project_name()
+        if not (traces := self.traces) or not (project := traces.get_project(project_name)):
             return []
         return project.export_evaluations()
 
@@ -454,6 +498,29 @@ def launch_app(
     else:
         nb_env = notebook_environment
 
+    if port is not None:
+        warning_message = (
+            "❗️ The launch_app `port` parameter is deprecated and "
+            "will be removed in a future release. "
+            f"Use the `{ENV_PHOENIX_PORT}` environment variable instead."
+        )
+        print(warning_message)
+        warnings.warn(
+            warning_message,
+            DeprecationWarning,
+        )
+    if host is not None:
+        warning_message = (
+            "❗️ The launch_app `host` parameter is deprecated and "
+            "will be removed in a future release. "
+            f"Use the `{ENV_PHOENIX_HOST}` environment variable instead."
+        )
+        print(warning_message)
+        warnings.warn(
+            warning_message,
+            DeprecationWarning,
+        )
+
     host = host or get_env_host()
     port = port or get_env_port()
 
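
For callers, the old keyword arguments still work but now emit a warning; setting the corresponding environment variables is the forward-compatible path. A before/after sketch (assuming `ENV_PHOENIX_HOST` and `ENV_PHOENIX_PORT` resolve to the variable names `PHOENIX_HOST` and `PHOENIX_PORT`):

```python
import os

import phoenix as px

# Deprecated in 3.10.0: prints the notice above and emits a DeprecationWarning.
# px.launch_app(host="127.0.0.1", port=6007)

# Preferred: configure the server address before launching.
os.environ["PHOENIX_HOST"] = "127.0.0.1"  # assumed value of ENV_PHOENIX_HOST
os.environ["PHOENIX_PORT"] = "6007"       # assumed value of ENV_PHOENIX_PORT
px.launch_app()
```
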
@@ -4,9 +4,12 @@ from importlib.util import find_spec
 from typing import Any
 
 from openinference.instrumentation.langchain import LangChainInstrumentor as Instrumentor
+from openinference.semconv.resource import ResourceAttributes
 from opentelemetry.sdk import trace as trace_sdk
+from opentelemetry.sdk.resources import Resource
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
 
+from phoenix.config import get_env_project_name
 from phoenix.trace.exporter import _OpenInferenceExporter
 from phoenix.trace.tracer import _show_deprecation_warnings
 
@@ -26,6 +29,8 @@ class LangChainInstrumentor(Instrumentor):
         super().__init__()
 
     def instrument(self) -> None:
-        tracer_provider = trace_sdk.TracerProvider()
+        tracer_provider = trace_sdk.TracerProvider(
+            resource=Resource({ResourceAttributes.PROJECT_NAME: get_env_project_name()})
+        )
         tracer_provider.add_span_processor(SimpleSpanProcessor(_OpenInferenceExporter()))
         super().instrument(skip_dep_check=True, tracer_provider=tracer_provider)
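
The same change is applied to every bundled instrumentor (LangChain here, LlamaIndex and OpenAI below): the `TracerProvider` is built with a `Resource` carrying the project name, so exported spans land in the configured project. A standalone sketch of that provider construction, using a console exporter so the resource attribute is visible (the `"langchain-demo"` fallback is illustrative only):

```python
import os

from openinference.semconv.resource import ResourceAttributes
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

project_name = os.environ.get("PHOENIX_PROJECT_NAME", "langchain-demo")

# Same pattern as the instrumentor: bake the project name into the provider's resource.
tracer_provider = trace_sdk.TracerProvider(
    resource=Resource({ResourceAttributes.PROJECT_NAME: project_name})
)
tracer_provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))

with tracer_provider.get_tracer(__name__).start_as_current_span("demo"):
    pass  # the printed span includes the project name among its resource attributes
```
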
@@ -1,12 +1,14 @@
 import logging
 from importlib.metadata import PackageNotFoundError, version
-from importlib.util import find_spec
-from typing import Any
+from typing import Any, Optional, Tuple
 
+from openinference.semconv.resource import ResourceAttributes
 from opentelemetry import trace as trace_api
 from opentelemetry.sdk import trace as trace_sdk
+from opentelemetry.sdk.resources import Resource
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
 
+from phoenix.config import get_env_project_name
 from phoenix.trace.errors import IncompatibleLibraryVersionError
 from phoenix.trace.exporter import _OpenInferenceExporter
 from phoenix.trace.tracer import _show_deprecation_warnings
@@ -18,39 +20,61 @@ INSTRUMENTATION_MODERN_VERSION = (1, 0, 0)
 
 
 def _check_instrumentation_compatibility() -> bool:
-    if find_spec("llama_index") is None:
-        raise PackageNotFoundError("Missing `llama-index`. Install with `pip install llama-index`.")
-    # split the version string into a tuple of integers
-    llama_index_version_str = version("llama-index")
-    llama_index_version = tuple(map(int, llama_index_version_str.split(".")[:3]))
+    llama_index_version_str = _get_version_if_installed("llama-index")
+    llama_index_installed = llama_index_version_str is not None
+    llama_index_core_version_str = _get_version_if_installed("llama-index-core")
+    llama_index_core_installed = modern_llama_index_installed = (
+        llama_index_core_version_str is not None
+    )
     instrumentation_version_str = version("openinference-instrumentation-llama-index")
-    instrumentation_version = tuple(map(int, instrumentation_version_str.split(".")[:3]))
-    # check if the llama_index version is compatible with the instrumentation version
-    if (
-        llama_index_version < LLAMA_INDEX_MODERN_VERSION
+    instrumentation_version = _parse_semantic_version(instrumentation_version_str)
+
+    if not llama_index_installed and not llama_index_core_installed:
+        raise PackageNotFoundError(
+            "Missing `llama_index`. "
+            "Install with `pip install llama-index` or "
+            "`pip install llama-index-core` for a minimal installation."
+        )
+    elif modern_llama_index_installed and instrumentation_version < INSTRUMENTATION_MODERN_VERSION:
+        raise IncompatibleLibraryVersionError(
+            f"llama-index-core v{llama_index_core_version_str} is not compatible with "
+            f"openinference-instrumentation-llama-index v{instrumentation_version_str}. "
+            "Please upgrade openinference-instrumentation-llama-index to at least 1.0.0 via "
+            "`pip install 'openinference-instrumentation-llama-index>=1.0.0'`."
+        )
+    elif (
+        llama_index_installed
+        and llama_index_version_str
+        and _parse_semantic_version(llama_index_version_str) < LLAMA_INDEX_MODERN_VERSION
         and instrumentation_version >= INSTRUMENTATION_MODERN_VERSION
     ):
         raise IncompatibleLibraryVersionError(
             f"llama-index v{llama_index_version_str} is not compatible with "
-            f"openinference-instrumentation-llama-index v{instrumentation_version_str}."
+            f"openinference-instrumentation-llama-index v{instrumentation_version_str}. "
             "Please either migrate llama-index to at least 0.10.0 or downgrade "
             "openinference-instrumentation-llama-index via "
             "`pip install 'openinference-instrumentation-llama-index<1.0.0'`."
         )
-    elif (
-        llama_index_version >= LLAMA_INDEX_MODERN_VERSION
-        and instrumentation_version < INSTRUMENTATION_MODERN_VERSION
-    ):
-        raise IncompatibleLibraryVersionError(
-            f"llama-index v{llama_index_version_str} is not compatible with "
-            f"openinference-instrumentation-llama-index v{instrumentation_version_str}."
-            "Please upgrade openinference-instrumentation-llama-index to at least 1.0.0"
-            "`pip install 'openinference-instrumentation-llama-index>=1.0.0'`."
-        )
-    # if the versions are compatible, return True
     return True
 
 
+def _get_version_if_installed(package_name: str) -> Optional[str]:
+    """
+    Gets the version of the package if it is installed, otherwise, returns None.
+    """
+    try:
+        return version(package_name)
+    except PackageNotFoundError:
+        return None
+
+
+def _parse_semantic_version(semver_string: str) -> Tuple[int, ...]:
+    """
+    Parse a semantic version string into a tuple of integers.
+    """
+    return tuple(map(int, semver_string.split(".")[:3]))
+
+
 if _check_instrumentation_compatibility():
     from openinference.instrumentation.llama_index._callback import (
         OpenInferenceTraceCallbackHandler as _OpenInferenceTraceCallbackHandler,
@@ -72,6 +96,8 @@ class OpenInferenceTraceCallbackHandler(_OpenInferenceTraceCallbackHandler):
 
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         _show_deprecation_warnings(self, *args, **kwargs)
-        tracer_provider = trace_sdk.TracerProvider()
+        tracer_provider = trace_sdk.TracerProvider(
+            resource=Resource({ResourceAttributes.PROJECT_NAME: get_env_project_name()})
+        )
         tracer_provider.add_span_processor(SimpleSpanProcessor(_OpenInferenceExporter()))
         super().__init__(trace_api.get_tracer(__name__, __version__, tracer_provider))
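
The refactored compatibility check above gates on tuple comparisons produced by `_parse_semantic_version`. A small sketch of how that gate behaves (the 0.10.0 threshold is taken from the error messages in the diff):

```python
from typing import Tuple

LLAMA_INDEX_MODERN_VERSION = (0, 10, 0)  # threshold quoted in the error messages


def _parse_semantic_version(semver_string: str) -> Tuple[int, ...]:
    # Keep only the first three numeric components, e.g. "0.10.3" -> (0, 10, 3).
    return tuple(map(int, semver_string.split(".")[:3]))


assert _parse_semantic_version("0.9.48") < LLAMA_INDEX_MODERN_VERSION   # legacy llama-index
assert _parse_semantic_version("0.10.3") >= LLAMA_INDEX_MODERN_VERSION  # modern llama-index
```
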
@@ -4,9 +4,12 @@ from importlib.util import find_spec
 from typing import Any
 
 from openinference.instrumentation.openai import OpenAIInstrumentor as Instrumentor
+from openinference.semconv.resource import ResourceAttributes
 from opentelemetry.sdk import trace as trace_sdk
+from opentelemetry.sdk.resources import Resource
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
 
+from phoenix.config import get_env_project_name
 from phoenix.trace.exporter import _OpenInferenceExporter
 from phoenix.trace.tracer import _show_deprecation_warnings
 
@@ -21,6 +24,8 @@ class OpenAIInstrumentor(Instrumentor):
         super().__init__()
 
     def instrument(self) -> None:
-        tracer_provider = trace_sdk.TracerProvider()
+        tracer_provider = trace_sdk.TracerProvider(
+            resource=Resource({ResourceAttributes.PROJECT_NAME: get_env_project_name()})
+        )
         tracer_provider.add_span_processor(SimpleSpanProcessor(_OpenInferenceExporter()))
         super().instrument(skip_dep_check=True, tracer_provider=tracer_provider)
phoenix/version.py CHANGED
@@ -1 +1 @@
-__version__ = "3.9.0"
+__version__ = "3.10.0"
@@ -1,24 +0,0 @@
-from datetime import datetime
-from typing import Optional
-
-import strawberry
-
-
-@strawberry.type
-class DatasetInfo:
-    start_time: datetime = strawberry.field(description="The start bookend of the data")
-    end_time: datetime = strawberry.field(description="The end bookend of the data")
-    record_count: int = strawberry.field(description="The record count of the data")
-
-
-@strawberry.type
-class TraceDatasetInfo(DatasetInfo):
-    token_count_total: int = strawberry.field(
-        description="Count of total (prompt + completion) tokens in the trace data"
-    )
-    latency_ms_p50: Optional[float] = strawberry.field(
-        description="Root span latency p50 quantile in milliseconds"
-    )
-    latency_ms_p99: Optional[float] = strawberry.field(
-        description="Root span latency p99 quantile in milliseconds"
-    )