arize-phoenix 4.4.4rc6__py3-none-any.whl → 4.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of arize-phoenix might be problematic; see the advisory details in the registry listing for more information.

Files changed (123)
  1. {arize_phoenix-4.4.4rc6.dist-info → arize_phoenix-4.5.0.dist-info}/METADATA +8 -14
  2. {arize_phoenix-4.4.4rc6.dist-info → arize_phoenix-4.5.0.dist-info}/RECORD +58 -122
  3. {arize_phoenix-4.4.4rc6.dist-info → arize_phoenix-4.5.0.dist-info}/WHEEL +1 -1
  4. phoenix/__init__.py +27 -0
  5. phoenix/config.py +7 -42
  6. phoenix/core/model.py +25 -25
  7. phoenix/core/model_schema.py +62 -64
  8. phoenix/core/model_schema_adapter.py +25 -27
  9. phoenix/datetime_utils.py +0 -4
  10. phoenix/db/bulk_inserter.py +14 -54
  11. phoenix/db/insertion/evaluation.py +10 -10
  12. phoenix/db/insertion/helpers.py +14 -17
  13. phoenix/db/insertion/span.py +3 -3
  14. phoenix/db/migrations/versions/cf03bd6bae1d_init.py +28 -2
  15. phoenix/db/models.py +4 -236
  16. phoenix/inferences/fixtures.py +23 -23
  17. phoenix/inferences/inferences.py +7 -7
  18. phoenix/inferences/validation.py +1 -1
  19. phoenix/server/api/context.py +0 -20
  20. phoenix/server/api/dataloaders/__init__.py +0 -20
  21. phoenix/server/api/dataloaders/span_descendants.py +3 -2
  22. phoenix/server/api/routers/v1/__init__.py +2 -77
  23. phoenix/server/api/routers/v1/evaluations.py +13 -8
  24. phoenix/server/api/routers/v1/spans.py +5 -9
  25. phoenix/server/api/routers/v1/traces.py +4 -1
  26. phoenix/server/api/schema.py +303 -2
  27. phoenix/server/api/types/Cluster.py +19 -19
  28. phoenix/server/api/types/Dataset.py +63 -282
  29. phoenix/server/api/types/DatasetRole.py +23 -0
  30. phoenix/server/api/types/Dimension.py +29 -30
  31. phoenix/server/api/types/EmbeddingDimension.py +34 -40
  32. phoenix/server/api/types/Event.py +16 -16
  33. phoenix/server/api/{mutations/export_events_mutations.py → types/ExportEventsMutation.py} +14 -17
  34. phoenix/server/api/types/Model.py +42 -43
  35. phoenix/server/api/types/Project.py +12 -26
  36. phoenix/server/api/types/Span.py +2 -79
  37. phoenix/server/api/types/TimeSeries.py +6 -6
  38. phoenix/server/api/types/Trace.py +4 -15
  39. phoenix/server/api/types/UMAPPoints.py +1 -1
  40. phoenix/server/api/types/node.py +111 -5
  41. phoenix/server/api/types/pagination.py +52 -10
  42. phoenix/server/app.py +49 -103
  43. phoenix/server/main.py +27 -49
  44. phoenix/server/openapi/docs.py +0 -3
  45. phoenix/server/static/index.js +1384 -2390
  46. phoenix/server/templates/index.html +0 -1
  47. phoenix/services.py +15 -15
  48. phoenix/session/client.py +23 -611
  49. phoenix/session/session.py +37 -47
  50. phoenix/trace/exporter.py +9 -14
  51. phoenix/trace/fixtures.py +7 -133
  52. phoenix/trace/schemas.py +2 -1
  53. phoenix/trace/span_evaluations.py +3 -3
  54. phoenix/trace/trace_dataset.py +6 -6
  55. phoenix/version.py +1 -1
  56. phoenix/db/insertion/dataset.py +0 -237
  57. phoenix/db/migrations/types.py +0 -29
  58. phoenix/db/migrations/versions/10460e46d750_datasets.py +0 -291
  59. phoenix/experiments/__init__.py +0 -6
  60. phoenix/experiments/evaluators/__init__.py +0 -29
  61. phoenix/experiments/evaluators/base.py +0 -153
  62. phoenix/experiments/evaluators/code_evaluators.py +0 -99
  63. phoenix/experiments/evaluators/llm_evaluators.py +0 -244
  64. phoenix/experiments/evaluators/utils.py +0 -189
  65. phoenix/experiments/functions.py +0 -616
  66. phoenix/experiments/tracing.py +0 -85
  67. phoenix/experiments/types.py +0 -722
  68. phoenix/experiments/utils.py +0 -9
  69. phoenix/server/api/dataloaders/average_experiment_run_latency.py +0 -54
  70. phoenix/server/api/dataloaders/dataset_example_revisions.py +0 -100
  71. phoenix/server/api/dataloaders/dataset_example_spans.py +0 -43
  72. phoenix/server/api/dataloaders/experiment_annotation_summaries.py +0 -85
  73. phoenix/server/api/dataloaders/experiment_error_rates.py +0 -43
  74. phoenix/server/api/dataloaders/experiment_run_counts.py +0 -42
  75. phoenix/server/api/dataloaders/experiment_sequence_number.py +0 -49
  76. phoenix/server/api/dataloaders/project_by_name.py +0 -31
  77. phoenix/server/api/dataloaders/span_projects.py +0 -33
  78. phoenix/server/api/dataloaders/trace_row_ids.py +0 -39
  79. phoenix/server/api/helpers/dataset_helpers.py +0 -179
  80. phoenix/server/api/input_types/AddExamplesToDatasetInput.py +0 -16
  81. phoenix/server/api/input_types/AddSpansToDatasetInput.py +0 -14
  82. phoenix/server/api/input_types/ClearProjectInput.py +0 -15
  83. phoenix/server/api/input_types/CreateDatasetInput.py +0 -12
  84. phoenix/server/api/input_types/DatasetExampleInput.py +0 -14
  85. phoenix/server/api/input_types/DatasetSort.py +0 -17
  86. phoenix/server/api/input_types/DatasetVersionSort.py +0 -16
  87. phoenix/server/api/input_types/DeleteDatasetExamplesInput.py +0 -13
  88. phoenix/server/api/input_types/DeleteDatasetInput.py +0 -7
  89. phoenix/server/api/input_types/DeleteExperimentsInput.py +0 -9
  90. phoenix/server/api/input_types/PatchDatasetExamplesInput.py +0 -35
  91. phoenix/server/api/input_types/PatchDatasetInput.py +0 -14
  92. phoenix/server/api/mutations/__init__.py +0 -13
  93. phoenix/server/api/mutations/auth.py +0 -11
  94. phoenix/server/api/mutations/dataset_mutations.py +0 -520
  95. phoenix/server/api/mutations/experiment_mutations.py +0 -65
  96. phoenix/server/api/mutations/project_mutations.py +0 -47
  97. phoenix/server/api/openapi/__init__.py +0 -0
  98. phoenix/server/api/openapi/main.py +0 -6
  99. phoenix/server/api/openapi/schema.py +0 -16
  100. phoenix/server/api/queries.py +0 -503
  101. phoenix/server/api/routers/v1/dataset_examples.py +0 -178
  102. phoenix/server/api/routers/v1/datasets.py +0 -965
  103. phoenix/server/api/routers/v1/experiment_evaluations.py +0 -65
  104. phoenix/server/api/routers/v1/experiment_runs.py +0 -96
  105. phoenix/server/api/routers/v1/experiments.py +0 -174
  106. phoenix/server/api/types/AnnotatorKind.py +0 -10
  107. phoenix/server/api/types/CreateDatasetPayload.py +0 -8
  108. phoenix/server/api/types/DatasetExample.py +0 -85
  109. phoenix/server/api/types/DatasetExampleRevision.py +0 -34
  110. phoenix/server/api/types/DatasetVersion.py +0 -14
  111. phoenix/server/api/types/ExampleRevisionInterface.py +0 -14
  112. phoenix/server/api/types/Experiment.py +0 -147
  113. phoenix/server/api/types/ExperimentAnnotationSummary.py +0 -13
  114. phoenix/server/api/types/ExperimentComparison.py +0 -19
  115. phoenix/server/api/types/ExperimentRun.py +0 -91
  116. phoenix/server/api/types/ExperimentRunAnnotation.py +0 -57
  117. phoenix/server/api/types/Inferences.py +0 -80
  118. phoenix/server/api/types/InferencesRole.py +0 -23
  119. phoenix/utilities/json.py +0 -61
  120. phoenix/utilities/re.py +0 -50
  121. {arize_phoenix-4.4.4rc6.dist-info → arize_phoenix-4.5.0.dist-info}/licenses/IP_NOTICE +0 -0
  122. {arize_phoenix-4.4.4rc6.dist-info → arize_phoenix-4.5.0.dist-info}/licenses/LICENSE +0 -0
  123. /phoenix/server/api/{helpers/__init__.py → helpers.py} +0 -0
@@ -1,91 +0,0 @@
1
- from datetime import datetime
2
- from typing import Optional
3
-
4
- import strawberry
5
- from sqlalchemy import select
6
- from strawberry import UNSET
7
- from strawberry.relay import Connection, GlobalID, Node, NodeID
8
- from strawberry.scalars import JSON
9
- from strawberry.types import Info
10
-
11
- from phoenix.db import models
12
- from phoenix.server.api.context import Context
13
- from phoenix.server.api.types.ExperimentRunAnnotation import (
14
- ExperimentRunAnnotation,
15
- to_gql_experiment_run_annotation,
16
- )
17
- from phoenix.server.api.types.pagination import (
18
- ConnectionArgs,
19
- CursorString,
20
- connection_from_list,
21
- )
22
- from phoenix.server.api.types.Trace import Trace
23
-
24
-
25
- @strawberry.type
26
- class ExperimentRun(Node):
27
- id_attr: NodeID[int]
28
- experiment_id: GlobalID
29
- trace_id: Optional[str]
30
- output: Optional[JSON]
31
- start_time: datetime
32
- end_time: datetime
33
- error: Optional[str]
34
-
35
- @strawberry.field
36
- async def annotations(
37
- self,
38
- info: Info[Context, None],
39
- first: Optional[int] = 50,
40
- last: Optional[int] = UNSET,
41
- after: Optional[CursorString] = UNSET,
42
- before: Optional[CursorString] = UNSET,
43
- ) -> Connection[ExperimentRunAnnotation]:
44
- args = ConnectionArgs(
45
- first=first,
46
- after=after if isinstance(after, CursorString) else None,
47
- last=last,
48
- before=before if isinstance(before, CursorString) else None,
49
- )
50
- run_id = self.id_attr
51
- async with info.context.db() as session:
52
- annotations = (
53
- await session.scalars(
54
- select(models.ExperimentRunAnnotation)
55
- .where(models.ExperimentRunAnnotation.experiment_run_id == run_id)
56
- .order_by(models.ExperimentRunAnnotation.id.desc())
57
- )
58
- ).all()
59
- return connection_from_list(
60
- [to_gql_experiment_run_annotation(annotation) for annotation in annotations], args
61
- )
62
-
63
- @strawberry.field
64
- async def trace(self, info: Info) -> Optional[Trace]:
65
- if not self.trace_id:
66
- return None
67
- dataloader = info.context.data_loaders.trace_row_ids
68
- if (trace := await dataloader.load(self.trace_id)) is None:
69
- return None
70
- trace_rowid, project_rowid = trace
71
- return Trace(id_attr=trace_rowid, trace_id=self.trace_id, project_rowid=project_rowid)
72
-
73
-
74
- def to_gql_experiment_run(run: models.ExperimentRun) -> ExperimentRun:
75
- """
76
- Converts an ORM experiment run to a GraphQL ExperimentRun.
77
- """
78
-
79
- from phoenix.server.api.types.Experiment import Experiment
80
-
81
- return ExperimentRun(
82
- id_attr=run.id,
83
- experiment_id=GlobalID(Experiment.__name__, str(run.experiment_id)),
84
- trace_id=trace_id
85
- if (trace := run.trace) and (trace_id := trace.trace_id) is not None
86
- else None,
87
- output=run.output.get("result"),
88
- start_time=run.start_time,
89
- end_time=run.end_time,
90
- error=run.error,
91
- )
@@ -1,57 +0,0 @@
1
- from datetime import datetime
2
- from typing import Optional
3
-
4
- import strawberry
5
- from strawberry import Info
6
- from strawberry.relay import Node, NodeID
7
- from strawberry.scalars import JSON
8
-
9
- from phoenix.db import models
10
- from phoenix.server.api.types.AnnotatorKind import AnnotatorKind
11
- from phoenix.server.api.types.Trace import Trace
12
-
13
-
14
- @strawberry.type
15
- class ExperimentRunAnnotation(Node):
16
- id_attr: NodeID[int]
17
- name: str
18
- annotator_kind: AnnotatorKind
19
- label: Optional[str]
20
- score: Optional[float]
21
- explanation: Optional[str]
22
- error: Optional[str]
23
- metadata: JSON
24
- start_time: datetime
25
- end_time: datetime
26
- trace_id: Optional[str]
27
-
28
- @strawberry.field
29
- async def trace(self, info: Info) -> Optional[Trace]:
30
- if not self.trace_id:
31
- return None
32
- dataloader = info.context.data_loaders.trace_row_ids
33
- if (trace := await dataloader.load(self.trace_id)) is None:
34
- return None
35
- trace_row_id, project_row_id = trace
36
- return Trace(id_attr=trace_row_id, trace_id=self.trace_id, project_rowid=project_row_id)
37
-
38
-
39
- def to_gql_experiment_run_annotation(
40
- annotation: models.ExperimentRunAnnotation,
41
- ) -> ExperimentRunAnnotation:
42
- """
43
- Converts an ORM experiment run annotation to a GraphQL ExperimentRunAnnotation.
44
- """
45
- return ExperimentRunAnnotation(
46
- id_attr=annotation.id,
47
- name=annotation.name,
48
- annotator_kind=AnnotatorKind(annotation.annotator_kind),
49
- label=annotation.label,
50
- score=annotation.score,
51
- explanation=annotation.explanation,
52
- error=annotation.error,
53
- metadata=annotation.metadata_,
54
- start_time=annotation.start_time,
55
- end_time=annotation.end_time,
56
- trace_id=annotation.trace_id,
57
- )
@@ -1,80 +0,0 @@
1
- from datetime import datetime
2
- from typing import Iterable, List, Optional, Set, Union
3
-
4
- import strawberry
5
- from strawberry.scalars import ID
6
- from strawberry.unset import UNSET
7
-
8
- import phoenix.core.model_schema as ms
9
- from phoenix.core.model_schema import FEATURE, TAG, ScalarDimension
10
-
11
- from ..input_types.DimensionInput import DimensionInput
12
- from .Dimension import Dimension, to_gql_dimension
13
- from .Event import Event, create_event, create_event_id, parse_event_ids_by_inferences_role
14
- from .InferencesRole import AncillaryInferencesRole, InferencesRole
15
-
16
-
17
- @strawberry.type
18
- class Inferences:
19
- start_time: datetime = strawberry.field(description="The start bookend of the data")
20
- end_time: datetime = strawberry.field(description="The end bookend of the data")
21
- record_count: int = strawberry.field(description="The record count of the data")
22
- inferences: strawberry.Private[ms.Inferences]
23
- inferences_role: strawberry.Private[Union[InferencesRole, AncillaryInferencesRole]]
24
- model: strawberry.Private[ms.Model]
25
-
26
- # type ignored here to get around the following: https://github.com/strawberry-graphql/strawberry/issues/1929
27
- @strawberry.field(description="Returns a human friendly name for the inferences.") # type: ignore
28
- def name(self) -> str:
29
- return self.inferences.display_name
30
-
31
- @strawberry.field
32
- def events(
33
- self,
34
- event_ids: List[ID],
35
- dimensions: Optional[List[DimensionInput]] = UNSET,
36
- ) -> List[Event]:
37
- """
38
- Returns events for specific event IDs and dimensions. If no input
39
- dimensions are provided, returns all features and tags.
40
- """
41
- if not event_ids:
42
- return []
43
- row_ids = parse_event_ids_by_inferences_role(event_ids)
44
- if len(row_ids) > 1 or self.inferences_role not in row_ids:
45
- raise ValueError("eventIds contains IDs from incorrect inferences.")
46
- events = self.inferences[row_ids[self.inferences_role]]
47
- requested_gql_dimensions = _get_requested_features_and_tags(
48
- core_dimensions=self.model.scalar_dimensions,
49
- requested_dimension_names=set(dim.name for dim in dimensions)
50
- if isinstance(dimensions, list)
51
- else None,
52
- )
53
- return [
54
- create_event(
55
- event_id=create_event_id(event.id.row_id, self.inferences_role),
56
- event=event,
57
- dimensions=requested_gql_dimensions,
58
- is_document_record=self.inferences_role is AncillaryInferencesRole.corpus,
59
- )
60
- for event in events
61
- ]
62
-
63
-
64
- def _get_requested_features_and_tags(
65
- core_dimensions: Iterable[ScalarDimension],
66
- requested_dimension_names: Optional[Set[str]] = UNSET,
67
- ) -> List[Dimension]:
68
- """
69
- Returns requested features and tags as a list of strawberry Inferences. If no
70
- dimensions are explicitly requested, returns all features and tags.
71
- """
72
- requested_features_and_tags: List[Dimension] = []
73
- for id, dim in enumerate(core_dimensions):
74
- is_requested = (
75
- not isinstance(requested_dimension_names, Set)
76
- ) or dim.name in requested_dimension_names
77
- is_feature_or_tag = dim.role in (FEATURE, TAG)
78
- if is_requested and is_feature_or_tag:
79
- requested_features_and_tags.append(to_gql_dimension(id_attr=id, dimension=dim))
80
- return requested_features_and_tags
@@ -1,23 +0,0 @@
1
- from enum import Enum
2
- from typing import Dict, Union
3
-
4
- import strawberry
5
-
6
- from phoenix.core.model_schema import PRIMARY, REFERENCE
7
-
8
-
9
- @strawberry.enum
10
- class InferencesRole(Enum):
11
- primary = PRIMARY
12
- reference = REFERENCE
13
-
14
-
15
- class AncillaryInferencesRole(Enum):
16
- corpus = "InferencesRole.CORPUS"
17
-
18
-
19
- STR_TO_INFEREENCES_ROLE: Dict[str, Union[InferencesRole, AncillaryInferencesRole]] = {
20
- str(InferencesRole.primary.value): InferencesRole.primary,
21
- str(InferencesRole.reference.value): InferencesRole.reference,
22
- str(AncillaryInferencesRole.corpus.value): AncillaryInferencesRole.corpus,
23
- }
phoenix/utilities/json.py DELETED
@@ -1,61 +0,0 @@
1
- import dataclasses
2
- import datetime
3
- from enum import Enum
4
- from pathlib import Path
5
- from typing import Any, Mapping, Sequence, Union, get_args, get_origin
6
-
7
- import numpy as np
8
-
9
-
10
- def jsonify(obj: Any) -> Any:
11
- """
12
- Coerce object to be json serializable.
13
- """
14
- if isinstance(obj, Enum):
15
- return jsonify(obj.value)
16
- if isinstance(obj, (str, int, float, bool)) or obj is None:
17
- return obj
18
- if isinstance(obj, (list, set, frozenset, Sequence)):
19
- return [jsonify(v) for v in obj]
20
- if isinstance(obj, (dict, Mapping)):
21
- return {jsonify(k): jsonify(v) for k, v in obj.items()}
22
- if dataclasses.is_dataclass(obj):
23
- return {
24
- k: jsonify(v)
25
- for field in dataclasses.fields(obj)
26
- if not (
27
- (v := getattr(obj, (k := field.name))) is None
28
- and get_origin(field) is Union
29
- and type(None) in get_args(field)
30
- )
31
- }
32
- if isinstance(obj, (datetime.date, datetime.datetime, datetime.time)):
33
- return obj.isoformat()
34
- if isinstance(obj, datetime.timedelta):
35
- return obj.total_seconds()
36
- if isinstance(obj, Path):
37
- return str(obj)
38
- if isinstance(obj, BaseException):
39
- return str(obj)
40
- if isinstance(obj, np.ndarray):
41
- return [jsonify(v) for v in obj]
42
- if hasattr(obj, "__float__"):
43
- return float(obj)
44
- if hasattr(obj, "model_dump") and callable(obj.model_dump):
45
- # pydantic v2
46
- try:
47
- assert isinstance(d := obj.model_dump(), dict)
48
- except BaseException:
49
- pass
50
- else:
51
- return jsonify(d)
52
- if hasattr(obj, "dict") and callable(obj.dict):
53
- # pydantic v1
54
- try:
55
- assert isinstance(d := obj.dict(), dict)
56
- except BaseException:
57
- pass
58
- else:
59
- return jsonify(d)
60
- cls = obj.__class__
61
- return f"<{cls.__module__}.{cls.__name__} object>"
phoenix/utilities/re.py DELETED
@@ -1,50 +0,0 @@
1
- from logging import getLogger
2
- from re import compile, split
3
- from typing import Dict, List
4
- from urllib.parse import unquote
5
-
6
- _logger = getLogger(__name__)
7
-
8
- # Optional whitespace
9
- _OWS = r"[ \t]*"
10
- # A key contains printable US-ASCII characters except: SP and "(),/:;<=>?@[\]{}
11
- _KEY_FORMAT = r"[\x21\x23-\x27\x2a\x2b\x2d\x2e\x30-\x39\x41-\x5a\x5e-\x7a\x7c\x7e]+"
12
- # A value contains a URL-encoded UTF-8 string. The encoded form can contain any
13
- # printable US-ASCII characters (0x20-0x7f) other than SP, DEL, and ",;/
14
- _VALUE_FORMAT = r"[\x21\x23-\x2b\x2d-\x3a\x3c-\x5b\x5d-\x7e]*"
15
- # A key-value is key=value, with optional whitespace surrounding key and value
16
- _KEY_VALUE_FORMAT = rf"{_OWS}{_KEY_FORMAT}{_OWS}={_OWS}{_VALUE_FORMAT}{_OWS}"
17
-
18
- _HEADER_PATTERN = compile(_KEY_VALUE_FORMAT)
19
- _DELIMITER_PATTERN = compile(r"[ \t]*,[ \t]*")
20
-
21
-
22
- def parse_env_headers(s: str) -> Dict[str, str]:
23
- """
24
- Parse ``s``, which is a ``str`` instance containing HTTP headers encoded
25
- for use in ENV variables per the W3C Baggage HTTP header format at
26
- https://www.w3.org/TR/baggage/#baggage-http-header-format, except that
27
- additional semi-colon delimited metadata is not supported.
28
-
29
- src: https://github.com/open-telemetry/opentelemetry-python/blob/2d5cd58f33bd8a16f45f30be620a96699bc14297/opentelemetry-api/src/opentelemetry/util/re.py#L52
30
- """
31
- headers: Dict[str, str] = {}
32
- headers_list: List[str] = split(_DELIMITER_PATTERN, s)
33
- for header in headers_list:
34
- if not header: # empty string
35
- continue
36
- match = _HEADER_PATTERN.fullmatch(header.strip())
37
- if not match:
38
- _logger.warning(
39
- "Header format invalid! Header values in environment variables must be "
40
- "URL encoded: %s",
41
- header,
42
- )
43
- continue
44
- # value may contain any number of `=`
45
- name, value = match.string.split("=", 1)
46
- name = unquote(name).strip().lower()
47
- value = unquote(value).strip()
48
- headers[name] = value
49
-
50
- return headers
File without changes