arize-phoenix 4.4.2__py3-none-any.whl → 4.4.4rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111)
  1. {arize_phoenix-4.4.2.dist-info → arize_phoenix-4.4.4rc0.dist-info}/METADATA +12 -11
  2. {arize_phoenix-4.4.2.dist-info → arize_phoenix-4.4.4rc0.dist-info}/RECORD +110 -57
  3. phoenix/__init__.py +0 -27
  4. phoenix/config.py +21 -7
  5. phoenix/core/model.py +25 -25
  6. phoenix/core/model_schema.py +66 -64
  7. phoenix/core/model_schema_adapter.py +27 -25
  8. phoenix/datasets/__init__.py +0 -0
  9. phoenix/datasets/evaluators.py +275 -0
  10. phoenix/datasets/experiments.py +469 -0
  11. phoenix/datasets/tracing.py +66 -0
  12. phoenix/datasets/types.py +212 -0
  13. phoenix/db/bulk_inserter.py +54 -14
  14. phoenix/db/insertion/dataset.py +234 -0
  15. phoenix/db/insertion/evaluation.py +6 -6
  16. phoenix/db/insertion/helpers.py +13 -2
  17. phoenix/db/migrations/types.py +29 -0
  18. phoenix/db/migrations/versions/10460e46d750_datasets.py +291 -0
  19. phoenix/db/migrations/versions/cf03bd6bae1d_init.py +2 -28
  20. phoenix/db/models.py +230 -3
  21. phoenix/inferences/fixtures.py +23 -23
  22. phoenix/inferences/inferences.py +7 -7
  23. phoenix/inferences/validation.py +1 -1
  24. phoenix/metrics/binning.py +2 -2
  25. phoenix/server/api/context.py +16 -0
  26. phoenix/server/api/dataloaders/__init__.py +16 -0
  27. phoenix/server/api/dataloaders/dataset_example_revisions.py +100 -0
  28. phoenix/server/api/dataloaders/dataset_example_spans.py +43 -0
  29. phoenix/server/api/dataloaders/experiment_annotation_summaries.py +85 -0
  30. phoenix/server/api/dataloaders/experiment_error_rates.py +43 -0
  31. phoenix/server/api/dataloaders/experiment_sequence_number.py +49 -0
  32. phoenix/server/api/dataloaders/project_by_name.py +31 -0
  33. phoenix/server/api/dataloaders/span_descendants.py +2 -3
  34. phoenix/server/api/dataloaders/span_projects.py +33 -0
  35. phoenix/server/api/dataloaders/trace_row_ids.py +39 -0
  36. phoenix/server/api/helpers/dataset_helpers.py +178 -0
  37. phoenix/server/api/input_types/AddExamplesToDatasetInput.py +16 -0
  38. phoenix/server/api/input_types/AddSpansToDatasetInput.py +14 -0
  39. phoenix/server/api/input_types/CreateDatasetInput.py +12 -0
  40. phoenix/server/api/input_types/DatasetExampleInput.py +14 -0
  41. phoenix/server/api/input_types/DatasetSort.py +17 -0
  42. phoenix/server/api/input_types/DatasetVersionSort.py +16 -0
  43. phoenix/server/api/input_types/DeleteDatasetExamplesInput.py +13 -0
  44. phoenix/server/api/input_types/DeleteDatasetInput.py +7 -0
  45. phoenix/server/api/input_types/DeleteExperimentsInput.py +9 -0
  46. phoenix/server/api/input_types/PatchDatasetExamplesInput.py +35 -0
  47. phoenix/server/api/input_types/PatchDatasetInput.py +14 -0
  48. phoenix/server/api/mutations/__init__.py +13 -0
  49. phoenix/server/api/mutations/auth.py +11 -0
  50. phoenix/server/api/mutations/dataset_mutations.py +520 -0
  51. phoenix/server/api/mutations/experiment_mutations.py +65 -0
  52. phoenix/server/api/{types/ExportEventsMutation.py → mutations/export_events_mutations.py} +17 -14
  53. phoenix/server/api/mutations/project_mutations.py +42 -0
  54. phoenix/server/api/queries.py +503 -0
  55. phoenix/server/api/routers/v1/__init__.py +77 -2
  56. phoenix/server/api/routers/v1/dataset_examples.py +178 -0
  57. phoenix/server/api/routers/v1/datasets.py +861 -0
  58. phoenix/server/api/routers/v1/evaluations.py +4 -2
  59. phoenix/server/api/routers/v1/experiment_evaluations.py +65 -0
  60. phoenix/server/api/routers/v1/experiment_runs.py +108 -0
  61. phoenix/server/api/routers/v1/experiments.py +174 -0
  62. phoenix/server/api/routers/v1/spans.py +3 -1
  63. phoenix/server/api/routers/v1/traces.py +1 -4
  64. phoenix/server/api/schema.py +2 -303
  65. phoenix/server/api/types/AnnotatorKind.py +10 -0
  66. phoenix/server/api/types/Cluster.py +19 -19
  67. phoenix/server/api/types/CreateDatasetPayload.py +8 -0
  68. phoenix/server/api/types/Dataset.py +282 -63
  69. phoenix/server/api/types/DatasetExample.py +85 -0
  70. phoenix/server/api/types/DatasetExampleRevision.py +34 -0
  71. phoenix/server/api/types/DatasetVersion.py +14 -0
  72. phoenix/server/api/types/Dimension.py +30 -29
  73. phoenix/server/api/types/EmbeddingDimension.py +40 -34
  74. phoenix/server/api/types/Event.py +16 -16
  75. phoenix/server/api/types/ExampleRevisionInterface.py +14 -0
  76. phoenix/server/api/types/Experiment.py +135 -0
  77. phoenix/server/api/types/ExperimentAnnotationSummary.py +13 -0
  78. phoenix/server/api/types/ExperimentComparison.py +19 -0
  79. phoenix/server/api/types/ExperimentRun.py +91 -0
  80. phoenix/server/api/types/ExperimentRunAnnotation.py +57 -0
  81. phoenix/server/api/types/Inferences.py +80 -0
  82. phoenix/server/api/types/InferencesRole.py +23 -0
  83. phoenix/server/api/types/Model.py +43 -42
  84. phoenix/server/api/types/Project.py +26 -12
  85. phoenix/server/api/types/Segments.py +1 -1
  86. phoenix/server/api/types/Span.py +78 -2
  87. phoenix/server/api/types/TimeSeries.py +6 -6
  88. phoenix/server/api/types/Trace.py +15 -4
  89. phoenix/server/api/types/UMAPPoints.py +1 -1
  90. phoenix/server/api/types/node.py +5 -111
  91. phoenix/server/api/types/pagination.py +10 -52
  92. phoenix/server/app.py +99 -49
  93. phoenix/server/main.py +49 -27
  94. phoenix/server/openapi/docs.py +3 -0
  95. phoenix/server/static/index.js +2246 -1368
  96. phoenix/server/templates/index.html +1 -0
  97. phoenix/services.py +15 -15
  98. phoenix/session/client.py +316 -21
  99. phoenix/session/session.py +47 -37
  100. phoenix/trace/exporter.py +14 -9
  101. phoenix/trace/fixtures.py +133 -7
  102. phoenix/trace/span_evaluations.py +3 -3
  103. phoenix/trace/trace_dataset.py +6 -6
  104. phoenix/utilities/json.py +61 -0
  105. phoenix/utilities/re.py +50 -0
  106. phoenix/version.py +1 -1
  107. phoenix/server/api/types/DatasetRole.py +0 -23
  108. {arize_phoenix-4.4.2.dist-info → arize_phoenix-4.4.4rc0.dist-info}/WHEEL +0 -0
  109. {arize_phoenix-4.4.2.dist-info → arize_phoenix-4.4.4rc0.dist-info}/licenses/IP_NOTICE +0 -0
  110. {arize_phoenix-4.4.2.dist-info → arize_phoenix-4.4.4rc0.dist-info}/licenses/LICENSE +0 -0
  111. /phoenix/server/api/{helpers.py → helpers/__init__.py} +0 -0
@@ -9,7 +9,7 @@ from typing import (
9
9
  from aioitertools.itertools import groupby
10
10
  from sqlalchemy import select
11
11
  from sqlalchemy.ext.asyncio import AsyncSession
12
- from sqlalchemy.orm import contains_eager
12
+ from sqlalchemy.orm import joinedload
13
13
  from strawberry.dataloader import DataLoader
14
14
  from typing_extensions import TypeAlias
15
15
 
@@ -52,8 +52,7 @@ class SpanDescendantsDataLoader(DataLoader[Key, Result]):
52
52
  stmt = (
53
53
  select(descendant_ids.c[root_id_label], models.Span)
54
54
  .join(descendant_ids, models.Span.id == descendant_ids.c.id)
55
- .join(models.Trace)
56
- .options(contains_eager(models.Span.trace))
55
+ .options(joinedload(models.Span.trace, innerjoin=True).load_only(models.Trace.trace_id))
57
56
  .order_by(descendant_ids.c[root_id_label])
58
57
  )
59
58
  results: Dict[SpanId, Result] = {key: [] for key in keys}
@@ -0,0 +1,33 @@
1
+ from typing import AsyncContextManager, Callable, List, Union
2
+
3
+ from sqlalchemy import select
4
+ from sqlalchemy.ext.asyncio import AsyncSession
5
+ from strawberry.dataloader import DataLoader
6
+ from typing_extensions import TypeAlias
7
+
8
+ from phoenix.db import models
9
+
10
# Type aliases for the dataloader contract: keys are span row IDs and each
# result is the Project ORM row the span belongs to.
SpanID: TypeAlias = int
Key: TypeAlias = SpanID
Result: TypeAlias = models.Project
13
+
14
+
15
class SpanProjectsDataLoader(DataLoader[Key, Result]):
    """
    Batch-loads the Project each span belongs to by joining
    Span -> Trace -> Project in a single streamed query.
    """

    def __init__(self, db: Callable[[], AsyncContextManager[AsyncSession]]) -> None:
        super().__init__(load_fn=self._load_fn)
        self._db = db

    async def _load_fn(self, keys: List[Key]) -> List[Union[Result, ValueError]]:
        # Deduplicate only for the query; the returned list must follow `keys`.
        span_ids = list(set(keys))
        async with self._db() as session:
            projects = {
                span_id: project
                async for span_id, project in await session.stream(
                    select(models.Span.id, models.Project)
                    .select_from(models.Span)
                    .join(models.Trace, models.Span.trace_rowid == models.Trace.id)
                    .join(models.Project, models.Trace.project_rowid == models.Project.id)
                    .where(models.Span.id.in_(span_ids))
                )
            }
        # BUG FIX: results must correspond one-to-one, in order, with `keys`.
        # Iterating over the deduplicated, arbitrarily ordered `span_ids`
        # could misalign (or shorten) the result list whenever `keys` has
        # duplicates or a different ordering than `set()` iteration yields.
        return [projects.get(span_id) or ValueError("Invalid span ID") for span_id in keys]
@@ -0,0 +1,39 @@
1
+ from typing import (
2
+ AsyncContextManager,
3
+ Callable,
4
+ List,
5
+ Optional,
6
+ Tuple,
7
+ )
8
+
9
+ from sqlalchemy import select
10
+ from sqlalchemy.ext.asyncio import AsyncSession
11
+ from strawberry.dataloader import DataLoader
12
+ from typing_extensions import TypeAlias
13
+
14
+ from phoenix.db import models
15
+
16
# Type aliases for the dataloader contract: keys are trace IDs (hex strings);
# each result is the (trace row ID, project row ID) pair, or None when the
# trace ID is not found in the database.
TraceId: TypeAlias = str
Key: TypeAlias = TraceId
TraceRowId: TypeAlias = int
ProjectRowId: TypeAlias = int
Result: TypeAlias = Optional[Tuple[TraceRowId, ProjectRowId]]
21
+
22
+
23
class TraceRowIdsDataLoader(DataLoader[Key, Result]):
    """
    Resolves trace IDs to their (trace row ID, project row ID) pairs with a
    single batched query; unknown trace IDs resolve to None.
    """

    def __init__(self, db: Callable[[], AsyncContextManager[AsyncSession]]) -> None:
        super().__init__(load_fn=self._load_fn)
        self._db = db

    async def _load_fn(self, keys: List[Key]) -> List[Result]:
        stmt = select(
            models.Trace.trace_id,
            models.Trace.id,
            models.Trace.project_rowid,
        ).where(models.Trace.trace_id.in_(keys))
        found: dict = {}
        async with self._db() as session:
            async for trace_id, row_id, project_row_id in await session.stream(stmt):
                found[trace_id] = (row_id, project_row_id)
        # Preserve the requested key order; missing trace IDs yield None.
        return [found.get(key) for key in keys]
@@ -0,0 +1,178 @@
1
+ from typing import Any, Dict, Literal, Mapping, Optional, Protocol
2
+
3
+ from openinference.semconv.trace import (
4
+ MessageAttributes,
5
+ OpenInferenceMimeTypeValues,
6
+ OpenInferenceSpanKindValues,
7
+ ToolCallAttributes,
8
+ )
9
+
10
+ from phoenix.trace.attributes import get_attribute_value
11
+
12
+
13
class HasSpanIO(Protocol):
    """
    An interface that contains the information needed to extract dataset example
    input and output values from a span.
    """

    # OpenInference span kind value (e.g. "LLM", "RETRIEVER"); None if unset
    span_kind: Optional[str]
    # raw input value attribute and its mime type (text/plain or application/json)
    input_value: Any
    input_mime_type: Optional[str]
    # raw output value attribute and its mime type
    output_value: Any
    output_mime_type: Optional[str]
    # variables used to fill the LLM prompt template, if any
    llm_prompt_template_variables: Any
    # iterables of message mappings for LLM spans — presumably OpenInference
    # message attributes; see _get_message for the keys read from each
    llm_input_messages: Any
    llm_output_messages: Any
    # documents returned by a retriever span, if any
    retrieval_documents: Any
28
+
29
+
30
def get_dataset_example_input(span: HasSpanIO) -> Dict[str, Any]:
    """
    Extract a span's input value as a dictionary.

    LLM spans derive their input from the input messages and prompt template
    variables (if present); every other span kind falls back to the generic
    input value / input mime type attributes.
    """
    if span.span_kind == OpenInferenceSpanKindValues.LLM.value:
        return _get_llm_span_input(
            input_messages=span.llm_input_messages,
            input_value=span.input_value,
            input_mime_type=span.input_mime_type,
            prompt_template_variables=span.llm_prompt_template_variables,
        )
    return _get_generic_io_value(
        io_value=span.input_value, mime_type=span.input_mime_type, kind="input"
    )
47
+
48
+
49
def get_dataset_example_output(span: HasSpanIO) -> Dict[str, Any]:
    """
    Extract a span's output value as a dictionary.

    LLM spans derive their output from the output messages (if present);
    retriever spans from the retrieval documents (if present); every other
    span kind falls back to the generic output value / output mime type
    attributes.
    """
    span_kind = span.span_kind
    output_value = span.output_value
    output_mime_type = span.output_mime_type
    if span_kind == OpenInferenceSpanKindValues.LLM.value:
        return _get_llm_span_output(
            output_messages=span.llm_output_messages,
            output_value=output_value,
            output_mime_type=output_mime_type,
        )
    if span_kind == OpenInferenceSpanKindValues.RETRIEVER.value:
        return _get_retriever_span_output(
            retrieval_documents=span.retrieval_documents,
            output_value=output_value,
            output_mime_type=output_mime_type,
        )
    return _get_generic_io_value(io_value=output_value, mime_type=output_mime_type, kind="output")
73
+
74
+
75
def _get_llm_span_input(
    input_messages: Any,
    input_value: Any,
    input_mime_type: Optional[str],
    prompt_template_variables: Any,
) -> Dict[str, Any]:
    """
    Build the input dictionary for an LLM span from its input messages (when
    present), falling back to the generic input value / mime type attributes
    otherwise; prompt template variables (when present) are merged in last.
    """
    messages = [_get_message(msg) for msg in (input_messages or ())]
    if messages:
        result: Dict[str, Any] = {"messages": messages}
    else:
        result = _get_generic_io_value(
            io_value=input_value, mime_type=input_mime_type, kind="input"
        )
    if prompt_template_variables:
        result = {**result, "prompt_template_variables": prompt_template_variables}
    return result
94
+
95
+
96
def _get_llm_span_output(
    output_messages: Any,
    output_value: Any,
    output_mime_type: Optional[str],
) -> Dict[str, Any]:
    """
    Build the output dictionary for an LLM span from its output messages,
    falling back to the generic output value / mime type attributes when no
    messages are present.
    """
    messages = [_get_message(msg) for msg in (output_messages or ())]
    if messages:
        return {"messages": messages}
    return _get_generic_io_value(io_value=output_value, mime_type=output_mime_type, kind="output")
108
+
109
+
110
def _get_retriever_span_output(
    retrieval_documents: Any,
    output_value: Any,
    output_mime_type: Optional[str],
) -> Dict[str, Any]:
    """
    Build the output dictionary for a retriever span from its retrieval
    documents, falling back to the generic output value / mime type
    attributes when no documents are present.
    """
    if retrieval_documents is None:
        return _get_generic_io_value(
            io_value=output_value, mime_type=output_mime_type, kind="output"
        )
    return {"documents": retrieval_documents}
122
+
123
+
124
def _get_generic_io_value(
    io_value: Any, mime_type: Optional[str], kind: Literal["input", "output"]
) -> Dict[str, Any]:
    """
    Makes a best-effort attempt to extract the input or output value from a
    span and returns it as a dictionary: plain-text strings become
    {kind: value}, JSON dicts are returned as-is, and anything else (or a
    value/mime-type mismatch) yields an empty dict.
    """
    if isinstance(io_value, str):
        # An absent mime type is treated as text.
        if mime_type is None or mime_type == OpenInferenceMimeTypeValues.TEXT.value:
            return {kind: io_value}
        return {}
    if isinstance(io_value, dict):
        # An absent mime type is also accepted for an already-parsed dict.
        if mime_type is None or mime_type == OpenInferenceMimeTypeValues.JSON.value:
            return io_value
        return {}
    return {}
140
+
141
+
142
def _get_message(message: Mapping[str, Any]) -> Dict[str, Any]:
    """
    Normalize a message mapping into a plain dictionary, always including the
    role and adding content, name, function_call, and tool_calls only when
    they are present on the message.
    """
    result: Dict[str, Any] = {"role": get_attribute_value(message, MESSAGE_ROLE)}
    if (content := get_attribute_value(message, MESSAGE_CONTENT)) is not None:
        result["content"] = content
    if (name := get_attribute_value(message, MESSAGE_NAME)) is not None:
        result["name"] = name
    fn_name = get_attribute_value(message, MESSAGE_FUNCTION_CALL_NAME)
    fn_args = get_attribute_value(message, MESSAGE_FUNCTION_CALL_ARGUMENTS_JSON)
    # A function call is recorded if either its name or its arguments exist.
    if fn_name is not None or fn_args is not None:
        result["function_call"] = {"name": fn_name, "arguments": fn_args}
    tool_calls = [
        {
            "function": {
                "name": get_attribute_value(tool_call, TOOL_CALL_FUNCTION_NAME),
                "arguments": get_attribute_value(tool_call, TOOL_CALL_FUNCTION_ARGUMENTS_JSON),
            }
        }
        for tool_call in get_attribute_value(message, MESSAGE_TOOL_CALLS) or ()
    ]
    if tool_calls:
        result["tool_calls"] = tool_calls
    return result
168
+
169
+
170
# Module-level aliases for the OpenInference semantic-convention attribute
# keys used by the helpers above, resolved once at import time.
MESSAGE_CONTENT = MessageAttributes.MESSAGE_CONTENT
MESSAGE_FUNCTION_CALL_ARGUMENTS_JSON = MessageAttributes.MESSAGE_FUNCTION_CALL_ARGUMENTS_JSON
MESSAGE_FUNCTION_CALL_NAME = MessageAttributes.MESSAGE_FUNCTION_CALL_NAME
MESSAGE_NAME = MessageAttributes.MESSAGE_NAME
MESSAGE_ROLE = MessageAttributes.MESSAGE_ROLE
MESSAGE_TOOL_CALLS = MessageAttributes.MESSAGE_TOOL_CALLS

TOOL_CALL_FUNCTION_NAME = ToolCallAttributes.TOOL_CALL_FUNCTION_NAME
TOOL_CALL_FUNCTION_ARGUMENTS_JSON = ToolCallAttributes.TOOL_CALL_FUNCTION_ARGUMENTS_JSON
@@ -0,0 +1,16 @@
1
+ from typing import List, Optional
2
+
3
+ import strawberry
4
+ from strawberry import UNSET
5
+ from strawberry.relay import GlobalID
6
+ from strawberry.scalars import JSON
7
+
8
+ from .DatasetExampleInput import DatasetExampleInput
9
+
10
+
11
# GraphQL input: payload for adding examples to an existing dataset.
# (Comments, not a docstring, to avoid changing the GraphQL schema description.)
@strawberry.input
class AddExamplesToDatasetInput:
    dataset_id: GlobalID  # the dataset receiving the examples
    examples: List[DatasetExampleInput]
    # optional description/metadata for the dataset version this creates
    dataset_version_description: Optional[str] = UNSET
    dataset_version_metadata: Optional[JSON] = UNSET
@@ -0,0 +1,14 @@
1
+ from typing import List, Optional
2
+
3
+ import strawberry
4
+ from strawberry import UNSET
5
+ from strawberry.relay import GlobalID
6
+ from strawberry.scalars import JSON
7
+
8
+
9
# GraphQL input: payload for adding spans (by ID) to an existing dataset.
@strawberry.input
class AddSpansToDatasetInput:
    dataset_id: GlobalID  # the dataset receiving the spans
    span_ids: List[GlobalID]  # spans to convert into dataset examples
    # optional description/metadata for the dataset version this creates
    dataset_version_description: Optional[str] = UNSET
    dataset_version_metadata: Optional[JSON] = UNSET
@@ -0,0 +1,12 @@
1
+ from typing import Optional
2
+
3
+ import strawberry
4
+ from strawberry import UNSET
5
+ from strawberry.scalars import JSON
6
+
7
+
8
# GraphQL input: payload for creating a new dataset.
@strawberry.input
class CreateDatasetInput:
    name: str  # required dataset name
    description: Optional[str] = UNSET
    metadata: Optional[JSON] = UNSET
@@ -0,0 +1,14 @@
1
+ from typing import Optional
2
+
3
+ import strawberry
4
+ from strawberry import UNSET
5
+ from strawberry.relay import GlobalID
6
+ from strawberry.scalars import JSON
7
+
8
+
9
# GraphQL input: a single dataset example's input/output/metadata payload.
@strawberry.input
class DatasetExampleInput:
    input: JSON
    output: JSON
    metadata: JSON
    # optionally links the example to the span it was derived from
    span_id: Optional[GlobalID] = UNSET
@@ -0,0 +1,17 @@
1
+ from enum import Enum
2
+
3
+ import strawberry
4
+
5
+ from phoenix.server.api.types.SortDir import SortDir
6
+
7
+
8
# Sortable columns for dataset connections; member names are camelCased for
# the GraphQL schema, values presumably map to the underlying database
# columns — confirm against the query that consumes this enum.
@strawberry.enum
class DatasetColumn(Enum):
    createdAt = "created_at"
    name = "name"
12
+
13
+
14
@strawberry.input(description="The sort key and direction for dataset connections")
class DatasetSort:
    col: DatasetColumn  # column to sort by
    dir: SortDir  # ascending or descending
@@ -0,0 +1,16 @@
1
+ from enum import Enum
2
+
3
+ import strawberry
4
+
5
+ from phoenix.server.api.types.SortDir import SortDir
6
+
7
+
8
# Sortable columns for dataset version connections; the member name is
# camelCased for the GraphQL schema, the value presumably maps to the
# underlying database column — confirm against the consuming query.
@strawberry.enum
class DatasetVersionColumn(Enum):
    createdAt = "created_at"
11
+
12
+
13
@strawberry.input(description="The sort key and direction for dataset version connections")
class DatasetVersionSort:
    col: DatasetVersionColumn  # column to sort by
    dir: SortDir  # ascending or descending
@@ -0,0 +1,13 @@
1
+ from typing import List, Optional
2
+
3
+ import strawberry
4
+ from strawberry import UNSET
5
+ from strawberry.relay import GlobalID
6
+ from strawberry.scalars import JSON
7
+
8
+
9
# GraphQL input: payload for deleting dataset examples by ID.
@strawberry.input
class DeleteDatasetExamplesInput:
    example_ids: List[GlobalID]  # examples to delete
    # optional description/metadata for the dataset version this creates
    dataset_version_description: Optional[str] = UNSET
    dataset_version_metadata: Optional[JSON] = UNSET
@@ -0,0 +1,7 @@
1
+ import strawberry
2
+ from strawberry.relay import GlobalID
3
+
4
+
5
# GraphQL input: payload for deleting a dataset by ID.
@strawberry.input
class DeleteDatasetInput:
    dataset_id: GlobalID
@@ -0,0 +1,9 @@
1
+ from typing import List
2
+
3
+ import strawberry
4
+ from strawberry.relay import GlobalID
5
+
6
+
7
# GraphQL input: payload for deleting experiments by ID.
@strawberry.input
class DeleteExperimentsInput:
    experiment_ids: List[GlobalID]
@@ -0,0 +1,35 @@
1
+ from typing import List, Optional
2
+
3
+ import strawberry
4
+ from strawberry import UNSET
5
+ from strawberry.relay import GlobalID
6
+ from strawberry.scalars import JSON
7
+
8
+
9
@strawberry.input
class DatasetExamplePatch:
    """
    Contains the information needed to apply a patch revision to a dataset example.
    """

    example_id: GlobalID  # the example being patched
    # each field left as UNSET is not modified by the patch
    input: Optional[JSON] = UNSET
    output: Optional[JSON] = UNSET
    metadata: Optional[JSON] = UNSET

    def is_empty(self) -> bool:
        """
        Return True when none of the patch fields (input, output, metadata)
        is set, i.e. the patch would change nothing.
        """
        return all(field is UNSET for field in (self.input, self.output, self.metadata))
25
+
26
+
27
@strawberry.input
class PatchDatasetExamplesInput:
    """
    Input type to the patchDatasetExamples mutation.
    """

    patches: List[DatasetExamplePatch]  # one patch per example to modify
    # optional description/metadata for the dataset version this creates
    version_description: Optional[str] = UNSET
    version_metadata: Optional[JSON] = UNSET
@@ -0,0 +1,14 @@
1
+ from typing import Optional
2
+
3
+ import strawberry
4
+ from strawberry import UNSET
5
+ from strawberry.relay import GlobalID
6
+ from strawberry.scalars import JSON
7
+
8
+
9
# GraphQL input: payload for updating a dataset's name/description/metadata;
# fields left as UNSET are not modified.
@strawberry.input
class PatchDatasetInput:
    dataset_id: GlobalID
    name: Optional[str] = UNSET
    description: Optional[str] = UNSET
    metadata: Optional[JSON] = UNSET
@@ -0,0 +1,13 @@
1
+ import strawberry
2
+
3
+ from phoenix.server.api.mutations.dataset_mutations import DatasetMutationMixin
4
+ from phoenix.server.api.mutations.experiment_mutations import ExperimentMutationMixin
5
+ from phoenix.server.api.mutations.export_events_mutations import ExportEventsMutationMixin
6
+ from phoenix.server.api.mutations.project_mutations import ProjectMutationMixin
7
+
8
+
9
# Root GraphQL Mutation type, composed from the per-domain mutation mixins
# imported above; each mixin contributes its own mutation fields.
@strawberry.type
class Mutation(
    ProjectMutationMixin, DatasetMutationMixin, ExperimentMutationMixin, ExportEventsMutationMixin
):
    pass
@@ -0,0 +1,11 @@
1
+ from typing import Any
2
+
3
+ from strawberry import Info
4
+ from strawberry.permission import BasePermission
5
+
6
+
7
class IsAuthenticated(BasePermission):
    # Error message surfaced to the client when permission is denied.
    message = "User is not authenticated"

    # NOTE(review): despite the class name, permission is granted whenever the
    # server is NOT in read-only mode — no actual authentication check is
    # visible here. Confirm this is the intended gating for guarded fields.
    async def has_permission(self, source: Any, info: Info, **kwargs: Any) -> bool:
        return not info.context.read_only