arize-phoenix 4.4.3__py3-none-any.whl → 4.4.4rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of arize-phoenix has been flagged; see the registry listing for more details.
- {arize_phoenix-4.4.3.dist-info → arize_phoenix-4.4.4rc1.dist-info}/METADATA +4 -4
- {arize_phoenix-4.4.3.dist-info → arize_phoenix-4.4.4rc1.dist-info}/RECORD +111 -55
- {arize_phoenix-4.4.3.dist-info → arize_phoenix-4.4.4rc1.dist-info}/WHEEL +1 -1
- phoenix/__init__.py +0 -27
- phoenix/config.py +21 -7
- phoenix/core/model.py +25 -25
- phoenix/core/model_schema.py +64 -62
- phoenix/core/model_schema_adapter.py +27 -25
- phoenix/datasets/__init__.py +0 -0
- phoenix/datasets/evaluators.py +275 -0
- phoenix/datasets/experiments.py +469 -0
- phoenix/datasets/tracing.py +66 -0
- phoenix/datasets/types.py +212 -0
- phoenix/db/bulk_inserter.py +54 -14
- phoenix/db/insertion/dataset.py +234 -0
- phoenix/db/insertion/evaluation.py +6 -6
- phoenix/db/insertion/helpers.py +13 -2
- phoenix/db/migrations/types.py +29 -0
- phoenix/db/migrations/versions/10460e46d750_datasets.py +291 -0
- phoenix/db/migrations/versions/cf03bd6bae1d_init.py +2 -28
- phoenix/db/models.py +230 -3
- phoenix/inferences/fixtures.py +23 -23
- phoenix/inferences/inferences.py +7 -7
- phoenix/inferences/validation.py +1 -1
- phoenix/server/api/context.py +16 -0
- phoenix/server/api/dataloaders/__init__.py +16 -0
- phoenix/server/api/dataloaders/dataset_example_revisions.py +100 -0
- phoenix/server/api/dataloaders/dataset_example_spans.py +43 -0
- phoenix/server/api/dataloaders/experiment_annotation_summaries.py +85 -0
- phoenix/server/api/dataloaders/experiment_error_rates.py +43 -0
- phoenix/server/api/dataloaders/experiment_sequence_number.py +49 -0
- phoenix/server/api/dataloaders/project_by_name.py +31 -0
- phoenix/server/api/dataloaders/span_descendants.py +2 -3
- phoenix/server/api/dataloaders/span_projects.py +33 -0
- phoenix/server/api/dataloaders/trace_row_ids.py +39 -0
- phoenix/server/api/helpers/dataset_helpers.py +178 -0
- phoenix/server/api/input_types/AddExamplesToDatasetInput.py +16 -0
- phoenix/server/api/input_types/AddSpansToDatasetInput.py +14 -0
- phoenix/server/api/input_types/CreateDatasetInput.py +12 -0
- phoenix/server/api/input_types/DatasetExampleInput.py +14 -0
- phoenix/server/api/input_types/DatasetSort.py +17 -0
- phoenix/server/api/input_types/DatasetVersionSort.py +16 -0
- phoenix/server/api/input_types/DeleteDatasetExamplesInput.py +13 -0
- phoenix/server/api/input_types/DeleteDatasetInput.py +7 -0
- phoenix/server/api/input_types/DeleteExperimentsInput.py +9 -0
- phoenix/server/api/input_types/PatchDatasetExamplesInput.py +35 -0
- phoenix/server/api/input_types/PatchDatasetInput.py +14 -0
- phoenix/server/api/mutations/__init__.py +13 -0
- phoenix/server/api/mutations/auth.py +11 -0
- phoenix/server/api/mutations/dataset_mutations.py +520 -0
- phoenix/server/api/mutations/experiment_mutations.py +65 -0
- phoenix/server/api/{types/ExportEventsMutation.py → mutations/export_events_mutations.py} +17 -14
- phoenix/server/api/mutations/project_mutations.py +42 -0
- phoenix/server/api/openapi/__init__.py +0 -0
- phoenix/server/api/openapi/main.py +6 -0
- phoenix/server/api/openapi/schema.py +15 -0
- phoenix/server/api/queries.py +503 -0
- phoenix/server/api/routers/v1/__init__.py +77 -2
- phoenix/server/api/routers/v1/dataset_examples.py +178 -0
- phoenix/server/api/routers/v1/datasets.py +861 -0
- phoenix/server/api/routers/v1/evaluations.py +4 -2
- phoenix/server/api/routers/v1/experiment_evaluations.py +65 -0
- phoenix/server/api/routers/v1/experiment_runs.py +108 -0
- phoenix/server/api/routers/v1/experiments.py +174 -0
- phoenix/server/api/routers/v1/spans.py +3 -1
- phoenix/server/api/routers/v1/traces.py +1 -4
- phoenix/server/api/schema.py +2 -303
- phoenix/server/api/types/AnnotatorKind.py +10 -0
- phoenix/server/api/types/Cluster.py +19 -19
- phoenix/server/api/types/CreateDatasetPayload.py +8 -0
- phoenix/server/api/types/Dataset.py +282 -63
- phoenix/server/api/types/DatasetExample.py +85 -0
- phoenix/server/api/types/DatasetExampleRevision.py +34 -0
- phoenix/server/api/types/DatasetVersion.py +14 -0
- phoenix/server/api/types/Dimension.py +30 -29
- phoenix/server/api/types/EmbeddingDimension.py +40 -34
- phoenix/server/api/types/Event.py +16 -16
- phoenix/server/api/types/ExampleRevisionInterface.py +14 -0
- phoenix/server/api/types/Experiment.py +135 -0
- phoenix/server/api/types/ExperimentAnnotationSummary.py +13 -0
- phoenix/server/api/types/ExperimentComparison.py +19 -0
- phoenix/server/api/types/ExperimentRun.py +91 -0
- phoenix/server/api/types/ExperimentRunAnnotation.py +57 -0
- phoenix/server/api/types/Inferences.py +80 -0
- phoenix/server/api/types/InferencesRole.py +23 -0
- phoenix/server/api/types/Model.py +43 -42
- phoenix/server/api/types/Project.py +26 -12
- phoenix/server/api/types/Span.py +78 -2
- phoenix/server/api/types/TimeSeries.py +6 -6
- phoenix/server/api/types/Trace.py +15 -4
- phoenix/server/api/types/UMAPPoints.py +1 -1
- phoenix/server/api/types/node.py +5 -111
- phoenix/server/api/types/pagination.py +10 -52
- phoenix/server/app.py +99 -49
- phoenix/server/main.py +49 -27
- phoenix/server/openapi/docs.py +3 -0
- phoenix/server/static/index.js +2246 -1368
- phoenix/server/templates/index.html +1 -0
- phoenix/services.py +15 -15
- phoenix/session/client.py +316 -21
- phoenix/session/session.py +47 -37
- phoenix/trace/exporter.py +14 -9
- phoenix/trace/fixtures.py +133 -7
- phoenix/trace/span_evaluations.py +3 -3
- phoenix/trace/trace_dataset.py +6 -6
- phoenix/utilities/json.py +61 -0
- phoenix/utilities/re.py +50 -0
- phoenix/version.py +1 -1
- phoenix/server/api/types/DatasetRole.py +0 -23
- {arize_phoenix-4.4.3.dist-info → arize_phoenix-4.4.4rc1.dist-info}/licenses/IP_NOTICE +0 -0
- {arize_phoenix-4.4.3.dist-info → arize_phoenix-4.4.4rc1.dist-info}/licenses/LICENSE +0 -0
- /phoenix/server/api/{helpers.py → helpers/__init__.py} +0 -0
phoenix/server/api/types/pagination.py CHANGED

```diff
@@ -2,60 +2,18 @@ import base64
 from dataclasses import dataclass
 from datetime import datetime
 from enum import Enum, auto
-from typing import
+from typing import Any, ClassVar, List, Optional, Tuple, Union
 
-import strawberry
 from strawberry import UNSET
+from strawberry.relay.types import Connection, Edge, NodeType, PageInfo
 from typing_extensions import TypeAlias, assert_never
 
 ID: TypeAlias = int
-GenericType = TypeVar("GenericType")
 CursorSortColumnValue: TypeAlias = Union[str, int, float, datetime]
 
-
-@strawberry.type
-class Connection(Generic[GenericType]):
-    """Represents a paginated relationship between two entities
-
-    This pattern is used when the relationship itself has attributes.
-    """
-
-    page_info: "PageInfo"
-    edges: List["Edge[GenericType]"]
-
-
-@strawberry.type
-class PageInfo:
-    """Pagination context to navigate objects with cursor-based pagination
-
-    Instead of classic offset pagination via `page` and `limit` parameters,
-    here we have a cursor of the last object and we fetch items starting from that one
-
-    Read more at:
-    - https://graphql.org/learn/pagination/#pagination-and-edges
-    - https://relay.dev/graphql/connections.htm
-    """
-
-    has_next_page: bool
-    has_previous_page: bool
-    start_cursor: Optional[str]
-    end_cursor: Optional[str]
-
-
 # A type alias for the connection cursor implementation
 CursorString = str
 
-
-@strawberry.type
-class Edge(Generic[GenericType]):
-    """
-    An edge may contain additional information of the relationship. This is the trivial case
-    """
-
-    node: GenericType
-    cursor: str
-
-
 # The hashing prefix for a connection cursor
 CURSOR_PREFIX = "connection:"
 
@@ -218,9 +176,9 @@ class ConnectionArgs:
 
 
 def connection_from_list(
-    data: List[
+    data: List[NodeType],
     args: ConnectionArgs,
-) -> Connection[
+) -> Connection[NodeType]:
     """
     A simple function that accepts a list and connection arguments, and returns
     a connection object for use in GraphQL. It uses list offsets as pagination,
@@ -230,11 +188,11 @@ def connection_from_list(
 
 
 def connection_from_list_slice(
-    list_slice: List[
+    list_slice: List[NodeType],
     args: ConnectionArgs,
     slice_start: int,
     list_length: int,
-) -> Connection[
+) -> Connection[NodeType]:
     """
     Given a slice (subset) of a list, returns a connection object for use in
     GraphQL.
@@ -295,12 +253,12 @@ def connection_from_list_slice(
     )
 
 
-def
-
+def connection_from_cursors_and_nodes(
+    cursors_and_nodes: List[Tuple[Any, NodeType]],
     has_previous_page: bool,
     has_next_page: bool,
-) -> Connection[
-    edges = [Edge(node=node, cursor=str(cursor)) for cursor, node in
+) -> Connection[NodeType]:
+    edges = [Edge(node=node, cursor=str(cursor)) for cursor, node in cursors_and_nodes]
     has_edges = len(edges) > 0
     first_edge = edges[0] if has_edges else None
     last_edge = edges[-1] if has_edges else None
```
phoenix/server/app.py CHANGED

```diff
@@ -33,7 +33,6 @@ from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
 from starlette.requests import Request
 from starlette.responses import FileResponse, PlainTextResponse, Response
 from starlette.routing import Mount, Route
-from starlette.schemas import SchemaGenerator
 from starlette.staticfiles import StaticFiles
 from starlette.templating import Jinja2Templates
 from starlette.types import Scope, StatefulLifespan
@@ -58,18 +57,27 @@ from phoenix.pointcloud.umap_parameters import UMAPParameters
 from phoenix.server.api.context import Context, DataLoaders
 from phoenix.server.api.dataloaders import (
     CacheForDataLoaders,
+    DatasetExampleRevisionsDataLoader,
+    DatasetExampleSpansDataLoader,
     DocumentEvaluationsDataLoader,
     DocumentEvaluationSummaryDataLoader,
     DocumentRetrievalMetricsDataLoader,
     EvaluationSummaryDataLoader,
+    ExperimentAnnotationSummaryDataLoader,
+    ExperimentErrorRatesDataLoader,
+    ExperimentSequenceNumberDataLoader,
     LatencyMsQuantileDataLoader,
     MinStartOrMaxEndTimeDataLoader,
+    ProjectByNameDataLoader,
     RecordCountDataLoader,
     SpanDescendantsDataLoader,
     SpanEvaluationsDataLoader,
+    SpanProjectsDataLoader,
     TokenCountDataLoader,
     TraceEvaluationsDataLoader,
+    TraceRowIdsDataLoader,
 )
+from phoenix.server.api.openapi.schema import OPENAPI_SCHEMA_GENERATOR
 from phoenix.server.api.routers.v1 import V1_ROUTES
 from phoenix.server.api.schema import schema
 from phoenix.server.grpc_server import GrpcServer
@@ -84,10 +92,6 @@ logger = logging.getLogger(__name__)
 
 templates = Jinja2Templates(directory=SERVER_DIR / "templates")
 
-schemas = SchemaGenerator(
-    {"openapi": "3.0.0", "info": {"title": "ArizePhoenix API", "version": "1.0"}}
-)
-
 
 class AppConfig(NamedTuple):
     has_inferences: bool
@@ -126,6 +130,7 @@ class Static(StaticFiles):
                     "n_neighbors": self._app_config.n_neighbors,
                     "n_samples": self._app_config.n_samples,
                     "basename": request.scope.get("root_path", ""),
+                    "platform_version": phoenix.__version__,
                     "request": request,
                 },
             )
@@ -185,6 +190,8 @@ class GraphQLWithContext(GraphQL):  # type: ignore
             export_path=self.export_path,
             streaming_last_updated_at=self.streaming_last_updated_at,
             data_loaders=DataLoaders(
+                dataset_example_revisions=DatasetExampleRevisionsDataLoader(self.db),
+                dataset_example_spans=DatasetExampleSpansDataLoader(self.db),
                 document_evaluation_summaries=DocumentEvaluationSummaryDataLoader(
                     self.db,
                     cache_map=self.cache_for_dataloaders.document_evaluation_summary
@@ -199,6 +206,9 @@ class GraphQLWithContext(GraphQL):  # type: ignore
                     if self.cache_for_dataloaders
                     else None,
                 ),
+                experiment_annotation_summaries=ExperimentAnnotationSummaryDataLoader(self.db),
+                experiment_error_rates=ExperimentErrorRatesDataLoader(self.db),
+                experiment_sequence_number=ExperimentSequenceNumberDataLoader(self.db),
                 latency_ms_quantile=LatencyMsQuantileDataLoader(
                     self.db,
                     cache_map=self.cache_for_dataloaders.latency_ms_quantile
@@ -219,6 +229,7 @@ class GraphQLWithContext(GraphQL):  # type: ignore
                 ),
                 span_descendants=SpanDescendantsDataLoader(self.db),
                 span_evaluations=SpanEvaluationsDataLoader(self.db),
+                span_projects=SpanProjectsDataLoader(self.db),
                 token_counts=TokenCountDataLoader(
                     self.db,
                     cache_map=self.cache_for_dataloaders.token_count
@@ -226,6 +237,8 @@ class GraphQLWithContext(GraphQL):  # type: ignore
                     else None,
                 ),
                 trace_evaluations=TraceEvaluationsDataLoader(self.db),
+                trace_row_ids=TraceRowIdsDataLoader(self.db),
+                project_by_name=ProjectByNameDataLoader(self.db),
             ),
             cache_for_dataloaders=self.cache_for_dataloaders,
             read_only=self.read_only,
```
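Each of the new loaders above is instantiated fresh per GraphQL context, so batching and caching stay scoped to a single request. A toy sketch of that pattern with strawberry's `DataLoader`; the batch function here is a stand-in, not code from this release:

```python
import asyncio
from typing import List

from strawberry.dataloader import DataLoader


async def load_project_names(span_rowids: List[int]) -> List[str]:
    # A real loader (e.g. SpanProjectsDataLoader) would resolve all keys with
    # one batched SQL query against the database; this stub fabricates names.
    return [f"project-{rowid}" for rowid in span_rowids]


async def main() -> None:
    loader = DataLoader(load_fn=load_project_names)  # one instance per request
    # Concurrent loads for different keys coalesce into a single batch call.
    print(await asyncio.gather(loader.load(1), loader.load(2)))


asyncio.run(main())
```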
```diff
@@ -272,7 +285,11 @@ def _lifespan(
 ) -> StatefulLifespan[Starlette]:
     @contextlib.asynccontextmanager
     async def lifespan(_: Starlette) -> AsyncIterator[Dict[str, Any]]:
-        async with bulk_inserter as (
+        async with bulk_inserter as (
+            queue_span,
+            queue_evaluation,
+            enqueue_operation,
+        ), GrpcServer(
             queue_span,
             disabled=read_only,
             tracer_provider=tracer_provider,
@@ -281,6 +298,7 @@ def _lifespan(
             yield {
                 "queue_span_for_bulk_insert": queue_span,
                 "queue_evaluation_for_bulk_insert": queue_evaluation,
+                "enqueue_operation": enqueue_operation,
             }
         for clean_up in clean_ups:
             clean_up()
@@ -293,15 +311,63 @@ async def check_healthz(_: Request) -> PlainTextResponse:
 
 
 async def openapi_schema(request: Request) -> Response:
-    return
+    return OPENAPI_SCHEMA_GENERATOR.OpenAPIResponse(request=request)
 
 
 async def api_docs(request: Request) -> Response:
     return get_swagger_ui_html(openapi_url="/schema", title="arize-phoenix API")
 
 
-def create_app(
+class SessionFactory:
+    def __init__(
+        self,
+        session_factory: Callable[[], AsyncContextManager[AsyncSession]],
+        dialect: str,
+    ):
+        self.session_factory = session_factory
+        self.dialect = SupportedSQLDialect(dialect)
+
+    def __call__(self) -> AsyncContextManager[AsyncSession]:
+        return self.session_factory()
+
+
+def create_engine_and_run_migrations(
     database_url: str,
+) -> AsyncEngine:
+    try:
+        return create_engine(database_url)
+    except PhoenixMigrationError as e:
+        msg = (
+            "\n\n⚠️⚠️ Phoenix failed to migrate the database to the latest version. ⚠️⚠️\n\n"
+            "The database may be in a dirty state. To resolve this, the Alembic CLI can be used\n"
+            "from the `src/phoenix/db` directory inside the Phoenix project root. From here,\n"
+            "revert any partial migrations and run `alembic stamp` to reset the migration state,\n"
+            "then try starting Phoenix again.\n\n"
+            "If issues persist, please reach out for support in the Arize community Slack:\n"
+            "https://arize-ai.slack.com\n\n"
+            "You can also refer to the Alembic documentation for more information:\n"
+            "https://alembic.sqlalchemy.org/en/latest/tutorial.html\n\n"
+            ""
+        )
+        raise PhoenixMigrationError(msg) from e
+
+
+def instrument_engine_if_enabled(engine: AsyncEngine) -> List[Callable[[], None]]:
+    instrumentation_cleanups = []
+    if server_instrumentation_is_enabled():
+        from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
+
+        tracer_provider = initialize_opentelemetry_tracer_provider()
+        SQLAlchemyInstrumentor().instrument(
+            engine=engine.sync_engine,
+            tracer_provider=tracer_provider,
+        )
+        instrumentation_cleanups.append(SQLAlchemyInstrumentor().uninstrument)
+    return instrumentation_cleanups
+
+
+def create_app(
+    db: SessionFactory,
     export_path: Path,
     model: Model,
     umap_params: UMAPParameters,
@@ -311,8 +377,10 @@ def create_app(
     enable_prometheus: bool = False,
     initial_spans: Optional[Iterable[Union[Span, Tuple[Span, str]]]] = None,
     initial_evaluations: Optional[Iterable[pb.Evaluation]] = None,
+    serve_ui: bool = True,
+    clean_up_callbacks: List[Callable[[], None]] = [],
 ) -> Starlette:
-    clean_ups: List[Callable[[], None]] =
+    clean_ups: List[Callable[[], None]] = clean_up_callbacks  # To be called at app shutdown.
     initial_batch_of_spans: Iterable[Tuple[Span, str]] = (
         ()
         if initial_spans is None
@@ -322,28 +390,10 @@ def create_app(
         )
     )
     initial_batch_of_evaluations = () if initial_evaluations is None else initial_evaluations
-    try:
-        engine = create_engine(database_url)
-    except PhoenixMigrationError as e:
-        msg = (
-            "\n\n⚠️⚠️ Phoenix failed to migrate the database to the latest version. ⚠️⚠️\n\n"
-            "The database may be in a dirty state. To resolve this, the Alembic CLI can be used\n"
-            "from the `src/phoenix/db` directory inside the Phoenix project root. From here,\n"
-            "revert any partial migrations and run `alembic stamp` to reset the migration state,\n"
-            "then try starting Phoenix again.\n\n"
-            "If issues persist, please reach out for support in the Arize community Slack:\n"
-            "https://arize-ai.slack.com\n\n"
-            "You can also refer to the Alembic documentation for more information:\n"
-            "https://alembic.sqlalchemy.org/en/latest/tutorial.html\n\n"
-            ""
-        )
-        raise PhoenixMigrationError(msg) from e
     cache_for_dataloaders = (
-        CacheForDataLoaders()
-        if SupportedSQLDialect(engine.dialect.name) is SupportedSQLDialect.SQLITE
-        else None
+        CacheForDataLoaders() if db.dialect is SupportedSQLDialect.SQLITE else None
     )
-
+
     bulk_inserter = BulkInserter(
         db,
         enable_prometheus=enable_prometheus,
@@ -354,16 +404,9 @@ def create_app(
     tracer_provider = None
     strawberry_extensions = schema.get_extensions()
     if server_instrumentation_is_enabled():
-        from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
         from opentelemetry.trace import TracerProvider
         from strawberry.extensions.tracing import OpenTelemetryExtension
 
-        tracer_provider = initialize_opentelemetry_tracer_provider()
-        SQLAlchemyInstrumentor().instrument(
-            engine=engine.sync_engine,
-            tracer_provider=tracer_provider,
-        )
-        clean_ups.append(SQLAlchemyInstrumentor().uninstrument)
         if TYPE_CHECKING:
             # Type-check the class before monkey-patching its private attribute.
             assert OpenTelemetryExtension._tracer
@@ -377,6 +420,7 @@ def create_app(
             self._tracer = cast(TracerProvider, tracer_provider).get_tracer("strawberry")
 
         strawberry_extensions.append(_OpenTelemetryExtension)
+
     graphql = GraphQLWithContext(
         db=db,
         schema=strawberry.Schema(
@@ -433,21 +477,27 @@ def create_app(
                 "/graphql",
                 graphql,
             ),
-
-
-
-
-
-
-
-
-
-
+        ]
+        + (
+            [
+                Mount(
+                    "/",
+                    app=Static(
+                        directory=SERVER_DIR / "static",
+                        app_config=AppConfig(
+                            has_inferences=model.is_empty is not True,
+                            has_corpus=corpus is not None,
+                            min_dist=umap_params.min_dist,
+                            n_neighbors=umap_params.n_neighbors,
+                            n_samples=umap_params.n_samples,
+                        ),
                     ),
+                    name="static",
                 ),
-
-
-
+            ]
+            if serve_ui
+            else []
+        ),
     )
     app.state.read_only = read_only
     app.state.db = db
```
phoenix/server/main.py CHANGED

```diff
@@ -22,9 +22,9 @@ from phoenix.config import (
     get_pids_path,
     get_working_dir,
 )
-from phoenix.core.model_schema_adapter import
+from phoenix.core.model_schema_adapter import create_model_from_inferences
 from phoenix.db import get_printable_db_url
-from phoenix.inferences.fixtures import FIXTURES,
+from phoenix.inferences.fixtures import FIXTURES, get_inferences
 from phoenix.inferences.inferences import EMPTY_INFERENCES, Inferences
 from phoenix.pointcloud.umap_parameters import (
     DEFAULT_MIN_DIST,
@@ -32,14 +32,22 @@ from phoenix.pointcloud.umap_parameters import (
     DEFAULT_N_SAMPLES,
     UMAPParameters,
 )
-from phoenix.server.app import
+from phoenix.server.app import (
+    SessionFactory,
+    _db,
+    create_app,
+    create_engine_and_run_migrations,
+    instrument_engine_if_enabled,
+)
 from phoenix.settings import Settings
 from phoenix.trace.fixtures import (
     TRACES_FIXTURES,
     download_traces_fixture,
+    get_dataset_fixtures,
     get_evals_from_fixture,
     get_trace_fixture_by_name,
     reset_fixture_span_ids_and_timestamps,
+    send_dataset_fixtures,
 )
 from phoenix.trace.otel import decode_otlp_span, encode_span_to_otlp
 from phoenix.trace.schemas import Span
@@ -99,14 +107,14 @@ def _get_pid_file() -> Path:
 DEFAULT_UMAP_PARAMS_STR = f"{DEFAULT_MIN_DIST},{DEFAULT_N_NEIGHBORS},{DEFAULT_N_SAMPLES}"
 
 if __name__ == "__main__":
-
-
+    primary_inferences_name: str
+    reference_inferences_name: Optional[str]
     trace_dataset_name: Optional[str] = None
     simulate_streaming: Optional[bool] = None
 
-
-
-
+    primary_inferences: Inferences = EMPTY_INFERENCES
+    reference_inferences: Optional[Inferences] = None
+    corpus_inferences: Optional[Inferences] = None
 
     # Initialize the settings for the Server
     Settings.log_migrations = True
@@ -150,34 +158,34 @@ if __name__ == "__main__":
     )
     export_path = Path(args.export_path) if args.export_path else EXPORT_DIR
     if args.command == "datasets":
-
-
-
-
-
-        Inferences.from_name(
-        if
+        primary_inferences_name = args.primary
+        reference_inferences_name = args.reference
+        corpus_inferences_name = args.corpus
+        primary_inferences = Inferences.from_name(primary_inferences_name)
+        reference_inferences = (
+            Inferences.from_name(reference_inferences_name)
+            if reference_inferences_name is not None
             else None
         )
-
-        None if
+        corpus_inferences = (
+            None if corpus_inferences_name is None else Inferences.from_name(corpus_inferences_name)
         )
     elif args.command == "fixture":
         fixture_name = args.fixture
         primary_only = args.primary_only
-
+        primary_inferences, reference_inferences, corpus_inferences = get_inferences(
             fixture_name,
             args.no_internet,
         )
         if primary_only:
-
-
+            reference_inferences_name = None
+            reference_inferences = None
     elif args.command == "trace-fixture":
         trace_dataset_name = args.fixture
         simulate_streaming = args.simulate_streaming
     elif args.command == "demo":
         fixture_name = args.fixture
-
+        primary_inferences, reference_inferences, corpus_inferences = get_inferences(
            fixture_name,
             args.no_internet,
         )
@@ -197,9 +205,11 @@ if __name__ == "__main__":
 
     port = args.port or get_env_port()
     host_root_path = get_env_host_root_path()
-
-
-
+    read_only = args.read_only
+
+    model = create_model_from_inferences(
+        primary_inferences,
+        reference_inferences,
     )
 
     fixture_spans: List[Span] = []
@@ -216,13 +226,19 @@ if __name__ == "__main__":
         ),
         get_evals_from_fixture(trace_dataset_name),
     )
+    dataset_fixtures = list(get_dataset_fixtures(trace_dataset_name))
+    if not read_only:
+        Thread(
+            target=send_dataset_fixtures,
+            args=(f"http://{host}:{port}", dataset_fixtures),
+        ).start()
     umap_params_list = args.umap_params.split(",")
     umap_params = UMAPParameters(
         min_dist=float(umap_params_list[0]),
         n_neighbors=int(umap_params_list[1]),
         n_samples=int(umap_params_list[2]),
     )
-
+
     logger.info(f"Server umap params: {umap_params}")
     if enable_prometheus := get_env_enable_prometheus():
         from phoenix.server.prometheus import start_prometheus
@@ -230,17 +246,23 @@ if __name__ == "__main__":
         start_prometheus()
 
     working_dir = get_working_dir().resolve()
+    engine = create_engine_and_run_migrations(db_connection_str)
+    instrumentation_cleanups = instrument_engine_if_enabled(engine)
+    factory = SessionFactory(session_factory=_db(engine), dialect=engine.dialect.name)
     app = create_app(
-
+        db=factory,
         export_path=export_path,
         model=model,
         umap_params=umap_params,
-        corpus=None
+        corpus=None
+        if corpus_inferences is None
+        else create_model_from_inferences(corpus_inferences),
         debug=args.debug,
         read_only=read_only,
         enable_prometheus=enable_prometheus,
        initial_spans=fixture_spans,
         initial_evaluations=fixture_evals,
+        clean_up_callbacks=instrumentation_cleanups,
     )
     server = Server(config=Config(app, host=host, port=port, root_path=host_root_path))  # type: ignore
     Thread(target=_write_pid_file_when_ready, args=(server,), daemon=True).start()
```
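Note that the dataset fixtures are sent to the running server from a background thread, and only when the server is not read-only, so startup is never blocked on the upload. A toy version of that fire-and-forget pattern; `upload` and the address are stand-ins for `send_dataset_fixtures` and the real host and port:

```python
from threading import Thread
from typing import List


def upload(base_url: str, fixtures: List[str]) -> None:
    # Stand-in for send_dataset_fixtures(f"http://{host}:{port}", dataset_fixtures).
    print(f"uploading {len(fixtures)} dataset fixtures to {base_url}")


read_only = False
if not read_only:
    # Fire and forget: the server keeps booting while the upload runs.
    Thread(target=upload, args=("http://127.0.0.1:6006", ["my-fixture"])).start()
```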
phoenix/server/openapi/docs.py CHANGED

```diff
@@ -43,6 +43,9 @@ def get_swagger_ui_html(
     <div id="swagger-ui">
     </div>
     <script src="{swagger_js_url}"></script>
+    <style type="text/css">
+    div[id^="operations-private"]{{display:none}} #operations-tag-private{{display:none}}
+    </style>
     <!-- `SwaggerUIBundle` is now available on the page -->
     <script>
     const ui = SwaggerUIBundle({{
```