arize-phoenix 4.14.1__py3-none-any.whl → 4.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of arize-phoenix has been flagged as possibly problematic by the registry scanner.
Files changed (72)
  1. {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.15.0.dist-info}/METADATA +4 -3
  2. {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.15.0.dist-info}/RECORD +69 -66
  3. phoenix/db/bulk_inserter.py +2 -3
  4. phoenix/db/engines.py +2 -1
  5. phoenix/experiments/functions.py +3 -2
  6. phoenix/server/api/context.py +7 -9
  7. phoenix/server/api/dataloaders/__init__.py +2 -0
  8. phoenix/server/api/dataloaders/average_experiment_run_latency.py +3 -3
  9. phoenix/server/api/dataloaders/dataset_example_revisions.py +2 -4
  10. phoenix/server/api/dataloaders/dataset_example_spans.py +2 -4
  11. phoenix/server/api/dataloaders/document_evaluation_summaries.py +2 -4
  12. phoenix/server/api/dataloaders/document_evaluations.py +2 -4
  13. phoenix/server/api/dataloaders/document_retrieval_metrics.py +2 -4
  14. phoenix/server/api/dataloaders/evaluation_summaries.py +2 -4
  15. phoenix/server/api/dataloaders/experiment_annotation_summaries.py +2 -4
  16. phoenix/server/api/dataloaders/experiment_error_rates.py +2 -4
  17. phoenix/server/api/dataloaders/experiment_run_counts.py +2 -4
  18. phoenix/server/api/dataloaders/experiment_sequence_number.py +2 -4
  19. phoenix/server/api/dataloaders/latency_ms_quantile.py +2 -3
  20. phoenix/server/api/dataloaders/min_start_or_max_end_times.py +2 -4
  21. phoenix/server/api/dataloaders/project_by_name.py +3 -3
  22. phoenix/server/api/dataloaders/record_counts.py +2 -4
  23. phoenix/server/api/dataloaders/span_annotations.py +2 -4
  24. phoenix/server/api/dataloaders/span_dataset_examples.py +36 -0
  25. phoenix/server/api/dataloaders/span_descendants.py +2 -4
  26. phoenix/server/api/dataloaders/span_evaluations.py +2 -4
  27. phoenix/server/api/dataloaders/span_projects.py +3 -3
  28. phoenix/server/api/dataloaders/token_counts.py +2 -4
  29. phoenix/server/api/dataloaders/trace_evaluations.py +2 -4
  30. phoenix/server/api/dataloaders/trace_row_ids.py +2 -4
  31. phoenix/server/api/mutations/span_annotations_mutations.py +8 -3
  32. phoenix/server/api/mutations/trace_annotations_mutations.py +8 -3
  33. phoenix/server/api/openapi/main.py +18 -2
  34. phoenix/server/api/openapi/schema.py +12 -12
  35. phoenix/server/api/routers/v1/__init__.py +36 -83
  36. phoenix/server/api/routers/v1/datasets.py +515 -509
  37. phoenix/server/api/routers/v1/evaluations.py +75 -70
  38. phoenix/server/api/routers/v1/experiment_evaluations.py +68 -91
  39. phoenix/server/api/routers/v1/experiment_runs.py +98 -155
  40. phoenix/server/api/routers/v1/experiments.py +132 -181
  41. phoenix/server/api/routers/v1/pydantic_compat.py +78 -0
  42. phoenix/server/api/routers/v1/spans.py +144 -173
  43. phoenix/server/api/routers/v1/traces.py +115 -128
  44. phoenix/server/api/routers/v1/utils.py +95 -0
  45. phoenix/server/api/types/Span.py +5 -0
  46. phoenix/server/api/utils.py +4 -4
  47. phoenix/server/app.py +170 -192
  48. phoenix/server/grpc_server.py +2 -2
  49. phoenix/server/main.py +5 -9
  50. phoenix/server/static/.vite/manifest.json +31 -31
  51. phoenix/server/static/assets/{components-DeS0YEmv.js → components-kGgeFkHp.js} +150 -110
  52. phoenix/server/static/assets/{index-CQgXRwU0.js → index-BctFO6S7.js} +2 -2
  53. phoenix/server/static/assets/{pages-hdjlFZhO.js → pages-DabDCmVd.js} +372 -272
  54. phoenix/server/static/assets/{vendor-DPvSDRn3.js → vendor-CP0b0YG0.js} +2 -2
  55. phoenix/server/static/assets/{vendor-arizeai-CkvPT67c.js → vendor-arizeai-B5Hti8OB.js} +27 -27
  56. phoenix/server/static/assets/vendor-codemirror-DtdPDzrv.js +15 -0
  57. phoenix/server/static/assets/{vendor-recharts-5jlNaZuF.js → vendor-recharts-A0DA1O99.js} +1 -1
  58. phoenix/server/thread_server.py +2 -2
  59. phoenix/server/types.py +18 -0
  60. phoenix/session/client.py +5 -3
  61. phoenix/session/session.py +2 -2
  62. phoenix/trace/fixtures.py +17 -23
  63. phoenix/trace/utils.py +23 -0
  64. phoenix/utilities/client.py +116 -0
  65. phoenix/utilities/project.py +1 -1
  66. phoenix/version.py +1 -1
  67. phoenix/server/api/routers/v1/dataset_examples.py +0 -178
  68. phoenix/server/openapi/docs.py +0 -221
  69. phoenix/server/static/assets/vendor-codemirror-Cqwpwlua.js +0 -12
  70. {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.15.0.dist-info}/WHEEL +0 -0
  71. {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.15.0.dist-info}/licenses/IP_NOTICE +0 -0
  72. {arize_phoenix-4.14.1.dist-info → arize_phoenix-4.15.0.dist-info}/licenses/LICENSE +0 -0
phoenix/server/app.py CHANGED
@@ -1,3 +1,4 @@
+import asyncio
 import contextlib
 import json
 import logging
@@ -21,25 +22,24 @@ from typing import (
 )
 
 import strawberry
+from fastapi import APIRouter, FastAPI
+from fastapi.middleware.gzip import GZipMiddleware
+from fastapi.responses import FileResponse
+from fastapi.utils import is_body_allowed_for_status_code
 from sqlalchemy.ext.asyncio import (
     AsyncEngine,
     AsyncSession,
     async_sessionmaker,
 )
-from starlette.applications import Starlette
-from starlette.datastructures import QueryParams
-from starlette.endpoints import HTTPEndpoint
 from starlette.exceptions import HTTPException
 from starlette.middleware import Middleware
 from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
 from starlette.requests import Request
-from starlette.responses import FileResponse, PlainTextResponse, Response
-from starlette.routing import Mount, Route
+from starlette.responses import PlainTextResponse, Response
 from starlette.staticfiles import StaticFiles
 from starlette.templating import Jinja2Templates
 from starlette.types import Scope, StatefulLifespan
-from starlette.websockets import WebSocket
-from strawberry.asgi import GraphQL
+from strawberry.fastapi import GraphQLRouter
 from strawberry.schema import BaseSchema
 from typing_extensions import TypeAlias
 
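The import changes above capture the headline change of this release: the server's application, routing, endpoint, and WebSocket primitives move from Starlette to FastAPI and strawberry's FastAPI integration, while the lower-level Starlette pieces (middleware, requests, responses, exceptions, static files) stay in use, since a FastAPI app is itself a Starlette app. A minimal compatibility sketch; the middleware class and route below are illustrative, not phoenix code:

```python
# Minimal sketch: Starlette middleware and responses keep working on a FastAPI app.
# `ExampleHeaderMiddleware` and the "/ping" route are illustrative, not phoenix code.
from fastapi import FastAPI
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.requests import Request
from starlette.responses import PlainTextResponse, Response


class ExampleHeaderMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
        response = await call_next(request)
        response.headers["x-example"] = "1"  # add a header to every response
        return response


app = FastAPI()
app.add_middleware(ExampleHeaderMiddleware)


@app.get("/ping")
async def ping() -> PlainTextResponse:
    return PlainTextResponse("pong")
```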
@@ -75,6 +75,7 @@ from phoenix.server.api.dataloaders import (
     ProjectByNameDataLoader,
     RecordCountDataLoader,
     SpanAnnotationsDataLoader,
+    SpanDatasetExamplesDataLoader,
     SpanDescendantsDataLoader,
     SpanEvaluationsDataLoader,
     SpanProjectsDataLoader,
@@ -82,19 +83,22 @@ from phoenix.server.api.dataloaders import (
     TraceEvaluationsDataLoader,
     TraceRowIdsDataLoader,
 )
-from phoenix.server.api.openapi.schema import OPENAPI_SCHEMA_GENERATOR
-from phoenix.server.api.routers.v1 import V1_ROUTES
+from phoenix.server.api.routers.v1 import REST_API_VERSION
+from phoenix.server.api.routers.v1 import router as v1_router
 from phoenix.server.api.schema import schema
 from phoenix.server.grpc_server import GrpcServer
-from phoenix.server.openapi.docs import get_swagger_ui_html
 from phoenix.server.telemetry import initialize_opentelemetry_tracer_provider
+from phoenix.server.types import DbSessionFactory
 from phoenix.trace.schemas import Span
+from phoenix.utilities.client import PHOENIX_SERVER_VERSION_HEADER
 
 if TYPE_CHECKING:
     from opentelemetry.trace import TracerProvider
 
 logger = logging.getLogger(__name__)
 
+router = APIRouter(include_in_schema=False)
+
 templates = Jinja2Templates(directory=SERVER_DIR / "templates")
 
 
@@ -167,125 +171,35 @@ class HeadersMiddleware(BaseHTTPMiddleware):
         request: Request,
         call_next: RequestResponseEndpoint,
     ) -> Response:
+        from phoenix import __version__ as phoenix_version
+
         response = await call_next(request)
         response.headers["x-colab-notebook-cache-control"] = "no-cache"
+        response.headers[PHOENIX_SERVER_VERSION_HEADER] = phoenix_version
         return response
 
 
 ProjectRowId: TypeAlias = int
 
 
-class GraphQLWithContext(GraphQL):  # type: ignore
-    def __init__(
-        self,
-        schema: BaseSchema,
-        db: Callable[[], AsyncContextManager[AsyncSession]],
-        model: Model,
-        export_path: Path,
-        graphiql: bool = False,
-        corpus: Optional[Model] = None,
-        streaming_last_updated_at: Callable[[ProjectRowId], Optional[datetime]] = lambda _: None,
-        cache_for_dataloaders: Optional[CacheForDataLoaders] = None,
-        read_only: bool = False,
-    ) -> None:
-        self.db = db
-        self.model = model
-        self.corpus = corpus
-        self.export_path = export_path
-        self.streaming_last_updated_at = streaming_last_updated_at
-        self.cache_for_dataloaders = cache_for_dataloaders
-        self.read_only = read_only
-        super().__init__(schema, graphiql=graphiql)
-
-    async def get_context(
-        self,
-        request: Union[Request, WebSocket],
-        response: Optional[Response] = None,
-    ) -> Context:
-        return Context(
-            request=request,
-            response=response,
-            db=self.db,
-            model=self.model,
-            corpus=self.corpus,
-            export_path=self.export_path,
-            streaming_last_updated_at=self.streaming_last_updated_at,
-            data_loaders=DataLoaders(
-                average_experiment_run_latency=AverageExperimentRunLatencyDataLoader(self.db),
-                dataset_example_revisions=DatasetExampleRevisionsDataLoader(self.db),
-                dataset_example_spans=DatasetExampleSpansDataLoader(self.db),
-                document_evaluation_summaries=DocumentEvaluationSummaryDataLoader(
-                    self.db,
-                    cache_map=self.cache_for_dataloaders.document_evaluation_summary
-                    if self.cache_for_dataloaders
-                    else None,
-                ),
-                document_evaluations=DocumentEvaluationsDataLoader(self.db),
-                document_retrieval_metrics=DocumentRetrievalMetricsDataLoader(self.db),
-                evaluation_summaries=EvaluationSummaryDataLoader(
-                    self.db,
-                    cache_map=self.cache_for_dataloaders.evaluation_summary
-                    if self.cache_for_dataloaders
-                    else None,
-                ),
-                experiment_annotation_summaries=ExperimentAnnotationSummaryDataLoader(self.db),
-                experiment_error_rates=ExperimentErrorRatesDataLoader(self.db),
-                experiment_run_counts=ExperimentRunCountsDataLoader(self.db),
-                experiment_sequence_number=ExperimentSequenceNumberDataLoader(self.db),
-                latency_ms_quantile=LatencyMsQuantileDataLoader(
-                    self.db,
-                    cache_map=self.cache_for_dataloaders.latency_ms_quantile
-                    if self.cache_for_dataloaders
-                    else None,
-                ),
-                min_start_or_max_end_times=MinStartOrMaxEndTimeDataLoader(
-                    self.db,
-                    cache_map=self.cache_for_dataloaders.min_start_or_max_end_time
-                    if self.cache_for_dataloaders
-                    else None,
-                ),
-                record_counts=RecordCountDataLoader(
-                    self.db,
-                    cache_map=self.cache_for_dataloaders.record_count
-                    if self.cache_for_dataloaders
-                    else None,
-                ),
-                span_descendants=SpanDescendantsDataLoader(self.db),
-                span_evaluations=SpanEvaluationsDataLoader(self.db),
-                span_projects=SpanProjectsDataLoader(self.db),
-                token_counts=TokenCountDataLoader(
-                    self.db,
-                    cache_map=self.cache_for_dataloaders.token_count
-                    if self.cache_for_dataloaders
-                    else None,
-                ),
-                trace_evaluations=TraceEvaluationsDataLoader(self.db),
-                trace_row_ids=TraceRowIdsDataLoader(self.db),
-                project_by_name=ProjectByNameDataLoader(self.db),
-                span_annotations=SpanAnnotationsDataLoader(self.db),
-            ),
-            cache_for_dataloaders=self.cache_for_dataloaders,
-            read_only=self.read_only,
-        )
+@router.get("/exports")
+async def download_exported_file(request: Request, filename: str) -> FileResponse:
+    file = request.app.state.export_path / (filename + ".parquet")
+    if not file.is_file():
+        raise HTTPException(status_code=404)
+    return FileResponse(
+        path=file,
+        filename=file.name,
+        media_type="application/x-octet-stream",
+    )
 
 
-class Download(HTTPEndpoint):
-    path: Path
+@router.get("/arize_phoenix_version")
+async def version() -> PlainTextResponse:
+    return PlainTextResponse(f"{phoenix.__version__}")
 
-    async def get(self, request: Request) -> FileResponse:
-        params = QueryParams(request.query_params)
-        file = self.path / (params.get("filename", "") + ".parquet")
-        if not file.is_file():
-            raise HTTPException(status_code=404)
-        return FileResponse(
-            path=file,
-            filename=file.name,
-            media_type="application/x-octet-stream",
-        )
 
-
-async def version(_: Request) -> PlainTextResponse:
-    return PlainTextResponse(f"{phoenix.__version__}")
+DB_MUTEX: Optional[asyncio.Lock] = None
 
 
 def _db(engine: AsyncEngine) -> Callable[[], AsyncContextManager[AsyncSession]]:
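In this hunk the class-based `Download` endpoint and the bare `version` function become plain path operations on the module-level `APIRouter`: the `filename` query parameter is now a typed function argument, and the export directory is read from `app.state.export_path` instead of a class attribute. A hedged client-side sketch of exercising these routes; the host, port, and filename are placeholders, not values from this diff:

```python
# Placeholder host/port and filename; adjust to your deployment.
import httpx

base_url = "http://localhost:6006"  # commonly the default Phoenix address

# Plain-text version endpoint.
print(httpx.get(f"{base_url}/arize_phoenix_version").text)

# /exports?filename=<name> serves <name>.parquet from the export directory,
# or responds 404 (now rendered as plain text) when the file is missing.
response = httpx.get(f"{base_url}/exports", params={"filename": "my_export"})
response.raise_for_status()
with open("my_export.parquet", "wb") as f:
    f.write(response.content)
```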
@@ -293,22 +207,27 @@ def _db(engine: AsyncEngine) -> Callable[[], AsyncContextManager[AsyncSession]]:
 
     @contextlib.asynccontextmanager
     async def factory() -> AsyncIterator[AsyncSession]:
-        async with Session.begin() as session:
-            yield session
+        async with contextlib.AsyncExitStack() as stack:
+            if DB_MUTEX:
+                await stack.enter_async_context(DB_MUTEX)
+            yield await stack.enter_async_context(Session.begin())
 
     return factory
 
 
 def _lifespan(
     *,
+    dialect: SupportedSQLDialect,
     bulk_inserter: BulkInserter,
     tracer_provider: Optional["TracerProvider"] = None,
     enable_prometheus: bool = False,
     clean_ups: Iterable[Callable[[], None]] = (),
     read_only: bool = False,
-) -> StatefulLifespan[Starlette]:
+) -> StatefulLifespan[FastAPI]:
     @contextlib.asynccontextmanager
-    async def lifespan(_: Starlette) -> AsyncIterator[Dict[str, Any]]:
+    async def lifespan(_: FastAPI) -> AsyncIterator[Dict[str, Any]]:
+        global DB_MUTEX
+        DB_MUTEX = asyncio.Lock() if dialect is SupportedSQLDialect.SQLITE else None
         async with bulk_inserter as (
             queue_span,
             queue_evaluation,
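The rewritten `factory` wraps the SQLAlchemy session in an `AsyncExitStack` so that a module-level `asyncio.Lock` can optionally be entered first; the lifespan hook creates that lock only for SQLite, which tolerates a single writer at a time. A standalone sketch of the pattern, with a dummy async context manager standing in for `Session.begin()`:

```python
# Standalone sketch of the optional-lock pattern; `fake_session` stands in for
# sqlalchemy's Session.begin() and is not phoenix code.
import asyncio
import contextlib
from typing import AsyncIterator, Optional

DB_MUTEX: Optional[asyncio.Lock] = None  # set at startup; None disables serialization


@contextlib.asynccontextmanager
async def fake_session() -> AsyncIterator[str]:
    yield "session"


@contextlib.asynccontextmanager
async def factory() -> AsyncIterator[str]:
    async with contextlib.AsyncExitStack() as stack:
        if DB_MUTEX:
            # Only one task at a time gets past this point.
            await stack.enter_async_context(DB_MUTEX)
        yield await stack.enter_async_context(fake_session())


async def main() -> None:
    global DB_MUTEX
    DB_MUTEX = asyncio.Lock()  # enabled, mirroring the SQLite branch above
    async with factory() as session:
        print(session)


asyncio.run(main())
```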
@@ -330,29 +249,91 @@ def _lifespan(
     return lifespan
 
 
+@router.get("/healthz")
 async def check_healthz(_: Request) -> PlainTextResponse:
     return PlainTextResponse("OK")
 
 
-async def openapi_schema(request: Request) -> Response:
-    return OPENAPI_SCHEMA_GENERATOR.OpenAPIResponse(request=request)
-
-
-async def api_docs(request: Request) -> Response:
-    return get_swagger_ui_html(openapi_url="/schema", title="arize-phoenix API")
-
-
-class SessionFactory:
-    def __init__(
-        self,
-        session_factory: Callable[[], AsyncContextManager[AsyncSession]],
-        dialect: str,
-    ):
-        self.session_factory = session_factory
-        self.dialect = SupportedSQLDialect(dialect)
+def create_graphql_router(
+    *,
+    schema: BaseSchema,
+    db: DbSessionFactory,
+    model: Model,
+    export_path: Path,
+    corpus: Optional[Model] = None,
+    streaming_last_updated_at: Callable[[ProjectRowId], Optional[datetime]] = lambda _: None,
+    cache_for_dataloaders: Optional[CacheForDataLoaders] = None,
+    read_only: bool = False,
+) -> GraphQLRouter:  # type: ignore[type-arg]
+    def get_context() -> Context:
+        return Context(
+            db=db,
+            model=model,
+            corpus=corpus,
+            export_path=export_path,
+            streaming_last_updated_at=streaming_last_updated_at,
+            data_loaders=DataLoaders(
+                average_experiment_run_latency=AverageExperimentRunLatencyDataLoader(db),
+                dataset_example_revisions=DatasetExampleRevisionsDataLoader(db),
+                dataset_example_spans=DatasetExampleSpansDataLoader(db),
+                document_evaluation_summaries=DocumentEvaluationSummaryDataLoader(
+                    db,
+                    cache_map=cache_for_dataloaders.document_evaluation_summary
+                    if cache_for_dataloaders
+                    else None,
+                ),
+                document_evaluations=DocumentEvaluationsDataLoader(db),
+                document_retrieval_metrics=DocumentRetrievalMetricsDataLoader(db),
+                evaluation_summaries=EvaluationSummaryDataLoader(
+                    db,
+                    cache_map=cache_for_dataloaders.evaluation_summary
+                    if cache_for_dataloaders
+                    else None,
+                ),
+                experiment_annotation_summaries=ExperimentAnnotationSummaryDataLoader(db),
+                experiment_error_rates=ExperimentErrorRatesDataLoader(db),
+                experiment_run_counts=ExperimentRunCountsDataLoader(db),
+                experiment_sequence_number=ExperimentSequenceNumberDataLoader(db),
+                latency_ms_quantile=LatencyMsQuantileDataLoader(
+                    db,
+                    cache_map=cache_for_dataloaders.latency_ms_quantile
+                    if cache_for_dataloaders
+                    else None,
+                ),
+                min_start_or_max_end_times=MinStartOrMaxEndTimeDataLoader(
+                    db,
+                    cache_map=cache_for_dataloaders.min_start_or_max_end_time
+                    if cache_for_dataloaders
+                    else None,
+                ),
+                record_counts=RecordCountDataLoader(
+                    db,
+                    cache_map=cache_for_dataloaders.record_count if cache_for_dataloaders else None,
+                ),
+                span_annotations=SpanAnnotationsDataLoader(db),
+                span_dataset_examples=SpanDatasetExamplesDataLoader(db),
+                span_descendants=SpanDescendantsDataLoader(db),
+                span_evaluations=SpanEvaluationsDataLoader(db),
+                span_projects=SpanProjectsDataLoader(db),
+                token_counts=TokenCountDataLoader(
+                    db,
+                    cache_map=cache_for_dataloaders.token_count if cache_for_dataloaders else None,
+                ),
+                trace_evaluations=TraceEvaluationsDataLoader(db),
+                trace_row_ids=TraceRowIdsDataLoader(db),
+                project_by_name=ProjectByNameDataLoader(db),
+            ),
+            cache_for_dataloaders=cache_for_dataloaders,
+            read_only=read_only,
+        )
 
-    def __call__(self) -> AsyncContextManager[AsyncSession]:
-        return self.session_factory()
+    return GraphQLRouter(
+        schema,
+        graphiql=True,
+        context_getter=get_context,
+        include_in_schema=False,
+        prefix="/graphql",
+    )
 
 
 def create_engine_and_run_migrations(
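`GraphQLWithContext` (a `strawberry.asgi.GraphQL` subclass) is replaced by a `strawberry.fastapi.GraphQLRouter` built here: the request-scoped `Context` now comes from a `context_getter` closure rather than an overridden `get_context` method, and the router is later mounted with `app.include_router`. A minimal sketch of the same pattern with a toy schema; the `Query` type and context contents are illustrative, not phoenix's:

```python
# Minimal GraphQLRouter sketch with a toy schema, not the phoenix schema.
import strawberry
from fastapi import FastAPI
from strawberry.fastapi import GraphQLRouter


@strawberry.type
class Query:
    @strawberry.field
    def hello(self) -> str:
        return "world"


def get_context() -> dict:
    # Strawberry exposes this as `info.context` inside resolvers.
    return {"read_only": False}


graphql_router = GraphQLRouter(
    strawberry.Schema(query=Query),
    graphiql=True,
    context_getter=get_context,
    include_in_schema=False,  # keep GraphQL out of the OpenAPI schema
    prefix="/graphql",
)

app = FastAPI()
app.include_router(graphql_router)
```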
@@ -390,8 +371,20 @@ def instrument_engine_if_enabled(engine: AsyncEngine) -> List[Callable[[], None]]:
     return instrumentation_cleanups
 
 
+async def plain_text_http_exception_handler(request: Request, exc: HTTPException) -> Response:
+    """
+    Overrides the default handler for HTTPExceptions to return a plain text
+    response instead of a JSON response. For the original source code, see
+    https://github.com/tiangolo/fastapi/blob/d3cdd3bbd14109f3b268df7ca496e24bb64593aa/fastapi/exception_handlers.py#L11
+    """
+    headers = getattr(exc, "headers", None)
+    if not is_body_allowed_for_status_code(exc.status_code):
+        return Response(status_code=exc.status_code, headers=headers)
+    return PlainTextResponse(str(exc.detail), status_code=exc.status_code, headers=headers)
+
+
 def create_app(
-    db: SessionFactory,
+    db: DbSessionFactory,
     export_path: Path,
     model: Model,
     umap_params: UMAPParameters,
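This handler mirrors FastAPI's default `http_exception_handler` but returns a `PlainTextResponse` instead of a `JSONResponse`, preserving the plain-text error bodies the Starlette app produced. It is wired up via the `exception_handlers` argument of the `FastAPI` constructor further down in this diff. A self-contained sketch of the effect; the `/boom` route is illustrative, not a phoenix endpoint:

```python
# Sketch showing how the plain-text handler changes error bodies.
from fastapi import FastAPI
from fastapi.testclient import TestClient
from fastapi.utils import is_body_allowed_for_status_code
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import PlainTextResponse, Response


async def plain_text_http_exception_handler(request: Request, exc: HTTPException) -> Response:
    headers = getattr(exc, "headers", None)
    if not is_body_allowed_for_status_code(exc.status_code):
        return Response(status_code=exc.status_code, headers=headers)
    return PlainTextResponse(str(exc.detail), status_code=exc.status_code, headers=headers)


app = FastAPI(exception_handlers={HTTPException: plain_text_http_exception_handler})


@app.get("/boom")
async def boom() -> None:
    raise HTTPException(status_code=404, detail="Not Found")


client = TestClient(app)
response = client.get("/boom")
print(response.status_code, response.text)  # 404 and a plain-text body, not JSON
```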
@@ -404,7 +397,7 @@ def create_app(
     initial_evaluations: Optional[Iterable[pb.Evaluation]] = None,
     serve_ui: bool = True,
     clean_up_callbacks: List[Callable[[], None]] = [],
-) -> Starlette:
+) -> FastAPI:
     clean_ups: List[Callable[[], None]] = clean_up_callbacks  # To be called at app shutdown.
     initial_batch_of_spans: Iterable[Tuple[Span, str]] = (
         ()
@@ -447,7 +440,7 @@
 
         strawberry_extensions.append(_OpenTelemetryExtension)
 
-    graphql = GraphQLWithContext(
+    graphql_router = create_graphql_router(
         db=db,
         schema=strawberry.Schema(
             query=schema.query,
@@ -458,7 +451,6 @@
         model=model,
         corpus=corpus,
         export_path=export_path,
-        graphiql=True,
         streaming_last_updated_at=bulk_inserter.last_updated_at,
         cache_for_dataloaders=cache_for_dataloaders,
         read_only=read_only,
@@ -469,8 +461,11 @@
         prometheus_middlewares = [Middleware(PrometheusMiddleware)]
     else:
         prometheus_middlewares = []
-    app = Starlette(
+    app = FastAPI(
+        title="Arize-Phoenix REST API",
+        version=REST_API_VERSION,
         lifespan=_lifespan(
+            dialect=db.dialect,
             read_only=read_only,
             bulk_inserter=bulk_inserter,
             tracer_provider=tracer_provider,
@@ -481,58 +476,41 @@
             Middleware(HeadersMiddleware),
             *prometheus_middlewares,
         ],
+        exception_handlers={HTTPException: plain_text_http_exception_handler},
         debug=debug,
-        routes=V1_ROUTES
-        + [
-            Route("/schema", endpoint=openapi_schema, include_in_schema=False),
-            Route("/arize_phoenix_version", version),
-            Route("/healthz", check_healthz),
-            Route(
-                "/exports",
-                type(
-                    "DownloadExports",
-                    (Download,),
-                    {"path": export_path},
-                ),
-            ),
-            Route(
-                "/docs",
-                api_docs,
-            ),
-            Route(
-                "/graphql",
-                graphql,
-            ),
-        ]
-        + (
-            [
-                Mount(
-                    "/",
-                    app=Static(
-                        directory=SERVER_DIR / "static",
-                        app_config=AppConfig(
-                            has_inferences=model.is_empty is not True,
-                            has_corpus=corpus is not None,
-                            min_dist=umap_params.min_dist,
-                            n_neighbors=umap_params.n_neighbors,
-                            n_samples=umap_params.n_samples,
-                            is_development=dev,
-                            web_manifest_path=SERVER_DIR / "static" / ".vite" / "manifest.json",
-                        ),
-                    ),
-                    name="static",
-                ),
-            ]
-            if serve_ui
-            else []
-        ),
+        swagger_ui_parameters={
+            "defaultModelsExpandDepth": -1,  # hides the schema section in the Swagger UI
+        },
     )
     app.state.read_only = read_only
+    app.state.export_path = export_path
+    app.include_router(v1_router)
+    app.include_router(router)
+    app.include_router(graphql_router)
+    app.add_middleware(GZipMiddleware)
+    if serve_ui:
+        app.mount(
+            "/",
+            app=Static(
+                directory=SERVER_DIR / "static",
+                app_config=AppConfig(
+                    has_inferences=model.is_empty is not True,
+                    has_corpus=corpus is not None,
+                    min_dist=umap_params.min_dist,
+                    n_neighbors=umap_params.n_neighbors,
+                    n_samples=umap_params.n_samples,
+                    is_development=dev,
+                    web_manifest_path=SERVER_DIR / "static" / ".vite" / "manifest.json",
                ),
            ),
+            name="static",
+        )
+
     app.state.db = db
     if tracer_provider:
-        from opentelemetry.instrumentation.starlette import StarletteInstrumentor
+        from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
 
-        StarletteInstrumentor().instrument(tracer_provider=tracer_provider)
-        StarletteInstrumentor.instrument_app(app, tracer_provider=tracer_provider)
-        clean_ups.append(StarletteInstrumentor().uninstrument)
+        FastAPIInstrumentor().instrument(tracer_provider=tracer_provider)
+        FastAPIInstrumentor.instrument_app(app, tracer_provider=tracer_provider)
+        clean_ups.append(FastAPIInstrumentor().uninstrument)
     return app
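Instrumentation correspondingly switches from `opentelemetry-instrumentation-starlette` to `opentelemetry-instrumentation-fastapi`. A hedged standalone sketch of the same calls outside phoenix (requires the `opentelemetry-instrumentation-fastapi` and `opentelemetry-sdk` packages; exporter setup omitted):

```python
# Standalone sketch; the tracer provider has no exporters configured here.
from fastapi import FastAPI
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from opentelemetry.sdk.trace import TracerProvider

app = FastAPI()
tracer_provider = TracerProvider()

# Mirrors the calls in the diff: global hook plus per-app instrumentation.
FastAPIInstrumentor().instrument(tracer_provider=tracer_provider)
FastAPIInstrumentor.instrument_app(app, tracer_provider=tracer_provider)

# At shutdown, phoenix registers the matching clean-up:
# FastAPIInstrumentor().uninstrument()
```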
phoenix/server/grpc_server.py CHANGED
@@ -23,7 +23,7 @@ if TYPE_CHECKING:
 ProjectName: TypeAlias = str
 
 
-class Servicer(TraceServiceServicer):
+class Servicer(TraceServiceServicer):  # type:ignore
     def __init__(
         self,
         callback: Callable[[Span, ProjectName], Awaitable[None]],
@@ -78,7 +78,7 @@ class GrpcServer:
             interceptors=interceptors,
         )
         server.add_insecure_port(f"[::]:{get_env_grpc_port()}")
-        add_TraceServiceServicer_to_server(Servicer(self._callback), server)  # type: ignore
+        add_TraceServiceServicer_to_server(Servicer(self._callback), server)
         await server.start()
         self._server = server
 
phoenix/server/main.py CHANGED
@@ -33,25 +33,23 @@ from phoenix.pointcloud.umap_parameters import (
     UMAPParameters,
 )
 from phoenix.server.app import (
-    SessionFactory,
     _db,
     create_app,
     create_engine_and_run_migrations,
     instrument_engine_if_enabled,
 )
+from phoenix.server.types import DbSessionFactory
 from phoenix.settings import Settings
 from phoenix.trace.fixtures import (
     TRACES_FIXTURES,
-    download_traces_fixture,
     get_dataset_fixtures,
     get_evals_from_fixture,
-    get_trace_fixture_by_name,
+    load_example_traces,
     reset_fixture_span_ids_and_timestamps,
     send_dataset_fixtures,
 )
 from phoenix.trace.otel import decode_otlp_span, encode_span_to_otlp
 from phoenix.trace.schemas import Span
-from phoenix.trace.span_json_decoder import json_string_to_span
 
 logger = logging.getLogger(__name__)
 
@@ -221,10 +219,8 @@ if __name__ == "__main__":
         (
             # Apply `encode` here because legacy jsonl files contains UUIDs as strings.
             # `encode` removes the hyphens in the UUIDs.
-            decode_otlp_span(encode_span_to_otlp(json_string_to_span(json_span)))
-            for json_span in download_traces_fixture(
-                get_trace_fixture_by_name(trace_dataset_name)
-            )
+            decode_otlp_span(encode_span_to_otlp(span))
+            for span in load_example_traces(trace_dataset_name).to_spans()
         ),
         get_evals_from_fixture(trace_dataset_name),
     )
@@ -250,7 +246,7 @@
     working_dir = get_working_dir().resolve()
     engine = create_engine_and_run_migrations(db_connection_str)
     instrumentation_cleanups = instrument_engine_if_enabled(engine)
-    factory = SessionFactory(session_factory=_db(engine), dialect=engine.dialect.name)
+    factory = DbSessionFactory(db=_db(engine), dialect=engine.dialect.name)
     app = create_app(
         db=factory,
         export_path=export_path,
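`SessionFactory` (removed from `phoenix/server/app.py` above) is superseded by `DbSessionFactory` from the new `phoenix/server/types.py`, whose source is not shown in this diff. Judging from its usage here (`DbSessionFactory(db=..., dialect=...)`, the `db.dialect` attribute, and calling the object to obtain a session context manager), it plays the same role as the old class. A rough, hedged approximation for orientation only, not the actual implementation, and with an assumed import path for the dialect enum:

```python
# Rough approximation inferred from usage in this diff; NOT the real
# phoenix.server.types.DbSessionFactory. The SupportedSQLDialect import
# location is assumed.
from typing import AsyncContextManager, Callable

from sqlalchemy.ext.asyncio import AsyncSession

from phoenix.db.helpers import SupportedSQLDialect  # assumed module path


class DbSessionFactory:
    def __init__(
        self,
        db: Callable[[], AsyncContextManager[AsyncSession]],
        dialect: str,
    ) -> None:
        self._db = db
        self.dialect = SupportedSQLDialect(dialect)

    def __call__(self) -> AsyncContextManager[AsyncSession]:
        return self._db()
```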
phoenix/server/static/.vite/manifest.json CHANGED
@@ -1,32 +1,32 @@
 {
-  "_components-DeS0YEmv.js": {
-    "file": "assets/components-DeS0YEmv.js",
+  "_components-kGgeFkHp.js": {
+    "file": "assets/components-kGgeFkHp.js",
     "name": "components",
     "imports": [
-      "_vendor-DPvSDRn3.js",
-      "_vendor-arizeai-CkvPT67c.js",
-      "_pages-hdjlFZhO.js",
+      "_vendor-CP0b0YG0.js",
+      "_vendor-arizeai-B5Hti8OB.js",
+      "_pages-DabDCmVd.js",
       "_vendor-three-DwGkEfCM.js",
-      "_vendor-codemirror-Cqwpwlua.js"
+      "_vendor-codemirror-DtdPDzrv.js"
     ]
   },
-  "_pages-hdjlFZhO.js": {
-    "file": "assets/pages-hdjlFZhO.js",
+  "_pages-DabDCmVd.js": {
+    "file": "assets/pages-DabDCmVd.js",
     "name": "pages",
     "imports": [
-      "_vendor-DPvSDRn3.js",
-      "_components-DeS0YEmv.js",
-      "_vendor-arizeai-CkvPT67c.js",
-      "_vendor-recharts-5jlNaZuF.js",
-      "_vendor-codemirror-Cqwpwlua.js"
+      "_vendor-CP0b0YG0.js",
+      "_components-kGgeFkHp.js",
+      "_vendor-arizeai-B5Hti8OB.js",
+      "_vendor-recharts-A0DA1O99.js",
+      "_vendor-codemirror-DtdPDzrv.js"
     ]
   },
   "_vendor-!~{003}~.js": {
     "file": "assets/vendor-DxkFTwjz.css",
     "src": "_vendor-!~{003}~.js"
   },
-  "_vendor-DPvSDRn3.js": {
-    "file": "assets/vendor-DPvSDRn3.js",
+  "_vendor-CP0b0YG0.js": {
+    "file": "assets/vendor-CP0b0YG0.js",
     "name": "vendor",
     "imports": [
       "_vendor-three-DwGkEfCM.js"
@@ -35,25 +35,25 @@
       "assets/vendor-DxkFTwjz.css"
     ]
   },
-  "_vendor-arizeai-CkvPT67c.js": {
-    "file": "assets/vendor-arizeai-CkvPT67c.js",
+  "_vendor-arizeai-B5Hti8OB.js": {
+    "file": "assets/vendor-arizeai-B5Hti8OB.js",
     "name": "vendor-arizeai",
     "imports": [
-      "_vendor-DPvSDRn3.js"
+      "_vendor-CP0b0YG0.js"
     ]
   },
-  "_vendor-codemirror-Cqwpwlua.js": {
-    "file": "assets/vendor-codemirror-Cqwpwlua.js",
+  "_vendor-codemirror-DtdPDzrv.js": {
+    "file": "assets/vendor-codemirror-DtdPDzrv.js",
     "name": "vendor-codemirror",
     "imports": [
-      "_vendor-DPvSDRn3.js"
+      "_vendor-CP0b0YG0.js"
     ]
   },
-  "_vendor-recharts-5jlNaZuF.js": {
-    "file": "assets/vendor-recharts-5jlNaZuF.js",
+  "_vendor-recharts-A0DA1O99.js": {
+    "file": "assets/vendor-recharts-A0DA1O99.js",
     "name": "vendor-recharts",
     "imports": [
-      "_vendor-DPvSDRn3.js"
+      "_vendor-CP0b0YG0.js"
     ]
   },
   "_vendor-three-DwGkEfCM.js": {
@@ -61,18 +61,18 @@
     "name": "vendor-three"
   },
   "index.tsx": {
-    "file": "assets/index-CQgXRwU0.js",
+    "file": "assets/index-BctFO6S7.js",
     "name": "index",
     "src": "index.tsx",
     "isEntry": true,
     "imports": [
-      "_vendor-DPvSDRn3.js",
-      "_vendor-arizeai-CkvPT67c.js",
-      "_pages-hdjlFZhO.js",
-      "_components-DeS0YEmv.js",
+      "_vendor-CP0b0YG0.js",
+      "_vendor-arizeai-B5Hti8OB.js",
+      "_pages-DabDCmVd.js",
+      "_components-kGgeFkHp.js",
       "_vendor-three-DwGkEfCM.js",
-      "_vendor-recharts-5jlNaZuF.js",
-      "_vendor-codemirror-Cqwpwlua.js"
+      "_vendor-recharts-A0DA1O99.js",
+      "_vendor-codemirror-DtdPDzrv.js"
     ]
   }
 }