arize-phoenix 4.4.4rc6__py3-none-any.whl → 4.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release of arize-phoenix has been flagged as potentially problematic.

Files changed (123)
  1. {arize_phoenix-4.4.4rc6.dist-info → arize_phoenix-4.5.0.dist-info}/METADATA +8 -14
  2. {arize_phoenix-4.4.4rc6.dist-info → arize_phoenix-4.5.0.dist-info}/RECORD +58 -122
  3. {arize_phoenix-4.4.4rc6.dist-info → arize_phoenix-4.5.0.dist-info}/WHEEL +1 -1
  4. phoenix/__init__.py +27 -0
  5. phoenix/config.py +7 -42
  6. phoenix/core/model.py +25 -25
  7. phoenix/core/model_schema.py +62 -64
  8. phoenix/core/model_schema_adapter.py +25 -27
  9. phoenix/datetime_utils.py +0 -4
  10. phoenix/db/bulk_inserter.py +14 -54
  11. phoenix/db/insertion/evaluation.py +10 -10
  12. phoenix/db/insertion/helpers.py +14 -17
  13. phoenix/db/insertion/span.py +3 -3
  14. phoenix/db/migrations/versions/cf03bd6bae1d_init.py +28 -2
  15. phoenix/db/models.py +4 -236
  16. phoenix/inferences/fixtures.py +23 -23
  17. phoenix/inferences/inferences.py +7 -7
  18. phoenix/inferences/validation.py +1 -1
  19. phoenix/server/api/context.py +0 -20
  20. phoenix/server/api/dataloaders/__init__.py +0 -20
  21. phoenix/server/api/dataloaders/span_descendants.py +3 -2
  22. phoenix/server/api/routers/v1/__init__.py +2 -77
  23. phoenix/server/api/routers/v1/evaluations.py +13 -8
  24. phoenix/server/api/routers/v1/spans.py +5 -9
  25. phoenix/server/api/routers/v1/traces.py +4 -1
  26. phoenix/server/api/schema.py +303 -2
  27. phoenix/server/api/types/Cluster.py +19 -19
  28. phoenix/server/api/types/Dataset.py +63 -282
  29. phoenix/server/api/types/DatasetRole.py +23 -0
  30. phoenix/server/api/types/Dimension.py +29 -30
  31. phoenix/server/api/types/EmbeddingDimension.py +34 -40
  32. phoenix/server/api/types/Event.py +16 -16
  33. phoenix/server/api/{mutations/export_events_mutations.py → types/ExportEventsMutation.py} +14 -17
  34. phoenix/server/api/types/Model.py +42 -43
  35. phoenix/server/api/types/Project.py +12 -26
  36. phoenix/server/api/types/Span.py +2 -79
  37. phoenix/server/api/types/TimeSeries.py +6 -6
  38. phoenix/server/api/types/Trace.py +4 -15
  39. phoenix/server/api/types/UMAPPoints.py +1 -1
  40. phoenix/server/api/types/node.py +111 -5
  41. phoenix/server/api/types/pagination.py +52 -10
  42. phoenix/server/app.py +49 -103
  43. phoenix/server/main.py +27 -49
  44. phoenix/server/openapi/docs.py +0 -3
  45. phoenix/server/static/index.js +1384 -2390
  46. phoenix/server/templates/index.html +0 -1
  47. phoenix/services.py +15 -15
  48. phoenix/session/client.py +23 -611
  49. phoenix/session/session.py +37 -47
  50. phoenix/trace/exporter.py +9 -14
  51. phoenix/trace/fixtures.py +7 -133
  52. phoenix/trace/schemas.py +2 -1
  53. phoenix/trace/span_evaluations.py +3 -3
  54. phoenix/trace/trace_dataset.py +6 -6
  55. phoenix/version.py +1 -1
  56. phoenix/db/insertion/dataset.py +0 -237
  57. phoenix/db/migrations/types.py +0 -29
  58. phoenix/db/migrations/versions/10460e46d750_datasets.py +0 -291
  59. phoenix/experiments/__init__.py +0 -6
  60. phoenix/experiments/evaluators/__init__.py +0 -29
  61. phoenix/experiments/evaluators/base.py +0 -153
  62. phoenix/experiments/evaluators/code_evaluators.py +0 -99
  63. phoenix/experiments/evaluators/llm_evaluators.py +0 -244
  64. phoenix/experiments/evaluators/utils.py +0 -189
  65. phoenix/experiments/functions.py +0 -616
  66. phoenix/experiments/tracing.py +0 -85
  67. phoenix/experiments/types.py +0 -722
  68. phoenix/experiments/utils.py +0 -9
  69. phoenix/server/api/dataloaders/average_experiment_run_latency.py +0 -54
  70. phoenix/server/api/dataloaders/dataset_example_revisions.py +0 -100
  71. phoenix/server/api/dataloaders/dataset_example_spans.py +0 -43
  72. phoenix/server/api/dataloaders/experiment_annotation_summaries.py +0 -85
  73. phoenix/server/api/dataloaders/experiment_error_rates.py +0 -43
  74. phoenix/server/api/dataloaders/experiment_run_counts.py +0 -42
  75. phoenix/server/api/dataloaders/experiment_sequence_number.py +0 -49
  76. phoenix/server/api/dataloaders/project_by_name.py +0 -31
  77. phoenix/server/api/dataloaders/span_projects.py +0 -33
  78. phoenix/server/api/dataloaders/trace_row_ids.py +0 -39
  79. phoenix/server/api/helpers/dataset_helpers.py +0 -179
  80. phoenix/server/api/input_types/AddExamplesToDatasetInput.py +0 -16
  81. phoenix/server/api/input_types/AddSpansToDatasetInput.py +0 -14
  82. phoenix/server/api/input_types/ClearProjectInput.py +0 -15
  83. phoenix/server/api/input_types/CreateDatasetInput.py +0 -12
  84. phoenix/server/api/input_types/DatasetExampleInput.py +0 -14
  85. phoenix/server/api/input_types/DatasetSort.py +0 -17
  86. phoenix/server/api/input_types/DatasetVersionSort.py +0 -16
  87. phoenix/server/api/input_types/DeleteDatasetExamplesInput.py +0 -13
  88. phoenix/server/api/input_types/DeleteDatasetInput.py +0 -7
  89. phoenix/server/api/input_types/DeleteExperimentsInput.py +0 -9
  90. phoenix/server/api/input_types/PatchDatasetExamplesInput.py +0 -35
  91. phoenix/server/api/input_types/PatchDatasetInput.py +0 -14
  92. phoenix/server/api/mutations/__init__.py +0 -13
  93. phoenix/server/api/mutations/auth.py +0 -11
  94. phoenix/server/api/mutations/dataset_mutations.py +0 -520
  95. phoenix/server/api/mutations/experiment_mutations.py +0 -65
  96. phoenix/server/api/mutations/project_mutations.py +0 -47
  97. phoenix/server/api/openapi/__init__.py +0 -0
  98. phoenix/server/api/openapi/main.py +0 -6
  99. phoenix/server/api/openapi/schema.py +0 -16
  100. phoenix/server/api/queries.py +0 -503
  101. phoenix/server/api/routers/v1/dataset_examples.py +0 -178
  102. phoenix/server/api/routers/v1/datasets.py +0 -965
  103. phoenix/server/api/routers/v1/experiment_evaluations.py +0 -65
  104. phoenix/server/api/routers/v1/experiment_runs.py +0 -96
  105. phoenix/server/api/routers/v1/experiments.py +0 -174
  106. phoenix/server/api/types/AnnotatorKind.py +0 -10
  107. phoenix/server/api/types/CreateDatasetPayload.py +0 -8
  108. phoenix/server/api/types/DatasetExample.py +0 -85
  109. phoenix/server/api/types/DatasetExampleRevision.py +0 -34
  110. phoenix/server/api/types/DatasetVersion.py +0 -14
  111. phoenix/server/api/types/ExampleRevisionInterface.py +0 -14
  112. phoenix/server/api/types/Experiment.py +0 -147
  113. phoenix/server/api/types/ExperimentAnnotationSummary.py +0 -13
  114. phoenix/server/api/types/ExperimentComparison.py +0 -19
  115. phoenix/server/api/types/ExperimentRun.py +0 -91
  116. phoenix/server/api/types/ExperimentRunAnnotation.py +0 -57
  117. phoenix/server/api/types/Inferences.py +0 -80
  118. phoenix/server/api/types/InferencesRole.py +0 -23
  119. phoenix/utilities/json.py +0 -61
  120. phoenix/utilities/re.py +0 -50
  121. {arize_phoenix-4.4.4rc6.dist-info → arize_phoenix-4.5.0.dist-info}/licenses/IP_NOTICE +0 -0
  122. {arize_phoenix-4.4.4rc6.dist-info → arize_phoenix-4.5.0.dist-info}/licenses/LICENSE +0 -0
  123. /phoenix/server/api/{helpers/__init__.py → helpers.py} +0 -0
phoenix/server/app.py CHANGED
@@ -33,6 +33,7 @@ from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoin
 from starlette.requests import Request
 from starlette.responses import FileResponse, PlainTextResponse, Response
 from starlette.routing import Mount, Route
+from starlette.schemas import SchemaGenerator
 from starlette.staticfiles import StaticFiles
 from starlette.templating import Jinja2Templates
 from starlette.types import Scope, StatefulLifespan
@@ -56,30 +57,19 @@ from phoenix.exceptions import PhoenixMigrationError
 from phoenix.pointcloud.umap_parameters import UMAPParameters
 from phoenix.server.api.context import Context, DataLoaders
 from phoenix.server.api.dataloaders import (
-    AverageExperimentRunLatencyDataLoader,
     CacheForDataLoaders,
-    DatasetExampleRevisionsDataLoader,
-    DatasetExampleSpansDataLoader,
     DocumentEvaluationsDataLoader,
     DocumentEvaluationSummaryDataLoader,
     DocumentRetrievalMetricsDataLoader,
     EvaluationSummaryDataLoader,
-    ExperimentAnnotationSummaryDataLoader,
-    ExperimentErrorRatesDataLoader,
-    ExperimentRunCountsDataLoader,
-    ExperimentSequenceNumberDataLoader,
     LatencyMsQuantileDataLoader,
     MinStartOrMaxEndTimeDataLoader,
-    ProjectByNameDataLoader,
     RecordCountDataLoader,
     SpanDescendantsDataLoader,
     SpanEvaluationsDataLoader,
-    SpanProjectsDataLoader,
     TokenCountDataLoader,
     TraceEvaluationsDataLoader,
-    TraceRowIdsDataLoader,
 )
-from phoenix.server.api.openapi.schema import OPENAPI_SCHEMA_GENERATOR
 from phoenix.server.api.routers.v1 import V1_ROUTES
 from phoenix.server.api.schema import schema
 from phoenix.server.grpc_server import GrpcServer
@@ -94,6 +84,10 @@ logger = logging.getLogger(__name__)
 
 templates = Jinja2Templates(directory=SERVER_DIR / "templates")
 
+schemas = SchemaGenerator(
+    {"openapi": "3.0.0", "info": {"title": "ArizePhoenix API", "version": "1.0"}}
+)
+
 
 class AppConfig(NamedTuple):
     has_inferences: bool
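
For background: SchemaGenerator is Starlette's built-in OpenAPI helper, which replaces the custom OPENAPI_SCHEMA_GENERATOR removed above. A minimal standalone sketch of the pattern, assuming Starlette with PyYAML installed (the /items route and its docstring are illustrative, not Phoenix code):

from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from starlette.routing import Route
from starlette.schemas import SchemaGenerator

schemas = SchemaGenerator(
    {"openapi": "3.0.0", "info": {"title": "Example API", "version": "1.0"}}
)

async def list_items(request: Request) -> JSONResponse:
    """
    responses:
      200:
        description: A list of items.
    """
    return JSONResponse([])

async def openapi_schema(request: Request) -> Response:
    # Walks the app's routes, parsing each endpoint docstring as YAML,
    # and serves the assembled OpenAPI document.
    return schemas.OpenAPIResponse(request=request)

app = Starlette(routes=[
    Route("/items", list_items, methods=["GET"]),
    Route("/schema", openapi_schema, include_in_schema=False),
])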
@@ -132,7 +126,6 @@ class Static(StaticFiles):
                 "n_neighbors": self._app_config.n_neighbors,
                 "n_samples": self._app_config.n_samples,
                 "basename": request.scope.get("root_path", ""),
-                "platform_version": phoenix.__version__,
                 "request": request,
             },
         )
@@ -192,9 +185,6 @@ class GraphQLWithContext(GraphQL): # type: ignore
             export_path=self.export_path,
             streaming_last_updated_at=self.streaming_last_updated_at,
             data_loaders=DataLoaders(
-                average_experiment_run_latency=AverageExperimentRunLatencyDataLoader(self.db),
-                dataset_example_revisions=DatasetExampleRevisionsDataLoader(self.db),
-                dataset_example_spans=DatasetExampleSpansDataLoader(self.db),
                 document_evaluation_summaries=DocumentEvaluationSummaryDataLoader(
                     self.db,
                     cache_map=self.cache_for_dataloaders.document_evaluation_summary
@@ -209,10 +199,6 @@ class GraphQLWithContext(GraphQL): # type: ignore
                     if self.cache_for_dataloaders
                     else None,
                 ),
-                experiment_annotation_summaries=ExperimentAnnotationSummaryDataLoader(self.db),
-                experiment_error_rates=ExperimentErrorRatesDataLoader(self.db),
-                experiment_run_counts=ExperimentRunCountsDataLoader(self.db),
-                experiment_sequence_number=ExperimentSequenceNumberDataLoader(self.db),
                 latency_ms_quantile=LatencyMsQuantileDataLoader(
                     self.db,
                     cache_map=self.cache_for_dataloaders.latency_ms_quantile
@@ -233,7 +219,6 @@ class GraphQLWithContext(GraphQL): # type: ignore
                 ),
                 span_descendants=SpanDescendantsDataLoader(self.db),
                 span_evaluations=SpanEvaluationsDataLoader(self.db),
-                span_projects=SpanProjectsDataLoader(self.db),
                 token_counts=TokenCountDataLoader(
                     self.db,
                     cache_map=self.cache_for_dataloaders.token_count
@@ -241,8 +226,6 @@ class GraphQLWithContext(GraphQL): # type: ignore
                     else None,
                 ),
                 trace_evaluations=TraceEvaluationsDataLoader(self.db),
-                trace_row_ids=TraceRowIdsDataLoader(self.db),
-                project_by_name=ProjectByNameDataLoader(self.db),
             ),
             cache_for_dataloaders=self.cache_for_dataloaders,
             read_only=self.read_only,
@@ -289,11 +272,7 @@ def _lifespan(
 ) -> StatefulLifespan[Starlette]:
     @contextlib.asynccontextmanager
     async def lifespan(_: Starlette) -> AsyncIterator[Dict[str, Any]]:
-        async with bulk_inserter as (
-            queue_span,
-            queue_evaluation,
-            enqueue_operation,
-        ), GrpcServer(
+        async with bulk_inserter as (queue_span, queue_evaluation), GrpcServer(
             queue_span,
             disabled=read_only,
             tracer_provider=tracer_provider,
@@ -302,7 +281,6 @@ def _lifespan(
             yield {
                 "queue_span_for_bulk_insert": queue_span,
                 "queue_evaluation_for_bulk_insert": queue_evaluation,
-                "enqueue_operation": enqueue_operation,
             }
             for clean_up in clean_ups:
                 clean_up()
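
The lifespan hunks above rely on Starlette's stateful-lifespan pattern: the dict yielded by the lifespan is copied into each request's scope, which is how handlers reach the queueing callables. A minimal sketch of that pattern, assuming Starlette 0.26 or later (the /ingest endpoint and its no-op callable are illustrative, not Phoenix code):

import contextlib
from typing import Any, AsyncIterator, Dict

from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import PlainTextResponse
from starlette.routing import Route

@contextlib.asynccontextmanager
async def lifespan(_: Starlette) -> AsyncIterator[Dict[str, Any]]:
    # Whatever is yielded here becomes per-request state on request.state.
    yield {"queue_span_for_bulk_insert": lambda span, project_name: None}

async def ingest(request: Request) -> PlainTextResponse:
    # The same key yielded by the lifespan is readable here.
    queue_span = request.state.queue_span_for_bulk_insert
    queue_span(None, "default")  # hypothetical payload
    return PlainTextResponse("queued")

app = Starlette(routes=[Route("/ingest", ingest, methods=["POST"])], lifespan=lifespan)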
@@ -315,63 +293,15 @@ async def check_healthz(_: Request) -> PlainTextResponse:
 
 
 async def openapi_schema(request: Request) -> Response:
-    return OPENAPI_SCHEMA_GENERATOR.OpenAPIResponse(request=request)
+    return schemas.OpenAPIResponse(request=request)
 
 
 async def api_docs(request: Request) -> Response:
     return get_swagger_ui_html(openapi_url="/schema", title="arize-phoenix API")
 
 
-class SessionFactory:
-    def __init__(
-        self,
-        session_factory: Callable[[], AsyncContextManager[AsyncSession]],
-        dialect: str,
-    ):
-        self.session_factory = session_factory
-        self.dialect = SupportedSQLDialect(dialect)
-
-    def __call__(self) -> AsyncContextManager[AsyncSession]:
-        return self.session_factory()
-
-
-def create_engine_and_run_migrations(
-    database_url: str,
-) -> AsyncEngine:
-    try:
-        return create_engine(database_url)
-    except PhoenixMigrationError as e:
-        msg = (
-            "\n\n⚠️⚠️ Phoenix failed to migrate the database to the latest version. ⚠️⚠️\n\n"
-            "The database may be in a dirty state. To resolve this, the Alembic CLI can be used\n"
-            "from the `src/phoenix/db` directory inside the Phoenix project root. From here,\n"
-            "revert any partial migrations and run `alembic stamp` to reset the migration state,\n"
-            "then try starting Phoenix again.\n\n"
-            "If issues persist, please reach out for support in the Arize community Slack:\n"
-            "https://arize-ai.slack.com\n\n"
-            "You can also refer to the Alembic documentation for more information:\n"
-            "https://alembic.sqlalchemy.org/en/latest/tutorial.html\n\n"
-            ""
-        )
-        raise PhoenixMigrationError(msg) from e
-
-
-def instrument_engine_if_enabled(engine: AsyncEngine) -> List[Callable[[], None]]:
-    instrumentation_cleanups = []
-    if server_instrumentation_is_enabled():
-        from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
-
-        tracer_provider = initialize_opentelemetry_tracer_provider()
-        SQLAlchemyInstrumentor().instrument(
-            engine=engine.sync_engine,
-            tracer_provider=tracer_provider,
-        )
-        instrumentation_cleanups.append(SQLAlchemyInstrumentor().uninstrument)
-    return instrumentation_cleanups
-
-
 def create_app(
-    db: SessionFactory,
+    database_url: str,
     export_path: Path,
     model: Model,
     umap_params: UMAPParameters,
@@ -381,10 +311,8 @@ def create_app(
     enable_prometheus: bool = False,
     initial_spans: Optional[Iterable[Union[Span, Tuple[Span, str]]]] = None,
     initial_evaluations: Optional[Iterable[pb.Evaluation]] = None,
-    serve_ui: bool = True,
-    clean_up_callbacks: List[Callable[[], None]] = [],
 ) -> Starlette:
-    clean_ups: List[Callable[[], None]] = clean_up_callbacks  # To be called at app shutdown.
+    clean_ups: List[Callable[[], None]] = []  # To be called at app shutdown.
     initial_batch_of_spans: Iterable[Tuple[Span, str]] = (
         ()
         if initial_spans is None
@@ -394,10 +322,28 @@ def create_app(
         )
     )
     initial_batch_of_evaluations = () if initial_evaluations is None else initial_evaluations
+    try:
+        engine = create_engine(database_url)
+    except PhoenixMigrationError as e:
+        msg = (
+            "\n\n⚠️⚠️ Phoenix failed to migrate the database to the latest version. ⚠️⚠️\n\n"
+            "The database may be in a dirty state. To resolve this, the Alembic CLI can be used\n"
+            "from the `src/phoenix/db` directory inside the Phoenix project root. From here,\n"
+            "revert any partial migrations and run `alembic stamp` to reset the migration state,\n"
+            "then try starting Phoenix again.\n\n"
+            "If issues persist, please reach out for support in the Arize community Slack:\n"
+            "https://arize-ai.slack.com\n\n"
+            "You can also refer to the Alembic documentation for more information:\n"
+            "https://alembic.sqlalchemy.org/en/latest/tutorial.html\n\n"
+            ""
+        )
+        raise PhoenixMigrationError(msg) from e
     cache_for_dataloaders = (
-        CacheForDataLoaders() if db.dialect is SupportedSQLDialect.SQLITE else None
+        CacheForDataLoaders()
+        if SupportedSQLDialect(engine.dialect.name) is SupportedSQLDialect.SQLITE
+        else None
     )
-
+    db = _db(engine)
     bulk_inserter = BulkInserter(
         db,
         enable_prometheus=enable_prometheus,
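
Note the dialect check introduced above: create_app now derives the dialect from the engine instead of receiving it through SessionFactory. A standalone sketch of that check, assuming SQLAlchemy's async API with the aiosqlite driver installed (the enum body is an illustrative stand-in for Phoenix's SupportedSQLDialect):

import enum

from sqlalchemy.ext.asyncio import create_async_engine

class SupportedSQLDialect(enum.Enum):  # illustrative stand-in
    SQLITE = "sqlite"
    POSTGRESQL = "postgresql"

engine = create_async_engine("sqlite+aiosqlite:///:memory:")
# SQLAlchemy reports the backend name on engine.dialect.name ("sqlite" here),
# so the Enum value lookup succeeds and the identity check selects the
# SQLite-only dataloader cache path.
use_cache = SupportedSQLDialect(engine.dialect.name) is SupportedSQLDialect.SQLITE
assert use_cache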
@@ -408,9 +354,16 @@ def create_app(
     tracer_provider = None
     strawberry_extensions = schema.get_extensions()
     if server_instrumentation_is_enabled():
+        from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
         from opentelemetry.trace import TracerProvider
         from strawberry.extensions.tracing import OpenTelemetryExtension
 
+        tracer_provider = initialize_opentelemetry_tracer_provider()
+        SQLAlchemyInstrumentor().instrument(
+            engine=engine.sync_engine,
+            tracer_provider=tracer_provider,
+        )
+        clean_ups.append(SQLAlchemyInstrumentor().uninstrument)
         if TYPE_CHECKING:
             # Type-check the class before monkey-patching its private attribute.
             assert OpenTelemetryExtension._tracer
@@ -424,7 +377,6 @@ def create_app(
                 self._tracer = cast(TracerProvider, tracer_provider).get_tracer("strawberry")
 
         strawberry_extensions.append(_OpenTelemetryExtension)
-
     graphql = GraphQLWithContext(
         db=db,
         schema=strawberry.Schema(
@@ -481,27 +433,21 @@ def create_app(
                 "/graphql",
                 graphql,
             ),
-        ]
-        + (
-            [
-                Mount(
-                    "/",
-                    app=Static(
-                        directory=SERVER_DIR / "static",
-                        app_config=AppConfig(
-                            has_inferences=model.is_empty is not True,
-                            has_corpus=corpus is not None,
-                            min_dist=umap_params.min_dist,
-                            n_neighbors=umap_params.n_neighbors,
-                            n_samples=umap_params.n_samples,
-                        ),
+            Mount(
+                "/",
+                app=Static(
+                    directory=SERVER_DIR / "static",
+                    app_config=AppConfig(
+                        has_inferences=model.is_empty is not True,
+                        has_corpus=corpus is not None,
+                        min_dist=umap_params.min_dist,
+                        n_neighbors=umap_params.n_neighbors,
+                        n_samples=umap_params.n_samples,
                     ),
-                    name="static",
                 ),
-            ]
-            if serve_ui
-            else []
-        ),
+                name="static",
+            ),
+        ],
     )
     app.state.read_only = read_only
     app.state.db = db
phoenix/server/main.py CHANGED
@@ -22,9 +22,9 @@ from phoenix.config import (
     get_pids_path,
     get_working_dir,
 )
-from phoenix.core.model_schema_adapter import create_model_from_inferences
+from phoenix.core.model_schema_adapter import create_model_from_datasets
 from phoenix.db import get_printable_db_url
-from phoenix.inferences.fixtures import FIXTURES, get_inferences
+from phoenix.inferences.fixtures import FIXTURES, get_datasets
 from phoenix.inferences.inferences import EMPTY_INFERENCES, Inferences
 from phoenix.pointcloud.umap_parameters import (
     DEFAULT_MIN_DIST,
@@ -32,22 +32,14 @@ from phoenix.pointcloud.umap_parameters import (
     DEFAULT_N_SAMPLES,
     UMAPParameters,
 )
-from phoenix.server.app import (
-    SessionFactory,
-    _db,
-    create_app,
-    create_engine_and_run_migrations,
-    instrument_engine_if_enabled,
-)
+from phoenix.server.app import create_app
 from phoenix.settings import Settings
 from phoenix.trace.fixtures import (
     TRACES_FIXTURES,
     download_traces_fixture,
-    get_dataset_fixtures,
     get_evals_from_fixture,
     get_trace_fixture_by_name,
     reset_fixture_span_ids_and_timestamps,
-    send_dataset_fixtures,
 )
 from phoenix.trace.otel import decode_otlp_span, encode_span_to_otlp
 from phoenix.trace.schemas import Span
@@ -107,14 +99,14 @@ def _get_pid_file() -> Path:
 DEFAULT_UMAP_PARAMS_STR = f"{DEFAULT_MIN_DIST},{DEFAULT_N_NEIGHBORS},{DEFAULT_N_SAMPLES}"
 
 if __name__ == "__main__":
-    primary_inferences_name: str
-    reference_inferences_name: Optional[str]
+    primary_dataset_name: str
+    reference_dataset_name: Optional[str]
     trace_dataset_name: Optional[str] = None
     simulate_streaming: Optional[bool] = None
 
-    primary_inferences: Inferences = EMPTY_INFERENCES
-    reference_inferences: Optional[Inferences] = None
-    corpus_inferences: Optional[Inferences] = None
+    primary_dataset: Inferences = EMPTY_INFERENCES
+    reference_dataset: Optional[Inferences] = None
+    corpus_dataset: Optional[Inferences] = None
 
     # Initialize the settings for the Server
     Settings.log_migrations = True
@@ -158,34 +150,34 @@ if __name__ == "__main__":
     )
     export_path = Path(args.export_path) if args.export_path else EXPORT_DIR
     if args.command == "datasets":
-        primary_inferences_name = args.primary
-        reference_inferences_name = args.reference
-        corpus_inferences_name = args.corpus
-        primary_inferences = Inferences.from_name(primary_inferences_name)
-        reference_inferences = (
-            Inferences.from_name(reference_inferences_name)
-            if reference_inferences_name is not None
+        primary_dataset_name = args.primary
+        reference_dataset_name = args.reference
+        corpus_dataset_name = args.corpus
+        primary_dataset = Inferences.from_name(primary_dataset_name)
+        reference_dataset = (
+            Inferences.from_name(reference_dataset_name)
+            if reference_dataset_name is not None
             else None
         )
-        corpus_inferences = (
-            None if corpus_inferences_name is None else Inferences.from_name(corpus_inferences_name)
+        corpus_dataset = (
+            None if corpus_dataset_name is None else Inferences.from_name(corpus_dataset_name)
        )
     elif args.command == "fixture":
         fixture_name = args.fixture
         primary_only = args.primary_only
-        primary_inferences, reference_inferences, corpus_inferences = get_inferences(
+        primary_dataset, reference_dataset, corpus_dataset = get_datasets(
             fixture_name,
             args.no_internet,
         )
         if primary_only:
-            reference_inferences_name = None
-            reference_inferences = None
+            reference_dataset_name = None
+            reference_dataset = None
     elif args.command == "trace-fixture":
         trace_dataset_name = args.fixture
         simulate_streaming = args.simulate_streaming
     elif args.command == "demo":
         fixture_name = args.fixture
-        primary_inferences, reference_inferences, corpus_inferences = get_inferences(
+        primary_dataset, reference_dataset, corpus_dataset = get_datasets(
             fixture_name,
             args.no_internet,
         )
@@ -205,11 +197,9 @@ if __name__ == "__main__":
 
     port = args.port or get_env_port()
     host_root_path = get_env_host_root_path()
-    read_only = args.read_only
-
-    model = create_model_from_inferences(
-        primary_inferences,
-        reference_inferences,
+    model = create_model_from_datasets(
+        primary_dataset,
+        reference_dataset,
     )
 
     fixture_spans: List[Span] = []
@@ -226,19 +216,13 @@ if __name__ == "__main__":
             ),
             get_evals_from_fixture(trace_dataset_name),
         )
-        dataset_fixtures = list(get_dataset_fixtures(trace_dataset_name))
-        if not read_only:
-            Thread(
-                target=send_dataset_fixtures,
-                args=(f"http://{host}:{port}", dataset_fixtures),
-            ).start()
     umap_params_list = args.umap_params.split(",")
     umap_params = UMAPParameters(
         min_dist=float(umap_params_list[0]),
         n_neighbors=int(umap_params_list[1]),
         n_samples=int(umap_params_list[2]),
     )
-
+    read_only = args.read_only
     logger.info(f"Server umap params: {umap_params}")
     if enable_prometheus := get_env_enable_prometheus():
         from phoenix.server.prometheus import start_prometheus
@@ -246,23 +230,17 @@ if __name__ == "__main__":
         start_prometheus()
 
     working_dir = get_working_dir().resolve()
-    engine = create_engine_and_run_migrations(db_connection_str)
-    instrumentation_cleanups = instrument_engine_if_enabled(engine)
-    factory = SessionFactory(session_factory=_db(engine), dialect=engine.dialect.name)
     app = create_app(
-        db=factory,
+        database_url=db_connection_str,
         export_path=export_path,
         model=model,
         umap_params=umap_params,
-        corpus=None
-        if corpus_inferences is None
-        else create_model_from_inferences(corpus_inferences),
+        corpus=None if corpus_dataset is None else create_model_from_datasets(corpus_dataset),
         debug=args.debug,
         read_only=read_only,
         enable_prometheus=enable_prometheus,
         initial_spans=fixture_spans,
         initial_evaluations=fixture_evals,
-        clean_up_callbacks=instrumentation_cleanups,
     )
     server = Server(config=Config(app, host=host, port=port, root_path=host_root_path))  # type: ignore
     Thread(target=_write_pid_file_when_ready, args=(server,), daemon=True).start()
phoenix/server/openapi/docs.py CHANGED
@@ -43,9 +43,6 @@ def get_swagger_ui_html(
     <div id="swagger-ui">
     </div>
     <script src="{swagger_js_url}"></script>
-    <style type="text/css">
-    div[id^="operations-private"]{{display:none}} #operations-tag-private{{display:none}}
-    </style>
     <!-- `SwaggerUIBundle` is now available on the page -->
     <script>
     const ui = SwaggerUIBundle({{